From c6dfd896304cb4e36c4020f4704d9537fd3e8e9f Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 16:39:00 +0200 Subject: Filters: update tests to the new schema --- pydis_site/apps/api/tests/test_filterlists.py | 122 ----------- pydis_site/apps/api/tests/test_filters.py | 284 ++++++++++++++++++++++++++ pydis_site/apps/api/tests/test_models.py | 14 ++ 3 files changed, 298 insertions(+), 122 deletions(-) delete mode 100644 pydis_site/apps/api/tests/test_filterlists.py create mode 100644 pydis_site/apps/api/tests/test_filters.py (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py deleted file mode 100644 index 5a5bca60..00000000 --- a/pydis_site/apps/api/tests/test_filterlists.py +++ /dev/null @@ -1,122 +0,0 @@ -from django.urls import reverse - -from pydis_site.apps.api.models import FilterList -from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase - -URL = reverse('api:bot:filterlist-list') -JPEG_ALLOWLIST = { - "type": 'FILE_FORMAT', - "allowed": True, - "content": ".jpeg", -} -PNG_ALLOWLIST = { - "type": 'FILE_FORMAT', - "allowed": True, - "content": ".png", -} - - -class UnauthenticatedTests(AuthenticatedAPITestCase): - def setUp(self): - super().setUp() - self.client.force_authenticate(user=None) - - def test_cannot_read_allowedlist_list(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 401) - - -class EmptyDatabaseTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - - def test_returns_empty_object(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), []) - - -class FetchTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - cls.jpeg_format = FilterList.objects.create(**JPEG_ALLOWLIST) - cls.png_format = FilterList.objects.create(**PNG_ALLOWLIST) - - def test_returns_name_in_list(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()[0]["content"], self.jpeg_format.content) - self.assertEqual(response.json()[1]["content"], self.png_format.content) - - def test_returns_single_item_by_id(self): - response = self.client.get(f'{URL}/{self.jpeg_format.id}') - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json().get("content"), self.jpeg_format.content) - - def test_returns_filter_list_types(self): - response = self.client.get(f'{URL}/get-types') - - self.assertEqual(response.status_code, 200) - for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices): - self.assertEquals(api_type[0], model_type[0]) - self.assertEquals(api_type[1], model_type[1]) - - -class CreationTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - - def test_returns_400_for_missing_params(self): - no_type_json = { - "allowed": True, - "content": ".jpeg" - } - no_allowed_json = { - "type": "FILE_FORMAT", - "content": ".jpeg" - } - no_content_json = { - "allowed": True, - "type": "FILE_FORMAT" - } - cases = [{}, no_type_json, no_allowed_json, no_content_json] - - for case in cases: - with self.subTest(case=case): - response = self.client.post(URL, data=case) - self.assertEqual(response.status_code, 400) - - def test_returns_201_for_successful_creation(self): - response = self.client.post(URL, 
data=JPEG_ALLOWLIST) - self.assertEqual(response.status_code, 201) - - def test_returns_400_for_duplicate_creation(self): - self.client.post(URL, data=JPEG_ALLOWLIST) - response = self.client.post(URL, data=JPEG_ALLOWLIST) - self.assertEqual(response.status_code, 400) - - -class DeletionTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - cls.jpeg_format = FilterList.objects.create(**JPEG_ALLOWLIST) - cls.png_format = FilterList.objects.create(**PNG_ALLOWLIST) - - def test_deleting_unknown_id_returns_404(self): - response = self.client.delete(f"{URL}/200") - self.assertEqual(response.status_code, 404) - - def test_deleting_known_id_returns_204(self): - response = self.client.delete(f"{URL}/{self.jpeg_format.id}") - self.assertEqual(response.status_code, 204) - - response = self.client.get(f"{URL}/{self.jpeg_format.id}") - self.assertNotIn(self.png_format.content, response.json()) diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py new file mode 100644 index 00000000..de78ecfd --- /dev/null +++ b/pydis_site/apps/api/tests/test_filters.py @@ -0,0 +1,284 @@ +import contextlib +from dataclasses import dataclass +from typing import Any, Dict, Tuple, Type + +from django.db.models import Model +from django_hosts import reverse + +from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order + FilterList, + FilterSettings, + FilterAction, + ChannelRange, + Filter, + FilterOverride +) +from pydis_site.apps.api.tests.base import APISubdomainTestCase + + +@dataclass() +class TestSequence: + model: Type[Model] + route: str + object: Dict[str, Any] + ignored_fields: Tuple[str] = () + + def url(self, detail: bool = False) -> str: + return reverse(f'bot:{self.route}-{"detail" if detail else "list"}', host='api') + + +FK_FIELDS: Dict[Type[Model], Tuple[str]] = { + FilterList: ("default_settings",), + FilterSettings: ("default_action", "default_range"), + FilterAction: (), + ChannelRange: (), + Filter: (), + FilterOverride: ("filter_action", "filter_range") +} + + +def get_test_sequences() -> Dict[str, TestSequence]: + return { + "filter_list": TestSequence( + FilterList, + "filterlist", + { + "name": "testname", + "list_type": 0, + "default_settings": FilterSettings( + ping_type=[], + filter_dm=False, + dm_ping_type=[], + delete_messages=False, + bypass_roles=[], + enabled=False, + default_action=FilterAction( + user_dm=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + default_range=ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_category=[], + default=False + ) + ) + }, + ignored_fields=("filters",) + ), + "filter_settings": TestSequence( + FilterSettings, + "filtersettings", + { + "ping_type": ["onduty"], + "filter_dm": True, + "dm_ping_type": ["123456"], + "delete_messages": True, + "bypass_roles": [123456], + "enabled": True, + "default_action": FilterAction( + user_dm=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + "default_range": ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_category=[], + default=False + ) + } + ), + "filter_action": TestSequence( + FilterAction, + "filteraction", + { + "user_dm": "This is a DM message.", + "infraction_type": "Mute", + "infraction_reason": "Too long beard", + "infraction_duration": "1 02:03:00" + } + ), + "channel_range": TestSequence( + 
ChannelRange, + "channelrange", + { + "disallowed_channels": [1234], + "disallowed_categories": [5678], + "allowed_channels": [9101], + "allowed_category": [1121], + "default": True + } + ), + "filter": TestSequence( + Filter, + "filter", + { + "content": "bad word", + "description": "This is a really bad word.", + "additional_field": None, + "override": None + } + ), + "filter_override": TestSequence( + FilterOverride, + "filteroverride", + { + "ping_type": ["everyone"], + "filter_dm": False, + "dm_ping_type": ["here"], + "delete_messages": False, + "bypass_roles": [9876], + "enabled": True, + "filter_action": None, + "filter_range": None + } + ) + } + + +def save_nested_objects(object_: Model, save_root: bool = True) -> None: + for field in FK_FIELDS[object_.__class__]: + value = getattr(object_, field) + + if value is not None: + save_nested_objects(value) + + if save_root: + object_.save() + + +def clean_test_json(json: dict) -> dict: + for key, value in json.items(): + if isinstance(value, Model): + json[key] = value.id + + return json + + +def clean_api_json(json: dict, sequence: TestSequence) -> dict: + for field in sequence.ignored_fields + ("id",): + with contextlib.suppress(KeyError): + del json[field] + + return json + + +class GenericFilterTest(APISubdomainTestCase): + def test_cannot_read_unauthenticated(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + self.client.force_authenticate(user=None) + + response = self.client.get(sequence.url()) + self.assertEqual(response.status_code, 401) + + def test_empty_database(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.get(sequence.url()) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), []) + + def test_fetch(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + save_nested_objects(sequence.model(**sequence.object)) + + response = self.client.get(sequence.url()) + self.assertDictEqual( + clean_test_json(sequence.object), + clean_api_json(response.json()[0], sequence) + ) + + def test_fetch_by_id(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + saved = sequence.model(**sequence.object) + save_nested_objects(saved) + + response = self.client.get(f"{sequence.url()}/{saved.id}") + self.assertDictEqual( + clean_test_json(sequence.object), + clean_api_json(response.json(), sequence) + ) + + def test_fetch_non_existing(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.get(f"{sequence.url()}/42") + self.assertEqual(response.status_code, 404) + self.assertDictEqual(response.json(), {'detail': 'Not found.'}) + + def test_creation(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + save_nested_objects(sequence.model(**sequence.object), False) + data = clean_test_json(sequence.object.copy()) + response = self.client.post(sequence.url(), data=data) + + self.assertEqual(response.status_code, 201) + self.assertDictEqual( + clean_api_json(response.json(), sequence), + clean_test_json(sequence.object) + ) + + def test_creation_missing_field(self) -> None: + 
for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + save_nested_objects(sequence.model(**sequence.object), False) + data = clean_test_json(sequence.object.copy()) + + for field in sequence.model._meta.get_fields(): + with self.subTest(field=field): + if field.null or field.name in sequence.ignored_fields + ("id",): + continue + + test_data = data.copy() + del test_data[field.name] + + response = self.client.post(sequence.url(), data=test_data) + self.assertEqual(response.status_code, 400) + + def test_deletion(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + saved = sequence.model(**sequence.object) + save_nested_objects(saved) + + response = self.client.delete(f"{sequence.url()}/{saved.id}") + self.assertEqual(response.status_code, 204) + + def test_deletion_non_existing(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.delete(f"{sequence.url()}/42") + self.assertEqual(response.status_code, 404) + + def test_reject_invalid_ping(self) -> None: + url = reverse('bot:filteroverride-list', host='api') + data = { + "ping_type": ["invalid"] + } + + response = self.client.post(url, data=data) + + self.assertEqual(response.status_code, 400) + self.assertDictEqual(response.json(), {'ping_type': ["'invalid' isn't a valid ping type."]}) diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 5c9ddea4..c8f4e1b1 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -7,6 +7,9 @@ from django.utils import timezone from pydis_site.apps.api.models import ( DeletedMessage, DocumentationLink, + Filter, + FilterList, + FilterSettings, Infraction, Message, MessageDeletionContext, @@ -106,6 +109,17 @@ class StringDunderMethodTests(SimpleTestCase): DocumentationLink( 'test', 'http://example.com', 'http://example.com' ), + FilterList( + name="forbidden_duckies", + list_type=0, + default_settings=FilterSettings() + ), + Filter( + content="ducky_nsfw", + description="This ducky is totally inappropriate!", + additional_field=None, + override=None + ), OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, -- cgit v1.2.3 From 71a5e0d854c587ca2ae70aaec80f1110ea8800e5 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 16:41:18 +0200 Subject: Filters: allowed_category -> allowed_categories --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/serializers.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 6 +++--- pydis_site/apps/api/viewsets/bot/filters.py | 8 ++++---- 5 files changed, 11 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index de75e677..eb55e329 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -33,7 +33,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=True ) default_range.save() @@ -85,7 +85,7 @@ class Migration(migrations.Migration): ('disallowed_channels', 
django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_category', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('default', models.BooleanField()), ], ), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 869f947c..c2f776d3 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -142,7 +142,7 @@ class ChannelRange(models.Model): disallowed_channels = ArrayField(models.IntegerField()) disallowed_categories = ArrayField(models.IntegerField()) allowed_channels = ArrayField(models.IntegerField()) - allowed_category = ArrayField(models.IntegerField()) + allowed_categories = ArrayField(models.IntegerField()) default = models.BooleanField() diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 306dccb3..54acf366 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -180,7 +180,7 @@ class FilterChannelRangeSerializer(ModelSerializer): 'disallowed_channels', 'disallowed_categories', 'allowed_channels', - 'allowed_category', + 'allowed_categories', 'default' ) diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index de78ecfd..f38f3659 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -62,7 +62,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=False ) ) @@ -89,7 +89,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=False ) } @@ -111,7 +111,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "disallowed_channels": [1234], "disallowed_categories": [5678], "allowed_channels": [9101], - "allowed_category": [1121], + "allowed_categories": [1121], "default": True } ), diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index fea53265..e290fc65 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -350,7 +350,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... }, ... ... @@ -369,7 +369,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } @@ -385,7 +385,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } @@ -402,7 +402,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... 
"disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } -- cgit v1.2.3 From 1095346a1f86e43d5d5c39045a54354d1290fe0e Mon Sep 17 00:00:00 2001 From: kosayoda Date: Sun, 11 Jul 2021 16:35:41 +0800 Subject: Improve name of dm sent to triggered user. --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/serializers.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 6 +++--- pydis_site/apps/api/viewsets/bot/filters.py | 8 ++++---- 5 files changed, 11 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index eb55e329..8580033a 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -23,7 +23,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: objects = filter_list_old.objects.filter(type=name) default_action = filter_action.objects.create( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -102,7 +102,7 @@ class Migration(migrations.Migration): name='FilterAction', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user_dm', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 6f35bfb0..b5c80bda 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -106,7 +106,7 @@ class FilterSettings(models.Model): class FilterAction(models.Model): """The action to take when a filter is triggered.""" - user_dm = models.CharField( + dm_content = models.CharField( max_length=1000, null=True, help_text="The DM to send to a user triggering this filter." 
diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 54acf366..584d1f22 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -165,7 +165,7 @@ class FilterActionSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterAction - fields = ('id', 'user_dm', 'infraction_type', 'infraction_reason', 'infraction_duration') + fields = ('id', 'dm_content', 'infraction_type', 'infraction_reason', 'infraction_duration') class FilterChannelRangeSerializer(ModelSerializer): diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f38f3659..2df671e0 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -53,7 +53,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: bypass_roles=[], enabled=False, default_action=FilterAction( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -80,7 +80,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "bypass_roles": [123456], "enabled": True, "default_action": FilterAction( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -98,7 +98,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: FilterAction, "filteraction", { - "user_dm": "This is a DM message.", + "dm_content": "This is a DM message.", "infraction_type": "Mute", "infraction_reason": "Too long beard", "infraction_duration": "1 02:03:00" diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index e290fc65..9553fcac 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -264,7 +264,7 @@ class FilterActionViewSet(ModelViewSet): >>> [ ... { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -282,7 +282,7 @@ class FilterActionViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -297,7 +297,7 @@ class FilterActionViewSet(ModelViewSet): #### Request body >>> { - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -313,7 +313,7 @@ class FilterActionViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" -- cgit v1.2.3 From b082de6662e1b57f6831d219b44d95f93ed8a884 Mon Sep 17 00:00:00 2001 From: kosayoda Date: Fri, 23 Jul 2021 18:58:35 +0800 Subject: Correct Filter-FilterList relationship. Instead of a many-many relationship, one filterlist has multiple filters. Nested serialization is read-only by default, so not all CRUD methods are implemented yet for the FilterList viewset. 
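
For reference, DRF keeps declared nested serializers read-only unless
`create`/`update` are overridden by hand. A rough sketch of what writable
nested filters would take, following DRF's documented pattern (this assumes
the nested `FilterSerializer` omits its `filter_list` field, and is not what
this commit implements):

    from rest_framework.serializers import ModelSerializer

    class FilterListSerializer(ModelSerializer):
        filters = FilterSerializer(many=True)

        def create(self, validated_data: dict) -> FilterList:
            # Writable nesting means creating the child rows explicitly.
            filters = validated_data.pop("filters", [])
            filter_list = FilterList.objects.create(**validated_data)
            for filter_data in filters:
                Filter.objects.create(filter_list=filter_list, **filter_data)
            return filter_list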
---
 .../apps/api/migrations/0070_new_filter_schema.py | 11 ++--
 pydis_site/apps/api/models/bot/filters.py         |  6 +-
 pydis_site/apps/api/serializers.py                | 22 ++++----
 pydis_site/apps/api/tests/test_filters.py         | 29 +++++++++-
 pydis_site/apps/api/viewsets/bot/filters.py       | 64 +++++++---------------
 5 files changed, 68 insertions(+), 64 deletions(-)

diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py
index 8580033a..237ce7d7 100644
--- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py
+++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py
@@ -54,17 +54,14 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
             list_type=1 if type_ == "ALLOW" else 0
         )
-        new_objects = []
         for object_ in objects:
             new_object = filter_.objects.create(
                 content=object_.content,
+                filter_list=list_,
                 description=object_.comment or "",
                 additional_field=None,
                 override=None
             )
             new_object.save()
-            new_objects.append(new_object)
-
-        list_.filters.add(*new_objects)


 class Migration(migrations.Migration):
@@ -143,7 +140,6 @@ class Migration(migrations.Migration):
                 ('name', models.CharField(help_text='The unique name of this list.', max_length=50)),
                 ('list_type', models.IntegerField(choices=[], help_text='Whether this list is an allowlist or denylist')),
                 ('default_settings', models.ForeignKey(help_text='Default parameters of this list.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterSettings')),
-                ('filters', models.ManyToManyField(help_text='The content of this list.', to='api.Filter', default=[])),
             ],
         ),
         migrations.AddField(
             model_name='filter',
             name='override',
             field=models.ForeignKey(help_text='Override the default settings.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='api.FilterOverride'),
         ),
+        migrations.AddField(
+            model_name='filter',
+            name='filter_list',
+            field=models.ForeignKey(help_text='The filter list containing this filter.', on_delete=django.db.models.deletion.CASCADE, related_name='filters', to='api.FilterList'),
+        ),
         migrations.AddConstraint(
             model_name='filterlist',
             constraint=models.UniqueConstraint(fields=('name', 'list_type'), name='unique_name_type'),
         ),
diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py
index b5c80bda..99d6d5e4 100644
--- a/pydis_site/apps/api/models/bot/filters.py
+++ b/pydis_site/apps/api/models/bot/filters.py
@@ -48,8 +48,6 @@ class FilterList(models.Model):
         choices=FilterListType.choices,
         help_text="Whether this list is an allowlist or denylist"
     )
-
-    filters = models.ManyToManyField("Filter", help_text="The content of this list.", default=[])
     default_settings = models.ForeignKey(
         "FilterSettings",
         models.CASCADE,
@@ -152,6 +150,10 @@ class Filter(models.Model):
     content = models.CharField(max_length=100, help_text="The definition of this filter.")
     description = models.CharField(max_length=200, help_text="Why this filter has been added.")
     additional_field = models.BooleanField(null=True, help_text="Implementation specific field.")
+    filter_list = models.ForeignKey(
+        FilterList, models.CASCADE, related_name="filters",
+        help_text="The filter list containing this filter."
+ ) override = models.ForeignKey( "FilterOverride", models.SET_NULL, diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 584d1f22..afcf4d55 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -117,9 +117,21 @@ class DocumentationLinkSerializer(ModelSerializer): fields = ('package', 'base_url', 'inventory_url') +class FilterSerializer(ModelSerializer): + """A class providing (de-)serialization of `Filter` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = Filter + fields = ('id', 'content', 'description', 'additional_field', 'filter_list', 'override') + + class FilterListSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterList` instances.""" + filters = FilterSerializer(many=True, read_only=True) + class Meta: """Metadata defined for the Django REST Framework.""" @@ -185,16 +197,6 @@ class FilterChannelRangeSerializer(ModelSerializer): ) -class FilterSerializer(ModelSerializer): - """A class providing (de-)serialization of `Filter` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = Filter - fields = ('id', 'content', 'description', 'additional_field', 'override') - - class FilterOverrideSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterOverride` instances.""" diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 2df671e0..f694053d 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -32,7 +32,7 @@ FK_FIELDS: Dict[Type[Model], Tuple[str]] = { FilterSettings: ("default_action", "default_range"), FilterAction: (), ChannelRange: (), - Filter: (), + Filter: ("filter_list",), FilterOverride: ("filter_action", "filter_range") } @@ -122,7 +122,32 @@ def get_test_sequences() -> Dict[str, TestSequence]: "content": "bad word", "description": "This is a really bad word.", "additional_field": None, - "override": None + "override": None, + "filter_list": FilterList( + name="testname", + list_type=0, + default_settings=FilterSettings( + ping_type=[], + filter_dm=False, + dm_ping_type=[], + delete_messages=False, + bypass_roles=[], + enabled=False, + default_action=FilterAction( + dm_content=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + default_range=ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_categories=[], + default=False + ) + ) + ) } ), "filter_override": TestSequence( diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 9553fcac..1b893f8c 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -20,7 +20,7 @@ from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the fil class FilterListViewSet(ModelViewSet): """ - View providing CRUD operations on lists of items allowed or denied by our bot. + View providing GET/DELETE on lists of items allowed or denied by our bot. ## Routes ### GET /bot/filter/filter_lists @@ -33,8 +33,14 @@ class FilterListViewSet(ModelViewSet): ... "name": "guild_invite", ... "list_type": 1, ... "filters": [ - ... 1, - ... 2, + ... { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1, + ... "filter_list": 1 + ... }, ... ... ... ], ... 
"default_settings": 1 @@ -55,8 +61,14 @@ class FilterListViewSet(ModelViewSet): ... "name": "guild_invite", ... "list_type": 1, ... "filters": [ - ... 1, - ... 2, + ... { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1, + ... "filter_list": 1 + ... }, ... ... ... ], ... "default_settings": 1 @@ -66,45 +78,6 @@ class FilterListViewSet(ModelViewSet): - 200: returned on success - 404: returned if the id was not found. - ### POST /bot/filter/filter_lists - Adds a single FilterList item to the database. - - #### Request body - >>> { - ... "name": "guild_invite", - ... "list_type": 1, - ... "filters": [ - ... 1, - ... 2, - ... ... - ... ], - ... "default_settings": 1 - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/filter_lists/ - Updates a specific FilterList item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "name": "guild_invite", - ... "list_type": 1, - ... "filters": [ - ... 1, - ... 2, - ... ... - ... ], - ... "default_settings": 1 - ... } - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - ### DELETE /bot/filter/filter_lists/ Deletes the FilterList item with the given `id`. @@ -437,7 +410,8 @@ class FilterViewSet(ModelViewSet): ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "override": 1, + ... "filter_list": 1 ... }, ... ... ... ] -- cgit v1.2.3 From 05e2bce1e82e422755396d1e6e489d6792ec0115 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 10 Mar 2022 20:56:47 +0200 Subject: Remove role validation Roles can be either IDs or names, so the current validation is not relevant anymore. Furthermore the ping fields can accept user IDs or names. 
--- .../apps/api/migrations/0079_new_filter_schema.py | 12 ++++---- pydis_site/apps/api/models/bot/filters.py | 32 ---------------------- pydis_site/apps/api/tests/test_filters.py | 11 -------- 3 files changed, 6 insertions(+), 49 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index b67740d2..053f9782 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -97,11 +97,11 @@ class Migration(migrations.Migration): ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), - ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), - ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), @@ -120,11 +120,11 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), - ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('guild_pings', 
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), - ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 4dbf1875..13b332d2 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -16,32 +16,6 @@ class FilterListType(models.IntegerChoices): DENY = 0 -# Valid special values in ping related fields -VALID_PINGS = ("everyone", "here", "moderators", "onduty", "admins") -VALID_BYPASS_ROLES = ("staff",) - - -def validate_ping_field(value_list: List[str]) -> None: - """Validate that the values are either a special value or a UID.""" - for value in value_list: - # Check if it is a special value - if value in VALID_PINGS: - continue - # Check if it is a UID - if value.isnumeric(): - continue - - raise ValidationError(f"{value!r} isn't a valid ping type.") - - -def validate_bypass_roles_field(value_list: List[str]) -> None: - """Validate that the vclues are either a special value or a Role ID.""" - for value in value_list: - if value.isnumeric() or value in VALID_BYPASS_ROLES: - continue - raise ValidationError(f"{value!r} isn't a valid (bypass) role.") - - class FilterSettingsMixin(models.Model): """Mixin for common settings of a filters and filter lists.""" @@ -86,14 +60,12 @@ class FilterList(FilterSettingsMixin): ) guild_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=False ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=False) dm_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", null=False ) @@ -104,7 +76,6 @@ class FilterList(FilterSettingsMixin): bypass_roles = ArrayField( models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", - validators=(validate_bypass_roles_field,), null=False ) enabled = models.BooleanField( @@ -149,14 +120,12 @@ class Filter(FilterSettingsMixin): ) guild_pings = 
ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=True ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) dm_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", null=True ) @@ -167,7 +136,6 @@ class Filter(FilterSettingsMixin): bypass_roles = ArrayField( models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", - validators=(validate_bypass_roles_field,), null=True ) enabled = models.BooleanField( diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f694053d..5f40c6f9 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -296,14 +296,3 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.delete(f"{sequence.url()}/42") self.assertEqual(response.status_code, 404) - - def test_reject_invalid_ping(self) -> None: - url = reverse('bot:filteroverride-list', host='api') - data = { - "ping_type": ["invalid"] - } - - response = self.client.post(url, data=data) - - self.assertEqual(response.status_code, 400) - self.assertDictEqual(response.json(), {'ping_type': ["'invalid' isn't a valid ping type."]}) -- cgit v1.2.3 From cb52e60a631041a08edb213f41cca8befba4bf7e Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 16 Mar 2022 23:17:46 +0000 Subject: Add tests for custom BumpedThread impl --- pydis_site/apps/api/tests/test_bumped_threads.py | 63 ++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 pydis_site/apps/api/tests/test_bumped_threads.py (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py new file mode 100644 index 00000000..316e3f0b --- /dev/null +++ b/pydis_site/apps/api/tests/test_bumped_threads.py @@ -0,0 +1,63 @@ +from django.urls import reverse + +from .base import AuthenticatedAPITestCase +from ..models import BumpedThread + + +class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase): + def setUp(self): + super().setUp() + self.client.force_authenticate(user=None) + + def test_detail_lookup_returns_401(self): + url = reverse('api:bot:bumpedthread-detail', args=(1,)) + response = self.client.get(url) + + self.assertEqual(response.status_code, 401) + + def test_list_returns_401(self): + url = reverse('api:bot:bumpedthread-list') + response = self.client.get(url) + + self.assertEqual(response.status_code, 401) + + def test_create_returns_401(self): + url = reverse('api:bot:bumpedthread-list') + response = self.client.post(url, {"thread_id": 3}) + + self.assertEqual(response.status_code, 401) + + def test_delete_returns_401(self): + url = reverse('api:bot:bumpedthread-detail', args=(1,)) + response = self.client.delete(url) + + self.assertEqual(response.status_code, 401) + + +class BumpedThreadAPITests(AuthenticatedAPITestCase): + @classmethod + def setUpTestData(cls): + cls.thread1 = BumpedThread.objects.create( + thread_id=1234, + ) + + def test_returns_bumped_threads_as_flat_list(self): + url = reverse('api:bot:bumpedthread-list') + + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), [1234]) + + def test_returns_204_for_existing_data(self): + url = reverse('api:bot:bumpedthread-detail', args=(1234,)) + + response = 
self.client.get(url)
+        self.assertEqual(response.status_code, 204)
+        self.assertEqual(response.content, b"")
+
+    def test_returns_404_for_non_existing_data(self):
+        url = reverse('api:bot:bumpedthread-detail', args=(42,))
+
+        response = self.client.get(url)
+        self.assertEqual(response.status_code, 404)
+        self.assertEqual(response.json(), {"detail": "Not found."})
--
cgit v1.2.3


From d4f717ec186ffeedf7bdeb4991868160f1540b83 Mon Sep 17 00:00:00 2001
From: Chris Lovering
Date: Thu, 30 Jun 2022 10:48:33 +0100
Subject: Remove embed validators for deleted messages

These caused more harm than they were worth, as every time Discord updated a
behaviour of an embed we would get errors and need to update the validation.
Instead we should just accept whatever Discord gives us as correct.
---
 .../api/migrations/0083_remove_embed_validation.py |  19 ++
 pydis_site/apps/api/models/bot/message.py          |   5 +-
 pydis_site/apps/api/models/utils.py                | 172 ----------------
 pydis_site/apps/api/tests/test_validators.py       | 229 ---------------------
 4 files changed, 20 insertions(+), 405 deletions(-)
 create mode 100644 pydis_site/apps/api/migrations/0083_remove_embed_validation.py
 delete mode 100644 pydis_site/apps/api/models/utils.py

diff --git a/pydis_site/apps/api/migrations/0083_remove_embed_validation.py b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py
new file mode 100644
index 00000000..e835bb66
--- /dev/null
+++ b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py
@@ -0,0 +1,19 @@
+# Generated by Django 3.1.14 on 2022-06-30 09:41
+
+import django.contrib.postgres.fields
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('api', '0082_otn_allow_big_solidus'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='deletedmessage',
+            name='embeds',
+            field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(), blank=True, help_text='Embeds attached to this message.', size=None),
+        ),
+    ]
diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py
index bab3368d..bfa54721 100644
--- a/pydis_site/apps/api/models/bot/message.py
+++ b/pydis_site/apps/api/models/bot/message.py
@@ -7,7 +7,6 @@ from django.utils import timezone

 from pydis_site.apps.api.models.bot.user import User
 from pydis_site.apps.api.models.mixins import ModelReprMixin
-from pydis_site.apps.api.models.utils import validate_embed


 class Message(ModelReprMixin, models.Model):
@@ -48,9 +47,7 @@ class Message(ModelReprMixin, models.Model):
         blank=True
     )
     embeds = pgfields.ArrayField(
-        models.JSONField(
-            validators=(validate_embed,)
-        ),
+        models.JSONField(),
         blank=True,
         help_text="Embeds attached to this message."
) diff --git a/pydis_site/apps/api/models/utils.py b/pydis_site/apps/api/models/utils.py deleted file mode 100644 index 859394d2..00000000 --- a/pydis_site/apps/api/models/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -from collections.abc import Mapping -from typing import Any, Dict - -from django.core.exceptions import ValidationError -from django.core.validators import MaxLengthValidator, MinLengthValidator - - -def is_bool_validator(value: Any) -> None: - """Validates if a given value is of type bool.""" - if not isinstance(value, bool): - raise ValidationError(f"This field must be of type bool, not {type(value)}.") - - -def validate_embed_fields(fields: dict) -> None: - """Raises a ValidationError if any of the given embed fields is invalid.""" - field_validators = { - 'name': (MaxLengthValidator(limit_value=256),), - 'value': (MaxLengthValidator(limit_value=1024),), - 'inline': (is_bool_validator,), - } - - required_fields = ('name', 'value') - - for field in fields: - if not isinstance(field, Mapping): - raise ValidationError("Embed fields must be a mapping.") - - if not all(required_field in field for required_field in required_fields): - raise ValidationError( - f"Embed fields must contain the following fields: {', '.join(required_fields)}." - ) - - for field_name, value in field.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed field field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_footer(footer: Dict[str, str]) -> None: - """Raises a ValidationError if the given footer is invalid.""" - field_validators = { - 'text': ( - MinLengthValidator( - limit_value=1, - message="Footer text must not be empty." - ), - MaxLengthValidator(limit_value=2048) - ), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(footer, Mapping): - raise ValidationError("Embed footer must be a mapping.") - - for field_name, value in footer.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed footer field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_author(author: Any) -> None: - """Raises a ValidationError if the given author is invalid.""" - field_validators = { - 'name': ( - MinLengthValidator( - limit_value=1, - message="Embed author name must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'url': (), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(author, Mapping): - raise ValidationError("Embed author must be a mapping.") - - for field_name, value in author.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed author field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed(embed: Any) -> None: - """ - Validate a JSON document containing an embed as possible to send on Discord. - - This attempts to rebuild the validation used by Discord - as well as possible by checking for various embed limits so we can - ensure that any embed we store here will also be accepted as a - valid embed by the Discord API. - - Using this directly is possible, although not intended - you usually - stick this onto the `validators` keyword argument of model fields. - - Example: - - >>> from django.db import models - >>> from pydis_site.apps.api.models.utils import validate_embed - >>> class MyMessage(models.Model): - ... embed = models.JSONField( - ... validators=( - ... 
validate_embed, - ... ) - ... ) - ... # ... - ... - - Args: - embed (Any): - A dictionary describing the contents of this embed. - See the official documentation for a full reference - of accepted keys by this dictionary: - https://discordapp.com/developers/docs/resources/channel#embed-object - - Raises: - ValidationError: - In case the given embed is deemed invalid, a `ValidationError` - is raised which in turn will allow Django to display errors - as appropriate. - """ - all_keys = { - 'title', 'type', 'description', 'url', 'timestamp', - 'color', 'footer', 'image', 'thumbnail', 'video', - 'provider', 'author', 'fields' - } - one_required_of = {'description', 'fields', 'image', 'title', 'video'} - field_validators = { - 'title': ( - MinLengthValidator( - limit_value=1, - message="Embed title must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'description': (MaxLengthValidator(limit_value=4096),), - 'fields': ( - MaxLengthValidator(limit_value=25), - validate_embed_fields - ), - 'footer': (validate_embed_footer,), - 'author': (validate_embed_author,) - } - - if not embed: - raise ValidationError("Tag embed must not be empty.") - - elif not isinstance(embed, Mapping): - raise ValidationError("Tag embed must be a mapping.") - - elif not any(field in embed for field in one_required_of): - raise ValidationError(f"Tag embed must contain one of the fields {one_required_of}.") - - for required_key in one_required_of: - if required_key in embed and not embed[required_key]: - raise ValidationError(f"Key {required_key!r} must not be empty.") - - for field_name, value in embed.items(): - if field_name not in all_keys: - raise ValidationError(f"Unknown field name: {field_name!r}") - - if field_name in field_validators: - for validator in field_validators[field_name]: - validator(value) diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 551cc2aa..8c46fcbc 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -5,7 +5,6 @@ from django.test import TestCase from ..models.bot.bot_setting import validate_bot_setting_name from ..models.bot.offensive_message import future_date_validator -from ..models.utils import validate_embed REQUIRED_KEYS = ( @@ -22,234 +21,6 @@ class BotSettingValidatorTests(TestCase): validate_bot_setting_name('bad name') -class TagEmbedValidatorTests(TestCase): - def test_rejects_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed('non-empty non-mapping') - - def test_rejects_missing_required_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'unknown': "key" - }) - - def test_rejects_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'provider': "??", - 'title': "" - }) - - def test_rejects_empty_required_key(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': '' - }) - - def test_rejects_list_as_embed(self): - with self.assertRaises(ValidationError): - validate_embed([]) - - def test_rejects_required_keys_and_unknown_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "the duck walked up to the lemonade stand", - 'and': "he said to the man running the stand" - }) - - def test_rejects_too_long_title(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': 'a' * 257 - }) - - def test_rejects_too_many_fields(self): - with self.assertRaises(ValidationError): - 
validate_embed({ - 'fields': [{} for _ in range(26)] - }) - - def test_rejects_too_long_description(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'description': 'd' * 4097 - }) - - def test_allows_valid_embed(self): - validate_embed({ - 'title': "My embed", - 'description': "look at my embed, my embed is amazing" - }) - - def test_allows_unvalidated_fields(self): - validate_embed({ - 'title': "My embed", - 'provider': "what am I??" - }) - - def test_rejects_fields_as_list_of_non_mappings(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': ['abc'] - }) - - def test_rejects_fields_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'what': "is this field" - } - ] - }) - - def test_rejects_fields_with_too_long_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "a" * 257 - } - ] - }) - - def test_rejects_one_correct_one_incorrect_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME" - }, - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'oh': "what is this key?" - } - ] - }) - - def test_rejects_missing_required_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'inline': True, - } - ] - }) - - def test_rejects_invalid_inline_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'inline': "Totally not a boolean", - } - ] - }) - - def test_allows_valid_fields(self): - validate_embed({ - 'fields': [ - { - 'name': "valid", - 'value': "field", - }, - { - 'name': "valid", - 'value': "field", - 'inline': False, - }, - { - 'name': "valid", - 'value': "field", - 'inline': True, - }, - ] - }) - - def test_rejects_footer_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': [] - }) - - def test_rejects_footer_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'duck': "quack" - } - }) - - def test_rejects_footer_with_empty_text(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "" - } - }) - - def test_allows_footer_with_proper_values(self): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "django good" - } - }) - - def test_rejects_author_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': [] - }) - - def test_rejects_author_with_unknown_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'field': "that is unknown" - } - }) - - def test_rejects_author_with_empty_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'name': "" - } - }) - - def test_rejects_author_with_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - # Relies on "dictionary insertion order remembering" (D.I.O.R.) 
behaviour - 'url': "bobswebsite.com", - 'name': "" - } - }) - - def test_allows_author_with_proper_values(self): - validate_embed({ - 'title': "whatever", - 'author': { - 'name': "Bob" - } - }) - - class OffensiveMessageValidatorsTests(TestCase): def test_accepts_future_date(self): future_date_validator(datetime(3000, 1, 1, tzinfo=timezone.utc)) -- cgit v1.2.3 From 7b40cd8143fea0beb195c6940bf2356970fc6958 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Mon, 11 Jul 2022 04:27:16 +0400 Subject: Drop Migration Tests The migration test suite was not really used, and it doesn't entirely make sense to test a constant unchanging process either. Its behavior is also very coupled with django's internals, and locks us into the current version and setup. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/migrations/__init__.py | 1 - pydis_site/apps/api/tests/migrations/base.py | 102 ----- .../migrations/test_active_infraction_migration.py | 496 --------------------- pydis_site/apps/api/tests/migrations/test_base.py | 135 ------ 4 files changed, 734 deletions(-) delete mode 100644 pydis_site/apps/api/tests/migrations/__init__.py delete mode 100644 pydis_site/apps/api/tests/migrations/base.py delete mode 100644 pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py delete mode 100644 pydis_site/apps/api/tests/migrations/test_base.py (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/migrations/__init__.py b/pydis_site/apps/api/tests/migrations/__init__.py deleted file mode 100644 index 38e42ffc..00000000 --- a/pydis_site/apps/api/tests/migrations/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""This submodule contains tests for functions used in data migrations.""" diff --git a/pydis_site/apps/api/tests/migrations/base.py b/pydis_site/apps/api/tests/migrations/base.py deleted file mode 100644 index 0c0a5bd0..00000000 --- a/pydis_site/apps/api/tests/migrations/base.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Includes utilities for testing migrations.""" -from django.db import connection -from django.db.migrations.executor import MigrationExecutor -from django.test import TestCase - - -class MigrationsTestCase(TestCase): - """ - A `TestCase` subclass to test migration files. - - To be able to properly test a migration, we will need to inject data into the test database - before the migrations we want to test are applied, but after the older migrations have been - applied. This makes sure that we are testing "as if" we were actually applying this migration - to a database in the state it was in before introducing the new migration. - - To set up a MigrationsTestCase, create a subclass of this class and set the following - class-level attributes: - - - app: The name of the app that contains the migrations (e.g., `'api'`) - - migration_prior: The name* of the last migration file before the migrations you want to test - - migration_target: The name* of the last migration file we want to test - - *) Specify the file names without a path or the `.py` file extension. - - Additionally, overwrite the `setUpMigrationData` in the subclass to inject data into the - database before the migrations we want to test are applied. Please read the docstring of the - method for more information. An optional hook, `setUpPostMigrationData` is also provided. - """ - - # These class-level attributes should be set in classes that inherit from this base class. 
-    app = None
-    migration_prior = None
-    migration_target = None
-
-    @classmethod
-    def setUpTestData(cls):
-        """
-        Injects data into the test database prior to the migration we're trying to test.
-
-        This class method reverts the test database back to the state of the last migration file
-        prior to the migrations we want to test. It will then allow the user to inject data into
-        the test database by calling the `setUpMigrationData` hook. After the data has been
-        injected, it will apply the migrations we want to test and call the
-        `setUpPostMigrationData` hook. The user can now test if the migration correctly migrated
-        the injected test data.
-        """
-        if not cls.app:
-            raise ValueError("The `app` attribute was not set.")
-
-        if not cls.migration_prior or not cls.migration_target:
-            raise ValueError("Both ` migration_prior` and `migration_target` need to be set.")
-
-        cls.migrate_from = [(cls.app, cls.migration_prior)]
-        cls.migrate_to = [(cls.app, cls.migration_target)]
-
-        # Revert to the database state prior to the migrations we want to test
-        executor = MigrationExecutor(connection)
-        executor.migrate(cls.migrate_from)
-
-        # Call the data injection hook with the current state of the project
-        old_apps = executor.loader.project_state(cls.migrate_from).apps
-        cls.setUpMigrationData(old_apps)
-
-        # Run the migrations we want to test
-        executor = MigrationExecutor(connection)
-        executor.loader.build_graph()
-        executor.migrate(cls.migrate_to)
-
-        # Save the project state so we're able to work with the correct model states
-        cls.apps = executor.loader.project_state(cls.migrate_to).apps
-
-        # Call `setUpPostMigrationData` to potentially set up post migration data used in testing
-        cls.setUpPostMigrationData(cls.apps)
-
-    @classmethod
-    def setUpMigrationData(cls, apps):
-        """
-        Override this method to inject data into the test database before the migration is applied.
-
-        This method will be called after setting up the database according to the migrations that
-        come before the migration(s) we are trying to test, but before the to-be-tested
-        migration(s) are applied. This allows us to simulate a database state just prior to the
-        migrations we are trying to test.
-
-        To make sure we're creating objects according to the state the models were in at this
-        point in the migration history, use `apps.get_model(app_name: str, model_name: str)` to
-        get the appropriate model, e.g.:
-
-        >>> Infraction = apps.get_model('api', 'Infraction')
-        """
-        pass
-
-    @classmethod
-    def setUpPostMigrationData(cls, apps):
-        """
-        Set up additional test data after the target migration has been applied.
- - Use `apps.get_model(app_name: str, model_name: str)` to get the correct instances of the - model classes: - - >>> Infraction = apps.get_model('api', 'Infraction') - """ - pass diff --git a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py b/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py deleted file mode 100644 index 8dc29b34..00000000 --- a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py +++ /dev/null @@ -1,496 +0,0 @@ -"""Tests for the data migration in `filename`.""" -import logging -from collections import ChainMap, namedtuple -from datetime import timedelta -from itertools import count -from typing import Dict, Iterable, Type, Union - -from django.db.models import Q -from django.forms.models import model_to_dict -from django.utils import timezone - -from pydis_site.apps.api.models import Infraction, User -from .base import MigrationsTestCase - -log = logging.getLogger(__name__) -log.setLevel(logging.DEBUG) - - -InfractionHistory = namedtuple('InfractionHistory', ("user_id", "infraction_history")) - - -class InfractionFactory: - """Factory that creates infractions for a User instance.""" - - infraction_id = count(1) - user_id = count(1) - default_values = { - 'active': True, - 'expires_at': None, - 'hidden': False, - } - - @classmethod - def create( - cls, - actor: User, - infractions: Iterable[Dict[str, Union[str, int, bool]]], - infraction_model: Type[Infraction] = Infraction, - user_model: Type[User] = User, - ) -> InfractionHistory: - """ - Creates `infractions` for the `user` with the given `actor`. - - The `infractions` dictionary can contain the following fields: - - `type` (required) - - `active` (default: True) - - `expires_at` (default: None; i.e, permanent) - - `hidden` (default: False). - - The parameters `infraction_model` and `user_model` can be used to pass in an instance of - both model classes from a different migration/project state. 
- """ - user_id = next(cls.user_id) - user = user_model.objects.create( - id=user_id, - name=f"Infracted user {user_id}", - discriminator=user_id, - avatar_hash=None, - ) - infraction_history = [] - - for infraction in infractions: - infraction = dict(infraction) - infraction["id"] = next(cls.infraction_id) - infraction = ChainMap(infraction, cls.default_values) - new_infraction = infraction_model.objects.create( - user=user, - actor=actor, - type=infraction["type"], - reason=f"`{infraction['type']}` infraction (ID: {infraction['id']} of {user}", - active=infraction['active'], - hidden=infraction['hidden'], - expires_at=infraction['expires_at'], - ) - infraction_history.append(new_infraction) - - return InfractionHistory(user_id=user_id, infraction_history=infraction_history) - - -class InfractionFactoryTests(MigrationsTestCase): - """Tests for the InfractionFactory.""" - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0046_reminder_jump_url" - - @classmethod - def setUpPostMigrationData(cls, apps): - """Create a default actor for all infractions.""" - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.actor = cls.user_model.objects.create( - id=9999, - name="Unknown Moderator", - discriminator=1040, - avatar_hash=None, - ) - - def test_infraction_factory_total_count(self): - """Does the test database hold as many infractions as we tried to create?""" - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': False}, - {'type': 'ban', 'active': True, 'hidden': False}, - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - database_count = Infraction.objects.all().count() - self.assertEqual(3, database_count) - - def test_infraction_factory_multiple_users(self): - """Does the test database hold as many infractions as we tried to create?""" - for _user in range(5): - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': False}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - # Check if infractions and users are recorded properly in the database - database_count = Infraction.objects.all().count() - self.assertEqual(database_count, 10) - - user_count = User.objects.all().count() - self.assertEqual(user_count, 5 + 1) - - def test_infraction_factory_sets_correct_fields(self): - """Does the InfractionFactory set the correct attributes?""" - infractions = ( - { - 'type': 'note', - 'active': False, - 'hidden': True, - 'expires_at': timezone.now() - }, - {'type': 'warning', 'active': False, 'hidden': False, 'expires_at': None}, - {'type': 'watch', 'active': False, 'hidden': True, 'expires_at': None}, - {'type': 'mute', 'active': True, 'hidden': False, 'expires_at': None}, - {'type': 'kick', 'active': True, 'hidden': True, 'expires_at': None}, - {'type': 'ban', 'active': True, 'hidden': False, 'expires_at': None}, - { - 'type': 'superstar', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() - }, - ) - - InfractionFactory.create( - actor=self.actor, - infractions=infractions, - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - for infraction in infractions: - with self.subTest(**infraction): - self.assertTrue(Infraction.objects.filter(**infraction).exists()) - - -class 
ActiveInfractionMigrationTests(MigrationsTestCase): - """ - Tests the active infraction data migration. - - The active infraction data migration should do the following things: - - 1. migrates all active notes, warnings, and kicks to an inactive status; - 2. migrates all users with multiple active infractions of a single type to have only one active - infraction of that type. The infraction with the longest duration stays active. - """ - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0047_active_infractions_migration" - - @classmethod - def setUpMigrationData(cls, apps): - """Sets up an initial database state that contains the relevant test cases.""" - # Fetch the Infraction and User model in the current migration state - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.created_infractions = {} - - # Moderator that serves as actor for all infractions - cls.user_moderator = cls.user_model.objects.create( - id=9999, - name="Olivier de Vienne", - discriminator=1040, - avatar_hash=None, - ) - - # User #1: clean user with no infractions - cls.created_infractions["no infractions"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=[], - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #2: One inactive note infraction - cls.created_infractions["one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #3: One active note infraction - cls.created_infractions["one active note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #4: One active and one inactive note infraction - cls.created_infractions["one active and one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #5: Once active note, one active kick, once active warning - cls.created_infractions["active note, kick, warning"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - {'type': 'kick', 'active': True, 'hidden': True}, - {'type': 'warning', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #6: One inactive ban and one active ban - cls.created_infractions["one inactive and one active ban"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #7: Two active permanent bans - cls.created_infractions["two active perm bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #8: Multiple active temporary bans - cls.created_infractions["multiple active temp bans"] = 
InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=1) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=20) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=5) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #9: One active permanent ban, two active temporary bans - cls.created_infractions["active perm, two active temp bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #10: One inactive permanent ban, two active temporary bans - cls.created_infractions["one inactive perm ban, two active temp bans"] = ( - InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': False, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - ) - - # User #11: Active ban, active mute, active superstar - cls.created_infractions["active ban, mute, and superstar"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #12: Multiple active bans, active mutes, active superstars - cls.created_infractions["multiple active bans, mutes, stars"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - def test_all_never_active_types_became_inactive(self): - """Are all infractions of a non-active type inactive after the migration?""" - inactive_type_query = Q(type="note") | Q(type="warning") | Q(type="kick") - self.assertFalse( - self.infraction_model.objects.filter(inactive_type_query, active=True).exists() - ) - - def 
test_migration_left_clean_user_without_infractions(self):
-        """Do users without infractions have no infractions after the migration?"""
-        user_id, infraction_history = self.created_infractions["no infractions"]
-        self.assertFalse(
-            self.infraction_model.objects.filter(user__id=user_id).exists()
-        )
-
-    def test_migration_left_user_with_inactive_note_untouched(self):
-        """Did the migration leave users with only an inactive note untouched?"""
-        user_id, infraction_history = self.created_infractions["one inactive note"]
-        inactive_note = infraction_history[0]
-        self.assertTrue(
-            self.infraction_model.objects.filter(**model_to_dict(inactive_note)).exists()
-        )
-
-    def test_migration_only_touched_active_field_of_active_note(self):
-        """Does the migration only change the `active` field?"""
-        user_id, infraction_history = self.created_infractions["one active note"]
-        note = model_to_dict(infraction_history[0])
-        note['active'] = False
-        self.assertTrue(
-            self.infraction_model.objects.filter(**note).exists()
-        )
-
-    def test_migration_only_touched_active_field_of_active_note_left_inactive_untouched(self):
-        """Does the migration only change the `active` field of active notes?"""
-        user_id, infraction_history = self.created_infractions["one active and one inactive note"]
-        for note in infraction_history:
-            with self.subTest(active=note.active):
-                note = model_to_dict(note)
-                note['active'] = False
-                self.assertTrue(
-                    self.infraction_model.objects.filter(**note).exists()
-                )
-
-    def test_migration_migrates_all_nonactive_types_to_inactive(self):
-        """Do we set the `active` field of all non-active infractions to `False`?"""
-        user_id, infraction_history = self.created_infractions["active note, kick, warning"]
-        self.assertFalse(
-            self.infraction_model.objects.filter(user__id=user_id, active=True).exists()
-        )
-
-    def test_migration_leaves_user_with_one_active_ban_untouched(self):
-        """Do we leave a user with one active and one inactive ban untouched?"""
-        user_id, infraction_history = self.created_infractions["one inactive and one active ban"]
-        for infraction in infraction_history:
-            with self.subTest(active=infraction.active):
-                self.assertTrue(
-                    self.infraction_model.objects.filter(**model_to_dict(infraction)).exists()
-                )
-
-    def test_migration_turns_double_active_perm_ban_into_single_active_perm_ban(self):
-        """Does the migration turn two active permanent bans into one active permanent ban?"""
-        user_id, infraction_history = self.created_infractions["two active perm bans"]
-        active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count()
-        self.assertEqual(active_count, 1)
-
-    def test_migration_leaves_temporary_ban_with_longest_duration_active(self):
-        """Does the migration leave only the temporary ban with the longest duration active?"""
-        user_id, infraction_history = self.created_infractions["multiple active temp bans"]
-        active_ban = self.infraction_model.objects.get(user__id=user_id, active=True)
-        self.assertEqual(active_ban.expires_at, infraction_history[2].expires_at)
-
-    def test_migration_leaves_permanent_ban_active(self):
-        """Does the migration leave the permanent ban active?"""
-        user_id, infraction_history = self.created_infractions["active perm, two active temp bans"]
-        active_ban = self.infraction_model.objects.get(user__id=user_id, active=True)
-        self.assertIsNone(active_ban.expires_at)
-
-    def test_migration_leaves_longest_temp_ban_active_with_inactive_permanent_ban(self):
-        """Does the longest temp ban stay active, even with an inactive perm ban
present?""" - user_id, infraction_history = self.created_infractions[ - "one inactive perm ban, two active temp bans" - ] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertEqual(active_ban.expires_at, infraction_history[0].expires_at) - - def test_migration_leaves_all_active_types_active_if_one_of_each_exists(self): - """Do all active infractions stay active if only one of each is present?""" - user_id, infraction_history = self.created_infractions["active ban, mute, and superstar"] - active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count() - self.assertEqual(active_count, 4) - - def test_migration_reduces_all_active_types_to_a_single_active_infraction(self): - """Do we reduce all of the infraction types to one active infraction?""" - user_id, infraction_history = self.created_infractions["multiple active bans, mutes, stars"] - active_infractions = self.infraction_model.objects.filter(user__id=user_id, active=True) - self.assertEqual(len(active_infractions), 4) - types_observed = [infraction.type for infraction in active_infractions] - - for infraction_type in ('ban', 'mute', 'superstar', 'watch'): - with self.subTest(type=infraction_type): - self.assertIn(infraction_type, types_observed) diff --git a/pydis_site/apps/api/tests/migrations/test_base.py b/pydis_site/apps/api/tests/migrations/test_base.py deleted file mode 100644 index f69bc92c..00000000 --- a/pydis_site/apps/api/tests/migrations/test_base.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging -from unittest.mock import call, patch - -from django.db.migrations.loader import MigrationLoader -from django.test import TestCase - -from .base import MigrationsTestCase, connection - -log = logging.getLogger(__name__) - - -class SpanishInquisition(MigrationsTestCase): - app = "api" - migration_prior = "scragly" - migration_target = "kosa" - - -@patch("pydis_site.apps.api.tests.migrations.base.MigrationExecutor") -class MigrationsTestCaseNoSideEffectsTests(TestCase): - """Tests the MigrationTestCase class with actual migration side effects disabled.""" - - def setUp(self): - """Set up an instance of MigrationsTestCase for use in tests.""" - self.test_case = SpanishInquisition() - - def test_missing_app_class_raises_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set the class-attribute `app`.""" - class Spam(MigrationsTestCase): - pass - - spam = Spam() - with self.assertRaises(ValueError, msg="The `app` attribute was not set."): - spam.setUpTestData() - - def test_missing_migration_class_attributes_raise_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set both `migration_prior` and `migration_target`""" - class Eggs(MigrationsTestCase): - app = "api" - migration_target = "lemon" - - class Bacon(MigrationsTestCase): - app = "api" - migration_prior = "mark" - - instances = (Eggs(), Bacon()) - - exception_message = "Both ` migration_prior` and `migration_target` need to be set." 
- for instance in instances: - with self.subTest( - migration_prior=instance.migration_prior, - migration_target=instance.migration_target, - ): - with self.assertRaises(ValueError, msg=exception_message): - instance.setUpTestData() - - @patch(f"{__name__}.SpanishInquisition.setUpMigrationData") - @patch(f"{__name__}.SpanishInquisition.setUpPostMigrationData") - def test_migration_data_hooks_are_called_once(self, pre_hook, post_hook, _migration_executor): - """The `setUpMigrationData` and `setUpPostMigrationData` hooks should be called once.""" - self.test_case.setUpTestData() - for hook in (pre_hook, post_hook): - with self.subTest(hook=repr(hook)): - hook.assert_called_once() - - def test_migration_executor_is_instantiated_twice(self, migration_executor): - """The `MigrationExecutor` should be instantiated with the database connection twice.""" - self.test_case.setUpTestData() - - expected_args = [call(connection), call(connection)] - self.assertEqual(migration_executor.call_args_list, expected_args) - - def test_project_state_is_loaded_for_correct_migration_files_twice(self, migration_executor): - """The `project_state` should first be loaded with `migrate_from`, then `migrate_to`.""" - self.test_case.setUpTestData() - - expected_args = [call(self.test_case.migrate_from), call(self.test_case.migrate_to)] - self.assertEqual(migration_executor().loader.project_state.call_args_list, expected_args) - - def test_loader_build_graph_gets_called_once(self, migration_executor): - """We should rebuild the migration graph before applying the second set of migrations.""" - self.test_case.setUpTestData() - - migration_executor().loader.build_graph.assert_called_once() - - def test_migration_executor_migrate_method_is_called_correctly_twice(self, migration_executor): - """The migrate method of the executor should be called twice with the correct arguments.""" - self.test_case.setUpTestData() - - self.assertEqual(migration_executor().migrate.call_count, 2) - calls = [call([('api', 'scragly')]), call([('api', 'kosa')])] - migration_executor().migrate.assert_has_calls(calls) - - -class LifeOfBrian(MigrationsTestCase): - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0048_add_infractions_unique_constraints_active" - - @classmethod - def log_last_migration(cls): - """Parses the applied migrations dictionary to log the last applied migration.""" - loader = MigrationLoader(connection) - api_migrations = [ - migration for app, migration in loader.applied_migrations if app == cls.app - ] - last_migration = max(api_migrations, key=lambda name: int(name[:4])) - log.info(f"The last applied migration: {last_migration}") - - @classmethod - def setUpMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - @classmethod - def setUpPostMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - -class MigrationsTestCaseMigrationTest(TestCase): - """Tests if `MigrationsTestCase` travels to the right points in the migration history.""" - - def test_migrations_test_case_travels_to_correct_migrations_in_history(self): - """The test case should first revert to `migration_prior`, then go to `migration_target`.""" - brian = LifeOfBrian() - - with self.assertLogs(log, level=logging.INFO) as logs: - brian.setUpTestData() - - self.assertEqual(len(logs.records), 2) - - for time_point, record in zip(("migration_prior", "migration_target"), logs.records): - with 
self.subTest(time_point=time_point): - message = f"The last applied migration: {getattr(brian, time_point)}" - self.assertEqual(record.getMessage(), message) -- cgit v1.2.3 From ee25921da752d51215598bcd3eb5fd5ab74a4a46 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Mon, 11 Jul 2022 05:00:40 +0400 Subject: Remove Message Model Test The message model was tested by instantiating and confirming it has a string representation, but instantiating abstract models is undefined behavior, and can break with future versions of django. The behavior of the test is redundant anyway, since an abstract model wouldn't exist in isolation, and the desired behavior is confirmed by inheritors. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_models.py | 12 ------------ 1 file changed, 12 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 0fad467c..c07d59cd 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -7,7 +7,6 @@ from pydis_site.apps.api.models import ( DeletedMessage, DocumentationLink, Infraction, - Message, MessageDeletionContext, Nomination, NominationEntry, @@ -116,17 +115,6 @@ class StringDunderMethodTests(SimpleTestCase): colour=0x5, permissions=0, position=10, ), - Message( - id=45, - author=User( - id=444, - name='bill', - discriminator=5, - ), - channel_id=666, - content="wooey", - embeds=[] - ), MessageDeletionContext( actor=User( id=5555, -- cgit v1.2.3 From fe4def75dc0a316789cec068a574713a2b2af92f Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 12 Jul 2022 09:25:05 +0400 Subject: Add GitHub Artifact API View Adds an API route to fetch GitHub build artifacts through a GitHub app. Signed-off-by: Hassan Abouelela --- .gitignore | 3 + poetry.lock | 67 +++++- pydis_site/apps/api/github_utils.py | 183 ++++++++++++++++ pydis_site/apps/api/tests/test_github_utils.py | 287 +++++++++++++++++++++++++ pydis_site/apps/api/urls.py | 9 +- pydis_site/apps/api/views.py | 53 +++++ pydis_site/settings.py | 10 +- pyproject.toml | 1 + 8 files changed, 609 insertions(+), 4 deletions(-) create mode 100644 pydis_site/apps/api/github_utils.py create mode 100644 pydis_site/apps/api/tests/test_github_utils.py (limited to 'pydis_site/apps/api/tests') diff --git a/.gitignore b/.gitignore index 45073da5..4fc4417d 100644 --- a/.gitignore +++ b/.gitignore @@ -132,3 +132,6 @@ log.* # Mac/OSX .DS_Store + +# Private keys +*.pem diff --git a/poetry.lock b/poetry.lock index f6576fba..1bee4397 100644 --- a/poetry.lock +++ b/poetry.lock @@ -67,6 +67,17 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.3.1" @@ -121,6 +132,25 @@ requests = ">=1.0.0" [package.extras] yaml = ["PyYAML (>=3.10)"] +[[package]] +name = "cryptography" +version = "37.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + [[package]] name = "distlib" version = "0.3.4" @@ -607,6 +637,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pydocstyle" version = "6.1.1" @@ -637,6 +675,23 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pyjwt" +version = "2.4.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.3.1)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] + [[package]] name = "python-dotenv" version = "0.17.1" @@ -876,7 +931,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "e71d10c3d478c5d99e842f4c449a093caa1d4b2d255eb0dfb19843c5265d4aca" +content-hash = "c656c07f40d32ee7d30c19a7084b40e1e851209a362a3fe882aa03c2fd286454" [metadata.files] anyio = [ @@ -896,6 +951,7 @@ certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] +cffi = [] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, @@ -963,6 +1019,7 @@ coveralls = [ {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, ] +cryptography = [] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, @@ -1157,6 +1214,10 @@ pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] +pycparser = [ + 
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, @@ -1166,6 +1227,10 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] +pyjwt = [ + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, +] python-dotenv = [ {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py new file mode 100644 index 00000000..70dccdff --- /dev/null +++ b/pydis_site/apps/api/github_utils.py @@ -0,0 +1,183 @@ +"""Utilities for working with the GitHub API.""" + +import asyncio +import datetime +import math + +import httpx +import jwt +from asgiref.sync import async_to_sync + +from pydis_site import settings + +MAX_POLLS = 20 +"""The maximum number of attempts at fetching a workflow run.""" + + +class ArtifactProcessingError(Exception): + """Base exception for other errors related to processing a GitHub artifact.""" + + status: int + + +class UnauthorizedError(ArtifactProcessingError): + """The application does not have permission to access the requested repo.""" + + status = 401 + + +class NotFoundError(ArtifactProcessingError): + """The requested resource could not be found.""" + + status = 404 + + +class ActionFailedError(ArtifactProcessingError): + """The requested workflow did not conclude successfully.""" + + status = 400 + + +class RunTimeoutError(ArtifactProcessingError): + """The requested workflow run was not ready in time.""" + + status = 408 + + +def generate_token() -> str: + """ + Generate a JWT token to access the GitHub API. + + The token is valid for roughly 10 minutes after generation, before the API starts + returning 401s. + + Refer to: + https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app + """ + now = datetime.datetime.now() + return jwt.encode( + { + "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at + "exp": math.floor((now + datetime.timedelta(minutes=9)).timestamp()), # Expires at + "iss": settings.GITHUB_OAUTH_APP_ID, + }, + settings.GITHUB_OAUTH_KEY, + algorithm="RS256" + ) + + +async def authorize(owner: str, repo: str) -> httpx.AsyncClient: + """ + Get an access token for the requested repository. 
+
+    The process is roughly:
+    - GET app/installations to get a list of all app installations
+    - POST to get a token to access the given app
+    - GET installation/repositories and check if the requested one is part of those
+    """
+    client = httpx.AsyncClient(
+        base_url=settings.GITHUB_API,
+        headers={"Authorization": f"bearer {generate_token()}"},
+        timeout=settings.TIMEOUT_PERIOD,
+    )
+
+    try:
+        # Get a list of app installations we have access to
+        apps = await client.get("app/installations")
+        apps.raise_for_status()
+
+        for app in apps.json():
+            # Look for an installation with the right owner
+            if app["account"]["login"] != owner:
+                continue
+
+            # Get the repositories of the specified owner
+            app_token = await client.post(app["access_tokens_url"])
+            app_token.raise_for_status()
+            client.headers["Authorization"] = f"bearer {app_token.json()['token']}"
+
+            repos = await client.get("installation/repositories")
+            repos.raise_for_status()
+
+            # Search for the requested repository
+            for accessible_repo in repos.json()["repositories"]:
+                if accessible_repo["name"] == repo:
+                    # We've found the correct repository, and it's accessible with the current auth
+                    return client
+
+        raise NotFoundError(
+            "Could not find the requested repository. Make sure the application can access it."
+        )
+
+    except BaseException as e:
+        # Close the client if we encountered an unexpected exception
+        await client.aclose()
+        raise e
+
+
+async def wait_for_run(client: httpx.AsyncClient, run: dict) -> str:
+    """Wait for the provided `run` to finish, and return the URL to its artifacts."""
+    polls = 0
+    while polls <= MAX_POLLS:
+        if run["status"] != "completed":
+            # The action is still processing, wait a bit longer
+            polls += 1
+            await asyncio.sleep(10)
+
+        elif run["conclusion"] != "success":
+            # The action failed, or did not run
+            raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}")
+
+        else:
+            # The desired action was found, and it ended successfully
+            return run["artifacts_url"]
+
+        run = await client.get(run["url"])
+        run.raise_for_status()
+        run = run.json()
+
+    raise RunTimeoutError("The requested workflow was not ready in time.")
+
+
+@async_to_sync
+async def get_artifact(
+    owner: str, repo: str, sha: str, action_name: str, artifact_name: str
+) -> str:
+    """Get a download URL for a build artifact."""
+    client = await authorize(owner, repo)
+
+    try:
+        # Get the workflow runs for this repository
+        runs = await client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100})
+        runs.raise_for_status()
+        runs = runs.json()
+
+        # Filter the runs for the one associated with the given SHA
+        for run in runs["workflow_runs"]:
+            if run["name"] == action_name and sha == run["head_sha"]:
+                break
+        else:
+            raise NotFoundError(
+                "Could not find a run matching the provided settings in the previous hundred runs."
+            )
+
+        # Wait for the workflow to finish
+        url = await wait_for_run(client, run)
+
+        # Filter the artifacts, and return the download URL
+        artifacts = await client.get(url)
+        artifacts.raise_for_status()
+
+        for artifact in artifacts.json()["artifacts"]:
+            if artifact["name"] == artifact_name:
+                data = await client.get(artifact["archive_download_url"])
+                if data.status_code == 302:
+                    return str(data.next_request.url)
+
+                # The following line is left untested since it should in theory be impossible
+                data.raise_for_status()  # pragma: no cover
+
+        raise NotFoundError("Could not find an artifact matching the provided name.")
+
+    finally:
+        await client.aclose()
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
new file mode 100644
index 00000000..dc17d609
--- /dev/null
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -0,0 +1,287 @@
+import asyncio
+import datetime
+import random
+import unittest
+from unittest import mock
+
+import django.test
+import httpx
+import jwt
+import rest_framework.response
+import rest_framework.test
+from django.urls import reverse
+
+from .. import github_utils
+
+
+def patched_raise_for_status(response: httpx.Response):
+    """Fake implementation of raise_for_status which does not need a request to be set."""
+    if response.status_code // 100 != 2:  # pragma: no cover
+        raise httpx.HTTPStatusError(
+            f"Non 2xx response code: {response.status_code}",
+            request=getattr(response, "_request", httpx.Request("GET", "")),
+            response=response
+        )
+
+
+class GeneralUtilityTests(unittest.TestCase):
+    """Test the utility methods which do not fit in another class."""
+
+    def test_token_generation(self):
+        """Test that a valid JWT token is generated."""
+        def encode(payload: dict, _: str, algorithm: str, *args, **kwargs) -> str:
+            """
+            Intercept the encode method.
+
+            It is performed with an algorithm which does not require a PEM key, as it may
+            not be available in testing environments.
+ """ + self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") + return original_encode( + payload, "secret-encoding-key", algorithm="HS256", *args, **kwargs + ) + + original_encode = jwt.encode + with mock.patch("jwt.encode", new=encode): + token = github_utils.generate_token() + decoded = jwt.decode(token, "secret-encoding-key", algorithms=["HS256"]) + + delta = datetime.timedelta(minutes=10) + self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) + self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + + +@mock.patch("httpx.AsyncClient", autospec=True) +@mock.patch("asyncio.sleep", new=mock.AsyncMock(return_value=asyncio.Future)) +@mock.patch("httpx.Response.raise_for_status", new=patched_raise_for_status) +class WaitForTests(unittest.IsolatedAsyncioTestCase): + """Tests the wait_for utility.""" + + async def test_wait_for_successful_run(self, client_mock: mock.Mock): + """Test that the wait_for method handles successfully runs.""" + final_url = "some_url" + str(random.randint(0, 10)) + + client_mock.get.side_effect = responses = [ + httpx.Response(200, json={"status": "queued", "url": ""}), + httpx.Response(200, json={"status": "pending", "url": ""}), + httpx.Response(200, json={ + "status": "completed", + "conclusion": "success", + "url": "", + "artifacts_url": final_url + }) + ] + + result = await github_utils.wait_for_run(client_mock, responses[0].json()) + self.assertEqual(final_url, result) + + async def test_wait_for_failed_run(self, client_mock: mock.Mock): + """Test that the wait_for method handles failed runs.""" + client_mock.get.return_value = httpx.Response(200, json={ + "status": "completed", + "conclusion": "failed", + }) + + with self.assertRaises(github_utils.ActionFailedError): + await github_utils.wait_for_run(client_mock, {"status": "pending", "url": ""}) + + async def test_wait_for_timeout(self, client_mock: mock.Mock): + """Test that the wait_for method quits after a few attempts.""" + client_mock.get.side_effect = responses = [ + httpx.Response(200, json={"status": "pending", "url": ""}) + ] * (github_utils.MAX_POLLS + 5) + + with self.assertRaises(github_utils.RunTimeoutError): + await github_utils.wait_for_run(client_mock, responses[0].json()) + + +async def get_response_authorize( + _: httpx.AsyncClient, request: httpx.Request, **__ +) -> httpx.Response: + """ + Helper method for the authorize tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + auth = request.headers.get("Authorization") + + if request.method == "GET": + if path == "/app/installations": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json=[{ + "account": {"login": "VALID_OWNER"}, + "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" + }]) + else: + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) + + elif path == "/installation/repositories": + if auth == "bearer app access token": + return httpx.Response(200, request=request, json={ + "repositories": [{ + "name": "VALID_REPO" + }] + }) + else: # pragma: no cover + return httpx.Response( + 401, json={"error": "auth installation/repositories"}, request=request + ) + + elif request.method == "POST": + if path == "/ACCESS_TOKEN_URL": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json={"token": "app access token"}) + else: # pragma: no cover + return httpx.Response(401, json={"error": "auth access_token"}, request=request) + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + +@mock.patch("httpx.AsyncClient.send", new=get_response_authorize) +@mock.patch.object(github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token")) +class AuthorizeTests(unittest.IsolatedAsyncioTestCase): + """Test the authorize utility.""" + + async def test_invalid_apps_auth(self): + """Test that an exception is raised if authorization was attempted with an invalid token.""" + with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): + with self.assertRaises(httpx.HTTPStatusError) as error: + await github_utils.authorize("VALID_OWNER", "VALID_REPO") + + exception: httpx.HTTPStatusError = error.exception + self.assertEqual(401, exception.response.status_code) + self.assertEqual("auth app/installations", exception.response.json()["error"]) + + async def test_missing_repo(self): + """Test that an exception is raised when the selected owner or repo are not available.""" + with self.assertRaises(github_utils.NotFoundError): + await github_utils.authorize("INVALID_OWNER", "VALID_REPO") + with self.assertRaises(github_utils.NotFoundError): + await github_utils.authorize("VALID_OWNER", "INVALID_REPO") + + async def test_valid_authorization(self): + """Test that an accessible repository can be accessed.""" + client = await github_utils.authorize("VALID_OWNER", "VALID_REPO") + self.assertEqual("bearer app access token", client.headers.get("Authorization")) + + +async def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: + """ + Helper method for the get_artifact tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + + if "force_error" in path: + return httpx.Response(404, request=request) + + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + return httpx.Response(200, request=request, json={"workflow_runs": [{ + "name": "action_name", + "head_sha": "action_sha" + }]}) + elif path == "/artifact_url": + return httpx.Response(200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]}) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request("GET", httpx.URL("https://final_download.url")) + return response + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + +class ArtifactFetcherTests(unittest.IsolatedAsyncioTestCase): + """Test the get_artifact utility.""" + + def setUp(self) -> None: + self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] + self.client = httpx.AsyncClient(base_url="https://example.com") + + self.patchers = [ + mock.patch.object(self.client, "send", new=get_response_get_artifact), + mock.patch.object(github_utils, "authorize", return_value=self.client), + mock.patch.object(github_utils, "wait_for_run", return_value="artifact_url"), + ] + + for patcher in self.patchers: + patcher.start() + + def tearDown(self) -> None: + for patcher in self.patchers: + patcher.stop() + + def test_client_closed_on_errors(self): + """Test that the client is terminated even if an error occurs at some point.""" + self.call_args[0] = "force_error" + with self.assertRaises(httpx.HTTPStatusError): + github_utils.get_artifact(*self.call_args) + self.assertTrue(self.client.is_closed) + + def test_missing(self): + """Test that an exception is raised if the requested artifact was not found.""" + cases = ( + "invalid sha", + "invalid action name", + "invalid artifact name", + ) + for i, name in enumerate(cases, 2): + with self.subTest(f"Test {name} raises an error"): + new_args = self.call_args.copy() + new_args[i] = name + + with self.assertRaises(github_utils.NotFoundError): + github_utils.get_artifact(*new_args) + + def test_valid(self): + """Test that the correct download URL is returned for valid requests.""" + url = github_utils.get_artifact(*self.call_args) + self.assertEqual("https://final_download.url", url) + self.assertTrue(self.client.is_closed) + + +@mock.patch.object(github_utils, "get_artifact") +class GitHubArtifactViewTests(django.test.TestCase): + """Test the GitHub artifact fetch API view.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.kwargs = { + "owner": "test_owner", + "repo": "test_repo", + "sha": "test_sha", + "action_name": "test_action", + "artifact_name": "test_artifact", + } + cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) + + async def test_successful(self, artifact_mock: mock.Mock): + """Test a proper response is returned with proper input.""" + artifact_mock.return_value = "final download url" + result = self.client.get(self.url) + + self.assertIsInstance(result, rest_framework.response.Response) + self.assertEqual({"url": artifact_mock.return_value}, result.data) + + async def test_failed_fetch(self, artifact_mock: mock.Mock): + """Test that a proper error is returned when the request fails.""" + artifact_mock.side_effect = github_utils.NotFoundError("Test error message") + result = self.client.get(self.url) + + self.assertIsInstance(result, 
rest_framework.response.Response)
+        self.assertEqual({
+            "error_type": github_utils.NotFoundError.__name__,
+            "error": "Test error message",
+            "requested_resource": "/".join(self.kwargs.values())
+        }, result.data)
diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py
index 1e564b29..2757f176 100644
--- a/pydis_site/apps/api/urls.py
+++ b/pydis_site/apps/api/urls.py
@@ -1,7 +1,7 @@
 from django.urls import include, path
 from rest_framework.routers import DefaultRouter
 
-from .views import HealthcheckView, RulesView
+from .views import GitHubArtifactsView, HealthcheckView, RulesView
 from .viewsets import (
     AocAccountLinkViewSet,
     AocCompletionistBlockViewSet,
@@ -86,5 +86,10 @@ urlpatterns = (
     # from django_hosts.resolvers import reverse
     path('bot/', include((bot_router.urls, 'api'), namespace='bot')),
     path('healthcheck', HealthcheckView.as_view(), name='healthcheck'),
-    path('rules', RulesView.as_view(), name='rules')
+    path('rules', RulesView.as_view(), name='rules'),
+    path(
+        'github/artifact/<str:owner>/<str:repo>/<str:sha>/<str:action_name>/<str:artifact_name>',
+        GitHubArtifactsView.as_view(),
+        name="github-artifacts"
+    ),
 )
diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index 816463f6..ad2d948e 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -1,7 +1,10 @@
 from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
+from . import github_utils
+
 
 class HealthcheckView(APIView):
     """
@@ -152,3 +155,53 @@ class RulesView(APIView):
             "Do not offer or ask for paid work of any kind."
         ),
     ])
+
+
+class GitHubArtifactsView(APIView):
+    """
+    Provides utilities for interacting with the GitHub API and obtaining action artifacts.
+
+    ## Routes
+    ### GET /github/artifacts
+    Returns a download URL for the artifact requested.
+
+        {
+            'url': 'https://pipelines.actions.githubusercontent.com/...'
+        }
+
+    ### Exceptions
+    In case of an error, the following body will be returned:
+
+        {
+            "error_type": "<error class name>",
+            "error": "<error message>",
+            "requested_resource": "<owner>/<repo>/<sha>/<action_name>/<artifact_name>"
+        }
+
+    ## Authentication
+    Does not require any authentication nor permissions.
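+
+    ### Example
+    An illustrative request (the owner, repo, workflow, and artifact names below are
+    placeholder values, not part of the API):
+
+        GET /github/artifact/python-discord/bot/1a2b3c4d/Lint/coverage-report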
+ """ + + authentication_classes = () + permission_classes = () + + def get( + self, + request: Request, + *, + owner: str, + repo: str, + sha: str, + action_name: str, + artifact_name: str + ) -> Response: + """Return a download URL for the requested artifact.""" + try: + url = github_utils.get_artifact(owner, repo, sha, action_name, artifact_name) + return Response({"url": url}) + except github_utils.ArtifactProcessingError as e: + return Response({ + "error_type": e.__class__.__name__, + "error": str(e), + "requested_resource": f"{owner}/{repo}/{sha}/{action_name}/{artifact_name}" + }, status=e.status) diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 03c16f4b..f382b052 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -21,7 +21,6 @@ import environ import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration - env = environ.Env( DEBUG=(bool, False), SITE_DSN=(str, ""), @@ -30,10 +29,19 @@ env = environ.Env( GIT_SHA=(str, 'development'), TIMEOUT_PERIOD=(int, 5), GITHUB_TOKEN=(str, None), + GITHUB_OAUTH_APP_ID=(str, None), + GITHUB_OAUTH_KEY=(str, None), ) GIT_SHA = env("GIT_SHA") +GITHUB_API = "https://api.github.com" GITHUB_TOKEN = env("GITHUB_TOKEN") +GITHUB_OAUTH_APP_ID = env("GITHUB_OAUTH_APP_ID") +GITHUB_OAUTH_KEY = env("GITHUB_OAUTH_KEY") + +if GITHUB_OAUTH_KEY and (oauth_file := Path(GITHUB_OAUTH_KEY)).is_file(): + # Allow the OAuth key to be loaded from a file + GITHUB_OAUTH_KEY = oauth_file.read_text(encoding="utf-8") sentry_sdk.init( dsn=env('SITE_DSN'), diff --git a/pyproject.toml b/pyproject.toml index 467fc8bc..1c24d308 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ markdown = "~=3.3.4" python-frontmatter = "~=1.0" django-prometheus = "~=2.1" django-distill = "~=2.9.0" +PyJWT = {version = "~=2.4.0", extras = ["crypto"]} [tool.poetry.dev-dependencies] coverage = "~=5.0" -- cgit v1.2.3 From 26a3c19b53883015e8ba87db2a668c3eece2ce20 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 12 Jul 2022 14:45:22 +0400 Subject: Make Awaiting Workflow Run A User Responsibility Moves the responsibility of re-requesting a workflow run from the API to the user. This makes the requests much shorter-lived, and allows the client to control how they want to handle sleeping and retrying. This also has the benefit of removing the only real piece of async code, so now the view is completely sync once again. 
Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 79 +++++------ pydis_site/apps/api/tests/test_github_utils.py | 174 ++++++++++++------------- static-builds/netlify_build.py | 9 +- 3 files changed, 131 insertions(+), 131 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 70dccdff..707b36e5 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -1,17 +1,17 @@ """Utilities for working with the GitHub API.""" -import asyncio import datetime import math import httpx import jwt -from asgiref.sync import async_to_sync from pydis_site import settings -MAX_POLLS = 20 -"""The maximum number of attempts at fetching a workflow run.""" +MAX_RUN_TIME = datetime.timedelta(minutes=3) +"""The maximum time allowed before an action is declared timed out.""" +ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ" +"""The datetime string format GitHub uses.""" class ArtifactProcessingError(Exception): @@ -44,6 +44,12 @@ class RunTimeoutError(ArtifactProcessingError): status = 408 +class RunPendingError(ArtifactProcessingError): + """The requested workflow run is still pending, try again later.""" + + status = 202 + + def generate_token() -> str: """ Generate a JWT token to access the GitHub API. @@ -66,7 +72,7 @@ def generate_token() -> str: ) -async def authorize(owner: str, repo: str) -> httpx.AsyncClient: +def authorize(owner: str, repo: str) -> httpx.Client: """ Get an access token for the requested repository. @@ -75,7 +81,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: - POST to get a token to access the given app - GET installation/repositories and check if the requested one is part of those """ - client = httpx.AsyncClient( + client = httpx.Client( base_url=settings.GITHUB_API, headers={"Authorization": f"bearer {generate_token()}"}, timeout=settings.TIMEOUT_PERIOD, @@ -83,7 +89,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: try: # Get a list of app installations we have access to - apps = await client.get("app/installations") + apps = client.get("app/installations") apps.raise_for_status() for app in apps.json(): @@ -92,11 +98,11 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: continue # Get the repositories of the specified owner - app_token = await client.post(app["access_tokens_url"]) + app_token = client.post(app["access_tokens_url"]) app_token.raise_for_status() client.headers["Authorization"] = f"bearer {app_token.json()['token']}" - repos = await client.get("installation/repositories") + repos = client.get("installation/repositories") repos.raise_for_status() # Search for the request repository @@ -111,44 +117,39 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: except BaseException as e: # Close the client if we encountered an unexpected exception - await client.aclose() + client.close() raise e -async def wait_for_run(client: httpx.AsyncClient, run: dict) -> str: - """Wait for the provided `run` to finish, and return the URL to its artifacts.""" - polls = 0 - while polls <= MAX_POLLS: - if run["status"] != "completed": - # The action is still processing, wait a bit longer - polls += 1 - await asyncio.sleep(10) - - elif run["conclusion"] != "success": - # The action failed, or did not run - raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") +def check_run_status(run: dict) -> str: + """Check if the provided run has been completed, 
otherwise raise an exception.""" + created_at = datetime.datetime.strptime(run["created_at"], ISO_FORMAT_STRING) + run_time = datetime.datetime.now() - created_at + if run["status"] != "completed": + if run_time <= MAX_RUN_TIME: + raise RunPendingError( + f"The requested run is still pending. It was created " + f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago." + ) else: - # The desired action was found, and it ended successfully - return run["artifacts_url"] + raise RunTimeoutError("The requested workflow was not ready in time.") - run = await client.get(run["url"]) - run.raise_for_status() - run = run.json() + if run["conclusion"] != "success": + # The action failed, or did not run + raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") - raise RunTimeoutError("The requested workflow was not ready in time.") + # The requested action is ready + return run["artifacts_url"] -@async_to_sync -async def get_artifact( - owner: str, repo: str, sha: str, action_name: str, artifact_name: str -) -> str: +def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str: """Get a download URL for a build artifact.""" - client = await authorize(owner, repo) + client = authorize(owner, repo) try: # Get the workflow runs for this repository - runs = await client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100}) + runs = client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100}) runs.raise_for_status() runs = runs.json() @@ -161,16 +162,16 @@ async def get_artifact( "Could not find a run matching the provided settings in the previous hundred runs." ) - # Wait for the workflow to finish - url = await wait_for_run(client, run) + # Check the workflow status + url = check_run_status(run) # Filter the artifacts, and return the download URL - artifacts = await client.get(url) + artifacts = client.get(url) artifacts.raise_for_status() for artifact in artifacts.json()["artifacts"]: if artifact["name"] == artifact_name: - data = await client.get(artifact["archive_download_url"]) + data = client.get(artifact["archive_download_url"]) if data.status_code == 302: return str(data.next_request.url) @@ -180,4 +181,4 @@ async def get_artifact( raise NotFoundError("Could not find an artifact matching the provided name.") finally: - await client.aclose() + client.close() diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index dc17d609..78f2979d 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -1,6 +1,4 @@ -import asyncio import datetime -import random import unittest from unittest import mock @@ -14,16 +12,6 @@ from django.urls import reverse from .. 
import github_utils -def patched_raise_for_status(response: httpx.Response): - """Fake implementation of raise_for_status which does not need a request to be set.""" - if response.status_code // 100 != 2: # pragma: no cover - raise httpx.HTTPStatusError( - f"Non 2xx response code: {response.status_code}", - request=getattr(response, "_request", httpx.Request("GET", "")), - response=response - ) - - class GeneralUtilityTests(unittest.TestCase): """Test the utility methods which do not fit in another class.""" @@ -51,53 +39,50 @@ class GeneralUtilityTests(unittest.TestCase): self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) -@mock.patch("httpx.AsyncClient", autospec=True) -@mock.patch("asyncio.sleep", new=mock.AsyncMock(return_value=asyncio.Future)) -@mock.patch("httpx.Response.raise_for_status", new=patched_raise_for_status) -class WaitForTests(unittest.IsolatedAsyncioTestCase): - """Tests the wait_for utility.""" - - async def test_wait_for_successful_run(self, client_mock: mock.Mock): - """Test that the wait_for method handles successfully runs.""" - final_url = "some_url" + str(random.randint(0, 10)) - - client_mock.get.side_effect = responses = [ - httpx.Response(200, json={"status": "queued", "url": ""}), - httpx.Response(200, json={"status": "pending", "url": ""}), - httpx.Response(200, json={ - "status": "completed", - "conclusion": "success", - "url": "", - "artifacts_url": final_url - }) - ] +class WaitForTests(unittest.TestCase): + """Tests the check_run_status utility.""" - result = await github_utils.wait_for_run(client_mock, responses[0].json()) - self.assertEqual(final_url, result) + def test_completed_run(self): + final_url = "some_url_string_1234" - async def test_wait_for_failed_run(self, client_mock: mock.Mock): - """Test that the wait_for method handles failed runs.""" - client_mock.get.return_value = httpx.Response(200, json={ + result = github_utils.check_run_status({ "status": "completed", - "conclusion": "failed", + "conclusion": "success", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + "artifacts_url": final_url, }) + self.assertEqual(final_url, result) - with self.assertRaises(github_utils.ActionFailedError): - await github_utils.wait_for_run(client_mock, {"status": "pending", "url": ""}) + def test_pending_run(self): + """Test that a pending run raises the proper exception.""" + with self.assertRaises(github_utils.RunPendingError): + github_utils.check_run_status({ + "status": "pending", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + }) - async def test_wait_for_timeout(self, client_mock: mock.Mock): - """Test that the wait_for method quits after a few attempts.""" - client_mock.get.side_effect = responses = [ - httpx.Response(200, json={"status": "pending", "url": ""}) - ] * (github_utils.MAX_POLLS + 5) + def test_timeout_error(self): + """Test that a timeout is declared after a certain duration.""" + # Set the creation time to well before the MAX_RUN_TIME + # to guarantee the right conclusion + created = ( + datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + ).strftime(github_utils.ISO_FORMAT_STRING) with self.assertRaises(github_utils.RunTimeoutError): - await github_utils.wait_for_run(client_mock, responses[0].json()) + github_utils.check_run_status({"status": "pending", "created_at": created}) + + def test_failed_run(self): + """Test that a failed run raises the proper exception.""" + with 
self.assertRaises(github_utils.ActionFailedError): + github_utils.check_run_status({ + "status": "completed", + "conclusion": "failed", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + }) -async def get_response_authorize( - _: httpx.AsyncClient, request: httpx.Request, **__ -) -> httpx.Response: +def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response: """ Helper method for the authorize tests. @@ -141,76 +126,83 @@ async def get_response_authorize( return httpx.Response(500, request=request) # pragma: no cover -@mock.patch("httpx.AsyncClient.send", new=get_response_authorize) +@mock.patch("httpx.Client.send", new=get_response_authorize) @mock.patch.object(github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token")) -class AuthorizeTests(unittest.IsolatedAsyncioTestCase): +class AuthorizeTests(unittest.TestCase): """Test the authorize utility.""" - async def test_invalid_apps_auth(self): + def test_invalid_apps_auth(self): """Test that an exception is raised if authorization was attempted with an invalid token.""" with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): with self.assertRaises(httpx.HTTPStatusError) as error: - await github_utils.authorize("VALID_OWNER", "VALID_REPO") + github_utils.authorize("VALID_OWNER", "VALID_REPO") exception: httpx.HTTPStatusError = error.exception self.assertEqual(401, exception.response.status_code) self.assertEqual("auth app/installations", exception.response.json()["error"]) - async def test_missing_repo(self): + def test_missing_repo(self): """Test that an exception is raised when the selected owner or repo are not available.""" with self.assertRaises(github_utils.NotFoundError): - await github_utils.authorize("INVALID_OWNER", "VALID_REPO") + github_utils.authorize("INVALID_OWNER", "VALID_REPO") with self.assertRaises(github_utils.NotFoundError): - await github_utils.authorize("VALID_OWNER", "INVALID_REPO") + github_utils.authorize("VALID_OWNER", "INVALID_REPO") - async def test_valid_authorization(self): + def test_valid_authorization(self): """Test that an accessible repository can be accessed.""" - client = await github_utils.authorize("VALID_OWNER", "VALID_REPO") + client = github_utils.authorize("VALID_OWNER", "VALID_REPO") self.assertEqual("bearer app access token", client.headers.get("Authorization")) -async def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: - """ - Helper method for the get_artifact tests. +class ArtifactFetcherTests(unittest.TestCase): + """Test the get_artifact utility.""" - Requests are intercepted before being sent out, and the appropriate responses are returned. - """ - path = request.url.path + @staticmethod + def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: + """ + Helper method for the get_artifact tests. - if "force_error" in path: - return httpx.Response(404, request=request) + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path - if request.method == "GET": - if path == "/repos/owner/repo/actions/runs": - return httpx.Response(200, request=request, json={"workflow_runs": [{ - "name": "action_name", - "head_sha": "action_sha" - }]}) - elif path == "/artifact_url": - return httpx.Response(200, request=request, json={"artifacts": [{ - "name": "artifact_name", - "archive_download_url": "artifact_download_url" - }]}) - elif path == "/artifact_download_url": - response = httpx.Response(302, request=request) - response.next_request = httpx.Request("GET", httpx.URL("https://final_download.url")) - return response - - # Reaching this point means something has gone wrong - return httpx.Response(500, request=request) # pragma: no cover + if "force_error" in path: + return httpx.Response(404, request=request) + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + return httpx.Response( + 200, request=request, json={"workflow_runs": [{ + "name": "action_name", + "head_sha": "action_sha" + }]} + ) + elif path == "/artifact_url": + return httpx.Response( + 200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]} + ) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request( + "GET", + httpx.URL("https://final_download.url") + ) + return response -class ArtifactFetcherTests(unittest.IsolatedAsyncioTestCase): - """Test the get_artifact utility.""" + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover def setUp(self) -> None: self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] - self.client = httpx.AsyncClient(base_url="https://example.com") + self.client = httpx.Client(base_url="https://example.com") self.patchers = [ - mock.patch.object(self.client, "send", new=get_response_get_artifact), + mock.patch.object(self.client, "send", new=self.get_response_get_artifact), mock.patch.object(github_utils, "authorize", return_value=self.client), - mock.patch.object(github_utils, "wait_for_run", return_value="artifact_url"), + mock.patch.object(github_utils, "check_run_status", return_value="artifact_url"), ] for patcher in self.patchers: @@ -266,7 +258,7 @@ class GitHubArtifactViewTests(django.test.TestCase): } cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) - async def test_successful(self, artifact_mock: mock.Mock): + def test_successful(self, artifact_mock: mock.Mock): """Test a proper response is returned with proper input.""" artifact_mock.return_value = "final download url" result = self.client.get(self.url) @@ -274,7 +266,7 @@ class GitHubArtifactViewTests(django.test.TestCase): self.assertIsInstance(result, rest_framework.response.Response) self.assertEqual({"url": artifact_mock.return_value}, result.data) - async def test_failed_fetch(self, artifact_mock: mock.Mock): + def test_failed_fetch(self, artifact_mock: mock.Mock): """Test that a proper error is returned when the request fails.""" artifact_mock.side_effect = github_utils.NotFoundError("Test error message") result = self.client.get(self.url) diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 13cd0279..a473bd91 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -8,6 +8,7 @@ import json import os +import time import zipfile from pathlib import Path from urllib import parse @@ -29,7 +30,7 @@ if __name__ == 
"__main__": print(f"Fetching download URL from {download_url}") response = httpx.get(download_url, follow_redirects=True) - if response.status_code != 200: + if response.status_code // 100 != 2: try: print(response.json()) except json.JSONDecodeError: @@ -37,6 +38,12 @@ if __name__ == "__main__": response.raise_for_status() + # The workflow is still pending, retry in a bit + while response.status_code == 202: + print(f"{response.json()['error']}. Retrying in 10 seconds.") + time.sleep(10) + response = httpx.get(download_url, follow_redirects=True) + url = response.json()["url"] print(f"Downloading build from {url}") zipped_content = httpx.get(url, follow_redirects=True) -- cgit v1.2.3 From 124c84b9e4f8195485b6ff9b3896cc87e640e02b Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Thu, 14 Jul 2022 05:57:05 +0400 Subject: Clean Up Artifact Tests Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 78f2979d..a9eab9a5 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -21,7 +21,7 @@ class GeneralUtilityTests(unittest.TestCase): """ Intercept the encode method. - It is performed with an algorithm which does not require a PEM key, as it may + The result is encoded with an algorithm which does not require a PEM key, as it may not be available in testing environments. """ self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") @@ -39,10 +39,11 @@ class GeneralUtilityTests(unittest.TestCase): self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) -class WaitForTests(unittest.TestCase): +class CheckRunTests(unittest.TestCase): """Tests the check_run_status utility.""" def test_completed_run(self): + """Test that an already completed run returns the correct URL.""" final_url = "some_url_string_1234" result = github_utils.check_run_status({ @@ -245,20 +246,17 @@ class ArtifactFetcherTests(unittest.TestCase): class GitHubArtifactViewTests(django.test.TestCase): """Test the GitHub artifact fetch API view.""" - @classmethod - def setUpClass(cls): - super().setUpClass() - - cls.kwargs = { + def setUp(self): + self.kwargs = { "owner": "test_owner", "repo": "test_repo", "sha": "test_sha", "action_name": "test_action", "artifact_name": "test_artifact", } - cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) + self.url = reverse("api:github-artifacts", kwargs=self.kwargs) - def test_successful(self, artifact_mock: mock.Mock): + def test_correct_artifact(self, artifact_mock: mock.Mock): """Test a proper response is returned with proper input.""" artifact_mock.return_value = "final download url" result = self.client.get(self.url) -- cgit v1.2.3 From 37001bca59c1d3d5fc8a8dadffda00d55fc9e0b6 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 24 Jul 2022 08:32:43 +0200 Subject: Use Dataclass For Workflow Run Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 41 ++++++++++++++++----- pydis_site/apps/api/tests/test_github_utils.py | 50 +++++++++++++++----------- 2 files changed, 62 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 707b36e5..c4ace6b7 100644 --- 
a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -1,7 +1,8 @@ """Utilities for working with the GitHub API.""" - +import dataclasses import datetime import math +import typing import httpx import jwt @@ -50,6 +51,29 @@ class RunPendingError(ArtifactProcessingError): status = 202 +@dataclasses.dataclass(frozen=True) +class WorkflowRun: + """ + A workflow run from the GitHub API. + + https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run + """ + + name: str + head_sha: str + created_at: str + status: str + conclusion: str + artifacts_url: str + + @classmethod + def from_raw(cls, data: dict[str, typing.Any]): + """Create an instance using the raw data from the API, discarding unused fields.""" + return cls(**{ + key.name: data[key.name] for key in dataclasses.fields(cls) + }) + + def generate_token() -> str: """ Generate a JWT token to access the GitHub API. @@ -121,12 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client: raise e -def check_run_status(run: dict) -> str: +def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" - created_at = datetime.datetime.strptime(run["created_at"], ISO_FORMAT_STRING) + created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING) run_time = datetime.datetime.now() - created_at - if run["status"] != "completed": + if run.status != "completed": if run_time <= MAX_RUN_TIME: raise RunPendingError( f"The requested run is still pending. It was created " @@ -135,12 +159,12 @@ def check_run_status(run: dict) -> str: else: raise RunTimeoutError("The requested workflow was not ready in time.") - if run["conclusion"] != "success": + if run.conclusion != "success": # The action failed, or did not run - raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") + raise ActionFailedError(f"The requested workflow ended with: {run.conclusion}") # The requested action is ready - return run["artifacts_url"] + return run.artifacts_url def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str: @@ -155,7 +179,8 @@ def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_nam # Filter the runs for the one associated with the given SHA for run in runs["workflow_runs"]: - if run["name"] == action_name and sha == run["head_sha"]: + run = WorkflowRun.from_raw(run) + if run.name == action_name and sha == run.head_sha: break else: raise NotFoundError( diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index a9eab9a5..f5e072a9 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -1,4 +1,6 @@ +import dataclasses import datetime +import typing import unittest from unittest import mock @@ -42,45 +44,46 @@ class GeneralUtilityTests(unittest.TestCase): class CheckRunTests(unittest.TestCase): """Tests the check_run_status utility.""" + run_kwargs: typing.Mapping = { + "name": "run_name", + "head_sha": "sha", + "status": "completed", + "conclusion": "success", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + "artifacts_url": "url", + } + def test_completed_run(self): """Test that an already completed run returns the correct URL.""" final_url = "some_url_string_1234" - result = github_utils.check_run_status({ - "status": "completed", - "conclusion": "success", - "created_at": 
datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - "artifacts_url": final_url, - }) + kwargs = dict(self.run_kwargs, artifacts_url=final_url) + result = github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) self.assertEqual(final_url, result) def test_pending_run(self): """Test that a pending run raises the proper exception.""" + kwargs = dict(self.run_kwargs, status="pending") with self.assertRaises(github_utils.RunPendingError): - github_utils.check_run_status({ - "status": "pending", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - }) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def test_timeout_error(self): """Test that a timeout is declared after a certain duration.""" + kwargs = dict(self.run_kwargs, status="pending") # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion - created = ( + kwargs["created_at"] = ( datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(github_utils.ISO_FORMAT_STRING) with self.assertRaises(github_utils.RunTimeoutError): - github_utils.check_run_status({"status": "pending", "created_at": created}) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def test_failed_run(self): """Test that a failed run raises the proper exception.""" + kwargs = dict(self.run_kwargs, conclusion="failed") with self.assertRaises(github_utils.ActionFailedError): - github_utils.check_run_status({ - "status": "completed", - "conclusion": "failed", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - }) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response: @@ -172,11 +175,16 @@ class ArtifactFetcherTests(unittest.TestCase): if request.method == "GET": if path == "/repos/owner/repo/actions/runs": + run = github_utils.WorkflowRun( + name="action_name", + head_sha="action_sha", + created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + status="completed", + conclusion="success", + artifacts_url="artifacts_url" + ) return httpx.Response( - 200, request=request, json={"workflow_runs": [{ - "name": "action_name", - "head_sha": "action_sha" - }]} + 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]} ) elif path == "/artifact_url": return httpx.Response( -- cgit v1.2.3 From f16d3b1b1d14cdf0de1e56ae2bc466152e930f34 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 24 Jul 2022 10:06:47 +0200 Subject: Use UTC Time For GitHub API When reading the created_at time from the GitHub API, it'll be a naive date string with UTC time, so we use that instead of the system's time. 
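The skew is easy to demonstrate in isolation (a standalone sketch; the
format string matches the module's ISO_FORMAT_STRING, and UTC+4 is just
an example of a non-UTC system clock):

    import datetime

    ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ"

    # GitHub reports naive UTC timestamps; simulate a run created just now.
    created_at = datetime.datetime.strptime(
        datetime.datetime.utcnow().strftime(ISO_FORMAT_STRING),
        ISO_FORMAT_STRING,
    )

    # On a machine at UTC+4, local "now" is four hours ahead of UTC, so this
    # naive subtraction overstates the run's age by roughly four hours.
    run_time_local = datetime.datetime.now() - created_at

    # Subtracting from UTC "now" yields the actual elapsed time (about zero).
    run_time_utc = datetime.datetime.utcnow() - created_at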
Signed-off-by: Hassan Abouelela
---
 pydis_site/apps/api/github_utils.py            | 2 +-
 pydis_site/apps/api/tests/test_github_utils.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

(limited to 'pydis_site/apps/api/tests')

diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
index c4ace6b7..7d26b147 100644
--- a/pydis_site/apps/api/github_utils.py
+++ b/pydis_site/apps/api/github_utils.py
@@ -148,7 +148,7 @@ def authorize(owner: str, repo: str) -> httpx.Client:
 def check_run_status(run: WorkflowRun) -> str:
     """Check if the provided run has been completed, otherwise raise an exception."""
     created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING)
-    run_time = datetime.datetime.now() - created_at
+    run_time = datetime.datetime.utcnow() - created_at
 
     if run.status != "completed":
         if run_time <= MAX_RUN_TIME:
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
index f5e072a9..f642f689 100644
--- a/pydis_site/apps/api/tests/test_github_utils.py
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -49,7 +49,7 @@ class CheckRunTests(unittest.TestCase):
         "head_sha": "sha",
         "status": "completed",
         "conclusion": "success",
-        "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING),
+        "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING),
         "artifacts_url": "url",
     }
 
@@ -73,7 +73,7 @@ class CheckRunTests(unittest.TestCase):
         # Set the creation time to well before the MAX_RUN_TIME
         # to guarantee the right conclusion
         kwargs["created_at"] = (
-            datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
+            datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
         ).strftime(github_utils.ISO_FORMAT_STRING)
 
         with self.assertRaises(github_utils.RunTimeoutError):
-- 
cgit v1.2.3


From 25db8f564c0f5c473b165ccab14413ca4471ac7d Mon Sep 17 00:00:00 2001
From: Hassan Abouelela
Date: Sun, 14 Aug 2022 07:20:34 +0200
Subject: Explicitly Specify Infraction Time In Tests

The infraction tests checked that the route returned infractions
in the correct order, which is based on insertion time. This can be
fragile, however, since the insertion times can be very close
(or identical) during the tests.

That became even more likely with PR #741 (commit 149e67b4),
which improved database access speed.

This is fixed by explicitly specifying the insertion time and
spacing the entries out properly.
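Roughly, the approach is to give each fixture its own well-separated
timestamp instead of relying on auto-assigned creation times (a sketch of
the idea, not the actual test code):

    from datetime import datetime as dt, timedelta, timezone

    base = dt(2020, 10, 10, tzinfo=timezone.utc)

    # A full minute between rows dwarfs any insertion jitter, so ordering
    # by inserted_at becomes deterministic.
    insertion_times = [base + timedelta(minutes=i) for i in range(5)]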
Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_infractions.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index f1107734..89ee4e23 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -56,15 +56,17 @@ class InfractionTests(AuthenticatedAPITestCase): type='ban', reason='He terk my jerb!', hidden=True, + inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc), expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc), - active=True + active=True, ) cls.ban_inactive = Infraction.objects.create( user_id=cls.user.id, actor_id=cls.user.id, type='ban', reason='James is an ass, and we won\'t be working with him again.', - active=False + active=False, + inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), ) cls.mute_permanent = Infraction.objects.create( user_id=cls.user.id, @@ -72,7 +74,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='mute', reason='He has a filthy mouth and I am his soap.', active=True, - expires_at=None + inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), + expires_at=None, ) cls.superstar_expires_soon = Infraction.objects.create( user_id=cls.user.id, @@ -80,7 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='superstar', reason='This one doesn\'t matter anymore.', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5) + inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5), ) cls.voiceban_expires_later = Infraction.objects.create( user_id=cls.user.id, @@ -88,7 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='voice_ban', reason='Jet engine mic', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5) + inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5), ) def test_list_all(self): -- cgit v1.2.3 From 92a42694b6ad1a29e5a21e0b3e57639528837113 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 23:45:25 +0400 Subject: Fix Tests For Tag Metadata Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 7 +- pydis_site/apps/content/tests/test_utils.py | 132 +++++++++++++++++++++++-- pydis_site/apps/content/tests/test_views.py | 36 ++++--- pydis_site/apps/content/utils.py | 2 +- 4 files changed, 148 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f642f689..6e25bc80 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -11,6 +11,7 @@ import rest_framework.response import rest_framework.test from django.urls import reverse +from pydis_site import settings from .. 
import github_utils @@ -49,7 +50,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING), + "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,7 @@ class CheckRunTests(unittest.TestCase): # to guarantee the right conclusion kwargs["created_at"] = ( datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) - ).strftime(github_utils.ISO_FORMAT_STRING) + ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) @@ -178,7 +179,7 @@ class ArtifactFetcherTests(unittest.TestCase): run = github_utils.WorkflowRun( name="action_name", head_sha="action_sha", - created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), status="completed", conclusion="success", artifacts_url="artifacts_url" diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index 556f633c..2ef033e4 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -1,3 +1,5 @@ +import datetime +import json import tarfile import tempfile import textwrap @@ -15,6 +17,18 @@ from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc) +_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT) +TEST_COMMIT_KWARGS = { + "sha": "123", + "message": "Hello world\n\nThis is a commit message", + "date": _time, + "author": json.dumps([ + {"name": "Author 1", "email": "mail1@example.com", "date": _time_str}, + {"name": "Author 2", "email": "mail2@example.com", "date": _time_str}, + ]), +} + class GetCategoryTests(MockPagesTestCase): """Tests for the get_category function.""" @@ -109,6 +123,10 @@ class GetPageTests(MockPagesTestCase): class TagUtilsTests(TestCase): """Tests for the tag-related utilities.""" + def setUp(self) -> None: + super().setUp() + self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS) + @mock.patch.object(utils, "fetch_tags") def test_static_fetch(self, fetch_mock: mock.Mock): """Test that the static fetch function is only called at most once during static builds.""" @@ -121,9 +139,27 @@ class TagUtilsTests(TestCase): self.assertEqual(tags, result) self.assertEqual(tags, second_result) - @mock.patch("httpx.get") + @mock.patch("httpx.Client.get") def test_mocked_fetch(self, get_mock: mock.Mock): """Test that proper data is returned from fetch, but with a mocked API response.""" + fake_request = httpx.Request("GET", "https://google.com") + + # Metadata requests + returns = [httpx.Response( + request=fake_request, + status_code=200, + json=[ + {"type": "file", "name": "first_tag.md", "sha": "123"}, + {"type": "file", "name": "second_tag.md", "sha": "456"}, + {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"}, + ] + ), httpx.Response( + request=fake_request, + status_code=200, + json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}] + )] + + # Main content request bodies = ( "This is the first tag!", textwrap.dedent(""" @@ -156,33 +192,36 @@ class TagUtilsTests(TestCase): body = (tar_folder / 
"temp.tar").read_bytes() - get_mock.return_value = httpx.Response( + returns.append(httpx.Response( status_code=200, content=body, - request=httpx.Request("GET", "https://google.com"), - ) + request=fake_request, + )) + get_mock.side_effect = returns result = utils.fetch_tags() def sort(_tag: models.Tag) -> str: return _tag.name self.assertEqual(sorted([ - models.Tag(name="first_tag", body=bodies[0]), - models.Tag(name="second_tag", body=bodies[1]), - models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name), + models.Tag(name="first_tag", body=bodies[0], sha="123"), + models.Tag(name="second_tag", body=bodies[1], sha="245"), + models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"), ], key=sort), sorted(result, key=sort)) def test_get_real_tag(self): """Test that a single tag is returned if it exists.""" - tag = models.Tag.objects.create(name="real-tag") + tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit) result = utils.get_tag("real-tag") self.assertEqual(tag, result) def test_get_grouped_tag(self): """Test fetching a tag from a group.""" - tag = models.Tag.objects.create(name="real-tag", group="real-group") + tag = models.Tag.objects.create( + name="real-tag", group="real-group", last_commit=self.commit + ) result = utils.get_tag("real-group/real-tag") self.assertEqual(tag, result) @@ -269,3 +308,78 @@ class TagUtilsTests(TestCase): tag = models.Tag(**options) with self.subTest(tag=tag): self.assertEqual(url, tag.url) + + @mock.patch("httpx.Client.get") + def test_get_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a normal tag.""" + tag = models.Tag.objects.create(name="example") + + authors = json.loads(self.commit.author) + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[1], + } + }] + ) + + result = utils.get_tag(tag.name) + self.assertEqual(tag, result) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch("httpx.Client.get") + def test_get_group_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a group tag.""" + tag = models.Tag.objects.create(name="example", group="group-name") + + authors = json.loads(self.commit.author) + authors.pop() + self.commit.author = json.dumps(authors) + self.commit.save() + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[0], + } + }] + ) + + utils.set_tag_commit(tag) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch.object(utils, "set_tag_commit") + def test_exiting_commit(self, set_commit_mock: mock.Mock): + """Test that a commit is saved when the data has not changed.""" + tag = models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit) + + # This is only applied to the object, not to the 
database + tag.last_commit = None + + utils.record_tags([tag]) + self.assertEqual(self.commit, tag.last_commit) + + result = utils.get_tag("tag-name") + self.assertEqual(tag, result) + set_commit_mock.assert_not_called() diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index c5c25be4..658ac2cc 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -8,10 +8,11 @@ from django.http import Http404 from django.test import RequestFactory, SimpleTestCase, override_settings from django.urls import reverse -from pydis_site.apps.content.models import Tag +from pydis_site.apps.content.models import Commit, Tag from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS from pydis_site.apps.content.views import PageOrCategoryView @@ -193,11 +194,12 @@ class TagViewTests(django.test.TestCase): def setUp(self): """Set test helpers, then set up fake filesystem.""" super().setUp() + self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS) def test_routing(self): """Test that the correct template is returned for each route.""" - Tag.objects.create(name="example") - Tag.objects.create(name="grouped-tag", group="group-name") + Tag.objects.create(name="example", last_commit=self.commit) + Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit) cases = [ ("/pages/tags/example/", "content/tag.html"), @@ -213,7 +215,7 @@ class TagViewTests(django.test.TestCase): def test_valid_tag_returns_200(self): """Test that a page is returned for a valid tag.""" - Tag.objects.create(name="example", body="This is the tag body.") + Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual(200, response.status_code) self.assertIn("This is the tag body", response.content.decode("utf-8")) @@ -233,7 +235,7 @@ class TagViewTests(django.test.TestCase): Tag content here. """) - tag = Tag.objects.create(name="example", body=body) + tag = Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") expected = { "page_title": "example", @@ -256,7 +258,9 @@ class TagViewTests(django.test.TestCase): The only difference between this and a regular tag are the breadcrumbs, so only those are checked. 
""" - Tag.objects.create(name="example", body="Body text", group="group-name") + Tag.objects.create( + name="example", body="Body text", group="group-name", last_commit=self.commit + ) response = self.client.get("/pages/tags/group-name/example/") self.assertListEqual([ {"name": "Pages", "path": "."}, @@ -266,9 +270,9 @@ class TagViewTests(django.test.TestCase): def test_group_page(self): """Test rendering of a group's root page.""" - Tag.objects.create(name="tag-1", body="Body 1", group="group-name") - Tag.objects.create(name="tag-2", body="Body 2", group="group-name") - Tag.objects.create(name="not-included") + Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit) + Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit) + Tag.objects.create(name="not-included", last_commit=self.commit) response = self.client.get("/pages/tags/group-name/") content = response.content.decode("utf-8") @@ -298,7 +302,7 @@ class TagViewTests(django.test.TestCase): **This text is in bold** """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -317,7 +321,7 @@ class TagViewTests(django.test.TestCase): Tag body. """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -333,7 +337,7 @@ class TagViewTests(django.test.TestCase): --- """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual( "Embed title", @@ -345,7 +349,7 @@ class TagViewTests(django.test.TestCase): """Test hyperlinking of tags works as intended.""" filler_before, filler_after = "empty filler text\n\n", "more\nfiller" body = filler_before + "`!tags return`" + filler_after - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) other_url = reverse("content:tag", kwargs={"location": "return"}) response = self.client.get("/pages/tags/example/") @@ -356,9 +360,9 @@ class TagViewTests(django.test.TestCase): def test_tag_root_page(self): """Test the root tag page which lists all tags.""" - Tag.objects.create(name="tag-1") - Tag.objects.create(name="tag-2") - Tag.objects.create(name="tag-3") + Tag.objects.create(name="tag-1", last_commit=self.commit) + Tag.objects.create(name="tag-2", last_commit=self.commit) + Tag.objects.create(name="tag-3", last_commit=self.commit) response = self.client.get("/pages/tags/") content = response.content.decode("utf-8") diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index e4a24a73..63f1c41c 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -130,7 +130,7 @@ def fetch_tags() -> list[Tag]: def set_tag_commit(tag: Tag) -> None: """Fetch commit information from the API, and save it for the tag.""" - if settings.STATIC_BUILD: + if settings.STATIC_BUILD: # pragma: no cover # Static builds request every page during build, which can ratelimit it. # Instead, we return some fake data. 
tag.last_commit = Commit( -- cgit v1.2.3 From 4998984727c2474bea7577c89d6fadda864cb538 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 19 Aug 2022 01:10:53 +0400 Subject: Fix Unittest Deprecation Warnings Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_filterlists.py | 4 ++-- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 2 +- pydis_site/apps/api/viewsets/bot/aoc_link.py | 2 +- pydis_site/apps/api/viewsets/bot/infraction.py | 2 +- pydis_site/apps/api/viewsets/bot/nomination.py | 2 +- pydis_site/apps/api/viewsets/bot/reminder.py | 2 +- pydis_site/apps/api/viewsets/bot/user.py | 2 +- pydis_site/apps/content/tests/test_views.py | 2 +- pydis_site/apps/home/tests/test_repodata_helpers.py | 6 +++--- 9 files changed, 12 insertions(+), 12 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py index 5a5bca60..9959617e 100644 --- a/pydis_site/apps/api/tests/test_filterlists.py +++ b/pydis_site/apps/api/tests/test_filterlists.py @@ -64,8 +64,8 @@ class FetchTests(AuthenticatedAPITestCase): self.assertEqual(response.status_code, 200) for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices): - self.assertEquals(api_type[0], model_type[0]) - self.assertEquals(api_type[1], model_type[1]) + self.assertEqual(api_type[0], model_type[0]) + self.assertEqual(api_type[1], model_type[1]) class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index 3a4cec60..97efb63c 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -70,4 +70,4 @@ class AocCompletionistBlockViewSet( serializer_class = AocCompletionistBlockSerializer queryset = AocCompletionistBlock.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "is_blocked") + filterset_fields = ("user__id", "is_blocked") diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index c7a96629..3cdc342d 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -68,4 +68,4 @@ class AocAccountLinkViewSet( serializer_class = AocAccountLinkSerializer queryset = AocAccountLink.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "aoc_username") + filterset_fields = ("user__id", "aoc_username") diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 7f31292f..2b89fdb3 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -154,7 +154,7 @@ class InfractionViewSet( queryset = Infraction.objects.all() pagination_class = LimitOffsetPaginationExtended filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') + filterset_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') search_fields = ('$reason',) frozen_fields = ('id', 'inserted_at', 'type', 'user', 'actor', 'hidden') diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index 144daab0..6af42bcb 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ 
-172,7 +172,7 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge serializer_class = NominationSerializer queryset = Nomination.objects.all() filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'active') + filterset_fields = ('user__id', 'active') frozen_fields = ('id', 'inserted_at', 'user', 'ended_at') frozen_on_create = ('ended_at', 'end_reason', 'active', 'inserted_at', 'reviewed') diff --git a/pydis_site/apps/api/viewsets/bot/reminder.py b/pydis_site/apps/api/viewsets/bot/reminder.py index 78d7cb3b..5f997052 100644 --- a/pydis_site/apps/api/viewsets/bot/reminder.py +++ b/pydis_site/apps/api/viewsets/bot/reminder.py @@ -125,4 +125,4 @@ class ReminderViewSet( serializer_class = ReminderSerializer queryset = Reminder.objects.prefetch_related('author') filter_backends = (DjangoFilterBackend, SearchFilter) - filter_fields = ('active', 'author__id') + filterset_fields = ('active', 'author__id') diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index 3318b2b9..ba1bcd9d 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -237,7 +237,7 @@ class UserViewSet(ModelViewSet): queryset = User.objects.all().order_by("id") pagination_class = UserListPagination filter_backends = (DjangoFilterBackend,) - filter_fields = ('name', 'discriminator') + filterset_fields = ('name', 'discriminator') def get_serializer(self, *args, **kwargs) -> ModelSerializer: """Set Serializer many attribute to True if request body contains a list.""" diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index eadad7e3..a09d22d8 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -172,7 +172,7 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase): for item in context["breadcrumb_items"]: item["path"] = Path(item["path"]) - self.assertEquals( + self.assertEqual( context["breadcrumb_items"], [ {"name": PARSED_CATEGORY_INFO["title"], "path": Path(".")}, diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index 4007eded..a963f733 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -42,7 +42,7 @@ class TestRepositoryMetadataHelpers(TestCase): metadata = self.home_view._get_repo_data() self.assertIsInstance(metadata[0], RepositoryMetadata) - self.assertEquals(len(metadata), len(self.home_view.repos)) + self.assertEqual(len(metadata), len(self.home_view.repos)) def test_returns_cached_metadata(self): """Test if the _get_repo_data helper returns cached data when available.""" @@ -82,7 +82,7 @@ class TestRepositoryMetadataHelpers(TestCase): repo = self.home_view.repos[0] self.assertIsInstance(api_data, dict) - self.assertEquals(len(api_data), len(self.home_view.repos)) + self.assertEqual(len(api_data), len(self.home_view.repos)) self.assertIn(repo, api_data.keys()) self.assertIn("stargazers_count", api_data[repo]) @@ -126,7 +126,7 @@ class TestRepositoryMetadataHelpers(TestCase): with self.assertLogs(): metadata = self.home_view._get_repo_data() - self.assertEquals(len(metadata), 0) + self.assertEqual(len(metadata), 0) def test_cleans_up_stale_metadata(self): """Tests that we clean up stale metadata when we start the HomeView.""" -- cgit v1.2.3 From a97e609aba84b2aa30184f1b4b2b422418d8ea6c 
Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 30 Sep 2022 18:43:46 +0400 Subject: Fix B026 Linting Errors Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f642f689..2eaf48d9 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -28,7 +28,7 @@ class GeneralUtilityTests(unittest.TestCase): """ self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") return original_encode( - payload, "secret-encoding-key", algorithm="HS256", *args, **kwargs + payload, "secret-encoding-key", *args, algorithm="HS256", **kwargs ) original_encode = jwt.encode -- cgit v1.2.3 From fca789323f750ff74bb5f4de92f7a8b96eb51e1f Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 27 Oct 2022 16:21:30 +0100 Subject: Add tests for metricity activity endpoint --- pydis_site/apps/api/tests/test_users.py | 100 ++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 5d10069d..60be8598 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -1,3 +1,4 @@ +import json import random from unittest.mock import Mock, patch @@ -502,6 +503,105 @@ class UserMetricityTests(AuthenticatedAPITestCase): "total_messages": total_messages }) + def test_metricity_activity_data(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + self.metricity.total_messages_in_past_n_days.return_value = [[0, 10]] + + # When + url = reverse("api:bot:user-metricity-activity-data") + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 200) + self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10) + self.assertEqual(response.json(), [{"id": 0, "message_count": 10}]) + + def test_metricity_activity_data_invalid_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse("api:bot:user-metricity-activity-data") + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + QUERY_STRING="days=fifty", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {"days": ["This query parameter must be an integer."]}) + + def test_metricity_activity_data_no_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. 
+ response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'days': ["This query parameter is required."]}) + + def test_metricity_activity_data_no_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), ['Expected a list of items but got type "dict".']) + + def test_metricity_activity_data_invalid_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([123, 'username']), + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'1': ['A valid integer is required.']}) + def mock_metricity_user(self, joined_at, total_messages, total_blocks, top_channel_activity): patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity") self.metricity = patcher.start() -- cgit v1.2.3 From 798c499c3c7673612a6815c0ab77d95be066d7ce Mon Sep 17 00:00:00 2001 From: wookie184 Date: Wed, 2 Nov 2022 18:57:22 +0000 Subject: Change the endpoint to be a POST not a GET --- pydis_site/apps/api/models/bot/metricity.py | 2 +- pydis_site/apps/api/tests/test_users.py | 34 ++++++++--------------------- pydis_site/apps/api/viewsets/bot/user.py | 6 ++--- 3 files changed, 13 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index 73bc1f0c..f53dd33c 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -135,7 +135,7 @@ class Metricity: self, user_ids: list[str], days: int - ) -> list[tuple[int, int]]: + ) -> list[tuple[str, int]]: """ Query activity by a list of users in the past `days` days. diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 60be8598..9c0fa6ba 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -1,4 +1,3 @@ -import json import random from unittest.mock import Mock, patch @@ -510,13 +509,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse("api:bot:user-metricity-activity-data") - # Can't send data in body with normal GET request so use generic request. 
- response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), + data=[0, 1], QUERY_STRING="days=10", - content_type="application/json" ) # Then @@ -530,13 +526,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse("api:bot:user-metricity-activity-data") - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), + data=[0, 1], QUERY_STRING="days=fifty", - content_type="application/json" ) # Then @@ -550,12 +543,9 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), - content_type="application/json" + data=[0, 1], ) # Then @@ -569,12 +559,9 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, QUERY_STRING="days=10", - content_type="application/json" ) # Then @@ -588,13 +575,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([123, 'username']), + data=[123, 'username'], QUERY_STRING="days=10", - content_type="application/json" ) # Then diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index f1aebee0..f803b3f6 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -139,7 +139,7 @@ class UserViewSet(ModelViewSet): - 200: returned on success - 404: if a user with the given `snowflake` could not be found - ### GET /bot/users/metricity_activity_data + ### POST /bot/users/metricity_activity_data Gets the number of messages sent on the server in a given period. 
Users with no messages in the specified period or who do not @@ -324,7 +324,7 @@ class UserViewSet(ModelViewSet): return Response(dict(detail="User not found in metricity"), status=status.HTTP_404_NOT_FOUND) - @action(detail=False) + @action(detail=False, methods=["POST"]) def metricity_activity_data(self, request: Request) -> Response: """Request handler for metricity_activity_data endpoint.""" if "days" in request.query_params: @@ -352,7 +352,7 @@ class UserViewSet(ModelViewSet): data = metricity.total_messages_in_past_n_days(user_ids, days) response_data = [ - {"id": d[0], "message_count": d[1]} + {"id": int(d[0]), "message_count": d[1]} for d in data ] return Response(response_data, status=status.HTTP_200_OK) -- cgit v1.2.3 From af1a11d05f716c7891ae3b661c470d2bbac44ee6 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:28:24 +0100 Subject: add tests for new thread_id validation flow --- pydis_site/apps/api/tests/test_nominations.py | 32 +++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index 62b2314c..b3742cdd 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -524,3 +524,35 @@ class NominationTests(AuthenticatedAPITestCase): self.assertEqual(response.json(), { 'actor': ["The actor doesn't exist or has not nominated the user."] }) + + def test_patch_nomination_set_thread_id_of_active_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210} + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 200) + + def test_patch_nomination_set_thread_id_and_reviewed_of_active_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210, "reviewed": True} + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 200) + + def test_modifying_thread_id_when_ending_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210, 'active': False, 'end_reason': "What?"} + + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json(), { + 'thread_id': ['This field cannot be set when ending a nomination.'] + }) + + def test_patch_thread_id_for_inactive_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.inactive_nomination.id,)) + data = {'thread_id': 9876543210} + + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json(), { + 'thread_id': ['This field cannot be set if the nomination is inactive.'] + }) -- cgit v1.2.3 From c39ae63d407663f47bf2d824a259335234066801 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 9 Nov 2022 21:27:26 +0200 Subject: Rename delete_messages to the more generic remove_context --- pydis_site/apps/api/migrations/0085_new_filter_schema.py | 8 ++++---- .../apps/api/migrations/0086_unique_constraint_filters.py | 2 +- pydis_site/apps/api/migrations/0087_unique_filter_list.py | 8 ++++---- pydis_site/apps/api/migrations/0088_antispam_filter_list.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 8 ++++---- pydis_site/apps/api/serializers.py | 4 ++-- pydis_site/apps/api/tests/test_filters.py | 8 ++++---- 
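# [Editor's note -- hedged sketch, not part of the patch] The four nomination
# tests in the previous commit pin down the new thread_id rules: it may be set
# (alone or together with `reviewed`) while a nomination is active, but not
# when ending one or once it is inactive. The site's actual serializer logic
# may differ in detail; the checks reduce to something like:
def thread_id_patch_errors(data: dict, nomination_is_active: bool) -> list[str]:
    errors = []
    if "thread_id" in data:
        if data.get("active") is False:
            errors.append("This field cannot be set when ending a nomination.")
        elif not nomination_is_active:
            errors.append("This field cannot be set if the nomination is inactive.")
    return errors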
pydis_site/apps/api/viewsets/bot/filters.py | 12 ++++++------ 8 files changed, 26 insertions(+), 26 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index d902be7f..96d03bf4 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -41,7 +41,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: guild_pings=(["Moderators"] if name != "FILE_FORMAT" else []), filter_dm=True, dm_pings=[], - delete_messages=(True if name != "FILTER_TOKEN" else False), + remove_context=(True if name != "FILTER_TOKEN" else False), bypass_roles=["Helpers"], enabled=True, dm_content=dm_content, @@ -68,7 +68,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: guild_pings=None, filter_dm=None, dm_pings=None, - delete_messages=None, + remove_context=None, bypass_roles=None, enabled=None, dm_content=None, @@ -109,7 +109,7 @@ class Migration(migrations.Migration): ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), - ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), + ('remove_context', models.BooleanField(help_text='Whether this filter should remove the context (such as a message) triggering it.', null=True)), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), @@ -136,7 +136,7 @@ class Migration(migrations.Migration): ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None)), - ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), + ('remove_context', models.BooleanField(help_text='Whether this filter should remove the context (such as a message) triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, blank=True)), diff --git a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py index 6fa99e9e..b83e395c 
100644 --- a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py @@ -25,7 +25,7 @@ class Migration(migrations.Migration): 'guild_pings', 'filter_dm', 'dm_pings', - 'delete_messages', + 'remove_context', 'bypass_roles', 'enabled', 'send_alert', diff --git a/pydis_site/apps/api/migrations/0087_unique_filter_list.py b/pydis_site/apps/api/migrations/0087_unique_filter_list.py index 96c2b17a..b8087d9c 100644 --- a/pydis_site/apps/api/migrations/0087_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0087_unique_filter_list.py @@ -17,7 +17,7 @@ def create_unique_list(apps: Apps, _): guild_pings=[], filter_dm=True, dm_pings=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=True, dm_content="", @@ -37,7 +37,7 @@ def create_unique_list(apps: Apps, _): content="everyone", filter_list=list_, description="", - delete_messages=True, + remove_context=True, bypass_roles=["Helpers"], dm_content=( "Please don't try to ping `@everyone` or `@here`. Your message has been removed. " @@ -51,7 +51,7 @@ def create_unique_list(apps: Apps, _): content="webhook", filter_list=list_, description="", - delete_messages=True, + remove_context=True, dm_content=( "Looks like you posted a Discord webhook URL. " "Therefore, your message has been removed, and your webhook has been deleted. " @@ -74,7 +74,7 @@ def create_unique_list(apps: Apps, _): content="discord_token", filter_list=list_, filter_dm=False, - delete_messages=True, + remove_context=True, dm_content=( "I noticed you posted a seemingly valid Discord API " "token in your message and have removed your message. " diff --git a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py index 354e4520..fcb56781 100644 --- a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py +++ b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py @@ -17,7 +17,7 @@ def create_antispam_list(apps: Apps, _): guild_pings=["Moderators"], filter_dm=False, dm_pings=[], - delete_messages=True, + remove_context=True, bypass_roles=["Helpers"], enabled=True, dm_content="", diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 1ea21a48..4d8a4025 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -75,8 +75,8 @@ class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Who to ping when this filter triggers on a DM.", null=False ) - delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it.", + remove_context = models.BooleanField( + help_text="Whether this filter should remove the context (such as a message) triggering it.", null=False ) bypass_roles = ArrayField( @@ -185,8 +185,8 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Who to ping when this filter triggers on a DM.", null=True ) - delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it.", + remove_context = models.BooleanField( + help_text="Whether this filter should remove the context (such as a message) triggering it.", null=True ) bypass_roles = ArrayField( diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index d6bae2cb..eabca66e 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -154,7 +154,7 @@ 
SETTINGS_FIELDS = ( 'guild_pings', 'filter_dm', 'dm_pings', - 'delete_messages', + 'remove_context', 'send_alert', 'bypass_roles', 'enabled', @@ -187,7 +187,7 @@ BASE_SETTINGS_FIELDS = ( "bypass_roles", "filter_dm", "enabled", - "delete_messages", + "remove_context", "send_alert" ) INFRACTION_AND_NOTIFICATION_FIELDS = ( diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 5f40c6f9..f3afdaeb 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -49,7 +49,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: ping_type=[], filter_dm=False, dm_ping_type=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=False, default_action=FilterAction( @@ -76,7 +76,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "ping_type": ["onduty"], "filter_dm": True, "dm_ping_type": ["123456"], - "delete_messages": True, + "remove_context": True, "bypass_roles": [123456], "enabled": True, "default_action": FilterAction( @@ -130,7 +130,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: ping_type=[], filter_dm=False, dm_ping_type=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=False, default_action=FilterAction( @@ -157,7 +157,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "ping_type": ["everyone"], "filter_dm": False, "dm_ping_type": ["here"], - "delete_messages": False, + "remove_context": False, "bypass_roles": [9876], "enabled": True, "filter_action": None, diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index dd9a7d87..1eb05053 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -36,7 +36,7 @@ class FilterListViewSet(ModelViewSet): ... "filter_dm": None, ... "enabled": None ... "send_alert": True, - ... "delete_messages": None + ... "remove_context": None ... "infraction_and_notification": { ... "infraction_type": None, ... "infraction_reason": "", @@ -64,7 +64,7 @@ class FilterListViewSet(ModelViewSet): ... ], ... "filter_dm": True, ... "enabled": True - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction_and_notification": { ... "infraction_type": "", @@ -111,7 +111,7 @@ class FilterListViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": None, + ... "remove_context": None, ... "send_alert": None ... "infraction_and_notification": { ... "infraction_type": None, @@ -140,7 +140,7 @@ class FilterListViewSet(ModelViewSet): ... ], ... "filter_dm": True, ... "enabled": True - ... "delete_messages": True + ... "remove_context": True ... "send_alert": True ... "infraction_and_notification": { ... "infraction_type": "", @@ -198,7 +198,7 @@ class FilterViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction": { ... "infraction_type": None, @@ -237,7 +237,7 @@ class FilterViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction": { ... 
"infraction_type": None, -- cgit v1.2.3 From 09b69ba789be11fda24493fce671b5bc37912382 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 17 Nov 2022 19:58:36 +0000 Subject: Include users with no messages in response, and simplify response format --- pydis_site/apps/api/tests/test_users.py | 4 ++-- pydis_site/apps/api/viewsets/bot/user.py | 19 ++++++++----------- 2 files changed, 10 insertions(+), 13 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 9c0fa6ba..d86e80bb 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -505,7 +505,7 @@ class UserMetricityTests(AuthenticatedAPITestCase): def test_metricity_activity_data(self): # Given self.mock_no_metricity_user() # Other functions shouldn't be used. - self.metricity.total_messages_in_past_n_days.return_value = [[0, 10]] + self.metricity.total_messages_in_past_n_days.return_value = [(0, 10)] # When url = reverse("api:bot:user-metricity-activity-data") @@ -518,7 +518,7 @@ class UserMetricityTests(AuthenticatedAPITestCase): # Then self.assertEqual(response.status_code, 200) self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10) - self.assertEqual(response.json(), [{"id": 0, "message_count": 10}]) + self.assertEqual(response.json(), {"0": 10, "1": 0}) def test_metricity_activity_data_invalid_days(self): # Given diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index f803b3f6..db73a83c 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -140,10 +140,8 @@ class UserViewSet(ModelViewSet): - 404: if a user with the given `snowflake` could not be found ### POST /bot/users/metricity_activity_data - Gets the number of messages sent on the server in a given period. - - Users with no messages in the specified period or who do not - exist are not included in the result. + Returns a mapping of user ID to message count in a given period for + the given user IDs. #### Required Query Parameters - days: how many days into the past to count message from. @@ -155,9 +153,10 @@ class UserViewSet(ModelViewSet): ... ] #### Response format - >>> [ - ... {"id": 409107086526644234, "message_count": 54} - ... ] + >>> { + ... "409107086526644234": 54, + ... "493839819168808962": 0 + ... 
} #### Status codes - 200: returned on success @@ -351,8 +350,6 @@ class UserViewSet(ModelViewSet): with Metricity() as metricity: data = metricity.total_messages_in_past_n_days(user_ids, days) - response_data = [ - {"id": int(d[0]), "message_count": d[1]} - for d in data - ] + default_data = {user_id: 0 for user_id in user_ids} + response_data = default_data | dict(data) return Response(response_data, status=status.HTTP_200_OK) -- cgit v1.2.3 From 3862e051407061186609dbeaab23ec53aeca2f94 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 27 Jan 2023 22:20:10 +0200 Subject: Update tests --- pydis_site/apps/api/models/__init__.py | 1 - pydis_site/apps/api/tests/test_filters.py | 309 ++++++++++++++++-------------- pydis_site/apps/api/tests/test_models.py | 3 - 3 files changed, 170 insertions(+), 143 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index 580c95a0..fee4c8d5 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -6,7 +6,6 @@ from .bot import ( BumpedThread, DocumentationLink, DeletedMessage, - FilterList, Infraction, Message, MessageDeletionContext, diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f3afdaeb..cae78cd6 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -1,19 +1,13 @@ import contextlib from dataclasses import dataclass +from datetime import timedelta from typing import Any, Dict, Tuple, Type from django.db.models import Model -from django_hosts import reverse +from django.urls import reverse -from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order - FilterList, - FilterSettings, - FilterAction, - ChannelRange, - Filter, - FilterOverride -) -from pydis_site.apps.api.tests.base import APISubdomainTestCase +from pydis_site.apps.api.models.bot.filters import FilterList, Filter +from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @dataclass() @@ -21,99 +15,76 @@ class TestSequence: model: Type[Model] route: str object: Dict[str, Any] - ignored_fields: Tuple[str] = () + ignored_fields: Tuple[str, ...] 
= () def url(self, detail: bool = False) -> str: - return reverse(f'bot:{self.route}-{"detail" if detail else "list"}', host='api') + return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}') -FK_FIELDS: Dict[Type[Model], Tuple[str]] = { - FilterList: ("default_settings",), - FilterSettings: ("default_action", "default_range"), - FilterAction: (), - ChannelRange: (), +FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = { + FilterList: (), Filter: ("filter_list",), - FilterOverride: ("filter_action", "filter_range") } def get_test_sequences() -> Dict[str, TestSequence]: + filter_list1_deny_dict = { + "name": "testname", + "list_type": 0, + "guild_pings": [], + "filter_dm": True, + "dm_pings": [], + "remove_context": False, + "bypass_roles": [], + "enabled": True, + "dm_content": "", + "dm_embed": "", + "infraction_type": "NONE", + "infraction_reason": "", + "infraction_duration": timedelta(seconds=0), + "infraction_channel": 0, + "disabled_channels": [], + "disabled_categories": [], + "enabled_channels": [], + "enabled_categories": [], + "send_alert": True + } + filter_list1_allow_dict = filter_list1_deny_dict.copy() + filter_list1_allow_dict["list_type"] = 1 + filter_list1_allow = FilterList(**filter_list1_allow_dict) + return { - "filter_list": TestSequence( + "filter_list1": TestSequence( FilterList, "filterlist", - { - "name": "testname", - "list_type": 0, - "default_settings": FilterSettings( - ping_type=[], - filter_dm=False, - dm_ping_type=[], - remove_context=False, - bypass_roles=[], - enabled=False, - default_action=FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - default_range=ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - ) - }, - ignored_fields=("filters",) + filter_list1_deny_dict, + ignored_fields=("filters", "created_at", "updated_at") ), - "filter_settings": TestSequence( - FilterSettings, - "filtersettings", + "filter_list2": TestSequence( + FilterList, + "filterlist", { - "ping_type": ["onduty"], - "filter_dm": True, - "dm_ping_type": ["123456"], + "name": "testname2", + "list_type": 1, + "guild_pings": ["Moderators"], + "filter_dm": False, + "dm_pings": ["here"], "remove_context": True, - "bypass_roles": [123456], - "enabled": True, - "default_action": FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - "default_range": ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - } - ), - "filter_action": TestSequence( - FilterAction, - "filteraction", - { - "dm_content": "This is a DM message.", - "infraction_type": "Mute", - "infraction_reason": "Too long beard", - "infraction_duration": "1 02:03:00" - } - ), - "channel_range": TestSequence( - ChannelRange, - "channelrange", - { - "disallowed_channels": [1234], - "disallowed_categories": [5678], - "allowed_channels": [9101], - "allowed_categories": [1121], - "default": True - } + "bypass_roles": ["123456"], + "enabled": False, + "dm_content": "testing testing", + "dm_embed": "one two three", + "infraction_type": "MUTE", + "infraction_reason": "stop testing", + "infraction_duration": timedelta(seconds=10.5), + "infraction_channel": 123, + "disabled_channels": ["python-general"], + "disabled_categories": ["CODE JAM"], + "enabled_channels": ["mighty-mice"], + "enabled_categories": ["Lobby"], + "send_alert": False + }, + 
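# [Editor's note -- illustrative sketch, not part of the patch] Each
# TestSequence bundles a model, its DRF router basename, and a sample payload
# so a single generic test class can exercise every filter endpoint the same
# way. Detail URLs are built by appending an id to the reversed list route:
seq = TestSequence(FilterList, "filterlist", filter_list1_deny_dict)
list_url = seq.url()          # reverse("api:bot:filterlist-list")
detail_url = f"{list_url}/1"  # the object with id 1, as the tests below do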
ignored_fields=("filters", "created_at", "updated_at") ), "filter": TestSequence( Filter, @@ -121,58 +92,35 @@ def get_test_sequences() -> Dict[str, TestSequence]: { "content": "bad word", "description": "This is a really bad word.", - "additional_field": None, - "override": None, - "filter_list": FilterList( - name="testname", - list_type=0, - default_settings=FilterSettings( - ping_type=[], - filter_dm=False, - dm_ping_type=[], - remove_context=False, - bypass_roles=[], - enabled=False, - default_action=FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - default_range=ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - ) - ) - } + "additional_field": "{'hi': 'there'}", + "guild_pings": None, + "filter_dm": None, + "dm_pings": None, + "remove_context": None, + "bypass_roles": None, + "enabled": None, + "dm_content": None, + "dm_embed": None, + "infraction_type": None, + "infraction_reason": None, + "infraction_duration": None, + "infraction_channel": None, + "disabled_channels": None, + "disabled_categories": None, + "enabled_channels": None, + "enabled_categories": None, + "send_alert": None, + "filter_list": filter_list1_allow + }, + ignored_fields=("created_at", "updated_at") ), - "filter_override": TestSequence( - FilterOverride, - "filteroverride", - { - "ping_type": ["everyone"], - "filter_dm": False, - "dm_ping_type": ["here"], - "remove_context": False, - "bypass_roles": [9876], - "enabled": True, - "filter_action": None, - "filter_range": None - } - ) } def save_nested_objects(object_: Model, save_root: bool = True) -> None: - for field in FK_FIELDS[object_.__class__]: + for field in FK_FIELDS.get(object_.__class__, ()): value = getattr(object_, field) - - if value is not None: - save_nested_objects(value) + save_nested_objects(value) if save_root: object_.save() @@ -182,6 +130,8 @@ def clean_test_json(json: dict) -> dict: for key, value in json.items(): if isinstance(value, Model): json[key] = value.id + elif isinstance(value, timedelta): + json[key] = str(value.total_seconds()) return json @@ -194,7 +144,22 @@ def clean_api_json(json: dict, sequence: TestSequence) -> dict: return json -class GenericFilterTest(APISubdomainTestCase): +def flatten_settings(json: dict) -> dict: + settings = json.pop("settings", {}) + flattened_settings = {} + for entry, value in settings.items(): + if isinstance(value, dict): + flattened_settings.update(value) + else: + flattened_settings[entry] = value + + json.update(flattened_settings) + + return json + + +class GenericFilterTests(AuthenticatedAPITestCase): + def test_cannot_read_unauthenticated(self) -> None: for name, sequence in get_test_sequences().items(): with self.subTest(name=name): @@ -222,7 +187,7 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.get(sequence.url()) self.assertDictEqual( clean_test_json(sequence.object), - clean_api_json(response.json()[0], sequence) + clean_api_json(flatten_settings(response.json()[0]), sequence) ) def test_fetch_by_id(self) -> None: @@ -236,7 +201,7 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.get(f"{sequence.url()}/{saved.id}") self.assertDictEqual( clean_test_json(sequence.object), - clean_api_json(response.json(), sequence) + clean_api_json(flatten_settings(response.json()), sequence) ) def test_fetch_non_existing(self) -> None: @@ -259,14 +224,15 @@ class GenericFilterTest(APISubdomainTestCase): 
self.assertEqual(response.status_code, 201) self.assertDictEqual( - clean_api_json(response.json(), sequence), + clean_api_json(flatten_settings(response.json()), sequence), clean_test_json(sequence.object) ) def test_creation_missing_field(self) -> None: for name, sequence in get_test_sequences().items(): with self.subTest(name=name): - save_nested_objects(sequence.model(**sequence.object), False) + saved = sequence.model(**sequence.object) + save_nested_objects(saved) data = clean_test_json(sequence.object.copy()) for field in sequence.model._meta.get_fields(): @@ -296,3 +262,68 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.delete(f"{sequence.url()}/42") self.assertEqual(response.status_code, 404) + + +class FilterValidationTests(AuthenticatedAPITestCase): + + def test_filter_validation(self) -> None: + test_sequences = get_test_sequences() + base_filter = test_sequences["filter"] + base_filter_list = test_sequences["filter_list1"] + cases = ( + ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), + ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), + ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "MUTE"}, 200), + ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), + ({"enabled_categories": ["CODE JAM"]}, {}, 200), ({"disabled_categories": ["CODE JAM"]}, {}, 200), + ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, {}, 400), + ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, {}, 400), + ({"enabled_channels": ["admins"]}, {"disabled_channels": ["123", "admins"]}, 400), + ({"enabled_categories": ["admins"]}, {"disabled_categories": ["123", "admins"]}, 400), + ) + + for filter_settings, filter_list_settings, response_code in cases: + with self.subTest(f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code): + base_filter.model.objects.all().delete() + base_filter_list.model.objects.all().delete() + + case_filter_dict = base_filter.object.copy() + case_fl_dict = base_filter_list.object.copy() + case_fl_dict.update(filter_list_settings) + + case_fl = base_filter_list.model(**case_fl_dict) + case_filter_dict["filter_list"] = case_fl + case_filter = base_filter.model(**case_filter_dict) + save_nested_objects(case_filter) + + filter_settings["filter_list"] = case_fl + response = self.client.patch( + f"{base_filter.url()}/{case_filter.id}", data=clean_test_json(filter_settings) + ) + self.assertEqual(response.status_code, response_code) + + def test_filter_list_validation(self) -> None: + test_sequences = get_test_sequences() + base_filter_list = test_sequences["filter_list1"] + cases = ( + ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), + ({"infraction_duration": timedelta(seconds=10), "infraction_type": "MUTE"}, 200), + ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), + ({"enabled_categories": ["CODE JAM"]}, 200), ({"disabled_categories": ["CODE JAM"]}, 200), + ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, 400), + ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, 400), + ) + + for filter_list_settings, response_code in cases: + with self.subTest(fl_settings=filter_list_settings, response=response_code): + base_filter_list.model.objects.all().delete() + + case_fl_dict = 
base_filter_list.object.copy() + case_fl = base_filter_list.model(**case_fl_dict) + save_nested_objects(case_fl) + + response = self.client.patch( + f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) + ) + self.assertEqual(response.status_code, response_code) diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index b9b14a84..25d771cc 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -8,7 +8,6 @@ from pydis_site.apps.api.models import ( DocumentationLink, Filter, FilterList, - FilterSettings, Infraction, MessageDeletionContext, Nomination, @@ -110,13 +109,11 @@ class StringDunderMethodTests(SimpleTestCase): FilterList( name="forbidden_duckies", list_type=0, - default_settings=FilterSettings() ), Filter( content="ducky_nsfw", description="This ducky is totally inappropriate!", additional_field=None, - override=None ), OffensiveMessage( id=602951077675139072, -- cgit v1.2.3 From 5f538e9e876adc7f4a459fe230b46bdde61b3f64 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 27 Jan 2023 23:37:26 +0200 Subject: Make filter unique constraint use NULLS NOT DISTINCT The existing constraint was ineffective as null values were considered distinct, and so two filters with the same content and no overrides were considered different. This change uses a new PSQL 15 feature unsupported in django currently, and so it is added with raw SQL. --- .../migrations/0087_unique_constraint_filters.py | 39 ++++++++-------------- pydis_site/apps/api/serializers.py | 9 +++++ pydis_site/apps/api/tests/test_filters.py | 12 +++++++ 3 files changed, 34 insertions(+), 26 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py index b2ff91f1..910e7b1b 100644 --- a/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py @@ -1,5 +1,3 @@ -# Generated by Django 3.1.14 on 2022-03-22 16:31 - from django.db import migrations, models @@ -10,29 +8,18 @@ class Migration(migrations.Migration): ] operations = [ - migrations.AddConstraint( - model_name='filter', - constraint=models.UniqueConstraint(fields=( - 'content', - 'additional_field', - 'filter_list', - 'dm_content', - 'dm_embed', - 'infraction_type', - 'infraction_reason', - 'infraction_duration', - 'infraction_channel', - 'guild_pings', - 'filter_dm', - 'dm_pings', - 'remove_context', - 'bypass_roles', - 'enabled', - 'send_alert', - 'enabled_channels', - 'disabled_channels', - 'enabled_categories', - 'disabled_categories' - ), name='unique_filters'), + migrations.RunSQL( + "ALTER TABLE api_filter " + "ADD CONSTRAINT unique_filters UNIQUE NULLS NOT DISTINCT " + "(content, additional_field, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", + state_operations=[ + migrations.AddConstraint( + model_name='filter', + constraint=models.UniqueConstraint( + fields=('content', 'additional_field', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 
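# [Editor's note -- explanation, not part of the patch] SQL treats NULLs as
# distinct by default, so two filters whose override columns were all NULL
# never collided with the old UniqueConstraint. PostgreSQL 15's
# NULLS NOT DISTINCT closes that hole; Django could not yet express it, hence
# the raw SQL above, with state_operations keeping Django's model state in
# sync. The behavioural difference, sketched in SQL:
#
#   CREATE TABLE t (a text, b text, UNIQUE NULLS NOT DISTINCT (a, b));
#   INSERT INTO t VALUES ('x', NULL);
#   INSERT INTO t VALUES ('x', NULL);  -- rejected; accepted without the clause
#
# Because the constraint now exists only at the database level, DRF's
# validators never see it and a duplicate surfaces as an IntegrityError at
# save time -- see the serializer create() override added below, which turns
# what would be a 500 into a 400. (Its `-> User` return annotation looks like
# a copy-paste slip; the method returns a Filter.)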
'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), + name='unique_filters' + ), + ), + ], ), ] diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 7f9461ec..8da47802 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -257,6 +257,15 @@ class FilterSerializer(ModelSerializer): ) + SETTINGS_FIELDS extra_kwargs = _create_filter_meta_extra_kwargs() + def create(self, validated_data: dict) -> User: + """Override the create method to catch violations of the custom uniqueness constraint.""" + try: + return super().create(validated_data) + except IntegrityError: + raise ValidationError( + "Check if a filter with this combination of content and settings already exists in this filter list." + ) + def to_representation(self, instance: Filter) -> dict: """ Provides a custom JSON representation to the Filter Serializers. diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index cae78cd6..73c8e0d9 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -327,3 +327,15 @@ class FilterValidationTests(AuthenticatedAPITestCase): f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) ) self.assertEqual(response.status_code, response_code) + + def test_filter_unique_constraint(self) -> None: + test_filter = get_test_sequences()["filter"] + test_filter.model.objects.all().delete() + test_filter_object = test_filter.model(**test_filter.object) + save_nested_objects(test_filter_object, False) + + response = self.client.post(test_filter.url(), data=clean_test_json(test_filter.object)) + self.assertEqual(response.status_code, 201) + + response = self.client.post(test_filter.url(), data=clean_test_json(test_filter.object)) + self.assertEqual(response.status_code, 400) -- cgit v1.2.3 From d52a8c955aceccd719dd1511700aac9f2a564b0a Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 28 Jan 2023 16:24:58 +0200 Subject: Update viewsets, fix linting --- .../apps/api/migrations/0088_unique_filter_list.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 12 +- pydis_site/apps/api/serializers.py | 50 +- pydis_site/apps/api/tests/test_filters.py | 24 +- pydis_site/apps/api/viewsets/bot/filters.py | 532 ++++++++++++++------- 5 files changed, 428 insertions(+), 192 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0088_unique_filter_list.py b/pydis_site/apps/api/migrations/0088_unique_filter_list.py index 3f3a34bb..98d14e2b 100644 --- a/pydis_site/apps/api/migrations/0088_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0088_unique_filter_list.py @@ -99,4 +99,4 @@ class Migration(migrations.Migration): code=create_unique_list, reverse_code=None ), - ] \ No newline at end of file + ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 4d8a4025..1eab79ba 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -4,8 +4,8 @@ from django.db import models from django.db.models import UniqueConstraint # Must be imported that way to avoid circular imports +from pydis_site.apps.api.models.mixins import ModelReprMixin, ModelTimestampMixin from .infraction import Infraction -from pydis_site.apps.api.models.mixins import ModelTimestampMixin, ModelReprMixin class FilterListType(models.IntegerChoices): @@ -76,7 +76,10 @@ class 
FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): null=False ) remove_context = models.BooleanField( - help_text="Whether this filter should remove the context (such as a message) triggering it.", + help_text=( + "Whether this filter should remove the context (such as a message) " + "triggering it." + ), null=False ) bypass_roles = ArrayField( @@ -186,7 +189,10 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): null=True ) remove_context = models.BooleanField( - help_text="Whether this filter should remove the context (such as a message) triggering it.", + help_text=( + "Whether this filter should remove the context (such as a message) " + "triggering it." + ), null=True ) bypass_roles = ArrayField( diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 8da47802..a6328eff 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -21,9 +21,9 @@ from .models import ( BumpedThread, DeletedMessage, DocumentationLink, - Infraction, - FilterList, Filter, + FilterList, + Infraction, MessageDeletionContext, Nomination, NominationEntry, @@ -183,7 +183,9 @@ ALLOW_EMPTY_SETTINGS = ( ) # Required fields for custom JSON representation purposes -BASE_FILTER_FIELDS = ('id', 'created_at', 'updated_at', 'content', 'description', 'additional_field') +BASE_FILTER_FIELDS = ( + 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field' +) BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( "bypass_roles", @@ -235,16 +237,31 @@ class FilterSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allowed and disallowed lists validation.""" if ( - (get_field_value(data, "infraction_reason") or get_field_value(data, "infraction_duration")) + ( + get_field_value(data, "infraction_reason") + or get_field_value(data, "infraction_duration") + ) and get_field_value(data, "infraction_type") == "NONE" ): - raise ValidationError("Infraction type is required with infraction duration or reason.") + raise ValidationError( + "Infraction type is required with infraction duration or reason." + ) - if set(get_field_value(data, "disabled_channels")) & set(get_field_value(data, "enabled_channels")): - raise ValidationError("You can't have the same value in both enabled and disabled channels lists.") + if ( + set(get_field_value(data, "disabled_channels")) + & set(get_field_value(data, "enabled_channels")) + ): + raise ValidationError( + "You can't have the same value in both enabled and disabled channels lists." + ) - if set(get_field_value(data, "disabled_categories")) & set(get_field_value(data, "enabled_categories")): - raise ValidationError("You can't have the same value in both enabled and disabled categories lists.") + if ( + set(get_field_value(data, "disabled_categories")) + & set(get_field_value(data, "enabled_categories")) + ): + raise ValidationError( + "You can't have the same value in both enabled and disabled categories lists." 
+ ) return data @@ -253,7 +270,13 @@ class FilterSerializer(ModelSerializer): model = Filter fields = ( - 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field', 'filter_list' + 'id', + 'created_at', + 'updated_at', + 'content', + 'description', + 'additional_field', + 'filter_list' ) + SETTINGS_FIELDS extra_kwargs = _create_filter_meta_extra_kwargs() @@ -263,7 +286,8 @@ class FilterSerializer(ModelSerializer): return super().create(validated_data) except IntegrityError: raise ValidationError( - "Check if a filter with this combination of content and settings already exists in this filter list." + "Check if a filter with this combination of content " + "and settings already exists in this filter list." ) def to_representation(self, instance: Filter) -> dict: @@ -340,7 +364,9 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'created_at', 'updated_at', 'name', 'list_type', 'filters') + SETTINGS_FIELDS + fields = ( + 'id', 'created_at', 'updated_at', 'name', 'list_type', 'filters' + ) + SETTINGS_FIELDS extra_kwargs = _create_filter_list_meta_extra_kwargs() # Ensure there can only be one filter list with the same name and type. diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 73c8e0d9..62de23c4 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -6,7 +6,7 @@ from typing import Any, Dict, Tuple, Type from django.db.models import Model from django.urls import reverse -from pydis_site.apps.api.models.bot.filters import FilterList, Filter +from pydis_site.apps.api.models.bot.filters import Filter, FilterList from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @@ -271,11 +271,14 @@ class FilterValidationTests(AuthenticatedAPITestCase): base_filter = test_sequences["filter"] base_filter_list = test_sequences["filter_list1"] cases = ( - ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), + ({"infraction_reason": "hi"}, {}, 400), + ({"infraction_duration": timedelta(seconds=10)}, {}, 400), ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "MUTE"}, 200), - ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), - ({"enabled_categories": ["CODE JAM"]}, {}, 200), ({"disabled_categories": ["CODE JAM"]}, {}, 200), + ({"enabled_channels": ["admins"]}, {}, 200), + ({"disabled_channels": ["123"]}, {}, 200), + ({"enabled_categories": ["CODE JAM"]}, {}, 200), + ({"disabled_categories": ["CODE JAM"]}, {}, 200), ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, {}, 400), ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, {}, 400), ({"enabled_channels": ["admins"]}, {"disabled_channels": ["123", "admins"]}, 400), @@ -283,7 +286,9 @@ class FilterValidationTests(AuthenticatedAPITestCase): ) for filter_settings, filter_list_settings, response_code in cases: - with self.subTest(f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code): + with self.subTest( + f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code + ): base_filter.model.objects.all().delete() base_filter_list.model.objects.all().delete() @@ -306,11 +311,13 @@ class FilterValidationTests(AuthenticatedAPITestCase): test_sequences = 
get_test_sequences() base_filter_list = test_sequences["filter_list1"] cases = ( - ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_reason": "hi"}, 400), + ({"infraction_duration": timedelta(seconds=10)}, 400), ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10), "infraction_type": "MUTE"}, 200), ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), - ({"enabled_categories": ["CODE JAM"]}, 200), ({"disabled_categories": ["CODE JAM"]}, 200), + ({"enabled_categories": ["CODE JAM"]}, 200), + ({"disabled_categories": ["CODE JAM"]}, 200), ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, 400), ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, 400), ) @@ -324,7 +331,8 @@ class FilterValidationTests(AuthenticatedAPITestCase): save_nested_objects(case_fl) response = self.client.patch( - f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) + f"{base_filter_list.url()}/{case_fl.id}", + data=clean_test_json(filter_list_settings) ) self.assertEqual(response.status_code, response_code) diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 1eb05053..8e677612 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -22,69 +22,79 @@ class FilterListViewSet(ModelViewSet): >>> [ ... { ... "id": 1, - ... "name": "invites", + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", ... "list_type": 1, ... "filters": [ ... { ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "filter_list": 1 + ... "filter_list": 1, ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "send_alert": True, - ... "remove_context": None - ... "infraction_and_notification": { - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None - ... "dm_content": None, - ... "dm_embed": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } - ... + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... }, ... ... ... ], - ... "settings": { - ... "bypass_roles": [ - ... "staff" - ... ], - ... "filter_dm": True, - ... "enabled": True - ... "remove_context": True, - ... "send_alert": True - ... "infraction_and_notification": { - ... "infraction_type": "", - ... "infraction_reason": "", - ... "infraction_duration": "0.0", - ... "dm_content": "", - ... "dm_embed": "" - ... 
} - ... "channel_scope": { + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { ... "disabled_channels": [], - ... "disabled_categories": [], - ... "enabled_channels": [] - ... } - ... "mentions": { - ... "ping_type": [ - ... "onduty" - ... ] - ... "dm_ping_type": [] - ... } - ... }, + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... }, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } + ... }, ... ... ... ] @@ -97,75 +107,205 @@ class FilterListViewSet(ModelViewSet): #### Response format >>> { - ... "id": 1, - ... "name": "invites", - ... "list_type": 1, - ... "filters": [ - ... { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": None, - ... "send_alert": None - ... "infraction_and_notification": { - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None - ... "dm_content": None, - ... "dm_embed": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } - ... - ... }, - ... ... + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", + ... "list_type": 1, + ... "filters": [ + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } + ... }, + ... ... + ... ], + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" ... ], - ... "settings": { - ... "bypass_roles": [ - ... "staff" - ... ], - ... "filter_dm": True, - ... "enabled": True - ... "remove_context": True - ... "send_alert": True - ... "infraction_and_notification": { - ... "infraction_type": "", - ... "infraction_reason": "", - ... "infraction_duration": "0.0", - ... "dm_content": "", - ... "dm_embed": "" - ... } - ... "channel_scope": { - ... "disabled_channels": [], - ... "disabled_categories": [], - ... "enabled_channels": [] - ... } - ... "mentions": { - ... 
"ping_type": [ - ... "onduty" - ... ] - ... "dm_ping_type": [] - ... } + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... }, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } ... } #### Status codes - 200: returned on success - 404: returned if the id was not found. + ### POST /bot/filter/filter_lists + Adds a single FilterList item to the database. + + #### Request body + >>> { + ... "name": "invite", + ... "list_type": 1, + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "", + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_lists/ + Updates a specific FilterList item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", + ... "list_type": 1, + ... "filters": [ + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } + ... }, + ... ... + ... ], + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... 
}, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + ### DELETE /bot/filter/filter_lists/ Deletes the FilterList item with the given `id`. @@ -188,33 +328,39 @@ class FilterViewSet(ModelViewSet): #### Response format >>> [ - ... { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": True, - ... "send_alert": True - ... "infraction": { - ... "infraction_type": None, - ... "infraction_reason": None, - ... "infraction_duration": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None, - ... "dm_ping_type": None - ... } - ... } + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... }, ... ... ... ] @@ -228,32 +374,38 @@ class FilterViewSet(ModelViewSet): #### Response format >>> { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": True, - ... "send_alert": True - ... "infraction": { - ... "infraction_type": None, - ... "infraction_reason": None, - ... "infraction_duration": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None, - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... 
"dm_pings": None + ... } + ... } ... } #### Status codes @@ -265,10 +417,27 @@ class FilterViewSet(ModelViewSet): #### Request body >>> { + ... "filter_list": 1, ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": False, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... "guild_pings": None, + ... "dm_pings": None ... } #### Status codes @@ -281,10 +450,37 @@ class FilterViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... 
} #### Status codes -- cgit v1.2.3 From 548131617b93482c9591e4d5b9ebe78aed36d88b Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 4 Mar 2023 20:04:41 +0200 Subject: Migrate infraction type `mute` to `timeout` --- .../api/migrations/0086_alter_mute_to_timeout.py | 25 ++++++++++++++++++++++ pydis_site/apps/api/models/bot/infraction.py | 2 +- pydis_site/apps/api/tests/test_infractions.py | 24 ++++++++++----------- 3 files changed, 38 insertions(+), 13 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py b/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py new file mode 100644 index 00000000..8eb3ff6d --- /dev/null +++ b/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py @@ -0,0 +1,25 @@ +from django.apps.registry import Apps +from django.db import migrations, models + +import pydis_site.apps.api.models + + +def rename_type(apps: Apps, _) -> None: + infractions: pydis_site.apps.api.models.Infraction = apps.get_model("api", "Infraction") + infractions.objects.filter(type="mute").update(type="timeout") + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0085_add_thread_id_to_nominations'), + ] + + operations = [ + migrations.AlterField( + model_name='infraction', + name='type', + field=models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('timeout', 'Timeout'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban'), ('voice_mute', 'Voice Mute')], help_text='The type of the infraction.', max_length=10), + ), + migrations.RunPython(rename_type, migrations.RunPython.noop) + ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index 218ee5ec..fcf8651e 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -12,7 +12,7 @@ class Infraction(ModelReprMixin, models.Model): ("note", "Note"), ("warning", "Warning"), ("watch", "Watch"), - ("mute", "Mute"), + ("timeout", "Timeout"), ("kick", "Kick"), ("ban", "Ban"), ("superstar", "Superstar"), diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index 89ee4e23..ceb5591b 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -68,10 +68,10 @@ class InfractionTests(AuthenticatedAPITestCase): active=False, inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), ) - cls.mute_permanent = Infraction.objects.create( + cls.timeout_permanent = Infraction.objects.create( user_id=cls.user.id, actor_id=cls.user.id, - type='mute', + type='timeout', reason='He has a filthy mouth and I am his soap.', active=True, inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), @@ -107,7 +107,7 @@ class InfractionTests(AuthenticatedAPITestCase): self.assertEqual(len(infractions), 5) self.assertEqual(infractions[0]['id'], self.voiceban_expires_later.id) self.assertEqual(infractions[1]['id'], self.superstar_expires_soon.id) - self.assertEqual(infractions[2]['id'], self.mute_permanent.id) + self.assertEqual(infractions[2]['id'], self.timeout_permanent.id) self.assertEqual(infractions[3]['id'], self.ban_inactive.id) self.assertEqual(infractions[4]['id'], self.ban_hidden.id) @@ -134,7 +134,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_permanent_false(self): url = 
reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?type=mute&permanent=false') + response = self.client.get(f'{url}?type=timeout&permanent=false') self.assertEqual(response.status_code, 200) infractions = response.json() @@ -143,12 +143,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_permanent_true(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?type=mute&permanent=true') + response = self.client.get(f'{url}?type=timeout&permanent=true') self.assertEqual(response.status_code, 200) infractions = response.json() - self.assertEqual(infractions[0]['id'], self.mute_permanent.id) + self.assertEqual(infractions[0]['id'], self.timeout_permanent.id) def test_filter_after(self): url = reverse('api:bot:infraction-list') @@ -241,7 +241,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_manytypes(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?types=mute,ban') + response = self.client.get(f'{url}?types=timeout,ban') self.assertEqual(response.status_code, 200) infractions = response.json() @@ -249,7 +249,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_types_type_invalid(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?types=mute,ban&type=superstar') + response = self.client.get(f'{url}?types=timeout,ban&type=superstar') self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -519,7 +519,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_for_second_active_infraction_of_the_same_type(self): """Test if the API rejects a second active infraction of the same type for a given user.""" url = reverse('api:bot:infraction-list') - active_infraction_types = ('mute', 'ban', 'superstar') + active_infraction_types = ('timeout', 'ban', 'superstar') for infraction_type in active_infraction_types: with self.subTest(infraction_type=infraction_type): @@ -562,7 +562,7 @@ class CreationTests(AuthenticatedAPITestCase): first_active_infraction = { 'user': self.user.id, 'actor': self.user.id, - 'type': 'mute', + 'type': 'timeout', 'reason': 'Be silent!', 'hidden': True, 'active': True, @@ -649,9 +649,9 @@ class CreationTests(AuthenticatedAPITestCase): Infraction.objects.create( user=self.user, actor=self.user, - type="mute", + type="timeout", active=True, - reason="The first active mute" + reason="The first active timeout" ) def test_unique_constraint_accepts_active_infractions_for_different_users(self): -- cgit v1.2.3 From 0f40b114940164c65b10d1312b5a419ce025c799 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sun, 26 Mar 2023 23:12:34 +0300 Subject: Rename additional_field to additional_settings --- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- .../apps/api/migrations/0089_unique_constraint_filters.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 4 +++- pydis_site/apps/api/serializers.py | 9 +++++---- pydis_site/apps/api/tests/test_filters.py | 2 +- pydis_site/apps/api/tests/test_models.py | 2 +- pydis_site/apps/api/viewsets/bot/filters.py | 14 +++++++------- 7 files changed, 21 insertions(+), 18 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 1506e4d7..2e1d78c9 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -64,7 +64,7 @@ def forward(apps: Apps, 
schema_editor: BaseDatabaseSchemaEditor) -> None: updated_at=object_.updated_at, filter_list=list_, description=object_.comment, - additional_field=None, + additional_settings=None, guild_pings=None, filter_dm=None, dm_pings=None, @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True)), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_field', models.JSONField(help_text='Implementation specific field.', null=True)), + ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', null=True)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), diff --git a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py index 0bcfd8a3..cb230a27 100644 --- a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py @@ -11,13 +11,13 @@ class Migration(migrations.Migration): migrations.RunSQL( "ALTER TABLE api_filter " "ADD CONSTRAINT unique_filters UNIQUE NULLS NOT DISTINCT " - "(content, additional_field, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", + "(content, additional_settings, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", reverse_sql="ALTER TABLE api_filter DROP CONSTRAINT unique_filters", state_operations=[ migrations.AddConstraint( model_name='filter', constraint=models.UniqueConstraint( - fields=('content', 'additional_field', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), + fields=('content', 'additional_settings', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), name='unique_filters' ), ), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index c6f6f851..aadb39aa 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -131,7 +131,9 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): 
max_length=200, help_text="Why this filter has been added.", null=True ) - additional_field = models.JSONField(null=True, help_text="Implementation specific field.") + additional_settings = models.JSONField( + null=True, help_text="Additional settings which are specific to this filter." + ) filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index da02c837..a3779094 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -184,7 +184,7 @@ ALLOW_EMPTY_SETTINGS = ( # Required fields for custom JSON representation purposes BASE_FILTER_FIELDS = ( - 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field' + 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_settings' ) BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( @@ -279,7 +279,7 @@ class FilterSerializer(ModelSerializer): 'updated_at', 'content', 'description', - 'additional_field', + 'additional_settings', 'filter_list' ) + SETTINGS_FIELDS extra_kwargs = _create_meta_extra_kwargs(for_filter=True) @@ -382,9 +382,10 @@ class FilterListSerializer(ModelSerializer): Provides a custom JSON representation to the FilterList Serializers. This representation restructures how the Filter is represented. - It groups the Infraction, Channel and Mention related fields into their own separated group. + It groups the Infraction, Channel, and Mention related fields + into their own separated groups. - Furthermore, it puts the fields that meant to represent FilterList settings, + Furthermore, it puts the fields that are meant to represent FilterList settings, into a sub-field called `settings`. """ schema = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f36e0617..3d3be51e 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -92,7 +92,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: { "content": "bad word", "description": "This is a really bad word.", - "additional_field": "{'hi': 'there'}", + "additional_settings": "{'hi': 'there'}", "guild_pings": None, "filter_dm": None, "dm_pings": None, diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 25d771cc..d3341b35 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -113,7 +113,7 @@ class StringDunderMethodTests(SimpleTestCase): Filter( content="ducky_nsfw", description="This ducky is totally inappropriate!", - additional_field=None, + additional_settings=None, ), OffensiveMessage( id=602951077675139072, diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index c84da909..d6c2d18c 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -33,7 +33,7 @@ class FilterListViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -119,7 +119,7 @@ class FilterListViewSet(ModelViewSet): ... 
"updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -239,7 +239,7 @@ class FilterListViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -334,7 +334,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -379,7 +379,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -420,7 +420,7 @@ class FilterViewSet(ModelViewSet): ... "filter_list": 1, ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "bypass_roles": None, ... "filter_dm": None, ... "enabled": False, @@ -454,7 +454,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, -- cgit v1.2.3 From 4c923fa1cd6f1f5144036317b116aac745b3c345 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 5 Apr 2023 03:10:05 +0300 Subject: Add maximum auto-timeout duration validation --- pydis_site/apps/api/serializers.py | 31 +++++++++++++++++++++++++------ pydis_site/apps/api/tests/test_filters.py | 2 ++ 2 files changed, 27 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index a3779094..2186b02c 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -1,4 +1,5 @@ """Converters from Django models to data interchange formats and back.""" +from datetime import timedelta from typing import Any from django.db.models.query import QuerySet @@ -210,6 +211,8 @@ CHANNEL_SCOPE_FIELDS = ( ) MENTIONS_FIELDS = ('guild_pings', 'dm_pings') +MAX_TIMEOUT_DURATION = timedelta(days=28) + def _create_meta_extra_kwargs(*, for_filter: bool) -> dict[str, dict[str, bool]]: """Create the extra kwargs for the Meta classes of the Filter and FilterList serializers.""" @@ -236,17 +239,24 @@ class FilterSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allowed and disallowed lists validation.""" + infraction_type = get_field_value(data, 'infraction_type') + infraction_duration = get_field_value(data, 'infraction_duration') if ( - ( - get_field_value(data, 'infraction_reason') - or get_field_value(data, 'infraction_duration') - ) - and get_field_value(data, 'infraction_type') == 'NONE' + (get_field_value(data, 'infraction_reason') or infraction_duration) + and infraction_type == 'NONE' ): raise ValidationError( "Infraction type is required 
with infraction duration or reason." ) + if ( + infraction_type == 'TIMEOUT' + and (not infraction_duration or infraction_duration > MAX_TIMEOUT_DURATION) + ): + raise ValidationError( + f"A timeout cannot be longer than {MAX_TIMEOUT_DURATION.days} days." + ) + common_channels = ( set(get_field_value(data, 'disabled_channels')) & set(get_field_value(data, 'enabled_channels')) @@ -328,8 +338,9 @@ class FilterListSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allow and disallowed lists validation.""" + infraction_duration = data.get('infraction_duration') if ( - data.get('infraction_reason') or data.get('infraction_duration') + data.get('infraction_reason') or infraction_duration ) and not data.get('infraction_type'): raise ValidationError("Infraction type is required with infraction duration or reason") @@ -344,6 +355,14 @@ class FilterListSerializer(ModelSerializer): f" {', '.join(repr(channel) for channel in common_channels)}." ) + if ( + data.get('infraction_type') == 'TIMEOUT' + and (not infraction_duration or infraction_duration > MAX_TIMEOUT_DURATION) + ): + raise ValidationError( + f"A timeout cannot be longer than {MAX_TIMEOUT_DURATION.days} days." + ) + if ( data.get('disabled_categories') is not None and data.get('enabled_categories') is not None diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 3d3be51e..ebc4a2cf 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -274,6 +274,7 @@ class FilterValidationTests(AuthenticatedAPITestCase): ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), + ({"infraction_type": "TIMEOUT", "infraction_duration": timedelta(days=30)}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "TIMEOUT"}, 200), ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), @@ -313,6 +314,7 @@ class FilterValidationTests(AuthenticatedAPITestCase): cases = ( ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_type": "TIMEOUT", "infraction_duration": timedelta(days=30)}, 400), ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10), "infraction_type": "TIMEOUT"}, 200), ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), -- cgit v1.2.3 From b8ddedc31d54f46bb86a7e7d200c163ea8806ee0 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 6 Apr 2023 01:09:09 +0300 Subject: Make additional_settings non-null with dict default This makes sure that the value in the DB is always a valid JSON, ensuring the unique constraint will work properly. 
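As an illustrative sketch only (the model name here is a hypothetical
stand-in, not part of this patch): with a callable default, Django stores
an empty JSON object instead of SQL NULL, so every row holds a comparable
JSON value and the unique constraint from migration 0089 can treat two
rows that both lack additional settings as duplicates:

    from django.db import models

    class ExampleFilter(models.Model):  # hypothetical model for illustration
        # default=dict passes the dict *callable*, so each new row gets its
        # own fresh {} rather than sharing one mutable instance; the column
        # therefore always contains valid JSON and is never SQL NULL.
        additional_settings = models.JSONField(default=dict)

Passing the callable (`dict`) rather than a literal (`{}`) is the usual
Django idiom for mutable defaults.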
--- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 2e1d78c9..675fdcec 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -64,7 +64,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: updated_at=object_.updated_at, filter_list=list_, description=object_.comment, - additional_settings=None, + additional_settings={}, guild_pings=None, filter_dm=None, dm_pings=None, @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True)), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', null=True)), + ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', default=dict)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index aadb39aa..71f8771f 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -132,7 +132,7 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Why this filter has been added.", null=True ) additional_settings = models.JSONField( - null=True, help_text="Additional settings which are specific to this filter." 
+ help_text="Additional settings which are specific to this filter.", default=dict ) filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index ebc4a2cf..5059d651 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -230,6 +230,7 @@ class GenericFilterTests(AuthenticatedAPITestCase): def test_creation_missing_field(self) -> None: for name, sequence in get_test_sequences().items(): + ignored_fields = sequence.ignored_fields + ("id", "additional_settings") with self.subTest(name=name): saved = sequence.model(**sequence.object) save_nested_objects(saved) @@ -237,7 +238,7 @@ class GenericFilterTests(AuthenticatedAPITestCase): for field in sequence.model._meta.get_fields(): with self.subTest(field=field): - if field.null or field.name in sequence.ignored_fields + ("id",): + if field.null or field.name in ignored_fields: continue test_data = data.copy() -- cgit v1.2.3 From cbc67702bb58a3a2d3c80521f21e1fd6f7f203e9 Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Fri, 21 Apr 2023 22:21:13 +0200 Subject: Crosscheck rules between API and static rules --- pydis_site/apps/api/tests/test_rules.py | 38 ++++++++++++++++++++++++++++++ pydis_site/apps/api/views.py | 2 +- pydis_site/apps/content/resources/rules.md | 2 +- 3 files changed, 40 insertions(+), 2 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py index d08c5fae..3ee2d4e0 100644 --- a/pydis_site/apps/api/tests/test_rules.py +++ b/pydis_site/apps/api/tests/test_rules.py @@ -1,3 +1,7 @@ +import itertools +import re +from pathlib import Path + from django.urls import reverse from .base import AuthenticatedAPITestCase @@ -33,3 +37,37 @@ class RuleAPITests(AuthenticatedAPITestCase): url = reverse('api:rules') response = self.client.get(url + '?link_format=unknown') self.assertEqual(response.status_code, 400) + + +class RuleCorrectnessTests(AuthenticatedAPITestCase): + """Verifies that the rules from the API and by the static rules in the content app match.""" + + @classmethod + def setUpTestData(cls): + cls.markdown_rule_re = re.compile(r'^> \d+\. 
(.*)$') + + def test_rules_in_markdown_file_roughly_equal_api_rules(self) -> None: + url = reverse('api:rules') + api_response = self.client.get(url + '?link_format=md') + api_rules = tuple(rule for (rule, _tags) in api_response.json()) + + markdown_rules_path = ( + Path(__file__).parent.parent.parent / 'content' / 'resources' / 'rules.md' + ) + + markdown_rules = [] + for line in markdown_rules_path.read_text().splitlines(): + matches = self.markdown_rule_re.match(line) + if matches is not None: + markdown_rules.append(matches.group(1)) + + zipper = itertools.zip_longest(api_rules, markdown_rules) + for idx, (api_rule, markdown_rule) in enumerate(zipper): + with self.subTest(f"Rule {idx}"): + self.assertIsNotNone( + markdown_rule, f"The API has more rules than {markdown_rules_path}" + ) + self.assertIsNotNone( + api_rule, f"{markdown_rules_path} has more rules than the API endpoint" + ) + self.assertEqual(markdown_rule, api_rule) diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 54fbf809..b1b7dc0f 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -120,7 +120,7 @@ class RulesView(APIView): link_format ) discord_tos = self._format_link( - 'Terms Of Service', + 'Terms of Service', 'https://discordapp.com/terms', link_format ) diff --git a/pydis_site/apps/content/resources/rules.md b/pydis_site/apps/content/resources/rules.md index a0bdb38d..e55c6715 100644 --- a/pydis_site/apps/content/resources/rules.md +++ b/pydis_site/apps/content/resources/rules.md @@ -5,7 +5,7 @@ icon: fab fa-discord --- We have a small but strict set of rules on our server. Please read over them and take them on board. If you don't understand a rule or need to report an incident, please send a direct message to @ModMail! -> 1. Follow the [Python Discord Code of Conduct](/pages/code-of-conduct/). +> 1. Follow the [Python Discord Code of Conduct](https://pythondiscord.com/pages/code-of-conduct/). > 2. Follow the [Discord Community Guidelines](https://discordapp.com/guidelines) and [Terms of Service](https://discordapp.com/terms). > 3. Respect staff members and listen to their instructions. > 4. Use English to the best of your ability. Be polite if someone speaks English imperfectly. 
-- cgit v1.2.3 From 07855963a1eedd80c410ab2dd51fcae1200c9cee Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Wed, 10 May 2023 12:30:57 +0200 Subject: Switch to ruff for linting --- .github/workflows/lint-test.yaml | 17 +- .pre-commit-config.yaml | 12 +- poetry.lock | 700 ++++++--------------- pydis_site/apps/api/admin.py | 29 +- pydis_site/apps/api/github_utils.py | 13 +- pydis_site/apps/api/models/bot/message.py | 7 +- pydis_site/apps/api/models/bot/metricity.py | 19 +- pydis_site/apps/api/tests/base.py | 3 +- pydis_site/apps/api/tests/test_bumped_threads.py | 2 +- pydis_site/apps/api/tests/test_deleted_messages.py | 11 +- .../apps/api/tests/test_documentation_links.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 12 +- pydis_site/apps/api/tests/test_github_utils.py | 41 +- pydis_site/apps/api/tests/test_infractions.py | 110 ++-- pydis_site/apps/api/tests/test_models.py | 4 +- pydis_site/apps/api/tests/test_nominations.py | 2 +- .../apps/api/tests/test_off_topic_channel_names.py | 2 +- .../apps/api/tests/test_offensive_message.py | 10 +- pydis_site/apps/api/tests/test_reminders.py | 4 +- pydis_site/apps/api/tests/test_roles.py | 2 +- pydis_site/apps/api/tests/test_rules.py | 2 +- pydis_site/apps/api/tests/test_users.py | 29 +- pydis_site/apps/api/tests/test_validators.py | 4 +- pydis_site/apps/api/views.py | 4 +- pydis_site/apps/api/viewsets/bot/filters.py | 4 +- .../api/viewsets/bot/off_topic_channel_name.py | 7 +- pydis_site/apps/api/viewsets/bot/user.py | 5 +- pydis_site/apps/content/models/tag.py | 3 +- .../guides/pydis-guides/contributing/linting.md | 2 +- pydis_site/apps/content/tests/helpers.py | 20 +- pydis_site/apps/content/urls.py | 2 +- pydis_site/apps/content/utils.py | 16 +- pydis_site/apps/content/views/tags.py | 3 +- pydis_site/apps/events/urls.py | 2 +- pydis_site/apps/events/views/page.py | 3 +- .../apps/home/tests/test_repodata_helpers.py | 2 +- pydis_site/apps/home/views.py | 13 +- pydis_site/apps/redirect/urls.py | 25 +- pydis_site/apps/redirect/views.py | 3 +- pydis_site/apps/resources/views.py | 7 +- .../staff/templatetags/deletedmessage_filters.py | 3 +- .../staff/tests/test_deletedmessage_filters.py | 2 +- pyproject.toml | 39 +- static-builds/netlify_build.py | 6 +- 44 files changed, 445 insertions(+), 763 deletions(-) (limited to 'pydis_site/apps/api/tests') diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index 1c744470..a12fb650 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -24,20 +24,11 @@ jobs: # We will not run `flake8` here, as we will use a separate flake8 # action. - name: Run pre-commit hooks - run: SKIP=flake8 pre-commit run --all-files + run: SKIP=ruff pre-commit run --all-files - # Run flake8 and have it format the linting errors in the format of - # the GitHub Workflow command to register error annotations. This - # means that our flake8 output is automatically added as an error - # annotation to both the run result and in the "Files" tab of a - # pull request. - # - # Format used: - # ::error file={filename},line={line},col={col}::{message} - - name: Run flake8 - run: "flake8 \ - --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ - [flake8] %(code)s: %(text)s'" + # Run `ruff` using github formatting to enable automatic inline annotations. + - name: Run ruff + run: "ruff check --format=github ." 
- name: Migrations and run tests with coverage.py run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2a03559..700dd0a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,15 +10,11 @@ repos: args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.5.1 - hooks: - - id: python-check-blanket-noqa - repo: local hooks: - - id: flake8 - name: Flake8 - description: This hook runs flake8 within our project's environment. - entry: poetry run flake8 + - id: ruff + name: ruff + description: This hook runs ruff within our project's environment. + entry: poetry run ruff language: system types: [python] diff --git a/poetry.lock b/poetry.lock index d6e09cbe..4d9a87de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "anyio" @@ -36,58 +36,16 @@ files = [ [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -181,100 +139,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = "*" -files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + 
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -355,35 +300,31 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "39.0.1" +version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, - {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, - {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, - {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, ] [package.dependencies] @@ -392,10 +333,10 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = 
["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] @@ -536,193 +477,19 @@ pytz = "*" [[package]] name = "filelock" -version = "3.9.0" +version = "3.12.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-annotations" -version = "3.0.1" -description = "Flake8 Type Annotation Checks" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8_annotations-3.0.1-py3-none-any.whl", hash = "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade"}, - {file = "flake8_annotations-3.0.1.tar.gz", hash = "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a"}, -] - -[package.dependencies] -attrs = ">=21.4" -flake8 = ">=5.0" - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -description = "Automated security testing with bandit and flake8." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = ">=5.0.0" - -[[package]] -name = "flake8-bugbear" -version = "23.5.9" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, - {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-import-order" -version = "0.18.2" -description = "Flake8 and pylama plugin that checks the ordering of import statements." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-import-order-0.18.2.tar.gz", hash = "sha256:e23941f892da3e0c09d711babbb0c73bc735242e9b216b726616758a920d900e"}, - {file = "flake8_import_order-0.18.2-py2.py3-none-any.whl", hash = "sha256:82ed59f1083b629b030ee9d3928d9e06b6213eb196fe745b3a7d4af2168130df"}, -] - -[package.dependencies] -pycodestyle = "*" -setuptools = "*" - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-tidy-imports" -version = "4.8.0" -description = "A flake8 plugin that helps you write tidier imports." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, - {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, -] - -[package.dependencies] -flake8 = ">=3.8.0" - -[[package]] -name = "flake8-todo" -version = "0.7" -description = "TODO notes checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, -] - -[package.dependencies] -pycodestyle = ">=2.0.0,<3.0.0" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.31" -description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "gunicorn" @@ -759,14 +526,14 @@ files = [ [[package]] name = "httpcore" -version = "0.16.3" +version = "0.17.0" description = "A minimal low-level HTTP client." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, + {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, + {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, ] [package.dependencies] @@ -805,14 +572,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.5.18" +version = "2.5.24" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, - {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, ] [package.extras] @@ -860,18 +627,6 @@ files = [ [package.extras] testing = ["coverage", "pyyaml"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mslex" version = "0.3.0" @@ -899,48 +654,21 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, - {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, + {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" @@ -978,26 +706,26 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.4" +version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] [package.extras] @@ -1075,18 +803,6 @@ files = [ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, ] -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -1099,36 +815,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pyjwt" version = "2.7.0" @@ -1202,14 +888,14 @@ test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -1264,26 +950,53 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.30.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.0.265" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.265-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:30ddfe22de6ce4eb1260408f4480bbbce998f954dbf470228a21a9b2c45955e4"}, + {file = "ruff-0.0.265-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a11bd0889e88d3342e7bc514554bb4461bf6cc30ec115821c2425cfaac0b1b6a"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9b38bdb40a998cbc677db55b6225a6c4fadcf8819eb30695e1b8470942426b"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8b44a245b60512403a6a03a5b5212da274d33862225c5eed3bcf12037eb19bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b279fa55ea175ef953208a6d8bfbcdcffac1c39b38cdb8c2bfafe9222add70bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5028950f7af9b119d43d91b215d5044976e43b96a0d1458d193ef0dd3c587bf8"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4057eb539a1d88eb84e9f6a36e0a999e0f261ed850ae5d5817e68968e7b89ed9"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d586e69ab5cbf521a1910b733412a5735936f6a610d805b89d35b6647e2a66aa"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa17b13cd3f29fc57d06bf34c31f21d043735cc9a681203d634549b0e41047d1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9ac13b11d9ad3001de9d637974ec5402a67cefdf9fffc3929ab44c2fcbb850a1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:62a9578b48cfd292c64ea3d28681dc16b1aa7445b7a7709a2884510fc0822118"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0f9967f84da42d28e3d9d9354cc1575f96ed69e6e40a7d4b780a7a0418d9409"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d5a8de2fbaf91ea5699451a06f4074e7a312accfa774ad9327cde3e4fda2081"}, + {file = "ruff-0.0.265-py3-none-win32.whl", hash = "sha256:9e9db5ccb810742d621f93272e3cc23b5f277d8d00c4a79668835d26ccbe48dd"}, + {file = "ruff-0.0.265-py3-none-win_amd64.whl", hash = "sha256:f54facf286103006171a00ce20388d88ed1d6732db3b49c11feb9bf3d46f90e9"}, + {file = "ruff-0.0.265-py3-none-win_arm64.whl", hash = "sha256:c78470656e33d32ddc54e8482b1b0fc6de58f1195586731e5ff1405d74421499"}, + {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, +] + [[package]] name = "sentry-sdk" version = "1.22.2" @@ -1328,14 +1041,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.4.0" +version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, + {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, ] [package.extras] @@ -1343,18 +1056,6 @@ docs = ["furo", "jaraco.packaging (>=9)", 
"jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - [[package]] name = "sniffio" version = "1.3.0" @@ -1367,18 +1068,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "sqlparse" version = "0.4.4" @@ -1396,21 +1085,6 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] -[[package]] -name = "stevedore" -version = "5.0.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, - {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "taskipy" version = "1.10.4" @@ -1443,26 +1117,26 @@ files = [ [[package]] name = "tzdata" -version = "2022.7" +version = "2023.3" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"}, - {file = "tzdata-2022.7.tar.gz", hash = "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa"}, + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1472,24 +1146,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.19.0" +version = "20.23.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, - {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, + {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, + {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] [[package]] name = "whitenoise" @@ -1509,4 +1183,4 @@ brotli = ["Brotli"] [metadata] lock-version = "2.0" python-versions = "3.10.*" -content-hash = "a89b60823f9d7717b78c1071654c0c1f237768669a8e37e76175730de209e804" +content-hash = "f9076a1d72b610e77d0c389a4992c814a4c81028232a1ec4f060f77f2cb9125c" diff --git a/pydis_site/apps/api/admin.py b/pydis_site/apps/api/admin.py index e123d150..f3cc0405 100644 --- a/pydis_site/apps/api/admin.py +++ b/pydis_site/apps/api/admin.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Iterable, Optional, Tuple +from collections.abc import Iterable from django import urls from django.contrib import admin @@ -62,16 +62,16 @@ class InfractionActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" 
actor_ids = Infraction.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None return queryset.filter(actor__id=self.value()) @@ -149,7 +149,7 @@ class DeletedMessageAdmin(admin.ModelAdmin): list_display = ("id", "author", "channel_id") - def embed_data(self, message: DeletedMessage) -> Optional[str]: + def embed_data(self, message: DeletedMessage) -> str | None: """Format embed data in a code block for better readability.""" if message.embeds: return format_html( @@ -157,6 +157,7 @@ class DeletedMessageAdmin(admin.ModelAdmin): "{0}", json.dumps(message.embeds, indent=4) ) + return None embed_data.short_description = "Embeds" @@ -229,16 +230,16 @@ class NominationActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None nomination_ids = NominationEntry.objects.filter( actor__id=self.value() ).values_list("nomination_id").distinct() @@ -292,16 +293,16 @@ class NominationEntryActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct() actors = User.objects.filter(id__in=actor_ids) return ((a.id, a.username) for a in actors) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None return queryset.filter(actor__id=self.value()) @@ -425,15 +426,15 @@ class UserRoleFilter(admin.SimpleListFilter): title = "Role" parameter_name = "role" - def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[Tuple[str, str]]: + def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[tuple[str, str]]: """Selectable values for viewer to filter by.""" roles = Role.objects.all() return ((r.name, r.name) for r in roles) - def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]: + def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None: """Query to filter the list of Users against.""" if not self.value(): - return + return None role = Role.objects.get(name=self.value()) return queryset.filter(roles__contains=[role.id]) diff --git a/pydis_site/apps/api/github_utils.py 
b/pydis_site/apps/api/github_utils.py index 44c571c3..af659195 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -82,7 +82,7 @@ def generate_token() -> str: Refer to: https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app """ - now = datetime.datetime.now() + now = datetime.datetime.now(tz=datetime.timezone.utc) return jwt.encode( { "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at @@ -145,8 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client: def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" - created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT) - run_time = datetime.datetime.utcnow() - created_at + created_at = ( + datetime.datetime + .strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT) + .replace(tzinfo=datetime.timezone.utc) + ) + run_time = datetime.datetime.now(tz=datetime.timezone.utc) - created_at if run.status != "completed": if run_time <= MAX_RUN_TIME: @@ -154,8 +158,7 @@ def check_run_status(run: WorkflowRun) -> str: f"The requested run is still pending. It was created " f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago." ) - else: - raise RunTimeoutError("The requested workflow was not ready in time.") + raise RunTimeoutError("The requested workflow was not ready in time.") if run.conclusion != "success": # The action failed, or did not run diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index 89ae27e4..d8147cd4 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -61,9 +61,10 @@ class Message(ModelReprMixin, models.Model): @property def timestamp(self) -> datetime.datetime: """Attribute that represents the message timestamp as derived from the snowflake id.""" - return datetime.datetime.utcfromtimestamp( - ((self.id >> 22) + 1420070400000) / 1000 - ).replace(tzinfo=datetime.timezone.utc) + return datetime.datetime.fromtimestamp( + ((self.id >> 22) + 1420070400000) / 1000, + tz=datetime.timezone.utc, + ) class Meta: """Metadata provided for Django's ORM.""" diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index f53dd33c..a55f5e5b 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -1,4 +1,3 @@ -from typing import List, Tuple from django.db import connections @@ -10,10 +9,9 @@ EXCLUDE_CHANNELS = ( ) -class NotFoundError(Exception): # noqa: N818 +class NotFoundError(Exception): """Raised when an entity cannot be found.""" - pass class Metricity: @@ -31,15 +29,14 @@ class Metricity: def user(self, user_id: str) -> dict: """Query a user's data.""" # TODO: Swap this back to some sort of verified at date - columns = ["joined_at"] - query = f"SELECT {','.join(columns)} FROM users WHERE id = '%s'" + query = "SELECT joined_at FROM users WHERE id = '%s'" self.cursor.execute(query, [user_id]) values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError - return dict(zip(columns, values)) + return {'joined_at': values[0]} def total_messages(self, user_id: str) -> int: """Query total number of messages for a user.""" @@ -58,7 +55,7 @@ class Metricity: values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError return 
values[0] @@ -88,11 +85,11 @@ class Metricity: values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError return values[0] - def top_channel_activity(self, user_id: str) -> List[Tuple[str, int]]: + def top_channel_activity(self, user_id: str) -> list[tuple[str, int]]: """ Query the top three channels in which the user is most active. @@ -127,7 +124,7 @@ class Metricity: values = self.cursor.fetchall() if not values: - raise NotFoundError() + raise NotFoundError return values diff --git a/pydis_site/apps/api/tests/base.py b/pydis_site/apps/api/tests/base.py index c9f3cb7e..704b22cf 100644 --- a/pydis_site/apps/api/tests/base.py +++ b/pydis_site/apps/api/tests/base.py @@ -61,6 +61,7 @@ class AuthenticatedAPITestCase(APITestCase): ... self.assertEqual(response.status_code, 200) """ - def setUp(self): + def setUp(self) -> None: + """Bootstrap the user and authenticate it.""" super().setUp() self.client.force_authenticate(test_user) diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py index 316e3f0b..2e3892c7 100644 --- a/pydis_site/apps/api/tests/test_bumped_threads.py +++ b/pydis_site/apps/api/tests/test_bumped_threads.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import BumpedThread +from pydis_site.apps.api.models import BumpedThread class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_deleted_messages.py b/pydis_site/apps/api/tests/test_deleted_messages.py index 1eb535d8..62d17e58 100644 --- a/pydis_site/apps/api/tests/test_deleted_messages.py +++ b/pydis_site/apps/api/tests/test_deleted_messages.py @@ -1,10 +1,9 @@ -from datetime import datetime +from datetime import datetime, timezone from django.urls import reverse -from django.utils import timezone from .base import AuthenticatedAPITestCase -from ..models import MessageDeletionContext, User +from pydis_site.apps.api.models import MessageDeletionContext, User class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): @@ -18,7 +17,7 @@ class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': None, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -58,7 +57,7 @@ class DeletedMessagesWithActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': cls.actor.id, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -90,7 +89,7 @@ class DeletedMessagesLogURLTests(AuthenticatedAPITestCase): cls.deletion_context = MessageDeletionContext.objects.create( actor=cls.actor, - creation=timezone.now() + creation=datetime.now(tz=timezone.utc), ) def test_valid_log_url(self): diff --git a/pydis_site/apps/api/tests/test_documentation_links.py b/pydis_site/apps/api/tests/test_documentation_links.py index 4e238cbb..f4a332cb 100644 --- a/pydis_site/apps/api/tests/test_documentation_links.py +++ b/pydis_site/apps/api/tests/test_documentation_links.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import DocumentationLink +from pydis_site.apps.api.models import DocumentationLink class UnauthedDocumentationLinkAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 
5059d651..4cef1c8f 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -1,7 +1,7 @@ import contextlib from dataclasses import dataclass from datetime import timedelta -from typing import Any, Dict, Tuple, Type +from typing import Any from django.db.models import Model from django.urls import reverse @@ -12,22 +12,22 @@ from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @dataclass() class TestSequence: - model: Type[Model] + model: type[Model] route: str - object: Dict[str, Any] - ignored_fields: Tuple[str, ...] = () + object: dict[str, Any] + ignored_fields: tuple[str, ...] = () def url(self, detail: bool = False) -> str: return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}') -FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = { +FK_FIELDS: dict[type[Model], tuple[str, ...]] = { FilterList: (), Filter: ("filter_list",), } -def get_test_sequences() -> Dict[str, TestSequence]: +def get_test_sequences() -> dict[str, TestSequence]: filter_list1_deny_dict = { "name": "testname", "list_type": 0, diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 95bafec0..34fae875 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -12,7 +12,7 @@ import rest_framework.test from django.urls import reverse from pydis_site import settings -from .. import github_utils +from pydis_site.apps.api import github_utils class GeneralUtilityTests(unittest.TestCase): @@ -39,7 +39,8 @@ class GeneralUtilityTests(unittest.TestCase): delta = datetime.timedelta(minutes=10) self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) - self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + then = datetime.datetime.now(tz=datetime.timezone.utc) + delta + self.assertLess(decoded["exp"], then.timestamp()) class CheckRunTests(unittest.TestCase): @@ -50,7 +51,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,8 @@ class CheckRunTests(unittest.TestCase): # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion kwargs["created_at"] = ( - datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + datetime.datetime.now(tz=datetime.timezone.utc) + - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): @@ -103,29 +105,26 @@ def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> htt "account": {"login": "VALID_OWNER"}, "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" }]) - else: - return httpx.Response( - 401, json={"error": "auth app/installations"}, request=request - ) + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) - elif path == "/installation/repositories": + elif path == "/installation/repositories": # noqa: RET505 if auth == "bearer app access token": return httpx.Response(200, request=request, json={ "repositories": [{ "name": "VALID_REPO" }] }) - else: # pragma: no cover - return httpx.Response( - 401, json={"error": "auth 
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
index 95bafec0..34fae875 100644
--- a/pydis_site/apps/api/tests/test_github_utils.py
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -12,7 +12,7 @@ import rest_framework.test
 from django.urls import reverse

 from pydis_site import settings
-from .. import github_utils
+from pydis_site.apps.api import github_utils


 class GeneralUtilityTests(unittest.TestCase):
@@ -39,7 +39,8 @@ class GeneralUtilityTests(unittest.TestCase):
         delta = datetime.timedelta(minutes=10)
         self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds())
-        self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp())
+        then = datetime.datetime.now(tz=datetime.timezone.utc) + delta
+        self.assertLess(decoded["exp"], then.timestamp())


 class CheckRunTests(unittest.TestCase):
@@ -50,7 +51,7 @@ class CheckRunTests(unittest.TestCase):
         "head_sha": "sha",
         "status": "completed",
         "conclusion": "success",
-        "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+        "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT),
         "artifacts_url": "url",
     }

@@ -74,7 +75,8 @@ class CheckRunTests(unittest.TestCase):
         # Set the creation time to well before the MAX_RUN_TIME
         # to guarantee the right conclusion
         kwargs["created_at"] = (
-            datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
+            datetime.datetime.now(tz=datetime.timezone.utc)
+            - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
         ).strftime(settings.GITHUB_TIMESTAMP_FORMAT)

         with self.assertRaises(github_utils.RunTimeoutError):
@@ -103,29 +105,26 @@ def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> htt
                 "account": {"login": "VALID_OWNER"},
                 "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL"
             }])
-        else:
-            return httpx.Response(
-                401, json={"error": "auth app/installations"}, request=request
-            )
+        return httpx.Response(
+            401, json={"error": "auth app/installations"}, request=request
+        )

-        elif path == "/installation/repositories":
+        elif path == "/installation/repositories":  # noqa: RET505
             if auth == "bearer app access token":
                 return httpx.Response(200, request=request, json={
                     "repositories": [{
                         "name": "VALID_REPO"
                     }]
                 })
-            else:  # pragma: no cover
-                return httpx.Response(
-                    401, json={"error": "auth installation/repositories"}, request=request
-                )
+            return httpx.Response(  # pragma: no cover
+                401, json={"error": "auth installation/repositories"}, request=request
+            )

-    elif request.method == "POST":
+    elif request.method == "POST":  # noqa: RET505
         if path == "/ACCESS_TOKEN_URL":
             if auth == "bearer JWT initial token":
                 return httpx.Response(200, request=request, json={"token": "app access token"})
-            else:  # pragma: no cover
-                return httpx.Response(401, json={"error": "auth access_token"}, request=request)
+            return httpx.Response(401, json={"error": "auth access_token"}, request=request)  # pragma: no cover

     # Reaching this point means something has gone wrong
     return httpx.Response(500, request=request)  # pragma: no cover
@@ -138,7 +137,7 @@ class AuthorizeTests(unittest.TestCase):

     def test_invalid_apps_auth(self):
         """Test that an exception is raised if authorization was attempted with an invalid token."""
-        with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):
+        with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):  # noqa: SIM117
             with self.assertRaises(httpx.HTTPStatusError) as error:
                 github_utils.authorize("VALID_OWNER", "VALID_REPO")

@@ -179,7 +178,11 @@ class ArtifactFetcherTests(unittest.TestCase):
             run = github_utils.WorkflowRun(
                 name="action_name",
                 head_sha="action_sha",
-                created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+                created_at=(
+                    datetime.datetime
+                    .now(tz=datetime.timezone.utc)
+                    .strftime(settings.GITHUB_TIMESTAMP_FORMAT)
+                ),
                 status="completed",
                 conclusion="success",
                 artifacts_url="artifacts_url"
@@ -187,7 +190,7 @@ class ArtifactFetcherTests(unittest.TestCase):
             return httpx.Response(
                 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]}
             )
-        elif path == "/artifact_url":
+        elif path == "/artifact_url":  # noqa: RET505
             return httpx.Response(
                 200, request=request, json={"artifacts": [{
                     "name": "artifact_name",
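[Editor's note: get_response_authorize above is a canned HTTP handler. How these tests inject it is not shown in this hunk; purely as an illustration of the pattern, such a handler can be wired into httpx with its built-in MockTransport, roughly like this:]

    import httpx

    def handler(request: httpx.Request) -> httpx.Response:
        # Route on the path and return canned JSON, as get_response_authorize does.
        if request.url.path == "/app/installations":
            return httpx.Response(200, json=[{"account": {"login": "VALID_OWNER"}}])
        return httpx.Response(500)

    client = httpx.Client(transport=httpx.MockTransport(handler))
    print(client.get("https://api.github.com/app/installations").status_code)  # 200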
diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py
index ceb5591b..71611ee9 100644
--- a/pydis_site/apps/api/tests/test_infractions.py
+++ b/pydis_site/apps/api/tests/test_infractions.py
@@ -8,8 +8,8 @@ from django.db.utils import IntegrityError
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import Infraction, User
-from ..serializers import InfractionSerializer
+from pydis_site.apps.api.models import Infraction, User
+from pydis_site.apps.api.serializers import InfractionSerializer


 class UnauthenticatedTests(AuthenticatedAPITestCase):
@@ -152,8 +152,8 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_filter_after(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?type=superstar&expires_after={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(url, {'type': 'superstar', 'expires_after': target_time.isoformat()})

         self.assertEqual(response.status_code, 200)
         infractions = response.json()
@@ -161,8 +161,8 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_filter_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?type=superstar&expires_before={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(url, {'type': 'superstar', 'expires_before': target_time.isoformat()})

         self.assertEqual(response.status_code, 200)
         infractions = response.json()
@@ -185,11 +185,12 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_after_before_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
-        target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=6)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=4)
+        target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6)
         response = self.client.get(
-            f'{url}?expires_before={target_time_late.isoformat()}'
-            f'&expires_after={target_time.isoformat()}'
+            url,
+            {'expires_before': target_time_late.isoformat(),
+             'expires_after': target_time.isoformat()},
         )

         self.assertEqual(response.status_code, 200)
@@ -198,11 +199,12 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_after_after_before_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=9)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=9)
         response = self.client.get(
-            f'{url}?expires_before={target_time.isoformat()}'
-            f'&expires_after={target_time_late.isoformat()}'
+            url,
+            {'expires_before': target_time.isoformat(),
+             'expires_after': target_time_late.isoformat()},
        )

         self.assertEqual(response.status_code, 400)
@@ -212,8 +214,11 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_permanent_after_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?permanent=true&expires_after={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(
+            url,
+            {'permanent': 'true', 'expires_after': target_time.isoformat()},
+        )

         self.assertEqual(response.status_code, 400)
         errors = list(response.json())
@@ -221,8 +226,11 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_permanent_before_invalid(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5)
-        response = self.client.get(f'{url}?permanent=true&expires_before={target_time.isoformat()}')
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5)
+        response = self.client.get(
+            url,
+            {'permanent': 'true', 'expires_before': target_time.isoformat()},
+        )

         self.assertEqual(response.status_code, 400)
         errors = list(response.json())
@@ -230,9 +238,10 @@ class InfractionTests(AuthenticatedAPITestCase):

     def test_nonpermanent_before(self):
         url = reverse('api:bot:infraction-list')
-        target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=6)
+        target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6)
         response = self.client.get(
-            f'{url}?permanent=false&expires_before={target_time.isoformat()}'
+            url,
+            {'permanent': 'false', 'expires_before': target_time.isoformat()},
         )

         self.assertEqual(response.status_code, 200)
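[Editor's note: the next hunk folds a nested `with self.subTest(...):` / `with transaction.atomic():` pair into one statement with two context managers (ruff SIM117); the test_users hunk further down does the same with subTest and patch. The transformation, sketched:]

    from contextlib import nullcontext

    # Nested form...
    with nullcontext("a") as a:
        with nullcontext("b") as b:
            print(a, b)

    # ...and the flattened, equivalent form.
    with nullcontext("a") as a, nullcontext("b") as b:
        print(a, b)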
@@ -522,39 +531,38 @@ class CreationTests(AuthenticatedAPITestCase):
         active_infraction_types = ('timeout', 'ban', 'superstar')

         for infraction_type in active_infraction_types:
-            with self.subTest(infraction_type=infraction_type):
-                with transaction.atomic():
-                    first_active_infraction = {
-                        'user': self.user.id,
-                        'actor': self.user.id,
-                        'type': infraction_type,
-                        'reason': 'Take me on!',
-                        'active': True,
-                        'expires_at': '2019-10-04T12:52:00+00:00'
-                    }
+            with self.subTest(infraction_type=infraction_type), transaction.atomic():
+                first_active_infraction = {
+                    'user': self.user.id,
+                    'actor': self.user.id,
+                    'type': infraction_type,
+                    'reason': 'Take me on!',
+                    'active': True,
+                    'expires_at': '2019-10-04T12:52:00+00:00'
+                }
+
+                # Post the first active infraction of a type and confirm it's accepted.
+                first_response = self.client.post(url, data=first_active_infraction)
+                self.assertEqual(first_response.status_code, 201)

-                    # Post the first active infraction of a type and confirm it's accepted.
-                    first_response = self.client.post(url, data=first_active_infraction)
-                    self.assertEqual(first_response.status_code, 201)
-
-                    second_active_infraction = {
-                        'user': self.user.id,
-                        'actor': self.user.id,
-                        'type': infraction_type,
-                        'reason': 'Take on me!',
-                        'active': True,
-                        'expires_at': '2019-10-04T12:52:00+00:00'
+                second_active_infraction = {
+                    'user': self.user.id,
+                    'actor': self.user.id,
+                    'type': infraction_type,
+                    'reason': 'Take on me!',
+                    'active': True,
+                    'expires_at': '2019-10-04T12:52:00+00:00'
+                }
+                second_response = self.client.post(url, data=second_active_infraction)
+                self.assertEqual(second_response.status_code, 400)
+                self.assertEqual(
+                    second_response.json(),
+                    {
+                        'non_field_errors': [
+                            'This user already has an active infraction of this type.'
+                        ]
                     }
-                    second_response = self.client.post(url, data=second_active_infraction)
-                    self.assertEqual(second_response.status_code, 400)
-                    self.assertEqual(
-                        second_response.json(),
-                        {
-                            'non_field_errors': [
-                                'This user already has an active infraction of this type.'
-                            ]
-                        }
-                    )
+                )

     def test_returns_201_for_second_active_infraction_of_different_type(self):
         """Test if the API accepts a second active infraction of a different type than the first."""
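[Editor's note: the filtering tests above stop splicing ISO timestamps into raw query strings and instead pass a params dict, which the Django/DRF test client URL-encodes. The encoding difference matters because aware timestamps contain a '+', which a server decodes as a space when left raw. Sketch:]

    from urllib.parse import urlencode

    target = "2019-10-04T12:52:00+00:00"
    # Hand-built query string: the '+' is left raw.
    print(f"/bot/infractions?expires_after={target}")
    # What the params-dict form produces: properly percent-encoded.
    print("/bot/infractions?" + urlencode({"expires_after": target}))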
diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py
index d3341b35..1cca133d 100644
--- a/pydis_site/apps/api/tests/test_models.py
+++ b/pydis_site/apps/api/tests/test_models.py
@@ -118,7 +118,7 @@ class StringDunderMethodTests(SimpleTestCase):
             OffensiveMessage(
                 id=602951077675139072,
                 channel_id=291284109232308226,
-                delete_date=dt(3000, 1, 1)
+                delete_date=dt(3000, 1, 1, tzinfo=timezone.utc)
             ),
             OffTopicChannelName(name='bob-the-builders-playground'),
             Role(
@@ -132,7 +132,7 @@ class StringDunderMethodTests(SimpleTestCase):
                     name='shawn',
                     discriminator=555,
                 ),
-                creation=dt.utcnow()
+                creation=dt.now(tz=timezone.utc)
             ),
             User(
                 id=5,

diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py
index b3742cdd..ee6b1fbd 100644
--- a/pydis_site/apps/api/tests/test_nominations.py
+++ b/pydis_site/apps/api/tests/test_nominations.py
@@ -3,7 +3,7 @@ from datetime import datetime as dt, timedelta, timezone
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import Nomination, NominationEntry, User
+from pydis_site.apps.api.models import Nomination, NominationEntry, User


 class CreationTests(AuthenticatedAPITestCase):

diff --git a/pydis_site/apps/api/tests/test_off_topic_channel_names.py b/pydis_site/apps/api/tests/test_off_topic_channel_names.py
index 34098c92..315f707d 100644
--- a/pydis_site/apps/api/tests/test_off_topic_channel_names.py
+++ b/pydis_site/apps/api/tests/test_off_topic_channel_names.py
@@ -1,7 +1,7 @@
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import OffTopicChannelName
+from pydis_site.apps.api.models import OffTopicChannelName


 class UnauthenticatedTests(AuthenticatedAPITestCase):

diff --git a/pydis_site/apps/api/tests/test_offensive_message.py b/pydis_site/apps/api/tests/test_offensive_message.py
index 3cf95b75..53f9cb48 100644
--- a/pydis_site/apps/api/tests/test_offensive_message.py
+++ b/pydis_site/apps/api/tests/test_offensive_message.py
@@ -3,13 +3,13 @@ import datetime
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import OffensiveMessage
+from pydis_site.apps.api.models import OffensiveMessage


 class CreationTests(AuthenticatedAPITestCase):
     def test_accept_valid_data(self):
         url = reverse('api:bot:offensivemessage-list')
-        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)
+        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)  # noqa: DTZ005
         data = {
             'id': '602951077675139072',
             'channel_id': '291284109232308226',
@@ -32,7 +32,7 @@ class CreationTests(AuthenticatedAPITestCase):

     def test_returns_400_on_non_future_date(self):
         url = reverse('api:bot:offensivemessage-list')
-        delete_at = datetime.datetime.now() - datetime.timedelta(days=1)
+        delete_at = datetime.datetime.now() - datetime.timedelta(days=1)  # noqa: DTZ005
         data = {
             'id': '602951077675139072',
             'channel_id': '291284109232308226',
@@ -46,7 +46,7 @@ class CreationTests(AuthenticatedAPITestCase):

     def test_returns_400_on_negative_id_or_channel_id(self):
         url = reverse('api:bot:offensivemessage-list')
-        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)
+        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)  # noqa: DTZ005
         data = {
             'id': '602951077675139072',
             'channel_id': '291284109232308226',
@@ -72,7 +72,7 @@ class CreationTests(AuthenticatedAPITestCase):

 class ListTests(AuthenticatedAPITestCase):
     @classmethod
     def setUpTestData(cls):
-        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)
+        delete_at = datetime.datetime.now() + datetime.timedelta(days=1)  # noqa: DTZ005
         aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc)

         cls.messages = [

diff --git a/pydis_site/apps/api/tests/test_reminders.py b/pydis_site/apps/api/tests/test_reminders.py
index e17569f0..9bb5fe4d 100644
--- a/pydis_site/apps/api/tests/test_reminders.py
+++ b/pydis_site/apps/api/tests/test_reminders.py
@@ -4,7 +4,7 @@ from django.forms.models import model_to_dict
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import Reminder, User
+from pydis_site.apps.api.models import Reminder, User


 class UnauthedReminderAPITests(AuthenticatedAPITestCase):
@@ -59,7 +59,7 @@ class ReminderCreationTests(AuthenticatedAPITestCase):
         data = {
             'author': self.author.id,
             'content': 'Remember to...wait what was it again?',
-            'expiration': datetime.utcnow().isoformat(),
+            'expiration': datetime.now(tz=timezone.utc).isoformat(),
             'jump_url': "https://www.google.com",
             'channel_id': 123,
             'mentions': [8888, 9999],

diff --git a/pydis_site/apps/api/tests/test_roles.py b/pydis_site/apps/api/tests/test_roles.py
index 73c80c77..d3031990 100644
--- a/pydis_site/apps/api/tests/test_roles.py
+++ b/pydis_site/apps/api/tests/test_roles.py
@@ -1,7 +1,7 @@
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import Role, User
+from pydis_site.apps.api.models import Role, User


 class CreationTests(AuthenticatedAPITestCase):

diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py
index 3ee2d4e0..662fb8e9 100644
--- a/pydis_site/apps/api/tests/test_rules.py
+++ b/pydis_site/apps/api/tests/test_rules.py
@@ -5,7 +5,7 @@ from pathlib import Path
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..views import RulesView
+from pydis_site.apps.api.views import RulesView


 class RuleAPITests(AuthenticatedAPITestCase):
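[Editor's note: the recurring import rewrites in these test modules (`from ..models import ...` -> `from pydis_site.apps.api.models import ...`) swap relative imports for absolute ones, in line with the TID rules selected in pyproject.toml below. Both spellings resolve to the same module; the absolute form survives moving the importing file. Illustration (assumes the project is on sys.path):]

    import importlib

    # "from ..models import Reminder" inside pydis_site/apps/api/tests/ is
    # sugar for importing this fully-qualified module:
    models = importlib.import_module("pydis_site.apps.api.models")
    print(models.Reminder)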
diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py
index d86e80bb..cff4a825 100644
--- a/pydis_site/apps/api/tests/test_users.py
+++ b/pydis_site/apps/api/tests/test_users.py
@@ -4,9 +4,9 @@ from unittest.mock import Mock, patch
 from django.urls import reverse

 from .base import AuthenticatedAPITestCase
-from ..models import Infraction, Role, User
-from ..models.bot.metricity import NotFoundError
-from ..viewsets.bot.user import UserListPagination
+from pydis_site.apps.api.models import Infraction, Role, User
+from pydis_site.apps.api.models.bot.metricity import NotFoundError
+from pydis_site.apps.api.viewsets.bot.user import UserListPagination


 class UnauthedUserAPITests(AuthenticatedAPITestCase):
@@ -469,18 +469,17 @@ class UserMetricityTests(AuthenticatedAPITestCase):
             with self.subTest(
                 voice_infractions=case['voice_infractions'],
                 voice_gate_blocked=case['voice_gate_blocked']
-            ):
-                with patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") as p:
-                    p.return_value = case['voice_infractions']
-
-                    url = reverse('api:bot:user-metricity-data', args=[0])
-                    response = self.client.get(url)
-
-                    self.assertEqual(response.status_code, 200)
-                    self.assertEqual(
-                        response.json()["voice_gate_blocked"],
-                        case["voice_gate_blocked"]
-                    )
+            ), patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") as p:
+                p.return_value = case['voice_infractions']
+
+                url = reverse('api:bot:user-metricity-data', args=[0])
+                response = self.client.get(url)
+
+                self.assertEqual(response.status_code, 200)
+                self.assertEqual(
+                    response.json()["voice_gate_blocked"],
+                    case["voice_gate_blocked"]
+                )

     def test_metricity_review_data(self):
         # Given

diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py
index 8c46fcbc..a7ec6e38 100644
--- a/pydis_site/apps/api/tests/test_validators.py
+++ b/pydis_site/apps/api/tests/test_validators.py
@@ -3,8 +3,8 @@ from datetime import datetime, timezone
 from django.core.exceptions import ValidationError
 from django.test import TestCase

-from ..models.bot.bot_setting import validate_bot_setting_name
-from ..models.bot.offensive_message import future_date_validator
+from pydis_site.apps.api.models.bot.bot_setting import validate_bot_setting_name
+from pydis_site.apps.api.models.bot.offensive_message import future_date_validator


 REQUIRED_KEYS = (

diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index b1b7dc0f..32f41667 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -93,7 +93,7 @@ class RulesView(APIView):
         """
         if target == 'html':
             return f'<a href="{link}">{description}</a>'
-        elif target == 'md':
+        elif target == 'md':  # noqa: RET505
             return f'[{description}]({link})'
         else:
             raise ValueError(
@@ -101,7 +101,7 @@ class RulesView(APIView):
             )

     # `format` here is the result format, we have a link format here instead.
-    def get(self, request, format=None):  # noqa: D102,ANN001,ANN201
+    def get(self, request, format=None):  # noqa: ANN001, ANN201
         """
         Returns a list of our community rules coupled with their keywords.

diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py
index d6c2d18c..9c9e8338 100644
--- a/pydis_site/apps/api/viewsets/bot/filters.py
+++ b/pydis_site/apps/api/viewsets/bot/filters.py
@@ -1,10 +1,10 @@
 from rest_framework.viewsets import ModelViewSet

-from pydis_site.apps.api.models.bot.filters import (  # noqa: I101 - Preserving the filter order
+from pydis_site.apps.api.models.bot.filters import (  # - Preserving the filter order
     FilterList,
     Filter
 )
-from pydis_site.apps.api.serializers import (  # noqa: I101 - Preserving the filter order
+from pydis_site.apps.api.serializers import (  # - Preserving the filter order
     FilterListSerializer,
     FilterSerializer,
 )

diff --git a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py
index d0519e86..1774004c 100644
--- a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py
+++ b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py
@@ -85,10 +85,9 @@ class OffTopicChannelNameViewSet(ModelViewSet):
             serializer.save()

             return Response(create_data, status=HTTP_201_CREATED)

-        else:
-            raise ParseError(detail={
-                'name': ["This query parameter is required."]
-            })
+        raise ParseError(detail={
+            'name': ["This query parameter is required."]
+        })

     def list(self, request: Request, *args, **kwargs) -> Response:
         """
"page_size" - def get_next_page_number(self) -> typing.Optional[int]: + def get_next_page_number(self) -> int | None: """Get the next page number.""" if not self.page.has_next(): return None page_number = self.page.next_page_number() return page_number - def get_previous_page_number(self) -> typing.Optional[int]: + def get_previous_page_number(self) -> int | None: """Get the previous page number.""" if not self.page.has_previous(): return None diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index 1a20d775..7c49902f 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -30,8 +30,7 @@ class Commit(models.Model): def lines(self) -> collections.abc.Iterable[str]: """Return each line in the commit message.""" - for line in self.message.split("\n"): - yield line + yield from self.message.split("\n") def format_authors(self) -> collections.abc.Iterable[str]: """Return a nice representation of the author(s)' name and email.""" diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md index f6f8a5f2..b634f513 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md @@ -4,7 +4,7 @@ description: A guide for linting and setting up pre-commit. --- Your commit will be rejected by the build server if it fails to lint. -On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base. +On most of our projects, we use `ruff` and `pre-commit` to ensure that the code style is consistent across the code base. `pre-commit` is a powerful tool that helps you automatically lint before you commit. If the linter complains, the commit is aborted so that you can fix the linting errors before committing again. diff --git a/pydis_site/apps/content/tests/helpers.py b/pydis_site/apps/content/tests/helpers.py index fad91050..0e7562e8 100644 --- a/pydis_site/apps/content/tests/helpers.py +++ b/pydis_site/apps/content/tests/helpers.py @@ -62,19 +62,19 @@ class MockPagesTestCase(TestCase): ├── not_a_page.md ├── tmp.md ├── tmp - |   ├── _info.yml - |   └── category - |    ├── _info.yml - |      └── subcategory_without_info + | ├── _info.yml + | └── category + | ├── _info.yml + | └── subcategory_without_info └── category -    ├── _info.yml -    ├── with_metadata.md -    └── subcategory -    ├── with_metadata.md -       └── without_metadata.md + ├── _info.yml + ├── with_metadata.md + └── subcategory + ├── with_metadata.md + └── without_metadata.md """ - def setUp(self): + def setUp(self) -> None: """Create the fake filesystem.""" Path(f"{BASE_PATH}/_info.yml").write_text(CATEGORY_INFO) Path(f"{BASE_PATH}/root.md").write_text(MARKDOWN_WITH_METADATA) diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index a7695a27..baae154d 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -8,7 +8,7 @@ from . 
diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py
index a7695a27..baae154d 100644
--- a/pydis_site/apps/content/urls.py
+++ b/pydis_site/apps/content/urls.py
@@ -8,7 +8,7 @@ from . import utils, views
 app_name = "content"


-def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]:
+def __get_all_files(root: Path, folder: Path | None = None) -> list[str]:
     """Find all folders and markdown files recursively starting from `root`."""
     if not folder:
         folder = root

diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py
index c12893ef..347640dd 100644
--- a/pydis_site/apps/content/utils.py
+++ b/pydis_site/apps/content/utils.py
@@ -151,8 +151,11 @@ def set_tag_commit(tag: Tag) -> None:
     commit = data["commit"]
     author, committer = commit["author"], commit["committer"]

-    date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT)
-    date = date.replace(tzinfo=datetime.timezone.utc)
+    date = (
+        datetime.datetime
+        .strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT)
+        .replace(tzinfo=datetime.timezone.utc)
+    )

     if author["email"] == committer["email"]:
         authors = [author]
@@ -212,9 +215,8 @@ def get_tags() -> list[Tag]:

         record_tags(tags)
         return tags
-    else:
-        # Get tags from database
-        return list(Tag.objects.all())
+
+    return list(Tag.objects.all())


 def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]:
@@ -242,13 +244,13 @@ def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]:
             if tag.last_commit is None and not skip_sync:
                 set_tag_commit(tag)
             return tag
-        elif tag.group == name and group is None:
+        elif tag.group == name and group is None:  # noqa: RET505
             matches.append(tag)

     if matches:
         return matches

-    raise Tag.DoesNotExist()
+    raise Tag.DoesNotExist


 def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]:
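[Editor's note: several hunks in this patch (raise ParseError above, get_tags, the redirect and home views below) apply ruff's RET rules: an else branch that exists only to hold a return is dropped and the fallback is de-dented into the function tail; where the branch is deliberately kept, a `# noqa: RET505` marker is added instead. A before/after sketch:]

    def pick_before(flag: bool) -> str:
        if flag:
            return "early"
        else:            # RET505: unnecessary else after return
            return "late"

    def pick_after(flag: bool) -> str:
        if flag:
            return "early"
        return "late"    # fallback becomes the function tail

    print(pick_before(True), pick_after(False))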
diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py
index 4f4bb5a2..8d3e3321 100644
--- a/pydis_site/apps/content/views/tags.py
+++ b/pydis_site/apps/content/views/tags.py
@@ -1,5 +1,4 @@
 import re
-import typing

 import frontmatter
 import markdown
@@ -22,7 +21,7 @@ COMMAND_REGEX = re.compile(r"`*!tags? (?P<group>[\w-]+)(?P<tag> [\w-]+)?`*")
 class TagView(TemplateView):
     """Handles tag pages."""

-    tag: typing.Union[Tag, list[Tag]]
+    tag: Tag | list[Tag]
     is_group: bool

     def setup(self, *args, **kwargs) -> None:

diff --git a/pydis_site/apps/events/urls.py b/pydis_site/apps/events/urls.py
index 7ea65a31..6121d264 100644
--- a/pydis_site/apps/events/urls.py
+++ b/pydis_site/apps/events/urls.py
@@ -8,7 +8,7 @@ from pydis_site.apps.events.views import IndexView, PageView
 app_name = "events"


-def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]:
+def __get_all_files(root: Path, folder: Path | None = None) -> list[str]:
     """Find all folders and HTML files recursively starting from `root`."""
     if not folder:
         folder = root

diff --git a/pydis_site/apps/events/views/page.py b/pydis_site/apps/events/views/page.py
index 1622ad70..adf9e952 100644
--- a/pydis_site/apps/events/views/page.py
+++ b/pydis_site/apps/events/views/page.py
@@ -1,4 +1,3 @@
-from typing import List

 from django.conf import settings
 from django.http import Http404
@@ -8,7 +7,7 @@ from django.views.generic import TemplateView
 class PageView(TemplateView):
     """Handles event pages showing."""

-    def get_template_names(self) -> List[str]:
+    def get_template_names(self) -> list[str]:
         """Get specific template names."""
         path: str = self.kwargs['path']
         page_path = settings.EVENTS_PAGES_PATH / path

diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py
index a963f733..acf4a817 100644
--- a/pydis_site/apps/home/tests/test_repodata_helpers.py
+++ b/pydis_site/apps/home/tests/test_repodata_helpers.py
@@ -22,7 +22,7 @@ def mocked_requests_get(*args, **kwargs) -> "MockResponse":  # noqa: F821
     if args[0] == HomeView.github_api:
         json_path = Path(__file__).resolve().parent / "mock_github_api_response.json"
-        with open(json_path, 'r') as json_file:
+        with open(json_path) as json_file:
             mock_data = json.load(json_file)
         return MockResponse(mock_data, 200)
diff --git a/pydis_site/apps/home/views.py b/pydis_site/apps/home/views.py
index 8a165682..bfa9e02d 100644
--- a/pydis_site/apps/home/views.py
+++ b/pydis_site/apps/home/views.py
@@ -1,5 +1,4 @@
 import logging
-from typing import Dict, List

 import httpx
 from django.core.handlers.wsgi import WSGIRequest
@@ -45,7 +44,7 @@ class HomeView(View):
         else:
             self.headers = {}

-    def _get_api_data(self) -> Dict[str, Dict[str, str]]:
+    def _get_api_data(self) -> dict[str, dict[str, str]]:
         """
         Call the GitHub API and get information about our repos.

@@ -54,7 +53,7 @@ class HomeView(View):
         repo_dict = {}
         try:
             # Fetch the data from the GitHub API
-            api_data: List[dict] = httpx.get(
+            api_data: list[dict] = httpx.get(
                 self.github_api,
                 headers=self.headers,
                 timeout=settings.TIMEOUT_PERIOD
@@ -89,7 +88,7 @@ class HomeView(View):

         return repo_dict

-    def _get_repo_data(self) -> List[RepositoryMetadata]:
+    def _get_repo_data(self) -> list[RepositoryMetadata]:
         """Build a list of RepositoryMetadata objects that we can use to populate the front page."""
         # First off, load the timestamp of the least recently updated entry.
         if settings.STATIC_BUILD:
@@ -121,8 +120,7 @@ class HomeView(View):

             if settings.STATIC_BUILD:
                 return data
-            else:
-                return RepositoryMetadata.objects.bulk_create(data)
+            return RepositoryMetadata.objects.bulk_create(data)

         # If the data is stale, we should refresh it.
         if (timezone.now() - last_update).seconds > self.repository_cache_ttl:
@@ -149,8 +147,7 @@ class HomeView(View):
             return database_repositories

         # Otherwise, if the data is fresher than 2 minutes old, we should just return it.
-        else:
-            return RepositoryMetadata.objects.all()
+        return RepositoryMetadata.objects.all()

     def get(self, request: WSGIRequest) -> HttpResponse:
         """Collect repo data and render the homepage view."""

diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py
index 067cccc3..a221ea12 100644
--- a/pydis_site/apps/redirect/urls.py
+++ b/pydis_site/apps/redirect/urls.py
@@ -83,22 +83,21 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]:

         return paths

+    redirect_path_name = "pages" if new_app_name == "content" else new_app_name
+    if len(data.redirect_arguments) > 0:
+        redirect_arg = data.redirect_arguments[0]
     else:
-        redirect_path_name = "pages" if new_app_name == "content" else new_app_name
-        if len(data.redirect_arguments) > 0:
-            redirect_arg = data.redirect_arguments[0]
-        else:
-            redirect_arg = "resources/"
-        new_redirect = f"/{redirect_path_name}/{redirect_arg}"
+        redirect_arg = "resources/"
+    new_redirect = f"/{redirect_path_name}/{redirect_arg}"

-        if new_redirect == "/resources/resources/":
-            new_redirect = "/resources/"
+    if new_redirect == "/resources/resources/":
+        new_redirect = "/resources/"

-        return [distill_path(
-            data.original_path,
-            lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)),
-            name=name,
-        )]
+    return [distill_path(
+        data.original_path,
+        lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)),
+        name=name,
+    )]


 urlpatterns = []

diff --git a/pydis_site/apps/redirect/views.py b/pydis_site/apps/redirect/views.py
index 21180cdf..374daf2b 100644
--- a/pydis_site/apps/redirect/views.py
+++ b/pydis_site/apps/redirect/views.py
@@ -1,4 +1,3 @@
-import typing as t

 from django.views.generic import RedirectView

@@ -15,7 +14,7 @@ class CustomRedirectView(RedirectView):
         """Overwrites original as_view to add static args."""
         return super().as_view(**initkwargs)

-    def get_redirect_url(self, *args, **kwargs) -> t.Optional[str]:
+    def get_redirect_url(self, *args, **kwargs) -> str | None:
         """Extends default behaviour to use static args."""
         args = self.static_args + args + tuple(kwargs.values())
         if self.prefix_redirect:
diff --git a/pydis_site/apps/resources/views.py b/pydis_site/apps/resources/views.py
index 2375f722..a2cd8d0c 100644
--- a/pydis_site/apps/resources/views.py
+++ b/pydis_site/apps/resources/views.py
@@ -1,5 +1,4 @@
 import json
-import typing as t
 from pathlib import Path

 import yaml
@@ -22,7 +21,7 @@ class ResourceView(View):
         """Sort a tuple by its key alphabetically, disregarding 'the' as a prefix."""
         name, resource = tuple_
         name = name.casefold()
-        if name.startswith("the ") or name.startswith("the_"):
+        if name.startswith(("the ", "the_")):
             return name[4:]
         return name

@@ -48,7 +47,7 @@ class ResourceView(View):
         }
         for resource_name, resource in self.resources.items():
             css_classes = []
-            for tag_type in resource_tags.keys():
+            for tag_type in resource_tags:
                 # Store the tags into `resource_tags`
                 tags = resource.get("tags", {}).get(tag_type, [])
                 for tag in tags:
@@ -102,7 +101,7 @@ class ResourceView(View):
             "difficulty": [to_kebabcase(tier) for tier in self.filters["Difficulty"]["filters"]],
         }

-    def get(self, request: WSGIRequest, resource_type: t.Optional[str] = None) -> HttpResponse:
+    def get(self, request: WSGIRequest, resource_type: str | None = None) -> HttpResponse:
         """List out all the resources, and any filtering options from the URL."""
         # Add type filtering if the request is made to somewhere like /resources/video.
         # We also convert all spaces to dashes, so they'll correspond with the filters.

diff --git a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
index 9d8f1819..c6638a3b 100644
--- a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
+++ b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import Union

 from django import template

@@ -7,7 +6,7 @@ register = template.Library()


 @register.filter
-def hex_colour(colour: Union[str, int]) -> str:
+def hex_colour(colour: str | int) -> str:
     """
     Converts the given representation of a colour to its RGB hex string.

diff --git a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py
index 31215784..5e49f103 100644
--- a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py
+++ b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py
@@ -3,7 +3,7 @@ import enum
 from django.test import TestCase
 from django.utils import timezone

-from ..templatetags import deletedmessage_filters
+from pydis_site.apps.staff.templatetags import deletedmessage_filters


 class Colour(enum.IntEnum):

diff --git a/pyproject.toml b/pyproject.toml
index 40461f0b..9019efb0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,18 +29,9 @@ whitenoise = "6.4.0"

 [tool.poetry.group.dev.dependencies]
 python-dotenv = "1.0.0"
 taskipy = "1.10.4"
+ruff = "^0.0.265"

 [tool.poetry.group.lint.dependencies]
-flake8 = "6.0.0"
-flake8-annotations = "3.0.1"
-flake8-bandit = "4.1.1"
-flake8-bugbear = "23.5.9"
-flake8-docstrings = "1.7.0"
-flake8-import-order = "0.18.2"
-flake8-tidy-imports = "4.8.0"
-flake8-string-format = "0.3.0"
-flake8-todo = "0.7"
-pep8-naming = "0.13.3"
 pre-commit = "3.3.1"

 [tool.poetry.group.test.dependencies]
@@ -50,6 +41,34 @@ coverage = "7.2.5"
 requires = ["poetry-core>=1.2.0"]
 build-backend = "poetry.core.masonry.api"

+[tool.ruff]
+target-version = "py310"
+extend-exclude = [".cache"]
+ignore = [
+    "ANN002", "ANN003", "ANN101", "ANN102", "ANN204", "ANN206", "ANN401",
+    "B904",
+    "C401", "C408",
+    "D100", "D104", "D105", "D107", "D203", "D212", "D214", "D215", "D301",
+    "D400", "D401", "D402", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "D417",
+    "E731",
+    "RET504",
+    "RUF005",
+    "S311",
+    "SIM102", "SIM108",
+]
+line-length = 120
+select = ["ANN", "B", "C4", "D", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"]
+
+[tool.ruff.per-file-ignores]
+"pydis_site/apps/**/migrations/*.py" = ["ALL"]
+"manage.py" = ["T201"]
+"pydis_site/apps/api/tests/base.py" = ["S106"]
+"pydis_site/apps/**/tests/test_*.py" = ["ANN", "D"]
+"static-builds/netlify_build.py" = ["T201"]
+"pydis_site/apps/api/tests/test_off_topic_channel_names.py" = ["RUF001"]
+"gunicorn.conf.py" = ["ANN", "D"]
+"pydis_site/apps/api/models/bot/off_topic_channel_name.py" = ["RUF001"]
+
 [tool.taskipy.tasks]
 start = "python manage.py run --debug"
 makemigrations = "python manage.py makemigrations"
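[Editor's note: the pyproject.toml hunk above wires up ruff itself; the DTZ, RET, SIM, TID and UP families behind most changes in this patch appear in `select`. The final hunk below applies one more SIM-style cleanup, replacing a try/except/pass around a debug print with contextlib.suppress. An equivalent sketch:]

    import contextlib
    import json

    payload = "not json"
    # Same effect as try: print(json.loads(payload)) / except JSONDecodeError: pass
    with contextlib.suppress(json.JSONDecodeError):
        print(json.loads(payload))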
diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
index 36520c28..2d311a11 100644
--- a/static-builds/netlify_build.py
+++ b/static-builds/netlify_build.py
@@ -14,15 +14,15 @@ from pathlib import Path
 from urllib import parse

 import httpx
+import contextlib


 def raise_response(response: httpx.Response) -> None:
     """Raise an exception from a response if necessary."""
     if response.status_code // 100 != 2:
-        try:
+        with contextlib.suppress(json.JSONDecodeError):
             print(response.json())
-        except json.JSONDecodeError:
-            pass
+
         response.raise_for_status()
--
cgit v1.2.3