From 2f5effe075287dab4965f3278031bcd433a83f7c Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Mon, 26 Apr 2021 17:17:06 +0200 Subject: Filter: new schema This commit adds new filter schema as described in #479 --- pydis_site/apps/api/models/__init__.py | 5 + pydis_site/apps/api/models/bot/__init__.py | 2 +- pydis_site/apps/api/models/bot/filter_list.py | 42 ------ pydis_site/apps/api/models/bot/filters.py | 187 ++++++++++++++++++++++++ pydis_site/apps/api/viewsets/bot/filter_list.py | 2 +- 5 files changed, 194 insertions(+), 44 deletions(-) delete mode 100644 pydis_site/apps/api/models/bot/filter_list.py create mode 100644 pydis_site/apps/api/models/bot/filters.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index fd5bf220..72f59b57 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -1,6 +1,11 @@ # flake8: noqa from .bot import ( FilterList, + FilterSettings, + FilterAction, + ChannelRange, + Filter, + FilterOverride, BotSetting, DocumentationLink, DeletedMessage, diff --git a/pydis_site/apps/api/models/bot/__init__.py b/pydis_site/apps/api/models/bot/__init__.py index ac864de3..1bfe0063 100644 --- a/pydis_site/apps/api/models/bot/__init__.py +++ b/pydis_site/apps/api/models/bot/__init__.py @@ -1,5 +1,5 @@ # flake8: noqa -from .filter_list import FilterList +from .filters import FilterList, FilterSettings, FilterAction, ChannelRange, Filter, FilterOverride from .bot_setting import BotSetting from .deleted_message import DeletedMessage from .documentation_link import DocumentationLink diff --git a/pydis_site/apps/api/models/bot/filter_list.py b/pydis_site/apps/api/models/bot/filter_list.py deleted file mode 100644 index d30f7213..00000000 --- a/pydis_site/apps/api/models/bot/filter_list.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.db import models - -from pydis_site.apps.api.models.mixins import ModelReprMixin, ModelTimestampMixin - - -class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): - """An item that is either allowed or denied.""" - - FilterListType = models.TextChoices( - 'FilterListType', - 'GUILD_INVITE ' - 'FILE_FORMAT ' - 'DOMAIN_NAME ' - 'FILTER_TOKEN ' - 'REDIRECT ' - ) - type = models.CharField( - max_length=50, - help_text="The type of allowlist this is on.", - choices=FilterListType.choices, - ) - allowed = models.BooleanField( - help_text="Whether this item is on the allowlist or the denylist." - ) - content = models.TextField( - help_text="The data to add to the allow or denylist." - ) - comment = models.TextField( - help_text="Optional comment on this entry.", - null=True - ) - - class Meta: - """Metaconfig for this model.""" - - # This constraint ensures only one filterlist with the - # same content can exist. This means that we cannot have both an allow - # and a deny for the same item, and we cannot have duplicates of the - # same item. 
- constraints = [ - models.UniqueConstraint(fields=['content', 'type'], name='unique_filter_list'), - ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py new file mode 100644 index 00000000..dfc38e82 --- /dev/null +++ b/pydis_site/apps/api/models/bot/filters.py @@ -0,0 +1,187 @@ +from typing import List + +from django.contrib.postgres.fields import ArrayField +from django.core.exceptions import ValidationError +from django.db import models +from django.db.models import UniqueConstraint + + +class FilterListType(models.IntegerChoices): + """Choice between allow or deny for a list type.""" + + ALLOW: 1 + DENY: 0 + + +class InfractionType(models.TextChoices): + """Possible type of infractions.""" + + NOTE = "Note" + WARN = "Warn" + MUTE = "Mute" + KICK = "Kick" + BAN = "Ban" + + +# Valid special values in ping related fields +VALID_PINGS = ("everyone", "here", "moderators", "onduty", "admins") + + +def validate_ping_field(value_list: List[str]) -> None: + """Validate that the values are either a special value or a UID.""" + for value in value_list: + # Check if it is a special value + if value in VALID_PINGS: + continue + # Check if it is a UID + if value.isnumeric(): + continue + + raise ValidationError(f"{value!r} isn't a valid ping type.") + + +class FilterList(models.Model): + """Represent a list in its allow or deny form.""" + + name = models.CharField(max_length=50, help_text="The unique name of this list.") + list_type = models.IntegerField( + choices=FilterListType.choices, + help_text="Whenever this list is an allowlist or denylist" + ) + + filters = models.ManyToManyField("Filter", help_text="The content of this list.") + default_settings = models.ForeignKey( + "FilterSettings", + models.CASCADE, + help_text="Default parameters of this list." + ) + + class Meta: + """Constrain name and list_type unique.""" + + constraints = ( + UniqueConstraint(fields=("name", "list_type"), name="unique_name_type"), + ) + + def __str__(self) -> str: + return f"Filter {'allow' if self.list_type == 1 else 'deny'}list {self.name!r}" + + +class FilterSettings(models.Model): + """Persistent settings of a filter list.""" + + ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers." + ) + filter_dm = models.BooleanField(help_text="Whenever DMs should be filtered.") + dm_ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers on a DM." + ) + delete_messages = models.BooleanField( + help_text="Whenever this filter should delete messages triggering it." + ) + bypass_roles = ArrayField( + models.BigIntegerField(), + help_text="Roles and users who can bypass this filter." + ) + enabled = models.BooleanField( + help_text="Whenever ths filter is currently enabled." + ) + default_action = models.ForeignKey( + "FilterAction", + models.CASCADE, + help_text="The default action to perform." + ) + default_range = models.ForeignKey( + "ChannelRange", + models.CASCADE, + help_text="Where does this filter apply." + ) + + +class FilterAction(models.Model): + """The action to take when a filter is triggered.""" + + user_dm = models.CharField( + max_length=1000, + null=True, + help_text="The DM to send to a user triggering this filter." + ) + infraction_type = models.CharField( + choices=InfractionType.choices, + max_length=4, + null=True, + help_text="The infraction to apply to this user." 
+ ) + infraction_reason = models.CharField( + max_length=1000, + help_text="The reason to give for the infraction." + ) + infraction_duration = models.DurationField( + null=True, + help_text="The duration of the infraction. Null if permanent." + ) + + +class ChannelRange(models.Model): + """ + Where a filter should apply. + + The resolution is done in the following order: + - disallowed channels + - disallowed categories + - allowed categories + - allowed channels + - default + """ + + disallowed_channels = ArrayField(models.IntegerField()) + disallowed_categories = ArrayField(models.IntegerField()) + allowed_channels = ArrayField(models.IntegerField()) + allowed_category = ArrayField(models.IntegerField()) + default = models.BooleanField() + + +class Filter(models.Model): + """One specific trigger of a list.""" + + content = models.CharField(max_length=100, help_text="The definition of this filter.") + description = models.CharField(max_length=200, help_text="Why this filter has been added.") + additional_field = models.BooleanField(null=True, help_text="Implementation specific field.") + override = models.ForeignKey( + "FilterOverride", + models.SET_NULL, + null=True, + help_text="Override the default settings." + ) + + def __str__(self) -> str: + return f"Filter {self.content!r}" + + +class FilterOverride(models.Model): + """ + Setting overrides of a specific filter. + + Any non-null value will override the default ones. + """ + + ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), null=True + ) + filter_dm = models.BooleanField(null=True) + dm_ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + null=True + ) + delete_messages = models.BooleanField(null=True) + bypass_roles = ArrayField(models.IntegerField(), null=True) + enabled = models.BooleanField(null=True) + default_action = models.ForeignKey("FilterAction", models.CASCADE, null=True) + default_range = models.ForeignKey("ChannelRange", models.CASCADE, null=True) diff --git a/pydis_site/apps/api/viewsets/bot/filter_list.py b/pydis_site/apps/api/viewsets/bot/filter_list.py index 4b05acee..3eacdaaa 100644 --- a/pydis_site/apps/api/viewsets/bot/filter_list.py +++ b/pydis_site/apps/api/viewsets/bot/filter_list.py @@ -3,7 +3,7 @@ from rest_framework.request import Request from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet -from pydis_site.apps.api.models.bot.filter_list import FilterList +from pydis_site.apps.api.models.bot.filters import FilterList from pydis_site.apps.api.serializers import FilterListSerializer -- cgit v1.2.3 From c6bcca08e58855cf3c3f87602f752dd40b10efad Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Mon, 26 Apr 2021 17:29:01 +0200 Subject: Filters: Add new models to Django Admin --- pydis_site/apps/api/admin.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/admin.py b/pydis_site/apps/api/admin.py index 2aca38a1..e123d150 100644 --- a/pydis_site/apps/api/admin.py +++ b/pydis_site/apps/api/admin.py @@ -13,6 +13,8 @@ from .models import ( BotSetting, DeletedMessage, DocumentationLink, + Filter, + FilterList, Infraction, MessageDeletionContext, Nomination, @@ -194,6 +196,16 @@ class DeletedMessageInline(admin.TabularInline): model = DeletedMessage +@admin.register(FilterList) +class FilterListAdmin(admin.ModelAdmin): + """Admin formatting for the FilterList model.""" + + +@admin.register(Filter) +class 
FilterAdmin(admin.ModelAdmin): + """Admin formatting for the Filter model.""" + + @admin.register(MessageDeletionContext) class MessageDeletionContextAdmin(admin.ModelAdmin): """Admin formatting for the MessageDeletionContext model.""" -- cgit v1.2.3 From 87c78ceb49f6a2a0ab268fa2dde1850df5506eee Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Mon, 26 Apr 2021 17:30:10 +0200 Subject: Filters: Add migration to the new model This will take the currently defined filter list and put them inside the new schema while trying to keep defaults similar to our current setup. --- .../apps/api/migrations/0070_new_filter_schema.py | 165 +++++++++++++++++++++ 1 file changed, 165 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0070_new_filter_schema.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py new file mode 100644 index 00000000..e6d7ffe7 --- /dev/null +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -0,0 +1,165 @@ +# Modified migration file to migrate existing filters to the new one + +import django.contrib.postgres.fields +from django.apps.registry import Apps +from django.db import migrations, models +import django.db.models.deletion +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + +import pydis_site.apps.api.models.bot.filters + +OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY')) + + +def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") + filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") + filter_settings: pydis_site.apps.api.models.FilterSettings = apps.get_model("api", "FilterSettings") + channel_range: pydis_site.apps.api.models.ChannelRange = apps.get_model("api", "ChannelRange") + filter_action: pydis_site.apps.api.models.FilterAction = apps.get_model("api", "FilterAction") + filter_list_old = apps.get_model("api", "FilterListOld") + + for name, type_ in OLD_LIST_NAMES: + objects = filter_list_old.objects.filter(type=name) + + default_action = filter_action.objects.create( + user_dm=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ) + default_action.save() + default_range = channel_range.objects.create( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_category=[], + default=True + ) + default_range.save() + default_settings = filter_settings.objects.create( + ping_type=["onduty"], + filter_dm=True, + dm_ping_type=["onduty"], + delete_messages=True, + bypass_roles=[267630620367257601], + enabled=False, + default_action=default_action, + default_range=default_range + ) + default_settings.save() + list_ = filter_list.objects.create( + name=name.lower(), + default_settings=default_settings, + list_type=1 if type_ == "ALLOW" else 0 + ) + + new_objects = [] + for object_ in objects: + new_object = filter_.objects.create( + content=object_.content, + description=object_.comment or "", + additional_field=None, override=None + ) + new_object.save() + new_objects.append(new_object) + + list_.filters.add(*new_objects) + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0069_documentationlink_validators'), + ] + + operations = [ + migrations.RenameModel( + old_name='FilterList', + new_name='FilterListOld' + ), + 
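+        # The old FilterList table is renamed out of the way above; the new models are
+        # created below, existing rows are copied across by the RunPython step further
+        # down, and the renamed table is finally dropped.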
migrations.CreateModel( + name='ChannelRange', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_category', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('default', models.BooleanField()), + ], + ), + migrations.CreateModel( + name='Filter', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), + ('description', models.CharField(help_text='Why this filter has been added.', max_length=200)), + ('additional_field', models.BooleanField(help_text='Implementation specific field.', null=True)), + ], + ), + migrations.CreateModel( + name='FilterAction', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('user_dm', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), + ], + ), + migrations.CreateModel( + name='FilterSettings', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('filter_dm', models.BooleanField(help_text='Whenever DMs should be filtered.')), + ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('delete_messages', models.BooleanField(help_text='Whenever this filter should delete messages triggering it.')), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None)), + ('enabled', models.BooleanField(help_text='Whenever ths filter is currently enabled.')), + ('default_action', models.ForeignKey(help_text='The default action to perform.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), + ('default_range', models.ForeignKey(help_text='Where does this filter apply.', on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), + ], + ), + migrations.CreateModel( + name='FilterOverride', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), null=True, size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('filter_dm', models.BooleanField(null=True)), + ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), null=True, size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('delete_messages', models.BooleanField(null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), + ('enabled', models.BooleanField(null=True)), + ('default_action', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), + ('default_range', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), + ], + ), + migrations.CreateModel( + name='FilterList', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), + ('list_type', models.IntegerField(choices=[], help_text='Whenever this list is an allowlist or denylist')), + ('default_settings', models.ForeignKey(help_text='Default parameters of this list.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterSettings')), + ('filters', models.ManyToManyField(help_text='The content of this list.', to='api.Filter')), + ], + ), + migrations.AddField( + model_name='filter', + name='override', + field=models.ForeignKey(help_text='Override the default settings.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='api.FilterOverride'), + ), + migrations.AddConstraint( + model_name='filterlist', + constraint=models.UniqueConstraint(fields=('name', 'list_type'), name='unique_name_type'), + ), + 
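+        # Data migration defined in forward() above: it copies each old filter list and
+        # its entries into the new schema. reverse_code is a no-op, so rolling back past
+        # this migration cannot restore the old table, which is deleted right after.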
migrations.RunPython( + code=forward, # Core of the migration + reverse_code=lambda *_: None + ), + migrations.DeleteModel( + name='FilterListOld' + ) + ] -- cgit v1.2.3 From 64c5c617df68cc7b54fffbe8d76bb5c67d641c98 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 10:54:21 +0200 Subject: Filters: hook the new models into the REST API --- .../apps/api/migrations/0070_new_filter_schema.py | 4 +- pydis_site/apps/api/models/bot/filters.py | 4 +- pydis_site/apps/api/serializers.py | 98 +++- pydis_site/apps/api/urls.py | 29 +- pydis_site/apps/api/viewsets/__init__.py | 7 +- pydis_site/apps/api/viewsets/bot/__init__.py | 9 +- pydis_site/apps/api/viewsets/bot/filter_list.py | 98 ---- pydis_site/apps/api/viewsets/bot/filters.py | 640 +++++++++++++++++++++ 8 files changed, 773 insertions(+), 116 deletions(-) delete mode 100644 pydis_site/apps/api/viewsets/bot/filter_list.py create mode 100644 pydis_site/apps/api/viewsets/bot/filters.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index e6d7ffe7..f4fc9494 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -132,8 +132,8 @@ class Migration(migrations.Migration): ('delete_messages', models.BooleanField(null=True)), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), ('enabled', models.BooleanField(null=True)), - ('default_action', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), - ('default_range', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), + ('filter_action', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), + ('filter_range', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), ], ), migrations.CreateModel( diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index dfc38e82..16ac206e 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -183,5 +183,5 @@ class FilterOverride(models.Model): delete_messages = models.BooleanField(null=True) bypass_roles = ArrayField(models.IntegerField(), null=True) enabled = models.BooleanField(null=True) - default_action = models.ForeignKey("FilterAction", models.CASCADE, null=True) - default_range = models.ForeignKey("ChannelRange", models.CASCADE, null=True) + filter_action = models.ForeignKey("FilterAction", models.CASCADE, null=True) + filter_range = models.ForeignKey("ChannelRange", models.CASCADE, null=True) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index de2fccff..306dccb3 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -12,12 +12,17 @@ from rest_framework.serializers import ( from rest_framework.settings import api_settings from rest_framework.validators import UniqueTogetherValidator -from .models import ( +from .models import ( # noqa: I101 - Preserving the filter order BotSetting, DeletedMessage, DocumentationLink, - FilterList, Infraction, + FilterList, + FilterSettings, + FilterAction, + ChannelRange, + Filter, + FilterOverride, MessageDeletionContext, Nomination, NominationEntry, @@ -119,24 +124,97 @@ class 
FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'created_at', 'updated_at', 'type', 'allowed', 'content', 'comment') + fields = ('id', 'name', 'list_type', 'filters', 'default_settings') - # This validator ensures only one filterlist with the - # same content can exist. This means that we cannot have both an allow - # and a deny for the same item, and we cannot have duplicates of the - # same item. + # Ensure that we can only have one filter list with the same name and field validators = [ UniqueTogetherValidator( queryset=FilterList.objects.all(), - fields=['content', 'type'], + fields=('name', 'list_type'), message=( - "A filterlist for this item already exists. " - "Please note that you cannot add the same item to both allow and deny." + "A filterlist with the same name and type already exist." ) ), ] +class FilterSettingsSerializer(ModelSerializer): + """A class providing (de-)serialization of `FilterSettings` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = FilterSettings + fields = ( + 'id', + 'ping_type', + 'filter_dm', + 'dm_ping_type', + 'delete_messages', + 'bypass_roles', + 'enabled', + 'default_action', + 'default_range' + ) + + +class FilterActionSerializer(ModelSerializer): + """A class providing (de-)serialization of `FilterAction` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = FilterAction + fields = ('id', 'user_dm', 'infraction_type', 'infraction_reason', 'infraction_duration') + + +class FilterChannelRangeSerializer(ModelSerializer): + """A class providing (de-)serialization of `ChannelRange` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = ChannelRange + fields = ( + 'id', + 'disallowed_channels', + 'disallowed_categories', + 'allowed_channels', + 'allowed_category', + 'default' + ) + + +class FilterSerializer(ModelSerializer): + """A class providing (de-)serialization of `Filter` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = Filter + fields = ('id', 'content', 'description', 'additional_field', 'override') + + +class FilterOverrideSerializer(ModelSerializer): + """A class providing (de-)serialization of `FilterOverride` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = FilterOverride + fields = ( + 'id', + 'ping_type', + 'filter_dm', + 'dm_ping_type', + 'delete_messages', + 'bypass_roles', + 'enabled', + 'filter_action', + 'filter_range' + ) + + class InfractionSerializer(ModelSerializer): """A class providing (de-)serialization of `Infraction` instances.""" diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py index b0ab545b..7af2e505 100644 --- a/pydis_site/apps/api/urls.py +++ b/pydis_site/apps/api/urls.py @@ -2,11 +2,16 @@ from django.urls import include, path from rest_framework.routers import DefaultRouter from .views import HealthcheckView, RulesView -from .viewsets import ( +from .viewsets import ( # noqa: I101 - Preserving the filter order BotSettingViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, FilterListViewSet, + FilterSettingsViewSet, + FilterActionViewSet, + FilterChannelRangeViewSet, + FilterViewSet, + FilterOverrideViewSet, InfractionViewSet, NominationViewSet, OffTopicChannelNameViewSet, @@ -19,9 +24,29 @@ from .viewsets import ( # 
https://www.django-rest-framework.org/api-guide/routers/#defaultrouter bot_router = DefaultRouter(trailing_slash=False) bot_router.register( - 'filter-lists', + 'filter/filter_lists', FilterListViewSet ) +bot_router.register( + 'filter/filter_settings', + FilterSettingsViewSet +) +bot_router.register( + 'filter/filter_action', + FilterActionViewSet +) +bot_router.register( + 'filter/channel_range', + FilterChannelRangeViewSet +) +bot_router.register( + 'filter/filter_override', + FilterOverrideViewSet +) +bot_router.register( + 'filter/filters', + FilterViewSet +) bot_router.register( 'bot-settings', BotSettingViewSet diff --git a/pydis_site/apps/api/viewsets/__init__.py b/pydis_site/apps/api/viewsets/__init__.py index f133e77f..b3992d66 100644 --- a/pydis_site/apps/api/viewsets/__init__.py +++ b/pydis_site/apps/api/viewsets/__init__.py @@ -1,10 +1,15 @@ # flake8: noqa from .bot import ( - FilterListViewSet, BotSettingViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, InfractionViewSet, + FilterListViewSet, + FilterSettingsViewSet, + FilterActionViewSet, + FilterChannelRangeViewSet, + FilterViewSet, + FilterOverrideViewSet, NominationViewSet, OffensiveMessageViewSet, OffTopicChannelNameViewSet, diff --git a/pydis_site/apps/api/viewsets/bot/__init__.py b/pydis_site/apps/api/viewsets/bot/__init__.py index 84b87eab..781624bd 100644 --- a/pydis_site/apps/api/viewsets/bot/__init__.py +++ b/pydis_site/apps/api/viewsets/bot/__init__.py @@ -1,5 +1,12 @@ # flake8: noqa -from .filter_list import FilterListViewSet +from .filters import ( + FilterListViewSet, + FilterSettingsViewSet, + FilterActionViewSet, + FilterChannelRangeViewSet, + FilterViewSet, + FilterOverrideViewSet +) from .bot_setting import BotSettingViewSet from .deleted_message import DeletedMessageViewSet from .documentation_link import DocumentationLinkViewSet diff --git a/pydis_site/apps/api/viewsets/bot/filter_list.py b/pydis_site/apps/api/viewsets/bot/filter_list.py deleted file mode 100644 index 3eacdaaa..00000000 --- a/pydis_site/apps/api/viewsets/bot/filter_list.py +++ /dev/null @@ -1,98 +0,0 @@ -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework.viewsets import ModelViewSet - -from pydis_site.apps.api.models.bot.filters import FilterList -from pydis_site.apps.api.serializers import FilterListSerializer - - -class FilterListViewSet(ModelViewSet): - """ - View providing CRUD operations on items allowed or denied by our bot. - - ## Routes - ### GET /bot/filter-lists - Returns all filterlist items in the database. - - #### Response format - >>> [ - ... { - ... 'id': "2309268224", - ... 'created_at': "01-01-2020 ...", - ... 'updated_at': "01-01-2020 ...", - ... 'type': "file_format", - ... 'allowed': 'true', - ... 'content': ".jpeg", - ... 'comment': "Popular image format.", - ... }, - ... ... - ... ] - - #### Status codes - - 200: returned on success - - 401: returned if unauthenticated - - ### GET /bot/filter-lists/ - Returns a specific FilterList item from the database. - - #### Response format - >>> { - ... 'id': "2309268224", - ... 'created_at': "01-01-2020 ...", - ... 'updated_at': "01-01-2020 ...", - ... 'type': "file_format", - ... 'allowed': 'true', - ... 'content': ".jpeg", - ... 'comment': "Popular image format.", - ... } - - #### Status codes - - 200: returned on success - - 404: returned if the id was not found. 
- - ### GET /bot/filter-lists/get-types - Returns a list of valid list types that can be used in POST requests. - - #### Response format - >>> [ - ... ["GUILD_INVITE","Guild Invite"], - ... ["FILE_FORMAT","File Format"], - ... ["DOMAIN_NAME","Domain Name"], - ... ["FILTER_TOKEN","Filter Token"], - ... ["REDIRECT", "Redirect"] - ... ] - - #### Status codes - - 200: returned on success - - ### POST /bot/filter-lists - Adds a single FilterList item to the database. - - #### Request body - >>> { - ... 'type': str, - ... 'allowed': bool, - ... 'content': str, - ... 'comment': Optional[str], - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### DELETE /bot/filter-lists/ - Deletes the FilterList item with the given `id`. - - #### Status codes - - 204: returned on success - - 404: if a tag with the given `id` does not exist - """ - - serializer_class = FilterListSerializer - queryset = FilterList.objects.all() - - @action(detail=False, url_path='get-types', methods=["get"]) - def get_types(self, _: Request) -> Response: - """Get a list of all the types of FilterLists we support.""" - return Response(FilterList.FilterListType.choices) diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py new file mode 100644 index 00000000..fea53265 --- /dev/null +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -0,0 +1,640 @@ +from rest_framework.viewsets import ModelViewSet + +from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order + FilterList, + FilterSettings, + FilterAction, + ChannelRange, + Filter, + FilterOverride +) +from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the filter order + FilterListSerializer, + FilterSettingsSerializer, + FilterActionSerializer, + FilterChannelRangeSerializer, + FilterSerializer, + FilterOverrideSerializer +) + + +class FilterListViewSet(ModelViewSet): + """ + View providing CRUD operations on lists of items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/filter_lists + Returns all FilterList items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "name": "guild_invite", + ... "list_type": 1, + ... "filters": [ + ... 1, + ... 2, + ... ... + ... ], + ... "default_settings": 1 + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/filter_lists/ + Returns a specific FilterList item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "name": "guild_invite", + ... "list_type": 1, + ... "filters": [ + ... 1, + ... 2, + ... ... + ... ], + ... "default_settings": 1 + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/filter_lists + Adds a single FilterList item to the database. + + #### Request body + >>> { + ... "name": "guild_invite", + ... "list_type": 1, + ... "filters": [ + ... 1, + ... 2, + ... ... + ... ], + ... "default_settings": 1 + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_lists/ + Updates a specific FilterList item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "name": "guild_invite", + ... "list_type": 1, + ... "filters": [ + ... 1, + ... 2, + ... ... + ... ], + ... "default_settings": 1 + ... 
} + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/filter_lists/ + Deletes the FilterList item with the given `id`. + + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterListSerializer + queryset = FilterList.objects.all() + + +class FilterSettingsViewSet(ModelViewSet): + """ + View providing CRUD operations on settings of items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/filter_settings + Returns all FilterSettings items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "default_action": 1, + ... "default_range": 1 + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/filter_settings/ + Returns a specific FilterSettings item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "default_action": 1, + ... "default_range": 1 + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/filter_settings + Adds a single FilterSettings item to the database. + + #### Request body + >>> { + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "default_action": 1, + ... "default_range": 1 + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_settings/ + Updates a specific FilterSettings item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "default_action": 1, + ... "default_range": 1 + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/filter_settings/ + Deletes the FilterSettings item with the given `id`. + + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterSettingsSerializer + queryset = FilterSettings.objects.all() + + +class FilterActionViewSet(ModelViewSet): + """ + View providing CRUD operations on actions taken by items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/filter_action + Returns all FilterAction items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "user_dm": "message", + ... "infraction_type": "Warn", + ... "infraction_reason": "", + ... "infraction_duration": "01 12:34:56.123456" + ... }, + ... ... 
+ ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/filter_action/ + Returns a specific FilterAction item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "user_dm": "message", + ... "infraction_type": "Warn", + ... "infraction_reason": "", + ... "infraction_duration": "01 12:34:56.123456" + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/filter_action + Adds a single FilterAction item to the database. + + #### Request body + >>> { + ... "user_dm": "message", + ... "infraction_type": "Warn", + ... "infraction_reason": "", + ... "infraction_duration": "01 12:34:56.123456" + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_action/ + Updates a specific FilterAction item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "user_dm": "message", + ... "infraction_type": "Warn", + ... "infraction_reason": "", + ... "infraction_duration": "01 12:34:56.123456" + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/filter_action/ + Deletes the FilterAction item with the given `id`. + + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterActionSerializer + queryset = FilterAction.objects.all() + + +class FilterChannelRangeViewSet(ModelViewSet): + """ + View providing CRUD operations on channels targeted by items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/channel_range + Returns all ChannelRange items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_category": [], + ... "default": True + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/channel_range/ + Returns a specific ChannelRange item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_category": [], + ... "default": True + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/channel_range + Adds a single ChannelRange item to the database. + + #### Request body + >>> { + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_category": [], + ... "default": True + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/channel_range/ + Updates a specific ChannelRange item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_category": [], + ... "default": True + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/channel_range/ + Deletes the ChannelRange item with the given `id`. 
+ + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterChannelRangeSerializer + queryset = ChannelRange.objects.all() + + +class FilterViewSet(ModelViewSet): + """ + View providing CRUD operations on items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/filters + Returns all Filter items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1 + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/filters/ + Returns a specific Filter item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1 + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/filters + Adds a single Filter item to the database. + + #### Request body + >>> { + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1 + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filters/ + Updates a specific Filter item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1 + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/filters/ + Deletes the Filter item with the given `id`. + + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterSerializer + queryset = Filter.objects.all() + + +class FilterOverrideViewSet(ModelViewSet): + """ + View providing CRUD operations setting overrides of items allowed or denied by our bot. + + ## Routes + ### GET /bot/filter/filter_override + Returns all FilterOverride items in the database. + + #### Response format + >>> [ + ... { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "filter_action": 1, + ... "filter_range": 1 + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/filter/filter_override/ + Returns a specific FilterOverride item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "filter_action": 1, + ... "filter_range": 1 + ... } + + #### Status codes + - 200: returned on success + - 404: returned if the id was not found. + + ### POST /bot/filter/filter_override + Adds a single FilterOverride item to the database. + + #### Request body + >>> { + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... 
"filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "filter_action": 1, + ... "filter_range": 1 + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_override/ + Updates a specific FilterOverride item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "ping_type": [ + ... "onduty", + ... ... + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty", + ... ... + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601, + ... ... + ... ], + ... "enabled": True, + ... "filter_action": 1, + ... "filter_range": 1 + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/filter/filter_override/ + Deletes the FilterOverride item with the given `id`. + + #### Status codes + - 204: returned on success + - 404: if a tag with the given `id` does not exist + """ + + serializer_class = FilterOverrideSerializer + queryset = FilterOverride.objects.all() -- cgit v1.2.3 From c6dfd896304cb4e36c4020f4704d9537fd3e8e9f Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 16:39:00 +0200 Subject: Filters: update tests to the new schema --- .../apps/api/migrations/0070_new_filter_schema.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/tests/test_filterlists.py | 122 --------- pydis_site/apps/api/tests/test_filters.py | 284 +++++++++++++++++++++ pydis_site/apps/api/tests/test_models.py | 14 + 5 files changed, 300 insertions(+), 124 deletions(-) delete mode 100644 pydis_site/apps/api/tests/test_filterlists.py create mode 100644 pydis_site/apps/api/tests/test_filters.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index f4fc9494..de75e677 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -143,7 +143,7 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[], help_text='Whenever this list is an allowlist or denylist')), ('default_settings', models.ForeignKey(help_text='Default parameters of this list.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterSettings')), - ('filters', models.ManyToManyField(help_text='The content of this list.', to='api.Filter')), + ('filters', models.ManyToManyField(help_text='The content of this list.', to='api.Filter', default=[])), ], ), migrations.AddField( diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 16ac206e..869f947c 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -49,7 +49,7 @@ class FilterList(models.Model): help_text="Whenever this list is an allowlist or denylist" ) - filters = models.ManyToManyField("Filter", help_text="The content of this list.") + filters = models.ManyToManyField("Filter", help_text="The content of this list.", default=[]) default_settings = models.ForeignKey( "FilterSettings", models.CASCADE, diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py 
deleted file mode 100644 index 5a5bca60..00000000 --- a/pydis_site/apps/api/tests/test_filterlists.py +++ /dev/null @@ -1,122 +0,0 @@ -from django.urls import reverse - -from pydis_site.apps.api.models import FilterList -from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase - -URL = reverse('api:bot:filterlist-list') -JPEG_ALLOWLIST = { - "type": 'FILE_FORMAT', - "allowed": True, - "content": ".jpeg", -} -PNG_ALLOWLIST = { - "type": 'FILE_FORMAT', - "allowed": True, - "content": ".png", -} - - -class UnauthenticatedTests(AuthenticatedAPITestCase): - def setUp(self): - super().setUp() - self.client.force_authenticate(user=None) - - def test_cannot_read_allowedlist_list(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 401) - - -class EmptyDatabaseTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - - def test_returns_empty_object(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), []) - - -class FetchTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - cls.jpeg_format = FilterList.objects.create(**JPEG_ALLOWLIST) - cls.png_format = FilterList.objects.create(**PNG_ALLOWLIST) - - def test_returns_name_in_list(self): - response = self.client.get(URL) - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()[0]["content"], self.jpeg_format.content) - self.assertEqual(response.json()[1]["content"], self.png_format.content) - - def test_returns_single_item_by_id(self): - response = self.client.get(f'{URL}/{self.jpeg_format.id}') - - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json().get("content"), self.jpeg_format.content) - - def test_returns_filter_list_types(self): - response = self.client.get(f'{URL}/get-types') - - self.assertEqual(response.status_code, 200) - for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices): - self.assertEquals(api_type[0], model_type[0]) - self.assertEquals(api_type[1], model_type[1]) - - -class CreationTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - - def test_returns_400_for_missing_params(self): - no_type_json = { - "allowed": True, - "content": ".jpeg" - } - no_allowed_json = { - "type": "FILE_FORMAT", - "content": ".jpeg" - } - no_content_json = { - "allowed": True, - "type": "FILE_FORMAT" - } - cases = [{}, no_type_json, no_allowed_json, no_content_json] - - for case in cases: - with self.subTest(case=case): - response = self.client.post(URL, data=case) - self.assertEqual(response.status_code, 400) - - def test_returns_201_for_successful_creation(self): - response = self.client.post(URL, data=JPEG_ALLOWLIST) - self.assertEqual(response.status_code, 201) - - def test_returns_400_for_duplicate_creation(self): - self.client.post(URL, data=JPEG_ALLOWLIST) - response = self.client.post(URL, data=JPEG_ALLOWLIST) - self.assertEqual(response.status_code, 400) - - -class DeletionTests(AuthenticatedAPITestCase): - @classmethod - def setUpTestData(cls): - FilterList.objects.all().delete() - cls.jpeg_format = FilterList.objects.create(**JPEG_ALLOWLIST) - cls.png_format = FilterList.objects.create(**PNG_ALLOWLIST) - - def test_deleting_unknown_id_returns_404(self): - response = self.client.delete(f"{URL}/200") - self.assertEqual(response.status_code, 404) - - def 
test_deleting_known_id_returns_204(self): - response = self.client.delete(f"{URL}/{self.jpeg_format.id}") - self.assertEqual(response.status_code, 204) - - response = self.client.get(f"{URL}/{self.jpeg_format.id}") - self.assertNotIn(self.png_format.content, response.json()) diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py new file mode 100644 index 00000000..de78ecfd --- /dev/null +++ b/pydis_site/apps/api/tests/test_filters.py @@ -0,0 +1,284 @@ +import contextlib +from dataclasses import dataclass +from typing import Any, Dict, Tuple, Type + +from django.db.models import Model +from django_hosts import reverse + +from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order + FilterList, + FilterSettings, + FilterAction, + ChannelRange, + Filter, + FilterOverride +) +from pydis_site.apps.api.tests.base import APISubdomainTestCase + + +@dataclass() +class TestSequence: + model: Type[Model] + route: str + object: Dict[str, Any] + ignored_fields: Tuple[str] = () + + def url(self, detail: bool = False) -> str: + return reverse(f'bot:{self.route}-{"detail" if detail else "list"}', host='api') + + +FK_FIELDS: Dict[Type[Model], Tuple[str]] = { + FilterList: ("default_settings",), + FilterSettings: ("default_action", "default_range"), + FilterAction: (), + ChannelRange: (), + Filter: (), + FilterOverride: ("filter_action", "filter_range") +} + + +def get_test_sequences() -> Dict[str, TestSequence]: + return { + "filter_list": TestSequence( + FilterList, + "filterlist", + { + "name": "testname", + "list_type": 0, + "default_settings": FilterSettings( + ping_type=[], + filter_dm=False, + dm_ping_type=[], + delete_messages=False, + bypass_roles=[], + enabled=False, + default_action=FilterAction( + user_dm=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + default_range=ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_category=[], + default=False + ) + ) + }, + ignored_fields=("filters",) + ), + "filter_settings": TestSequence( + FilterSettings, + "filtersettings", + { + "ping_type": ["onduty"], + "filter_dm": True, + "dm_ping_type": ["123456"], + "delete_messages": True, + "bypass_roles": [123456], + "enabled": True, + "default_action": FilterAction( + user_dm=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + "default_range": ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_category=[], + default=False + ) + } + ), + "filter_action": TestSequence( + FilterAction, + "filteraction", + { + "user_dm": "This is a DM message.", + "infraction_type": "Mute", + "infraction_reason": "Too long beard", + "infraction_duration": "1 02:03:00" + } + ), + "channel_range": TestSequence( + ChannelRange, + "channelrange", + { + "disallowed_channels": [1234], + "disallowed_categories": [5678], + "allowed_channels": [9101], + "allowed_category": [1121], + "default": True + } + ), + "filter": TestSequence( + Filter, + "filter", + { + "content": "bad word", + "description": "This is a really bad word.", + "additional_field": None, + "override": None + } + ), + "filter_override": TestSequence( + FilterOverride, + "filteroverride", + { + "ping_type": ["everyone"], + "filter_dm": False, + "dm_ping_type": ["here"], + "delete_messages": False, + "bypass_roles": [9876], + "enabled": True, + "filter_action": None, + "filter_range": None + } + ) + } + + +def 
save_nested_objects(object_: Model, save_root: bool = True) -> None: + for field in FK_FIELDS[object_.__class__]: + value = getattr(object_, field) + + if value is not None: + save_nested_objects(value) + + if save_root: + object_.save() + + +def clean_test_json(json: dict) -> dict: + for key, value in json.items(): + if isinstance(value, Model): + json[key] = value.id + + return json + + +def clean_api_json(json: dict, sequence: TestSequence) -> dict: + for field in sequence.ignored_fields + ("id",): + with contextlib.suppress(KeyError): + del json[field] + + return json + + +class GenericFilterTest(APISubdomainTestCase): + def test_cannot_read_unauthenticated(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + self.client.force_authenticate(user=None) + + response = self.client.get(sequence.url()) + self.assertEqual(response.status_code, 401) + + def test_empty_database(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.get(sequence.url()) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), []) + + def test_fetch(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + save_nested_objects(sequence.model(**sequence.object)) + + response = self.client.get(sequence.url()) + self.assertDictEqual( + clean_test_json(sequence.object), + clean_api_json(response.json()[0], sequence) + ) + + def test_fetch_by_id(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + saved = sequence.model(**sequence.object) + save_nested_objects(saved) + + response = self.client.get(f"{sequence.url()}/{saved.id}") + self.assertDictEqual( + clean_test_json(sequence.object), + clean_api_json(response.json(), sequence) + ) + + def test_fetch_non_existing(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.get(f"{sequence.url()}/42") + self.assertEqual(response.status_code, 404) + self.assertDictEqual(response.json(), {'detail': 'Not found.'}) + + def test_creation(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + save_nested_objects(sequence.model(**sequence.object), False) + data = clean_test_json(sequence.object.copy()) + response = self.client.post(sequence.url(), data=data) + + self.assertEqual(response.status_code, 201) + self.assertDictEqual( + clean_api_json(response.json(), sequence), + clean_test_json(sequence.object) + ) + + def test_creation_missing_field(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + save_nested_objects(sequence.model(**sequence.object), False) + data = clean_test_json(sequence.object.copy()) + + for field in sequence.model._meta.get_fields(): + with self.subTest(field=field): + if field.null or field.name in sequence.ignored_fields + ("id",): + continue + + test_data = data.copy() + del test_data[field.name] + + response = self.client.post(sequence.url(), data=test_data) + self.assertEqual(response.status_code, 400) + + def test_deletion(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + saved = 
sequence.model(**sequence.object) + save_nested_objects(saved) + + response = self.client.delete(f"{sequence.url()}/{saved.id}") + self.assertEqual(response.status_code, 204) + + def test_deletion_non_existing(self) -> None: + for name, sequence in get_test_sequences().items(): + with self.subTest(name=name): + sequence.model.objects.all().delete() + + response = self.client.delete(f"{sequence.url()}/42") + self.assertEqual(response.status_code, 404) + + def test_reject_invalid_ping(self) -> None: + url = reverse('bot:filteroverride-list', host='api') + data = { + "ping_type": ["invalid"] + } + + response = self.client.post(url, data=data) + + self.assertEqual(response.status_code, 400) + self.assertDictEqual(response.json(), {'ping_type': ["'invalid' isn't a valid ping type."]}) diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 5c9ddea4..c8f4e1b1 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -7,6 +7,9 @@ from django.utils import timezone from pydis_site.apps.api.models import ( DeletedMessage, DocumentationLink, + Filter, + FilterList, + FilterSettings, Infraction, Message, MessageDeletionContext, @@ -106,6 +109,17 @@ class StringDunderMethodTests(SimpleTestCase): DocumentationLink( 'test', 'http://example.com', 'http://example.com' ), + FilterList( + name="forbidden_duckies", + list_type=0, + default_settings=FilterSettings() + ), + Filter( + content="ducky_nsfw", + description="This ducky is totally inappropriate!", + additional_field=None, + override=None + ), OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, -- cgit v1.2.3 From 71a5e0d854c587ca2ae70aaec80f1110ea8800e5 Mon Sep 17 00:00:00 2001 From: Matteo Bertucci Date: Tue, 27 Apr 2021 16:41:18 +0200 Subject: Filters: allowed_category -> allowed_categories --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/serializers.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 6 +++--- pydis_site/apps/api/viewsets/bot/filters.py | 8 ++++---- 5 files changed, 11 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index de75e677..eb55e329 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -33,7 +33,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=True ) default_range.save() @@ -85,7 +85,7 @@ class Migration(migrations.Migration): ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_category', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('default', models.BooleanField()), ], ), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 869f947c..c2f776d3 
100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -142,7 +142,7 @@ class ChannelRange(models.Model): disallowed_channels = ArrayField(models.IntegerField()) disallowed_categories = ArrayField(models.IntegerField()) allowed_channels = ArrayField(models.IntegerField()) - allowed_category = ArrayField(models.IntegerField()) + allowed_categories = ArrayField(models.IntegerField()) default = models.BooleanField() diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 306dccb3..54acf366 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -180,7 +180,7 @@ class FilterChannelRangeSerializer(ModelSerializer): 'disallowed_channels', 'disallowed_categories', 'allowed_channels', - 'allowed_category', + 'allowed_categories', 'default' ) diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index de78ecfd..f38f3659 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -62,7 +62,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=False ) ) @@ -89,7 +89,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_category=[], + allowed_categories=[], default=False ) } @@ -111,7 +111,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "disallowed_channels": [1234], "disallowed_categories": [5678], "allowed_channels": [9101], - "allowed_category": [1121], + "allowed_categories": [1121], "default": True } ), diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index fea53265..e290fc65 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -350,7 +350,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... }, ... ... @@ -369,7 +369,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } @@ -385,7 +385,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } @@ -402,7 +402,7 @@ class FilterChannelRangeViewSet(ModelViewSet): ... "disallowed_channels": [], ... "disallowed_categories": [], ... "allowed_channels": [], - ... "allowed_category": [], + ... "allowed_categories": [], ... "default": True ... } -- cgit v1.2.3 From d48d5ddbaa6b068d3a24f55ee7c8f3760006f04b Mon Sep 17 00:00:00 2001 From: kosayoda Date: Sun, 11 Jul 2021 14:46:16 +0800 Subject: Improve help text message. 
--- pydis_site/apps/api/models/bot/filters.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index c2f776d3..a2d3af6a 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -46,7 +46,7 @@ class FilterList(models.Model): name = models.CharField(max_length=50, help_text="The unique name of this list.") list_type = models.IntegerField( choices=FilterListType.choices, - help_text="Whenever this list is an allowlist or denylist" + help_text="Whether this list is an allowlist or denylist" ) filters = models.ManyToManyField("Filter", help_text="The content of this list.", default=[]) @@ -75,31 +75,31 @@ class FilterSettings(models.Model): validators=(validate_ping_field,), help_text="Who to ping when this filter triggers." ) - filter_dm = models.BooleanField(help_text="Whenever DMs should be filtered.") + filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.") dm_ping_type = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM." ) delete_messages = models.BooleanField( - help_text="Whenever this filter should delete messages triggering it." + help_text="Whether this filter should delete messages triggering it." ) bypass_roles = ArrayField( models.BigIntegerField(), help_text="Roles and users who can bypass this filter." ) enabled = models.BooleanField( - help_text="Whenever ths filter is currently enabled." + help_text="Whether this filter is currently enabled." ) default_action = models.ForeignKey( "FilterAction", models.CASCADE, - help_text="The default action to perform." + help_text="What action to perform on the triggering user." ) default_range = models.ForeignKey( "ChannelRange", models.CASCADE, - help_text="Where does this filter apply." + help_text="The channels and categories in which this filter applies." ) -- cgit v1.2.3 From 6694ac4159c6d0f17451997df7f20b1363952ef3 Mon Sep 17 00:00:00 2001 From: kosayoda Date: Sun, 11 Jul 2021 15:58:32 +0800 Subject: Fix faulty model enumeration. This also allows us to simplify the str dunder for a FilterList. --- pydis_site/apps/api/models/bot/filters.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index a2d3af6a..6f35bfb0 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -9,8 +9,8 @@ from django.db.models import UniqueConstraint class FilterListType(models.IntegerChoices): """Choice between allow or deny for a list type.""" - ALLOW: 1 - DENY: 0 + ALLOW = 1 + DENY = 0 class InfractionType(models.TextChoices): @@ -64,7 +64,7 @@ class FilterList(models.Model): ) def __str__(self) -> str: - return f"Filter {'allow' if self.list_type == 1 else 'deny'}list {self.name!r}" + return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" class FilterSettings(models.Model): -- cgit v1.2.3 From 1095346a1f86e43d5d5c39045a54354d1290fe0e Mon Sep 17 00:00:00 2001 From: kosayoda Date: Sun, 11 Jul 2021 16:35:41 +0800 Subject: Improve name of dm sent to triggered user. 
--- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/serializers.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 6 +++--- pydis_site/apps/api/viewsets/bot/filters.py | 8 ++++---- 5 files changed, 11 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index eb55e329..8580033a 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -23,7 +23,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: objects = filter_list_old.objects.filter(type=name) default_action = filter_action.objects.create( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -102,7 +102,7 @@ class Migration(migrations.Migration): name='FilterAction', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user_dm', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 6f35bfb0..b5c80bda 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -106,7 +106,7 @@ class FilterSettings(models.Model): class FilterAction(models.Model): """The action to take when a filter is triggered.""" - user_dm = models.CharField( + dm_content = models.CharField( max_length=1000, null=True, help_text="The DM to send to a user triggering this filter." 
diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 54acf366..584d1f22 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -165,7 +165,7 @@ class FilterActionSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterAction - fields = ('id', 'user_dm', 'infraction_type', 'infraction_reason', 'infraction_duration') + fields = ('id', 'dm_content', 'infraction_type', 'infraction_reason', 'infraction_duration') class FilterChannelRangeSerializer(ModelSerializer): diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f38f3659..2df671e0 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -53,7 +53,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: bypass_roles=[], enabled=False, default_action=FilterAction( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -80,7 +80,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "bypass_roles": [123456], "enabled": True, "default_action": FilterAction( - user_dm=None, + dm_content=None, infraction_type=None, infraction_reason="", infraction_duration=None @@ -98,7 +98,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: FilterAction, "filteraction", { - "user_dm": "This is a DM message.", + "dm_content": "This is a DM message.", "infraction_type": "Mute", "infraction_reason": "Too long beard", "infraction_duration": "1 02:03:00" diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index e290fc65..9553fcac 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -264,7 +264,7 @@ class FilterActionViewSet(ModelViewSet): >>> [ ... { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -282,7 +282,7 @@ class FilterActionViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -297,7 +297,7 @@ class FilterActionViewSet(ModelViewSet): #### Request body >>> { - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" @@ -313,7 +313,7 @@ class FilterActionViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, - ... "user_dm": "message", + ... "dm_content": "message", ... "infraction_type": "Warn", ... "infraction_reason": "", ... "infraction_duration": "01 12:34:56.123456" -- cgit v1.2.3 From 9ec355955895d5b26ce99aade3c0c6ccf913e6a4 Mon Sep 17 00:00:00 2001 From: kosayoda Date: Sun, 11 Jul 2021 16:40:34 +0800 Subject: Migrate misc field names and help text changes. 
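The migration below is auto-generated by Django 3.0.14; as a minimal sketch (assuming DJANGO_SETTINGS_MODULE points at the project settings and the updated models in pydis_site.apps.api are importable), it can be reproduced through Django's management API:

import django
from django.core.management import call_command

django.setup()  # needs DJANGO_SETTINGS_MODULE set first
call_command("makemigrations", "api")      # writes e.g. 0071_auto_<timestamp>.py
call_command("sqlmigrate", "api", "0071")  # prints the SQL for review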
--- .../apps/api/migrations/0071_auto_20210711_0839.py | 44 ++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0071_auto_20210711_0839.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py b/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py new file mode 100644 index 00000000..e1c45fb6 --- /dev/null +++ b/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py @@ -0,0 +1,44 @@ +# Generated by Django 3.0.14 on 2021-07-11 08:39 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0070_new_filter_schema'), + ] + + operations = [ + migrations.AlterField( + model_name='filterlist', + name='list_type', + field=models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist'), + ), + migrations.AlterField( + model_name='filtersettings', + name='default_action', + field=models.ForeignKey(help_text='What action to perform on the triggering user.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction'), + ), + migrations.AlterField( + model_name='filtersettings', + name='default_range', + field=models.ForeignKey(help_text='The channels and categories in which this filter applies.', on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange'), + ), + migrations.AlterField( + model_name='filtersettings', + name='delete_messages', + field=models.BooleanField(help_text='Whether this filter should delete messages triggering it.'), + ), + migrations.AlterField( + model_name='filtersettings', + name='enabled', + field=models.BooleanField(help_text='Whether this filter is currently enabled.'), + ), + migrations.AlterField( + model_name='filtersettings', + name='filter_dm', + field=models.BooleanField(help_text='Whether DMs should be filtered.'), + ), + ] -- cgit v1.2.3 From b082de6662e1b57f6831d219b44d95f93ed8a884 Mon Sep 17 00:00:00 2001 From: kosayoda Date: Fri, 23 Jul 2021 18:58:35 +0800 Subject: Correct Filter-FilterList relationship. Instead of a many-many relationship, one filterlist has multiple filters. Nested serialization is read-only by default, so not all CRUD methods are implemented yet for the FilterList viewset. 
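A condensed sketch of the shape this commit moves to, mirroring the ForeignKey and the read-only nested serializer in the diff below (field set trimmed; it assumes a configured Django app, so it is illustrative rather than drop-in):

from django.db import models
from rest_framework.serializers import ModelSerializer


class FilterList(models.Model):
    name = models.CharField(max_length=50)


class Filter(models.Model):
    content = models.CharField(max_length=100)
    # One FilterList owns many Filters; the reverse accessor is `filters`.
    filter_list = models.ForeignKey(
        FilterList, models.CASCADE, related_name="filters"
    )


class FilterSerializer(ModelSerializer):
    class Meta:
        model = Filter
        fields = ("id", "content", "filter_list")


class FilterListSerializer(ModelSerializer):
    # Nested serializers are read-only by default: a FilterList response
    # embeds its filters, but they cannot be written through this serializer.
    filters = FilterSerializer(many=True, read_only=True)

    class Meta:
        model = FilterList
        fields = ("id", "name", "filters")

Writable nested filters would need an explicit create()/update() on the list serializer, which is why the FilterList viewset is reduced to GET/DELETE below.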
--- .../apps/api/migrations/0070_new_filter_schema.py | 11 ++-- pydis_site/apps/api/models/bot/filters.py | 6 +- pydis_site/apps/api/serializers.py | 22 ++++---- pydis_site/apps/api/tests/test_filters.py | 29 +++++++++- pydis_site/apps/api/viewsets/bot/filters.py | 64 +++++++--------------- 5 files changed, 68 insertions(+), 64 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index 8580033a..237ce7d7 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -54,17 +54,14 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: list_type=1 if type_ == "ALLOW" else 0 ) - new_objects = [] for object_ in objects: new_object = filter_.objects.create( content=object_.content, + filter_list = list_, description=object_.comment or "", additional_field=None, override=None ) new_object.save() - new_objects.append(new_object) - - list_.filters.add(*new_objects) class Migration(migrations.Migration): @@ -143,7 +140,6 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[], help_text='Whenever this list is an allowlist or denylist')), ('default_settings', models.ForeignKey(help_text='Default parameters of this list.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterSettings')), - ('filters', models.ManyToManyField(help_text='The content of this list.', to='api.Filter', default=[])), ], ), migrations.AddField( @@ -151,6 +147,11 @@ class Migration(migrations.Migration): name='override', field=models.ForeignKey(help_text='Override the default settings.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='api.FilterOverride'), ), + migrations.AddField( + model_name='filter', + name='filter_list', + field=models.ForeignKey(help_text='The filter list containing this filter.', on_delete=django.db.models.deletion.CASCADE, related_name='filters', to='api.FilterList'), + ), migrations.AddConstraint( model_name='filterlist', constraint=models.UniqueConstraint(fields=('name', 'list_type'), name='unique_name_type'), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index b5c80bda..99d6d5e4 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -48,8 +48,6 @@ class FilterList(models.Model): choices=FilterListType.choices, help_text="Whether this list is an allowlist or denylist" ) - - filters = models.ManyToManyField("Filter", help_text="The content of this list.", default=[]) default_settings = models.ForeignKey( "FilterSettings", models.CASCADE, @@ -152,6 +150,10 @@ class Filter(models.Model): content = models.CharField(max_length=100, help_text="The definition of this filter.") description = models.CharField(max_length=200, help_text="Why this filter has been added.") additional_field = models.BooleanField(null=True, help_text="Implementation specific field.") + filter_list = models.ForeignKey( + FilterList, models.CASCADE, related_name="filters", + help_text="The filter list containing this filter." 
+ ) override = models.ForeignKey( "FilterOverride", models.SET_NULL, diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 584d1f22..afcf4d55 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -117,9 +117,21 @@ class DocumentationLinkSerializer(ModelSerializer): fields = ('package', 'base_url', 'inventory_url') +class FilterSerializer(ModelSerializer): + """A class providing (de-)serialization of `Filter` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = Filter + fields = ('id', 'content', 'description', 'additional_field', 'filter_list', 'override') + + class FilterListSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterList` instances.""" + filters = FilterSerializer(many=True, read_only=True) + class Meta: """Metadata defined for the Django REST Framework.""" @@ -185,16 +197,6 @@ class FilterChannelRangeSerializer(ModelSerializer): ) -class FilterSerializer(ModelSerializer): - """A class providing (de-)serialization of `Filter` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = Filter - fields = ('id', 'content', 'description', 'additional_field', 'override') - - class FilterOverrideSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterOverride` instances.""" diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 2df671e0..f694053d 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -32,7 +32,7 @@ FK_FIELDS: Dict[Type[Model], Tuple[str]] = { FilterSettings: ("default_action", "default_range"), FilterAction: (), ChannelRange: (), - Filter: (), + Filter: ("filter_list",), FilterOverride: ("filter_action", "filter_range") } @@ -122,7 +122,32 @@ def get_test_sequences() -> Dict[str, TestSequence]: "content": "bad word", "description": "This is a really bad word.", "additional_field": None, - "override": None + "override": None, + "filter_list": FilterList( + name="testname", + list_type=0, + default_settings=FilterSettings( + ping_type=[], + filter_dm=False, + dm_ping_type=[], + delete_messages=False, + bypass_roles=[], + enabled=False, + default_action=FilterAction( + dm_content=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None + ), + default_range=ChannelRange( + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_categories=[], + default=False + ) + ) + ) } ), "filter_override": TestSequence( diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 9553fcac..1b893f8c 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -20,7 +20,7 @@ from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the fil class FilterListViewSet(ModelViewSet): """ - View providing CRUD operations on lists of items allowed or denied by our bot. + View providing GET/DELETE on lists of items allowed or denied by our bot. ## Routes ### GET /bot/filter/filter_lists @@ -33,8 +33,14 @@ class FilterListViewSet(ModelViewSet): ... "name": "guild_invite", ... "list_type": 1, ... "filters": [ - ... 1, - ... 2, + ... { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1, + ... "filter_list": 1 + ... }, ... ... ... ], ... 
"default_settings": 1 @@ -55,8 +61,14 @@ class FilterListViewSet(ModelViewSet): ... "name": "guild_invite", ... "list_type": 1, ... "filters": [ - ... 1, - ... 2, + ... { + ... "id": 1, + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "override": 1, + ... "filter_list": 1 + ... }, ... ... ... ], ... "default_settings": 1 @@ -66,45 +78,6 @@ class FilterListViewSet(ModelViewSet): - 200: returned on success - 404: returned if the id was not found. - ### POST /bot/filter/filter_lists - Adds a single FilterList item to the database. - - #### Request body - >>> { - ... "name": "guild_invite", - ... "list_type": 1, - ... "filters": [ - ... 1, - ... 2, - ... ... - ... ], - ... "default_settings": 1 - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/filter_lists/ - Updates a specific FilterList item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "name": "guild_invite", - ... "list_type": 1, - ... "filters": [ - ... 1, - ... 2, - ... ... - ... ], - ... "default_settings": 1 - ... } - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - ### DELETE /bot/filter/filter_lists/ Deletes the FilterList item with the given `id`. @@ -437,7 +410,8 @@ class FilterViewSet(ModelViewSet): ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "override": 1, + ... "filter_list": 1 ... }, ... ... ... ] -- cgit v1.2.3 From 98d36f6fce899680fa10177556f06cc5357eb675 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 25 Sep 2021 13:04:39 +0300 Subject: Remove one-to-one relationships from filters tables --- pydis_site/apps/api/models/bot/filters.py | 153 ++++++++++++------------------ 1 file changed, 63 insertions(+), 90 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 99d6d5e4..68ac191b 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,3 +1,4 @@ +from abc import abstractmethod from typing import List from django.contrib.postgres.fields import ArrayField @@ -5,6 +6,8 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models import UniqueConstraint +from pydis_site.apps.api.models.mixins import AbstractModelMeta + class FilterListType(models.IntegerChoices): """Choice between allow or deny for a list type.""" @@ -40,70 +43,40 @@ def validate_ping_field(value_list: List[str]) -> None: raise ValidationError(f"{value!r} isn't a valid ping type.") -class FilterList(models.Model): - """Represent a list in its allow or deny form.""" - - name = models.CharField(max_length=50, help_text="The unique name of this list.") - list_type = models.IntegerField( - choices=FilterListType.choices, - help_text="Whether this list is an allowlist or denylist" - ) - default_settings = models.ForeignKey( - "FilterSettings", - models.CASCADE, - help_text="Default parameters of this list." 
- ) - - class Meta: - """Constrain name and list_type unique.""" - - constraints = ( - UniqueConstraint(fields=("name", "list_type"), name="unique_name_type"), - ) - - def __str__(self) -> str: - return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" - +class FilterSettingsMixin(models.Model, metaclass=AbstractModelMeta): + """Mixin for settings of a filter list.""" -class FilterSettings(models.Model): - """Persistent settings of a filter list.""" + @staticmethod + @abstractmethod + def allow_null() -> bool: + """Abstract property for allowing null values.""" ping_type = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), - help_text="Who to ping when this filter triggers." + help_text="Who to ping when this filter triggers.", + null=allow_null.__func__() ) - filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.") + filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) dm_ping_type = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), - help_text="Who to ping when this filter triggers on a DM." + help_text="Who to ping when this filter triggers on a DM.", + null=allow_null.__func__() ) delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it." + help_text="Whether this filter should delete messages triggering it.", + null=allow_null.__func__() ) bypass_roles = ArrayField( models.BigIntegerField(), - help_text="Roles and users who can bypass this filter." + help_text="Roles and users who can bypass this filter.", + null=allow_null.__func__() ) enabled = models.BooleanField( - help_text="Whether this filter is currently enabled." + help_text="Whether this filter is currently enabled.", + null=allow_null.__func__() ) - default_action = models.ForeignKey( - "FilterAction", - models.CASCADE, - help_text="What action to perform on the triggering user." - ) - default_range = models.ForeignKey( - "ChannelRange", - models.CASCADE, - help_text="The channels and categories in which this filter applies." - ) - - -class FilterAction(models.Model): - """The action to take when a filter is triggered.""" - dm_content = models.CharField( max_length=1000, null=True, @@ -124,27 +97,52 @@ class FilterAction(models.Model): help_text="The duration of the infraction. Null if permanent." ) - -class ChannelRange(models.Model): - """ - Where a filter should apply. - - The resolution is done in the following order: - - disallowed channels - - disallowed categories - - allowed categories - - allowed channels - - default - """ - + # Where a filter should apply. 
+ # + # The resolution is done in the following order: + # - disallowed channels + # - disallowed categories + # - allowed categories + # - allowed channels + # - default disallowed_channels = ArrayField(models.IntegerField()) disallowed_categories = ArrayField(models.IntegerField()) allowed_channels = ArrayField(models.IntegerField()) allowed_categories = ArrayField(models.IntegerField()) default = models.BooleanField() + class Meta: + """Metaclass for settings mixin.""" + + abstract = True -class Filter(models.Model): + +class FilterList(FilterSettingsMixin): + """Represent a list in its allow or deny form.""" + + name = models.CharField(max_length=50, help_text="The unique name of this list.") + list_type = models.IntegerField( + choices=FilterListType.choices, + help_text="Whether this list is an allowlist or denylist" + ) + + @staticmethod + def allow_null() -> bool: + """Do not allow null values for default settings.""" + return False + + class Meta: + """Constrain name and list_type unique.""" + + constraints = ( + UniqueConstraint(fields=("name", "list_type"), name="unique_name_type"), + ) + + def __str__(self) -> str: + return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" + + +class Filter(FilterSettingsMixin): """One specific trigger of a list.""" content = models.CharField(max_length=100, help_text="The definition of this filter.") @@ -154,36 +152,11 @@ class Filter(models.Model): FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." ) - override = models.ForeignKey( - "FilterOverride", - models.SET_NULL, - null=True, - help_text="Override the default settings." - ) def __str__(self) -> str: return f"Filter {self.content!r}" - -class FilterOverride(models.Model): - """ - Setting overrides of a specific filter. - - Any non-null value will override the default ones. 
- """ - - ping_type = ArrayField( - models.CharField(max_length=20), - validators=(validate_ping_field,), null=True - ) - filter_dm = models.BooleanField(null=True) - dm_ping_type = ArrayField( - models.CharField(max_length=20), - validators=(validate_ping_field,), - null=True - ) - delete_messages = models.BooleanField(null=True) - bypass_roles = ArrayField(models.IntegerField(), null=True) - enabled = models.BooleanField(null=True) - filter_action = models.ForeignKey("FilterAction", models.CASCADE, null=True) - filter_range = models.ForeignKey("ChannelRange", models.CASCADE, null=True) + @staticmethod + def allow_null() -> bool: + """Allow null values for overrides.""" + return True -- cgit v1.2.3 From 08a52168dd3b0a9a366f5ca68c10437b83af5cf1 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 25 Sep 2021 13:05:52 +0300 Subject: Remove old one-to-one filters relationships serializers, views and URLs --- pydis_site/apps/api/models/__init__.py | 4 - pydis_site/apps/api/models/bot/__init__.py | 2 +- pydis_site/apps/api/serializers.py | 84 +---- pydis_site/apps/api/urls.py | 20 -- pydis_site/apps/api/viewsets/__init__.py | 4 - pydis_site/apps/api/viewsets/bot/__init__.py | 6 +- pydis_site/apps/api/viewsets/bot/filters.py | 450 +-------------------------- 7 files changed, 15 insertions(+), 555 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index 72f59b57..63087990 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -1,11 +1,7 @@ # flake8: noqa from .bot import ( FilterList, - FilterSettings, - FilterAction, - ChannelRange, Filter, - FilterOverride, BotSetting, DocumentationLink, DeletedMessage, diff --git a/pydis_site/apps/api/models/bot/__init__.py b/pydis_site/apps/api/models/bot/__init__.py index 1bfe0063..9ba763a4 100644 --- a/pydis_site/apps/api/models/bot/__init__.py +++ b/pydis_site/apps/api/models/bot/__init__.py @@ -1,5 +1,5 @@ # flake8: noqa -from .filters import FilterList, FilterSettings, FilterAction, ChannelRange, Filter, FilterOverride +from .filters import FilterList, Filter from .bot_setting import BotSetting from .deleted_message import DeletedMessage from .documentation_link import DocumentationLink diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index afcf4d55..ff2bd929 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -18,11 +18,7 @@ from .models import ( # noqa: I101 - Preserving the filter order DocumentationLink, Infraction, FilterList, - FilterSettings, - FilterAction, - ChannelRange, Filter, - FilterOverride, MessageDeletionContext, Nomination, NominationEntry, @@ -136,7 +132,18 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'name', 'list_type', 'filters', 'default_settings') + fields = ( + 'id', + 'name', + 'list_type', + 'filters', + 'ping_type', + 'filter_dm', + 'dm_ping_type', + 'delete_messages', + 'bypass_roles', + '' + ) # Ensure that we can only have one filter list with the same name and field validators = [ @@ -150,73 +157,6 @@ class FilterListSerializer(ModelSerializer): ] -class FilterSettingsSerializer(ModelSerializer): - """A class providing (de-)serialization of `FilterSettings` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = FilterSettings - fields = ( - 
'id', - 'ping_type', - 'filter_dm', - 'dm_ping_type', - 'delete_messages', - 'bypass_roles', - 'enabled', - 'default_action', - 'default_range' - ) - - -class FilterActionSerializer(ModelSerializer): - """A class providing (de-)serialization of `FilterAction` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = FilterAction - fields = ('id', 'dm_content', 'infraction_type', 'infraction_reason', 'infraction_duration') - - -class FilterChannelRangeSerializer(ModelSerializer): - """A class providing (de-)serialization of `ChannelRange` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = ChannelRange - fields = ( - 'id', - 'disallowed_channels', - 'disallowed_categories', - 'allowed_channels', - 'allowed_categories', - 'default' - ) - - -class FilterOverrideSerializer(ModelSerializer): - """A class providing (de-)serialization of `FilterOverride` instances.""" - - class Meta: - """Metadata defined for the Django REST Framework.""" - - model = FilterOverride - fields = ( - 'id', - 'ping_type', - 'filter_dm', - 'dm_ping_type', - 'delete_messages', - 'bypass_roles', - 'enabled', - 'filter_action', - 'filter_range' - ) - - class InfractionSerializer(ModelSerializer): """A class providing (de-)serialization of `Infraction` instances.""" diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py index 7af2e505..4e8edaf0 100644 --- a/pydis_site/apps/api/urls.py +++ b/pydis_site/apps/api/urls.py @@ -7,11 +7,7 @@ from .viewsets import ( # noqa: I101 - Preserving the filter order DeletedMessageViewSet, DocumentationLinkViewSet, FilterListViewSet, - FilterSettingsViewSet, - FilterActionViewSet, - FilterChannelRangeViewSet, FilterViewSet, - FilterOverrideViewSet, InfractionViewSet, NominationViewSet, OffTopicChannelNameViewSet, @@ -27,22 +23,6 @@ bot_router.register( 'filter/filter_lists', FilterListViewSet ) -bot_router.register( - 'filter/filter_settings', - FilterSettingsViewSet -) -bot_router.register( - 'filter/filter_action', - FilterActionViewSet -) -bot_router.register( - 'filter/channel_range', - FilterChannelRangeViewSet -) -bot_router.register( - 'filter/filter_override', - FilterOverrideViewSet -) bot_router.register( 'filter/filters', FilterViewSet diff --git a/pydis_site/apps/api/viewsets/__init__.py b/pydis_site/apps/api/viewsets/__init__.py index b3992d66..4cf4c655 100644 --- a/pydis_site/apps/api/viewsets/__init__.py +++ b/pydis_site/apps/api/viewsets/__init__.py @@ -5,11 +5,7 @@ from .bot import ( DocumentationLinkViewSet, InfractionViewSet, FilterListViewSet, - FilterSettingsViewSet, - FilterActionViewSet, - FilterChannelRangeViewSet, FilterViewSet, - FilterOverrideViewSet, NominationViewSet, OffensiveMessageViewSet, OffTopicChannelNameViewSet, diff --git a/pydis_site/apps/api/viewsets/bot/__init__.py b/pydis_site/apps/api/viewsets/bot/__init__.py index 781624bd..4649fcde 100644 --- a/pydis_site/apps/api/viewsets/bot/__init__.py +++ b/pydis_site/apps/api/viewsets/bot/__init__.py @@ -1,11 +1,7 @@ # flake8: noqa from .filters import ( FilterListViewSet, - FilterSettingsViewSet, - FilterActionViewSet, - FilterChannelRangeViewSet, - FilterViewSet, - FilterOverrideViewSet + FilterViewSet ) from .bot_setting import BotSettingViewSet from .deleted_message import DeletedMessageViewSet diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 1b893f8c..5b21de26 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ 
b/pydis_site/apps/api/viewsets/bot/filters.py @@ -2,19 +2,11 @@ from rest_framework.viewsets import ModelViewSet from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order FilterList, - FilterSettings, - FilterAction, - ChannelRange, - Filter, - FilterOverride + Filter ) from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the filter order FilterListSerializer, - FilterSettingsSerializer, - FilterActionSerializer, - FilterChannelRangeSerializer, FilterSerializer, - FilterOverrideSerializer ) @@ -90,311 +82,6 @@ class FilterListViewSet(ModelViewSet): queryset = FilterList.objects.all() -class FilterSettingsViewSet(ModelViewSet): - """ - View providing CRUD operations on settings of items allowed or denied by our bot. - - ## Routes - ### GET /bot/filter/filter_settings - Returns all FilterSettings items in the database. - - #### Response format - >>> [ - ... { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "default_action": 1, - ... "default_range": 1 - ... }, - ... ... - ... ] - - #### Status codes - - 200: returned on success - - 401: returned if unauthenticated - - ### GET /bot/filter/filter_settings/ - Returns a specific FilterSettings item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "default_action": 1, - ... "default_range": 1 - ... } - - #### Status codes - - 200: returned on success - - 404: returned if the id was not found. - - ### POST /bot/filter/filter_settings - Adds a single FilterSettings item to the database. - - #### Request body - >>> { - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "default_action": 1, - ... "default_range": 1 - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/filter_settings/ - Updates a specific FilterSettings item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "default_action": 1, - ... "default_range": 1 - ... } - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - - ### DELETE /bot/filter/filter_settings/ - Deletes the FilterSettings item with the given `id`. - - #### Status codes - - 204: returned on success - - 404: if a tag with the given `id` does not exist - """ - - serializer_class = FilterSettingsSerializer - queryset = FilterSettings.objects.all() - - -class FilterActionViewSet(ModelViewSet): - """ - View providing CRUD operations on actions taken by items allowed or denied by our bot. 
- - ## Routes - ### GET /bot/filter/filter_action - Returns all FilterAction items in the database. - - #### Response format - >>> [ - ... { - ... "id": 1, - ... "dm_content": "message", - ... "infraction_type": "Warn", - ... "infraction_reason": "", - ... "infraction_duration": "01 12:34:56.123456" - ... }, - ... ... - ... ] - - #### Status codes - - 200: returned on success - - 401: returned if unauthenticated - - ### GET /bot/filter/filter_action/ - Returns a specific FilterAction item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "dm_content": "message", - ... "infraction_type": "Warn", - ... "infraction_reason": "", - ... "infraction_duration": "01 12:34:56.123456" - ... } - - #### Status codes - - 200: returned on success - - 404: returned if the id was not found. - - ### POST /bot/filter/filter_action - Adds a single FilterAction item to the database. - - #### Request body - >>> { - ... "dm_content": "message", - ... "infraction_type": "Warn", - ... "infraction_reason": "", - ... "infraction_duration": "01 12:34:56.123456" - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/filter_action/ - Updates a specific FilterAction item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "dm_content": "message", - ... "infraction_type": "Warn", - ... "infraction_reason": "", - ... "infraction_duration": "01 12:34:56.123456" - ... } - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - - ### DELETE /bot/filter/filter_action/ - Deletes the FilterAction item with the given `id`. - - #### Status codes - - 204: returned on success - - 404: if a tag with the given `id` does not exist - """ - - serializer_class = FilterActionSerializer - queryset = FilterAction.objects.all() - - -class FilterChannelRangeViewSet(ModelViewSet): - """ - View providing CRUD operations on channels targeted by items allowed or denied by our bot. - - ## Routes - ### GET /bot/filter/channel_range - Returns all ChannelRange items in the database. - - #### Response format - >>> [ - ... { - ... "id": 1, - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [], - ... "default": True - ... }, - ... ... - ... ] - - #### Status codes - - 200: returned on success - - 401: returned if unauthenticated - - ### GET /bot/filter/channel_range/ - Returns a specific ChannelRange item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [], - ... "default": True - ... } - - #### Status codes - - 200: returned on success - - 404: returned if the id was not found. - - ### POST /bot/filter/channel_range - Adds a single ChannelRange item to the database. - - #### Request body - >>> { - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [], - ... "default": True - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/channel_range/ - Updates a specific ChannelRange item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [], - ... "default": True - ... 
} - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - - ### DELETE /bot/filter/channel_range/ - Deletes the ChannelRange item with the given `id`. - - #### Status codes - - 204: returned on success - - 404: if a tag with the given `id` does not exist - """ - - serializer_class = FilterChannelRangeSerializer - queryset = ChannelRange.objects.all() - - class FilterViewSet(ModelViewSet): """ View providing CRUD operations on items allowed or denied by our bot. @@ -477,138 +164,3 @@ class FilterViewSet(ModelViewSet): serializer_class = FilterSerializer queryset = Filter.objects.all() - - -class FilterOverrideViewSet(ModelViewSet): - """ - View providing CRUD operations setting overrides of items allowed or denied by our bot. - - ## Routes - ### GET /bot/filter/filter_override - Returns all FilterOverride items in the database. - - #### Response format - >>> [ - ... { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "filter_action": 1, - ... "filter_range": 1 - ... }, - ... ... - ... ] - - #### Status codes - - 200: returned on success - - 401: returned if unauthenticated - - ### GET /bot/filter/filter_override/ - Returns a specific FilterOverride item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "filter_action": 1, - ... "filter_range": 1 - ... } - - #### Status codes - - 200: returned on success - - 404: returned if the id was not found. - - ### POST /bot/filter/filter_override - Adds a single FilterOverride item to the database. - - #### Request body - >>> { - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "filter_action": 1, - ... "filter_range": 1 - ... } - - #### Status codes - - 201: returned on success - - 400: if one of the given fields is invalid - - ### PATCH /bot/filter/filter_override/ - Updates a specific FilterOverride item from the database. - - #### Response format - >>> { - ... "id": 1, - ... "ping_type": [ - ... "onduty", - ... ... - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty", - ... ... - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601, - ... ... - ... ], - ... "enabled": True, - ... "filter_action": 1, - ... "filter_range": 1 - ... } - - #### Status codes - - 200: returned on success - - 400: if one of the given fields is invalid - - ### DELETE /bot/filter/filter_override/ - Deletes the FilterOverride item with the given `id`. 
- - #### Status codes - - 204: returned on success - - 404: if a tag with the given `id` does not exist - """ - - serializer_class = FilterOverrideSerializer - queryset = FilterOverride.objects.all() -- cgit v1.2.3 From faf1948eb39f0389633a6f86f2d4e406f6e83b74 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 25 Sep 2021 13:06:20 +0300 Subject: Add AbstractModelMeta mixin --- pydis_site/apps/api/models/mixins.py | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/mixins.py b/pydis_site/apps/api/models/mixins.py index 5d75b78b..d32e6e72 100644 --- a/pydis_site/apps/api/models/mixins.py +++ b/pydis_site/apps/api/models/mixins.py @@ -1,3 +1,4 @@ +from abc import ABCMeta from operator import itemgetter from django.db import models @@ -29,3 +30,7 @@ class ModelTimestampMixin(models.Model): """Metaconfig for the mixin.""" abstract = True + + +class AbstractModelMeta(ABCMeta, type(models.Model)): + """Metaclass for ABCModel class.""" -- cgit v1.2.3 From 679472436bbb6250fab91d333c3e6fe3a20dea90 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 25 Sep 2021 13:06:38 +0300 Subject: Update filters migrations --- .../apps/api/migrations/0070_new_filter_schema.py | 129 +++++++++------------ .../apps/api/migrations/0071_auto_20210711_0839.py | 44 ------- 2 files changed, 53 insertions(+), 120 deletions(-) delete mode 100644 pydis_site/apps/api/migrations/0071_auto_20210711_0839.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index 237ce7d7..7925f5ff 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -14,7 +14,6 @@ OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_N def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") - filter_settings: pydis_site.apps.api.models.FilterSettings = apps.get_model("api", "FilterSettings") channel_range: pydis_site.apps.api.models.ChannelRange = apps.get_model("api", "ChannelRange") filter_action: pydis_site.apps.api.models.FilterAction = apps.get_model("api", "FilterAction") filter_list_old = apps.get_model("api", "FilterListOld") @@ -22,44 +21,47 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: for name, type_ in OLD_LIST_NAMES: objects = filter_list_old.objects.filter(type=name) - default_action = filter_action.objects.create( + list_ = filter_list.objects.create( + name=name.lower(), + list_type=1 if type_ == "ALLOW" else 0, + ping_type=["onduty"], + filter_dm=True, + dm_ping_type=["onduty"], + delete_messages=True, + bypass_roles=[267630620367257601], + enabled=False, dm_content=None, infraction_type=None, infraction_reason="", - infraction_duration=None - ) - default_action.save() - default_range = channel_range.objects.create( + infraction_duration=None, disallowed_channels=[], disallowed_categories=[], allowed_channels=[], allowed_categories=[], default=True ) - default_range.save() - default_settings = filter_settings.objects.create( - ping_type=["onduty"], - filter_dm=True, - dm_ping_type=["onduty"], - delete_messages=True, - bypass_roles=[267630620367257601], - enabled=False, - 
default_action=default_action, - default_range=default_range - ) - default_settings.save() - list_ = filter_list.objects.create( - name=name.lower(), - default_settings=default_settings, - list_type=1 if type_ == "ALLOW" else 0 - ) for object_ in objects: new_object = filter_.objects.create( content=object_.content, - filter_list = list_, + filter_list=list_, description=object_.comment or "", - additional_field=None, override=None + additional_field=None, + ping_type=None, + filter_dm=None, + dm_ping_type=None, + delete_messages=None, + bypass_roles=None, + enabled=None, + dm_content=None, + infraction_type=None, + infraction_reason="", + infraction_duration=None, + disallowed_channels=[], + disallowed_categories=[], + allowed_channels=[], + allowed_categories=[], + default=False ) new_object.save() @@ -75,17 +77,6 @@ class Migration(migrations.Migration): old_name='FilterList', new_name='FilterListOld' ), - migrations.CreateModel( - name='ChannelRange', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('default', models.BooleanField()), - ], - ), migrations.CreateModel( name='Filter', fields=[ @@ -93,60 +84,46 @@ class Migration(migrations.Migration): ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200)), ('additional_field', models.BooleanField(help_text='Implementation specific field.', null=True)), - ], - ), - migrations.CreateModel( - name='FilterAction', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), + ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None, null=True)), + ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), ('infraction_reason', 
models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), + ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('default', models.BooleanField()), ], ), migrations.CreateModel( - name='FilterSettings', + name='FilterList', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), + ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('filter_dm', models.BooleanField(help_text='Whenever DMs should be filtered.')), + ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('delete_messages', models.BooleanField(help_text='Whenever this filter should delete messages triggering it.')), + ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None)), - ('enabled', models.BooleanField(help_text='Whenever ths filter is currently enabled.')), - ('default_action', models.ForeignKey(help_text='The default action to perform.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), - ('default_range', models.ForeignKey(help_text='Where does this filter apply.', on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), - ], - ), - migrations.CreateModel( - name='FilterOverride', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), null=True, size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('filter_dm', models.BooleanField(null=True)), - ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), null=True, size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('delete_messages', models.BooleanField(null=True)), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), - ('enabled', models.BooleanField(null=True)), - ('filter_action', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction')), - ('filter_range', 
models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange')), - ], - ), - migrations.CreateModel( - name='FilterList', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), - ('list_type', models.IntegerField(choices=[], help_text='Whenever this list is an allowlist or denylist')), - ('default_settings', models.ForeignKey(help_text='Default parameters of this list.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterSettings')), + ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), + ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('default', models.BooleanField()), ], ), - migrations.AddField( - model_name='filter', - name='override', - field=models.ForeignKey(help_text='Override the default settings.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='api.FilterOverride'), - ), migrations.AddField( model_name='filter', name='filter_list', diff --git a/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py b/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py deleted file mode 100644 index e1c45fb6..00000000 --- a/pydis_site/apps/api/migrations/0071_auto_20210711_0839.py +++ /dev/null @@ -1,44 +0,0 @@ -# Generated by Django 3.0.14 on 2021-07-11 08:39 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0070_new_filter_schema'), - ] - - operations = [ - migrations.AlterField( - model_name='filterlist', - name='list_type', - field=models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist'), - ), - migrations.AlterField( - model_name='filtersettings', - name='default_action', - field=models.ForeignKey(help_text='What action to perform on the triggering user.', on_delete=django.db.models.deletion.CASCADE, to='api.FilterAction'), - ), - migrations.AlterField( - model_name='filtersettings', - name='default_range', - field=models.ForeignKey(help_text='The channels and categories in which this filter applies.', on_delete=django.db.models.deletion.CASCADE, to='api.ChannelRange'), - ), - migrations.AlterField( - model_name='filtersettings', - name='delete_messages', - field=models.BooleanField(help_text='Whether this filter should delete messages triggering it.'), - ), - migrations.AlterField( - model_name='filtersettings', - 
name='enabled', - field=models.BooleanField(help_text='Whether this filter is currently enabled.'), - ), - migrations.AlterField( - model_name='filtersettings', - name='filter_dm', - field=models.BooleanField(help_text='Whether DMs should be filtered.'), - ), - ] -- cgit v1.2.3 From 75a4b0eb57520b247ecaa228440b1abbd6c65845 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Sat, 25 Sep 2021 13:08:01 +0300 Subject: Remove default field from FilterSettingsMixin and migration --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 8 ++------ pydis_site/apps/api/models/bot/filters.py | 2 -- 2 files changed, 2 insertions(+), 8 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index 7925f5ff..c1db2a07 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -37,8 +37,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_categories=[], - default=True + allowed_categories=[] ) for object_ in objects: @@ -60,8 +59,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disallowed_channels=[], disallowed_categories=[], allowed_channels=[], - allowed_categories=[], - default=False + allowed_categories=[] ) new_object.save() @@ -98,7 +96,6 @@ class Migration(migrations.Migration): ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('default', models.BooleanField()), ], ), migrations.CreateModel( @@ -121,7 +118,6 @@ class Migration(migrations.Migration): ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('default', models.BooleanField()), ], ), migrations.AddField( diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 68ac191b..365259e7 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -104,12 +104,10 @@ class FilterSettingsMixin(models.Model, metaclass=AbstractModelMeta): # - disallowed categories # - allowed categories # - allowed channels - # - default disallowed_channels = ArrayField(models.IntegerField()) disallowed_categories = ArrayField(models.IntegerField()) allowed_channels = ArrayField(models.IntegerField()) allowed_categories = ArrayField(models.IntegerField()) - default = models.BooleanField() class Meta: """Metaclass for settings mixin.""" -- cgit v1.2.3 From c5092f2895447b672dd9101a32997ce8a1c737e3 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Fri, 8 Oct 2021 19:21:36 +0300 Subject: Remove old models from migration --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 2 -- 1 file changed, 2 deletions(-) (limited to 'pydis_site/apps/api') diff --git 
a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index c1db2a07..aa114ca1 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -14,8 +14,6 @@ OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_N def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") - channel_range: pydis_site.apps.api.models.ChannelRange = apps.get_model("api", "ChannelRange") - filter_action: pydis_site.apps.api.models.FilterAction = apps.get_model("api", "FilterAction") filter_list_old = apps.get_model("api", "FilterListOld") for name, type_ in OLD_LIST_NAMES: -- cgit v1.2.3 From 25da18321e82f0a3cd18923d59d86b59acec160d Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Thu, 28 Oct 2021 19:46:07 +0300 Subject: Update filters API to actually work --- .../apps/api/migrations/0070_new_filter_schema.py | 16 +-- pydis_site/apps/api/models/bot/filters.py | 129 ++++++++++++--------- pydis_site/apps/api/models/mixins.py | 5 - pydis_site/apps/api/serializers.py | 58 ++++++--- 4 files changed, 123 insertions(+), 85 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index aa114ca1..a595bda2 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -54,10 +54,10 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: infraction_type=None, infraction_reason="", infraction_duration=None, - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[] + disallowed_channels=None, + disallowed_categories=None, + allowed_channels=None, + allowed_categories=None ) new_object.save() @@ -90,10 +90,10 @@ class Migration(migrations.Migration): ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), - ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), + ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), + ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), + ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), + ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), ], ), migrations.CreateModel( diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 365259e7..b9a081e6 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,4 +1,3 @@ -from abc import abstractmethod from typing import List from django.contrib.postgres.fields import ArrayField @@ -6,8 +5,6 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models import UniqueConstraint -from pydis_site.apps.api.models.mixins import AbstractModelMeta - class FilterListType(models.IntegerChoices): """Choice between allow or deny for a list type.""" @@ -43,40 +40,9 @@ def validate_ping_field(value_list: List[str]) -> None: raise ValidationError(f"{value!r} isn't a valid ping type.") -class FilterSettingsMixin(models.Model, metaclass=AbstractModelMeta): - """Mixin for settings of a filter list.""" - - @staticmethod - @abstractmethod - def allow_null() -> bool: - """Abstract property for allowing null values.""" +class FilterSettingsMixin(models.Model): + """Mixin for common settings of a filters and filter lists.""" - ping_type = ArrayField( - models.CharField(max_length=20), - validators=(validate_ping_field,), - help_text="Who to ping when this filter triggers.", - null=allow_null.__func__() - ) - filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) - dm_ping_type = ArrayField( - models.CharField(max_length=20), - validators=(validate_ping_field,), - help_text="Who to ping when this filter triggers on a DM.", - null=allow_null.__func__() - ) - delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it.", - null=allow_null.__func__() - ) - bypass_roles = ArrayField( - models.BigIntegerField(), - help_text="Roles and users who can bypass this filter.", - null=allow_null.__func__() - ) - enabled = models.BooleanField( - help_text="Whether this filter is currently enabled.", - null=allow_null.__func__() - ) dm_content = models.CharField( max_length=1000, null=True, @@ -97,18 +63,6 @@ class FilterSettingsMixin(models.Model, metaclass=AbstractModelMeta): help_text="The duration of the infraction. Null if permanent." ) - # Where a filter should apply. 
- # - # The resolution is done in the following order: - # - disallowed channels - # - disallowed categories - # - allowed categories - # - allowed channels - disallowed_channels = ArrayField(models.IntegerField()) - disallowed_categories = ArrayField(models.IntegerField()) - allowed_channels = ArrayField(models.IntegerField()) - allowed_categories = ArrayField(models.IntegerField()) - class Meta: """Metaclass for settings mixin.""" @@ -123,11 +77,43 @@ class FilterList(FilterSettingsMixin): choices=FilterListType.choices, help_text="Whether this list is an allowlist or denylist" ) - - @staticmethod - def allow_null() -> bool: - """Do not allow null values for default settings.""" - return False + ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers.", + null=False + ) + filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=False) + dm_ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers on a DM.", + null=False + ) + delete_messages = models.BooleanField( + help_text="Whether this filter should delete messages triggering it.", + null=False + ) + bypass_roles = ArrayField( + models.BigIntegerField(), + help_text="Roles and users who can bypass this filter.", + null=False + ) + enabled = models.BooleanField( + help_text="Whether this filter is currently enabled.", + null=False + ) + # Where a filter should apply. + # + # The resolution is done in the following order: + # - disallowed channels + # - disallowed categories + # - allowed categories + # - allowed channels + disallowed_channels = ArrayField(models.IntegerField()) + disallowed_categories = ArrayField(models.IntegerField()) + allowed_channels = ArrayField(models.IntegerField()) + allowed_categories = ArrayField(models.IntegerField()) class Meta: """Constrain name and list_type unique.""" @@ -150,11 +136,38 @@ class Filter(FilterSettingsMixin): FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." ) + ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers.", + null=True + ) + filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) + dm_ping_type = ArrayField( + models.CharField(max_length=20), + validators=(validate_ping_field,), + help_text="Who to ping when this filter triggers on a DM.", + null=True + ) + delete_messages = models.BooleanField( + help_text="Whether this filter should delete messages triggering it.", + null=True + ) + bypass_roles = ArrayField( + models.BigIntegerField(), + help_text="Roles and users who can bypass this filter.", + null=True + ) + enabled = models.BooleanField( + help_text="Whether this filter is currently enabled.", + null=True + ) + + # Check FilterList model for information about these properties. 
+ disallowed_channels = ArrayField(models.IntegerField(), null=True) + disallowed_categories = ArrayField(models.IntegerField(), null=True) + allowed_channels = ArrayField(models.IntegerField(), null=True) + allowed_categories = ArrayField(models.IntegerField(), null=True) def __str__(self) -> str: return f"Filter {self.content!r}" - - @staticmethod - def allow_null() -> bool: - """Allow null values for overrides.""" - return True diff --git a/pydis_site/apps/api/models/mixins.py b/pydis_site/apps/api/models/mixins.py index d32e6e72..5d75b78b 100644 --- a/pydis_site/apps/api/models/mixins.py +++ b/pydis_site/apps/api/models/mixins.py @@ -1,4 +1,3 @@ -from abc import ABCMeta from operator import itemgetter from django.db import models @@ -30,7 +29,3 @@ class ModelTimestampMixin(models.Model): """Metaconfig for the mixin.""" abstract = True - - -class AbstractModelMeta(ABCMeta, type(models.Model)): - """Metaclass for ABCModel class.""" diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index ff2bd929..4e92b3a0 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -113,6 +113,29 @@ class DocumentationLinkSerializer(ModelSerializer): fields = ('package', 'base_url', 'inventory_url') +ALWAYS_OPTIONAL_SETTINGS = ( + 'dm_content', + 'infraction_type', + 'infraction_reason', + 'infraction_duration', +) + +REQUIRED_FOR_FILTER_LIST_SETTINGS = ( + 'ping_type', + 'filter_dm', + 'dm_ping_type', + 'delete_messages', + 'bypass_roles', + 'enabled', + 'disallowed_channels', + 'disallowed_categories', + 'allowed_channels', + 'allowed_categories', +) + +SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS + + class FilterSerializer(ModelSerializer): """A class providing (de-)serialization of `Filter` instances.""" @@ -120,7 +143,16 @@ class FilterSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = Filter - fields = ('id', 'content', 'description', 'additional_field', 'filter_list', 'override') + fields = ('id', 'content', 'description', 'additional_field', 'filter_list') + SETTINGS_FIELDS + extra_kwargs = { + field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS + } | { + 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, + 'disallowed_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'disallowed_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'allowed_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'allowed_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, + } class FilterListSerializer(ModelSerializer): @@ -132,18 +164,16 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ( - 'id', - 'name', - 'list_type', - 'filters', - 'ping_type', - 'filter_dm', - 'dm_ping_type', - 'delete_messages', - 'bypass_roles', - '' - ) + fields = ('id', 'name', 'list_type', 'filters') + SETTINGS_FIELDS + extra_kwargs = { + field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS + } | { + 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, + 'disallowed_channels': {'allow_empty': True}, + 'disallowed_categories': {'allow_empty': True}, + 'allowed_channels': {'allow_empty': True}, + 'allowed_categories': {'allow_empty': True}, + } # Ensure that we can only have one filter list with the same name and 
field validators = [ @@ -200,7 +230,7 @@ class InfractionSerializer(ModelSerializer): if hidden and infr_type in ('superstar', 'warning', 'voice_ban'): raise ValidationError({'hidden': [f'{infr_type} infractions cannot be hidden.']}) - if not hidden and infr_type in ('note', ): + if not hidden and infr_type in ('note',): raise ValidationError({'hidden': [f'{infr_type} infractions must be hidden.']}) return attrs -- cgit v1.2.3 From 4c2eaff72ba9e95e1ef8d7b40396187783d87a50 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Thu, 28 Oct 2021 20:15:36 +0300 Subject: Add basic validation for infraction fields + use common infraction types --- pydis_site/apps/api/models/bot/filters.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index b9a081e6..eebcf703 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -5,6 +5,8 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models import UniqueConstraint +from pydis_site.apps.api.models import Infraction + class FilterListType(models.IntegerChoices): """Choice between allow or deny for a list type.""" @@ -13,16 +15,6 @@ class FilterListType(models.IntegerChoices): DENY = 0 -class InfractionType(models.TextChoices): - """Possible type of infractions.""" - - NOTE = "Note" - WARN = "Warn" - MUTE = "Mute" - KICK = "Kick" - BAN = "Ban" - - # Valid special values in ping related fields VALID_PINGS = ("everyone", "here", "moderators", "onduty", "admins") @@ -49,7 +41,7 @@ class FilterSettingsMixin(models.Model): help_text="The DM to send to a user triggering this filter." ) infraction_type = models.CharField( - choices=InfractionType.choices, + choices=Infraction.TYPE_CHOICES, max_length=4, null=True, help_text="The infraction to apply to this user." @@ -63,6 +55,11 @@ class FilterSettingsMixin(models.Model): help_text="The duration of the infraction. Null if permanent." 
) + def clean(self): + """Validate infraction fields as whole.""" + if (self.infraction_duration or self.infraction_reason) and not self.infraction_type: + raise ValidationError("Infraction type is required if setting infraction duration or reason.") + class Meta: """Metaclass for settings mixin.""" -- cgit v1.2.3 From d8ad1bdbcfcc8a0881c0ceb4d7d486455d23e170 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Thu, 28 Oct 2021 20:31:09 +0300 Subject: Add validation to filters to not allow duplicated channels and categories --- pydis_site/apps/api/models/bot/filters.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index eebcf703..45dea2c4 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -112,6 +112,20 @@ class FilterList(FilterSettingsMixin): allowed_channels = ArrayField(models.IntegerField()) allowed_categories = ArrayField(models.IntegerField()) + def clean(self): + """Do not allow duplicates in allowed and disallowed lists.""" + # Still run infraction fields validation + super().clean() + + channels_collection = self.allowed_channels + self.disallowed_channels + categories_collection = self.allowed_categories + self.disallowed_categories + + if len(channels_collection) != len(set(channels_collection)): + raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + + if len(categories_collection) != len(set(categories_collection)): + raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + class Meta: """Constrain name and list_type unique.""" @@ -166,5 +180,20 @@ class Filter(FilterSettingsMixin): allowed_channels = ArrayField(models.IntegerField(), null=True) allowed_categories = ArrayField(models.IntegerField(), null=True) + def clean(self): + """Do not allow duplicates in allowed and disallowed lists.""" + # Still run infraction fields validation + super().clean() + + if self.allowed_channels is not None or self.disallowed_channels is not None: + channels_collection = self.allowed_channels + self.disallowed_channels + if len(channels_collection) != len(set(channels_collection)): + raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + + if self.allowed_categories is not None or self.disallowed_categories is not None: + categories_collection = self.allowed_categories + self.disallowed_categories + if len(categories_collection) != len(set(categories_collection)): + raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + def __str__(self) -> str: return f"Filter {self.content!r}" -- cgit v1.2.3 From f4152448dfa4cd9912c22134af01fe37f0b153f6 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Thu, 28 Oct 2021 20:38:59 +0300 Subject: Add validation to filters to not allow duplicates + additional_field -> JSON --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 6 +++--- pydis_site/apps/api/models/bot/filters.py | 9 +++++---- 2 files changed, 8 insertions(+), 7 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index a595bda2..8716cbad 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ 
-79,7 +79,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200)), - ('additional_field', models.BooleanField(help_text='Implementation specific field.', null=True)), + ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), @@ -87,7 +87,7 @@ class Migration(migrations.Migration): ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), @@ -109,7 +109,7 @@ class Migration(migrations.Migration): ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('infraction_type', models.CharField(choices=[('Note', 'Note'), ('Warn', 'Warn'), ('Mute', 'Mute'), ('Kick', 'Kick'), ('Ban', 'Ban')], help_text='The infraction to apply to this user.', max_length=4, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 45dea2c4..472354f8 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,11 +1,12 @@ from typing import List -from django.contrib.postgres.fields import ArrayField +from django.contrib.postgres.fields import ArrayField, JSONField from django.core.exceptions import ValidationError from django.db import models from django.db.models import UniqueConstraint -from pydis_site.apps.api.models import Infraction +# Must be imported that way to avoid circular imports +from .infraction import Infraction class FilterListType(models.IntegerChoices): @@ -42,7 +43,7 @@ class FilterSettingsMixin(models.Model): ) infraction_type = models.CharField( choices=Infraction.TYPE_CHOICES, - max_length=4, + max_length=9, null=True, help_text="The infraction to apply to this user." ) @@ -142,7 +143,7 @@ class Filter(FilterSettingsMixin): content = models.CharField(max_length=100, help_text="The definition of this filter.") description = models.CharField(max_length=200, help_text="Why this filter has been added.") - additional_field = models.BooleanField(null=True, help_text="Implementation specific field.") + additional_field = JSONField(null=True, help_text="Implementation specific field.") filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." 
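An aside, for clarity: the duplicate validation added in the last two commits boils down to comparing the length of the combined allowed/disallowed collection against the length of its set. A minimal standalone sketch of that check, with made-up channel IDs (this is illustrative only, not part of the patch):

    # Standalone sketch of the duplicate check used by the clean() logic above.
    allowed_channels = [111, 222]
    disallowed_channels = [222, 333]

    combined = allowed_channels + disallowed_channels
    # True here, because 222 appears in both lists.
    has_duplicates = len(combined) != len(set(combined))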
-- cgit v1.2.3 From b49612db59ca075f64a4c7da11e3c9ce7e7b19eb Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Fri, 29 Oct 2021 20:11:27 +0300 Subject: Move filters validations to serializers --- pydis_site/apps/api/models/bot/filters.py | 34 ------------------------------- pydis_site/apps/api/serializers.py | 33 ++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 34 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 472354f8..3a1f3c6a 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -56,11 +56,6 @@ class FilterSettingsMixin(models.Model): help_text="The duration of the infraction. Null if permanent." ) - def clean(self): - """Validate infraction fields as whole.""" - if (self.infraction_duration or self.infraction_reason) and not self.infraction_type: - raise ValidationError("Infraction type is required if setting infraction duration or reason.") - class Meta: """Metaclass for settings mixin.""" @@ -113,20 +108,6 @@ class FilterList(FilterSettingsMixin): allowed_channels = ArrayField(models.IntegerField()) allowed_categories = ArrayField(models.IntegerField()) - def clean(self): - """Do not allow duplicates in allowed and disallowed lists.""" - # Still run infraction fields validation - super().clean() - - channels_collection = self.allowed_channels + self.disallowed_channels - categories_collection = self.allowed_categories + self.disallowed_categories - - if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Allowed and disallowed channels lists contain duplicates.") - - if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Allowed and disallowed categories lists contain duplicates.") - class Meta: """Constrain name and list_type unique.""" @@ -181,20 +162,5 @@ class Filter(FilterSettingsMixin): allowed_channels = ArrayField(models.IntegerField(), null=True) allowed_categories = ArrayField(models.IntegerField(), null=True) - def clean(self): - """Do not allow duplicates in allowed and disallowed lists.""" - # Still run infraction fields validation - super().clean() - - if self.allowed_channels is not None or self.disallowed_channels is not None: - channels_collection = self.allowed_channels + self.disallowed_channels - if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Allowed and disallowed channels lists contain duplicates.") - - if self.allowed_categories is not None or self.disallowed_categories is not None: - categories_collection = self.allowed_categories + self.disallowed_categories - if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Allowed and disallowed categories lists contain duplicates.") - def __str__(self) -> str: return f"Filter {self.content!r}" diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 4e92b3a0..b5f083b0 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -139,6 +139,23 @@ SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS class FilterSerializer(ModelSerializer): """A class providing (de-)serialization of `Filter` instances.""" + def validate(self, data): + """Perform infraction data + allow and disallowed lists validation.""" + if (data.get('infraction_reason') or data.get('infraction_duration')) and not 
data.get('infraction_type'): + raise ValidationError("Infraction type is required with infraction duration or reason") + + if data.get('allowed_channels') is not None and data.get('disallowed_channels') is not None: + channels_collection = data['allowed_channels'] + data['disallowed_channels'] + if len(channels_collection) != len(set(channels_collection)): + raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + + if data.get('allowed_categories') is not None and data.get('disallowed_categories') is not None: + categories_collection = data['allowed_categories'] + data['disallowed_categories'] + if len(categories_collection) != len(set(categories_collection)): + raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + + return data + class Meta: """Metadata defined for the Django REST Framework.""" @@ -160,6 +177,22 @@ class FilterListSerializer(ModelSerializer): filters = FilterSerializer(many=True, read_only=True) + def validate(self, data): + """Perform infraction data + allow and disallowed lists validation.""" + if (data['infraction_reason'] or data['infraction_duration']) and not data['infraction_type']: + raise ValidationError("Infraction type is required with infraction duration or reason") + + channels_collection = data['allowed_channels'] + data['disallowed_channels'] + categories_collection = data['allowed_categories'] + data['disallowed_categories'] + + if len(channels_collection) != len(set(channels_collection)): + raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + + if len(categories_collection) != len(set(categories_collection)): + raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + + return data + class Meta: """Metadata defined for the Django REST Framework.""" -- cgit v1.2.3 From 55d9288f11e2981eb9251f92164a597869c07cf9 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Fri, 29 Oct 2021 20:11:38 +0300 Subject: Add merge migration --- pydis_site/apps/api/migrations/0074_merge_20211017_0822.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0074_merge_20211017_0822.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py b/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py new file mode 100644 index 00000000..ae41ac71 --- /dev/null +++ b/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py @@ -0,0 +1,14 @@ +# Generated by Django 3.0.14 on 2021-10-17 08:22 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0073_otn_allow_GT_and_LT'), + ('api', '0070_new_filter_schema'), + ] + + operations = [ + ] -- cgit v1.2.3 From 7d21797b8bc14b92a48bc782694e226b2562c1b5 Mon Sep 17 00:00:00 2001 From: ks129 <45097959+ks129@users.noreply.github.com> Date: Fri, 29 Oct 2021 20:17:03 +0300 Subject: Fix linting --- pydis_site/apps/api/serializers.py | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index b5f083b0..c82b0797 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -139,17 +139,25 @@ SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS class FilterSerializer(ModelSerializer): """A class providing (de-)serialization of `Filter` 
instances.""" - def validate(self, data): + def validate(self, data: dict) -> dict: """Perform infraction data + allow and disallowed lists validation.""" - if (data.get('infraction_reason') or data.get('infraction_duration')) and not data.get('infraction_type'): + if ( + data.get('infraction_reason') or data.get('infraction_duration') + ) and not data.get('infraction_type'): raise ValidationError("Infraction type is required with infraction duration or reason") - if data.get('allowed_channels') is not None and data.get('disallowed_channels') is not None: + if ( + data.get('allowed_channels') is not None + and data.get('disallowed_channels') is not None + ): channels_collection = data['allowed_channels'] + data['disallowed_channels'] if len(channels_collection) != len(set(channels_collection)): raise ValidationError("Allowed and disallowed channels lists contain duplicates.") - if data.get('allowed_categories') is not None and data.get('disallowed_categories') is not None: + if ( + data.get('allowed_categories') is not None + and data.get('disallowed_categories') is not None + ): categories_collection = data['allowed_categories'] + data['disallowed_categories'] if len(categories_collection) != len(set(categories_collection)): raise ValidationError("Allowed and disallowed categories lists contain duplicates.") @@ -160,7 +168,9 @@ class FilterSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = Filter - fields = ('id', 'content', 'description', 'additional_field', 'filter_list') + SETTINGS_FIELDS + fields = ( + 'id', 'content', 'description', 'additional_field', 'filter_list' + ) + SETTINGS_FIELDS extra_kwargs = { field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS } | { @@ -177,9 +187,11 @@ class FilterListSerializer(ModelSerializer): filters = FilterSerializer(many=True, read_only=True) - def validate(self, data): + def validate(self, data: dict) -> dict: """Perform infraction data + allow and disallowed lists validation.""" - if (data['infraction_reason'] or data['infraction_duration']) and not data['infraction_type']: + if ( + data['infraction_reason'] or data['infraction_duration'] + ) and not data['infraction_type']: raise ValidationError("Infraction type is required with infraction duration or reason") channels_collection = data['allowed_channels'] + data['disallowed_channels'] -- cgit v1.2.3 From 8ab32d7820b57b9f3edb61d4bd93864b6037502b Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sun, 5 Dec 2021 16:09:56 +0100 Subject: Adjust Filter JSON Schema From now on the Serializer will have a different JSON representation than the table schema itself, conforming to the format needed on the bot-side. 
--- pydis_site/apps/api/serializers.py | 30 +++++++ pydis_site/apps/api/viewsets/bot/filters.py | 127 +++++++++++++++++++++------- 2 files changed, 127 insertions(+), 30 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index c82b0797..864ab52e 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -133,6 +133,17 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'allowed_categories', ) +# Required fields for custom JSON representation purposes +BASE_FIELDS = ('id', 'content', 'description', 'additional_field') +BASE_SETTINGS_FIELDS = ("ping_type", "dm_ping_type", "bypass_roles", "filter_dm") +INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration") +CHANNEL_SCOPE_FIELDS = ( + "allowed_channels", + "allowed_categories", + "disallowed_channels", + "disallowed_categories" +) + SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS @@ -181,6 +192,25 @@ class FilterSerializer(ModelSerializer): 'allowed_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, } + def to_representation(self, instance: Filter) -> dict: + """ + Provides a custom JSON representation to the Filter Serializers + + That does not affect how the Serializer works in general. + """ + item = Filter.objects.get(id=instance.id) + schema_settings = { + "settings": + {name: getattr(item, name) for name in BASE_SETTINGS_FIELDS} + | {"infraction": {name: getattr(item, name) for name in INFRACTION_FIELDS}} + | {"channel_scope": {name: getattr(item, name) for name in CHANNEL_SCOPE_FIELDS}} + } + + schema_base = {name: getattr(item, name) for name in BASE_FIELDS} | \ + {"filter_list": item.filter_list.id} + + return schema_base | schema_settings + class FilterListSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterList` instances.""" diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 5b21de26..64329ebe 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -27,11 +27,28 @@ class FilterListViewSet(ModelViewSet): ... "filters": [ ... { ... "id": 1, + ... "filter_list": 1 ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1, - ... "filter_list": 1 + ... "settings": { + ... "ping_type": None, + ... "dm_ping_type": None + ... "bypass_roles": None + ... "filter_dm": None, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None + ... }, + ... "channel_scope": { + ... "allowed_channels": None, + ... "allowed_categories": None, + ... "disallowed_channels": None, + ... "disallowed_categories": None + ... } + ... } + ... ... }, ... ... ... ], @@ -48,23 +65,40 @@ class FilterListViewSet(ModelViewSet): Returns a specific FilterList item from the database. #### Response format - >>> { - ... "id": 1, - ... "name": "guild_invite", - ... "list_type": 1, - ... "filters": [ - ... { - ... "id": 1, - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "override": 1, - ... "filter_list": 1 - ... }, - ... ... - ... ], - ... "default_settings": 1 - ... } + ... { + ... "id": 1, + ... "name": "guild_invite", + ... "list_type": 1, + ... "filters": [ + ... { + ... "id": 1, + ... "filter_list": 1 + ... "content": "267624335836053506", + ... 
"description": "Python Discord", + ... "additional_field": None, + ... "settings": { + ... "ping_type": None, + ... "dm_ping_type": None + ... "bypass_roles": None + ... "filter_dm": None, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None + ... }, + ... "channel_scope": { + ... "allowed_channels": None, + ... "allowed_categories": None, + ... "disallowed_channels": None, + ... "disallowed_categories": None + ... } + ... } + ... + ... }, + ... ... + ... ], + ... "default_settings": 1 + ... } #### Status codes - 200: returned on success @@ -93,12 +127,28 @@ class FilterViewSet(ModelViewSet): #### Response format >>> [ ... { - ... "id": 1, - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "override": 1, - ... "filter_list": 1 + ... "id": 1, + ... "filter_list": 1 + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "settings": { + ... "ping_type": None, + ... "dm_ping_type": None + ... "bypass_roles": None + ... "filter_dm": None, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None + ... }, + ... "channel_scope": { + ... "allowed_channels": None, + ... "allowed_categories": None, + ... "disallowed_channels": None, + ... "disallowed_categories": None + ... } + ... } ... }, ... ... ... ] @@ -112,11 +162,28 @@ class FilterViewSet(ModelViewSet): #### Response format >>> { - ... "id": 1, - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "override": 1 + ... "id": 1, + ... "filter_list": 1 + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "settings": { + ... "ping_type": None, + ... "dm_ping_type": None + ... "bypass_roles": None + ... "filter_dm": None, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None + ... }, + ... "channel_scope": { + ... "allowed_channels": None, + ... "allowed_categories": None, + ... "disallowed_channels": None, + ... "disallowed_categories": None + ... } + ... } ... } #### Status codes -- cgit v1.2.3 From e3a45e09041898ffd0bccd3c730524e8c673e696 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 6 Dec 2021 20:22:01 +0100 Subject: Adjust FilterList Representation From now on the FilterList Serializer will contain a settings field with all the settings that were listed previously, on the model. 
--- pydis_site/apps/api/serializers.py | 17 ++++++++-- pydis_site/apps/api/viewsets/bot/filters.py | 49 +++++++++++++++++++++++++++-- 2 files changed, 60 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 864ab52e..267cf761 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -181,7 +181,7 @@ class FilterSerializer(ModelSerializer): model = Filter fields = ( 'id', 'content', 'description', 'additional_field', 'filter_list' - ) + SETTINGS_FIELDS + ) extra_kwargs = { field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS } | { @@ -194,7 +194,8 @@ class FilterSerializer(ModelSerializer): def to_representation(self, instance: Filter) -> dict: """ - Provides a custom JSON representation to the Filter Serializers + + Provides a custom JSON representation to the Filter Serializers. That does not affect how the Serializer works in general. """ @@ -239,7 +240,7 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'name', 'list_type', 'filters') + SETTINGS_FIELDS + fields = ('id', 'name', 'list_type', 'filters') extra_kwargs = { field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS } | { @@ -261,6 +262,16 @@ class FilterListSerializer(ModelSerializer): ), ] + def to_representation(self, instance: FilterList) -> dict: + """ + Provides a custom JSON representation to the FilterList Serializers. + + That does not affect how the Serializer works in general. + """ + ret = super().to_representation(instance) + ret["settings"] = {name: getattr(instance, name) for name in SETTINGS_FIELDS} + return ret + class InfractionSerializer(ModelSerializer): """A class providing (de-)serialization of `Infraction` instances.""" diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 64329ebe..cbadcf2b 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -52,7 +52,28 @@ class FilterListViewSet(ModelViewSet): ... }, ... ... ... ], - ... "default_settings": 1 + ... "settings": { + ... "dm_content": None, + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None, + ... "ping_type": [ + ... "onduty" + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty" + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601 + ... ], + ... "enabled": False, + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_categories": [] + ... } ... }, ... ... ... ] @@ -65,6 +86,7 @@ class FilterListViewSet(ModelViewSet): Returns a specific FilterList item from the database. #### Response format + >>> ... { ... "id": 1, ... "name": "guild_invite", @@ -95,9 +117,30 @@ class FilterListViewSet(ModelViewSet): ... } ... ... }, - ... ... + ... ... ], - ... "default_settings": 1 + ... "settings": { + ... "dm_content": None, + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None, + ... "ping_type": [ + ... "onduty" + ... ], + ... "filter_dm": True, + ... "dm_ping_type": [ + ... "onduty" + ... ], + ... "delete_messages": True, + ... "bypass_roles": [ + ... 267630620367257601 + ... ], + ... "enabled": False, + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... 
"allowed_channels": [], + ... "allowed_categories": [] + ... } ... } #### Status codes -- cgit v1.2.3 From 3e8f164525bdd3a728bb7383da237feb9aacb44e Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Thu, 9 Dec 2021 20:53:49 +0100 Subject: Adjust FilterList Schema to group settings into subcategories - This commit patches the FilterList serializer's schema, and puts the settings into the relevant subcategories. --- pydis_site/apps/api/serializers.py | 9 ++- pydis_site/apps/api/viewsets/bot/filters.py | 93 ++++++++++++++--------------- 2 files changed, 54 insertions(+), 48 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 267cf761..89005a9b 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -269,7 +269,14 @@ class FilterListSerializer(ModelSerializer): That does not affect how the Serializer works in general. """ ret = super().to_representation(instance) - ret["settings"] = {name: getattr(instance, name) for name in SETTINGS_FIELDS} + schema_base = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} + schema_settings = { + "infraction": + {name: getattr(instance, name) for name in INFRACTION_FIELDS}} \ + | { + "channel_scope": + {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} + ret["settings"] = schema_base | schema_settings return ret diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index cbadcf2b..2b587696 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -52,31 +52,29 @@ class FilterListViewSet(ModelViewSet): ... }, ... ... ... ], - ... "settings": { - ... "dm_content": None, - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None, - ... "ping_type": [ - ... "onduty" - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty" - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601 - ... ], - ... "enabled": False, - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [] - ... } - ... }, - ... ... - ... ] + ... "settings": { + ... "ping_type": [ + ... "onduty" + ... ], + ... "dm_ping_type": [ + ... "onduty" + ... ], + ... "bypass_roles": [ + ... 267630620367257601 + ... ], + ... "filter_dm": True, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None, + ... } + ... "channel_scope": { + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_categories": [] + ... } + ... } #### Status codes - 200: returned on success @@ -120,28 +118,29 @@ class FilterListViewSet(ModelViewSet): ... ... ], ... "settings": { - ... "dm_content": None, - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None, - ... "ping_type": [ - ... "onduty" - ... ], - ... "filter_dm": True, - ... "dm_ping_type": [ - ... "onduty" - ... ], - ... "delete_messages": True, - ... "bypass_roles": [ - ... 267630620367257601 - ... ], - ... "enabled": False, - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [] - ... } - ... } + ... "ping_type": [ + ... "onduty" + ... ], + ... "dm_ping_type": [ + ... "onduty" + ... ], + ... "bypass_roles": [ + ... 
267630620367257601 + ... ], + ... "filter_dm": True, + ... "infraction": { + ... "infraction_type": None, + ... "infraction_reason": "", + ... "infraction_duration": None, + ... } + ... "channel_scope": { + ... "disallowed_channels": [], + ... "disallowed_categories": [], + ... "allowed_channels": [], + ... "allowed_categories": [] + ... } + ... } + ... } #### Status codes - 200: returned on success -- cgit v1.2.3 From a24cac8d43893f792d4fa495cf2a9ce65f69051c Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Fri, 10 Dec 2021 22:19:09 +0100 Subject: Patch Filter and FilterList Serializer validation logic and representation - This commit patches an error with the FilterListSerializer validation logic, so that it won't raise an error when an optional field is not present. - It also adds the `enabled` and `delete_messages` fields, to the FilterSerializer's representation - Furthermore the commit introduces minor bug patches, regarding DRF Serializer Fields. --- pydis_site/apps/api/serializers.py | 73 +++++++++++++++++++---------- pydis_site/apps/api/viewsets/bot/filters.py | 12 +++++ 2 files changed, 61 insertions(+), 24 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 89005a9b..cb8313ac 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -134,8 +134,16 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( ) # Required fields for custom JSON representation purposes -BASE_FIELDS = ('id', 'content', 'description', 'additional_field') -BASE_SETTINGS_FIELDS = ("ping_type", "dm_ping_type", "bypass_roles", "filter_dm") +BASE_FILTER_FIELDS = ('id', 'content', 'description', 'additional_field') +BASE_FILTERLIST_FIELDS = ('id', 'name', 'list_type') +BASE_SETTINGS_FIELDS = ( + "ping_type", + "dm_ping_type", + "bypass_roles", + "filter_dm", + "enabled", + "delete_messages" +) INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration") CHANNEL_SCOPE_FIELDS = ( "allowed_channels", @@ -181,7 +189,7 @@ class FilterSerializer(ModelSerializer): model = Filter fields = ( 'id', 'content', 'description', 'additional_field', 'filter_list' - ) + ) + SETTINGS_FIELDS extra_kwargs = { field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS } | { @@ -199,16 +207,18 @@ class FilterSerializer(ModelSerializer): That does not affect how the Serializer works in general. 
""" - item = Filter.objects.get(id=instance.id) schema_settings = { "settings": - {name: getattr(item, name) for name in BASE_SETTINGS_FIELDS} - | {"infraction": {name: getattr(item, name) for name in INFRACTION_FIELDS}} - | {"channel_scope": {name: getattr(item, name) for name in CHANNEL_SCOPE_FIELDS}} + {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} + | {"infraction": {name: getattr(instance, name) for name in INFRACTION_FIELDS}} + | { + "channel_scope": + {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} + } } - schema_base = {name: getattr(item, name) for name in BASE_FIELDS} | \ - {"filter_list": item.filter_list.id} + schema_base = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} | \ + {"filter_list": instance.filter_list.id} return schema_base | schema_settings @@ -221,18 +231,25 @@ class FilterListSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allow and disallowed lists validation.""" if ( - data['infraction_reason'] or data['infraction_duration'] - ) and not data['infraction_type']: + data.get('infraction_reason') or data.get('infraction_duration') + ) and not data.get('infraction_type'): raise ValidationError("Infraction type is required with infraction duration or reason") - channels_collection = data['allowed_channels'] + data['disallowed_channels'] - categories_collection = data['allowed_categories'] + data['disallowed_categories'] - - if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + if ( + data.get('allowed_channels') is not None + and data.get('disallowed_channels') is not None + ): + channels_collection = data['allowed_channels'] + data['disallowed_channels'] + if len(channels_collection) != len(set(channels_collection)): + raise ValidationError("Allowed and disallowed channels lists contain duplicates.") - if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + if ( + data.get('allowed_categories') is not None + and data.get('disallowed_categories') is not None + ): + categories_collection = data['allowed_categories'] + data['disallowed_categories'] + if len(categories_collection) != len(set(categories_collection)): + raise ValidationError("Allowed and disallowed categories lists contain duplicates.") return data @@ -240,7 +257,7 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'name', 'list_type', 'filters') + fields = ('id', 'name', 'list_type', 'filters') + SETTINGS_FIELDS extra_kwargs = { field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS } | { @@ -268,16 +285,24 @@ class FilterListSerializer(ModelSerializer): That does not affect how the Serializer works in general. 
""" - ret = super().to_representation(instance) - schema_base = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - schema_settings = { + # Fetches the relating filters + filters = [ + FilterSerializer(many=False).to_representation( + instance=item + ) for item in Filter.objects.filter( + filter_list=instance.id + ) + ] + schema_base = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} \ + | {"filters": filters} + schema_settings_base = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} + schema_settings_categories = { "infraction": {name: getattr(instance, name) for name in INFRACTION_FIELDS}} \ | { "channel_scope": {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} - ret["settings"] = schema_base | schema_settings - return ret + return schema_base | {"settings": schema_settings_base | schema_settings_categories} class InfractionSerializer(ModelSerializer): diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 2b587696..e8f3e3d9 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -36,6 +36,8 @@ class FilterListViewSet(ModelViewSet): ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -63,6 +65,8 @@ class FilterListViewSet(ModelViewSet): ... 267630620367257601 ... ], ... "filter_dm": True, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -101,6 +105,8 @@ class FilterListViewSet(ModelViewSet): ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -128,6 +134,8 @@ class FilterListViewSet(ModelViewSet): ... 267630620367257601 ... ], ... "filter_dm": True, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -179,6 +187,8 @@ class FilterViewSet(ModelViewSet): ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -214,6 +224,8 @@ class FilterViewSet(ModelViewSet): ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, + ... "enabled": False + ... "delete_messages": True ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", -- cgit v1.2.3 From 4c93b1b9b75cce4e45bdbdae608f4497372c2b56 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 15 Dec 2021 19:25:55 +0100 Subject: Prepare FilterList and Filter models, serializers for the new filter schema - Rename channel scope fields: - "allowed" -> "disabled" eg.: "allowed_channels" -> "disabled_channels" - Rename FilterLists` names: filter_token -> tokens domain_name -> domains guild_invite -> invites file_format -> formats - Patch the docs and validators accordingly. 
--- ..._filter_and_filterlist_for_new_filter_schema.py | 80 ++++++++++++++++++ pydis_site/apps/api/models/bot/filters.py | 21 +++-- pydis_site/apps/api/serializers.py | 90 +++++++++++---------- pydis_site/apps/api/viewsets/bot/filters.py | 94 ++++++++++++---------- 4 files changed, 189 insertions(+), 96 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py new file mode 100644 index 00000000..30537e3d --- /dev/null +++ b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py @@ -0,0 +1,80 @@ +# Generated by Django 3.0.14 on 2021-12-11 23:14 +from django.apps.registry import Apps +from django.db import migrations, models +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + + +def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + change_map = { + "filter_token": "tokens", + "domain_name": "domains", + "guild_invite": "invites", + "file_format": "formats" + } + for filter_list in FilterList.objects.all(): + if change_map.get(filter_list.name): + filter_list.name = change_map.get(filter_list.name) + filter_list.save() + + +def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + change_map = { + "tokens": "filter_token", + "domains": "domain_name", + "invites": "guild_invite", + "formats": "file_format" + } + for filter_list in FilterList.objects.all(): + if change_map.get(filter_list.name): + filter_list.name = change_map.get(filter_list.name) + filter_list.save() + + +class Migration(migrations.Migration): + dependencies = [ + ('api', '0074_merge_20211017_0822'), + ] + + operations = [ + migrations.RenameField( + model_name='filter', + old_name='allowed_categories', + new_name='disabled_categories', + ), + migrations.RenameField( + model_name='filter', + old_name='allowed_channels', + new_name='disabled_channels', + ), + migrations.RenameField( + model_name='filter', + old_name='disallowed_channels', + new_name='enabled_channels', + ), + migrations.RenameField( + model_name='filterlist', + old_name='allowed_categories', + new_name='disabled_categories', + ), + migrations.RenameField( + model_name='filterlist', + old_name='allowed_channels', + new_name='disabled_channels', + ), + migrations.RenameField( + model_name='filterlist', + old_name='disallowed_channels', + new_name='enabled_channels', + ), + migrations.RemoveField( + model_name='filterlist', + name='disallowed_categories', + ), + migrations.RemoveField( + model_name='filter', + name='disallowed_categories', + ), + migrations.RunPython(migrate_filterlist, unmigrate_filterlist) + ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 3a1f3c6a..ae877685 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -99,14 +99,12 @@ class FilterList(FilterSettingsMixin): # Where a filter should apply. 
# # The resolution is done in the following order: - # - disallowed channels - # - disallowed categories - # - allowed categories - # - allowed channels - disallowed_channels = ArrayField(models.IntegerField()) - disallowed_categories = ArrayField(models.IntegerField()) - allowed_channels = ArrayField(models.IntegerField()) - allowed_categories = ArrayField(models.IntegerField()) + # - enabled_channels + # - disabled_categories + # - disabled_channels + enabled_channels = ArrayField(models.IntegerField()) + disabled_channels = ArrayField(models.IntegerField()) + disabled_categories = ArrayField(models.IntegerField()) class Meta: """Constrain name and list_type unique.""" @@ -157,10 +155,9 @@ class Filter(FilterSettingsMixin): ) # Check FilterList model for information about these properties. - disallowed_channels = ArrayField(models.IntegerField(), null=True) - disallowed_categories = ArrayField(models.IntegerField(), null=True) - allowed_channels = ArrayField(models.IntegerField(), null=True) - allowed_categories = ArrayField(models.IntegerField(), null=True) + enabled_channels = ArrayField(models.IntegerField(), null=True) + disabled_channels = ArrayField(models.IntegerField(), null=True) + disabled_categories = ArrayField(models.IntegerField(), null=True) def __str__(self) -> str: return f"Filter {self.content!r}" diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index cb8313ac..784f8160 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -127,18 +127,15 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'delete_messages', 'bypass_roles', 'enabled', - 'disallowed_channels', - 'disallowed_categories', - 'allowed_channels', - 'allowed_categories', + 'enabled_channels', + 'disabled_channels', + 'disabled_categories', ) # Required fields for custom JSON representation purposes BASE_FILTER_FIELDS = ('id', 'content', 'description', 'additional_field') BASE_FILTERLIST_FIELDS = ('id', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( - "ping_type", - "dm_ping_type", "bypass_roles", "filter_dm", "enabled", @@ -146,11 +143,11 @@ BASE_SETTINGS_FIELDS = ( ) INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration") CHANNEL_SCOPE_FIELDS = ( - "allowed_channels", - "allowed_categories", - "disallowed_channels", - "disallowed_categories" + "disabled_channels", + "disabled_categories", + "enabled_channels", ) +MENTIONS_FIELDS = ("ping_type", "dm_ping_type") SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS @@ -166,20 +163,17 @@ class FilterSerializer(ModelSerializer): raise ValidationError("Infraction type is required with infraction duration or reason") if ( - data.get('allowed_channels') is not None - and data.get('disallowed_channels') is not None + data.get('disabled_channels') is not None + and data.get('enabled_channels') is not None ): - channels_collection = data['allowed_channels'] + data['disallowed_channels'] + channels_collection = data['disabled_channels'] + data['enabled_channels'] if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + raise ValidationError("Enabled and Disabled channels lists contain duplicates.") - if ( - data.get('allowed_categories') is not None - and data.get('disallowed_categories') is not None - ): - categories_collection = data['allowed_categories'] + data['disallowed_categories'] + if data.get('disabled_categories') is not None: + categories_collection = 
data['disabled_categories'] if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + raise ValidationError("Disabled categories lists contain duplicates.") return data @@ -194,18 +188,20 @@ class FilterSerializer(ModelSerializer): field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS } | { 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, - 'disallowed_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'disallowed_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'allowed_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'allowed_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'enabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'disabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'disabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, } def to_representation(self, instance: Filter) -> dict: """ - Provides a custom JSON representation to the Filter Serializers. - That does not affect how the Serializer works in general. + This representation restructures how the Filter is represented. + It groups the Infraction, Channel and Mention related fields into their own separated group. + + Furthermore, it puts the fields that meant to represent Filter settings, + into a sub-field called `settings`. """ schema_settings = { "settings": @@ -214,6 +210,11 @@ class FilterSerializer(ModelSerializer): | { "channel_scope": {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} + } | { + "mentions": + { + schema_field_name: getattr(instance, schema_field_name) + for schema_field_name in MENTIONS_FIELDS} } } @@ -236,20 +237,17 @@ class FilterListSerializer(ModelSerializer): raise ValidationError("Infraction type is required with infraction duration or reason") if ( - data.get('allowed_channels') is not None - and data.get('disallowed_channels') is not None + data.get('disabled_channels') is not None + and data.get('enabled_channels') is not None ): - channels_collection = data['allowed_channels'] + data['disallowed_channels'] + channels_collection = data['disabled_channels'] + data['enabled_channels'] if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Allowed and disallowed channels lists contain duplicates.") + raise ValidationError("Enabled and Disabled channels lists contain duplicates.") - if ( - data.get('allowed_categories') is not None - and data.get('disallowed_categories') is not None - ): - categories_collection = data['allowed_categories'] + data['disallowed_categories'] + if data.get('disabled_categories') is not None: + categories_collection = data['disabled_categories'] if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Allowed and disallowed categories lists contain duplicates.") + raise ValidationError("Disabled categories lists contain duplicates.") return data @@ -262,10 +260,9 @@ class FilterListSerializer(ModelSerializer): field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS } | { 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, - 'disallowed_channels': {'allow_empty': True}, - 'disallowed_categories': {'allow_empty': True}, - 'allowed_channels': {'allow_empty': True}, - 'allowed_categories': 
{'allow_empty': True}, + 'enabled_channels': {'allow_empty': True}, + 'disabled_channels': {'allow_empty': True}, + 'disabled_categories': {'allow_empty': True}, } # Ensure that we can only have one filter list with the same name and field @@ -283,7 +280,11 @@ class FilterListSerializer(ModelSerializer): """ Provides a custom JSON representation to the FilterList Serializers. - That does not affect how the Serializer works in general. + This representation restructures how the Filter is represented. + It groups the Infraction, Channel and Mention related fields into their own separated group. + + Furthermore, it puts the fields that meant to represent FilterList settings, + into a sub-field called `settings`. """ # Fetches the relating filters filters = [ @@ -301,7 +302,12 @@ class FilterListSerializer(ModelSerializer): {name: getattr(instance, name) for name in INFRACTION_FIELDS}} \ | { "channel_scope": - {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} + {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} | { + "mentions": { + schema_field_name: getattr(instance, schema_field_name) + for schema_field_name in MENTIONS_FIELDS + } + } return schema_base | {"settings": schema_settings_base | schema_settings_categories} diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index e8f3e3d9..20af079d 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -32,8 +32,6 @@ class FilterListViewSet(ModelViewSet): ... "description": "Python Discord", ... "additional_field": None, ... "settings": { - ... "ping_type": None, - ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, ... "enabled": False @@ -44,11 +42,14 @@ class FilterListViewSet(ModelViewSet): ... "infraction_duration": None ... }, ... "channel_scope": { - ... "allowed_channels": None, - ... "allowed_categories": None, - ... "disallowed_channels": None, - ... "disallowed_categories": None + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None ... } + ... "mentions": { + ... "ping_type": None + ... "dm_ping_type": None + ... } ... } ... ... }, @@ -72,13 +73,18 @@ class FilterListViewSet(ModelViewSet): ... "infraction_reason": "", ... "infraction_duration": None, ... } - ... "channel_scope": { - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [] - ... } - ... } + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None + ... } + ... "mentions": { + ... "ping_type": None + ... "dm_ping_type": None + ... } + ... }, + ... ... + ... ] #### Status codes - 200: returned on success @@ -88,8 +94,7 @@ class FilterListViewSet(ModelViewSet): Returns a specific FilterList item from the database. #### Response format - >>> - ... { + >>> { ... "id": 1, ... "name": "guild_invite", ... "list_type": 1, @@ -101,8 +106,6 @@ class FilterListViewSet(ModelViewSet): ... "description": "Python Discord", ... "additional_field": None, ... "settings": { - ... "ping_type": None, - ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, ... "enabled": False @@ -113,15 +116,18 @@ class FilterListViewSet(ModelViewSet): ... "infraction_duration": None ... }, ... "channel_scope": { - ... "allowed_channels": None, - ... "allowed_categories": None, - ... "disallowed_channels": None, - ... "disallowed_categories": None + ... 
"disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None ... } + ... "mentions": { + ... "ping_type": None + ... "dm_ping_type": None + ... } ... } ... ... }, - ... + ... ... ... ], ... "settings": { ... "ping_type": [ @@ -141,14 +147,16 @@ class FilterListViewSet(ModelViewSet): ... "infraction_reason": "", ... "infraction_duration": None, ... } - ... "channel_scope": { - ... "disallowed_channels": [], - ... "disallowed_categories": [], - ... "allowed_channels": [], - ... "allowed_categories": [] - ... } - ... } - ... } + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None + ... } + ... "mentions": { + ... "ping_type": None + ... "dm_ping_type": None + ... } + ... } #### Status codes - 200: returned on success @@ -183,8 +191,6 @@ class FilterViewSet(ModelViewSet): ... "description": "Python Discord", ... "additional_field": None, ... "settings": { - ... "ping_type": None, - ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, ... "enabled": False @@ -195,11 +201,14 @@ class FilterViewSet(ModelViewSet): ... "infraction_duration": None ... }, ... "channel_scope": { - ... "allowed_channels": None, - ... "allowed_categories": None, - ... "disallowed_channels": None, - ... "disallowed_categories": None + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None ... } + ... "mentions": { + ... "ping_type": None, + ... "dm_ping_type": None + ... } ... } ... }, ... ... @@ -220,8 +229,6 @@ class FilterViewSet(ModelViewSet): ... "description": "Python Discord", ... "additional_field": None, ... "settings": { - ... "ping_type": None, - ... "dm_ping_type": None ... "bypass_roles": None ... "filter_dm": None, ... "enabled": False @@ -232,11 +239,14 @@ class FilterViewSet(ModelViewSet): ... "infraction_duration": None ... }, ... "channel_scope": { - ... "allowed_channels": None, - ... "allowed_categories": None, - ... "disallowed_channels": None, - ... "disallowed_categories": None + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, ... } + ... "mentions": { + ... "ping_type": None + ... "dm_ping_type": None + ... } ... } ... } -- cgit v1.2.3 From af3980fe65b997287ceaf68e53ce3ab7bf4607e5 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 22 Dec 2021 18:18:22 +0100 Subject: Patch Filter/FilterList's default values and add new fields - Patch default values, so that further implementations can be performed on the bot side - Add three new fields: "send_alert", and in settings under the "server_message" field: "send_message_text", and "server_message_embed" fields. - Patch documentation, and validators accordingly. - Perform further patches, and minor corrections. 
--- .../apps/api/migrations/0070_new_filter_schema.py | 28 ++++-- ..._filter_and_filterlist_for_new_filter_schema.py | 17 +++- .../api/migrations/0078_merge_20211218_2200.py | 14 +++ .../0079_add_server_message_and_alert_fields.py | 69 ++++++++++++++ pydis_site/apps/api/models/bot/filters.py | 44 ++++++++- pydis_site/apps/api/serializers.py | 19 +++- pydis_site/apps/api/viewsets/bot/filters.py | 104 ++++++++++++--------- 7 files changed, 234 insertions(+), 61 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0078_merge_20211218_2200.py create mode 100644 pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index 8716cbad..f56c29f8 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -1,4 +1,5 @@ # Modified migration file to migrate existing filters to the new one +from datetime import timedelta import django.contrib.postgres.fields from django.apps.registry import Apps @@ -18,20 +19,27 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: for name, type_ in OLD_LIST_NAMES: objects = filter_list_old.objects.filter(type=name) + if name == "DOMAIN_NAME": + dm_content = "Your URL has been removed because it matched a blacklisted domain: {match}" + elif name == "GUILD_INVITE": + dm_content = "Per Rule 6, your invite link has been removed. " \ + "Our server rules can be found here: https://pythondiscord.com/pages/rules" + else: + dm_content = "" list_ = filter_list.objects.create( name=name.lower(), list_type=1 if type_ == "ALLOW" else 0, - ping_type=["onduty"], + ping_type=(["onduty"] if name != "FILE_FORMAT" else []), filter_dm=True, - dm_ping_type=["onduty"], - delete_messages=True, - bypass_roles=[267630620367257601], - enabled=False, - dm_content=None, - infraction_type=None, + dm_ping_type=[], + delete_messages=(True if name != "FILTER_TOKEN" else False), + bypass_roles=["staff"], + enabled=True, + dm_content=dm_content, + infraction_type="", infraction_reason="", - infraction_duration=None, + infraction_duration=timedelta(seconds=0), disallowed_channels=[], disallowed_categories=[], allowed_channels=[], @@ -84,7 +92,7 @@ class Migration(migrations.Migration): ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None, null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), 
('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), @@ -106,7 +114,7 @@ class Migration(migrations.Migration): ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), help_text='Roles and users who can bypass this filter.', size=None)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py index 30537e3d..cc524fcb 100644 --- a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py @@ -10,12 +10,26 @@ def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> N "filter_token": "tokens", "domain_name": "domains", "guild_invite": "invites", - "file_format": "formats" + "file_format": "extensions" } for filter_list in FilterList.objects.all(): if change_map.get(filter_list.name): filter_list.name = change_map.get(filter_list.name) filter_list.save() + redirects = FilterList( + name="redirects", + ping_type=[], + dm_ping_type=[], + enabled_channels=[], + disabled_channels=[], + disabled_categories=[], + list_type=0, + filter_dm=True, + delete_messages=False, + bypass_roles=[0], + enabled=True + ) + redirects.save() def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: @@ -30,6 +44,7 @@ def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> if change_map.get(filter_list.name): filter_list.name = change_map.get(filter_list.name) filter_list.save() + FilterList.objects.filter(name="redirects").delete() class Migration(migrations.Migration): diff --git a/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py b/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py new file mode 100644 index 00000000..7fe559f5 --- /dev/null +++ b/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py @@ -0,0 +1,14 @@ +# Generated by Django 3.1.14 on 2021-12-18 22:00 + +from django.db import 
migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0075_prepare_filter_and_filterlist_for_new_filter_schema'), + ('api', '0077_use_generic_jsonfield'), + ] + + operations = [ + ] diff --git a/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py new file mode 100644 index 00000000..f9803bd3 --- /dev/null +++ b/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py @@ -0,0 +1,69 @@ +# Generated by Django 3.1.14 on 2021-12-19 23:05 +from django.apps.registry import Apps +from django.db import migrations, models +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + + +def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + change_map = { + "tokens": True, + "domains": True, + "invites": True, + "extensions": False, + "redirects": False + } + for filter_list in FilterList.objects.all(): + filter_list.send_alert = change_map.get(filter_list.name) + filter_list.server_message_text = "" + filter_list.server_message_embed = "" + filter_list.save() + + +def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + for filter_list in FilterList.objects.all(): + filter_list.send_alert = True + filter_list.server_message_text = None + filter_list.server_message_embed = None + filter_list.save() + + +class Migration(migrations.Migration): + dependencies = [ + ('api', '0078_merge_20211218_2200'), + ] + + operations = [ + migrations.AddField( + model_name='filter', + name='send_alert', + field=models.BooleanField(help_text='Whether alert should be sent.', null=True), + ), + migrations.AddField( + model_name='filter', + name='server_message_embed', + field=models.CharField(help_text='The content of the server message embed', max_length=100, null=True), + ), + migrations.AddField( + model_name='filter', + name='server_message_text', + field=models.CharField(help_text='The message to send on the server', max_length=100, null=True), + ), + migrations.AddField( + model_name='filterlist', + name='send_alert', + field=models.BooleanField(default=True, help_text='Whether alert should be sent.'), + ), + migrations.AddField( + model_name='filterlist', + name='server_message_embed', + field=models.CharField(help_text='The content of the server message embed', max_length=100, null=True), + ), + migrations.AddField( + model_name='filterlist', + name='server_message_text', + field=models.CharField(help_text='The message to send on the server', max_length=100, null=True), + ), + migrations.RunPython(migrate_filterlist, unmigrate_filterlist) + ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index ae877685..92251ee4 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -18,6 +18,7 @@ class FilterListType(models.IntegerChoices): # Valid special values in ping related fields VALID_PINGS = ("everyone", "here", "moderators", "onduty", "admins") +VALID_BYPASS_ROLES = ("staff",) def validate_ping_field(value_list: List[str]) -> None: @@ -33,6 +34,14 @@ def validate_ping_field(value_list: List[str]) -> None: raise ValidationError(f"{value!r} isn't a valid ping type.") +def validate_bypass_roles_field(value_list: List[str]) -> None: + """Validate that the vclues are either a special value or a Role 
ID.""" + for value in value_list: + if value.isnumeric() or value in VALID_BYPASS_ROLES: + continue + raise ValidationError(f"{value!r} isn't a valid (bypass) role.") + + class FilterSettingsMixin(models.Model): """Mixin for common settings of a filters and filter lists.""" @@ -88,14 +97,30 @@ class FilterList(FilterSettingsMixin): null=False ) bypass_roles = ArrayField( - models.BigIntegerField(), + models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", + validators=(validate_bypass_roles_field,), null=False ) enabled = models.BooleanField( help_text="Whether this filter is currently enabled.", null=False ) + send_alert = models.BooleanField( + help_text="Whether alert should be sent.", + null=False, + default=True + ) + server_message_text = models.CharField( + max_length=100, + help_text="The message to send on the server", + null=True + ) + server_message_embed = models.CharField( + max_length=100, + help_text="The content of the server message embed", + null=True + ) # Where a filter should apply. # # The resolution is done in the following order: @@ -145,14 +170,29 @@ class Filter(FilterSettingsMixin): null=True ) bypass_roles = ArrayField( - models.BigIntegerField(), + models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", + validators=(validate_bypass_roles_field,), null=True ) enabled = models.BooleanField( help_text="Whether this filter is currently enabled.", null=True ) + send_alert = models.BooleanField( + help_text="Whether alert should be sent.", + null=True + ) + server_message_text = models.CharField( + max_length=100, + help_text="The message to send on the server", + null=True + ) + server_message_embed = models.CharField( + max_length=100, + help_text="The content of the server message embed", + null=True + ) # Check FilterList model for information about these properties. 
enabled_channels = ArrayField(models.IntegerField(), null=True) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 784f8160..30af9512 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -139,7 +139,8 @@ BASE_SETTINGS_FIELDS = ( "bypass_roles", "filter_dm", "enabled", - "delete_messages" + "delete_messages", + "send_alert" ) INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration") CHANNEL_SCOPE_FIELDS = ( @@ -147,6 +148,7 @@ CHANNEL_SCOPE_FIELDS = ( "disabled_categories", "enabled_channels", ) +SERVER_MESSAGE_FIELDS = ("server_message_text", "server_message_embed") MENTIONS_FIELDS = ("ping_type", "dm_ping_type") SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS @@ -214,10 +216,16 @@ class FilterSerializer(ModelSerializer): "mentions": { schema_field_name: getattr(instance, schema_field_name) - for schema_field_name in MENTIONS_FIELDS} + for schema_field_name in MENTIONS_FIELDS + } } + } | { + "server_message": + { + schema_field_name: getattr(instance, schema_field_name) + for schema_field_name in SERVER_MESSAGE_FIELDS + } } - schema_base = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} | \ {"filter_list": instance.filter_list.id} @@ -307,6 +315,11 @@ class FilterListSerializer(ModelSerializer): schema_field_name: getattr(instance, schema_field_name) for schema_field_name in MENTIONS_FIELDS } + } | { + "server_message": { + schema_field_name: getattr(instance, schema_field_name) + for schema_field_name in SERVER_MESSAGE_FIELDS + } } return schema_base | {"settings": schema_settings_base | schema_settings_categories} diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 20af079d..e52cd4e5 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -22,20 +22,21 @@ class FilterListViewSet(ModelViewSet): >>> [ ... { ... "id": 1, - ... "name": "guild_invite", + ... "name": "invites", ... "list_type": 1, ... "filters": [ ... { ... "id": 1, - ... "filter_list": 1 ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, + ... "filter_list": 1 ... "settings": { ... "bypass_roles": None ... "filter_dm": None, - ... "enabled": False - ... "delete_messages": True + ... "enabled": None + ... "send_alert": True, + ... "delete_messages": None ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -50,37 +51,42 @@ class FilterListViewSet(ModelViewSet): ... "ping_type": None ... "dm_ping_type": None ... } + ... "server_message": { + ... "server_message_text": None, + ... "server_message_embed": None + ... } ... } ... ... }, ... ... ... ], ... "settings": { - ... "ping_type": [ - ... "onduty" - ... ], - ... "dm_ping_type": [ - ... "onduty" - ... ], ... "bypass_roles": [ - ... 267630620367257601 + ... "staff" ... ], ... "filter_dm": True, - ... "enabled": False - ... "delete_messages": True + ... "enabled": True + ... "delete_messages": True, + ... "send_alert": True ... "infraction": { - ... "infraction_type": None, + ... "infraction_type": "", ... "infraction_reason": "", - ... "infraction_duration": None, + ... "infraction_duration": "0.0", ... } ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } + ... "disabled_channels": [], + ... "disabled_categories": [], + ... "enabled_channels": [] + ... } ... 
"mentions": { - ... "ping_type": None - ... "dm_ping_type": None + ... "ping_type": [ + ... "onduty" + ... ] + ... "dm_ping_type": [] + ... } + ... "server_message": { + ... "server_message_text": "", + ... "server_message_embed": "" ... } ... }, ... ... @@ -96,7 +102,7 @@ class FilterListViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, - ... "name": "guild_invite", + ... "name": "invites", ... "list_type": 1, ... "filters": [ ... { @@ -108,8 +114,9 @@ class FilterListViewSet(ModelViewSet): ... "settings": { ... "bypass_roles": None ... "filter_dm": None, - ... "enabled": False - ... "delete_messages": True + ... "enabled": None + ... "delete_messages": None, + ... "send_alert": None ... "infraction": { ... "infraction_type": None, ... "infraction_reason": "", @@ -124,37 +131,42 @@ class FilterListViewSet(ModelViewSet): ... "ping_type": None ... "dm_ping_type": None ... } + ... "server_message": { + ... "server_message_text": None, + ... "server_message_embed": None + ... } ... } ... ... }, ... ... ... ], ... "settings": { - ... "ping_type": [ - ... "onduty" - ... ], - ... "dm_ping_type": [ - ... "onduty" - ... ], ... "bypass_roles": [ - ... 267630620367257601 + ... "staff" ... ], ... "filter_dm": True, - ... "enabled": False + ... "enabled": True ... "delete_messages": True + ... "send_alert": True ... "infraction": { - ... "infraction_type": None, + ... "infraction_type": "", ... "infraction_reason": "", - ... "infraction_duration": None, + ... "infraction_duration": "0.0", ... } ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None + ... "disabled_channels": [], + ... "disabled_categories": [], + ... "enabled_channels": [] ... } ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None + ... "ping_type": [ + ... "onduty" + ... ] + ... "dm_ping_type": [] + ... } + ... "server_message": { + ... "server_message_text": "", + ... "server_message_embed": "" ... } ... } @@ -193,11 +205,12 @@ class FilterViewSet(ModelViewSet): ... "settings": { ... "bypass_roles": None ... "filter_dm": None, - ... "enabled": False - ... "delete_messages": True + ... "enabled": None + ... "delete_messages": True, + ... "send_alert": True ... "infraction": { ... "infraction_type": None, - ... "infraction_reason": "", + ... "infraction_reason": None, ... "infraction_duration": None ... }, ... "channel_scope": { @@ -231,11 +244,12 @@ class FilterViewSet(ModelViewSet): ... "settings": { ... "bypass_roles": None ... "filter_dm": None, - ... "enabled": False - ... "delete_messages": True + ... "enabled": None + ... "delete_messages": True, + ... "send_alert": True ... "infraction": { ... "infraction_type": None, - ... "infraction_reason": "", + ... "infraction_reason": None, ... "infraction_duration": None ... }, ... 
"channel_scope": { -- cgit v1.2.3 From c2aaa8d672484a698b8aec6a65c2f4af3cff18b1 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Fri, 24 Dec 2021 14:15:52 +0100 Subject: Include 'dm_content ' field under Infraction settings in Filters/FilterLists --- pydis_site/apps/api/serializers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 30af9512..66236d92 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -142,7 +142,7 @@ BASE_SETTINGS_FIELDS = ( "delete_messages", "send_alert" ) -INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration") +INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration", "dm_content") CHANNEL_SCOPE_FIELDS = ( "disabled_channels", "disabled_categories", -- cgit v1.2.3 From 466fc7cc4297fe4f5d921f6ca950b926ecc2d14d Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Fri, 24 Dec 2021 14:22:29 +0100 Subject: Correct 'Redirect' FilterLists' default values. --- .../0075_prepare_filter_and_filterlist_for_new_filter_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py index cc524fcb..56cbdedb 100644 --- a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py @@ -26,7 +26,7 @@ def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> N list_type=0, filter_dm=True, delete_messages=False, - bypass_roles=[0], + bypass_roles=["staff"], enabled=True ) redirects.save() -- cgit v1.2.3 From c082ad818608fd52238e61f9c69d99cfb2aa503b Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Dec 2021 20:18:02 +0200 Subject: Merged infraction and notification settings in JSON The settings for infracting and notifying the user were merged under one field, which is renamed to "infraction_and_notification". The only place which sends a message in the server by default is the antimalware, the rest try to DM the user first, and antimalware can do the same. This avoids complications which may result from the filtering cog trying to send two messages: one for the defined server message, and another for a failed DM. 
--- .../0079_add_server_message_and_alert_fields.py | 22 ++++----------- pydis_site/apps/api/models/bot/filters.py | 25 ++++------------- pydis_site/apps/api/serializers.py | 28 ++++++++----------- pydis_site/apps/api/viewsets/bot/filters.py | 32 ++++++++-------------- 4 files changed, 34 insertions(+), 73 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py index f9803bd3..c6299cb9 100644 --- a/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py +++ b/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py @@ -15,8 +15,7 @@ def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> N } for filter_list in FilterList.objects.all(): filter_list.send_alert = change_map.get(filter_list.name) - filter_list.server_message_text = "" - filter_list.server_message_embed = "" + filter_list.dm_embed = "" filter_list.save() @@ -24,7 +23,6 @@ def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> FilterList = apps.get_model("api", "FilterList") for filter_list in FilterList.objects.all(): filter_list.send_alert = True - filter_list.server_message_text = None filter_list.server_message_embed = None filter_list.save() @@ -42,13 +40,8 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='filter', - name='server_message_embed', - field=models.CharField(help_text='The content of the server message embed', max_length=100, null=True), - ), - migrations.AddField( - model_name='filter', - name='server_message_text', - field=models.CharField(help_text='The message to send on the server', max_length=100, null=True), + name='dm_embed', + field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), ), migrations.AddField( model_name='filterlist', @@ -57,13 +50,8 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='filterlist', - name='server_message_embed', - field=models.CharField(help_text='The content of the server message embed', max_length=100, null=True), - ), - migrations.AddField( - model_name='filterlist', - name='server_message_text', - field=models.CharField(help_text='The message to send on the server', max_length=100, null=True), + name='dm_embed', + field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), ), migrations.RunPython(migrate_filterlist, unmigrate_filterlist) ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 92251ee4..97af21f8 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -50,6 +50,11 @@ class FilterSettingsMixin(models.Model): null=True, help_text="The DM to send to a user triggering this filter." ) + dm_embed = models.CharField( + max_length=2000, + help_text="The content of the DM embed", + null=True + ) infraction_type = models.CharField( choices=Infraction.TYPE_CHOICES, max_length=9, @@ -111,16 +116,6 @@ class FilterList(FilterSettingsMixin): null=False, default=True ) - server_message_text = models.CharField( - max_length=100, - help_text="The message to send on the server", - null=True - ) - server_message_embed = models.CharField( - max_length=100, - help_text="The content of the server message embed", - null=True - ) # Where a filter should apply. 
# # The resolution is done in the following order: @@ -183,16 +178,6 @@ class Filter(FilterSettingsMixin): help_text="Whether alert should be sent.", null=True ) - server_message_text = models.CharField( - max_length=100, - help_text="The message to send on the server", - null=True - ) - server_message_embed = models.CharField( - max_length=100, - help_text="The content of the server message embed", - null=True - ) # Check FilterList model for information about these properties. enabled_channels = ArrayField(models.IntegerField(), null=True) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 66236d92..91aac822 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -142,13 +142,18 @@ BASE_SETTINGS_FIELDS = ( "delete_messages", "send_alert" ) -INFRACTION_FIELDS = ("infraction_type", "infraction_reason", "infraction_duration", "dm_content") +INFRACTION_AND_NOTIFICATION_FIELDS = ( + "infraction_type", + "infraction_reason", + "infraction_duration", + "dm_content", + "dm_embed" +) CHANNEL_SCOPE_FIELDS = ( "disabled_channels", "disabled_categories", "enabled_channels", ) -SERVER_MESSAGE_FIELDS = ("server_message_text", "server_message_embed") MENTIONS_FIELDS = ("ping_type", "dm_ping_type") SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS @@ -208,8 +213,10 @@ class FilterSerializer(ModelSerializer): schema_settings = { "settings": {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - | {"infraction": {name: getattr(instance, name) for name in INFRACTION_FIELDS}} | { + "infraction_and_notification": + {name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS} + } | { "channel_scope": {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} } | { @@ -219,12 +226,6 @@ class FilterSerializer(ModelSerializer): for schema_field_name in MENTIONS_FIELDS } } - } | { - "server_message": - { - schema_field_name: getattr(instance, schema_field_name) - for schema_field_name in SERVER_MESSAGE_FIELDS - } } schema_base = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} | \ {"filter_list": instance.filter_list.id} @@ -306,8 +307,8 @@ class FilterListSerializer(ModelSerializer): | {"filters": filters} schema_settings_base = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} schema_settings_categories = { - "infraction": - {name: getattr(instance, name) for name in INFRACTION_FIELDS}} \ + "infraction_and_notification": + {name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS}} \ | { "channel_scope": {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} | { @@ -315,11 +316,6 @@ class FilterListSerializer(ModelSerializer): schema_field_name: getattr(instance, schema_field_name) for schema_field_name in MENTIONS_FIELDS } - } | { - "server_message": { - schema_field_name: getattr(instance, schema_field_name) - for schema_field_name in SERVER_MESSAGE_FIELDS - } } return schema_base | {"settings": schema_settings_base | schema_settings_categories} diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index e52cd4e5..dd9a7d87 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -37,10 +37,12 @@ class FilterListViewSet(ModelViewSet): ... "enabled": None ... "send_alert": True, ... "delete_messages": None - ... "infraction": { + ... "infraction_and_notification": { ... "infraction_type": None, ... 
"infraction_reason": "", ... "infraction_duration": None + ... "dm_content": None, + ... "dm_embed": None ... }, ... "channel_scope": { ... "disabled_channels": None, @@ -51,10 +53,6 @@ class FilterListViewSet(ModelViewSet): ... "ping_type": None ... "dm_ping_type": None ... } - ... "server_message": { - ... "server_message_text": None, - ... "server_message_embed": None - ... } ... } ... ... }, @@ -68,10 +66,12 @@ class FilterListViewSet(ModelViewSet): ... "enabled": True ... "delete_messages": True, ... "send_alert": True - ... "infraction": { + ... "infraction_and_notification": { ... "infraction_type": "", ... "infraction_reason": "", ... "infraction_duration": "0.0", + ... "dm_content": "", + ... "dm_embed": "" ... } ... "channel_scope": { ... "disabled_channels": [], @@ -84,10 +84,6 @@ class FilterListViewSet(ModelViewSet): ... ] ... "dm_ping_type": [] ... } - ... "server_message": { - ... "server_message_text": "", - ... "server_message_embed": "" - ... } ... }, ... ... ... ] @@ -117,10 +113,12 @@ class FilterListViewSet(ModelViewSet): ... "enabled": None ... "delete_messages": None, ... "send_alert": None - ... "infraction": { + ... "infraction_and_notification": { ... "infraction_type": None, ... "infraction_reason": "", ... "infraction_duration": None + ... "dm_content": None, + ... "dm_embed": None ... }, ... "channel_scope": { ... "disabled_channels": None, @@ -131,10 +129,6 @@ class FilterListViewSet(ModelViewSet): ... "ping_type": None ... "dm_ping_type": None ... } - ... "server_message": { - ... "server_message_text": None, - ... "server_message_embed": None - ... } ... } ... ... }, @@ -148,10 +142,12 @@ class FilterListViewSet(ModelViewSet): ... "enabled": True ... "delete_messages": True ... "send_alert": True - ... "infraction": { + ... "infraction_and_notification": { ... "infraction_type": "", ... "infraction_reason": "", ... "infraction_duration": "0.0", + ... "dm_content": "", + ... "dm_embed": "" ... } ... "channel_scope": { ... "disabled_channels": [], @@ -164,10 +160,6 @@ class FilterListViewSet(ModelViewSet): ... ] ... "dm_ping_type": [] ... } - ... "server_message": { - ... "server_message_text": "", - ... "server_message_embed": "" - ... } ... 
} #### Status codes -- cgit v1.2.3 From 78e91c433b193682d82bbeecd6e73c2b01964b3d Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 1 Jan 2022 14:41:36 +0200 Subject: Merge migrations and correct filter defaults to be all null --- .../apps/api/migrations/0070_new_filter_schema.py | 4 +- .../0079_add_server_message_and_alert_fields.py | 57 --------------------- .../migrations/0079_dm_embed_and_alert_fields.py | 58 ++++++++++++++++++++++ 3 files changed, 60 insertions(+), 59 deletions(-) delete mode 100644 pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py create mode 100644 pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index f56c29f8..2c15605c 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -60,7 +60,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: enabled=None, dm_content=None, infraction_type=None, - infraction_reason="", + infraction_reason=None, infraction_duration=None, disallowed_channels=None, disallowed_categories=None, @@ -96,7 +96,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), diff --git a/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py deleted file mode 100644 index c6299cb9..00000000 --- a/pydis_site/apps/api/migrations/0079_add_server_message_and_alert_fields.py +++ /dev/null @@ -1,57 +0,0 @@ -# Generated by Django 3.1.14 on 2021-12-19 23:05 -from django.apps.registry import Apps -from django.db import migrations, models -from django.db.backends.base.schema import BaseDatabaseSchemaEditor - - -def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - change_map = { - "tokens": True, - "domains": True, - "invites": True, - "extensions": False, - "redirects": False - } - for filter_list in FilterList.objects.all(): - filter_list.send_alert = change_map.get(filter_list.name) - filter_list.dm_embed = "" - filter_list.save() - - -def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - for filter_list in FilterList.objects.all(): - filter_list.send_alert = True - filter_list.server_message_embed = None - filter_list.save() - - -class Migration(migrations.Migration): - dependencies = [ - ('api', '0078_merge_20211218_2200'), - ] - - operations = [ - migrations.AddField( - model_name='filter', - name='send_alert', - field=models.BooleanField(help_text='Whether alert should be sent.', null=True), - ), - migrations.AddField( - model_name='filter', - name='dm_embed', - field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), - ), - migrations.AddField( - model_name='filterlist', - name='send_alert', - field=models.BooleanField(default=True, help_text='Whether alert should be sent.'), - ), - migrations.AddField( - model_name='filterlist', - name='dm_embed', - field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), - ), - migrations.RunPython(migrate_filterlist, unmigrate_filterlist) - ] diff --git a/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py new file mode 100644 index 00000000..49da62b6 --- /dev/null +++ b/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py @@ -0,0 +1,58 @@ +# Generated by Django 3.1.14 on 2021-12-19 23:05 +from django.apps.registry import Apps +from django.db import migrations, models +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + + +def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + change_map = { + "tokens": True, + "domains": True, + "invites": True, + "extensions": False, + "redirects": False + } + for filter_list in FilterList.objects.all(): + filter_list.send_alert = change_map.get(filter_list.name) + filter_list.dm_embed = "" + filter_list.save() + + +def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + FilterList = apps.get_model("api", "FilterList") + for filter_list in FilterList.objects.all(): + filter_list.send_alert = True + filter_list.server_message_embed = None + filter_list.save() 
+ + +class Migration(migrations.Migration): + dependencies = [ + ('api', '0078_merge_20211213_0552'), + ('api', '0078_merge_20211218_2200'), + ] + + operations = [ + migrations.AddField( + model_name='filter', + name='send_alert', + field=models.BooleanField(help_text='Whether alert should be sent.', null=True), + ), + migrations.AddField( + model_name='filter', + name='dm_embed', + field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), + ), + migrations.AddField( + model_name='filterlist', + name='send_alert', + field=models.BooleanField(default=True, help_text='Whether alert should be sent.'), + ), + migrations.AddField( + model_name='filterlist', + name='dm_embed', + field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), + ), + migrations.RunPython(migrate_filterlist, unmigrate_filterlist) + ] -- cgit v1.2.3 From f30e1d9e4fc420085a1187fa12ac23efccd21663 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 15 Feb 2022 22:05:20 +0200 Subject: Allow filter descriptions to be null --- pydis_site/apps/api/migrations/0070_new_filter_schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py index 2c15605c..f33c112b 100644 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0070_new_filter_schema.py @@ -50,7 +50,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: new_object = filter_.objects.create( content=object_.content, filter_list=list_, - description=object_.comment or "", + description=object_.comment, additional_field=None, ping_type=None, filter_dm=None, @@ -86,7 +86,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), - ('description', models.CharField(help_text='Why this filter has been added.', max_length=200)), + ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), -- cgit v1.2.3 From a28cdded7dabb62d639125dca2320234263809c2 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 15 Feb 2022 22:11:40 +0200 Subject: Use singular nouns for filter list names --- .../0075_prepare_filter_and_filterlist_for_new_filter_schema.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py index 56cbdedb..2a85fa63 100644 --- a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py @@ -7,10 +7,10 @@ from 
django.db.backends.base.schema import BaseDatabaseSchemaEditor def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: FilterList = apps.get_model("api", "FilterList") change_map = { - "filter_token": "tokens", - "domain_name": "domains", - "guild_invite": "invites", - "file_format": "extensions" + "filter_token": "token", + "domain_name": "domain", + "guild_invite": "invite", + "file_format": "extension" } for filter_list in FilterList.objects.all(): if change_map.get(filter_list.name): -- cgit v1.2.3 From 7d22d8427fa73e6209ffcea827d9e460b6c1d985 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Tue, 15 Feb 2022 22:59:04 +0100 Subject: Patch a minor issue with FilterList field naming in migrations --- ...75_prepare_filter_and_filterlist_for_new_filter_schema.py | 12 ++++++------ .../apps/api/migrations/0079_dm_embed_and_alert_fields.py | 10 +++++----- pydis_site/apps/api/serializers.py | 3 ++- 3 files changed, 13 insertions(+), 12 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py index 2a85fa63..1e24b379 100644 --- a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py @@ -17,7 +17,7 @@ def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> N filter_list.name = change_map.get(filter_list.name) filter_list.save() redirects = FilterList( - name="redirects", + name="redirect", ping_type=[], dm_ping_type=[], enabled_channels=[], @@ -35,16 +35,16 @@ def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> N def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: FilterList = apps.get_model("api", "FilterList") change_map = { - "tokens": "filter_token", - "domains": "domain_name", - "invites": "guild_invite", - "formats": "file_format" + "token": "filter_token", + "domain": "domain_name", + "invite": "guild_invite", + "format": "file_format" } for filter_list in FilterList.objects.all(): if change_map.get(filter_list.name): filter_list.name = change_map.get(filter_list.name) filter_list.save() - FilterList.objects.filter(name="redirects").delete() + FilterList.objects.filter(name="redirect").delete() class Migration(migrations.Migration): diff --git a/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py index 49da62b6..cae175df 100644 --- a/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py +++ b/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py @@ -7,11 +7,11 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: FilterList = apps.get_model("api", "FilterList") change_map = { - "tokens": True, - "domains": True, - "invites": True, - "extensions": False, - "redirects": False + "token": True, + "domain": True, + "invite": True, + "extension": False, + "redirect": False } for filter_list in FilterList.objects.all(): filter_list.send_alert = change_map.get(filter_list.name) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 88b6e2bd..99f2b630 100644 --- a/pydis_site/apps/api/serializers.py 
+++ b/pydis_site/apps/api/serializers.py @@ -215,7 +215,8 @@ class FilterSerializer(ModelSerializer): {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} | { "infraction_and_notification": - {name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS} + {name: getattr(instance, name) + for name in INFRACTION_AND_NOTIFICATION_FIELDS} } | { "channel_scope": {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} -- cgit v1.2.3 From 01ccc1dac80cc2958849d5be90255294f38878fb Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 22 Feb 2022 20:44:24 +0200 Subject: Changed channeld fields to char arrays, merged migrations - The fields concerning channels were changed to contains strings instead of integers in order to allow specifying channels and categories by name. The migrations were merged into a single migration. --- .../apps/api/migrations/0070_new_filter_schema.py | 145 ------------------- .../api/migrations/0074_merge_20211017_0822.py | 14 -- ..._filter_and_filterlist_for_new_filter_schema.py | 95 ------------- .../api/migrations/0078_merge_20211218_2200.py | 14 -- .../migrations/0079_dm_embed_and_alert_fields.py | 58 -------- .../apps/api/migrations/0079_new_filter_schema.py | 156 +++++++++++++++++++++ 6 files changed, 156 insertions(+), 326 deletions(-) delete mode 100644 pydis_site/apps/api/migrations/0070_new_filter_schema.py delete mode 100644 pydis_site/apps/api/migrations/0074_merge_20211017_0822.py delete mode 100644 pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py delete mode 100644 pydis_site/apps/api/migrations/0078_merge_20211218_2200.py delete mode 100644 pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py create mode 100644 pydis_site/apps/api/migrations/0079_new_filter_schema.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0070_new_filter_schema.py b/pydis_site/apps/api/migrations/0070_new_filter_schema.py deleted file mode 100644 index f33c112b..00000000 --- a/pydis_site/apps/api/migrations/0070_new_filter_schema.py +++ /dev/null @@ -1,145 +0,0 @@ -# Modified migration file to migrate existing filters to the new one -from datetime import timedelta - -import django.contrib.postgres.fields -from django.apps.registry import Apps -from django.db import migrations, models -import django.db.models.deletion -from django.db.backends.base.schema import BaseDatabaseSchemaEditor - -import pydis_site.apps.api.models.bot.filters - -OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY')) - - -def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") - filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") - filter_list_old = apps.get_model("api", "FilterListOld") - - for name, type_ in OLD_LIST_NAMES: - objects = filter_list_old.objects.filter(type=name) - if name == "DOMAIN_NAME": - dm_content = "Your URL has been removed because it matched a blacklisted domain: {match}" - elif name == "GUILD_INVITE": - dm_content = "Per Rule 6, your invite link has been removed. 
" \ - "Our server rules can be found here: https://pythondiscord.com/pages/rules" - else: - dm_content = "" - - list_ = filter_list.objects.create( - name=name.lower(), - list_type=1 if type_ == "ALLOW" else 0, - ping_type=(["onduty"] if name != "FILE_FORMAT" else []), - filter_dm=True, - dm_ping_type=[], - delete_messages=(True if name != "FILTER_TOKEN" else False), - bypass_roles=["staff"], - enabled=True, - dm_content=dm_content, - infraction_type="", - infraction_reason="", - infraction_duration=timedelta(seconds=0), - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[] - ) - - for object_ in objects: - new_object = filter_.objects.create( - content=object_.content, - filter_list=list_, - description=object_.comment, - additional_field=None, - ping_type=None, - filter_dm=None, - dm_ping_type=None, - delete_messages=None, - bypass_roles=None, - enabled=None, - dm_content=None, - infraction_type=None, - infraction_reason=None, - infraction_duration=None, - disallowed_channels=None, - disallowed_categories=None, - allowed_channels=None, - allowed_categories=None - ) - new_object.save() - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0069_documentationlink_validators'), - ] - - operations = [ - migrations.RenameModel( - old_name='FilterList', - new_name='FilterListOld' - ), - migrations.CreateModel( - name='Filter', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), - ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), - ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), - ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), - ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), - ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), - ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), - ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), - ('infraction_duration', 
models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), - ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), - ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), - ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), - ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), null=True, size=None)), - ], - ), - migrations.CreateModel( - name='FilterList', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), - ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), - ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), - ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=20), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), - ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), - ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), - ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), - ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), - ('disallowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('disallowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_channels', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ('allowed_categories', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)), - ], - ), - migrations.AddField( - model_name='filter', - name='filter_list', - field=models.ForeignKey(help_text='The filter list containing this filter.', on_delete=django.db.models.deletion.CASCADE, related_name='filters', to='api.FilterList'), - ), - migrations.AddConstraint( - model_name='filterlist', - constraint=models.UniqueConstraint(fields=('name', 'list_type'), name='unique_name_type'), - ), - migrations.RunPython( - code=forward, # Core of the migration - reverse_code=lambda *_: None - ), - migrations.DeleteModel( - name='FilterListOld' - ) - ] diff --git a/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py b/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py deleted file mode 100644 index ae41ac71..00000000 --- a/pydis_site/apps/api/migrations/0074_merge_20211017_0822.py +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by Django 3.0.14 on 2021-10-17 08:22 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0073_otn_allow_GT_and_LT'), - ('api', '0070_new_filter_schema'), - ] - - operations = [ - ] diff --git a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py b/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py deleted file mode 100644 index 1e24b379..00000000 --- a/pydis_site/apps/api/migrations/0075_prepare_filter_and_filterlist_for_new_filter_schema.py +++ /dev/null @@ -1,95 +0,0 @@ -# Generated by Django 3.0.14 on 2021-12-11 23:14 -from django.apps.registry import Apps -from django.db import migrations, models -from django.db.backends.base.schema import BaseDatabaseSchemaEditor - - -def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - change_map = { - "filter_token": "token", - "domain_name": "domain", - "guild_invite": "invite", - "file_format": "extension" - } - for filter_list in FilterList.objects.all(): - if change_map.get(filter_list.name): - filter_list.name = change_map.get(filter_list.name) - filter_list.save() - redirects = FilterList( - name="redirect", - ping_type=[], - dm_ping_type=[], - enabled_channels=[], - disabled_channels=[], - disabled_categories=[], - list_type=0, - filter_dm=True, - delete_messages=False, - bypass_roles=["staff"], - enabled=True - ) - redirects.save() - - -def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - change_map = { - "token": "filter_token", - "domain": "domain_name", - "invite": "guild_invite", - "format": "file_format" - } - for filter_list in FilterList.objects.all(): - if change_map.get(filter_list.name): - filter_list.name = change_map.get(filter_list.name) - filter_list.save() - FilterList.objects.filter(name="redirect").delete() - - -class Migration(migrations.Migration): - dependencies = [ - ('api', '0074_merge_20211017_0822'), - ] - - operations = [ - migrations.RenameField( - model_name='filter', - 
old_name='allowed_categories', - new_name='disabled_categories', - ), - migrations.RenameField( - model_name='filter', - old_name='allowed_channels', - new_name='disabled_channels', - ), - migrations.RenameField( - model_name='filter', - old_name='disallowed_channels', - new_name='enabled_channels', - ), - migrations.RenameField( - model_name='filterlist', - old_name='allowed_categories', - new_name='disabled_categories', - ), - migrations.RenameField( - model_name='filterlist', - old_name='allowed_channels', - new_name='disabled_channels', - ), - migrations.RenameField( - model_name='filterlist', - old_name='disallowed_channels', - new_name='enabled_channels', - ), - migrations.RemoveField( - model_name='filterlist', - name='disallowed_categories', - ), - migrations.RemoveField( - model_name='filter', - name='disallowed_categories', - ), - migrations.RunPython(migrate_filterlist, unmigrate_filterlist) - ] diff --git a/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py b/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py deleted file mode 100644 index 7fe559f5..00000000 --- a/pydis_site/apps/api/migrations/0078_merge_20211218_2200.py +++ /dev/null @@ -1,14 +0,0 @@ -# Generated by Django 3.1.14 on 2021-12-18 22:00 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0075_prepare_filter_and_filterlist_for_new_filter_schema'), - ('api', '0077_use_generic_jsonfield'), - ] - - operations = [ - ] diff --git a/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py b/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py deleted file mode 100644 index cae175df..00000000 --- a/pydis_site/apps/api/migrations/0079_dm_embed_and_alert_fields.py +++ /dev/null @@ -1,58 +0,0 @@ -# Generated by Django 3.1.14 on 2021-12-19 23:05 -from django.apps.registry import Apps -from django.db import migrations, models -from django.db.backends.base.schema import BaseDatabaseSchemaEditor - - -def migrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - change_map = { - "token": True, - "domain": True, - "invite": True, - "extension": False, - "redirect": False - } - for filter_list in FilterList.objects.all(): - filter_list.send_alert = change_map.get(filter_list.name) - filter_list.dm_embed = "" - filter_list.save() - - -def unmigrate_filterlist(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: - FilterList = apps.get_model("api", "FilterList") - for filter_list in FilterList.objects.all(): - filter_list.send_alert = True - filter_list.server_message_embed = None - filter_list.save() - - -class Migration(migrations.Migration): - dependencies = [ - ('api', '0078_merge_20211213_0552'), - ('api', '0078_merge_20211218_2200'), - ] - - operations = [ - migrations.AddField( - model_name='filter', - name='send_alert', - field=models.BooleanField(help_text='Whether alert should be sent.', null=True), - ), - migrations.AddField( - model_name='filter', - name='dm_embed', - field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), - ), - migrations.AddField( - model_name='filterlist', - name='send_alert', - field=models.BooleanField(default=True, help_text='Whether alert should be sent.'), - ), - migrations.AddField( - model_name='filterlist', - name='dm_embed', - field=models.CharField(help_text='The content of the DM embed', max_length=2000, null=True), - ), - migrations.RunPython(migrate_filterlist, 
unmigrate_filterlist) - ] diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py new file mode 100644 index 00000000..94494186 --- /dev/null +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -0,0 +1,156 @@ +# Modified migration file to migrate existing filters to the new one +from datetime import timedelta + +import django.contrib.postgres.fields +from django.apps.registry import Apps +from django.db import migrations, models +import django.db.models.deletion +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + +import pydis_site.apps.api.models.bot.filters + +OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY'), ('REDIRECT', 'DENY')) +change_map = { + "FILTER_TOKEN": "token", + "DOMAIN_NAME": "domain", + "GUILD_INVITE": "invite", + "FILE_FORMAT": "extension", + "REDIRECT": "redirect" +} + + +def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") + filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") + filter_list_old = apps.get_model("api", "FilterListOld") + + for name, type_ in OLD_LIST_NAMES: + objects = filter_list_old.objects.filter(type=name) + if name == "DOMAIN_NAME": + dm_content = "Your URL has been removed because it matched a blacklisted domain: {match}" + elif name == "GUILD_INVITE": + dm_content = "Per Rule 6, your invite link has been removed. " \ + "Our server rules can be found here: https://pythondiscord.com/pages/rules" + else: + dm_content = "" + + list_ = filter_list.objects.create( + name=change_map[name], + list_type=1 if type_ == "ALLOW" else 0, + ping_type=(["Moderators"] if name != "FILE_FORMAT" else []), + filter_dm=True, + dm_ping_type=[], + delete_messages=(True if name != "FILTER_TOKEN" else False), + bypass_roles=["Helpers"], + enabled=True, + dm_content=dm_content, + dm_embed="", + infraction_type="", + infraction_reason="", + infraction_duration=timedelta(seconds=0), + disabled_channels=[], + disabled_categories=(["CODE JAM"] if name in ("FILE_FORMAT", "GUILD_INVITE") else []), + enabled_channels=[], + send_alert=(name in ('GUILD_INVITE', 'DOMAIN_NAME', 'FILTER_TOKEN')) + ) + + for object_ in objects: + new_object = filter_.objects.create( + content=object_.content, + filter_list=list_, + description=object_.comment, + additional_field=None, + ping_type=None, + filter_dm=None, + dm_ping_type=None, + delete_messages=None, + bypass_roles=None, + enabled=None, + dm_content=None, + dm_embed=None, + infraction_type=None, + infraction_reason=None, + infraction_duration=None, + disabled_channels=None, + disabled_categories=None, + enabled_channels=None, + send_alert=None, + ) + new_object.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0078_merge_20211213_0552'), + ] + + operations = [ + migrations.RenameModel( + old_name='FilterList', + new_name='FilterListOld' + ), + migrations.CreateModel( + name='Filter', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), + ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), + ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific 
field.', null=True)), + ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), + ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), + ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), + ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), + ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", null=True, size=None)), + ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", null=True, size=None)), + ('send_alert', models.BooleanField(help_text='Whether an alert should be sent.', null=True)), + ], + ), + migrations.CreateModel( + name='FilterList', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), + ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), + ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), + ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), + ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), + ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), + ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), + ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), + ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", size=None)), + ('send_alert', models.BooleanField(help_text='Whether an alert should be sent.')), + ], + ), + migrations.AddField( + model_name='filter', + name='filter_list', + field=models.ForeignKey(help_text='The filter list containing this filter.', on_delete=django.db.models.deletion.CASCADE, related_name='filters', to='api.FilterList'), + ), + migrations.AddConstraint( + model_name='filterlist', + constraint=models.UniqueConstraint(fields=('name', 'list_type'), name='unique_name_type'), + ), + migrations.RunPython( + code=forward, # Core of the migration + reverse_code=lambda *_: None + ), + migrations.DeleteModel( + name='FilterListOld' + ) + ] -- cgit v1.2.3 From 37f9296322d5aaef6aefc68eb97e6e1d5c0df531 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 23 Feb 2022 23:51:05 +0200 Subject: Extensions list is ALLOW, not DENY --- pydis_site/apps/api/migrations/0079_new_filter_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 94494186..4728ea91 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -9,7 +9,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor import pydis_site.apps.api.models.bot.filters -OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'DENY'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY'), ('REDIRECT', 'DENY')) +OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'ALLOW'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY'), ('REDIRECT', 'DENY')) change_map = { "FILTER_TOKEN": "token", "DOMAIN_NAME": "domain", -- cgit v1.2.3 From a2fcfdf8fd80fc4cfd89be19ffb18a3c1799d2cb Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 24 Feb 2022 21:14:35 +0200 Subject: Create placeholder value for dm embed content in ext list Some value is needed to signal the bot a message should be sent for a blocked extension. The value itself will be changed at runtime, but this allows avoiding the bot code delving into the exact API response format. 
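As a rough illustration of the intent described in this commit message (not code from the patch series), the bot side only needs to treat a non-empty dm_embed as "send an embed" and substitute the placeholder text at runtime; the helper name and message wording below are hypothetical:

from typing import Optional


def build_extension_dm_embed(filter_list_data: dict, blocked_extension: str) -> Optional[str]:
    # Return the embed text to DM for a blocked extension, or None if no embed is configured.
    embed_text = filter_list_data.get("dm_embed") or ""
    if not embed_text:
        # An empty value signals that no embed should be sent for this list.
        return None
    if embed_text == "*Defined at runtime.*":
        # Placeholder stored by the migration: replace it with text built at runtime.
        # The wording below is illustrative only.
        embed_text = f"Attachments with the `.{blocked_extension}` extension are not allowed here."
    return embed_text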
--- pydis_site/apps/api/migrations/0079_new_filter_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 4728ea91..58ed0025 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -44,7 +44,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: bypass_roles=["Helpers"], enabled=True, dm_content=dm_content, - dm_embed="", + dm_embed="" if name != "FILE_FORMAT" else "*Defined at runtime.*", infraction_type="", infraction_reason="", infraction_duration=timedelta(seconds=0), -- cgit v1.2.3 From b0f4b93ee831d0873f134440a6554177cc043feb Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 26 Feb 2022 00:53:28 +0200 Subject: Add invites denylist to the migration --- pydis_site/apps/api/migrations/0079_new_filter_schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 58ed0025..43915edb 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -9,7 +9,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor import pydis_site.apps.api.models.bot.filters -OLD_LIST_NAMES = (('GUILD_INVITE', 'ALLOW'), ('FILE_FORMAT', 'ALLOW'), ('DOMAIN_NAME', 'DENY'), ('FILTER_TOKEN', 'DENY'), ('REDIRECT', 'DENY')) +OLD_LIST_NAMES = (('GUILD_INVITE', True), ('GUILD_INVITE', False), ('FILE_FORMAT', True), ('DOMAIN_NAME', False), ('FILTER_TOKEN', False), ('REDIRECT', False)) change_map = { "FILTER_TOKEN": "token", "DOMAIN_NAME": "domain", @@ -25,7 +25,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: filter_list_old = apps.get_model("api", "FilterListOld") for name, type_ in OLD_LIST_NAMES: - objects = filter_list_old.objects.filter(type=name) + objects = filter_list_old.objects.filter(type=name, allowed=type_) if name == "DOMAIN_NAME": dm_content = "Your URL has been removed because it matched a blacklisted domain: {match}" elif name == "GUILD_INVITE": @@ -36,7 +36,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: list_ = filter_list.objects.create( name=change_map[name], - list_type=1 if type_ == "ALLOW" else 0, + list_type=int(type_), ping_type=(["Moderators"] if name != "FILE_FORMAT" else []), filter_dm=True, dm_ping_type=[], -- cgit v1.2.3 From 02ea9e97f68e5388f7c3ade6ec48b11b272018bf Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 1 Mar 2022 23:49:52 +0200 Subject: Refine DM content for domains --- pydis_site/apps/api/migrations/0079_new_filter_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 43915edb..89f70799 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -27,7 +27,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: for name, type_ in OLD_LIST_NAMES: objects = filter_list_old.objects.filter(type=name, allowed=type_) if name == "DOMAIN_NAME": - dm_content = "Your URL has been removed because it matched a blacklisted 
domain: {match}" + dm_content = "Your message has been removed because it contained a blocked domain: `{domain}`." elif name == "GUILD_INVITE": dm_content = "Per Rule 6, your invite link has been removed. " \ "Our server rules can be found here: https://pythondiscord.com/pages/rules" -- cgit v1.2.3 From a6b8c27e68b529b1060b1213b465457c5c0d685a Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Mon, 7 Mar 2022 20:18:18 +0100 Subject: Add support for storing AoC related data in site --- .../apps/api/migrations/0080_add_aoc_tables.py | 33 +++++++++++ pydis_site/apps/api/models/__init__.py | 2 + pydis_site/apps/api/models/bot/__init__.py | 2 + .../apps/api/models/bot/aoc_completionist_block.py | 21 +++++++ pydis_site/apps/api/models/bot/aoc_link.py | 20 +++++++ pydis_site/apps/api/serializers.py | 22 +++++++ pydis_site/apps/api/urls.py | 10 ++++ pydis_site/apps/api/viewsets/__init__.py | 2 + pydis_site/apps/api/viewsets/bot/__init__.py | 2 + .../api/viewsets/bot/aoc_completionist_block.py | 69 ++++++++++++++++++++++ pydis_site/apps/api/viewsets/bot/aoc_link.py | 69 ++++++++++++++++++++++ 11 files changed, 252 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0080_add_aoc_tables.py create mode 100644 pydis_site/apps/api/models/bot/aoc_completionist_block.py create mode 100644 pydis_site/apps/api/models/bot/aoc_link.py create mode 100644 pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py create mode 100644 pydis_site/apps/api/viewsets/bot/aoc_link.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py new file mode 100644 index 00000000..f129d86f --- /dev/null +++ b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py @@ -0,0 +1,33 @@ +# Generated by Django 3.1.14 on 2022-03-06 16:07 + +from django.db import migrations, models +import django.db.models.deletion +import pydis_site.apps.api.models.mixins + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0079_merge_20220125_2022'), + ] + + operations = [ + migrations.CreateModel( + name='AocCompletionistBlock', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('is_blocked', models.BooleanField(default=True, help_text='Whether this user is actively being blocked from getting the AoC Completionist Role', verbose_name='Blocked')), + ('user', models.ForeignKey(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ], + bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), + ), + migrations.CreateModel( + name='AocAccountLink', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('aoc_username', models.CharField(help_text='The AoC username associated with the Discord User.', max_length=120)), + ('user', models.ForeignKey(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ], + bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), + ), + ] diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index fd5bf220..4f616986 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -10,6 +10,8 @@ from .bot import ( Nomination, NominationEntry, OffensiveMessage, 
+ AocAccountLink, + AocCompletionistBlock, OffTopicChannelName, Reminder, Role, diff --git a/pydis_site/apps/api/models/bot/__init__.py b/pydis_site/apps/api/models/bot/__init__.py index ac864de3..ec0e701c 100644 --- a/pydis_site/apps/api/models/bot/__init__.py +++ b/pydis_site/apps/api/models/bot/__init__.py @@ -5,6 +5,8 @@ from .deleted_message import DeletedMessage from .documentation_link import DocumentationLink from .infraction import Infraction from .message import Message +from .aoc_completionist_block import AocCompletionistBlock +from .aoc_link import AocAccountLink from .message_deletion_context import MessageDeletionContext from .nomination import Nomination, NominationEntry from .off_topic_channel_name import OffTopicChannelName diff --git a/pydis_site/apps/api/models/bot/aoc_completionist_block.py b/pydis_site/apps/api/models/bot/aoc_completionist_block.py new file mode 100644 index 00000000..cac41ff1 --- /dev/null +++ b/pydis_site/apps/api/models/bot/aoc_completionist_block.py @@ -0,0 +1,21 @@ +from django.db import models + +from pydis_site.apps.api.models.bot.user import User +from pydis_site.apps.api.models.mixins import ModelReprMixin + + +class AocCompletionistBlock(ModelReprMixin, models.Model): + """A Discord user blocked from getting the AoC completionist Role.""" + + user = models.ForeignKey( + User, + on_delete=models.CASCADE, + help_text="The user that is blocked from getting the AoC Completionist Role" + ) + + is_blocked = models.BooleanField( + default=True, + help_text="Whether this user is actively being blocked " + "from getting the AoC Completionist Role", + verbose_name="Blocked" + ) diff --git a/pydis_site/apps/api/models/bot/aoc_link.py b/pydis_site/apps/api/models/bot/aoc_link.py new file mode 100644 index 00000000..6c7cc591 --- /dev/null +++ b/pydis_site/apps/api/models/bot/aoc_link.py @@ -0,0 +1,20 @@ +from django.db import models + +from pydis_site.apps.api.models.bot.user import User +from pydis_site.apps.api.models.mixins import ModelReprMixin + + +class AocAccountLink(ModelReprMixin, models.Model): + """An AoC account link for a Discord User.""" + + user = models.ForeignKey( + User, + on_delete=models.CASCADE, + help_text="The user that is blocked from getting the AoC Completionist Role" + ) + + aoc_username = models.CharField( + max_length=120, + help_text="The AoC username associated with the Discord User.", + blank=False + ) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 745aff42..0b0e4237 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -13,6 +13,8 @@ from rest_framework.settings import api_settings from rest_framework.validators import UniqueTogetherValidator from .models import ( + AocAccountLink, + AocCompletionistBlock, BotSetting, DeletedMessage, DocumentationLink, @@ -250,6 +252,26 @@ class ReminderSerializer(ModelSerializer): ) +class AocCompletionistBlockSerializer(ModelSerializer): + """A class providing (de-)serialization of `AocCompletionistBlock` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = AocCompletionistBlock + fields = ("user", "is_blocked") + + +class AocAccountLinkSerializer(ModelSerializer): + """A class providing (de-)serialization of `AocAccountLink` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = AocAccountLink + fields = ("user", "aoc_username") + + class RoleSerializer(ModelSerializer): """A class providing (de-)serialization of 
`Role` instances.""" diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py index b0ab545b..7c55fc92 100644 --- a/pydis_site/apps/api/urls.py +++ b/pydis_site/apps/api/urls.py @@ -3,6 +3,8 @@ from rest_framework.routers import DefaultRouter from .views import HealthcheckView, RulesView from .viewsets import ( + AocAccountLinkViewSet, + AocCompletionistBlockViewSet, BotSettingViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, @@ -34,6 +36,14 @@ bot_router.register( 'documentation-links', DocumentationLinkViewSet ) +bot_router.register( + "aoc-account-links", + AocAccountLinkViewSet +) +bot_router.register( + "aoc-completionist-blocks", + AocCompletionistBlockViewSet +) bot_router.register( 'infractions', InfractionViewSet diff --git a/pydis_site/apps/api/viewsets/__init__.py b/pydis_site/apps/api/viewsets/__init__.py index f133e77f..5fc1d64f 100644 --- a/pydis_site/apps/api/viewsets/__init__.py +++ b/pydis_site/apps/api/viewsets/__init__.py @@ -7,6 +7,8 @@ from .bot import ( InfractionViewSet, NominationViewSet, OffensiveMessageViewSet, + AocAccountLinkViewSet, + AocCompletionistBlockViewSet, OffTopicChannelNameViewSet, ReminderViewSet, RoleViewSet, diff --git a/pydis_site/apps/api/viewsets/bot/__init__.py b/pydis_site/apps/api/viewsets/bot/__init__.py index 84b87eab..f1d84729 100644 --- a/pydis_site/apps/api/viewsets/bot/__init__.py +++ b/pydis_site/apps/api/viewsets/bot/__init__.py @@ -7,6 +7,8 @@ from .infraction import InfractionViewSet from .nomination import NominationViewSet from .off_topic_channel_name import OffTopicChannelNameViewSet from .offensive_message import OffensiveMessageViewSet +from .aoc_link import AocAccountLinkViewSet +from .aoc_completionist_block import AocCompletionistBlockViewSet from .reminder import ReminderViewSet from .role import RoleViewSet from .user import UserViewSet diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py new file mode 100644 index 00000000..53bcb546 --- /dev/null +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -0,0 +1,69 @@ +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework.mixins import ( + CreateModelMixin, DestroyModelMixin, ListModelMixin, RetrieveModelMixin +) +from rest_framework.viewsets import GenericViewSet + +from pydis_site.apps.api.models.bot import AocCompletionistBlock +from pydis_site.apps.api.serializers import AocCompletionistBlockSerializer + + +class AocCompletionistBlockViewSet( + GenericViewSet, CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, ListModelMixin +): + """ + View providing management for Users blocked from getting the AoC completionist Role. + + ## Routes + + ### GET /bot/aoc-completionist-blocks/ + Returns all the AoC completionist blocks + + #### Response format + >>> [ + ... { + ... "user": 2, + ... "is_blocked": False + ... } + ... ] + + + ### GET /bot/aoc-completionist-blocks/ + Retrieve a single Block by User ID + + #### Response format + >>> + ... { + ... "user": 2, + ... "is_blocked": False + ... } + + #### Status codes + - 200: returned on success + - 404: returned if an AoC completionist block with the given user__id was not found. + + ### POST /bot/aoc-completionist-blocks + Adds a single AoC completionist block + + #### Request body + >>> { + ... 'user': int, + ... 'is_blocked': bool + ...
} + + #### Status codes + - 204: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/aoc-completionist-blocks/ + Deletes the AoC Completionist block item with the given `user__id`. + #### Status codes + - 204: returned on success + - 404: if the AoC Completionist block with the given user__id does not exist + + """ + + serializer_class = AocCompletionistBlockSerializer + queryset = AocCompletionistBlock.objects.all() + filter_backends = (DjangoFilterBackend,) + filter_fields = ("user__id",) diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py new file mode 100644 index 00000000..b5b5420e --- /dev/null +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -0,0 +1,69 @@ +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework.mixins import ( + CreateModelMixin, DestroyModelMixin, ListModelMixin, RetrieveModelMixin +) +from rest_framework.viewsets import GenericViewSet + +from pydis_site.apps.api.models.bot import AocAccountLink +from pydis_site.apps.api.serializers import AocAccountLinkSerializer + + +class AocAccountLinkViewSet( + GenericViewSet, CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, ListModelMixin +): + """ + View providing management for Users who linked their AoC accounts to their Discord Account. + + ## Routes + + ### GET /bot/aoc-account-links + Returns all the AoC account links + + #### Response format + >>> [ + ... { + ... "user": 2, + ... "aoc_username": "AoCUser1" + ... } + ... ] + + + ### GET /bot/aoc-account-links + Retrieve an AoC account link by User ID + + #### Response format + >>> + ... { + ... "user": 2, + ... "aoc_username": "AoCUser1" + ... } + + #### Status codes + - 200: returned on success + - 404: returned if an AoC account link with the given user__id was not found. + + ### POST /bot/aoc-account-links + Adds a single AoC account link + + #### Request body + >>> { + ... 'user': int, + ... 'aoc_username': str + ... } + + #### Status codes + - 204: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/aoc-account-links/ + Deletes the AoC account link item with the given `user__id`. + #### Status codes + - 204: returned on success + - 404: if the AoC account link with the given user__id does not exist + + """ + + serializer_class = AocAccountLinkSerializer + queryset = AocAccountLink.objects.all() + filter_backends = (DjangoFilterBackend,) + filter_fields = ("user__id",) -- cgit v1.2.3 From 19c6e7b66cf5078a198f4a70fec38d66dd029564 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Tue, 8 Mar 2022 15:54:34 +0100 Subject: Enhance comments and table structure in AoC related modules - Set the user reference to be a OneToOne relation on the AocCompletionistBlock and AocAccountLink tables.
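For context on the switch to OneToOneField, a minimal usage sketch (not code from this patch series, and assuming the import paths shown in the added model and viewset files): each Discord user can hold at most one AoC account link, the database rejects a second row for the same user, and the reverse accessor yields a single object rather than a manager. The user ID below is an invented example:

from django.db import IntegrityError

from pydis_site.apps.api.models.bot import AocAccountLink
from pydis_site.apps.api.models.bot.user import User

user = User.objects.get(id=123456789012345678)  # hypothetical Discord user ID

AocAccountLink.objects.create(user=user, aoc_username="AoCUser1")

# A second link for the same user violates the unique constraint backing OneToOneField:
try:
    AocAccountLink.objects.create(user=user, aoc_username="SomeOtherName")
except IntegrityError:
    pass  # rejected at the database level

# Reverse access returns the single related object instead of a RelatedManager:
print(user.aocaccountlink.aoc_username)  # "AoCUser1"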
--- pydis_site/apps/api/migrations/0080_add_aoc_tables.py | 4 ++-- pydis_site/apps/api/models/bot/aoc_completionist_block.py | 2 +- pydis_site/apps/api/models/bot/aoc_link.py | 2 +- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 4 ++-- pydis_site/apps/api/viewsets/bot/aoc_link.py | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py index f129d86f..c58a5d29 100644 --- a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py +++ b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py @@ -17,7 +17,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('is_blocked', models.BooleanField(default=True, help_text='Whether this user is actively being blocked from getting the AoC Completionist Role', verbose_name='Blocked')), - ('user', models.ForeignKey(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), ], bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), ), @@ -26,7 +26,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('aoc_username', models.CharField(help_text='The AoC username associated with the Discord User.', max_length=120)), - ('user', models.ForeignKey(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), ], bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), ), diff --git a/pydis_site/apps/api/models/bot/aoc_completionist_block.py b/pydis_site/apps/api/models/bot/aoc_completionist_block.py index cac41ff1..a89f9760 100644 --- a/pydis_site/apps/api/models/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/models/bot/aoc_completionist_block.py @@ -7,7 +7,7 @@ from pydis_site.apps.api.models.mixins import ModelReprMixin class AocCompletionistBlock(ModelReprMixin, models.Model): """A Discord user blocked from getting the AoC completionist Role.""" - user = models.ForeignKey( + user = models.OneToOneField( User, on_delete=models.CASCADE, help_text="The user that is blocked from getting the AoC Completionist Role" diff --git a/pydis_site/apps/api/models/bot/aoc_link.py b/pydis_site/apps/api/models/bot/aoc_link.py index 6c7cc591..9b47456d 100644 --- a/pydis_site/apps/api/models/bot/aoc_link.py +++ b/pydis_site/apps/api/models/bot/aoc_link.py @@ -7,7 +7,7 @@ from pydis_site.apps.api.models.mixins import ModelReprMixin class AocAccountLink(ModelReprMixin, models.Model): """An AoC account link for a Discord User.""" - user = models.ForeignKey( + user = models.OneToOneField( User, on_delete=models.CASCADE, help_text="The user that is blocked from getting the AoC Completionist Role" diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index 53bcb546..c5568129 100644 --- 
a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -47,8 +47,8 @@ class AocCompletionistBlockViewSet( #### Request body >>> { - ... 'user': int, - ... 'is_blocked': bool + ... 'user': int, + ... 'is_blocked': bool ... } #### Status codes diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index b5b5420e..263b548d 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -47,8 +47,8 @@ class AocAccountLinkViewSet( #### Request body >>> { - ... 'user': int, - ... 'aoc_username': str + ... 'user': int, + ... 'aoc_username': str ... } #### Status codes -- cgit v1.2.3 From 955122d028b81529fffbf73f9298d0f06cb2e412 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 9 Mar 2022 04:02:29 +0200 Subject: Change ping fields names --- pydis_site/apps/api/migrations/0079_new_filter_schema.py | 16 ++++++++-------- pydis_site/apps/api/models/bot/filters.py | 8 ++++---- pydis_site/apps/api/serializers.py | 6 +++--- 3 files changed, 15 insertions(+), 15 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 89f70799..b67740d2 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -37,9 +37,9 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: list_ = filter_list.objects.create( name=change_map[name], list_type=int(type_), - ping_type=(["Moderators"] if name != "FILE_FORMAT" else []), + guild_pings=(["Moderators"] if name != "FILE_FORMAT" else []), filter_dm=True, - dm_ping_type=[], + dm_pings=[], delete_messages=(True if name != "FILTER_TOKEN" else False), bypass_roles=["Helpers"], enabled=True, @@ -60,9 +60,9 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: filter_list=list_, description=object_.comment, additional_field=None, - ping_type=None, + guild_pings=None, filter_dm=None, - dm_ping_type=None, + dm_pings=None, delete_messages=None, bypass_roles=None, enabled=None, @@ -97,9 +97,9 @@ class Migration(migrations.Migration): ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), - ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), - ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('dm_pings', 
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), @@ -120,9 +120,9 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), - ('ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), - ('dm_ping_type', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 97af21f8..4dbf1875 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -84,14 +84,14 @@ class FilterList(FilterSettingsMixin): choices=FilterListType.choices, help_text="Whether this list is an allowlist or denylist" ) - ping_type = ArrayField( + guild_pings = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=False ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=False) - dm_ping_type = ArrayField( + dm_pings = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", @@ -147,14 +147,14 @@ class Filter(FilterSettingsMixin): FilterList, models.CASCADE, 
related_name="filters", help_text="The filter list containing this filter." ) - ping_type = ArrayField( + guild_pings = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=True ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) - dm_ping_type = ArrayField( + dm_pings = ArrayField( models.CharField(max_length=20), validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 99f2b630..5a637976 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -121,9 +121,9 @@ ALWAYS_OPTIONAL_SETTINGS = ( ) REQUIRED_FOR_FILTER_LIST_SETTINGS = ( - 'ping_type', + 'guild_pings', 'filter_dm', - 'dm_ping_type', + 'dm_pings', 'delete_messages', 'bypass_roles', 'enabled', @@ -154,7 +154,7 @@ CHANNEL_SCOPE_FIELDS = ( "disabled_categories", "enabled_channels", ) -MENTIONS_FIELDS = ("ping_type", "dm_ping_type") +MENTIONS_FIELDS = ("guild_pings", "dm_pings") SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS -- cgit v1.2.3 From b93dce5abcf225579b9407358f938ca3932e67a2 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 9 Mar 2022 19:37:30 +0100 Subject: Add reason field to AoC completionist block table --- pydis_site/apps/api/migrations/0080_add_aoc_tables.py | 1 + pydis_site/apps/api/models/bot/aoc_completionist_block.py | 4 ++++ pydis_site/apps/api/serializers.py | 2 +- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 7 +++++-- 4 files changed, 11 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py index c58a5d29..917c5b7f 100644 --- a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py +++ b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py @@ -17,6 +17,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('is_blocked', models.BooleanField(default=True, help_text='Whether this user is actively being blocked from getting the AoC Completionist Role', verbose_name='Blocked')), + ('reason', models.TextField(help_text='The reason for the AoC Completionist Role Block.', null=True)), ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), ], bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), diff --git a/pydis_site/apps/api/models/bot/aoc_completionist_block.py b/pydis_site/apps/api/models/bot/aoc_completionist_block.py index a89f9760..6605cbc4 100644 --- a/pydis_site/apps/api/models/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/models/bot/aoc_completionist_block.py @@ -19,3 +19,7 @@ class AocCompletionistBlock(ModelReprMixin, models.Model): "from getting the AoC Completionist Role", verbose_name="Blocked" ) + reason = models.TextField( + null=True, + help_text="The reason for the AoC Completionist Role Block." 
+ ) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 0b0e4237..c97f7dba 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -259,7 +259,7 @@ class AocCompletionistBlockSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = AocCompletionistBlock - fields = ("user", "is_blocked") + fields = ("user", "is_blocked", "reason") class AocAccountLinkSerializer(ModelSerializer): diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index c5568129..8e7d821c 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -24,6 +24,7 @@ class AocCompletionistBlockViewSet( ... { ... "user": 2, ... "is_blocked": False + ... "reason": "Too good to be true" ... } ... ] @@ -36,6 +37,7 @@ class AocCompletionistBlockViewSet( ... { ... "user": 2, ... "is_blocked": False + ... "reason": "Too good to be true" ... } #### Status codes @@ -47,8 +49,9 @@ class AocCompletionistBlockViewSet( #### Request body >>> { - ... 'user': int, - ... 'is_blocked': bool + ... "user": int, + ... "is_blocked": bool + ... "reason": string ... } #### Status codes -- cgit v1.2.3 From d18d0198f2a43066b7f6cb9542a25adea6e6b3f4 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 9 Mar 2022 23:07:38 +0100 Subject: Patch AoC tables to use the Discord user as PK. --- pydis_site/apps/api/migrations/0080_add_aoc_tables.py | 16 +++++++--------- .../apps/api/models/bot/aoc_completionist_block.py | 3 ++- pydis_site/apps/api/models/bot/aoc_link.py | 3 ++- pydis_site/apps/api/viewsets/bot/aoc_link.py | 8 +++++--- 4 files changed, 16 insertions(+), 14 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py index 917c5b7f..2c0c689a 100644 --- a/pydis_site/apps/api/migrations/0080_add_aoc_tables.py +++ b/pydis_site/apps/api/migrations/0080_add_aoc_tables.py @@ -13,21 +13,19 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='AocCompletionistBlock', + name='AocAccountLink', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_blocked', models.BooleanField(default=True, help_text='Whether this user is actively being blocked from getting the AoC Completionist Role', verbose_name='Blocked')), - ('reason', models.TextField(help_text='The reason for the AoC Completionist Role Block.', null=True)), - ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='api.user')), + ('aoc_username', models.CharField(help_text='The AoC username associated with the Discord User.', max_length=120)), ], bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), ), migrations.CreateModel( - name='AocAccountLink', + name='AocCompletionistBlock', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('aoc_username', models.CharField(help_text='The AoC username associated with the 
Discord User.', max_length=120)), - ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, to='api.user')), + ('user', models.OneToOneField(help_text='The user that is blocked from getting the AoC Completionist Role', on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='api.user')), + ('is_blocked', models.BooleanField(default=True, help_text='Whether this user is actively being blocked from getting the AoC Completionist Role', verbose_name='Blocked')), + ('reason', models.TextField(help_text='The reason for the AoC Completionist Role Block.', null=True)), ], bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), ), diff --git a/pydis_site/apps/api/models/bot/aoc_completionist_block.py b/pydis_site/apps/api/models/bot/aoc_completionist_block.py index 6605cbc4..acbc0eba 100644 --- a/pydis_site/apps/api/models/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/models/bot/aoc_completionist_block.py @@ -10,7 +10,8 @@ class AocCompletionistBlock(ModelReprMixin, models.Model): user = models.OneToOneField( User, on_delete=models.CASCADE, - help_text="The user that is blocked from getting the AoC Completionist Role" + help_text="The user that is blocked from getting the AoC Completionist Role", + primary_key=True ) is_blocked = models.BooleanField( diff --git a/pydis_site/apps/api/models/bot/aoc_link.py b/pydis_site/apps/api/models/bot/aoc_link.py index 9b47456d..4e9d4882 100644 --- a/pydis_site/apps/api/models/bot/aoc_link.py +++ b/pydis_site/apps/api/models/bot/aoc_link.py @@ -10,7 +10,8 @@ class AocAccountLink(ModelReprMixin, models.Model): user = models.OneToOneField( User, on_delete=models.CASCADE, - help_text="The user that is blocked from getting the AoC Completionist Role" + help_text="The user that is blocked from getting the AoC Completionist Role", + primary_key=True ) aoc_username = models.CharField( diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index 263b548d..c3fa6854 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -24,7 +24,8 @@ class AocAccountLinkViewSet( ... { ... "user": 2, ... "aoc_username": "AoCUser1" - ... } + ... }, + ... ... ... ] @@ -32,11 +33,12 @@ class AocAccountLinkViewSet( Retrieve a AoC account link by User ID #### Response format - >>> + >>> [ ... { ... "user": 2, ... "aoc_username": "AoCUser1" - ... } + ... }, + ... ] #### Status codes - 200: returned on success -- cgit v1.2.3 From 8c95f13c96d16d6f4d0736ee136563d603926c63 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Thu, 10 Mar 2022 17:28:39 +0100 Subject: Enhance code, documentation consistency in AoC related code Co-authored-by: Mark <1515135+MarkKoz@users.noreply.github.com> --- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 7 ++++--- pydis_site/apps/api/viewsets/bot/aoc_link.py | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index 8e7d821c..d3167d7b 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -23,7 +23,7 @@ class AocCompletionistBlockViewSet( >>> [ ... { ... "user": 2, - ... "is_blocked": False + ... 
"is_blocked": False, ... "reason": "Too good to be true" ... } ... ] @@ -36,7 +36,7 @@ class AocCompletionistBlockViewSet( >>> ... { ... "user": 2, - ... "is_blocked": False + ... "is_blocked": False, ... "reason": "Too good to be true" ... } @@ -50,7 +50,7 @@ class AocCompletionistBlockViewSet( #### Request body >>> { ... "user": int, - ... "is_blocked": bool + ... "is_blocked": bool, ... "reason": string ... } @@ -60,6 +60,7 @@ class AocCompletionistBlockViewSet( ### DELETE /bot/aoc-completionist-blocks/ Deletes the AoC Completionist block item with the given `user__id`. + #### Status codes - 204: returned on success - 404: if the AoC Completionist block with the given user__id does not exist diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index c3fa6854..5f6f3a84 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -59,6 +59,7 @@ class AocAccountLinkViewSet( ### DELETE /bot/aoc-account-links/ Deletes the AoC account link item with the given `user__id`. + #### Status codes - 204: returned on success - 404: if the AoC account link with the given user__id does not exist -- cgit v1.2.3 From 05e2bce1e82e422755396d1e6e489d6792ec0115 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 10 Mar 2022 20:56:47 +0200 Subject: Remove role validation Roles can be either IDs or names, so the current validation is not relevant anymore. Furthermore the ping fields can accept user IDs or names. --- .../apps/api/migrations/0079_new_filter_schema.py | 12 ++++---- pydis_site/apps/api/models/bot/filters.py | 32 ---------------------- pydis_site/apps/api/tests/test_filters.py | 11 -------- 3 files changed, 6 insertions(+), 49 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index b67740d2..053f9782 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -97,11 +97,11 @@ class Migration(migrations.Migration): ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), - ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), - ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field], null=True)), + ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', 
null=True)), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field], null=True)), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), @@ -120,11 +120,11 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), - ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), - ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_ping_field])), + ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None)), ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), - ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, validators=[pydis_site.apps.api.models.bot.filters.validate_bypass_roles_field])), + ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 4dbf1875..13b332d2 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -16,32 +16,6 @@ class FilterListType(models.IntegerChoices): DENY = 0 -# Valid special values in ping related fields -VALID_PINGS = ("everyone", "here", "moderators", "onduty", "admins") -VALID_BYPASS_ROLES = ("staff",) - - -def validate_ping_field(value_list: List[str]) -> None: - """Validate that the values are either a special value or a UID.""" - for value in 
value_list: - # Check if it is a special value - if value in VALID_PINGS: - continue - # Check if it is a UID - if value.isnumeric(): - continue - - raise ValidationError(f"{value!r} isn't a valid ping type.") - - -def validate_bypass_roles_field(value_list: List[str]) -> None: - """Validate that the vclues are either a special value or a Role ID.""" - for value in value_list: - if value.isnumeric() or value in VALID_BYPASS_ROLES: - continue - raise ValidationError(f"{value!r} isn't a valid (bypass) role.") - - class FilterSettingsMixin(models.Model): """Mixin for common settings of a filters and filter lists.""" @@ -86,14 +60,12 @@ class FilterList(FilterSettingsMixin): ) guild_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=False ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=False) dm_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", null=False ) @@ -104,7 +76,6 @@ class FilterList(FilterSettingsMixin): bypass_roles = ArrayField( models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", - validators=(validate_bypass_roles_field,), null=False ) enabled = models.BooleanField( @@ -149,14 +120,12 @@ class Filter(FilterSettingsMixin): ) guild_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers.", null=True ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) dm_pings = ArrayField( models.CharField(max_length=20), - validators=(validate_ping_field,), help_text="Who to ping when this filter triggers on a DM.", null=True ) @@ -167,7 +136,6 @@ class Filter(FilterSettingsMixin): bypass_roles = ArrayField( models.CharField(max_length=100), help_text="Roles and users who can bypass this filter.", - validators=(validate_bypass_roles_field,), null=True ) enabled = models.BooleanField( diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f694053d..5f40c6f9 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -296,14 +296,3 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.delete(f"{sequence.url()}/42") self.assertEqual(response.status_code, 404) - - def test_reject_invalid_ping(self) -> None: - url = reverse('bot:filteroverride-list', host='api') - data = { - "ping_type": ["invalid"] - } - - response = self.client.post(url, data=data) - - self.assertEqual(response.status_code, 400) - self.assertDictEqual(response.json(), {'ping_type': ["'invalid' isn't a valid ping type."]}) -- cgit v1.2.3 From 083cdf3f49805fd3c6d01fe538b7e01e12ca9b79 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Thu, 10 Mar 2022 19:18:05 +0100 Subject: Add new filter field, and patch the docs in AoC viewsets - Add the possibility to filter by `is_blocked` in the AoC completionist block viewset. 
- Patch various tense, and formatting inconsistencies in AoC viewsets --- .../apps/api/viewsets/bot/aoc_completionist_block.py | 6 +++--- pydis_site/apps/api/viewsets/bot/aoc_link.py | 19 +++++++++---------- 2 files changed, 12 insertions(+), 13 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index d3167d7b..3a4cec60 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -42,7 +42,7 @@ class AocCompletionistBlockViewSet( #### Status codes - 200: returned on success - - 404: returned if an AoC completionist block with the given user__id was not found. + - 404: returned if an AoC completionist block with the given `user__id` was not found. ### POST /bot/aoc-completionist-blocks Adds a single AoC completionist block @@ -63,11 +63,11 @@ class AocCompletionistBlockViewSet( #### Status codes - 204: returned on success - - 404: if the AoC Completionist block with the given user__id does not exist + - 404: returned if the AoC Completionist block with the given `user__id` was not found """ serializer_class = AocCompletionistBlockSerializer queryset = AocCompletionistBlock.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id",) + filter_fields = ("user__id", "is_blocked") diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index 5f6f3a84..9f22c1a1 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -29,20 +29,19 @@ class AocAccountLinkViewSet( ... ] - ### GET /bot/aoc-account-links + ### GET /bot/aoc-account-links/ Retrieve a AoC account link by User ID #### Response format - >>> [ - ... { - ... "user": 2, - ... "aoc_username": "AoCUser1" - ... }, - ... ] + >>> + ... { + ... "user": 2, + ... "aoc_username": "AoCUser1" + ... } #### Status codes - 200: returned on success - - 404: returned if an AoC account link with the given user__id was not found. + - 404: returned if an AoC account link with the given `user__id` was not found. ### POST /bot/aoc-account-links Adds a single AoC account link block @@ -55,14 +54,14 @@ class AocAccountLinkViewSet( #### Status codes - 204: returned on success - - 400: if one of the given fields is invalid + - 400: if one of the given fields was invalid ### DELETE /bot/aoc-account-links/ Deletes the AoC account link item with the given `user__id`. 
#### Status codes - 204: returned on success - - 404: if the AoC account link with the given user__id does not exist + - 404: returned if the AoC account link with the given `user__id` was not found """ -- cgit v1.2.3 From c7300a92c885b01a5663913fa73679fc680bfb74 Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Sat, 12 Mar 2022 16:48:54 +0100 Subject: Sync Filter models with relating migrations, adjust code consistency --- .../apps/api/migrations/0079_new_filter_schema.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 57 ++++++++++++++-------- 2 files changed, 39 insertions(+), 20 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0079_new_filter_schema.py b/pydis_site/apps/api/migrations/0079_new_filter_schema.py index 053f9782..bd807f02 100644 --- a/pydis_site/apps/api/migrations/0079_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0079_new_filter_schema.py @@ -129,7 +129,7 @@ class Migration(migrations.Migration): ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 13b332d2..f8bbfd14 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,7 +1,4 @@ -from typing import List - from django.contrib.postgres.fields import ArrayField, JSONField -from django.core.exceptions import ValidationError from django.db import models from django.db.models import UniqueConstraint @@ -37,7 +34,8 @@ class FilterSettingsMixin(models.Model): ) infraction_reason = models.CharField( max_length=1000, - help_text="The reason to give for the infraction." 
+ help_text="The reason to give for the infraction.", + null=True ) infraction_duration = models.DurationField( null=True, @@ -59,13 +57,13 @@ class FilterList(FilterSettingsMixin): help_text="Whether this list is an allowlist or denylist" ) guild_pings = ArrayField( - models.CharField(max_length=20), + models.CharField(max_length=100), help_text="Who to ping when this filter triggers.", null=False ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=False) dm_pings = ArrayField( - models.CharField(max_length=20), + models.CharField(max_length=100), help_text="Who to ping when this filter triggers on a DM.", null=False ) @@ -83,9 +81,7 @@ class FilterList(FilterSettingsMixin): null=False ) send_alert = models.BooleanField( - help_text="Whether alert should be sent.", - null=False, - default=True + help_text="Whether an alert should be sent.", ) # Where a filter should apply. # @@ -93,9 +89,18 @@ class FilterList(FilterSettingsMixin): # - enabled_channels # - disabled_categories # - disabled_channels - enabled_channels = ArrayField(models.IntegerField()) - disabled_channels = ArrayField(models.IntegerField()) - disabled_categories = ArrayField(models.IntegerField()) + enabled_channels = ArrayField( + models.CharField(max_length=100), + help_text="Channels in which to run the filter even if it's disabled in the category." + ) + disabled_channels = ArrayField( + models.CharField(max_length=100), + help_text="Channels in which to not run the filter." + ) + disabled_categories = ArrayField( + models.CharField(max_length=100), + help_text="Categories in which to not run the filter." + ) class Meta: """Constrain name and list_type unique.""" @@ -112,20 +117,23 @@ class Filter(FilterSettingsMixin): """One specific trigger of a list.""" content = models.CharField(max_length=100, help_text="The definition of this filter.") - description = models.CharField(max_length=200, help_text="Why this filter has been added.") + description = models.CharField( + max_length=200, + help_text="Why this filter has been added.", null=True + ) additional_field = JSONField(null=True, help_text="Implementation specific field.") filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." ) guild_pings = ArrayField( - models.CharField(max_length=20), + models.CharField(max_length=100), help_text="Who to ping when this filter triggers.", null=True ) filter_dm = models.BooleanField(help_text="Whether DMs should be filtered.", null=True) dm_pings = ArrayField( - models.CharField(max_length=20), + models.CharField(max_length=100), help_text="Who to ping when this filter triggers on a DM.", null=True ) @@ -143,14 +151,25 @@ class Filter(FilterSettingsMixin): null=True ) send_alert = models.BooleanField( - help_text="Whether alert should be sent.", + help_text="Whether an alert should be sent.", null=True ) # Check FilterList model for information about these properties. 
- enabled_channels = ArrayField(models.IntegerField(), null=True) - disabled_channels = ArrayField(models.IntegerField(), null=True) - disabled_categories = ArrayField(models.IntegerField(), null=True) + enabled_channels = ArrayField( + models.CharField(max_length=100), + help_text="Channels in which to run the filter even if it's disabled in the category.", + null=True + ) + disabled_channels = ArrayField( + models.CharField(max_length=100), + help_text="Channels in which to not run the filter.", null=True + ) + disabled_categories = ArrayField( + models.CharField(max_length=100), + help_text="Categories in which to not run the filter.", + null=True + ) def __str__(self) -> str: return f"Filter {self.content!r}" -- cgit v1.2.3 From 30b7b4204b7e4b711960c952cccc15f667e2252f Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Sat, 19 Feb 2022 17:57:18 +0000 Subject: Move FilterList imports down so they're sorted --- pydis_site/apps/api/models/__init__.py | 2 +- pydis_site/apps/api/models/bot/__init__.py | 2 +- pydis_site/apps/api/urls.py | 16 ++++++++-------- pydis_site/apps/api/viewsets/__init__.py | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index 4f616986..e83473c9 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -1,9 +1,9 @@ # flake8: noqa from .bot import ( - FilterList, BotSetting, DocumentationLink, DeletedMessage, + FilterList, Infraction, Message, MessageDeletionContext, diff --git a/pydis_site/apps/api/models/bot/__init__.py b/pydis_site/apps/api/models/bot/__init__.py index ec0e701c..64676fdb 100644 --- a/pydis_site/apps/api/models/bot/__init__.py +++ b/pydis_site/apps/api/models/bot/__init__.py @@ -1,8 +1,8 @@ # flake8: noqa -from .filter_list import FilterList from .bot_setting import BotSetting from .deleted_message import DeletedMessage from .documentation_link import DocumentationLink +from .filter_list import FilterList from .infraction import Infraction from .message import Message from .aoc_completionist_block import AocCompletionistBlock diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py index 7c55fc92..6b881fac 100644 --- a/pydis_site/apps/api/urls.py +++ b/pydis_site/apps/api/urls.py @@ -21,8 +21,12 @@ from .viewsets import ( # https://www.django-rest-framework.org/api-guide/routers/#defaultrouter bot_router = DefaultRouter(trailing_slash=False) bot_router.register( - 'filter-lists', - FilterListViewSet + "aoc-account-links", + AocAccountLinkViewSet +) +bot_router.register( + "aoc-completionist-blocks", + AocCompletionistBlockViewSet ) bot_router.register( 'bot-settings', @@ -37,12 +41,8 @@ bot_router.register( DocumentationLinkViewSet ) bot_router.register( - "aoc-account-links", - AocAccountLinkViewSet -) -bot_router.register( - "aoc-completionist-blocks", - AocCompletionistBlockViewSet + 'filter-lists', + FilterListViewSet ) bot_router.register( 'infractions', diff --git a/pydis_site/apps/api/viewsets/__init__.py b/pydis_site/apps/api/viewsets/__init__.py index 5fc1d64f..a62a9c01 100644 --- a/pydis_site/apps/api/viewsets/__init__.py +++ b/pydis_site/apps/api/viewsets/__init__.py @@ -1,9 +1,9 @@ # flake8: noqa from .bot import ( - FilterListViewSet, BotSettingViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, + FilterListViewSet, InfractionViewSet, NominationViewSet, OffensiveMessageViewSet, -- cgit v1.2.3 From 0aed5f7913e7ce268ddb56127f84a5386ede5739 
Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Sat, 19 Feb 2022 17:59:26 +0000 Subject: Add support for BumpedThreads to be stored in site Following our move to use Redis as just a cache, this PR allows the site to store a list of threads that need to be bumped. The bot will interact with this within the ThreadBumper cog. --- .../apps/api/migrations/0081_bumpedthread.py | 22 ++++++++ pydis_site/apps/api/models/__init__.py | 1 + pydis_site/apps/api/models/bot/__init__.py | 1 + pydis_site/apps/api/models/bot/bumped_thread.py | 22 ++++++++ pydis_site/apps/api/serializers.py | 11 ++++ pydis_site/apps/api/urls.py | 5 ++ pydis_site/apps/api/viewsets/__init__.py | 1 + pydis_site/apps/api/viewsets/bot/__init__.py | 1 + pydis_site/apps/api/viewsets/bot/bumped_thread.py | 65 ++++++++++++++++++++++ 9 files changed, 129 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0081_bumpedthread.py create mode 100644 pydis_site/apps/api/models/bot/bumped_thread.py create mode 100644 pydis_site/apps/api/viewsets/bot/bumped_thread.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0081_bumpedthread.py b/pydis_site/apps/api/migrations/0081_bumpedthread.py new file mode 100644 index 00000000..03e66cc1 --- /dev/null +++ b/pydis_site/apps/api/migrations/0081_bumpedthread.py @@ -0,0 +1,22 @@ +# Generated by Django 3.1.14 on 2022-02-19 16:26 + +import django.core.validators +from django.db import migrations, models +import pydis_site.apps.api.models.mixins + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0080_add_aoc_tables'), + ] + + operations = [ + migrations.CreateModel( + name='BumpedThread', + fields=[ + ('thread_id', models.BigIntegerField(help_text='The thread ID that should be bumped.', primary_key=True, serialize=False, validators=[django.core.validators.MinValueValidator(limit_value=0, message='Thread IDs cannot be negative.')], verbose_name='Thread ID')), + ], + bases=(pydis_site.apps.api.models.mixins.ModelReprMixin, models.Model), + ), + ] diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index e83473c9..a197e988 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from .bot import ( BotSetting, + BumpedThread, DocumentationLink, DeletedMessage, FilterList, diff --git a/pydis_site/apps/api/models/bot/__init__.py b/pydis_site/apps/api/models/bot/__init__.py index 64676fdb..013bb85e 100644 --- a/pydis_site/apps/api/models/bot/__init__.py +++ b/pydis_site/apps/api/models/bot/__init__.py @@ -1,5 +1,6 @@ # flake8: noqa from .bot_setting import BotSetting +from .bumped_thread import BumpedThread from .deleted_message import DeletedMessage from .documentation_link import DocumentationLink from .filter_list import FilterList diff --git a/pydis_site/apps/api/models/bot/bumped_thread.py b/pydis_site/apps/api/models/bot/bumped_thread.py new file mode 100644 index 00000000..cdf9a950 --- /dev/null +++ b/pydis_site/apps/api/models/bot/bumped_thread.py @@ -0,0 +1,22 @@ +from django.core.validators import MinValueValidator +from django.db import models + +from pydis_site.apps.api.models.mixins import ModelReprMixin + + +class BumpedThread(ModelReprMixin, models.Model): + """A list of thread IDs to be bumped.""" + + thread_id = models.BigIntegerField( + primary_key=True, + help_text=( + "The thread ID that should be bumped." + ), + validators=( + MinValueValidator( + limit_value=0, + message="Thread IDs cannot be negative." 
+ ), + ), + verbose_name="Thread ID", + ) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index c97f7dba..b9e06081 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -16,6 +16,7 @@ from .models import ( AocAccountLink, AocCompletionistBlock, BotSetting, + BumpedThread, DeletedMessage, DocumentationLink, FilterList, @@ -41,6 +42,16 @@ class BotSettingSerializer(ModelSerializer): fields = ('name', 'data') +class BumpedThreadSerializer(ModelSerializer): + """A class providing (de-)serialization of `BumpedThread` instances.""" + + class Meta: + """Metadata defined for the Django REST Framework.""" + + model = BumpedThread + fields = ('thread_id',) + + class DeletedMessageSerializer(ModelSerializer): """ A class providing (de-)serialization of `DeletedMessage` instances. diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py index 6b881fac..1e564b29 100644 --- a/pydis_site/apps/api/urls.py +++ b/pydis_site/apps/api/urls.py @@ -6,6 +6,7 @@ from .viewsets import ( AocAccountLinkViewSet, AocCompletionistBlockViewSet, BotSettingViewSet, + BumpedThreadViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, FilterListViewSet, @@ -32,6 +33,10 @@ bot_router.register( 'bot-settings', BotSettingViewSet ) +bot_router.register( + 'bumped-threads', + BumpedThreadViewSet +) bot_router.register( 'deleted-messages', DeletedMessageViewSet diff --git a/pydis_site/apps/api/viewsets/__init__.py b/pydis_site/apps/api/viewsets/__init__.py index a62a9c01..ec52416a 100644 --- a/pydis_site/apps/api/viewsets/__init__.py +++ b/pydis_site/apps/api/viewsets/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from .bot import ( BotSettingViewSet, + BumpedThreadViewSet, DeletedMessageViewSet, DocumentationLinkViewSet, FilterListViewSet, diff --git a/pydis_site/apps/api/viewsets/bot/__init__.py b/pydis_site/apps/api/viewsets/bot/__init__.py index f1d84729..262aa59f 100644 --- a/pydis_site/apps/api/viewsets/bot/__init__.py +++ b/pydis_site/apps/api/viewsets/bot/__init__.py @@ -1,6 +1,7 @@ # flake8: noqa from .filter_list import FilterListViewSet from .bot_setting import BotSettingViewSet +from .bumped_thread import BumpedThreadViewSet from .deleted_message import DeletedMessageViewSet from .documentation_link import DocumentationLinkViewSet from .infraction import InfractionViewSet diff --git a/pydis_site/apps/api/viewsets/bot/bumped_thread.py b/pydis_site/apps/api/viewsets/bot/bumped_thread.py new file mode 100644 index 00000000..6594ac6e --- /dev/null +++ b/pydis_site/apps/api/viewsets/bot/bumped_thread.py @@ -0,0 +1,65 @@ +from rest_framework.mixins import ( + CreateModelMixin, DestroyModelMixin, ListModelMixin, RetrieveModelMixin +) +from rest_framework.viewsets import GenericViewSet + +from pydis_site.apps.api.models.bot import BumpedThread +from pydis_site.apps.api.serializers import BumpedThreadSerializer + + +class BumpedThreadViewSet( + GenericViewSet, CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, ListModelMixin +): + """ + View providing CRUD (Minus the U) operations on threads to be bumped. + + ## Routes + ### GET /bot/bumped-threads + Returns all BumpedThread items in the database. + + #### Response format + >>> [ + ... { + ... 'thread_id': "941705627405811793", + ... }, + ... ... + ... ] + + #### Status codes + - 200: returned on success + - 401: returned if unauthenticated + + ### GET /bot/bumped-threads/ + Returns a specific BumpedThread item from the database. + + #### Response format + >>> { + ... 
'thread_id': "941705627405811793", + ... } + + #### Status codes + - 200: returned on success + - 404: returned if a BumpedThread with the given thread_id was not found. + + ### POST /bot/bumped-threads + Adds a single BumpedThread item to the database. + + #### Request body + >>> { + ... 'thread_id': int, + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### DELETE /bot/bumped-threads/ + Deletes the BumpedThread item with the given `thread_id`. + + #### Status codes + - 204: returned on success + - 404: if a BumpedThread with the given `thread_id` does not exist + """ + + serializer_class = BumpedThreadSerializer + queryset = BumpedThread.objects.all() -- cgit v1.2.3 From 3e9557056c06a39c077b76d718eb35b99a365711 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 16 Mar 2022 21:46:46 +0000 Subject: Only return list of ints when retrieving all BumpedThreads --- pydis_site/apps/api/serializers.py | 17 +++++++++++++++++ pydis_site/apps/api/viewsets/bot/bumped_thread.py | 7 +------ 2 files changed, 18 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index b9e06081..dfdda915 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -42,12 +42,29 @@ class BotSettingSerializer(ModelSerializer): fields = ('name', 'data') +class ListBumpedThreadSerializer(ListSerializer): + """Custom ListSerializer to override to_representation() when list views are triggered.""" + + def to_representation(self, objects: list[BumpedThread]) -> int: + """ + Used by the `ListModelMixin` to return just the list of bumped thread ids. + + We want to only return the thread_id attribute, hence it is unnecessary + to create a nested dictionary. + + Additionally, this allows bumped thread routes to simply return an + array of thread_id ints instead of objects, saving on bandwidth. + """ + return [obj.thread_id for obj in objects] + + class BumpedThreadSerializer(ModelSerializer): """A class providing (de-)serialization of `BumpedThread` instances.""" class Meta: """Metadata defined for the Django REST Framework.""" + list_serializer_class = ListBumpedThreadSerializer model = BumpedThread fields = ('thread_id',) diff --git a/pydis_site/apps/api/viewsets/bot/bumped_thread.py b/pydis_site/apps/api/viewsets/bot/bumped_thread.py index 6594ac6e..0972379b 100644 --- a/pydis_site/apps/api/viewsets/bot/bumped_thread.py +++ b/pydis_site/apps/api/viewsets/bot/bumped_thread.py @@ -18,12 +18,7 @@ class BumpedThreadViewSet( Returns all BumpedThread items in the database. #### Response format - >>> [ - ... { - ... 'thread_id': "941705627405811793", - ... }, - ... ... - ... ] + >>> list[int] #### Status codes - 200: returned on success -- cgit v1.2.3 From a0180619e77d884aeedb1a89748c0115c8ec8c56 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 16 Mar 2022 21:47:57 +0000 Subject: Don't return the BumpedThread object when retrieving single We only need to check for existence, so sending the full object isn't needed. 
--- pydis_site/apps/api/viewsets/bot/bumped_thread.py | 24 ++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/bumped_thread.py b/pydis_site/apps/api/viewsets/bot/bumped_thread.py index 0972379b..9d77bb6b 100644 --- a/pydis_site/apps/api/viewsets/bot/bumped_thread.py +++ b/pydis_site/apps/api/viewsets/bot/bumped_thread.py @@ -1,6 +1,8 @@ from rest_framework.mixins import ( - CreateModelMixin, DestroyModelMixin, ListModelMixin, RetrieveModelMixin + CreateModelMixin, DestroyModelMixin, ListModelMixin ) +from rest_framework.request import Request +from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet from pydis_site.apps.api.models.bot import BumpedThread @@ -8,7 +10,7 @@ from pydis_site.apps.api.serializers import BumpedThreadSerializer class BumpedThreadViewSet( - GenericViewSet, CreateModelMixin, DestroyModelMixin, RetrieveModelMixin, ListModelMixin + GenericViewSet, CreateModelMixin, DestroyModelMixin, ListModelMixin ): """ View providing CRUD (Minus the U) operations on threads to be bumped. @@ -25,15 +27,10 @@ class BumpedThreadViewSet( - 401: returned if unauthenticated ### GET /bot/bumped-threads/ - Returns a specific BumpedThread item from the database. - - #### Response format - >>> { - ... 'thread_id': "941705627405811793", - ... } + Returns whether a specific BumpedThread exists in the database. #### Status codes - - 200: returned on success + - 204: returned on success - 404: returned if a BumpedThread with the given thread_id was not found. ### POST /bot/bumped-threads @@ -58,3 +55,12 @@ class BumpedThreadViewSet( serializer_class = BumpedThreadSerializer queryset = BumpedThread.objects.all() + + def retrieve(self, request: Request, *args, **kwargs) -> Response: + """ + DRF method for checking if the given BumpedThread exists. + + Called by the Django Rest Framework in response to the corresponding HTTP request. 
+ """ + self.get_object() + return Response(status=204) -- cgit v1.2.3 From cb52e60a631041a08edb213f41cca8befba4bf7e Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 16 Mar 2022 23:17:46 +0000 Subject: Add tests for custom BumpedThread impl --- pydis_site/apps/api/tests/test_bumped_threads.py | 63 ++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 pydis_site/apps/api/tests/test_bumped_threads.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py new file mode 100644 index 00000000..316e3f0b --- /dev/null +++ b/pydis_site/apps/api/tests/test_bumped_threads.py @@ -0,0 +1,63 @@ +from django.urls import reverse + +from .base import AuthenticatedAPITestCase +from ..models import BumpedThread + + +class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase): + def setUp(self): + super().setUp() + self.client.force_authenticate(user=None) + + def test_detail_lookup_returns_401(self): + url = reverse('api:bot:bumpedthread-detail', args=(1,)) + response = self.client.get(url) + + self.assertEqual(response.status_code, 401) + + def test_list_returns_401(self): + url = reverse('api:bot:bumpedthread-list') + response = self.client.get(url) + + self.assertEqual(response.status_code, 401) + + def test_create_returns_401(self): + url = reverse('api:bot:bumpedthread-list') + response = self.client.post(url, {"thread_id": 3}) + + self.assertEqual(response.status_code, 401) + + def test_delete_returns_401(self): + url = reverse('api:bot:bumpedthread-detail', args=(1,)) + response = self.client.delete(url) + + self.assertEqual(response.status_code, 401) + + +class BumpedThreadAPITests(AuthenticatedAPITestCase): + @classmethod + def setUpTestData(cls): + cls.thread1 = BumpedThread.objects.create( + thread_id=1234, + ) + + def test_returns_bumped_threads_as_flat_list(self): + url = reverse('api:bot:bumpedthread-list') + + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), [1234]) + + def test_returns_204_for_existing_data(self): + url = reverse('api:bot:bumpedthread-detail', args=(1234,)) + + response = self.client.get(url) + self.assertEqual(response.status_code, 204) + self.assertEqual(response.content, b"") + + def test_returns_404_for_non_existing_data(self): + url = reverse('api:bot:bumpedthread-detail', args=(42,)) + + response = self.client.get(url) + self.assertEqual(response.status_code, 404) + self.assertEqual(response.json(), {"detail": "Not found."}) -- cgit v1.2.3 From 429d98a5349b55e63f93e6192d8b2b35262dc60b Mon Sep 17 00:00:00 2001 From: ChrisJL Date: Fri, 18 Mar 2022 15:11:34 +0000 Subject: fixup: don't use "We" in docstring Co-authored-by: Mark <1515135+MarkKoz@users.noreply.github.com> --- pydis_site/apps/api/serializers.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index dfdda915..e53ccffa 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -49,8 +49,7 @@ class ListBumpedThreadSerializer(ListSerializer): """ Used by the `ListModelMixin` to return just the list of bumped thread ids. - We want to only return the thread_id attribute, hence it is unnecessary - to create a nested dictionary. + Only the thread_id field is useful, hence it is unnecessary to create a nested dictionary. 
Additionally, this allows bumped thread routes to simply return an array of thread_id ints instead of objects, saving on bandwidth. -- cgit v1.2.3 From 3ff628ad44b80f1c5483832f72ee8b63bcbc4fdb Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Tue, 22 Mar 2022 18:55:02 +0100 Subject: Add UniqueConstraint to the Filter model - The UniqueConstraint includes every field, except for id and description. --- .../migrations/0080_unique_constraint_filters.py | 36 ++++++++++++++++++++++ pydis_site/apps/api/models/bot/filters.py | 28 ++++++++++++++++- 2 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 pydis_site/apps/api/migrations/0080_unique_constraint_filters.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0080_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0080_unique_constraint_filters.py new file mode 100644 index 00000000..0b3b4162 --- /dev/null +++ b/pydis_site/apps/api/migrations/0080_unique_constraint_filters.py @@ -0,0 +1,36 @@ +# Generated by Django 3.1.14 on 2022-03-22 16:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0079_new_filter_schema'), + ] + + operations = [ + migrations.AddConstraint( + model_name='filter', + constraint=models.UniqueConstraint(fields=( + 'dm_content', + 'dm_embed', + 'infraction_type', + 'infraction_reason', + 'infraction_duration', + 'content', + 'additional_field', + 'filter_list', + 'guild_pings', + 'filter_dm', + 'dm_pings', + 'delete_messages', + 'bypass_roles', + 'enabled', + 'send_alert', + 'enabled_channels', + 'disabled_channels', + 'disabled_categories' + ), name='unique_filters'), + ), + ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index f8bbfd14..708ceadc 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -113,7 +113,7 @@ class FilterList(FilterSettingsMixin): return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" -class Filter(FilterSettingsMixin): +class FilterBase(FilterSettingsMixin): """One specific trigger of a list.""" content = models.CharField(max_length=100, help_text="The definition of this filter.") @@ -173,3 +173,29 @@ class Filter(FilterSettingsMixin): def __str__(self) -> str: return f"Filter {self.content!r}" + + class Meta: + """Metaclass for FilterBase to make it abstract model.""" + + abstract = True + + +class Filter(FilterBase): + """ + The main Filter models based on `FilterBase`. + + The purpose to have this model is to have access to the Fields of the Filter model + and set the unique constraint based on those fields. 
+ """ + + class Meta: + """Metaclass Filter to set the unique constraint.""" + + constraints = ( + UniqueConstraint( + fields=tuple( + [field.name for field in FilterBase._meta.fields + if field.name != "id" and field.name != "description"] + ), + name="unique_filters"), + ) -- cgit v1.2.3 From a4eda4a2da65a1a7f903a8c5b1da37c366baee0b Mon Sep 17 00:00:00 2001 From: D0rs4n <41237606+D0rs4n@users.noreply.github.com> Date: Wed, 13 Apr 2022 23:13:02 +0200 Subject: Adjust filtering settings for the AoC link viewset --- pydis_site/apps/api/viewsets/bot/aoc_link.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index 9f22c1a1..c7a96629 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -68,4 +68,4 @@ class AocAccountLinkViewSet( serializer_class = AocAccountLinkSerializer queryset = AocAccountLink.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id",) + filter_fields = ("user__id", "aoc_username") -- cgit v1.2.3 From ebad433d4fa299085fd3bbba4a349de3088fb5d4 Mon Sep 17 00:00:00 2001 From: mina Date: Thu, 21 Apr 2022 17:51:57 -0400 Subject: Add support for BIG SOLIDUS unicode characters for off topic names We must add support for all B I G S O L I D U S. This is necessary. --- .../apps/api/migrations/0082_otn_allow_big_solidus.py | 19 +++++++++++++++++++ .../apps/api/models/bot/off_topic_channel_name.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py b/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py new file mode 100644 index 00000000..abbb98ec --- /dev/null +++ b/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py @@ -0,0 +1,19 @@ +# Generated by Django 3.1.14 on 2022-04-21 23:29 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0081_bumpedthread'), + ] + + operations = [ + migrations.AlterField( + model_name='offtopicchannelname', + name='name', + field=models.CharField(help_text='The actual channel name that will be used on our Discord server.', max_length=96, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(regex="^[a-z0-9\\U0001d5a0-\\U0001d5b9-ǃ?’'<>⧹⧸]+$")]), + ), + ] diff --git a/pydis_site/apps/api/models/bot/off_topic_channel_name.py b/pydis_site/apps/api/models/bot/off_topic_channel_name.py index e9fec114..b380efad 100644 --- a/pydis_site/apps/api/models/bot/off_topic_channel_name.py +++ b/pydis_site/apps/api/models/bot/off_topic_channel_name.py @@ -11,7 +11,7 @@ class OffTopicChannelName(ModelReprMixin, models.Model): primary_key=True, max_length=96, validators=( - RegexValidator(regex=r"^[a-z0-9\U0001d5a0-\U0001d5b9-ǃ?’'<>]+$"), + RegexValidator(regex=r"^[a-z0-9\U0001d5a0-\U0001d5b9-ǃ?’'<>⧹⧸]+$"), ), help_text="The actual channel name that will be used on our Discord server." 
) -- cgit v1.2.3 From d4f717ec186ffeedf7bdeb4991868160f1540b83 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Thu, 30 Jun 2022 10:48:33 +0100 Subject: Remove embed validators for deleted messages These caused more harm than they were worth, as every time Discord updated a behaviour of an embed we would get errors and need to update the validation. Instead, we should just accept whatever Discord gives us as correct --- .../api/migrations/0083_remove_embed_validation.py | 19 ++++++++++++++ pydis_site/apps/api/models/bot/message.py | 5 +- pydis_site/apps/api/models/utils.py | 172 ---------------- pydis_site/apps/api/tests/test_validators.py | 229 --------------------- 4 files changed, 20 insertions(+), 405 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0083_remove_embed_validation.py delete mode 100644 pydis_site/apps/api/models/utils.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0083_remove_embed_validation.py b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py new file mode 100644 index 00000000..e835bb66 --- /dev/null +++ b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py @@ -0,0 +1,19 @@ +# Generated by Django 3.1.14 on 2022-06-30 09:41 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0082_otn_allow_big_solidus'), + ] + + operations = [ + migrations.AlterField( + model_name='deletedmessage', + name='embeds', + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(), blank=True, help_text='Embeds attached to this message.', size=None), + ), + ] diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index bab3368d..bfa54721 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -7,7 +7,6 @@ from django.utils import timezone from pydis_site.apps.api.models.bot.user import User from pydis_site.apps.api.models.mixins import ModelReprMixin -from pydis_site.apps.api.models.utils import validate_embed class Message(ModelReprMixin, models.Model): @@ -48,9 +47,7 @@ class Message(ModelReprMixin, models.Model): blank=True ) embeds = pgfields.ArrayField( - models.JSONField( - validators=(validate_embed,) - ), + models.JSONField(), blank=True, help_text="Embeds attached to this message." 
) diff --git a/pydis_site/apps/api/models/utils.py b/pydis_site/apps/api/models/utils.py deleted file mode 100644 index 859394d2..00000000 --- a/pydis_site/apps/api/models/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -from collections.abc import Mapping -from typing import Any, Dict - -from django.core.exceptions import ValidationError -from django.core.validators import MaxLengthValidator, MinLengthValidator - - -def is_bool_validator(value: Any) -> None: - """Validates if a given value is of type bool.""" - if not isinstance(value, bool): - raise ValidationError(f"This field must be of type bool, not {type(value)}.") - - -def validate_embed_fields(fields: dict) -> None: - """Raises a ValidationError if any of the given embed fields is invalid.""" - field_validators = { - 'name': (MaxLengthValidator(limit_value=256),), - 'value': (MaxLengthValidator(limit_value=1024),), - 'inline': (is_bool_validator,), - } - - required_fields = ('name', 'value') - - for field in fields: - if not isinstance(field, Mapping): - raise ValidationError("Embed fields must be a mapping.") - - if not all(required_field in field for required_field in required_fields): - raise ValidationError( - f"Embed fields must contain the following fields: {', '.join(required_fields)}." - ) - - for field_name, value in field.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed field field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_footer(footer: Dict[str, str]) -> None: - """Raises a ValidationError if the given footer is invalid.""" - field_validators = { - 'text': ( - MinLengthValidator( - limit_value=1, - message="Footer text must not be empty." - ), - MaxLengthValidator(limit_value=2048) - ), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(footer, Mapping): - raise ValidationError("Embed footer must be a mapping.") - - for field_name, value in footer.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed footer field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed_author(author: Any) -> None: - """Raises a ValidationError if the given author is invalid.""" - field_validators = { - 'name': ( - MinLengthValidator( - limit_value=1, - message="Embed author name must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'url': (), - 'icon_url': (), - 'proxy_icon_url': () - } - - if not isinstance(author, Mapping): - raise ValidationError("Embed author must be a mapping.") - - for field_name, value in author.items(): - if field_name not in field_validators: - raise ValidationError(f"Unknown embed author field: {field_name!r}.") - - for validator in field_validators[field_name]: - validator(value) - - -def validate_embed(embed: Any) -> None: - """ - Validate a JSON document containing an embed as possible to send on Discord. - - This attempts to rebuild the validation used by Discord - as well as possible by checking for various embed limits so we can - ensure that any embed we store here will also be accepted as a - valid embed by the Discord API. - - Using this directly is possible, although not intended - you usually - stick this onto the `validators` keyword argument of model fields. - - Example: - - >>> from django.db import models - >>> from pydis_site.apps.api.models.utils import validate_embed - >>> class MyMessage(models.Model): - ... embed = models.JSONField( - ... validators=( - ... 
validate_embed, - ... ) - ... ) - ... # ... - ... - - Args: - embed (Any): - A dictionary describing the contents of this embed. - See the official documentation for a full reference - of accepted keys by this dictionary: - https://discordapp.com/developers/docs/resources/channel#embed-object - - Raises: - ValidationError: - In case the given embed is deemed invalid, a `ValidationError` - is raised which in turn will allow Django to display errors - as appropriate. - """ - all_keys = { - 'title', 'type', 'description', 'url', 'timestamp', - 'color', 'footer', 'image', 'thumbnail', 'video', - 'provider', 'author', 'fields' - } - one_required_of = {'description', 'fields', 'image', 'title', 'video'} - field_validators = { - 'title': ( - MinLengthValidator( - limit_value=1, - message="Embed title must not be empty." - ), - MaxLengthValidator(limit_value=256) - ), - 'description': (MaxLengthValidator(limit_value=4096),), - 'fields': ( - MaxLengthValidator(limit_value=25), - validate_embed_fields - ), - 'footer': (validate_embed_footer,), - 'author': (validate_embed_author,) - } - - if not embed: - raise ValidationError("Tag embed must not be empty.") - - elif not isinstance(embed, Mapping): - raise ValidationError("Tag embed must be a mapping.") - - elif not any(field in embed for field in one_required_of): - raise ValidationError(f"Tag embed must contain one of the fields {one_required_of}.") - - for required_key in one_required_of: - if required_key in embed and not embed[required_key]: - raise ValidationError(f"Key {required_key!r} must not be empty.") - - for field_name, value in embed.items(): - if field_name not in all_keys: - raise ValidationError(f"Unknown field name: {field_name!r}") - - if field_name in field_validators: - for validator in field_validators[field_name]: - validator(value) diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 551cc2aa..8c46fcbc 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -5,7 +5,6 @@ from django.test import TestCase from ..models.bot.bot_setting import validate_bot_setting_name from ..models.bot.offensive_message import future_date_validator -from ..models.utils import validate_embed REQUIRED_KEYS = ( @@ -22,234 +21,6 @@ class BotSettingValidatorTests(TestCase): validate_bot_setting_name('bad name') -class TagEmbedValidatorTests(TestCase): - def test_rejects_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed('non-empty non-mapping') - - def test_rejects_missing_required_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'unknown': "key" - }) - - def test_rejects_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'provider': "??", - 'title': "" - }) - - def test_rejects_empty_required_key(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': '' - }) - - def test_rejects_list_as_embed(self): - with self.assertRaises(ValidationError): - validate_embed([]) - - def test_rejects_required_keys_and_unknown_keys(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "the duck walked up to the lemonade stand", - 'and': "he said to the man running the stand" - }) - - def test_rejects_too_long_title(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': 'a' * 257 - }) - - def test_rejects_too_many_fields(self): - with self.assertRaises(ValidationError): - 
validate_embed({ - 'fields': [{} for _ in range(26)] - }) - - def test_rejects_too_long_description(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'description': 'd' * 4097 - }) - - def test_allows_valid_embed(self): - validate_embed({ - 'title': "My embed", - 'description': "look at my embed, my embed is amazing" - }) - - def test_allows_unvalidated_fields(self): - validate_embed({ - 'title': "My embed", - 'provider': "what am I??" - }) - - def test_rejects_fields_as_list_of_non_mappings(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': ['abc'] - }) - - def test_rejects_fields_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'what': "is this field" - } - ] - }) - - def test_rejects_fields_with_too_long_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "a" * 257 - } - ] - }) - - def test_rejects_one_correct_one_incorrect_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME" - }, - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'oh': "what is this key?" - } - ] - }) - - def test_rejects_missing_required_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'inline': True, - } - ] - }) - - def test_rejects_invalid_inline_field_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'fields': [ - { - 'name': "Totally valid", - 'value': "LOOK AT ME", - 'inline': "Totally not a boolean", - } - ] - }) - - def test_allows_valid_fields(self): - validate_embed({ - 'fields': [ - { - 'name': "valid", - 'value': "field", - }, - { - 'name': "valid", - 'value': "field", - 'inline': False, - }, - { - 'name': "valid", - 'value': "field", - 'inline': True, - }, - ] - }) - - def test_rejects_footer_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': [] - }) - - def test_rejects_footer_with_unknown_fields(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'duck': "quack" - } - }) - - def test_rejects_footer_with_empty_text(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "" - } - }) - - def test_allows_footer_with_proper_values(self): - validate_embed({ - 'title': "whatever", - 'footer': { - 'text': "django good" - } - }) - - def test_rejects_author_as_non_mapping(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': [] - }) - - def test_rejects_author_with_unknown_field(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'field': "that is unknown" - } - }) - - def test_rejects_author_with_empty_name(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - 'name': "" - } - }) - - def test_rejects_author_with_one_correct_one_incorrect(self): - with self.assertRaises(ValidationError): - validate_embed({ - 'title': "whatever", - 'author': { - # Relies on "dictionary insertion order remembering" (D.I.O.R.) 
behaviour - 'url': "bobswebsite.com", - 'name': "" - } - }) - - def test_allows_author_with_proper_values(self): - validate_embed({ - 'title': "whatever", - 'author': { - 'name': "Bob" - } - }) - - class OffensiveMessageValidatorsTests(TestCase): def test_accepts_future_date(self): future_date_validator(datetime(3000, 1, 1, tzinfo=timezone.utc)) -- cgit v1.2.3 From 084357dbcc48445262fe078e7cb035d46be02e48 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Thu, 30 Jun 2022 10:49:24 +0100 Subject: Remove embed validators from old migrations Since the util file has been deleted, these migrations were referencing a missing file --- pydis_site/apps/api/migrations/0019_deletedmessage.py | 2 +- pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py | 3 +-- pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py | 3 +-- 3 files changed, 3 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0019_deletedmessage.py b/pydis_site/apps/api/migrations/0019_deletedmessage.py index 6b848d64..25d04434 100644 --- a/pydis_site/apps/api/migrations/0019_deletedmessage.py +++ b/pydis_site/apps/api/migrations/0019_deletedmessage.py @@ -18,7 +18,7 @@ class Migration(migrations.Migration): ('id', models.BigIntegerField(help_text='The message ID as taken from Discord.', primary_key=True, serialize=False, validators=[django.core.validators.MinValueValidator(limit_value=0, message='Message IDs cannot be negative.')])), ('channel_id', models.BigIntegerField(help_text='The channel ID that this message was sent in, taken from Discord.', validators=[django.core.validators.MinValueValidator(limit_value=0, message='Channel IDs cannot be negative.')])), ('content', models.CharField(help_text='The content of this message, taken from Discord.', max_length=2000)), - ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), help_text='Embeds attached to this message.', size=None)), + ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), help_text='Embeds attached to this message.', size=None)), ('author', models.ForeignKey(help_text='The author of this message.', on_delete=django.db.models.deletion.CASCADE, to='api.User')), ('deletion_context', models.ForeignKey(help_text='The deletion context this message is part of.', on_delete=django.db.models.deletion.CASCADE, to='api.MessageDeletionContext')), ], diff --git a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py index 124c6a57..622f21d1 100644 --- a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py +++ b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py @@ -3,7 +3,6 @@ import django.contrib.postgres.fields import django.contrib.postgres.fields.jsonb from django.db import migrations -import pydis_site.apps.api.models.utils class Migration(migrations.Migration): @@ -16,6 +15,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='deletedmessage', name='embeds', - field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None), + 
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None), ), ] diff --git a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py index 9e8f2fb9..95ef5850 100644 --- a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py +++ b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py @@ -2,7 +2,6 @@ import django.contrib.postgres.fields from django.db import migrations, models -import pydis_site.apps.api.models.utils class Migration(migrations.Migration): @@ -20,6 +19,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='deletedmessage', name='embeds', - field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None), + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None), ), ] -- cgit v1.2.3 From 7b40cd8143fea0beb195c6940bf2356970fc6958 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Mon, 11 Jul 2022 04:27:16 +0400 Subject: Drop Migration Tests The migration test suite was not really used, and it doesn't entirely make sense to test a constant unchanging process either. Its behavior is also very coupled with django's internals, and locks us into the current version and setup. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/migrations/__init__.py | 1 - pydis_site/apps/api/tests/migrations/base.py | 102 ----- .../migrations/test_active_infraction_migration.py | 496 --------------------- pydis_site/apps/api/tests/migrations/test_base.py | 135 ------ 4 files changed, 734 deletions(-) delete mode 100644 pydis_site/apps/api/tests/migrations/__init__.py delete mode 100644 pydis_site/apps/api/tests/migrations/base.py delete mode 100644 pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py delete mode 100644 pydis_site/apps/api/tests/migrations/test_base.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/migrations/__init__.py b/pydis_site/apps/api/tests/migrations/__init__.py deleted file mode 100644 index 38e42ffc..00000000 --- a/pydis_site/apps/api/tests/migrations/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""This submodule contains tests for functions used in data migrations.""" diff --git a/pydis_site/apps/api/tests/migrations/base.py b/pydis_site/apps/api/tests/migrations/base.py deleted file mode 100644 index 0c0a5bd0..00000000 --- a/pydis_site/apps/api/tests/migrations/base.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Includes utilities for testing migrations.""" -from django.db import connection -from django.db.migrations.executor import MigrationExecutor -from django.test import TestCase - - -class MigrationsTestCase(TestCase): - """ - A `TestCase` subclass to test migration files. - - To be able to properly test a migration, we will need to inject data into the test database - before the migrations we want to test are applied, but after the older migrations have been - applied. This makes sure that we are testing "as if" we were actually applying this migration - to a database in the state it was in before introducing the new migration. 
- - To set up a MigrationsTestCase, create a subclass of this class and set the following - class-level attributes: - - - app: The name of the app that contains the migrations (e.g., `'api'`) - - migration_prior: The name* of the last migration file before the migrations you want to test - - migration_target: The name* of the last migration file we want to test - - *) Specify the file names without a path or the `.py` file extension. - - Additionally, overwrite the `setUpMigrationData` in the subclass to inject data into the - database before the migrations we want to test are applied. Please read the docstring of the - method for more information. An optional hook, `setUpPostMigrationData` is also provided. - """ - - # These class-level attributes should be set in classes that inherit from this base class. - app = None - migration_prior = None - migration_target = None - - @classmethod - def setUpTestData(cls): - """ - Injects data into the test database prior to the migration we're trying to test. - - This class methods reverts the test database back to the state of the last migration file - prior to the migrations we want to test. It will then allow the user to inject data into the - test database by calling the `setUpMigrationData` hook. After the data has been injected, it - will apply the migrations we want to test and call the `setUpPostMigrationData` hook. The - user can now test if the migration correctly migrated the injected test data. - """ - if not cls.app: - raise ValueError("The `app` attribute was not set.") - - if not cls.migration_prior or not cls.migration_target: - raise ValueError("Both ` migration_prior` and `migration_target` need to be set.") - - cls.migrate_from = [(cls.app, cls.migration_prior)] - cls.migrate_to = [(cls.app, cls.migration_target)] - - # Reverse to database state prior to the migrations we want to test - executor = MigrationExecutor(connection) - executor.migrate(cls.migrate_from) - - # Call the data injection hook with the current state of the project - old_apps = executor.loader.project_state(cls.migrate_from).apps - cls.setUpMigrationData(old_apps) - - # Run the migrations we want to test - executor = MigrationExecutor(connection) - executor.loader.build_graph() - executor.migrate(cls.migrate_to) - - # Save the project state so we're able to work with the correct model states - cls.apps = executor.loader.project_state(cls.migrate_to).apps - - # Call `setUpPostMigrationData` to potentially set up post migration data used in testing - cls.setUpPostMigrationData(cls.apps) - - @classmethod - def setUpMigrationData(cls, apps): - """ - Override this method to inject data into the test database before the migration is applied. - - This method will be called after setting up the database according to the migrations that - come before the migration(s) we are trying to test, but before the to-be-tested migration(s) - are applied. This allows us to simulate a database state just prior to the migrations we are - trying to test. - - To make sure we're creating objects according to the state the models were in at this point - in the migration history, use `apps.get_model(app_name: str, model_name: str)` to get the - appropriate model, e.g.: - - >>> Infraction = apps.get_model('api', 'Infraction') - """ - pass - - @classmethod - def setUpPostMigrationData(cls, apps): - """ - Set up additional test data after the target migration has been applied. 
- - Use `apps.get_model(app_name: str, model_name: str)` to get the correct instances of the - model classes: - - >>> Infraction = apps.get_model('api', 'Infraction') - """ - pass diff --git a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py b/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py deleted file mode 100644 index 8dc29b34..00000000 --- a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py +++ /dev/null @@ -1,496 +0,0 @@ -"""Tests for the data migration in `filename`.""" -import logging -from collections import ChainMap, namedtuple -from datetime import timedelta -from itertools import count -from typing import Dict, Iterable, Type, Union - -from django.db.models import Q -from django.forms.models import model_to_dict -from django.utils import timezone - -from pydis_site.apps.api.models import Infraction, User -from .base import MigrationsTestCase - -log = logging.getLogger(__name__) -log.setLevel(logging.DEBUG) - - -InfractionHistory = namedtuple('InfractionHistory', ("user_id", "infraction_history")) - - -class InfractionFactory: - """Factory that creates infractions for a User instance.""" - - infraction_id = count(1) - user_id = count(1) - default_values = { - 'active': True, - 'expires_at': None, - 'hidden': False, - } - - @classmethod - def create( - cls, - actor: User, - infractions: Iterable[Dict[str, Union[str, int, bool]]], - infraction_model: Type[Infraction] = Infraction, - user_model: Type[User] = User, - ) -> InfractionHistory: - """ - Creates `infractions` for the `user` with the given `actor`. - - The `infractions` dictionary can contain the following fields: - - `type` (required) - - `active` (default: True) - - `expires_at` (default: None; i.e, permanent) - - `hidden` (default: False). - - The parameters `infraction_model` and `user_model` can be used to pass in an instance of - both model classes from a different migration/project state. 
- """ - user_id = next(cls.user_id) - user = user_model.objects.create( - id=user_id, - name=f"Infracted user {user_id}", - discriminator=user_id, - avatar_hash=None, - ) - infraction_history = [] - - for infraction in infractions: - infraction = dict(infraction) - infraction["id"] = next(cls.infraction_id) - infraction = ChainMap(infraction, cls.default_values) - new_infraction = infraction_model.objects.create( - user=user, - actor=actor, - type=infraction["type"], - reason=f"`{infraction['type']}` infraction (ID: {infraction['id']} of {user}", - active=infraction['active'], - hidden=infraction['hidden'], - expires_at=infraction['expires_at'], - ) - infraction_history.append(new_infraction) - - return InfractionHistory(user_id=user_id, infraction_history=infraction_history) - - -class InfractionFactoryTests(MigrationsTestCase): - """Tests for the InfractionFactory.""" - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0046_reminder_jump_url" - - @classmethod - def setUpPostMigrationData(cls, apps): - """Create a default actor for all infractions.""" - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.actor = cls.user_model.objects.create( - id=9999, - name="Unknown Moderator", - discriminator=1040, - avatar_hash=None, - ) - - def test_infraction_factory_total_count(self): - """Does the test database hold as many infractions as we tried to create?""" - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': False}, - {'type': 'ban', 'active': True, 'hidden': False}, - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - database_count = Infraction.objects.all().count() - self.assertEqual(3, database_count) - - def test_infraction_factory_multiple_users(self): - """Does the test database hold as many infractions as we tried to create?""" - for _user in range(5): - InfractionFactory.create( - actor=self.actor, - infractions=( - {'type': 'kick', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': False}, - ), - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - # Check if infractions and users are recorded properly in the database - database_count = Infraction.objects.all().count() - self.assertEqual(database_count, 10) - - user_count = User.objects.all().count() - self.assertEqual(user_count, 5 + 1) - - def test_infraction_factory_sets_correct_fields(self): - """Does the InfractionFactory set the correct attributes?""" - infractions = ( - { - 'type': 'note', - 'active': False, - 'hidden': True, - 'expires_at': timezone.now() - }, - {'type': 'warning', 'active': False, 'hidden': False, 'expires_at': None}, - {'type': 'watch', 'active': False, 'hidden': True, 'expires_at': None}, - {'type': 'mute', 'active': True, 'hidden': False, 'expires_at': None}, - {'type': 'kick', 'active': True, 'hidden': True, 'expires_at': None}, - {'type': 'ban', 'active': True, 'hidden': False, 'expires_at': None}, - { - 'type': 'superstar', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() - }, - ) - - InfractionFactory.create( - actor=self.actor, - infractions=infractions, - infraction_model=self.infraction_model, - user_model=self.user_model, - ) - - for infraction in infractions: - with self.subTest(**infraction): - self.assertTrue(Infraction.objects.filter(**infraction).exists()) - - -class 
ActiveInfractionMigrationTests(MigrationsTestCase): - """ - Tests the active infraction data migration. - - The active infraction data migration should do the following things: - - 1. migrates all active notes, warnings, and kicks to an inactive status; - 2. migrates all users with multiple active infractions of a single type to have only one active - infraction of that type. The infraction with the longest duration stays active. - """ - - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0047_active_infractions_migration" - - @classmethod - def setUpMigrationData(cls, apps): - """Sets up an initial database state that contains the relevant test cases.""" - # Fetch the Infraction and User model in the current migration state - cls.infraction_model = apps.get_model('api', 'Infraction') - cls.user_model = apps.get_model('api', 'User') - - cls.created_infractions = {} - - # Moderator that serves as actor for all infractions - cls.user_moderator = cls.user_model.objects.create( - id=9999, - name="Olivier de Vienne", - discriminator=1040, - avatar_hash=None, - ) - - # User #1: clean user with no infractions - cls.created_infractions["no infractions"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=[], - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #2: One inactive note infraction - cls.created_infractions["one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #3: One active note infraction - cls.created_infractions["one active note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #4: One active and one inactive note infraction - cls.created_infractions["one active and one inactive note"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': False, 'hidden': True}, - {'type': 'note', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #5: Once active note, one active kick, once active warning - cls.created_infractions["active note, kick, warning"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'note', 'active': True, 'hidden': True}, - {'type': 'kick', 'active': True, 'hidden': True}, - {'type': 'warning', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #6: One inactive ban and one active ban - cls.created_infractions["one inactive and one active ban"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': False, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #7: Two active permanent bans - cls.created_infractions["two active perm bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #8: Multiple active temporary bans - cls.created_infractions["multiple active temp bans"] = 
InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=1) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=20) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=5) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #9: One active permanent ban, two active temporary bans - cls.created_infractions["active perm, two active temp bans"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #10: One inactive permanent ban, two active temporary bans - cls.created_infractions["one inactive perm ban, two active temp bans"] = ( - InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=10) - }, - { - 'type': 'ban', - 'active': False, - 'hidden': True, - 'expires_at': None, - }, - { - 'type': 'ban', - 'active': True, - 'hidden': True, - 'expires_at': timezone.now() + timedelta(days=7) - }, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - ) - - # User #11: Active ban, active mute, active superstar - cls.created_infractions["active ban, mute, and superstar"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - # User #12: Multiple active bans, active mutes, active superstars - cls.created_infractions["multiple active bans, mutes, stars"] = InfractionFactory.create( - actor=cls.user_moderator, - infractions=( - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'ban', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'mute', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'superstar', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - {'type': 'watch', 'active': True, 'hidden': True}, - ), - infraction_model=cls.infraction_model, - user_model=cls.user_model, - ) - - def test_all_never_active_types_became_inactive(self): - """Are all infractions of a non-active type inactive after the migration?""" - inactive_type_query = Q(type="note") | Q(type="warning") | Q(type="kick") - self.assertFalse( - self.infraction_model.objects.filter(inactive_type_query, active=True).exists() - ) - - def 
test_migration_left_clean_user_without_infractions(self): - """Do users without infractions have no infractions after the migration?""" - user_id, infraction_history = self.created_infractions["no infractions"] - self.assertFalse( - self.infraction_model.objects.filter(user__id=user_id).exists() - ) - - def test_migration_left_user_with_inactive_note_untouched(self): - """Did the migration leave users with only an inactive note untouched?""" - user_id, infraction_history = self.created_infractions["one inactive note"] - inactive_note = infraction_history[0] - self.assertTrue( - self.infraction_model.objects.filter(**model_to_dict(inactive_note)).exists() - ) - - def test_migration_only_touched_active_field_of_active_note(self): - """Does the migration only change the `active` field?""" - user_id, infraction_history = self.created_infractions["one active note"] - note = model_to_dict(infraction_history[0]) - note['active'] = False - self.assertTrue( - self.infraction_model.objects.filter(**note).exists() - ) - - def test_migration_only_touched_active_field_of_active_note_left_inactive_untouched(self): - """Does the migration only change the `active` field of active notes?""" - user_id, infraction_history = self.created_infractions["one active and one inactive note"] - for note in infraction_history: - with self.subTest(active=note.active): - note = model_to_dict(note) - note['active'] = False - self.assertTrue( - self.infraction_model.objects.filter(**note).exists() - ) - - def test_migration_migrates_all_nonactive_types_to_inactive(self): - """Do we set the `active` field of all non-active infractions to `False`?""" - user_id, infraction_history = self.created_infractions["active note, kick, warning"] - self.assertFalse( - self.infraction_model.objects.filter(user__id=user_id, active=True).exists() - ) - - def test_migration_leaves_user_with_one_active_ban_untouched(self): - """Do we leave a user with one active and one inactive ban untouched?""" - user_id, infraction_history = self.created_infractions["one inactive and one active ban"] - for infraction in infraction_history: - with self.subTest(active=infraction.active): - self.assertTrue( - self.infraction_model.objects.filter(**model_to_dict(infraction)).exists() - ) - - def test_migration_turns_double_active_perm_ban_into_single_active_perm_ban(self): - """Does the migration turn two active permanent bans into one active permanent ban?""" - user_id, infraction_history = self.created_infractions["two active perm bans"] - active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count() - self.assertEqual(active_count, 1) - - def test_migration_leaves_temporary_ban_with_longest_duration_active(self): - """Does the migration turn two active permanent bans into one active permanent ban?""" - user_id, infraction_history = self.created_infractions["multiple active temp bans"] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertEqual(active_ban.expires_at, infraction_history[2].expires_at) - - def test_migration_leaves_permanent_ban_active(self): - """Does the migration leave the permanent ban active?""" - user_id, infraction_history = self.created_infractions["active perm, two active temp bans"] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertIsNone(active_ban.expires_at) - - def test_migration_leaves_longest_temp_ban_active_with_inactive_permanent_ban(self): - """Does the longest temp ban stay active, even with an inactive perm ban 
present?""" - user_id, infraction_history = self.created_infractions[ - "one inactive perm ban, two active temp bans" - ] - active_ban = self.infraction_model.objects.get(user__id=user_id, active=True) - self.assertEqual(active_ban.expires_at, infraction_history[0].expires_at) - - def test_migration_leaves_all_active_types_active_if_one_of_each_exists(self): - """Do all active infractions stay active if only one of each is present?""" - user_id, infraction_history = self.created_infractions["active ban, mute, and superstar"] - active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count() - self.assertEqual(active_count, 4) - - def test_migration_reduces_all_active_types_to_a_single_active_infraction(self): - """Do we reduce all of the infraction types to one active infraction?""" - user_id, infraction_history = self.created_infractions["multiple active bans, mutes, stars"] - active_infractions = self.infraction_model.objects.filter(user__id=user_id, active=True) - self.assertEqual(len(active_infractions), 4) - types_observed = [infraction.type for infraction in active_infractions] - - for infraction_type in ('ban', 'mute', 'superstar', 'watch'): - with self.subTest(type=infraction_type): - self.assertIn(infraction_type, types_observed) diff --git a/pydis_site/apps/api/tests/migrations/test_base.py b/pydis_site/apps/api/tests/migrations/test_base.py deleted file mode 100644 index f69bc92c..00000000 --- a/pydis_site/apps/api/tests/migrations/test_base.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging -from unittest.mock import call, patch - -from django.db.migrations.loader import MigrationLoader -from django.test import TestCase - -from .base import MigrationsTestCase, connection - -log = logging.getLogger(__name__) - - -class SpanishInquisition(MigrationsTestCase): - app = "api" - migration_prior = "scragly" - migration_target = "kosa" - - -@patch("pydis_site.apps.api.tests.migrations.base.MigrationExecutor") -class MigrationsTestCaseNoSideEffectsTests(TestCase): - """Tests the MigrationTestCase class with actual migration side effects disabled.""" - - def setUp(self): - """Set up an instance of MigrationsTestCase for use in tests.""" - self.test_case = SpanishInquisition() - - def test_missing_app_class_raises_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set the class-attribute `app`.""" - class Spam(MigrationsTestCase): - pass - - spam = Spam() - with self.assertRaises(ValueError, msg="The `app` attribute was not set."): - spam.setUpTestData() - - def test_missing_migration_class_attributes_raise_value_error(self, _migration_executor): - """A MigrationsTestCase subclass should set both `migration_prior` and `migration_target`""" - class Eggs(MigrationsTestCase): - app = "api" - migration_target = "lemon" - - class Bacon(MigrationsTestCase): - app = "api" - migration_prior = "mark" - - instances = (Eggs(), Bacon()) - - exception_message = "Both ` migration_prior` and `migration_target` need to be set." 
- for instance in instances: - with self.subTest( - migration_prior=instance.migration_prior, - migration_target=instance.migration_target, - ): - with self.assertRaises(ValueError, msg=exception_message): - instance.setUpTestData() - - @patch(f"{__name__}.SpanishInquisition.setUpMigrationData") - @patch(f"{__name__}.SpanishInquisition.setUpPostMigrationData") - def test_migration_data_hooks_are_called_once(self, pre_hook, post_hook, _migration_executor): - """The `setUpMigrationData` and `setUpPostMigrationData` hooks should be called once.""" - self.test_case.setUpTestData() - for hook in (pre_hook, post_hook): - with self.subTest(hook=repr(hook)): - hook.assert_called_once() - - def test_migration_executor_is_instantiated_twice(self, migration_executor): - """The `MigrationExecutor` should be instantiated with the database connection twice.""" - self.test_case.setUpTestData() - - expected_args = [call(connection), call(connection)] - self.assertEqual(migration_executor.call_args_list, expected_args) - - def test_project_state_is_loaded_for_correct_migration_files_twice(self, migration_executor): - """The `project_state` should first be loaded with `migrate_from`, then `migrate_to`.""" - self.test_case.setUpTestData() - - expected_args = [call(self.test_case.migrate_from), call(self.test_case.migrate_to)] - self.assertEqual(migration_executor().loader.project_state.call_args_list, expected_args) - - def test_loader_build_graph_gets_called_once(self, migration_executor): - """We should rebuild the migration graph before applying the second set of migrations.""" - self.test_case.setUpTestData() - - migration_executor().loader.build_graph.assert_called_once() - - def test_migration_executor_migrate_method_is_called_correctly_twice(self, migration_executor): - """The migrate method of the executor should be called twice with the correct arguments.""" - self.test_case.setUpTestData() - - self.assertEqual(migration_executor().migrate.call_count, 2) - calls = [call([('api', 'scragly')]), call([('api', 'kosa')])] - migration_executor().migrate.assert_has_calls(calls) - - -class LifeOfBrian(MigrationsTestCase): - app = "api" - migration_prior = "0046_reminder_jump_url" - migration_target = "0048_add_infractions_unique_constraints_active" - - @classmethod - def log_last_migration(cls): - """Parses the applied migrations dictionary to log the last applied migration.""" - loader = MigrationLoader(connection) - api_migrations = [ - migration for app, migration in loader.applied_migrations if app == cls.app - ] - last_migration = max(api_migrations, key=lambda name: int(name[:4])) - log.info(f"The last applied migration: {last_migration}") - - @classmethod - def setUpMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - @classmethod - def setUpPostMigrationData(cls, apps): - """Method that logs the last applied migration at this point.""" - cls.log_last_migration() - - -class MigrationsTestCaseMigrationTest(TestCase): - """Tests if `MigrationsTestCase` travels to the right points in the migration history.""" - - def test_migrations_test_case_travels_to_correct_migrations_in_history(self): - """The test case should first revert to `migration_prior`, then go to `migration_target`.""" - brian = LifeOfBrian() - - with self.assertLogs(log, level=logging.INFO) as logs: - brian.setUpTestData() - - self.assertEqual(len(logs.records), 2) - - for time_point, record in zip(("migration_prior", "migration_target"), logs.records): - with 
self.subTest(time_point=time_point): - message = f"The last applied migration: {getattr(brian, time_point)}" - self.assertEqual(record.getMessage(), message) -- cgit v1.2.3 From ee25921da752d51215598bcd3eb5fd5ab74a4a46 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Mon, 11 Jul 2022 05:00:40 +0400 Subject: Remove Message Model Test The message model was tested by instantiating it and confirming it has a string representation, but instantiating abstract models is undefined behavior, and can break with future versions of django. The behavior of the test is redundant anyway, since an abstract model wouldn't exist in isolation, and the desired behavior is confirmed by inheritors. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_models.py | 12 ------------ 1 file changed, 12 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 0fad467c..c07d59cd 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -7,7 +7,6 @@ from pydis_site.apps.api.models import ( DeletedMessage, DocumentationLink, Infraction, - Message, MessageDeletionContext, Nomination, NominationEntry, @@ -116,17 +115,6 @@ class StringDunderMethodTests(SimpleTestCase): colour=0x5, permissions=0, position=10, ), - Message( - id=45, - author=User( - id=444, - name='bill', - discriminator=5, - ), - channel_id=666, - content="wooey", - embeds=[] - ), MessageDeletionContext( actor=User( id=5555, -- cgit v1.2.3 From 36a461b57c5b901fb5cfb81966bac3f0387fd590 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Mon, 11 Jul 2022 05:05:51 +0400 Subject: Bump flake8-bandit To v3 Bumps flake8-bandit to v3 to fix an incompatibility with the bandit package. This also bumps flake8-annotations to a legally acceptable version, which introduces ANN401, which disallows `typing.Any` annotations (for the most part, refer to the docs). Signed-off-by: Hassan Abouelela --- poetry.lock | 271 +++++++++++-------------- pydis_site/apps/api/pagination.py | 5 +- pydis_site/apps/content/views/page_category.py | 4 +- pyproject.toml | 2 +- 4 files changed, 120 insertions(+), 162 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/poetry.lock b/poetry.lock index fcdc9d84..826c4fca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -17,7 +17,7 @@ trio = ["trio (>=0.16)"] [[package]] name = "asgiref" -version = "3.5.0" +version = "3.5.2" description = "ASGI specs, helper code, and adapters" category = "main" optional = false @@ -42,7 +42,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "bandit" -version = "1.7.2" +version = "1.7.4" description = "Security oriented static analyser for python code." category = "dev" optional = false @@ -55,17 +55,17 @@ PyYAML = ">=5.3.1" stevedore = ">=1.20.0" [package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] toml = ["toml"] yaml = ["pyyaml"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.6.15" description = "Python package for providing Mozilla's CA Bundle."
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cfgv" @@ -77,18 +77,18 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.0.11" +version = "2.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.5" description = "Cross-platform colored terminal text." category = "dev" optional = false @@ -190,7 +190,7 @@ prometheus-client = ">=0.7" [[package]] name = "django-simple-bulma" -version = "2.4.0" +version = "2.5.0" description = "Django application to add the Bulma CSS framework and its extensions" category = "main" optional = false @@ -224,7 +224,7 @@ python-versions = "*" [[package]] name = "filelock" -version = "3.4.2" +version = "3.7.1" description = "A platform independent file lock." category = "dev" optional = false @@ -249,25 +249,26 @@ pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "flake8-annotations" -version = "2.7.0" +version = "2.9.0" description = "Flake8 Type Annotation Checks" category = "dev" optional = false -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.7,<4.0" [package.dependencies] -flake8 = ">=3.7,<5.0" +attrs = ">=21.4,<22.0" +flake8 = ">=3.7" [[package]] name = "flake8-bandit" -version = "2.1.2" +version = "3.0.0" description = "Automated security testing with bandit and flake8." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -bandit = "*" +bandit = ">=1.7.3" flake8 = "*" flake8-polyfill = "*" pycodestyle = "*" @@ -334,7 +335,7 @@ flake8 = "*" [[package]] name = "flake8-tidy-imports" -version = "4.6.0" +version = "4.8.0" description = "A flake8 plugin that helps you write tidier imports." 
category = "dev" optional = false @@ -367,7 +368,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.26" +version = "3.1.27" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -438,7 +439,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.4.6" +version = "2.5.1" description = "File identification library for Python" category = "dev" optional = false @@ -457,7 +458,7 @@ python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.10.1" +version = "4.12.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -467,9 +468,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "libsass" @@ -484,7 +485,7 @@ six = "*" [[package]] name = "markdown" -version = "3.3.6" +version = "3.3.7" description = "Python implementation of Markdown." category = "main" optional = false @@ -514,15 +515,15 @@ python-versions = ">=3.5" [[package]] name = "nodeenv" -version = "1.6.0" +version = "1.7.0" description = "Node.js virtual environment builder" category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" [[package]] name = "pbr" -version = "5.8.0" +version = "5.9.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -530,7 +531,7 @@ python-versions = ">=2.6" [[package]] name = "pep8-naming" -version = "0.12.1" +version = "0.13.0" description = "Check PEP-8 naming conventions, plugin for flake8" category = "dev" optional = false @@ -538,27 +539,26 @@ python-versions = "*" [package.dependencies] flake8 = ">=3.9.1" -flake8-polyfill = ">=1.0.2,<2" [[package]] name = "platformdirs" -version = "2.4.1" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pre-commit" -version = "2.17.0" +version = "2.19.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] cfgv = ">=2.0.0" @@ -570,7 +570,7 @@ virtualenv = ">=20.0.8" [[package]] name = "prometheus-client" -version = "0.13.1" +version = "0.14.1" description = "Python client for the Prometheus monitoring system." category = "main" optional = false @@ -581,14 +581,14 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.0" +version = "5.9.1" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] +test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] [[package]] name = "psycopg2-binary" @@ -622,7 +622,7 @@ toml = ["toml"] [[package]] name = "pyfakefs" -version = "4.5.4" +version = "4.5.6" description = "pyfakefs implements a fake file system that mocks the Python file system modules." category = "dev" optional = false @@ -664,7 +664,7 @@ test = ["pytest", "toml", "pyaml"] [[package]] name = "pytz" -version = "2021.3" +version = "2022.1" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -812,20 +812,20 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.10" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.13.0" +version = "20.15.1" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -854,20 +854,20 @@ brotli = ["brotli"] [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "c94949e29f868689d9c99379dbf4f9479e2ddfe5e6d49e15b57d016210a50379" +content-hash = "91913e2e96ab2e0e78a09334241062359605135d64f458e710f66d00fb670e05" [metadata.files] anyio = [ @@ -875,32 +875,26 @@ anyio = [ {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, ] asgiref = [ - {file = "asgiref-3.5.0-py3-none-any.whl", hash = "sha256:88d59c13d634dcffe0510be048210188edd79aeccb6a6c9028cdad6f31d730a9"}, - {file = 
"asgiref-3.5.0.tar.gz", hash = "sha256:2f8abc20f7248433085eda803936d98992f1343ddb022065779f37c5da0181d0"}, + {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, + {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, ] attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] -bandit = [ - {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, - {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, -] +bandit = [] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -charset-normalizer = [ - {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, - {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, -] +charset-normalizer = [] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] coverage = [ {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, @@ -983,10 +977,7 @@ django-prometheus = [ {file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"}, {file = "django_prometheus-2.2.0-py2.py3-none-any.whl", hash = "sha256:e6616770d8820b8834762764bf1b76ec08e1b98e72a6f359d488a2e15fe3537c"}, ] -django-simple-bulma = [ - {file = "django-simple-bulma-2.4.0.tar.gz", hash = "sha256:99a15261b0c61062a128af3c6a45da9c066d6a4a548c9063464e0fb7a5438aa1"}, - {file = "django_simple_bulma-2.4.0-py3-none-any.whl", hash = "sha256:95d5e26bebbf6a0184e33df844a0ff534bdfd91431e413d1a844d47a75c55fff"}, -] +django-simple-bulma = [] djangorestframework = [ {file = "djangorestframework-3.12.4-py3-none-any.whl", hash = "sha256:6d1d59f623a5ad0509fe0d6bfe93cbdfe17b8116ebc8eda86d45f6e16e819aaf"}, {file = "djangorestframework-3.12.4.tar.gz", hash = 
"sha256:f747949a8ddac876e879190df194b925c177cdeb725a099db1460872f7c0a7f2"}, @@ -994,21 +985,16 @@ djangorestframework = [ docopt = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] -filelock = [ - {file = "filelock-3.4.2-py3-none-any.whl", hash = "sha256:cf0fc6a2f8d26bd900f19bf33915ca70ba4dd8c56903eeb14e1e7a2fd7590146"}, - {file = "filelock-3.4.2.tar.gz", hash = "sha256:38b4f4c989f9d06d44524df1b24bd19e167d851f19b50bf3e3559952dddc5b80"}, -] +filelock = [] flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.7.0.tar.gz", hash = "sha256:52e53c05b0c06cac1c2dec192ea2c36e85081238add3bd99421d56f574b9479b"}, - {file = "flake8_annotations-2.7.0-py3-none-any.whl", hash = "sha256:3edfbbfb58e404868834fe6ec3eaf49c139f64f0701259f707d043185545151e"}, -] -flake8-bandit = [ - {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, + {file = "flake8-annotations-2.9.0.tar.gz", hash = "sha256:63fb3f538970b6a8dfd84125cf5af16f7b22e52d5032acb3b7eb23645ecbda9b"}, + {file = "flake8_annotations-2.9.0-py3-none-any.whl", hash = "sha256:84f46de2964cb18fccea968d9eafce7cf857e34d913d515120795b9af6498d56"}, ] +flake8-bandit = [] flake8-bugbear = [ {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, @@ -1030,8 +1016,8 @@ flake8-string-format = [ {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, ] flake8-tidy-imports = [ - {file = "flake8-tidy-imports-4.6.0.tar.gz", hash = "sha256:3e193d8c4bb4492408a90e956d888b27eed14c698387c9b38230da3dad78058f"}, - {file = "flake8_tidy_imports-4.6.0-py3-none-any.whl", hash = "sha256:6ae9f55d628156e19d19f4c359dd5d3e95431a9bd514f5e2748c53c1398c66b2"}, + {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, + {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, ] flake8-todo = [ {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, @@ -1041,8 +1027,8 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.26-py3-none-any.whl", hash = "sha256:26ac35c212d1f7b16036361ca5cff3ec66e11753a0d677fb6c48fa4e1a9dd8d6"}, - {file = "GitPython-3.1.26.tar.gz", hash = "sha256:fc8868f63a2e6d268fb25f481995ba185a85a66fcad126f039323ff6635669ee"}, + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] gunicorn = [ {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"}, @@ -1055,17 +1041,14 @@ h11 = [ httpcore = [] httpx = [] identify = [ 
- {file = "identify-2.4.6-py2.py3-none-any.whl", hash = "sha256:cf06b1639e0dca0c184b1504d8b73448c99a68e004a80524c7923b95f7b6837c"}, - {file = "identify-2.4.6.tar.gz", hash = "sha256:233679e3f61a02015d4293dbccf16aa0e4996f868bd114688b8c124f18826706"}, + {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, + {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, - {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, -] +importlib-metadata = [] libsass = [ {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, @@ -1078,10 +1061,7 @@ libsass = [ {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, ] -markdown = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, -] +markdown = [] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -1090,63 +1070,51 @@ mslex = [ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, {file = "mslex-0.3.0.tar.gz", hash = "sha256:4a1ac3f25025cad78ad2fe499dd16d42759f7a3801645399cce5c404415daa97"}, ] -nodeenv = [ - {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, - {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, -] -pbr = [ - {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, - {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, -] -pep8-naming = [ - {file = "pep8-naming-0.12.1.tar.gz", hash = "sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841"}, - {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"}, -] +nodeenv = [] +pbr = [] +pep8-naming = [] platformdirs = [ - {file = "platformdirs-2.4.1-py3-none-any.whl", hash = "sha256:1d7385c7db91728b83efd0ca99a5afb296cab9d0ed8313a45ed8ba17967ecfca"}, - {file = "platformdirs-2.4.1.tar.gz", hash = 
"sha256:440633ddfebcc36264232365d7840a970e75e1018d15b4327d11f91909045fda"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pre-commit = [ - {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, - {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, -] -prometheus-client = [ - {file = "prometheus_client-0.13.1-py3-none-any.whl", hash = "sha256:357a447fd2359b0a1d2e9b311a0c5778c330cfbe186d880ad5a6b39884652316"}, - {file = "prometheus_client-0.13.1.tar.gz", hash = "sha256:ada41b891b79fca5638bd5cfe149efa86512eaa55987893becd2c6d8d0a5dfc5"}, + {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, + {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, ] +prometheus-client = [] psutil = [ - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"}, - {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"}, - {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"}, - {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"}, - {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"}, - {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"}, - {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, - {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, - {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, - {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, - {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, - {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, - {file = 
"psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, - {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, - {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, - {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"}, - {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"}, - {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"}, - {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"}, - {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"}, - {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"}, - {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"}, - {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"}, - {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"}, - {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"}, - {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, + {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, + {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, + {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, + {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, + {file = 
"psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, + {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, + {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, + {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, + {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, + {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, + {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, + {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, + {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, + {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, + {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, + {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, + {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, + {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, + {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, + {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, + {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, + {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, + {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, + {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, + {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, ] psycopg2-binary = [ {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, @@ -1193,10 +1161,7 @@ pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] -pyfakefs = [ - {file = "pyfakefs-4.5.4-py3-none-any.whl", hash = "sha256:e0cc0d22cb74badf4fb2143a112817d7aea1a58ee9dca015a68bf38c3691cb52"}, - {file = "pyfakefs-4.5.4.tar.gz", hash = "sha256:5b5951e873f73bf12e3a19d8e4470c4b7962c51df753cf8c4caaf64e24a0a323"}, -] +pyfakefs = [] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, @@ -1210,8 +1175,8 @@ python-frontmatter = [ {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, ] pytz = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, + {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, + {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, ] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, @@ -1285,19 +1250,13 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, -] -virtualenv = [ - {file = "virtualenv-20.13.0-py2.py3-none-any.whl", hash = "sha256:339f16c4a86b44240ba7223d0f93a7887c3ca04b5f9c8129da7958447d079b09"}, - {file = "virtualenv-20.13.0.tar.gz", hash = "sha256:d8458cf8d59d0ea495ad9b34c2599487f8a7772d796f9910858376d1600dd2dd"}, -] +urllib3 = [] +virtualenv = [] whitenoise = [ {file = "whitenoise-5.3.0-py2.py3-none-any.whl", hash = "sha256:d963ef25639d1417e8a247be36e6aedd8c7c6f0a08adcb5a89146980a96b577c"}, {file = "whitenoise-5.3.0.tar.gz", hash = 
"sha256:d234b871b52271ae7ed6d9da47ffe857c76568f11dd30e28e18c5869dbd11e12"}, ] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/pydis_site/apps/api/pagination.py b/pydis_site/apps/api/pagination.py index 2a325460..61707d33 100644 --- a/pydis_site/apps/api/pagination.py +++ b/pydis_site/apps/api/pagination.py @@ -1,7 +1,6 @@ -import typing - from rest_framework.pagination import LimitOffsetPagination from rest_framework.response import Response +from rest_framework.utils.serializer_helpers import ReturnList class LimitOffsetPaginationExtended(LimitOffsetPagination): @@ -44,6 +43,6 @@ class LimitOffsetPaginationExtended(LimitOffsetPagination): default_limit = 100 - def get_paginated_response(self, data: typing.Any) -> Response: + def get_paginated_response(self, data: ReturnList) -> Response: """Override to skip metadata i.e. `count`, `next`, and `previous`.""" return Response(data) diff --git a/pydis_site/apps/content/views/page_category.py b/pydis_site/apps/content/views/page_category.py index 5af77aff..356eb021 100644 --- a/pydis_site/apps/content/views/page_category.py +++ b/pydis_site/apps/content/views/page_category.py @@ -3,7 +3,7 @@ from pathlib import Path import frontmatter from django.conf import settings -from django.http import Http404 +from django.http import Http404, HttpRequest, HttpResponse from django.views.generic import TemplateView from pydis_site.apps.content import utils @@ -12,7 +12,7 @@ from pydis_site.apps.content import utils class PageOrCategoryView(TemplateView): """Handles pages and page categories.""" - def dispatch(self, request: t.Any, *args, **kwargs) -> t.Any: + def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: """Conform URL path location to the filesystem path.""" self.location = Path(kwargs.get("location", "")) diff --git a/pyproject.toml b/pyproject.toml index 01e7eda2..6ef7c407 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ django-distill = "~=2.9.0" coverage = "~=5.0" flake8 = "~=3.7" flake8-annotations = "~=2.0" -flake8-bandit = "~=2.1" +flake8-bandit = "~=3.0" flake8-bugbear = "~=20.1" flake8-docstrings = "~=1.5" flake8-import-order = "~=0.18" -- cgit v1.2.3 From fe4def75dc0a316789cec068a574713a2b2af92f Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 12 Jul 2022 09:25:05 +0400 Subject: Add GitHub Artifact API View Adds an API route to fetch GitHub build artifacts through a GitHub app. 
Signed-off-by: Hassan Abouelela --- .gitignore | 3 + poetry.lock | 67 +++++- pydis_site/apps/api/github_utils.py | 183 ++++++++++++++++ pydis_site/apps/api/tests/test_github_utils.py | 287 +++++++++++++++++++++++++ pydis_site/apps/api/urls.py | 9 +- pydis_site/apps/api/views.py | 53 +++++ pydis_site/settings.py | 10 +- pyproject.toml | 1 + 8 files changed, 609 insertions(+), 4 deletions(-) create mode 100644 pydis_site/apps/api/github_utils.py create mode 100644 pydis_site/apps/api/tests/test_github_utils.py (limited to 'pydis_site/apps/api') diff --git a/.gitignore b/.gitignore index 45073da5..4fc4417d 100644 --- a/.gitignore +++ b/.gitignore @@ -132,3 +132,6 @@ log.* # Mac/OSX .DS_Store + +# Private keys +*.pem diff --git a/poetry.lock b/poetry.lock index f6576fba..1bee4397 100644 --- a/poetry.lock +++ b/poetry.lock @@ -67,6 +67,17 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + [[package]] name = "cfgv" version = "3.3.1" @@ -121,6 +132,25 @@ requests = ">=1.0.0" [package.extras] yaml = ["PyYAML (>=3.10)"] +[[package]] +name = "cryptography" +version = "37.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + [[package]] name = "distlib" version = "0.3.4" @@ -607,6 +637,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "pydocstyle" version = "6.1.1" @@ -637,6 +675,23 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "pyjwt" +version = "2.4.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.3.1)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] + [[package]] name = "python-dotenv" version = "0.17.1" @@ -876,7 +931,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "e71d10c3d478c5d99e842f4c449a093caa1d4b2d255eb0dfb19843c5265d4aca" +content-hash = "c656c07f40d32ee7d30c19a7084b40e1e851209a362a3fe882aa03c2fd286454" 
[metadata.files] anyio = [ @@ -896,6 +951,7 @@ certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, ] +cffi = [] cfgv = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, @@ -963,6 +1019,7 @@ coveralls = [ {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, ] +cryptography = [] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, @@ -1157,6 +1214,10 @@ pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, @@ -1166,6 +1227,10 @@ pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] +pyjwt = [ + {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, + {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, +] python-dotenv = [ {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py new file mode 100644 index 00000000..70dccdff --- /dev/null +++ b/pydis_site/apps/api/github_utils.py @@ -0,0 +1,183 @@ +"""Utilities for working with the GitHub API.""" + +import asyncio +import datetime +import math + +import httpx +import jwt +from asgiref.sync import async_to_sync + +from pydis_site import settings + +MAX_POLLS = 20 +"""The maximum number of attempts at fetching a workflow run.""" + + +class ArtifactProcessingError(Exception): + """Base exception for other errors related to processing a GitHub artifact.""" + + status: int + + +class UnauthorizedError(ArtifactProcessingError): + """The application does not have permission to access the requested repo.""" + + status = 401 + + 
+class NotFoundError(ArtifactProcessingError): + """The requested resource could not be found.""" + + status = 404 + + +class ActionFailedError(ArtifactProcessingError): + """The requested workflow did not conclude successfully.""" + + status = 400 + + +class RunTimeoutError(ArtifactProcessingError): + """The requested workflow run was not ready in time.""" + + status = 408 + + +def generate_token() -> str: + """ + Generate a JWT token to access the GitHub API. + + The token is valid for roughly 10 minutes after generation, before the API starts + returning 401s. + + Refer to: + https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app + """ + now = datetime.datetime.now() + return jwt.encode( + { + "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at + "exp": math.floor((now + datetime.timedelta(minutes=9)).timestamp()), # Expires at + "iss": settings.GITHUB_OAUTH_APP_ID, + }, + settings.GITHUB_OAUTH_KEY, + algorithm="RS256" + ) + + +async def authorize(owner: str, repo: str) -> httpx.AsyncClient: + """ + Get an access token for the requested repository. + + The process is roughly: + - GET app/installations to get a list of all app installations + - POST to get a token to access the given app + - GET installation/repositories and check if the requested one is part of those + """ + client = httpx.AsyncClient( + base_url=settings.GITHUB_API, + headers={"Authorization": f"bearer {generate_token()}"}, + timeout=settings.TIMEOUT_PERIOD, + ) + + try: + # Get a list of app installations we have access to + apps = await client.get("app/installations") + apps.raise_for_status() + + for app in apps.json(): + # Look for an installation with the right owner + if app["account"]["login"] != owner: + continue + + # Get the repositories of the specified owner + app_token = await client.post(app["access_tokens_url"]) + app_token.raise_for_status() + client.headers["Authorization"] = f"bearer {app_token.json()['token']}" + + repos = await client.get("installation/repositories") + repos.raise_for_status() + + # Search for the request repository + for accessible_repo in repos.json()["repositories"]: + if accessible_repo["name"] == repo: + # We've found the correct repository, and it's accessible with the current auth + return client + + raise NotFoundError( + "Could not find the requested repository. Make sure the application can access it." 
+ ) + + except BaseException as e: + # Close the client if we encountered an unexpected exception + await client.aclose() + raise e + + +async def wait_for_run(client: httpx.AsyncClient, run: dict) -> str: + """Wait for the provided `run` to finish, and return the URL to its artifacts.""" + polls = 0 + while polls <= MAX_POLLS: + if run["status"] != "completed": + # The action is still processing, wait a bit longer + polls += 1 + await asyncio.sleep(10) + + elif run["conclusion"] != "success": + # The action failed, or did not run + raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") + + else: + # The desired action was found, and it ended successfully + return run["artifacts_url"] + + run = await client.get(run["url"]) + run.raise_for_status() + run = run.json() + + raise RunTimeoutError("The requested workflow was not ready in time.") + + +@async_to_sync +async def get_artifact( + owner: str, repo: str, sha: str, action_name: str, artifact_name: str +) -> str: + """Get a download URL for a build artifact.""" + client = await authorize(owner, repo) + + try: + # Get the workflow runs for this repository + runs = await client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100}) + runs.raise_for_status() + runs = runs.json() + + # Filter the runs for the one associated with the given SHA + for run in runs["workflow_runs"]: + if run["name"] == action_name and sha == run["head_sha"]: + break + else: + raise NotFoundError( + "Could not find a run matching the provided settings in the previous hundred runs." + ) + + # Wait for the workflow to finish + url = await wait_for_run(client, run) + + # Filter the artifacts, and return the download URL + artifacts = await client.get(url) + artifacts.raise_for_status() + + for artifact in artifacts.json()["artifacts"]: + if artifact["name"] == artifact_name: + data = await client.get(artifact["archive_download_url"]) + if data.status_code == 302: + return str(data.next_request.url) + + # The following line is left untested since it should in theory be impossible + data.raise_for_status() # pragma: no cover + + raise NotFoundError("Could not find an artifact matching the provided name.") + + finally: + await client.aclose() diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py new file mode 100644 index 00000000..dc17d609 --- /dev/null +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -0,0 +1,287 @@ +import asyncio +import datetime +import random +import unittest +from unittest import mock + +import django.test +import httpx +import jwt +import rest_framework.response +import rest_framework.test +from django.urls import reverse + +from .. import github_utils + + +def patched_raise_for_status(response: httpx.Response): + """Fake implementation of raise_for_status which does not need a request to be set.""" + if response.status_code // 100 != 2: # pragma: no cover + raise httpx.HTTPStatusError( + f"Non 2xx response code: {response.status_code}", + request=getattr(response, "_request", httpx.Request("GET", "")), + response=response + ) + + +class GeneralUtilityTests(unittest.TestCase): + """Test the utility methods which do not fit in another class.""" + + def test_token_generation(self): + """Test that the a valid JWT token is generated.""" + def encode(payload: dict, _: str, algorithm: str, *args, **kwargs) -> str: + """ + Intercept the encode method. 
+ + It is performed with an algorithm which does not require a PEM key, as it may + not be available in testing environments. + """ + self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") + return original_encode( + payload, "secret-encoding-key", algorithm="HS256", *args, **kwargs + ) + + original_encode = jwt.encode + with mock.patch("jwt.encode", new=encode): + token = github_utils.generate_token() + decoded = jwt.decode(token, "secret-encoding-key", algorithms=["HS256"]) + + delta = datetime.timedelta(minutes=10) + self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) + self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + + +@mock.patch("httpx.AsyncClient", autospec=True) +@mock.patch("asyncio.sleep", new=mock.AsyncMock(return_value=asyncio.Future)) +@mock.patch("httpx.Response.raise_for_status", new=patched_raise_for_status) +class WaitForTests(unittest.IsolatedAsyncioTestCase): + """Tests the wait_for utility.""" + + async def test_wait_for_successful_run(self, client_mock: mock.Mock): + """Test that the wait_for method handles successfully runs.""" + final_url = "some_url" + str(random.randint(0, 10)) + + client_mock.get.side_effect = responses = [ + httpx.Response(200, json={"status": "queued", "url": ""}), + httpx.Response(200, json={"status": "pending", "url": ""}), + httpx.Response(200, json={ + "status": "completed", + "conclusion": "success", + "url": "", + "artifacts_url": final_url + }) + ] + + result = await github_utils.wait_for_run(client_mock, responses[0].json()) + self.assertEqual(final_url, result) + + async def test_wait_for_failed_run(self, client_mock: mock.Mock): + """Test that the wait_for method handles failed runs.""" + client_mock.get.return_value = httpx.Response(200, json={ + "status": "completed", + "conclusion": "failed", + }) + + with self.assertRaises(github_utils.ActionFailedError): + await github_utils.wait_for_run(client_mock, {"status": "pending", "url": ""}) + + async def test_wait_for_timeout(self, client_mock: mock.Mock): + """Test that the wait_for method quits after a few attempts.""" + client_mock.get.side_effect = responses = [ + httpx.Response(200, json={"status": "pending", "url": ""}) + ] * (github_utils.MAX_POLLS + 5) + + with self.assertRaises(github_utils.RunTimeoutError): + await github_utils.wait_for_run(client_mock, responses[0].json()) + + +async def get_response_authorize( + _: httpx.AsyncClient, request: httpx.Request, **__ +) -> httpx.Response: + """ + Helper method for the authorize tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + auth = request.headers.get("Authorization") + + if request.method == "GET": + if path == "/app/installations": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json=[{ + "account": {"login": "VALID_OWNER"}, + "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" + }]) + else: + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) + + elif path == "/installation/repositories": + if auth == "bearer app access token": + return httpx.Response(200, request=request, json={ + "repositories": [{ + "name": "VALID_REPO" + }] + }) + else: # pragma: no cover + return httpx.Response( + 401, json={"error": "auth installation/repositories"}, request=request + ) + + elif request.method == "POST": + if path == "/ACCESS_TOKEN_URL": + if auth == "bearer JWT initial token": + return httpx.Response(200, request=request, json={"token": "app access token"}) + else: # pragma: no cover + return httpx.Response(401, json={"error": "auth access_token"}, request=request) + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + +@mock.patch("httpx.AsyncClient.send", new=get_response_authorize) +@mock.patch.object(github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token")) +class AuthorizeTests(unittest.IsolatedAsyncioTestCase): + """Test the authorize utility.""" + + async def test_invalid_apps_auth(self): + """Test that an exception is raised if authorization was attempted with an invalid token.""" + with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): + with self.assertRaises(httpx.HTTPStatusError) as error: + await github_utils.authorize("VALID_OWNER", "VALID_REPO") + + exception: httpx.HTTPStatusError = error.exception + self.assertEqual(401, exception.response.status_code) + self.assertEqual("auth app/installations", exception.response.json()["error"]) + + async def test_missing_repo(self): + """Test that an exception is raised when the selected owner or repo are not available.""" + with self.assertRaises(github_utils.NotFoundError): + await github_utils.authorize("INVALID_OWNER", "VALID_REPO") + with self.assertRaises(github_utils.NotFoundError): + await github_utils.authorize("VALID_OWNER", "INVALID_REPO") + + async def test_valid_authorization(self): + """Test that an accessible repository can be accessed.""" + client = await github_utils.authorize("VALID_OWNER", "VALID_REPO") + self.assertEqual("bearer app access token", client.headers.get("Authorization")) + + +async def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: + """ + Helper method for the get_artifact tests. + + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path + + if "force_error" in path: + return httpx.Response(404, request=request) + + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + return httpx.Response(200, request=request, json={"workflow_runs": [{ + "name": "action_name", + "head_sha": "action_sha" + }]}) + elif path == "/artifact_url": + return httpx.Response(200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]}) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request("GET", httpx.URL("https://final_download.url")) + return response + + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover + + +class ArtifactFetcherTests(unittest.IsolatedAsyncioTestCase): + """Test the get_artifact utility.""" + + def setUp(self) -> None: + self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] + self.client = httpx.AsyncClient(base_url="https://example.com") + + self.patchers = [ + mock.patch.object(self.client, "send", new=get_response_get_artifact), + mock.patch.object(github_utils, "authorize", return_value=self.client), + mock.patch.object(github_utils, "wait_for_run", return_value="artifact_url"), + ] + + for patcher in self.patchers: + patcher.start() + + def tearDown(self) -> None: + for patcher in self.patchers: + patcher.stop() + + def test_client_closed_on_errors(self): + """Test that the client is terminated even if an error occurs at some point.""" + self.call_args[0] = "force_error" + with self.assertRaises(httpx.HTTPStatusError): + github_utils.get_artifact(*self.call_args) + self.assertTrue(self.client.is_closed) + + def test_missing(self): + """Test that an exception is raised if the requested artifact was not found.""" + cases = ( + "invalid sha", + "invalid action name", + "invalid artifact name", + ) + for i, name in enumerate(cases, 2): + with self.subTest(f"Test {name} raises an error"): + new_args = self.call_args.copy() + new_args[i] = name + + with self.assertRaises(github_utils.NotFoundError): + github_utils.get_artifact(*new_args) + + def test_valid(self): + """Test that the correct download URL is returned for valid requests.""" + url = github_utils.get_artifact(*self.call_args) + self.assertEqual("https://final_download.url", url) + self.assertTrue(self.client.is_closed) + + +@mock.patch.object(github_utils, "get_artifact") +class GitHubArtifactViewTests(django.test.TestCase): + """Test the GitHub artifact fetch API view.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + cls.kwargs = { + "owner": "test_owner", + "repo": "test_repo", + "sha": "test_sha", + "action_name": "test_action", + "artifact_name": "test_artifact", + } + cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) + + async def test_successful(self, artifact_mock: mock.Mock): + """Test a proper response is returned with proper input.""" + artifact_mock.return_value = "final download url" + result = self.client.get(self.url) + + self.assertIsInstance(result, rest_framework.response.Response) + self.assertEqual({"url": artifact_mock.return_value}, result.data) + + async def test_failed_fetch(self, artifact_mock: mock.Mock): + """Test that a proper error is returned when the request fails.""" + artifact_mock.side_effect = github_utils.NotFoundError("Test error message") + result = self.client.get(self.url) + + self.assertIsInstance(result, 
rest_framework.response.Response)
+        self.assertEqual({
+            "error_type": github_utils.NotFoundError.__name__,
+            "error": "Test error message",
+            "requested_resource": "/".join(self.kwargs.values())
+        }, result.data)
diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py
index 1e564b29..2757f176 100644
--- a/pydis_site/apps/api/urls.py
+++ b/pydis_site/apps/api/urls.py
@@ -1,7 +1,7 @@
 from django.urls import include, path
 from rest_framework.routers import DefaultRouter
 
-from .views import HealthcheckView, RulesView
+from .views import GitHubArtifactsView, HealthcheckView, RulesView
 from .viewsets import (
     AocAccountLinkViewSet,
     AocCompletionistBlockViewSet,
@@ -86,5 +86,10 @@ urlpatterns = (
     # from django_hosts.resolvers import reverse
     path('bot/', include((bot_router.urls, 'api'), namespace='bot')),
     path('healthcheck', HealthcheckView.as_view(), name='healthcheck'),
-    path('rules', RulesView.as_view(), name='rules')
+    path('rules', RulesView.as_view(), name='rules'),
+    path(
+        'github/artifact/<str:owner>/<str:repo>/<str:sha>/<str:action_name>/<str:artifact_name>',
+        GitHubArtifactsView.as_view(),
+        name="github-artifacts"
+    ),
 )
diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index 816463f6..ad2d948e 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -1,7 +1,10 @@
 from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
+from . import github_utils
+
 
 class HealthcheckView(APIView):
     """
@@ -152,3 +155,53 @@ class RulesView(APIView):
             "Do not offer or ask for paid work of any kind."
         ),
     ])
+
+
+class GitHubArtifactsView(APIView):
+    """
+    Provides utilities for interacting with the GitHub API and obtaining action artifacts.
+
+    ## Routes
+    ### GET /github/artifacts
+    Returns a download URL for the artifact requested.
+
+        {
+            'url': 'https://pipelines.actions.githubusercontent.com/...'
+        }
+
+    ### Exceptions
+    In case of an error, the following body will be returned:
+
+        {
+            "error_type": "<error class name>",
+            "error": "<error message>",
+            "requested_resource": "<owner>/<repo>/<sha>/<action_name>/<artifact_name>"
+        }
+
+    ## Authentication
+    Does not require any authentication nor permissions.
+ """ + + authentication_classes = () + permission_classes = () + + def get( + self, + request: Request, + *, + owner: str, + repo: str, + sha: str, + action_name: str, + artifact_name: str + ) -> Response: + """Return a download URL for the requested artifact.""" + try: + url = github_utils.get_artifact(owner, repo, sha, action_name, artifact_name) + return Response({"url": url}) + except github_utils.ArtifactProcessingError as e: + return Response({ + "error_type": e.__class__.__name__, + "error": str(e), + "requested_resource": f"{owner}/{repo}/{sha}/{action_name}/{artifact_name}" + }, status=e.status) diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 03c16f4b..f382b052 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -21,7 +21,6 @@ import environ import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration - env = environ.Env( DEBUG=(bool, False), SITE_DSN=(str, ""), @@ -30,10 +29,19 @@ env = environ.Env( GIT_SHA=(str, 'development'), TIMEOUT_PERIOD=(int, 5), GITHUB_TOKEN=(str, None), + GITHUB_OAUTH_APP_ID=(str, None), + GITHUB_OAUTH_KEY=(str, None), ) GIT_SHA = env("GIT_SHA") +GITHUB_API = "https://api.github.com" GITHUB_TOKEN = env("GITHUB_TOKEN") +GITHUB_OAUTH_APP_ID = env("GITHUB_OAUTH_APP_ID") +GITHUB_OAUTH_KEY = env("GITHUB_OAUTH_KEY") + +if GITHUB_OAUTH_KEY and (oauth_file := Path(GITHUB_OAUTH_KEY)).is_file(): + # Allow the OAuth key to be loaded from a file + GITHUB_OAUTH_KEY = oauth_file.read_text(encoding="utf-8") sentry_sdk.init( dsn=env('SITE_DSN'), diff --git a/pyproject.toml b/pyproject.toml index 467fc8bc..1c24d308 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ markdown = "~=3.3.4" python-frontmatter = "~=1.0" django-prometheus = "~=2.1" django-distill = "~=2.9.0" +PyJWT = {version = "~=2.4.0", extras = ["crypto"]} [tool.poetry.dev-dependencies] coverage = "~=5.0" -- cgit v1.2.3 From 26a3c19b53883015e8ba87db2a668c3eece2ce20 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 12 Jul 2022 14:45:22 +0400 Subject: Make Awaiting Workflow Run A User Responsibility Moves the responsibility of re-requesting a workflow run from the API to the user. This makes the requests much shorter-lived, and allows the client to control how they want to handle sleeping and retrying. This also has the benefit of removing the only real piece of async code, so now the view is completely sync once again. 
Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 79 +++++------ pydis_site/apps/api/tests/test_github_utils.py | 174 ++++++++++++------------- static-builds/netlify_build.py | 9 +- 3 files changed, 131 insertions(+), 131 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 70dccdff..707b36e5 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -1,17 +1,17 @@ """Utilities for working with the GitHub API.""" -import asyncio import datetime import math import httpx import jwt -from asgiref.sync import async_to_sync from pydis_site import settings -MAX_POLLS = 20 -"""The maximum number of attempts at fetching a workflow run.""" +MAX_RUN_TIME = datetime.timedelta(minutes=3) +"""The maximum time allowed before an action is declared timed out.""" +ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ" +"""The datetime string format GitHub uses.""" class ArtifactProcessingError(Exception): @@ -44,6 +44,12 @@ class RunTimeoutError(ArtifactProcessingError): status = 408 +class RunPendingError(ArtifactProcessingError): + """The requested workflow run is still pending, try again later.""" + + status = 202 + + def generate_token() -> str: """ Generate a JWT token to access the GitHub API. @@ -66,7 +72,7 @@ def generate_token() -> str: ) -async def authorize(owner: str, repo: str) -> httpx.AsyncClient: +def authorize(owner: str, repo: str) -> httpx.Client: """ Get an access token for the requested repository. @@ -75,7 +81,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: - POST to get a token to access the given app - GET installation/repositories and check if the requested one is part of those """ - client = httpx.AsyncClient( + client = httpx.Client( base_url=settings.GITHUB_API, headers={"Authorization": f"bearer {generate_token()}"}, timeout=settings.TIMEOUT_PERIOD, @@ -83,7 +89,7 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: try: # Get a list of app installations we have access to - apps = await client.get("app/installations") + apps = client.get("app/installations") apps.raise_for_status() for app in apps.json(): @@ -92,11 +98,11 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: continue # Get the repositories of the specified owner - app_token = await client.post(app["access_tokens_url"]) + app_token = client.post(app["access_tokens_url"]) app_token.raise_for_status() client.headers["Authorization"] = f"bearer {app_token.json()['token']}" - repos = await client.get("installation/repositories") + repos = client.get("installation/repositories") repos.raise_for_status() # Search for the request repository @@ -111,44 +117,39 @@ async def authorize(owner: str, repo: str) -> httpx.AsyncClient: except BaseException as e: # Close the client if we encountered an unexpected exception - await client.aclose() + client.close() raise e -async def wait_for_run(client: httpx.AsyncClient, run: dict) -> str: - """Wait for the provided `run` to finish, and return the URL to its artifacts.""" - polls = 0 - while polls <= MAX_POLLS: - if run["status"] != "completed": - # The action is still processing, wait a bit longer - polls += 1 - await asyncio.sleep(10) - - elif run["conclusion"] != "success": - # The action failed, or did not run - raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") +def check_run_status(run: dict) -> str: + """Check if the provided run has been completed, otherwise 
raise an exception.""" + created_at = datetime.datetime.strptime(run["created_at"], ISO_FORMAT_STRING) + run_time = datetime.datetime.now() - created_at + if run["status"] != "completed": + if run_time <= MAX_RUN_TIME: + raise RunPendingError( + f"The requested run is still pending. It was created " + f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago." + ) else: - # The desired action was found, and it ended successfully - return run["artifacts_url"] + raise RunTimeoutError("The requested workflow was not ready in time.") - run = await client.get(run["url"]) - run.raise_for_status() - run = run.json() + if run["conclusion"] != "success": + # The action failed, or did not run + raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") - raise RunTimeoutError("The requested workflow was not ready in time.") + # The requested action is ready + return run["artifacts_url"] -@async_to_sync -async def get_artifact( - owner: str, repo: str, sha: str, action_name: str, artifact_name: str -) -> str: +def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str: """Get a download URL for a build artifact.""" - client = await authorize(owner, repo) + client = authorize(owner, repo) try: # Get the workflow runs for this repository - runs = await client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100}) + runs = client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100}) runs.raise_for_status() runs = runs.json() @@ -161,16 +162,16 @@ async def get_artifact( "Could not find a run matching the provided settings in the previous hundred runs." ) - # Wait for the workflow to finish - url = await wait_for_run(client, run) + # Check the workflow status + url = check_run_status(run) # Filter the artifacts, and return the download URL - artifacts = await client.get(url) + artifacts = client.get(url) artifacts.raise_for_status() for artifact in artifacts.json()["artifacts"]: if artifact["name"] == artifact_name: - data = await client.get(artifact["archive_download_url"]) + data = client.get(artifact["archive_download_url"]) if data.status_code == 302: return str(data.next_request.url) @@ -180,4 +181,4 @@ async def get_artifact( raise NotFoundError("Could not find an artifact matching the provided name.") finally: - await client.aclose() + client.close() diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index dc17d609..78f2979d 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -1,6 +1,4 @@ -import asyncio import datetime -import random import unittest from unittest import mock @@ -14,16 +12,6 @@ from django.urls import reverse from .. 
import github_utils -def patched_raise_for_status(response: httpx.Response): - """Fake implementation of raise_for_status which does not need a request to be set.""" - if response.status_code // 100 != 2: # pragma: no cover - raise httpx.HTTPStatusError( - f"Non 2xx response code: {response.status_code}", - request=getattr(response, "_request", httpx.Request("GET", "")), - response=response - ) - - class GeneralUtilityTests(unittest.TestCase): """Test the utility methods which do not fit in another class.""" @@ -51,53 +39,50 @@ class GeneralUtilityTests(unittest.TestCase): self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) -@mock.patch("httpx.AsyncClient", autospec=True) -@mock.patch("asyncio.sleep", new=mock.AsyncMock(return_value=asyncio.Future)) -@mock.patch("httpx.Response.raise_for_status", new=patched_raise_for_status) -class WaitForTests(unittest.IsolatedAsyncioTestCase): - """Tests the wait_for utility.""" - - async def test_wait_for_successful_run(self, client_mock: mock.Mock): - """Test that the wait_for method handles successfully runs.""" - final_url = "some_url" + str(random.randint(0, 10)) - - client_mock.get.side_effect = responses = [ - httpx.Response(200, json={"status": "queued", "url": ""}), - httpx.Response(200, json={"status": "pending", "url": ""}), - httpx.Response(200, json={ - "status": "completed", - "conclusion": "success", - "url": "", - "artifacts_url": final_url - }) - ] +class WaitForTests(unittest.TestCase): + """Tests the check_run_status utility.""" - result = await github_utils.wait_for_run(client_mock, responses[0].json()) - self.assertEqual(final_url, result) + def test_completed_run(self): + final_url = "some_url_string_1234" - async def test_wait_for_failed_run(self, client_mock: mock.Mock): - """Test that the wait_for method handles failed runs.""" - client_mock.get.return_value = httpx.Response(200, json={ + result = github_utils.check_run_status({ "status": "completed", - "conclusion": "failed", + "conclusion": "success", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + "artifacts_url": final_url, }) + self.assertEqual(final_url, result) - with self.assertRaises(github_utils.ActionFailedError): - await github_utils.wait_for_run(client_mock, {"status": "pending", "url": ""}) + def test_pending_run(self): + """Test that a pending run raises the proper exception.""" + with self.assertRaises(github_utils.RunPendingError): + github_utils.check_run_status({ + "status": "pending", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + }) - async def test_wait_for_timeout(self, client_mock: mock.Mock): - """Test that the wait_for method quits after a few attempts.""" - client_mock.get.side_effect = responses = [ - httpx.Response(200, json={"status": "pending", "url": ""}) - ] * (github_utils.MAX_POLLS + 5) + def test_timeout_error(self): + """Test that a timeout is declared after a certain duration.""" + # Set the creation time to well before the MAX_RUN_TIME + # to guarantee the right conclusion + created = ( + datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + ).strftime(github_utils.ISO_FORMAT_STRING) with self.assertRaises(github_utils.RunTimeoutError): - await github_utils.wait_for_run(client_mock, responses[0].json()) + github_utils.check_run_status({"status": "pending", "created_at": created}) + + def test_failed_run(self): + """Test that a failed run raises the proper exception.""" + with 
self.assertRaises(github_utils.ActionFailedError): + github_utils.check_run_status({ + "status": "completed", + "conclusion": "failed", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + }) -async def get_response_authorize( - _: httpx.AsyncClient, request: httpx.Request, **__ -) -> httpx.Response: +def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response: """ Helper method for the authorize tests. @@ -141,76 +126,83 @@ async def get_response_authorize( return httpx.Response(500, request=request) # pragma: no cover -@mock.patch("httpx.AsyncClient.send", new=get_response_authorize) +@mock.patch("httpx.Client.send", new=get_response_authorize) @mock.patch.object(github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token")) -class AuthorizeTests(unittest.IsolatedAsyncioTestCase): +class AuthorizeTests(unittest.TestCase): """Test the authorize utility.""" - async def test_invalid_apps_auth(self): + def test_invalid_apps_auth(self): """Test that an exception is raised if authorization was attempted with an invalid token.""" with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): with self.assertRaises(httpx.HTTPStatusError) as error: - await github_utils.authorize("VALID_OWNER", "VALID_REPO") + github_utils.authorize("VALID_OWNER", "VALID_REPO") exception: httpx.HTTPStatusError = error.exception self.assertEqual(401, exception.response.status_code) self.assertEqual("auth app/installations", exception.response.json()["error"]) - async def test_missing_repo(self): + def test_missing_repo(self): """Test that an exception is raised when the selected owner or repo are not available.""" with self.assertRaises(github_utils.NotFoundError): - await github_utils.authorize("INVALID_OWNER", "VALID_REPO") + github_utils.authorize("INVALID_OWNER", "VALID_REPO") with self.assertRaises(github_utils.NotFoundError): - await github_utils.authorize("VALID_OWNER", "INVALID_REPO") + github_utils.authorize("VALID_OWNER", "INVALID_REPO") - async def test_valid_authorization(self): + def test_valid_authorization(self): """Test that an accessible repository can be accessed.""" - client = await github_utils.authorize("VALID_OWNER", "VALID_REPO") + client = github_utils.authorize("VALID_OWNER", "VALID_REPO") self.assertEqual("bearer app access token", client.headers.get("Authorization")) -async def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: - """ - Helper method for the get_artifact tests. +class ArtifactFetcherTests(unittest.TestCase): + """Test the get_artifact utility.""" - Requests are intercepted before being sent out, and the appropriate responses are returned. - """ - path = request.url.path + @staticmethod + def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response: + """ + Helper method for the get_artifact tests. - if "force_error" in path: - return httpx.Response(404, request=request) + Requests are intercepted before being sent out, and the appropriate responses are returned. 
+ """ + path = request.url.path - if request.method == "GET": - if path == "/repos/owner/repo/actions/runs": - return httpx.Response(200, request=request, json={"workflow_runs": [{ - "name": "action_name", - "head_sha": "action_sha" - }]}) - elif path == "/artifact_url": - return httpx.Response(200, request=request, json={"artifacts": [{ - "name": "artifact_name", - "archive_download_url": "artifact_download_url" - }]}) - elif path == "/artifact_download_url": - response = httpx.Response(302, request=request) - response.next_request = httpx.Request("GET", httpx.URL("https://final_download.url")) - return response - - # Reaching this point means something has gone wrong - return httpx.Response(500, request=request) # pragma: no cover + if "force_error" in path: + return httpx.Response(404, request=request) + if request.method == "GET": + if path == "/repos/owner/repo/actions/runs": + return httpx.Response( + 200, request=request, json={"workflow_runs": [{ + "name": "action_name", + "head_sha": "action_sha" + }]} + ) + elif path == "/artifact_url": + return httpx.Response( + 200, request=request, json={"artifacts": [{ + "name": "artifact_name", + "archive_download_url": "artifact_download_url" + }]} + ) + elif path == "/artifact_download_url": + response = httpx.Response(302, request=request) + response.next_request = httpx.Request( + "GET", + httpx.URL("https://final_download.url") + ) + return response -class ArtifactFetcherTests(unittest.IsolatedAsyncioTestCase): - """Test the get_artifact utility.""" + # Reaching this point means something has gone wrong + return httpx.Response(500, request=request) # pragma: no cover def setUp(self) -> None: self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"] - self.client = httpx.AsyncClient(base_url="https://example.com") + self.client = httpx.Client(base_url="https://example.com") self.patchers = [ - mock.patch.object(self.client, "send", new=get_response_get_artifact), + mock.patch.object(self.client, "send", new=self.get_response_get_artifact), mock.patch.object(github_utils, "authorize", return_value=self.client), - mock.patch.object(github_utils, "wait_for_run", return_value="artifact_url"), + mock.patch.object(github_utils, "check_run_status", return_value="artifact_url"), ] for patcher in self.patchers: @@ -266,7 +258,7 @@ class GitHubArtifactViewTests(django.test.TestCase): } cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) - async def test_successful(self, artifact_mock: mock.Mock): + def test_successful(self, artifact_mock: mock.Mock): """Test a proper response is returned with proper input.""" artifact_mock.return_value = "final download url" result = self.client.get(self.url) @@ -274,7 +266,7 @@ class GitHubArtifactViewTests(django.test.TestCase): self.assertIsInstance(result, rest_framework.response.Response) self.assertEqual({"url": artifact_mock.return_value}, result.data) - async def test_failed_fetch(self, artifact_mock: mock.Mock): + def test_failed_fetch(self, artifact_mock: mock.Mock): """Test that a proper error is returned when the request fails.""" artifact_mock.side_effect = github_utils.NotFoundError("Test error message") result = self.client.get(self.url) diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 13cd0279..a473bd91 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -8,6 +8,7 @@ import json import os +import time import zipfile from pathlib import Path from urllib import parse @@ -29,7 +30,7 @@ if __name__ == 
"__main__": print(f"Fetching download URL from {download_url}") response = httpx.get(download_url, follow_redirects=True) - if response.status_code != 200: + if response.status_code // 100 != 2: try: print(response.json()) except json.JSONDecodeError: @@ -37,6 +38,12 @@ if __name__ == "__main__": response.raise_for_status() + # The workflow is still pending, retry in a bit + while response.status_code == 202: + print(f"{response.json()['error']}. Retrying in 10 seconds.") + time.sleep(10) + response = httpx.get(download_url, follow_redirects=True) + url = response.json()["url"] print(f"Downloading build from {url}") zipped_content = httpx.get(url, follow_redirects=True) -- cgit v1.2.3 From 124c84b9e4f8195485b6ff9b3896cc87e640e02b Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Thu, 14 Jul 2022 05:57:05 +0400 Subject: Clean Up Artifact Tests Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 78f2979d..a9eab9a5 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -21,7 +21,7 @@ class GeneralUtilityTests(unittest.TestCase): """ Intercept the encode method. - It is performed with an algorithm which does not require a PEM key, as it may + The result is encoded with an algorithm which does not require a PEM key, as it may not be available in testing environments. """ self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") @@ -39,10 +39,11 @@ class GeneralUtilityTests(unittest.TestCase): self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) -class WaitForTests(unittest.TestCase): +class CheckRunTests(unittest.TestCase): """Tests the check_run_status utility.""" def test_completed_run(self): + """Test that an already completed run returns the correct URL.""" final_url = "some_url_string_1234" result = github_utils.check_run_status({ @@ -245,20 +246,17 @@ class ArtifactFetcherTests(unittest.TestCase): class GitHubArtifactViewTests(django.test.TestCase): """Test the GitHub artifact fetch API view.""" - @classmethod - def setUpClass(cls): - super().setUpClass() - - cls.kwargs = { + def setUp(self): + self.kwargs = { "owner": "test_owner", "repo": "test_repo", "sha": "test_sha", "action_name": "test_action", "artifact_name": "test_artifact", } - cls.url = reverse("api:github-artifacts", kwargs=cls.kwargs) + self.url = reverse("api:github-artifacts", kwargs=self.kwargs) - def test_successful(self, artifact_mock: mock.Mock): + def test_correct_artifact(self, artifact_mock: mock.Mock): """Test a proper response is returned with proper input.""" artifact_mock.return_value = "final download url" result = self.client.get(self.url) -- cgit v1.2.3 From 37001bca59c1d3d5fc8a8dadffda00d55fc9e0b6 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 24 Jul 2022 08:32:43 +0200 Subject: Use Dataclass For Workflow Run Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 41 ++++++++++++++++----- pydis_site/apps/api/tests/test_github_utils.py | 50 +++++++++++++++----------- 2 files changed, 62 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 707b36e5..c4ace6b7 100644 --- a/pydis_site/apps/api/github_utils.py +++ 
b/pydis_site/apps/api/github_utils.py @@ -1,7 +1,8 @@ """Utilities for working with the GitHub API.""" - +import dataclasses import datetime import math +import typing import httpx import jwt @@ -50,6 +51,29 @@ class RunPendingError(ArtifactProcessingError): status = 202 +@dataclasses.dataclass(frozen=True) +class WorkflowRun: + """ + A workflow run from the GitHub API. + + https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run + """ + + name: str + head_sha: str + created_at: str + status: str + conclusion: str + artifacts_url: str + + @classmethod + def from_raw(cls, data: dict[str, typing.Any]): + """Create an instance using the raw data from the API, discarding unused fields.""" + return cls(**{ + key.name: data[key.name] for key in dataclasses.fields(cls) + }) + + def generate_token() -> str: """ Generate a JWT token to access the GitHub API. @@ -121,12 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client: raise e -def check_run_status(run: dict) -> str: +def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" - created_at = datetime.datetime.strptime(run["created_at"], ISO_FORMAT_STRING) + created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING) run_time = datetime.datetime.now() - created_at - if run["status"] != "completed": + if run.status != "completed": if run_time <= MAX_RUN_TIME: raise RunPendingError( f"The requested run is still pending. It was created " @@ -135,12 +159,12 @@ def check_run_status(run: dict) -> str: else: raise RunTimeoutError("The requested workflow was not ready in time.") - if run["conclusion"] != "success": + if run.conclusion != "success": # The action failed, or did not run - raise ActionFailedError(f"The requested workflow ended with: {run['conclusion']}") + raise ActionFailedError(f"The requested workflow ended with: {run.conclusion}") # The requested action is ready - return run["artifacts_url"] + return run.artifacts_url def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str: @@ -155,7 +179,8 @@ def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_nam # Filter the runs for the one associated with the given SHA for run in runs["workflow_runs"]: - if run["name"] == action_name and sha == run["head_sha"]: + run = WorkflowRun.from_raw(run) + if run.name == action_name and sha == run.head_sha: break else: raise NotFoundError( diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index a9eab9a5..f5e072a9 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -1,4 +1,6 @@ +import dataclasses import datetime +import typing import unittest from unittest import mock @@ -42,45 +44,46 @@ class GeneralUtilityTests(unittest.TestCase): class CheckRunTests(unittest.TestCase): """Tests the check_run_status utility.""" + run_kwargs: typing.Mapping = { + "name": "run_name", + "head_sha": "sha", + "status": "completed", + "conclusion": "success", + "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + "artifacts_url": "url", + } + def test_completed_run(self): """Test that an already completed run returns the correct URL.""" final_url = "some_url_string_1234" - result = github_utils.check_run_status({ - "status": "completed", - "conclusion": "success", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - 
"artifacts_url": final_url, - }) + kwargs = dict(self.run_kwargs, artifacts_url=final_url) + result = github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) self.assertEqual(final_url, result) def test_pending_run(self): """Test that a pending run raises the proper exception.""" + kwargs = dict(self.run_kwargs, status="pending") with self.assertRaises(github_utils.RunPendingError): - github_utils.check_run_status({ - "status": "pending", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - }) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def test_timeout_error(self): """Test that a timeout is declared after a certain duration.""" + kwargs = dict(self.run_kwargs, status="pending") # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion - created = ( + kwargs["created_at"] = ( datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(github_utils.ISO_FORMAT_STRING) with self.assertRaises(github_utils.RunTimeoutError): - github_utils.check_run_status({"status": "pending", "created_at": created}) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def test_failed_run(self): """Test that a failed run raises the proper exception.""" + kwargs = dict(self.run_kwargs, conclusion="failed") with self.assertRaises(github_utils.ActionFailedError): - github_utils.check_run_status({ - "status": "completed", - "conclusion": "failed", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), - }) + github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response: @@ -172,11 +175,16 @@ class ArtifactFetcherTests(unittest.TestCase): if request.method == "GET": if path == "/repos/owner/repo/actions/runs": + run = github_utils.WorkflowRun( + name="action_name", + head_sha="action_sha", + created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + status="completed", + conclusion="success", + artifacts_url="artifacts_url" + ) return httpx.Response( - 200, request=request, json={"workflow_runs": [{ - "name": "action_name", - "head_sha": "action_sha" - }]} + 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]} ) elif path == "/artifact_url": return httpx.Response( -- cgit v1.2.3 From f16d3b1b1d14cdf0de1e56ae2bc466152e930f34 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 24 Jul 2022 10:06:47 +0200 Subject: Use UTC Time For GitHub API When reading the created_at time from the GitHub API, it'll be a naive date string with UTC time, so we use that instead of the system's time. 
Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 2 +- pydis_site/apps/api/tests/test_github_utils.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index c4ace6b7..7d26b147 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -148,7 +148,7 @@ def authorize(owner: str, repo: str) -> httpx.Client: def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING) - run_time = datetime.datetime.now() - created_at + run_time = datetime.datetime.utcnow() - created_at if run.status != "completed": if run_time <= MAX_RUN_TIME: diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f5e072a9..f642f689 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -49,7 +49,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING), "artifacts_url": "url", } @@ -73,7 +73,7 @@ class CheckRunTests(unittest.TestCase): # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion kwargs["created_at"] = ( - datetime.datetime.now() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(github_utils.ISO_FORMAT_STRING) with self.assertRaises(github_utils.RunTimeoutError): -- cgit v1.2.3 From dfc32e28103d652170868d09b49ba98ea95c91bf Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 27 Jul 2022 21:33:13 +0100 Subject: Add a field to track the time an infraction was last applied A default is set for backwards compatibility with bot versions that don't explicitly give a value.
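A condensed sketch of the approach, for illustration only (the field mirrors the diff below, the backfill mirrors a later commit in this series, and it assumes Django's stock model and data-migration APIs):

    from django.db import models
    from django.utils import timezone

    # The defaulted column keeps older bot versions working when they omit the field.
    last_applied = models.DateTimeField(
        default=timezone.now,
        help_text="The date and time of when this infraction was last applied.",
    )

    def backfill_last_applied(apps, schema_editor):
        # Data migration step: look up the historical model and copy inserted_at
        # into last_applied for existing rows in a single UPDATE via F().
        Infraction = apps.get_model("api", "infraction")
        Infraction.objects.all().update(last_applied=models.F("inserted_at"))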
--- .../api/migrations/0084_infraction_last_applied.py | 19 +++++++++++++++++++ pydis_site/apps/api/models/bot/infraction.py | 6 ++++++ 2 files changed, 25 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0084_infraction_last_applied.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_infraction_last_applied.py b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py new file mode 100644 index 00000000..0977fa20 --- /dev/null +++ b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py @@ -0,0 +1,19 @@ +# Generated by Django 4.0.6 on 2022-07-27 20:32 + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0083_remove_embed_validation'), + ] + + operations = [ + migrations.AddField( + model_name='infraction', + name='last_applied', + field=models.DateTimeField(default=django.utils.timezone.now, help_text='The date and time of when this infraction was last applied.'), + ), + ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index c9303024..218ee5ec 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -23,6 +23,12 @@ class Infraction(ModelReprMixin, models.Model): default=timezone.now, help_text="The date and time of the creation of this infraction." ) + last_applied = models.DateTimeField( + # This default is for backwards compatibility with bot versions + # that don't explicitly give a value. + default=timezone.now, + help_text="The date and time of when this infraction was last applied." + ) expires_at = models.DateTimeField( null=True, help_text=( -- cgit v1.2.3 From 163201d27fa7505632a36a3d918ebb9321856554 Mon Sep 17 00:00:00 2001 From: Chris Lovering Date: Wed, 27 Jul 2022 22:31:09 +0100 Subject: Backdate last_applied dates to use value of inserted_at --- pydis_site/apps/api/migrations/0084_infraction_last_applied.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_infraction_last_applied.py b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py index 0977fa20..7704ddb8 100644 --- a/pydis_site/apps/api/migrations/0084_infraction_last_applied.py +++ b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py @@ -1,7 +1,13 @@ # Generated by Django 4.0.6 on 2022-07-27 20:32 -from django.db import migrations, models import django.utils.timezone +from django.db import migrations, models +from django.apps.registry import Apps + + +def set_last_applied_to_inserted_at(apps: Apps, schema_editor): + Infractions = apps.get_model("api", "infraction") + Infractions.objects.all().update(last_applied=models.F("inserted_at")) class Migration(migrations.Migration): @@ -16,4 +22,5 @@ class Migration(migrations.Migration): name='last_applied', field=models.DateTimeField(default=django.utils.timezone.now, help_text='The date and time of when this infraction was last applied.'), ), + migrations.RunPython(set_last_applied_to_inserted_at) ] -- cgit v1.2.3 From 562b6f0d783583838e51a86086aa441f093de102 Mon Sep 17 00:00:00 2001 From: ionite34 Date: Wed, 3 Aug 2022 17:15:05 -0400 Subject: Added `last_applied` to `serializers` --- pydis_site/apps/api/serializers.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 
e53ccffa..9228c1f4 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -176,6 +176,7 @@ class InfractionSerializer(ModelSerializer): fields = ( 'id', 'inserted_at', + 'last_applied', 'expires_at', 'active', 'user', -- cgit v1.2.3 From 460ccffe266373febcd1676d609d65f03de5a967 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 12 Aug 2022 16:32:59 +0200 Subject: Rename GitHub App Environment Variables Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 4 ++-- pydis_site/settings.py | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 7d26b147..ad24165d 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -89,9 +89,9 @@ def generate_token() -> str: { "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at "exp": math.floor((now + datetime.timedelta(minutes=9)).timestamp()), # Expires at - "iss": settings.GITHUB_OAUTH_APP_ID, + "iss": settings.GITHUB_APP_ID, }, - settings.GITHUB_OAUTH_KEY, + settings.GITHUB_APP_KEY, algorithm="RS256" ) diff --git a/pydis_site/settings.py b/pydis_site/settings.py index f382b052..bbf1d3aa 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -29,19 +29,19 @@ env = environ.Env( GIT_SHA=(str, 'development'), TIMEOUT_PERIOD=(int, 5), GITHUB_TOKEN=(str, None), - GITHUB_OAUTH_APP_ID=(str, None), - GITHUB_OAUTH_KEY=(str, None), + GITHUB_APP_ID=(str, None), + GITHUB_APP_KEY=(str, None), ) GIT_SHA = env("GIT_SHA") GITHUB_API = "https://api.github.com" GITHUB_TOKEN = env("GITHUB_TOKEN") -GITHUB_OAUTH_APP_ID = env("GITHUB_OAUTH_APP_ID") -GITHUB_OAUTH_KEY = env("GITHUB_OAUTH_KEY") +GITHUB_APP_ID = env("GITHUB_APP_ID") +GITHUB_APP_KEY = env("GITHUB_APP_KEY") -if GITHUB_OAUTH_KEY and (oauth_file := Path(GITHUB_OAUTH_KEY)).is_file(): +if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file(): # Allow the OAuth key to be loaded from a file - GITHUB_OAUTH_KEY = oauth_file.read_text(encoding="utf-8") + GITHUB_APP_KEY = key_file.read_text(encoding="utf-8") sentry_sdk.init( dsn=env('SITE_DSN'), -- cgit v1.2.3 From 79fee144823ce642a48af038398478144146730c Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 12 Aug 2022 22:54:17 +0200 Subject: Bump Deadline For GitHub Artifacts Route Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index ad24165d..5d7bcdc3 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -9,7 +9,7 @@ import jwt from pydis_site import settings -MAX_RUN_TIME = datetime.timedelta(minutes=3) +MAX_RUN_TIME = datetime.timedelta(minutes=10) """The maximum time allowed before an action is declared timed out.""" ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ" """The datetime string format GitHub uses.""" -- cgit v1.2.3 From 25db8f564c0f5c473b165ccab14413ca4471ac7d Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Sun, 14 Aug 2022 07:20:34 +0200 Subject: Explicitly Specify Infraction Time In Tests The infraction tests checked that the route returned infractions in the correct order, which is based on insertion time. This can be fragile however, since the insertion time can be very close (or identical) during the tests. 
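(Illustrative aside, not part of the patch: the fix described below amounts to giving every fixture an explicit, well-spaced inserted_at so the expected ordering no longer depends on write speed; the spacing here mirrors the fixture values in the diff.)

    import datetime

    base = datetime.datetime(2020, 10, 10, tzinfo=datetime.timezone.utc)
    # One timestamp per fixture, a minute apart, so insertion order is unambiguous.
    spaced_inserted_at = [base + datetime.timedelta(minutes=i) for i in range(5)]
    print(spaced_inserted_at)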
That became especially more likely with PR #741 (commit 149e67b4) which improved database access speed. This is fixed by explicitly specifying the insertion time, and spacing them out properly. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_infractions.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index f1107734..89ee4e23 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -56,15 +56,17 @@ class InfractionTests(AuthenticatedAPITestCase): type='ban', reason='He terk my jerb!', hidden=True, + inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc), expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc), - active=True + active=True, ) cls.ban_inactive = Infraction.objects.create( user_id=cls.user.id, actor_id=cls.user.id, type='ban', reason='James is an ass, and we won\'t be working with him again.', - active=False + active=False, + inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), ) cls.mute_permanent = Infraction.objects.create( user_id=cls.user.id, @@ -72,7 +74,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='mute', reason='He has a filthy mouth and I am his soap.', active=True, - expires_at=None + inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), + expires_at=None, ) cls.superstar_expires_soon = Infraction.objects.create( user_id=cls.user.id, @@ -80,7 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='superstar', reason='This one doesn\'t matter anymore.', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5) + inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5), ) cls.voiceban_expires_later = Infraction.objects.create( user_id=cls.user.id, @@ -88,7 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase): type='voice_ban', reason='Jet engine mic', active=True, - expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5) + inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc), + expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5), ) def test_list_all(self): -- cgit v1.2.3 From 5dfe019745b53ceb8ce37f0db937d6e2a302f6d7 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 18:58:29 +0400 Subject: Move GitHub strptime Format To Settings Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 4 +--- pydis_site/settings.py | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 5d7bcdc3..e9d7347b 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -11,8 +11,6 @@ from pydis_site import settings MAX_RUN_TIME = datetime.timedelta(minutes=10) """The maximum time allowed before an action is declared timed out.""" -ISO_FORMAT_STRING = "%Y-%m-%dT%H:%M:%SZ" -"""The datetime string format GitHub uses.""" class ArtifactProcessingError(Exception): @@ -147,7 +145,7 @@ def authorize(owner: str, repo: str) -> httpx.Client: def check_run_status(run: WorkflowRun) -> str: """Check if the provided run has been completed, otherwise raise an exception.""" - created_at = datetime.datetime.strptime(run.created_at, ISO_FORMAT_STRING) + created_at = datetime.datetime.strptime(run.created_at, 
settings.GITHUB_TIMESTAMP_FORMAT) run_time = datetime.datetime.utcnow() - created_at if run.status != "completed": diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 315ea737..9fbd0273 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -38,6 +38,8 @@ GITHUB_API = "https://api.github.com" GITHUB_TOKEN = env("GITHUB_TOKEN") GITHUB_APP_ID = env("GITHUB_APP_ID") GITHUB_APP_KEY = env("GITHUB_APP_KEY") +GITHUB_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ" +"""The datetime string format GitHub uses.""" if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file(): # Allow the OAuth key to be loaded from a file -- cgit v1.2.3 From 92a42694b6ad1a29e5a21e0b3e57639528837113 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Tue, 16 Aug 2022 23:45:25 +0400 Subject: Fix Tests For Tag Metadata Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 7 +- pydis_site/apps/content/tests/test_utils.py | 132 +++++++++++++++++++++++-- pydis_site/apps/content/tests/test_views.py | 36 ++++--- pydis_site/apps/content/utils.py | 2 +- 4 files changed, 148 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f642f689..6e25bc80 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -11,6 +11,7 @@ import rest_framework.response import rest_framework.test from django.urls import reverse +from pydis_site import settings from .. import github_utils @@ -49,7 +50,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(github_utils.ISO_FORMAT_STRING), + "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,7 @@ class CheckRunTests(unittest.TestCase): # to guarantee the right conclusion kwargs["created_at"] = ( datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) - ).strftime(github_utils.ISO_FORMAT_STRING) + ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): github_utils.check_run_status(github_utils.WorkflowRun(**kwargs)) @@ -178,7 +179,7 @@ class ArtifactFetcherTests(unittest.TestCase): run = github_utils.WorkflowRun( name="action_name", head_sha="action_sha", - created_at=datetime.datetime.now().strftime(github_utils.ISO_FORMAT_STRING), + created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), status="completed", conclusion="success", artifacts_url="artifacts_url" diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py index 556f633c..2ef033e4 100644 --- a/pydis_site/apps/content/tests/test_utils.py +++ b/pydis_site/apps/content/tests/test_utils.py @@ -1,3 +1,5 @@ +import datetime +import json import tarfile import tempfile import textwrap @@ -15,6 +17,18 @@ from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc) +_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT) +TEST_COMMIT_KWARGS = { + "sha": "123", + "message": "Hello world\n\nThis is a commit message", + "date": _time, + "author": json.dumps([ + {"name": "Author 1", "email": "mail1@example.com", "date": _time_str}, + 
{"name": "Author 2", "email": "mail2@example.com", "date": _time_str}, + ]), +} + class GetCategoryTests(MockPagesTestCase): """Tests for the get_category function.""" @@ -109,6 +123,10 @@ class GetPageTests(MockPagesTestCase): class TagUtilsTests(TestCase): """Tests for the tag-related utilities.""" + def setUp(self) -> None: + super().setUp() + self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS) + @mock.patch.object(utils, "fetch_tags") def test_static_fetch(self, fetch_mock: mock.Mock): """Test that the static fetch function is only called at most once during static builds.""" @@ -121,9 +139,27 @@ class TagUtilsTests(TestCase): self.assertEqual(tags, result) self.assertEqual(tags, second_result) - @mock.patch("httpx.get") + @mock.patch("httpx.Client.get") def test_mocked_fetch(self, get_mock: mock.Mock): """Test that proper data is returned from fetch, but with a mocked API response.""" + fake_request = httpx.Request("GET", "https://google.com") + + # Metadata requests + returns = [httpx.Response( + request=fake_request, + status_code=200, + json=[ + {"type": "file", "name": "first_tag.md", "sha": "123"}, + {"type": "file", "name": "second_tag.md", "sha": "456"}, + {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"}, + ] + ), httpx.Response( + request=fake_request, + status_code=200, + json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}] + )] + + # Main content request bodies = ( "This is the first tag!", textwrap.dedent(""" @@ -156,33 +192,36 @@ class TagUtilsTests(TestCase): body = (tar_folder / "temp.tar").read_bytes() - get_mock.return_value = httpx.Response( + returns.append(httpx.Response( status_code=200, content=body, - request=httpx.Request("GET", "https://google.com"), - ) + request=fake_request, + )) + get_mock.side_effect = returns result = utils.fetch_tags() def sort(_tag: models.Tag) -> str: return _tag.name self.assertEqual(sorted([ - models.Tag(name="first_tag", body=bodies[0]), - models.Tag(name="second_tag", body=bodies[1]), - models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name), + models.Tag(name="first_tag", body=bodies[0], sha="123"), + models.Tag(name="second_tag", body=bodies[1], sha="245"), + models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"), ], key=sort), sorted(result, key=sort)) def test_get_real_tag(self): """Test that a single tag is returned if it exists.""" - tag = models.Tag.objects.create(name="real-tag") + tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit) result = utils.get_tag("real-tag") self.assertEqual(tag, result) def test_get_grouped_tag(self): """Test fetching a tag from a group.""" - tag = models.Tag.objects.create(name="real-tag", group="real-group") + tag = models.Tag.objects.create( + name="real-tag", group="real-group", last_commit=self.commit + ) result = utils.get_tag("real-group/real-tag") self.assertEqual(tag, result) @@ -269,3 +308,78 @@ class TagUtilsTests(TestCase): tag = models.Tag(**options) with self.subTest(tag=tag): self.assertEqual(url, tag.url) + + @mock.patch("httpx.Client.get") + def test_get_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a normal tag.""" + tag = models.Tag.objects.create(name="example") + + authors = json.loads(self.commit.author) + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], 
+ "committer": authors[1], + } + }] + ) + + result = utils.get_tag(tag.name) + self.assertEqual(tag, result) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch("httpx.Client.get") + def test_get_group_tag_commit(self, get_mock: mock.Mock): + """Test the get commit function with a group tag.""" + tag = models.Tag.objects.create(name="example", group="group-name") + + authors = json.loads(self.commit.author) + authors.pop() + self.commit.author = json.dumps(authors) + self.commit.save() + + get_mock.return_value = httpx.Response( + request=httpx.Request("GET", "https://google.com"), + status_code=200, + json=[{ + "sha": self.commit.sha, + "commit": { + "message": self.commit.message, + "author": authors[0], + "committer": authors[0], + } + }] + ) + + utils.set_tag_commit(tag) + + get_mock.assert_called_once() + call_params = get_mock.call_args[1]["params"] + + self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params) + self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit) + + @mock.patch.object(utils, "set_tag_commit") + def test_exiting_commit(self, set_commit_mock: mock.Mock): + """Test that a commit is saved when the data has not changed.""" + tag = models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit) + + # This is only applied to the object, not to the database + tag.last_commit = None + + utils.record_tags([tag]) + self.assertEqual(self.commit, tag.last_commit) + + result = utils.get_tag("tag-name") + self.assertEqual(tag, result) + set_commit_mock.assert_not_called() diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index c5c25be4..658ac2cc 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -8,10 +8,11 @@ from django.http import Http404 from django.test import RequestFactory, SimpleTestCase, override_settings from django.urls import reverse -from pydis_site.apps.content.models import Tag +from pydis_site.apps.content.models import Commit, Tag from pydis_site.apps.content.tests.helpers import ( BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA ) +from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS from pydis_site.apps.content.views import PageOrCategoryView @@ -193,11 +194,12 @@ class TagViewTests(django.test.TestCase): def setUp(self): """Set test helpers, then set up fake filesystem.""" super().setUp() + self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS) def test_routing(self): """Test that the correct template is returned for each route.""" - Tag.objects.create(name="example") - Tag.objects.create(name="grouped-tag", group="group-name") + Tag.objects.create(name="example", last_commit=self.commit) + Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit) cases = [ ("/pages/tags/example/", "content/tag.html"), @@ -213,7 +215,7 @@ class TagViewTests(django.test.TestCase): def test_valid_tag_returns_200(self): """Test that a page is returned for a valid tag.""" - Tag.objects.create(name="example", body="This is the tag body.") + Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual(200, 
response.status_code) self.assertIn("This is the tag body", response.content.decode("utf-8")) @@ -233,7 +235,7 @@ class TagViewTests(django.test.TestCase): Tag content here. """) - tag = Tag.objects.create(name="example", body=body) + tag = Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") expected = { "page_title": "example", @@ -256,7 +258,9 @@ class TagViewTests(django.test.TestCase): The only difference between this and a regular tag are the breadcrumbs, so only those are checked. """ - Tag.objects.create(name="example", body="Body text", group="group-name") + Tag.objects.create( + name="example", body="Body text", group="group-name", last_commit=self.commit + ) response = self.client.get("/pages/tags/group-name/example/") self.assertListEqual([ {"name": "Pages", "path": "."}, @@ -266,9 +270,9 @@ class TagViewTests(django.test.TestCase): def test_group_page(self): """Test rendering of a group's root page.""" - Tag.objects.create(name="tag-1", body="Body 1", group="group-name") - Tag.objects.create(name="tag-2", body="Body 2", group="group-name") - Tag.objects.create(name="not-included") + Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit) + Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit) + Tag.objects.create(name="not-included", last_commit=self.commit) response = self.client.get("/pages/tags/group-name/") content = response.content.decode("utf-8") @@ -298,7 +302,7 @@ class TagViewTests(django.test.TestCase): **This text is in bold** """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -317,7 +321,7 @@ class TagViewTests(django.test.TestCase): Tag body. 
""") - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") content = response.content.decode("utf-8") @@ -333,7 +337,7 @@ class TagViewTests(django.test.TestCase): --- """) - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) response = self.client.get("/pages/tags/example/") self.assertEqual( "Embed title", @@ -345,7 +349,7 @@ class TagViewTests(django.test.TestCase): """Test hyperlinking of tags works as intended.""" filler_before, filler_after = "empty filler text\n\n", "more\nfiller" body = filler_before + "`!tags return`" + filler_after - Tag.objects.create(name="example", body=body) + Tag.objects.create(name="example", body=body, last_commit=self.commit) other_url = reverse("content:tag", kwargs={"location": "return"}) response = self.client.get("/pages/tags/example/") @@ -356,9 +360,9 @@ class TagViewTests(django.test.TestCase): def test_tag_root_page(self): """Test the root tag page which lists all tags.""" - Tag.objects.create(name="tag-1") - Tag.objects.create(name="tag-2") - Tag.objects.create(name="tag-3") + Tag.objects.create(name="tag-1", last_commit=self.commit) + Tag.objects.create(name="tag-2", last_commit=self.commit) + Tag.objects.create(name="tag-3", last_commit=self.commit) response = self.client.get("/pages/tags/") content = response.content.decode("utf-8") diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index e4a24a73..63f1c41c 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -130,7 +130,7 @@ def fetch_tags() -> list[Tag]: def set_tag_commit(tag: Tag) -> None: """Fetch commit information from the API, and save it for the tag.""" - if settings.STATIC_BUILD: + if settings.STATIC_BUILD: # pragma: no cover # Static builds request every page during build, which can ratelimit it. # Instead, we return some fake data. tag.last_commit = Commit( -- cgit v1.2.3 From c0823236d20e801550fccdbb021d8aabb56d59c0 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Thu, 18 Aug 2022 16:58:08 +0100 Subject: add collection of keywords per rule In reference to issue #2108, this commit aims to add an initial set of keywords per rule. These keywords will be later in the "rule" bot command in order to make rule identification easier --- pydis_site/apps/api/views.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index ad2d948e..f96d6a8d 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -124,35 +124,44 @@ class RulesView(APIView): return Response([ ( - f"Follow the {pydis_coc}." + f"Follow the {pydis_coc}.", + {"coc", "conduct", "code"} ), ( - f"Follow the {discord_community_guidelines} and {discord_tos}." + f"Follow the {discord_community_guidelines} and {discord_tos}.", + {"guidelines", "discord_tos"} ), ( - "Respect staff members and listen to their instructions." + "Respect staff members and listen to their instructions.", + {"staff", "instructions"} ), ( "Use English to the best of your ability. " - "Be polite if someone speaks English imperfectly." + "Be polite if someone speaks English imperfectly.", + {"english", "language"} ), ( "Do not provide or request help on projects that may break laws, " - "breach terms of services, or are malicious or inappropriate." 
+ "breach terms of services, or are malicious or inappropriate.", + {"infraction", "tos", "breach", "malicious", "inappropriate"} ), ( - "Do not post unapproved advertising." + "Do not post unapproved advertising.", + {"ads", "advertising"} ), ( "Keep discussions relevant to the channel topic. " - "Each channel's description tells you the topic." + "Each channel's description tells you the topic.", + {"off-topic", "topic", "relevance"} ), ( "Do not help with ongoing exams. When helping with homework, " - "help people learn how to do the assignment without doing it for them." + "help people learn how to do the assignment without doing it for them.", + {"exams", "assignment", "assignments", "homework"} ), ( - "Do not offer or ask for paid work of any kind." + "Do not offer or ask for paid work of any kind.", + {"work", "money"} ), ]) -- cgit v1.2.3 From cb40babfb49fb3ec7e0a5c1ba4c68d3cce4df91a Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 19 Aug 2022 00:46:39 +0400 Subject: Fix Django Deprecation Warnings Removes a few features which were deprecated in django 4.0. Running with warnings enabled shows no other errors. USE_L10N was deprecated, and is now enabled by default. In future versions of django, it'll be impossible to turn localization off. Explicitly defining the custom_app_config for the API app is no longer necessary as django can pick it up on its own. Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/__init__.py | 1 - pydis_site/settings.py | 1 - 2 files changed, 2 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/__init__.py b/pydis_site/apps/api/__init__.py index afa5b4d5..e69de29b 100644 --- a/pydis_site/apps/api/__init__.py +++ b/pydis_site/apps/api/__init__.py @@ -1 +0,0 @@ -default_app_config = 'pydis_site.apps.api.apps.ApiConfig' diff --git a/pydis_site/settings.py b/pydis_site/settings.py index bbf1d3aa..e08a2630 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -200,7 +200,6 @@ AUTH_PASSWORD_VALIDATORS = [ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True -USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) -- cgit v1.2.3 From 4998984727c2474bea7577c89d6fadda864cb538 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 19 Aug 2022 01:10:53 +0400 Subject: Fix Unittest Deprecation Warnings Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_filterlists.py | 4 ++-- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 2 +- pydis_site/apps/api/viewsets/bot/aoc_link.py | 2 +- pydis_site/apps/api/viewsets/bot/infraction.py | 2 +- pydis_site/apps/api/viewsets/bot/nomination.py | 2 +- pydis_site/apps/api/viewsets/bot/reminder.py | 2 +- pydis_site/apps/api/viewsets/bot/user.py | 2 +- pydis_site/apps/content/tests/test_views.py | 2 +- pydis_site/apps/home/tests/test_repodata_helpers.py | 6 +++--- 9 files changed, 12 insertions(+), 12 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py index 5a5bca60..9959617e 100644 --- a/pydis_site/apps/api/tests/test_filterlists.py +++ b/pydis_site/apps/api/tests/test_filterlists.py @@ -64,8 +64,8 @@ class FetchTests(AuthenticatedAPITestCase): self.assertEqual(response.status_code, 200) for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices): - self.assertEquals(api_type[0], model_type[0]) - self.assertEquals(api_type[1], model_type[1]) + self.assertEqual(api_type[0], model_type[0]) + 
self.assertEqual(api_type[1], model_type[1]) class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py index 3a4cec60..97efb63c 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py @@ -70,4 +70,4 @@ class AocCompletionistBlockViewSet( serializer_class = AocCompletionistBlockSerializer queryset = AocCompletionistBlock.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "is_blocked") + filterset_fields = ("user__id", "is_blocked") diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py index c7a96629..3cdc342d 100644 --- a/pydis_site/apps/api/viewsets/bot/aoc_link.py +++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py @@ -68,4 +68,4 @@ class AocAccountLinkViewSet( serializer_class = AocAccountLinkSerializer queryset = AocAccountLink.objects.all() filter_backends = (DjangoFilterBackend,) - filter_fields = ("user__id", "aoc_username") + filterset_fields = ("user__id", "aoc_username") diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 7f31292f..2b89fdb3 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -154,7 +154,7 @@ class InfractionViewSet( queryset = Infraction.objects.all() pagination_class = LimitOffsetPaginationExtended filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') + filterset_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type') search_fields = ('$reason',) frozen_fields = ('id', 'inserted_at', 'type', 'user', 'actor', 'hidden') diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index 144daab0..6af42bcb 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ -172,7 +172,7 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge serializer_class = NominationSerializer queryset = Nomination.objects.all() filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter) - filter_fields = ('user__id', 'active') + filterset_fields = ('user__id', 'active') frozen_fields = ('id', 'inserted_at', 'user', 'ended_at') frozen_on_create = ('ended_at', 'end_reason', 'active', 'inserted_at', 'reviewed') diff --git a/pydis_site/apps/api/viewsets/bot/reminder.py b/pydis_site/apps/api/viewsets/bot/reminder.py index 78d7cb3b..5f997052 100644 --- a/pydis_site/apps/api/viewsets/bot/reminder.py +++ b/pydis_site/apps/api/viewsets/bot/reminder.py @@ -125,4 +125,4 @@ class ReminderViewSet( serializer_class = ReminderSerializer queryset = Reminder.objects.prefetch_related('author') filter_backends = (DjangoFilterBackend, SearchFilter) - filter_fields = ('active', 'author__id') + filterset_fields = ('active', 'author__id') diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index 3318b2b9..ba1bcd9d 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -237,7 +237,7 @@ class UserViewSet(ModelViewSet): queryset = User.objects.all().order_by("id") pagination_class = UserListPagination filter_backends = (DjangoFilterBackend,) - filter_fields = ('name', 
'discriminator') + filterset_fields = ('name', 'discriminator') def get_serializer(self, *args, **kwargs) -> ModelSerializer: """Set Serializer many attribute to True if request body contains a list.""" diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py index eadad7e3..a09d22d8 100644 --- a/pydis_site/apps/content/tests/test_views.py +++ b/pydis_site/apps/content/tests/test_views.py @@ -172,7 +172,7 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase): for item in context["breadcrumb_items"]: item["path"] = Path(item["path"]) - self.assertEquals( + self.assertEqual( context["breadcrumb_items"], [ {"name": PARSED_CATEGORY_INFO["title"], "path": Path(".")}, diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index 4007eded..a963f733 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -42,7 +42,7 @@ class TestRepositoryMetadataHelpers(TestCase): metadata = self.home_view._get_repo_data() self.assertIsInstance(metadata[0], RepositoryMetadata) - self.assertEquals(len(metadata), len(self.home_view.repos)) + self.assertEqual(len(metadata), len(self.home_view.repos)) def test_returns_cached_metadata(self): """Test if the _get_repo_data helper returns cached data when available.""" @@ -82,7 +82,7 @@ class TestRepositoryMetadataHelpers(TestCase): repo = self.home_view.repos[0] self.assertIsInstance(api_data, dict) - self.assertEquals(len(api_data), len(self.home_view.repos)) + self.assertEqual(len(api_data), len(self.home_view.repos)) self.assertIn(repo, api_data.keys()) self.assertIn("stargazers_count", api_data[repo]) @@ -126,7 +126,7 @@ class TestRepositoryMetadataHelpers(TestCase): with self.assertLogs(): metadata = self.home_view._get_repo_data() - self.assertEquals(len(metadata), 0) + self.assertEqual(len(metadata), 0) def test_cleans_up_stale_metadata(self): """Tests that we clean up stale metadata when we start the HomeView.""" -- cgit v1.2.3 From a75ba77bc8b83af97decde6c5ac4317bb3b5253d Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 18:38:02 +0100 Subject: add docstrings explaining the values that the Rules view returns --- pydis_site/apps/api/views.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index f96d6a8d..66f4b18c 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -100,6 +100,7 @@ class RulesView(APIView): # `format` here is the result format, we have a link format here instead. 
def get(self, request, format=None): # noqa: D102,ANN001,ANN201 + """Returns a list of our community rules coupled with their keywords.""" link_format = request.query_params.get('link_format', 'md') if link_format not in ('html', 'md'): raise ParseError( -- cgit v1.2.3 From 672ba65b02fb111235fd3b928d5c84ee8b59cc54 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 18:43:05 +0100 Subject: add suggested keywords --- pydis_site/apps/api/views.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 66f4b18c..836f11ce 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -126,43 +126,43 @@ class RulesView(APIView): return Response([ ( f"Follow the {pydis_coc}.", - {"coc", "conduct", "code"} + ["coc", "conduct", "code"] ), ( f"Follow the {discord_community_guidelines} and {discord_tos}.", - {"guidelines", "discord_tos"} + ["discord", "guidelines", "discord_tos"] ), ( "Respect staff members and listen to their instructions.", - {"staff", "instructions"} + ["respect", "staff", "instructions"] ), ( "Use English to the best of your ability. " "Be polite if someone speaks English imperfectly.", - {"english", "language"} + ["english", "language"] ), ( "Do not provide or request help on projects that may break laws, " "breach terms of services, or are malicious or inappropriate.", - {"infraction", "tos", "breach", "malicious", "inappropriate"} + ["infraction", "tos", "breach", "malicious", "inappropriate"] ), ( "Do not post unapproved advertising.", - {"ads", "advertising"} + ["ad", "ads", "advert", "advertising"] ), ( "Keep discussions relevant to the channel topic. " "Each channel's description tells you the topic.", - {"off-topic", "topic", "relevance"} + ["off-topic", "topic", "relevance"] ), ( "Do not help with ongoing exams. When helping with homework, " "help people learn how to do the assignment without doing it for them.", - {"exams", "assignment", "assignments", "homework"} + ["exam", "exams", "assignment", "assignments", "homework"] ), ( "Do not offer or ask for paid work of any kind.", - {"work", "money"} + ["paid", "work", "money"] ), ]) -- cgit v1.2.3 From f07f03abd53789062045afb027ef2d5ecfd63f11 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 18:44:33 +0100 Subject: make docstring more explicit about the returned content --- pydis_site/apps/api/views.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 836f11ce..f76a78ff 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -100,7 +100,12 @@ class RulesView(APIView): # `format` here is the result format, we have a link format here instead. def get(self, request, format=None): # noqa: D102,ANN001,ANN201 - """Returns a list of our community rules coupled with their keywords.""" + """ + Returns a list of our community rules coupled with their keywords. + + Each item in the returned list is a tuple with the rule as first item + and a list of keywords that match that rules as second item. 
+ """ link_format = request.query_params.get('link_format', 'md') if link_format not in ('html', 'md'): raise ParseError( -- cgit v1.2.3 From 13ae666f0281c7dcfbbe79fdf431f28cd822ec19 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 18:55:18 +0100 Subject: update the RulesView class' docstrings --- pydis_site/apps/api/views.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index f76a78ff..3cb7e8bd 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -37,12 +37,15 @@ class RulesView(APIView): ## Routes ### GET /rules - Returns a JSON array containing the server's rules: + Returns a JSON array containing the server's rules + coupled with a list of keywords that will be used + when searching for particular rules upon using the + bot's `!rule` command >>> [ - ... "Eat candy.", - ... "Wake up at 4 AM.", - ... "Take your medicine." + ... ("Eat candy.", ["candy", "sweet"]), + ... ("Wake up at 4 AM.", ["wake_up", "early", "early_bird"]), + ... ("Take your medicine.", ["medicine", "health"]) ... ] Since some of the the rules require links, this view -- cgit v1.2.3 From 49909a3165344b0fdbd81300cc752327e08f0ff5 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 18:57:16 +0100 Subject: replace sweet with "sweets" to refer more to candy --- pydis_site/apps/api/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 3cb7e8bd..2c5343de 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -43,7 +43,7 @@ class RulesView(APIView): bot's `!rule` command >>> [ - ... ("Eat candy.", ["candy", "sweet"]), + ... ("Eat candy.", ["candy", "sweets"]), ... ("Wake up at 4 AM.", ["wake_up", "early", "early_bird"]), ... ("Take your medicine.", ["medicine", "health"]) ... ] -- cgit v1.2.3 From e6db60717efe799e0900ad6cf60f4962f1aef7a1 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 19:11:36 +0100 Subject: restrict the RulesView docstrings to just what it does --- pydis_site/apps/api/views.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 2c5343de..9e5ad73c 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -38,9 +38,8 @@ class RulesView(APIView): ## Routes ### GET /rules Returns a JSON array containing the server's rules - coupled with a list of keywords that will be used - when searching for particular rules upon using the - bot's `!rule` command + and keywords relating to each rule. + Example response: >>> [ ... ("Eat candy.", ["candy", "sweets"]), -- cgit v1.2.3 From 7fd81a80049763f1dc71815da27d65ce4344a191 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 18 Sep 2022 22:18:50 +0100 Subject: replace parenthesis with square brackets since tuples are serialized are lists --- pydis_site/apps/api/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 9e5ad73c..34167a38 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -42,9 +42,9 @@ class RulesView(APIView): Example response: >>> [ - ... ("Eat candy.", ["candy", "sweets"]), - ... 
("Wake up at 4 AM.", ["wake_up", "early", "early_bird"]), - ... ("Take your medicine.", ["medicine", "health"]) + ... ["Eat candy.", ["candy", "sweets"]], + ... ["Wake up at 4 AM.", ["wake_up", "early", "early_bird"]], + ... ["Take your medicine.", ["medicine", "health"]] ... ] Since some of the the rules require links, this view -- cgit v1.2.3 From 8c07c20c184371552d6811398ed5208ed2213c9b Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 27 Sep 2022 00:57:09 +0300 Subject: Add voice mute to infraction choices --- pydis_site/apps/api/migrations/0084_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_new_filter_schema.py b/pydis_site/apps/api/migrations/0084_new_filter_schema.py index 10e83b8b..393f4b9f 100644 --- a/pydis_site/apps/api/migrations/0084_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0084_new_filter_schema.py @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ("voice_mute", "Voice Mute"), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), @@ -128,7 +128,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=9, null=True)), + ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ("voice_mute", "Voice Mute"), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 708ceadc..f90eb6e6 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -28,7 +28,7 @@ class FilterSettingsMixin(models.Model): ) infraction_type = models.CharField( choices=Infraction.TYPE_CHOICES, - max_length=9, + max_length=10, null=True, help_text="The infraction to apply to this user." 
) -- cgit v1.2.3 From e7e55af80b3853b75b86e3fb347af330f9c1d376 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 27 Sep 2022 00:59:39 +0300 Subject: Use the new models.JSONField --- pydis_site/apps/api/migrations/0084_new_filter_schema.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_new_filter_schema.py b/pydis_site/apps/api/migrations/0084_new_filter_schema.py index 393f4b9f..96431b4a 100644 --- a/pydis_site/apps/api/migrations/0084_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0084_new_filter_schema.py @@ -96,7 +96,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_field', django.contrib.postgres.fields.jsonb.JSONField(help_text='Implementation specific field.', null=True)), + ('additional_field', models.JSONField(help_text='Implementation specific field.', null=True)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index f90eb6e6..33891890 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,4 +1,4 @@ -from django.contrib.postgres.fields import ArrayField, JSONField +from django.contrib.postgres.fields import ArrayField from django.db import models from django.db.models import UniqueConstraint @@ -121,7 +121,7 @@ class FilterBase(FilterSettingsMixin): max_length=200, help_text="Why this filter has been added.", null=True ) - additional_field = JSONField(null=True, help_text="Implementation specific field.") + additional_field = models.JSONField(null=True, help_text="Implementation specific field.") filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." -- cgit v1.2.3 From 870238e5ed31ae5dfb0e22fe0bc131f40d855013 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 27 Sep 2022 12:21:32 +0300 Subject: Convert the infraction choices to uppercase This is done to match the values used on the bot. 
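
A rough sketch of what that conversion looks like in isolation (the names
below are illustrative stand-ins; the real choices live on the Infraction
model and cover every infraction type):

    # Hypothetical stand-in for Infraction.TYPE_CHOICES, which pairs a
    # stored value with a human-readable label.
    TYPE_CHOICES = [
        ("note", "Note"),
        ("voice_mute", "Voice Mute"),
        ("voice_ban", "Voice Ban"),
    ]

    # Same shape as the comprehension used in filters.py below: only the
    # stored value is uppercased, the display label is left unchanged.
    uppercased = [(value.upper(), label) for value, label in TYPE_CHOICES]

    print(uppercased)
    # [('NOTE', 'Note'), ('VOICE_MUTE', 'Voice Mute'), ('VOICE_BAN', 'Voice Ban')]
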
--- pydis_site/apps/api/migrations/0084_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_new_filter_schema.py b/pydis_site/apps/api/migrations/0084_new_filter_schema.py index 96431b4a..ba228d70 100644 --- a/pydis_site/apps/api/migrations/0084_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0084_new_filter_schema.py @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ("voice_mute", "Voice Mute"), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=10, null=True)), + ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), @@ -128,7 +128,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), - ('infraction_type', models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('mute', 'Mute'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ("voice_mute", "Voice Mute"), ('voice_ban', 'Voice Ban')], help_text='The infraction to apply to this user.', max_length=10, null=True)), + ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 33891890..1fb9707d 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -27,7 +27,7 @@ class FilterSettingsMixin(models.Model): null=True ) infraction_type = models.CharField( - choices=Infraction.TYPE_CHOICES, + choices=[(choices[0].upper(), choices[1]) for choices in Infraction.TYPE_CHOICES], max_length=10, null=True, help_text="The infraction to apply to this user." -- cgit v1.2.3 From c7255b765a77e7d4363e837560323212d1703638 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 30 Sep 2022 18:38:01 +0400 Subject: Bump To Django 4.1.1 & Update Dependencies Update outdated and broken dependencies. Signed-off-by: Hassan Abouelela --- poetry.lock | 850 ++++++++++----------- .../apps/api/migrations/0013_specialsnake_image.py | 3 +- pydis_site/apps/api/models/bot/message.py | 11 +- pydis_site/apps/api/viewsets/bot/infraction.py | 17 +- pyproject.toml | 23 +- 5 files changed, 441 insertions(+), 463 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/poetry.lock b/poetry.lock index 3a11355a..f27400a9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -28,17 +28,17 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "bandit" @@ -61,7 +61,7 @@ yaml = ["PyYAML"] [[package]] name = "certifi" -version = "2022.6.15" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -88,7 +88,7 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.1.0" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -107,34 +107,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "5.5" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" - -[package.extras] -toml = ["toml"] - -[[package]] -name = "coveralls" -version = "2.2.0" -description = "Show coverage stats online via coveralls.io" -category = "dev" -optional = false -python-versions = ">= 3.5" - -[package.dependencies] -coverage = ">=4.1,<6.0" -docopt = ">=0.6.1" -requests = ">=1.0.0" +python-versions = ">=3.7" [package.extras] -yaml = ["PyYAML (>=3.10)"] +toml = ["tomli"] [[package]] name = "cryptography" -version = "37.0.4" +version = "38.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -144,16 +128,16 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools_rust (>=0.11.4)"] +sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "distlib" -version = "0.3.4" +version = "0.3.6" description = "Distribution utilities" category = "dev" optional = false @@ -161,14 +145,14 @@ python-versions = "*" [[package]] name = "Django" -version = "4.0.7" +version = "4.1.1" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." category = "main" optional = false python-versions = ">=3.8" [package.dependencies] -asgiref = ">=3.4.1,<4" +asgiref = ">=3.5.2,<4" sqlparse = ">=0.2.2" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -178,7 +162,7 @@ bcrypt = ["bcrypt"] [[package]] name = "django-distill" -version = "2.9.2" +version = "3.0.1" description = "Static site renderer and publisher for Django." category = "main" optional = false @@ -190,22 +174,27 @@ requests = "*" [[package]] name = "django-environ" -version = "0.4.5" -description = "Django-environ allows you to utilize 12factor inspired environment variables to configure your Django application." +version = "0.9.0" +description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.4,<4" + +[package.extras] +develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.0)", "pytest (>=4.6.11)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +docs = ["furo (>=2021.8.17b43,<2021.9.0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] [[package]] name = "django-filter" -version = "21.1" +version = "22.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -Django = ">=2.2" +Django = ">=3.2" [[package]] name = "django-prometheus" @@ -235,64 +224,56 @@ dev = ["PyGithub (>=1.43,<2.0)", "flake8 (>=3.8,<4.0)", "flake8-annotations (>=2 [[package]] name = "djangorestframework" -version = "3.13.1" +version = "3.14.0" description = "Web APIs for Django, made easy." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -django = ">=2.2" +django = ">=3.0" pytz = "*" -[[package]] -name = "docopt" -version = "0.6.2" -description = "Pythonic argument parser, that will make you smile" -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "filelock" -version = "3.7.1" +version = "3.8.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" -version = "3.9.2" +version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6.1" [package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "flake8-annotations" -version = "2.9.0" +version = "2.9.1" description = "Flake8 Type Annotation Checks" category = "dev" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] -attrs = ">=21.4,<22.0" +attrs = ">=21.4" flake8 = ">=3.7" [[package]] name = "flake8-bandit" -version = "3.0.0" +version = "4.1.1" description = "Automated security testing with bandit and flake8." category = "dev" optional = false @@ -300,13 +281,11 @@ python-versions = ">=3.6" [package.dependencies] bandit = ">=1.7.3" -flake8 = "*" -flake8-polyfill = "*" -pycodestyle = "*" +flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "20.11.1" +version = "22.9.23" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
category = "dev" optional = false @@ -317,7 +296,7 @@ attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["black", "coverage", "hypothesis", "hypothesmith"] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] [[package]] name = "flake8-docstrings" @@ -343,17 +322,6 @@ python-versions = "*" pycodestyle = "*" setuptools = "*" -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -flake8 = "*" - [[package]] name = "flake8-string-format" version = "0.3.0" @@ -399,7 +367,7 @@ python-versions = ">=3.6" smmap = ">=3.0.1,<6" [[package]] -name = "gitpython" +name = "GitPython" version = "3.1.27" description = "GitPython is a python library used to interact with Git repositories" category = "dev" @@ -411,18 +379,18 @@ gitdb = ">=4.0.1,<5" [[package]] name = "gunicorn" -version = "20.0.4" +version = "20.1.0" description = "WSGI HTTP Server for UNIX" category = "main" optional = false -python-versions = ">=3.4" +python-versions = ">=3.5" [package.dependencies] setuptools = ">=3.0" [package.extras] -eventlet = ["eventlet (>=0.9.7)"] -gevent = ["gevent (>=0.13)"] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] tornado = ["tornado (>=0.2)"] @@ -474,7 +442,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.5.1" +version = "2.5.5" description = "File identification library for Python" category = "dev" optional = false @@ -485,28 +453,12 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "importlib-metadata" -version = "4.12.0" -description = "Read metadata from Python packages" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - [[package]] name = "libsass" version = "0.21.0" @@ -519,26 +471,23 @@ python-versions = "*" six = "*" [[package]] -name = "markdown" -version = "3.3.7" +name = "Markdown" +version = "3.4.1" description = "Python implementation of Markdown." 
category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +python-versions = ">=3.7" [package.extras] testing = ["coverage", "pyyaml"] [[package]] name = "mccabe" -version = "0.6.1" +version = "0.7.0" description = "McCabe checker, plugin for flake8" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "mslex" @@ -561,7 +510,7 @@ setuptools = "*" [[package]] name = "pbr" -version = "5.9.0" +version = "5.10.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -569,11 +518,11 @@ python-versions = ">=2.6" [[package]] name = "pep8-naming" -version = "0.13.0" +version = "0.13.2" description = "Check PEP-8 naming conventions, plugin for flake8" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3.9.1" @@ -619,7 +568,7 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.1" +version = "5.9.2" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false @@ -630,19 +579,19 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg2-binary" -version = "2.8.6" +version = "2.9.3" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" [[package]] name = "pycodestyle" -version = "2.7.0" +version = "2.9.1" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [[package]] name = "pycparser" @@ -668,44 +617,45 @@ toml = ["toml"] [[package]] name = "pyfakefs" -version = "4.5.6" +version = "4.7.0" description = "pyfakefs implements a fake file system that mocks the Python file system modules." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "pyflakes" -version = "2.3.1" +version = "2.5.0" description = "passive checker of Python programs" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [[package]] -name = "pyjwt" -version = "2.4.0" +name = "PyJWT" +version = "2.5.0" description = "JSON Web Token implementation in Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] cryptography = {version = ">=3.3.1", optional = true, markers = "extra == \"crypto\""} +types-cryptography = {version = ">=3.3.21", optional = true, markers = "extra == \"crypto\""} [package.extras] -crypto = ["cryptography (>=3.3.1)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "types-cryptography (>=3.3.21)", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "python-dotenv" -version = "0.17.1" +version = "0.21.0" description = "Read key-value pairs from a .env file and set them as environment variables" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.extras] cli = ["click (>=5.0)"] @@ -727,19 +677,19 @@ test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2022.1" +version = "2022.2.1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" [[package]] -name = "pyyaml" -version = "5.4.1" +name = "PyYAML" +version = "6.0" description = "YAML parser and emitter for Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.6" [[package]] name = "requests" @@ -775,7 +725,7 @@ idna2008 = ["idna"] [[package]] name = "sentry-sdk" -version = "0.20.3" +version = "1.9.9" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = false @@ -783,7 +733,7 @@ python-versions = "*" [package.dependencies] certifi = "*" -urllib3 = ">=1.10.0" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -793,12 +743,16 @@ celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"] +httpx = ["httpx (>=0.16.0)"] pure_eval = ["asttokens", "executing", "pure-eval"] pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] tornado = ["tornado (>=5)"] [[package]] @@ -832,11 +786,11 @@ python-versions = ">=3.6" [[package]] name = "sniffio" -version = "1.2.0" +version = "1.3.0" description = "Sniff out which async library your code is running under" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = 
"snowballstemmer" @@ -848,7 +802,7 @@ python-versions = "*" [[package]] name = "sqlparse" -version = "0.4.2" +version = "0.4.3" description = "A non-validating SQL parser." category = "main" optional = false @@ -856,27 +810,28 @@ python-versions = ">=3.5" [[package]] name = "stevedore" -version = "3.5.0" +version = "4.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "taskipy" -version = "1.7.0" +version = "1.10.3" description = "tasks runner for python projects" category = "dev" optional = false python-versions = ">=3.6,<4.0" [package.dependencies] -mslex = ">=0.3.0,<0.4.0" +colorama = ">=0.4.4,<0.5.0" +mslex = {version = ">=0.3.0,<0.4.0", markers = "sys_platform == \"win32\""} psutil = ">=5.7.2,<6.0.0" -toml = ">=0.10.0,<0.11.0" +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version >= \"3.7\" and python_version < \"4.0\""} [[package]] name = "toml" @@ -886,9 +841,25 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "types-cryptography" +version = "3.3.23" +description = "Typing stubs for cryptography" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "tzdata" -version = "2022.1" +version = "2022.4" description = "Provider of IANA time zone data" category = "main" optional = false @@ -896,7 +867,7 @@ python-versions = ">=2" [[package]] name = "urllib3" -version = "1.26.10" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -904,54 +875,41 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.15.1" +version = "20.16.5" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -platformdirs = ">=2,<3" -six = ">=1.9.0,<2" +distlib = ">=0.3.5,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<3" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] +docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] [[package]] name = "whitenoise" -version = "5.3.0" +version = "6.2.0" description = "Radically simplified static file serving for WSGI applications" category = "main" optional = false -python-versions = ">=3.5, <4" - -[package.extras] -brotli = ["Brotli"] - -[[package]] -name = "zipp" -version = "3.8.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" -optional = false python-versions = ">=3.7" [package.extras] -docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +brotli = ["Brotli"] [metadata] lock-version = "1.1" -python-versions = "3.9.*" -content-hash = "c656c07f40d32ee7d30c19a7084b40e1e851209a362a3fe882aa03c2fd286454" +python-versions = "3.10.*" +content-hash = "f7f16c0f23efb7dc3772c7cef2a2dd00d761754c6b145e826a26e5c664baf39c" [metadata.files] anyio = [ @@ -963,16 +921,16 @@ asgiref = [ {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, ] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] bandit = [ {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, {file = 
"bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, ] cffi = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, @@ -1045,113 +1003,111 @@ cfgv = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, - {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = 
"sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -coveralls = [ - {file = "coveralls-2.2.0-py2.py3-none-any.whl", hash = "sha256:2301a19500b06649d2ec4f2858f9c69638d7699a4c63027c5d53daba666147cc"}, - {file = "coveralls-2.2.0.tar.gz", hash = "sha256:b990ba1f7bc4288e63340be0433698c1efe8217f78c689d254c2540af3d38617"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + 
{file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] cryptography = [ - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884"}, - {file = "cryptography-37.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046"}, - {file = 
"cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8"}, - {file = "cryptography-37.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3"}, - {file = "cryptography-37.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59"}, - {file = "cryptography-37.0.4-cp36-abi3-win32.whl", hash = "sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157"}, - {file = "cryptography-37.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b"}, - {file = "cryptography-37.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282"}, - {file = "cryptography-37.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a"}, - {file = "cryptography-37.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab"}, - {file = "cryptography-37.0.4.tar.gz", hash = "sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82"}, + {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f"}, + {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b72c360427889b40f36dc214630e688c2fe03e16c162ef0aa41da7ab1455153"}, + {file = 
"cryptography-38.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:194044c6b89a2f9f169df475cc167f6157eb9151cc69af8a2a163481d45cc407"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca9f6784ea96b55ff41708b92c3f6aeaebde4c560308e5fbbd3173fbc466e94e"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:16fa61e7481f4b77ef53991075de29fc5bacb582a1244046d2e8b4bb72ef66d0"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d4ef6cc305394ed669d4d9eebf10d3a101059bdcf2669c366ec1d14e4fb227bd"}, + {file = "cryptography-38.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3261725c0ef84e7592597606f6583385fed2a5ec3909f43bc475ade9729a41d6"}, + {file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0297ffc478bdd237f5ca3a7dc96fc0d315670bfa099c04dc3a4a2172008a405a"}, + {file = "cryptography-38.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89ed49784ba88c221756ff4d4755dbc03b3c8d2c5103f6d6b4f83a0fb1e85294"}, + {file = "cryptography-38.0.1-cp36-abi3-win32.whl", hash = "sha256:ac7e48f7e7261207d750fa7e55eac2d45f720027d5703cd9007e9b37bbb59ac0"}, + {file = "cryptography-38.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ad7353f6ddf285aeadfaf79e5a6829110106ff8189391704c1d8801aa0bae45a"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896dd3a66959d3a5ddcfc140a53391f69ff1e8f25d93f0e2e7830c6de90ceb9d"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d3971e2749a723e9084dd507584e2a2761f78ad2c638aa31e80bc7a15c9db4f9"}, + {file = "cryptography-38.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:79473cf8a5cbc471979bd9378c9f425384980fcf2ab6534b18ed7d0d9843987d"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9e69ae01f99abe6ad646947bba8941e896cb3aa805be2597a0400e0764b5818"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5067ee7f2bce36b11d0e334abcd1ccf8c541fc0bbdaf57cdd511fdee53e879b6"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3e3a2599e640927089f932295a9a247fc40a5bdf69b0484532f530471a382750"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2e5856248a416767322c8668ef1845ad46ee62629266f84a8f007a317141013"}, + {file = "cryptography-38.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:64760ba5331e3f1794d0bcaabc0d0c39e8c60bf67d09c93dc0e54189dfd7cfe5"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b6c9b706316d7b5a137c35e14f4103e2115b088c412140fdbd5f87c73284df61"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0163a849b6f315bf52815e238bc2b2346604413fa7c1601eea84bcddb5fb9ac"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:d1a5bd52d684e49a36582193e0b89ff267704cd4025abefb9e26803adeb3e5fb"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:765fa194a0f3372d83005ab83ab35d7c5526c4e22951e46059b8ac678b44fa5a"}, + {file = "cryptography-38.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:52e7bee800ec869b4031093875279f1ff2ed12c1e2f74923e8f49c916afd1d3b"}, + {file = "cryptography-38.0.1.tar.gz", hash = 
"sha256:1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7"}, ] distlib = [ - {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, - {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, ] Django = [ - {file = "Django-4.0.7-py3-none-any.whl", hash = "sha256:41bd65a9e5f8a89cdbfa7a7bba45cd7431ae89e750af82dea8a35fd1a7ecbe66"}, - {file = "Django-4.0.7.tar.gz", hash = "sha256:9c6d5ad36be798e562ddcaa6b17b1c3ff2d3c4f529a47432b69fb9a30f847461"}, + {file = "Django-4.1.1-py3-none-any.whl", hash = "sha256:acb21fac9275f9972d81c7caf5761a89ec3ea25fe74545dd26b8a48cb3a0203e"}, + {file = "Django-4.1.1.tar.gz", hash = "sha256:a153ffd5143bf26a877bfae2f4ec736ebd8924a46600ca089ad96b54a1d4e28e"}, ] django-distill = [ - {file = "django-distill-2.9.2.tar.gz", hash = "sha256:91d5f45c2ff78b8efd4805ff5ec17df4ba815bbf51ca12a2cea65727d2f1d42e"}, + {file = "django-distill-3.0.1.tar.gz", hash = "sha256:8bbac5e45d2afc61cc718d587c6026267c985305f5e599465f2ebc4b0cba9ebf"}, ] django-environ = [ - {file = "django-environ-0.4.5.tar.gz", hash = "sha256:6c9d87660142608f63ec7d5ce5564c49b603ea8ff25da595fd6098f6dc82afde"}, - {file = "django_environ-0.4.5-py2.py3-none-any.whl", hash = "sha256:c57b3c11ec1f319d9474e3e5a79134f40174b17c7cc024bbb2fad84646b120c4"}, + {file = "django-environ-0.9.0.tar.gz", hash = "sha256:bff5381533056328c9ac02f71790bd5bf1cea81b1beeb648f28b81c9e83e0a21"}, + {file = "django_environ-0.9.0-py2.py3-none-any.whl", hash = "sha256:f21a5ef8cc603da1870bbf9a09b7e5577ab5f6da451b843dbcc721a7bca6b3d9"}, ] django-filter = [ - {file = "django-filter-21.1.tar.gz", hash = "sha256:632a251fa8f1aadb4b8cceff932bb52fe2f826dd7dfe7f3eac40e5c463d6836e"}, - {file = "django_filter-21.1-py3-none-any.whl", hash = "sha256:f4a6737a30104c98d2e2a5fb93043f36dd7978e0c7ddc92f5998e85433ea5063"}, + {file = "django-filter-22.1.tar.gz", hash = "sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5"}, + {file = "django_filter-22.1-py3-none-any.whl", hash = "sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb"}, ] django-prometheus = [ {file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"}, @@ -1162,31 +1118,28 @@ django-simple-bulma = [ {file = "django_simple_bulma-2.5.0-py3-none-any.whl", hash = "sha256:c413b031494d80f674068a782440c6ec5f20a12501ee7464d6f781a5777fa89c"}, ] djangorestframework = [ - {file = "djangorestframework-3.13.1-py3-none-any.whl", hash = "sha256:24c4bf58ed7e85d1fe4ba250ab2da926d263cd57d64b03e8dcef0ac683f8b1aa"}, - {file = "djangorestframework-3.13.1.tar.gz", hash = "sha256:0c33407ce23acc68eca2a6e46424b008c9c02eceb8cf18581921d0092bc1f2ee"}, -] -docopt = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, + {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, + {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, ] filelock = [ - {file = "filelock-3.7.1-py3-none-any.whl", hash = 
"sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, - {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.9.0.tar.gz", hash = "sha256:63fb3f538970b6a8dfd84125cf5af16f7b22e52d5032acb3b7eb23645ecbda9b"}, - {file = "flake8_annotations-2.9.0-py3-none-any.whl", hash = "sha256:84f46de2964cb18fccea968d9eafce7cf857e34d913d515120795b9af6498d56"}, + {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"}, + {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"}, ] flake8-bandit = [ - {file = "flake8_bandit-3.0.0-py2.py3-none-any.whl", hash = "sha256:61b617f4f7cdaa0e2b1e6bf7b68afb2b619a227bb3e3ae00dd36c213bd17900a"}, - {file = "flake8_bandit-3.0.0.tar.gz", hash = "sha256:54d19427e6a8d50322a7b02e1841c0a7c22d856975f3459803320e0e18e2d6a1"}, + {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, + {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, - {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, + {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"}, + {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"}, ] flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, @@ -1196,10 +1149,6 @@ flake8-import-order = [ {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, ] -flake8-polyfill = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] flake8-string-format = [ {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, {file = 
"flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, @@ -1215,13 +1164,13 @@ gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] -gitpython = [ +GitPython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] gunicorn = [ - {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"}, - {file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"}, + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] h11 = [ {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, @@ -1236,16 +1185,12 @@ httpx = [ {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, ] identify = [ - {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, - {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, + {file = "identify-2.5.5-py2.py3-none-any.whl", hash = "sha256:ef78c0d96098a3b5fe7720be4a97e73f439af7cf088ebf47b620aeaa10fadf97"}, + {file = "identify-2.5.5.tar.gz", hash = "sha256:322a5699daecf7c6fd60e68852f36f2ecbb6a36ff6e6e973e0d2bb6fca203ee6"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, - {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] libsass = [ {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, @@ -1259,13 +1204,13 @@ libsass = [ {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, ] -markdown = [ - {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, - {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +Markdown = [ + {file = "Markdown-3.4.1-py3-none-any.whl", hash = 
"sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, + {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, ] mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mslex = [ {file = "mslex-0.3.0-py2.py3-none-any.whl", hash = "sha256:380cb14abf8fabf40e56df5c8b21a6d533dc5cbdcfe42406bbf08dda8f42e42a"}, @@ -1276,12 +1221,12 @@ nodeenv = [ {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] pbr = [ - {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"}, - {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"}, + {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, + {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, ] pep8-naming = [ - {file = "pep8-naming-0.13.0.tar.gz", hash = "sha256:9f38e6dcf867a1fb7ad47f5ff72c0ddae544a6cf64eb9f7600b7b3c0bb5980b5"}, - {file = "pep8_naming-0.13.0-py3-none-any.whl", hash = "sha256:069ea20e97f073b3e6d4f789af2a57816f281ca64b86210c7d471117a4b6bfd0"}, + {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, + {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, ] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -1296,79 +1241,103 @@ prometheus-client = [ {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, ] psutil = [ - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87"}, - {file = "psutil-5.9.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af"}, - {file = "psutil-5.9.1-cp27-cp27m-win32.whl", hash = "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc"}, - {file = "psutil-5.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0"}, - {file = "psutil-5.9.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22"}, - {file = "psutil-5.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9"}, - {file = "psutil-5.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8"}, - {file = 
"psutil-5.9.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de"}, - {file = "psutil-5.9.1-cp310-cp310-win32.whl", hash = "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329"}, - {file = "psutil-5.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021"}, - {file = "psutil-5.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453"}, - {file = "psutil-5.9.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685"}, - {file = "psutil-5.9.1-cp36-cp36m-win32.whl", hash = "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36"}, - {file = "psutil-5.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d"}, - {file = "psutil-5.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676"}, - {file = "psutil-5.9.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4"}, - {file = "psutil-5.9.1-cp37-cp37m-win32.whl", hash = "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b"}, - {file = "psutil-5.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680"}, - {file = "psutil-5.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4"}, - {file = "psutil-5.9.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b"}, - {file = "psutil-5.9.1-cp38-cp38-win32.whl", hash = "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689"}, - {file = "psutil-5.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0"}, - {file = "psutil-5.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e"}, - {file = "psutil-5.9.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537"}, - {file = "psutil-5.9.1-cp39-cp39-win32.whl", hash = 
"sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574"}, - {file = "psutil-5.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5"}, - {file = "psutil-5.9.1.tar.gz", hash = "sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954"}, + {file = "psutil-5.9.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:8f024fbb26c8daf5d70287bb3edfafa22283c255287cf523c5d81721e8e5d82c"}, + {file = "psutil-5.9.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b2f248ffc346f4f4f0d747ee1947963613216b06688be0be2e393986fe20dbbb"}, + {file = "psutil-5.9.2-cp27-cp27m-win32.whl", hash = "sha256:b1928b9bf478d31fdffdb57101d18f9b70ed4e9b0e41af751851813547b2a9ab"}, + {file = "psutil-5.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:404f4816c16a2fcc4eaa36d7eb49a66df2d083e829d3e39ee8759a411dbc9ecf"}, + {file = "psutil-5.9.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:94e621c6a4ddb2573d4d30cba074f6d1aa0186645917df42c811c473dd22b339"}, + {file = "psutil-5.9.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:256098b4f6ffea6441eb54ab3eb64db9ecef18f6a80d7ba91549195d55420f84"}, + {file = "psutil-5.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:614337922702e9be37a39954d67fdb9e855981624d8011a9927b8f2d3c9625d9"}, + {file = "psutil-5.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39ec06dc6c934fb53df10c1672e299145ce609ff0611b569e75a88f313634969"}, + {file = "psutil-5.9.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3ac2c0375ef498e74b9b4ec56df3c88be43fe56cac465627572dbfb21c4be34"}, + {file = "psutil-5.9.2-cp310-cp310-win32.whl", hash = "sha256:e4c4a7636ffc47b7141864f1c5e7d649f42c54e49da2dd3cceb1c5f5d29bfc85"}, + {file = "psutil-5.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:f4cb67215c10d4657e320037109939b1c1d2fd70ca3d76301992f89fe2edb1f1"}, + {file = "psutil-5.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dc9bda7d5ced744622f157cc8d8bdd51735dafcecff807e928ff26bdb0ff097d"}, + {file = "psutil-5.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75291912b945a7351d45df682f9644540d564d62115d4a20d45fa17dc2d48f8"}, + {file = "psutil-5.9.2-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4018d5f9b6651f9896c7a7c2c9f4652e4eea53f10751c4e7d08a9093ab587ec"}, + {file = "psutil-5.9.2-cp36-cp36m-win32.whl", hash = "sha256:f40ba362fefc11d6bea4403f070078d60053ed422255bd838cd86a40674364c9"}, + {file = "psutil-5.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9770c1d25aee91417eba7869139d629d6328a9422ce1cdd112bd56377ca98444"}, + {file = "psutil-5.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42638876b7f5ef43cef8dcf640d3401b27a51ee3fa137cb2aa2e72e188414c32"}, + {file = "psutil-5.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91aa0dac0c64688667b4285fa29354acfb3e834e1fd98b535b9986c883c2ce1d"}, + {file = "psutil-5.9.2-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fb54941aac044a61db9d8eb56fc5bee207db3bc58645d657249030e15ba3727"}, + {file = "psutil-5.9.2-cp37-cp37m-win32.whl", hash = "sha256:7cbb795dcd8ed8fd238bc9e9f64ab188f3f4096d2e811b5a82da53d164b84c3f"}, + {file = "psutil-5.9.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:5d39e3a2d5c40efa977c9a8dd4f679763c43c6c255b1340a56489955dbca767c"}, + {file = "psutil-5.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd331866628d18223a4265371fd255774affd86244fc307ef66eaf00de0633d5"}, + {file = "psutil-5.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b315febaebae813326296872fdb4be92ad3ce10d1d742a6b0c49fb619481ed0b"}, + {file = "psutil-5.9.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7929a516125f62399d6e8e026129c8835f6c5a3aab88c3fff1a05ee8feb840d"}, + {file = "psutil-5.9.2-cp38-cp38-win32.whl", hash = "sha256:561dec454853846d1dd0247b44c2e66a0a0c490f937086930ec4b8f83bf44f06"}, + {file = "psutil-5.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:67b33f27fc0427483b61563a16c90d9f3b547eeb7af0ef1b9fe024cdc9b3a6ea"}, + {file = "psutil-5.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3591616fa07b15050b2f87e1cdefd06a554382e72866fcc0ab2be9d116486c8"}, + {file = "psutil-5.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b29f581b5edab1f133563272a6011925401804d52d603c5c606936b49c8b97"}, + {file = "psutil-5.9.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4642fd93785a29353d6917a23e2ac6177308ef5e8be5cc17008d885cb9f70f12"}, + {file = "psutil-5.9.2-cp39-cp39-win32.whl", hash = "sha256:ed29ea0b9a372c5188cdb2ad39f937900a10fb5478dc077283bf86eeac678ef1"}, + {file = "psutil-5.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:68b35cbff92d1f7103d8f1db77c977e72f49fcefae3d3d2b91c76b0e7aef48b8"}, + {file = "psutil-5.9.2.tar.gz", hash = "sha256:feb861a10b6c3bb00701063b37e4afc754f8217f0f09c42280586bd6ac712b5c"}, ] psycopg2-binary = [ - {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win_amd64.whl", hash = "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win32.whl", hash = "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win_amd64.whl", hash = "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = 
"sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win32.whl", hash = "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win_amd64.whl", hash = "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win32.whl", hash = "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win32.whl", hash = "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win32.whl", hash = "sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win32.whl", hash = "sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6"}, + {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f2534ab7dc7e776a263b463a16e189eb30e85ec9bbe1bff9e78dae802608932"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e6aa71ae45f952a2205377773e76f4e3f27951df38e69a4c95440c779e013560"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, + {file = 
"psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b3a24a1982ae56461cc24f6680604fffa2c1b818e9dc55680da038792e004d18"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, ] pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, @@ -1379,59 +1348,70 @@ pydocstyle = [ {file = 
"pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] pyfakefs = [ - {file = "pyfakefs-4.5.6-py3-none-any.whl", hash = "sha256:6bb4e27457b0bc90e3ebfe5aed4f1b8c32a18713ba44e925f304bb9b9816a03c"}, - {file = "pyfakefs-4.5.6.tar.gz", hash = "sha256:914d7bf994406cfbefee0b4d45918f60c15b406afe93f8194a804da5a450a822"}, + {file = "pyfakefs-4.7.0-py3-none-any.whl", hash = "sha256:29203a7482b25406dd3ea41c8740be2697c6058b0f6577485c3ae9cd4c5e96cd"}, + {file = "pyfakefs-4.7.0.tar.gz", hash = "sha256:f22d30d93d2989bf2d2c67b606a14cbab2df0be912c09dcdb590ea4931073ade"}, ] pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -pyjwt = [ - {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, - {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, +PyJWT = [ + {file = "PyJWT-2.5.0-py3-none-any.whl", hash = "sha256:8d82e7087868e94dd8d7d418e5088ce64f7daab4b36db654cbaedb46f9d1ca80"}, + {file = "PyJWT-2.5.0.tar.gz", hash = "sha256:e77ab89480905d86998442ac5788f35333fa85f65047a534adc38edf3c88fc3b"}, ] python-dotenv = [ - {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, - {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, + {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, + {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, ] python-frontmatter = [ {file = "python-frontmatter-1.0.0.tar.gz", hash = "sha256:e98152e977225ddafea6f01f40b4b0f1de175766322004c826ca99842d19a7cd"}, {file = "python_frontmatter-1.0.0-py3-none-any.whl", hash = "sha256:766ae75f1b301ffc5fe3494339147e0fd80bc3deff3d7590a93991978b579b08"}, ] pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] -pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, + {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, + {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, +] +PyYAML = [ + {file = 
"PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = 
"PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, @@ -1442,8 +1422,8 @@ rfc3986 = [ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] sentry-sdk = [ - {file = "sentry-sdk-0.20.3.tar.gz", hash = "sha256:4ae8d1ced6c67f1c8ea51d82a16721c166c489b76876c9f2c202b8a50334b237"}, - {file = "sentry_sdk-0.20.3-py2.py3-none-any.whl", hash = "sha256:e75c8c58932bda8cd293ea8e4b242527129e1caaec91433d21b8b2f20fee030b"}, + {file = "sentry-sdk-1.9.9.tar.gz", hash = "sha256:d6c71d2f85710b66822adaa954af7912bab135d6c85febd5b0f3dfd4ab37e181"}, + {file = "sentry_sdk-1.9.9-py2.py3-none-any.whl", hash = "sha256:ef925b5338625448645a778428d8f22a3d17de8b28cc8e6fba60b93393ad86fe"}, ] setuptools = [ {file = "setuptools-65.4.1-py3-none-any.whl", hash = "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012"}, @@ -1458,46 +1438,50 @@ 
smmap = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] sqlparse = [ - {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"}, - {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"}, + {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"}, + {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"}, ] stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, + {file = "stevedore-4.0.0-py3-none-any.whl", hash = "sha256:87e4d27fe96d0d7e4fc24f0cbe3463baae4ec51e81d95fbe60d2474636e0c7d8"}, + {file = "stevedore-4.0.0.tar.gz", hash = "sha256:f82cc99a1ff552310d19c379827c2c64dd9f85a38bcd5559db2470161867b786"}, ] taskipy = [ - {file = "taskipy-1.7.0-py3-none-any.whl", hash = "sha256:9e284c10898e9dee01a3e72220b94b192b1daa0f560271503a6df1da53d03844"}, - {file = "taskipy-1.7.0.tar.gz", hash = "sha256:960e480b1004971e76454ecd1a0484e640744a30073a1069894a311467f85ed8"}, + {file = "taskipy-1.10.3-py3-none-any.whl", hash = "sha256:4c0070ca53868d97989f7ab5c6f237525d52ee184f9b967576e8fe427ed9d0b8"}, + {file = "taskipy-1.10.3.tar.gz", hash = "sha256:112beaf21e3d5569950b99162a1de003fa885fabee9e450757a6b874be914877"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +types-cryptography = [ + {file = "types-cryptography-3.3.23.tar.gz", hash = "sha256:b85c45fd4d3d92e8b18e9a5ee2da84517e8fff658e3ef5755c885b1c2a27c1fe"}, + {file = "types_cryptography-3.3.23-py3-none-any.whl", hash = "sha256:913b3e66a502edbf4bfc3bb45e33ab476040c56942164a7ff37bd1f0ef8ef783"}, +] tzdata = [ - {file = "tzdata-2022.1-py2.py3-none-any.whl", hash = "sha256:238e70234214138ed7b4e8a0fab0e5e13872edab3be586ab8198c407620e2ab9"}, - {file = "tzdata-2022.1.tar.gz", hash = "sha256:8b536a8ec63dc0751342b3984193a3118f8fca2afe25752bb9b7fffd398552d3"}, + {file = "tzdata-2022.4-py2.py3-none-any.whl", hash = 
"sha256:74da81ecf2b3887c94e53fc1d466d4362aaf8b26fc87cda18f22004544694583"}, + {file = "tzdata-2022.4.tar.gz", hash = "sha256:ada9133fbd561e6ec3d1674d3fba50251636e918aa97bd59d63735bef5a513bb"}, ] urllib3 = [ - {file = "urllib3-1.26.10-py2.py3-none-any.whl", hash = "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec"}, - {file = "urllib3-1.26.10.tar.gz", hash = "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6"}, + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] virtualenv = [ - {file = "virtualenv-20.15.1-py2.py3-none-any.whl", hash = "sha256:b30aefac647e86af6d82bfc944c556f8f1a9c90427b2fb4e3bfbf338cb82becf"}, - {file = "virtualenv-20.15.1.tar.gz", hash = "sha256:288171134a2ff3bfb1a2f54f119e77cd1b81c29fc1265a2356f3e8d14c7d58c4"}, + {file = "virtualenv-20.16.5-py3-none-any.whl", hash = "sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"}, + {file = "virtualenv-20.16.5.tar.gz", hash = "sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"}, ] whitenoise = [ - {file = "whitenoise-5.3.0-py2.py3-none-any.whl", hash = "sha256:d963ef25639d1417e8a247be36e6aedd8c7c6f0a08adcb5a89146980a96b577c"}, - {file = "whitenoise-5.3.0.tar.gz", hash = "sha256:d234b871b52271ae7ed6d9da47ffe857c76568f11dd30e28e18c5869dbd11e12"}, -] -zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, + {file = "whitenoise-6.2.0-py3-none-any.whl", hash = "sha256:8e9c600a5c18bd17655ef668ad55b5edf6c24ce9bdca5bf607649ca4b1e8e2c2"}, + {file = "whitenoise-6.2.0.tar.gz", hash = "sha256:8fa943c6d4cd9e27673b70c21a07b0aa120873901e099cd46cab40f7cc96d567"}, ] diff --git a/pydis_site/apps/api/migrations/0013_specialsnake_image.py b/pydis_site/apps/api/migrations/0013_specialsnake_image.py index a0d0d318..8ba3432f 100644 --- a/pydis_site/apps/api/migrations/0013_specialsnake_image.py +++ b/pydis_site/apps/api/migrations/0013_specialsnake_image.py @@ -2,7 +2,6 @@ import datetime from django.db import migrations, models -from django.utils.timezone import utc class Migration(migrations.Migration): @@ -15,7 +14,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='specialsnake', name='image', - field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=utc)), + field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=datetime.timezone.utc)), preserve_default=False, ), ] diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py index bfa54721..89ae27e4 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -1,9 +1,8 @@ -from datetime import datetime +import datetime from django.contrib.postgres import fields as pgfields from django.core.validators import MinValueValidator from django.db import models -from django.utils import timezone from pydis_site.apps.api.models.bot.user import User from pydis_site.apps.api.models.mixins import ModelReprMixin @@ -60,11 +59,11 @@ class Message(ModelReprMixin, models.Model): ) @property - def timestamp(self) -> datetime: + def timestamp(self) -> datetime.datetime: 
"""Attribute that represents the message timestamp as derived from the snowflake id.""" - tz_naive_datetime = datetime.utcfromtimestamp(((self.id >> 22) + 1420070400000) / 1000) - tz_aware_datetime = timezone.make_aware(tz_naive_datetime, timezone=timezone.utc) - return tz_aware_datetime + return datetime.datetime.utcfromtimestamp( + ((self.id >> 22) + 1420070400000) / 1000 + ).replace(tzinfo=datetime.timezone.utc) class Meta: """Metadata provided for Django's ORM.""" diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 2b89fdb3..93d29391 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -1,9 +1,8 @@ -from datetime import datetime +import datetime from django.db import IntegrityError from django.db.models import QuerySet from django.http.request import HttpRequest -from django.utils import timezone from django_filters.rest_framework import DjangoFilterBackend from rest_framework.decorators import action from rest_framework.exceptions import ValidationError @@ -185,23 +184,21 @@ class InfractionViewSet( filter_expires_after = self.request.query_params.get('expires_after') if filter_expires_after: try: - expires_after_parsed = datetime.fromisoformat(filter_expires_after) + expires_after_parsed = datetime.datetime.fromisoformat(filter_expires_after) except ValueError: raise ValidationError({'expires_after': ['failed to convert to datetime']}) - additional_filters['expires_at__gte'] = timezone.make_aware( - expires_after_parsed, - timezone=timezone.utc, + additional_filters['expires_at__gte'] = expires_after_parsed.replace( + tzinfo=datetime.timezone.utc ) filter_expires_before = self.request.query_params.get('expires_before') if filter_expires_before: try: - expires_before_parsed = datetime.fromisoformat(filter_expires_before) + expires_before_parsed = datetime.datetime.fromisoformat(filter_expires_before) except ValueError: raise ValidationError({'expires_before': ['failed to convert to datetime']}) - additional_filters['expires_at__lte'] = timezone.make_aware( - expires_before_parsed, - timezone=timezone.utc, + additional_filters['expires_at__lte'] = expires_before_parsed.replace( + tzinfo=datetime.timezone.utc ) if 'expires_at__lte' in additional_filters and 'expires_at__gte' in additional_filters: diff --git a/pyproject.toml b/pyproject.toml index 382b31d9..c7f88891 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,40 +7,39 @@ license = "MIT" [tool.poetry.dependencies] python = "3.10.*" -django = "4.0" # 4.1 blocked by upstream bug, wait for 4.1.1: https://code.djangoproject.com/ticket/33919 +django = "4.1.1" django-environ = "0.9.0" django-filter = "22.1" -djangorestframework = "3.13.1" +djangorestframework = "3.14.0" psycopg2-binary = "2.9.3" django-simple-bulma = "2.5.0" whitenoise = "6.2.0" httpx = "0.23.0" pyyaml = "6.0" gunicorn = "20.1.0" -sentry-sdk = "1.9.5" +sentry-sdk = "1.9.9" markdown = "3.4.1" python-frontmatter = "1.0.0" django-prometheus = "2.2.0" django-distill = "3.0.1" -PyJWT = {version = "2.4.0", extras = ["crypto"]} +PyJWT = {version = "2.5.0", extras = ["crypto"]} [tool.poetry.dev-dependencies] -coverage = "6.4.4" +coverage = "6.5.0" flake8 = "5.0.4" flake8-annotations = "2.9.1" -# flake8-bandit is broken at the moment with flake8 v5: https://github.com/tylerwince/flake8-bandit/pull/35 -# flake8-bandit = "3.0.0" -flake8-bugbear = "22.7.1" +flake8-bandit = "4.1.1" +flake8-bugbear = "22.9.23" flake8-docstrings = "1.6.0" 
flake8-import-order = "0.18.1" flake8-tidy-imports = "4.8.0" flake8-string-format = "0.3.0" flake8-todo = "0.7" -pep8-naming = "0.13.1" +pep8-naming = "0.13.2" pre-commit = "2.20.0" -pyfakefs = "4.6.3" -taskipy = "1.10.2" -python-dotenv = "0.20.0" +pyfakefs = "4.7.0" +taskipy = "1.10.3" +python-dotenv = "0.21.0" [build-system] requires = ["poetry-core>=1.0.0"] -- cgit v1.2.3 From a97e609aba84b2aa30184f1b4b2b422418d8ea6c Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 30 Sep 2022 18:43:46 +0400 Subject: Fix B026 Linting Errors Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/tests/test_github_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index f642f689..2eaf48d9 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -28,7 +28,7 @@ class GeneralUtilityTests(unittest.TestCase): """ self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.") return original_encode( - payload, "secret-encoding-key", algorithm="HS256", *args, **kwargs + payload, "secret-encoding-key", *args, algorithm="HS256", **kwargs ) original_encode = jwt.encode -- cgit v1.2.3 From f3b9c75b56dd2583f2f5e35146dbaf9c5429e769 Mon Sep 17 00:00:00 2001 From: Hassan Abouelela Date: Fri, 30 Sep 2022 22:14:57 +0400 Subject: Increase Request Timeout For GitHub API Signed-off-by: Hassan Abouelela --- pydis_site/apps/api/github_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py index 5d7bcdc3..986c64e1 100644 --- a/pydis_site/apps/api/github_utils.py +++ b/pydis_site/apps/api/github_utils.py @@ -108,7 +108,7 @@ def authorize(owner: str, repo: str) -> httpx.Client: client = httpx.Client( base_url=settings.GITHUB_API, headers={"Authorization": f"bearer {generate_token()}"}, - timeout=settings.TIMEOUT_PERIOD, + timeout=10, ) try: -- cgit v1.2.3 From 855ce1e018bbb3a489c28768f60300c297890281 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 30 Sep 2022 21:51:25 +0300 Subject: Fix send_alert not being added correctly in serializers --- pydis_site/apps/api/serializers.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 0976ed29..50200035 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -154,6 +154,7 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'filter_dm', 'dm_pings', 'delete_messages', + 'send_alert', 'bypass_roles', 'enabled', 'enabled_channels', -- cgit v1.2.3 From 1970a3651db1e1a4f2ef92c85a0a733fa23fa6f0 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 1 Oct 2022 19:48:25 +0300 Subject: Bring back enabled categories There needs to be a way to only enable a filter in a specific category, so this setting now fulfills that role. Disabled channels can be used to disable a filter in a specific channel within the category. 
--- pydis_site/apps/api/migrations/0084_new_filter_schema.py | 4 ++++ .../apps/api/migrations/0085_unique_constraint_filters.py | 1 + pydis_site/apps/api/models/bot/filters.py | 14 +++++++++----- pydis_site/apps/api/serializers.py | 6 ++++-- 4 files changed, 18 insertions(+), 7 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0084_new_filter_schema.py b/pydis_site/apps/api/migrations/0084_new_filter_schema.py index ba228d70..74e1f009 100644 --- a/pydis_site/apps/api/migrations/0084_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0084_new_filter_schema.py @@ -51,6 +51,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disabled_channels=[], disabled_categories=(["CODE JAM"] if name in ("FILE_FORMAT", "GUILD_INVITE") else []), enabled_channels=[], + enabled_categories=[], send_alert=(name in ('GUILD_INVITE', 'DOMAIN_NAME', 'FILTER_TOKEN')) ) @@ -74,6 +75,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: disabled_channels=None, disabled_categories=None, enabled_channels=None, + enabled_categories=None, send_alert=None, ) new_object.save() @@ -111,6 +113,7 @@ class Migration(migrations.Migration): ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", null=True, size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", null=True, size=None)), + ('enabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="The only categories in which to run the filter.", null=True, size=None)), ('send_alert', models.BooleanField(help_text='Whether an alert should be sent.', null=True)), ], ), @@ -134,6 +137,7 @@ class Migration(migrations.Migration): ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", size=None)), + ('enabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="The only categories in which to run the filter.", size=None)), ('send_alert', models.BooleanField(help_text='Whether an alert should be sent.')), ], ), diff --git a/pydis_site/apps/api/migrations/0085_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0085_unique_constraint_filters.py index 418c6e71..55ede901 100644 --- a/pydis_site/apps/api/migrations/0085_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0085_unique_constraint_filters.py @@ -30,6 +30,7 @@ class Migration(migrations.Migration): 'send_alert', 'enabled_channels', 'disabled_channels', + 'enabled_categories', 'disabled_categories' ), name='unique_filters'), ), diff --git 
a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 1fb9707d..95a10e42 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -84,11 +84,6 @@ class FilterList(FilterSettingsMixin): help_text="Whether an alert should be sent.", ) # Where a filter should apply. - # - # The resolution is done in the following order: - # - enabled_channels - # - disabled_categories - # - disabled_channels enabled_channels = ArrayField( models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category." @@ -97,6 +92,10 @@ class FilterList(FilterSettingsMixin): models.CharField(max_length=100), help_text="Channels in which to not run the filter." ) + enabled_categories = ArrayField( + models.CharField(max_length=100), + help_text="The only categories in which to run the filter." + ) disabled_categories = ArrayField( models.CharField(max_length=100), help_text="Categories in which to not run the filter." @@ -165,6 +164,11 @@ class FilterBase(FilterSettingsMixin): models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True ) + enabled_categories = ArrayField( + models.CharField(max_length=100), + help_text="The only categories in which to run the filter.", + null=True + ) disabled_categories = ArrayField( models.CharField(max_length=100), help_text="Categories in which to not run the filter.", diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 50200035..26bda035 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -159,6 +159,7 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'enabled', 'enabled_channels', 'disabled_channels', + 'enabled_categories', 'disabled_categories', ) @@ -183,6 +184,7 @@ CHANNEL_SCOPE_FIELDS = ( "disabled_channels", "disabled_categories", "enabled_channels", + "enabled_categories" ) MENTIONS_FIELDS = ("guild_pings", "dm_pings") @@ -208,9 +210,9 @@ class FilterSerializer(ModelSerializer): raise ValidationError("Enabled and Disabled channels lists contain duplicates.") if data.get('disabled_categories') is not None: - categories_collection = data['disabled_categories'] + categories_collection = data['disabled_categories'] + data['enabled_categories'] if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Disabled categories lists contain duplicates.") + raise ValidationError("Enabled and Disabled categories lists contain duplicates.") return data -- cgit v1.2.3 From 862d00162309f4c061508545a377309bbd1871eb Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 7 Oct 2022 16:51:49 +0300 Subject: Properly add dm_embed to serializers --- pydis_site/apps/api/serializers.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 7c1c107a..0dcbf2ee 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -144,6 +144,7 @@ class DocumentationLinkSerializer(ModelSerializer): ALWAYS_OPTIONAL_SETTINGS = ( 'dm_content', + 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', -- cgit v1.2.3 From e5d655a81f71c4b5bfb15d567bc11f88023e5879 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 8 Oct 2022 01:18:59 +0300 Subject: Add infraction channel setting --- pydis_site/apps/api/migrations/0085_new_filter_schema.py | 5 +++++ 
.../apps/api/migrations/0086_unique_constraint_filters.py | 1 + pydis_site/apps/api/models/bot/filters.py | 11 +++++++++++ pydis_site/apps/api/serializers.py | 6 +++++- 4 files changed, 22 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index d16c26ac..2e721df4 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -3,6 +3,7 @@ from datetime import timedelta import django.contrib.postgres.fields from django.apps.registry import Apps +from django.core.validators import MinValueValidator from django.db import migrations, models import django.db.models.deletion from django.db.backends.base.schema import BaseDatabaseSchemaEditor @@ -48,6 +49,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: infraction_type="", infraction_reason="", infraction_duration=timedelta(seconds=0), + infraction_channel=None, disabled_channels=[], disabled_categories=(["CODE JAM"] if name in ("FILE_FORMAT", "GUILD_INVITE") else []), enabled_channels=[], @@ -72,6 +74,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: infraction_type=None, infraction_reason=None, infraction_duration=None, + infraction_channel=None, disabled_channels=None, disabled_categories=None, enabled_channels=None, @@ -110,6 +113,7 @@ class Migration(migrations.Migration): ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), + ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", null=True, size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", null=True, size=None)), @@ -134,6 +138,7 @@ class Migration(migrations.Migration): ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), + ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", size=None)), diff --git a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py index 8072ed2e..e7816e19 100644 --- a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py @@ -18,6 +18,7 @@ class Migration(migrations.Migration): 'infraction_type', 'infraction_reason', 'infraction_duration', + 'infraction_channel', 'content', 'additional_field', 'filter_list', diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 95a10e42..22482870 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -1,4 +1,5 @@ from django.contrib.postgres.fields import ArrayField +from django.core.validators import MinValueValidator from django.db import models from django.db.models import UniqueConstraint @@ -41,6 +42,16 @@ class FilterSettingsMixin(models.Model): null=True, help_text="The duration of the infraction. Null if permanent." ) + infraction_channel = models.BigIntegerField( + validators=( + MinValueValidator( + limit_value=0, + message="Channel IDs cannot be negative." 
+ ), + ), + help_text="Channel in which to send the infraction.", + null=True + ) class Meta: """Metaclass for settings mixin.""" diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 0dcbf2ee..83471ca2 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -148,6 +148,7 @@ ALWAYS_OPTIONAL_SETTINGS = ( 'infraction_type', 'infraction_reason', 'infraction_duration', + 'infraction_channel', ) REQUIRED_FOR_FILTER_LIST_SETTINGS = ( @@ -178,6 +179,7 @@ INFRACTION_AND_NOTIFICATION_FIELDS = ( "infraction_type", "infraction_reason", "infraction_duration", + "infraction_channel", "dm_content", "dm_embed" ) @@ -230,6 +232,7 @@ class FilterSerializer(ModelSerializer): 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, 'enabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, 'disabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'enabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, 'disabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, } @@ -305,6 +308,7 @@ class FilterListSerializer(ModelSerializer): 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, 'enabled_channels': {'allow_empty': True}, 'disabled_channels': {'allow_empty': True}, + 'enabled_categories': {'allow_empty': True}, 'disabled_categories': {'allow_empty': True}, } @@ -314,7 +318,7 @@ class FilterListSerializer(ModelSerializer): queryset=FilterList.objects.all(), fields=('name', 'list_type'), message=( - "A filterlist with the same name and type already exist." + "A filterlist with the same name and type already exists." ) ), ] -- cgit v1.2.3 From e23fcc3f1d8575243bb4acee3b8747d05e21ef22 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 8 Oct 2022 12:02:56 +0300 Subject: Fix categories validation --- pydis_site/apps/api/serializers.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 83471ca2..13cd7fea 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -212,7 +212,10 @@ class FilterSerializer(ModelSerializer): if len(channels_collection) != len(set(channels_collection)): raise ValidationError("Enabled and Disabled channels lists contain duplicates.") - if data.get('disabled_categories') is not None: + if ( + data.get('disabled_categories') is not None + and data.get('enabled_categories') is not None + ): categories_collection = data['disabled_categories'] + data['enabled_categories'] if len(categories_collection) != len(set(categories_collection)): raise ValidationError("Enabled and Disabled categories lists contain duplicates.") -- cgit v1.2.3 From 88a2be8ec1dc0bb85d3ac50f3f24b70a8ce12b3e Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 8 Oct 2022 12:07:38 +0300 Subject: Allow ping arrays to be empty --- pydis_site/apps/api/serializers.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 13cd7fea..7a5e76b7 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -237,6 +237,8 @@ class FilterSerializer(ModelSerializer): 'disabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, 'enabled_categories': {'allow_empty': True, 'allow_null': True, 
'required': False}, 'disabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'guild_pings': {'allow_empty': True, 'allow_null': True, 'required': False}, + 'dm_pings': {'allow_empty': True, 'allow_null': True, 'required': False}, } def to_representation(self, instance: Filter) -> dict: @@ -313,6 +315,8 @@ class FilterListSerializer(ModelSerializer): 'disabled_channels': {'allow_empty': True}, 'enabled_categories': {'allow_empty': True}, 'disabled_categories': {'allow_empty': True}, + 'guild_pings': {'allow_empty': True}, + 'dm_pings': {'allow_empty': True}, } # Ensure that we can only have one filter list with the same name and field -- cgit v1.2.3 From c3747b6d09ff968858eab698eb5fcffb9c3fbd1f Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 8 Oct 2022 15:44:37 +0300 Subject: Allow char fields to be blank This is necessary allow filters to define a blank message when the default is not blank. Additionally allows bypass_roles to be empty like the other array fields --- pydis_site/apps/api/migrations/0085_new_filter_schema.py | 12 ++++++------ pydis_site/apps/api/models/bot/filters.py | 5 ++++- pydis_site/apps/api/serializers.py | 12 +++++++++++- 3 files changed, 21 insertions(+), 8 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index 2e721df4..a38194ef 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -108,10 +108,10 @@ class Migration(migrations.Migration): ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), - ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), + ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True, blank=True)), ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.', null=True)), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), @@ -133,10 +133,10 @@ class Migration(migrations.Migration): ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), - ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True)), - ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), + ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True, blank=True)), ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 22482870..81b72c6e 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -20,12 +20,14 @@ class FilterSettingsMixin(models.Model): dm_content = models.CharField( max_length=1000, null=True, + blank=True, help_text="The DM to send to a user triggering this filter." 
) dm_embed = models.CharField( max_length=2000, help_text="The content of the DM embed", - null=True + null=True, + blank=True ) infraction_type = models.CharField( choices=[(choices[0].upper(), choices[1]) for choices in Infraction.TYPE_CHOICES], @@ -36,6 +38,7 @@ class FilterSettingsMixin(models.Model): infraction_reason = models.CharField( max_length=1000, help_text="The reason to give for the infraction.", + blank=True, null=True ) infraction_duration = models.DurationField( diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 7a5e76b7..a42d567b 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -151,6 +151,13 @@ ALWAYS_OPTIONAL_SETTINGS = ( 'infraction_channel', ) +ALWAYS_BLANKABLE_SETTINGS = ( + 'dm_content', + 'dm_embed', + 'infraction_type', + 'infraction_reason', +) + REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'guild_pings', 'filter_dm', @@ -310,7 +317,10 @@ class FilterListSerializer(ModelSerializer): extra_kwargs = { field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS } | { - 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, + field: {'allow_blank': True, 'allow_null': True, 'required': False} + for field in ALWAYS_BLANKABLE_SETTINGS + } | { + 'bypass_roles': {'allow_empty': True}, 'enabled_channels': {'allow_empty': True}, 'disabled_channels': {'allow_empty': True}, 'enabled_categories': {'allow_empty': True}, -- cgit v1.2.3 From 65559eee45f0b17e4db3c80ad8b147d5413fab6f Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 18 Oct 2022 18:59:32 +0300 Subject: Refactors filters serialier --- pydis_site/apps/api/serializers.py | 170 +++++++++++++++++++------------------ 1 file changed, 89 insertions(+), 81 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index a42d567b..aac8d06e 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -142,7 +142,24 @@ class DocumentationLinkSerializer(ModelSerializer): fields = ('package', 'base_url', 'inventory_url') -ALWAYS_OPTIONAL_SETTINGS = ( +# region: filters serializers + + +REQUIRED_FOR_FILTER_LIST_SETTINGS = ( + 'guild_pings', + 'filter_dm', + 'dm_pings', + 'delete_messages', + 'send_alert', + 'bypass_roles', + 'enabled', + 'enabled_channels', + 'disabled_channels', + 'enabled_categories', + 'disabled_categories', +) + +OPTIONAL_FOR_FILTER_LIST_SETTINGS = ( 'dm_content', 'dm_embed', 'infraction_type', @@ -151,25 +168,21 @@ ALWAYS_OPTIONAL_SETTINGS = ( 'infraction_channel', ) -ALWAYS_BLANKABLE_SETTINGS = ( +ALLOW_BLANK_SETTINGS = ( 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', ) -REQUIRED_FOR_FILTER_LIST_SETTINGS = ( - 'guild_pings', - 'filter_dm', - 'dm_pings', - 'delete_messages', - 'send_alert', - 'bypass_roles', - 'enabled', +ALLOW_EMPTY_SETTINGS = ( 'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories', + 'guild_pings', + 'dm_pings', + 'bypass_roles', ) # Required fields for custom JSON representation purposes @@ -198,7 +211,20 @@ CHANNEL_SCOPE_FIELDS = ( ) MENTIONS_FIELDS = ("guild_pings", "dm_pings") -SETTINGS_FIELDS = ALWAYS_OPTIONAL_SETTINGS + REQUIRED_FOR_FILTER_LIST_SETTINGS +SETTINGS_FIELDS = REQUIRED_FOR_FILTER_LIST_SETTINGS + OPTIONAL_FOR_FILTER_LIST_SETTINGS + + +def _create_filter_meta_extra_kwargs() -> dict[str, dict[str, bool]]: + """Create the extra kwargs of the Filter serializer's Meta class.""" + extra_kwargs = {} + 
for field in SETTINGS_FIELDS: + field_args = {'required': False, 'allow_null': True} + if field in ALLOW_BLANK_SETTINGS: + field_args['allow_blank'] = True + if field in ALLOW_EMPTY_SETTINGS: + field_args['allow_empty'] = True + extra_kwargs[field] = field_args + return extra_kwargs class FilterSerializer(ModelSerializer): @@ -236,17 +262,7 @@ class FilterSerializer(ModelSerializer): fields = ( 'id', 'content', 'description', 'additional_field', 'filter_list' ) + SETTINGS_FIELDS - extra_kwargs = { - field: {'required': False, 'allow_null': True} for field in SETTINGS_FIELDS - } | { - 'infraction_reason': {'allow_blank': True, 'allow_null': True, 'required': False}, - 'enabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'disabled_channels': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'enabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'disabled_categories': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'guild_pings': {'allow_empty': True, 'allow_null': True, 'required': False}, - 'dm_pings': {'allow_empty': True, 'allow_null': True, 'required': False}, - } + extra_kwargs = _create_filter_meta_extra_kwargs() def to_representation(self, instance: Filter) -> dict: """ @@ -258,28 +274,36 @@ class FilterSerializer(ModelSerializer): Furthermore, it puts the fields that meant to represent Filter settings, into a sub-field called `settings`. """ - schema_settings = { - "settings": - {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - | { - "infraction_and_notification": - {name: getattr(instance, name) - for name in INFRACTION_AND_NOTIFICATION_FIELDS} - } | { - "channel_scope": - {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} - } | { - "mentions": - { - schema_field_name: getattr(instance, schema_field_name) - for schema_field_name in MENTIONS_FIELDS - } - } + settings = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} + settings["infraction_and_notification"] = { + name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS + } + settings["channel_scope"] = { + name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS } - schema_base = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} | \ - {"filter_list": instance.filter_list.id} + settings["mentions"] = { + name: getattr(instance, name) for name in MENTIONS_FIELDS + } + + schema = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} + schema["filter_list"] = instance.filter_list.id + schema["settings"] = settings + return schema - return schema_base | schema_settings + +def _create_filter_list_meta_extra_kwargs() -> dict[str, dict[str, bool]]: + """Create the extra kwargs of the FilterList serializer's Meta class.""" + extra_kwargs = {} + for field in SETTINGS_FIELDS: + field_args = {} + if field in OPTIONAL_FOR_FILTER_LIST_SETTINGS: + field_args = {'required': False, 'allow_null': True} + if field in ALLOW_BLANK_SETTINGS: + field_args['allow_blank'] = True + if field in ALLOW_EMPTY_SETTINGS: + field_args['allow_empty'] = True + extra_kwargs[field] = field_args + return extra_kwargs class FilterListSerializer(ModelSerializer): @@ -302,10 +326,13 @@ class FilterListSerializer(ModelSerializer): if len(channels_collection) != len(set(channels_collection)): raise ValidationError("Enabled and Disabled channels lists contain duplicates.") - if data.get('disabled_categories') is not None: - categories_collection = data['disabled_categories'] + if ( + 
data.get('disabled_categories') is not None + and data.get('enabled_categories') is not None + ): + categories_collection = data['disabled_categories'] + data['enabled_categories'] if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Disabled categories lists contain duplicates.") + raise ValidationError("Enabled and Disabled categories lists contain duplicates.") return data @@ -314,22 +341,9 @@ class FilterListSerializer(ModelSerializer): model = FilterList fields = ('id', 'name', 'list_type', 'filters') + SETTINGS_FIELDS - extra_kwargs = { - field: {'required': False, 'allow_null': True} for field in ALWAYS_OPTIONAL_SETTINGS - } | { - field: {'allow_blank': True, 'allow_null': True, 'required': False} - for field in ALWAYS_BLANKABLE_SETTINGS - } | { - 'bypass_roles': {'allow_empty': True}, - 'enabled_channels': {'allow_empty': True}, - 'disabled_channels': {'allow_empty': True}, - 'enabled_categories': {'allow_empty': True}, - 'disabled_categories': {'allow_empty': True}, - 'guild_pings': {'allow_empty': True}, - 'dm_pings': {'allow_empty': True}, - } + extra_kwargs = _create_filter_list_meta_extra_kwargs() - # Ensure that we can only have one filter list with the same name and field + # Ensure there can only be one filter list with the same name and type. validators = [ UniqueTogetherValidator( queryset=FilterList.objects.all(), @@ -350,29 +364,23 @@ class FilterListSerializer(ModelSerializer): Furthermore, it puts the fields that meant to represent FilterList settings, into a sub-field called `settings`. """ - # Fetches the relating filters - filters = [ - FilterSerializer(many=False).to_representation( - instance=item - ) for item in Filter.objects.filter( - filter_list=instance.id - ) + schema = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} + schema["filters"] = [ + FilterSerializer(many=False).to_representation(instance=item) + for item in Filter.objects.filter(filter_list=instance.id) ] - schema_base = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} \ - | {"filters": filters} - schema_settings_base = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - schema_settings_categories = { - "infraction_and_notification": - {name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS}} \ - | { - "channel_scope": - {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS}} | { - "mentions": { - schema_field_name: getattr(instance, schema_field_name) - for schema_field_name in MENTIONS_FIELDS - } + + settings = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} + settings["infraction_and_notification"] = { + name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS } - return schema_base | {"settings": schema_settings_base | schema_settings_categories} + settings["channel_scope"] = {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} + settings["mentions"] = {name: getattr(instance, name) for name in MENTIONS_FIELDS} + + schema["settings"] = settings + return schema + +# endregion class InfractionSerializer(ModelSerializer): -- cgit v1.2.3 From ecc249f8829209d0427b6819b87fd3bdc0087c89 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 27 Oct 2022 16:14:05 +0100 Subject: Add metricity query for messages in past n days Takes multiple users for efficiency as we may want to calculate this for many users at once. 
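For context, a short usage sketch of the new query (not part of this commit's diff). It assumes a reachable metricity database and reuses the illustrative user IDs from the endpoint documentation added later in this series.

from pydis_site.apps.api.models.bot.metricity import Metricity

# Count messages sent in the past 7 days by two example users.
with Metricity() as metricity:
    counts = metricity.total_messages_in_past_n_days(
        ["409107086526644234", "493839819168808962"], 7
    )

# Each row is an (author_id, message_count) pair; users with no messages
# in the window are simply absent from the result.
for author_id, message_count in counts:
    print(author_id, message_count)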
--- pydis_site/apps/api/models/bot/metricity.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index abd25ef0..73bc1f0c 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -130,3 +130,31 @@ class Metricity: raise NotFoundError() return values + + def total_messages_in_past_n_days( + self, + user_ids: list[str], + days: int + ) -> list[tuple[int, int]]: + """ + Query activity by a list of users in the past `days` days. + + Returns a list of (user_id, message_count) tuples. + """ + self.cursor.execute( + """ + SELECT + author_id, COUNT(*) + FROM messages + WHERE + author_id IN %s + AND NOT is_deleted + AND channel_id NOT IN %s + AND created_at > now() - interval '%s days' + GROUP BY author_id + """, + [tuple(user_ids), EXCLUDE_CHANNELS, days] + ) + values = self.cursor.fetchall() + + return values -- cgit v1.2.3 From 4ce59374766849700b08c208b7c581be5037cd02 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 27 Oct 2022 16:17:44 +0100 Subject: Add API endpoint for activity data I really had to work against DRF to get this working. Using the validator manually here isn't ideal but I couldn't see an obvious better way without adding a bunch of boilerplate code. It seems to work. --- pydis_site/apps/api/viewsets/bot/user.py | 60 +++++++++++++++++++++++++++++++- 1 file changed, 59 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index ba1bcd9d..f1aebee0 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -3,8 +3,9 @@ from collections import OrderedDict from django.db.models import Q from django_filters.rest_framework import DjangoFilterBackend -from rest_framework import status +from rest_framework import fields, status from rest_framework.decorators import action +from rest_framework.exceptions import ParseError from rest_framework.pagination import PageNumberPagination from rest_framework.request import Request from rest_framework.response import Response @@ -138,6 +139,30 @@ class UserViewSet(ModelViewSet): - 200: returned on success - 404: if a user with the given `snowflake` could not be found + ### GET /bot/users/metricity_activity_data + Gets the number of messages sent on the server in a given period. + + Users with no messages in the specified period or who do not + exist are not included in the result. + + #### Required Query Parameters + - days: how many days into the past to count message from. + + #### Request Format + >>> [ + ... 409107086526644234, + ... 493839819168808962 + ... ] + + #### Response format + >>> [ + ... {"id": 409107086526644234, "message_count": 54} + ... ] + + #### Status codes + - 200: returned on success + - 400: if request body or query parameters were missing or invalid + ### POST /bot/users Adds a single or multiple new users. The roles attached to the user(s) must be roles known by the site. 
@@ -298,3 +323,36 @@ class UserViewSet(ModelViewSet): except NotFoundError: return Response(dict(detail="User not found in metricity"), status=status.HTTP_404_NOT_FOUND) + + @action(detail=False) + def metricity_activity_data(self, request: Request) -> Response: + """Request handler for metricity_activity_data endpoint.""" + if "days" in request.query_params: + try: + days = int(request.query_params["days"]) + except ValueError: + raise ParseError(detail={ + "days": ["This query parameter must be an integer."] + }) + else: + raise ParseError(detail={ + "days": ["This query parameter is required."] + }) + + user_id_list_validator = fields.ListField( + child=fields.IntegerField(min_value=0), + allow_empty=False + ) + user_ids = [ + str(user_id) for user_id in + user_id_list_validator.run_validation(request.data) + ] + + with Metricity() as metricity: + data = metricity.total_messages_in_past_n_days(user_ids, days) + + response_data = [ + {"id": d[0], "message_count": d[1]} + for d in data + ] + return Response(response_data, status=status.HTTP_200_OK) -- cgit v1.2.3 From fca789323f750ff74bb5f4de92f7a8b96eb51e1f Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 27 Oct 2022 16:21:30 +0100 Subject: Add tests for metricity activity endpoint --- pydis_site/apps/api/tests/test_users.py | 100 ++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 5d10069d..60be8598 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -1,3 +1,4 @@ +import json import random from unittest.mock import Mock, patch @@ -502,6 +503,105 @@ class UserMetricityTests(AuthenticatedAPITestCase): "total_messages": total_messages }) + def test_metricity_activity_data(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + self.metricity.total_messages_in_past_n_days.return_value = [[0, 10]] + + # When + url = reverse("api:bot:user-metricity-activity-data") + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 200) + self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10) + self.assertEqual(response.json(), [{"id": 0, "message_count": 10}]) + + def test_metricity_activity_data_invalid_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse("api:bot:user-metricity-activity-data") + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + QUERY_STRING="days=fifty", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {"days": ["This query parameter must be an integer."]}) + + def test_metricity_activity_data_no_days(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. 
+ response = self.client.generic( + "GET", + url, + data=json.dumps([0, 1]), + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'days': ["This query parameter is required."]}) + + def test_metricity_activity_data_no_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), ['Expected a list of items but got type "dict".']) + + def test_metricity_activity_data_invalid_users(self): + # Given + self.mock_no_metricity_user() # Other functions shouldn't be used. + + # When + url = reverse('api:bot:user-metricity-activity-data') + # Can't send data in body with normal GET request so use generic request. + response = self.client.generic( + "GET", + url, + data=json.dumps([123, 'username']), + QUERY_STRING="days=10", + content_type="application/json" + ) + + # Then + self.assertEqual(response.status_code, 400) + self.metricity.total_messages_in_past_n_days.assert_not_called() + self.assertEqual(response.json(), {'1': ['A valid integer is required.']}) + def mock_metricity_user(self, joined_at, total_messages, total_blocks, top_channel_activity): patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity") self.metricity = patcher.start() -- cgit v1.2.3 From c248047efd5ea9d0c899f9a2e577735649652fb4 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Thu, 27 Oct 2022 00:14:46 +0300 Subject: Add uniques filter list to migrations --- .../apps/api/migrations/0087_unique_filter_list.py | 101 +++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0087_unique_filter_list.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0087_unique_filter_list.py b/pydis_site/apps/api/migrations/0087_unique_filter_list.py new file mode 100644 index 00000000..843bb00a --- /dev/null +++ b/pydis_site/apps/api/migrations/0087_unique_filter_list.py @@ -0,0 +1,101 @@ +from datetime import timedelta + +from django.apps.registry import Apps +from django.db import migrations + +import pydis_site.apps.api.models.bot.filters + + +def create_unique_list(apps: Apps, _): + """Create the 'unique' FilterList and its related Filters.""" + filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") + filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") + + list_ = filter_list.objects.create( + name="unique", + list_type=0, + guild_pings=[], + filter_dm=True, + dm_pings=[], + delete_messages=False, + bypass_roles=[], + enabled=True, + dm_content="", + dm_embed="", + infraction_type="", + infraction_reason="", + infraction_duration=timedelta(seconds=0), + infraction_channel=None, + disabled_channels=[], + disabled_categories=[], + enabled_channels=[], + enabled_categories=[], + send_alert=True + ) + + everyone = filter_.objects.create( + content="everyone", + filter_list=list_, + description="", + delete_messages=True, + bypass_roles=["Helpers"], + dm_content=( + "Please don't try to ping `@everyone` or `@here`. 
Your message has been removed. " + "If you believe this was a mistake, please let staff know!" + ), + ) + everyone.save() + + webhook = filter_.objects.create( + content="webhook", + filter_list=list_, + description="", + delete_messages=True, + dm_content=( + "Looks like you posted a Discord webhook URL. " + "Therefore, your message has been removed, and your webhook has been deleted. " + "You can re-create it if you wish to. " + "If you believe this was a mistake, please let us know." + ), + ) + webhook.save() + + rich_embed = filter_.objects.create( + content="rich_embed", + filter_list=list_, + description="", + guild_pings=["Moderators"], + dm_pings=["Moderators"] + ) + rich_embed.save() + + discord_token = filter_.objects.create( + content="discord_token", + filter_list=list_, + filter_dm=False, + delete_messages=True, + dm_content=( + "I noticed you posted a seemingly valid Discord API " + "token in your message and have removed your message. " + "This means that your token has been **compromised**. " + "Please change your token **immediately** at: " + "\n\n" + "Feel free to re-post it with the token removed. " + "If you believe this was a mistake, please let us know!" + ) + ) + discord_token.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0086_unique_constraint_filters'), + ] + + operations = [ + migrations.RunPython( + code=create_unique_list, + reverse_code=None + ), + ] \ No newline at end of file -- cgit v1.2.3 From d0a21729de55db9681d2e4e6689607c537ac5f79 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Tue, 1 Nov 2022 22:54:14 +0200 Subject: Add antispam list in migrations --- .../api/migrations/0088_antispam_filter_list.py | 52 ++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0088_antispam_filter_list.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py new file mode 100644 index 00000000..d425293f --- /dev/null +++ b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py @@ -0,0 +1,52 @@ +from datetime import timedelta + +from django.apps.registry import Apps +from django.db import migrations + +import pydis_site.apps.api.models.bot.filters + + +def create_antispam_list(apps: Apps, _): + """Create the 'unique' FilterList and its related Filters.""" + filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") + filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") + + list_ = filter_list.objects.create( + name="antispam", + list_type=0, + guild_pings=["Moderators"], + filter_dm=False, + dm_pings=[], + delete_messages=True, + bypass_roles=["Helpers"], + enabled=True, + dm_content="", + dm_embed="", + infraction_type="mute", + infraction_reason="", + infraction_duration=timedelta(seconds=600), + infraction_channel=None, + disabled_channels=[], + disabled_categories=["CODE JAM"], + enabled_channels=[], + enabled_categories=[], + send_alert=True + ) + + rules = ("duplicates", "attachments", "burst", "chars", "emoji", "links", "mentions", "newlines", "role_mentions") + + filter_.objects.bulk_create([filter_(content=rule, filter_list=list_) for rule in rules]) + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0087_unique_filter_list'), + ] + + operations = [ + migrations.RunPython( + code=create_antispam_list, + reverse_code=None + ), + ] -- cgit v1.2.3 From 
798c499c3c7673612a6815c0ab77d95be066d7ce Mon Sep 17 00:00:00 2001 From: wookie184 Date: Wed, 2 Nov 2022 18:57:22 +0000 Subject: Change the endpoint to be a POST not a GET --- pydis_site/apps/api/models/bot/metricity.py | 2 +- pydis_site/apps/api/tests/test_users.py | 34 ++++++++--------------------- pydis_site/apps/api/viewsets/bot/user.py | 6 ++--- 3 files changed, 13 insertions(+), 29 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index 73bc1f0c..f53dd33c 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -135,7 +135,7 @@ class Metricity: self, user_ids: list[str], days: int - ) -> list[tuple[int, int]]: + ) -> list[tuple[str, int]]: """ Query activity by a list of users in the past `days` days. diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 60be8598..9c0fa6ba 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -1,4 +1,3 @@ -import json import random from unittest.mock import Mock, patch @@ -510,13 +509,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse("api:bot:user-metricity-activity-data") - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), + data=[0, 1], QUERY_STRING="days=10", - content_type="application/json" ) # Then @@ -530,13 +526,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse("api:bot:user-metricity-activity-data") - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), + data=[0, 1], QUERY_STRING="days=fifty", - content_type="application/json" ) # Then @@ -550,12 +543,9 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([0, 1]), - content_type="application/json" + data=[0, 1], ) # Then @@ -569,12 +559,9 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. - response = self.client.generic( - "GET", + response = self.client.post( url, QUERY_STRING="days=10", - content_type="application/json" ) # Then @@ -588,13 +575,10 @@ class UserMetricityTests(AuthenticatedAPITestCase): # When url = reverse('api:bot:user-metricity-activity-data') - # Can't send data in body with normal GET request so use generic request. 
- response = self.client.generic( - "GET", + response = self.client.post( url, - data=json.dumps([123, 'username']), + data=[123, 'username'], QUERY_STRING="days=10", - content_type="application/json" ) # Then diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index f1aebee0..f803b3f6 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -139,7 +139,7 @@ class UserViewSet(ModelViewSet): - 200: returned on success - 404: if a user with the given `snowflake` could not be found - ### GET /bot/users/metricity_activity_data + ### POST /bot/users/metricity_activity_data Gets the number of messages sent on the server in a given period. Users with no messages in the specified period or who do not @@ -324,7 +324,7 @@ class UserViewSet(ModelViewSet): return Response(dict(detail="User not found in metricity"), status=status.HTTP_404_NOT_FOUND) - @action(detail=False) + @action(detail=False, methods=["POST"]) def metricity_activity_data(self, request: Request) -> Response: """Request handler for metricity_activity_data endpoint.""" if "days" in request.query_params: @@ -352,7 +352,7 @@ class UserViewSet(ModelViewSet): data = metricity.total_messages_in_past_n_days(user_ids, days) response_data = [ - {"id": d[0], "message_count": d[1]} + {"id": int(d[0]), "message_count": d[1]} for d in data ] return Response(response_data, status=status.HTTP_200_OK) -- cgit v1.2.3 From fee81cf1f4205024d663fc8055f04ed22bec9f32 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 4 Nov 2022 00:30:04 +0200 Subject: Stop using None as a valid setting value See e100ae9b on bot --- .../apps/api/migrations/0085_new_filter_schema.py | 18 ++--- .../migrations/0086_unique_constraint_filters.py | 6 +- .../apps/api/migrations/0087_unique_filter_list.py | 4 +- .../api/migrations/0088_antispam_filter_list.py | 4 +- pydis_site/apps/api/models/bot/filters.py | 84 +++++++++++++++------- pydis_site/apps/api/serializers.py | 23 ++---- 6 files changed, 82 insertions(+), 57 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index a38194ef..b0665ba5 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -46,10 +46,10 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: enabled=True, dm_content=dm_content, dm_embed="" if name != "FILE_FORMAT" else "*Defined at runtime.*", - infraction_type="", + infraction_type="NONE", infraction_reason="", infraction_duration=timedelta(seconds=0), - infraction_channel=None, + infraction_channel=0, disabled_channels=[], disabled_categories=(["CODE JAM"] if name in ("FILE_FORMAT", "GUILD_INVITE") else []), enabled_channels=[], @@ -110,7 +110,7 @@ class Migration(migrations.Migration): ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True, blank=True)), - ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The 
infraction to apply to this user.', max_length=10, null=True)), + ('infraction_type', models.CharField(choices=[('NONE', 'None'), ('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), @@ -133,12 +133,12 @@ class Migration(migrations.Migration): ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), - ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), - ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True, blank=True)), - ('infraction_type', models.CharField(choices=[('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), - ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), - ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), - ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), + ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, blank=True)), + ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, blank=True)), + ('infraction_type', models.CharField(choices=[('NONE', 'None'), ('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10)), + ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, blank=True)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
Null if permanent.')), + ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.")), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", size=None)), diff --git a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py index e7816e19..6fa99e9e 100644 --- a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py @@ -13,15 +13,15 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name='filter', constraint=models.UniqueConstraint(fields=( + 'content', + 'additional_field', + 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', - 'content', - 'additional_field', - 'filter_list', 'guild_pings', 'filter_dm', 'dm_pings', diff --git a/pydis_site/apps/api/migrations/0087_unique_filter_list.py b/pydis_site/apps/api/migrations/0087_unique_filter_list.py index 843bb00a..9db966fb 100644 --- a/pydis_site/apps/api/migrations/0087_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0087_unique_filter_list.py @@ -22,10 +22,10 @@ def create_unique_list(apps: Apps, _): enabled=True, dm_content="", dm_embed="", - infraction_type="", + infraction_type="NONE", infraction_reason="", infraction_duration=timedelta(seconds=0), - infraction_channel=None, + infraction_channel=0, disabled_channels=[], disabled_categories=[], enabled_channels=[], diff --git a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py index d425293f..354e4520 100644 --- a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py +++ b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py @@ -22,10 +22,10 @@ def create_antispam_list(apps: Apps, _): enabled=True, dm_content="", dm_embed="", - infraction_type="mute", + infraction_type="MUTE", infraction_reason="", infraction_duration=timedelta(seconds=600), - infraction_channel=None, + infraction_channel=0, disabled_channels=[], disabled_categories=["CODE JAM"], enabled_channels=[], diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 81b72c6e..7398f8a0 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -14,35 +14,43 @@ class FilterListType(models.IntegerChoices): DENY = 0 -class FilterSettingsMixin(models.Model): - """Mixin for common settings of a filters and filter lists.""" +class FilterList(models.Model): + """Represent a list in its allow or deny form.""" + name = models.CharField(max_length=50, help_text="The unique name of this list.") + list_type = models.IntegerField( + choices=FilterListType.choices, + help_text="Whether this list is an allowlist or denylist" + ) dm_content = models.CharField( max_length=1000, - null=True, + 
null=False, blank=True, help_text="The DM to send to a user triggering this filter." ) dm_embed = models.CharField( max_length=2000, help_text="The content of the DM embed", - null=True, + null=False, blank=True ) infraction_type = models.CharField( - choices=[(choices[0].upper(), choices[1]) for choices in Infraction.TYPE_CHOICES], + choices=[ + (choices[0].upper(), choices[1]) + for choices in [("NONE", "None"), *Infraction.TYPE_CHOICES] + ], max_length=10, - null=True, + null=False, help_text="The infraction to apply to this user." ) infraction_reason = models.CharField( max_length=1000, help_text="The reason to give for the infraction.", blank=True, - null=True + null=False ) infraction_duration = models.DurationField( - null=True, + null=False, help_text="The duration of the infraction. Null if permanent." ) infraction_channel = models.BigIntegerField( @@ -53,22 +61,7 @@ class FilterSettingsMixin(models.Model): ), ), help_text="Channel in which to send the infraction.", - null=True - ) - - class Meta: - """Metaclass for settings mixin.""" - - abstract = True - - -class FilterList(FilterSettingsMixin): - """Represent a list in its allow or deny form.""" - - name = models.CharField(max_length=50, help_text="The unique name of this list.") - list_type = models.IntegerField( - choices=FilterListType.choices, - help_text="Whether this list is an allowlist or denylist" + null=False ) guild_pings = ArrayField( models.CharField(max_length=100), @@ -126,7 +119,7 @@ class FilterList(FilterSettingsMixin): return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" -class FilterBase(FilterSettingsMixin): +class FilterBase(models.Model): """One specific trigger of a list.""" content = models.CharField(max_length=100, help_text="The definition of this filter.") @@ -139,6 +132,47 @@ class FilterBase(FilterSettingsMixin): FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." ) + dm_content = models.CharField( + max_length=1000, + null=True, + blank=True, + help_text="The DM to send to a user triggering this filter." + ) + dm_embed = models.CharField( + max_length=2000, + help_text="The content of the DM embed", + null=True, + blank=True + ) + infraction_type = models.CharField( + choices=[ + (choices[0].upper(), choices[1]) + for choices in [("NONE", "None"), *Infraction.TYPE_CHOICES] + ], + max_length=10, + null=True, + help_text="The infraction to apply to this user." + ) + infraction_reason = models.CharField( + max_length=1000, + help_text="The reason to give for the infraction.", + blank=True, + null=True + ) + infraction_duration = models.DurationField( + null=True, + help_text="The duration of the infraction. Null if permanent." + ) + infraction_channel = models.BigIntegerField( + validators=( + MinValueValidator( + limit_value=0, + message="Channel IDs cannot be negative." 
+ ), + ), + help_text="Channel in which to send the infraction.", + null=True + ) guild_pings = ArrayField( models.CharField(max_length=100), help_text="Who to ping when this filter triggers.", diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index aac8d06e..a902523e 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -144,8 +144,13 @@ class DocumentationLinkSerializer(ModelSerializer): # region: filters serializers - -REQUIRED_FOR_FILTER_LIST_SETTINGS = ( +SETTINGS_FIELDS = ( + 'dm_content', + 'dm_embed', + 'infraction_type', + 'infraction_reason', + 'infraction_duration', + 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', @@ -159,19 +164,9 @@ REQUIRED_FOR_FILTER_LIST_SETTINGS = ( 'disabled_categories', ) -OPTIONAL_FOR_FILTER_LIST_SETTINGS = ( - 'dm_content', - 'dm_embed', - 'infraction_type', - 'infraction_reason', - 'infraction_duration', - 'infraction_channel', -) - ALLOW_BLANK_SETTINGS = ( 'dm_content', 'dm_embed', - 'infraction_type', 'infraction_reason', ) @@ -211,8 +206,6 @@ CHANNEL_SCOPE_FIELDS = ( ) MENTIONS_FIELDS = ("guild_pings", "dm_pings") -SETTINGS_FIELDS = REQUIRED_FOR_FILTER_LIST_SETTINGS + OPTIONAL_FOR_FILTER_LIST_SETTINGS - def _create_filter_meta_extra_kwargs() -> dict[str, dict[str, bool]]: """Create the extra kwargs of the Filter serializer's Meta class.""" @@ -296,8 +289,6 @@ def _create_filter_list_meta_extra_kwargs() -> dict[str, dict[str, bool]]: extra_kwargs = {} for field in SETTINGS_FIELDS: field_args = {} - if field in OPTIONAL_FOR_FILTER_LIST_SETTINGS: - field_args = {'required': False, 'allow_null': True} if field in ALLOW_BLANK_SETTINGS: field_args['allow_blank'] = True if field in ALLOW_EMPTY_SETTINGS: -- cgit v1.2.3 From 649fbc4799082f6ad5d9f986c86ca37ae6fe859d Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 5 Nov 2022 15:20:14 +0200 Subject: Add creation and update timestamps to filtering models This is to support auto-infractions reporting (bot 7fcec400) --- pydis_site/apps/api/migrations/0085_new_filter_schema.py | 6 ++++++ pydis_site/apps/api/models/bot/filters.py | 7 ++++--- pydis_site/apps/api/serializers.py | 8 ++++---- 3 files changed, 14 insertions(+), 7 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index b0665ba5..d902be7f 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -60,6 +60,8 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: for object_ in objects: new_object = filter_.objects.create( content=object_.content, + created_at=object_.created_at, + updated_at=object_.updated_at, filter_list=list_, description=object_.comment, additional_field=None, @@ -99,6 +101,8 @@ class Migration(migrations.Migration): name='Filter', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), ('additional_field', models.JSONField(help_text='Implementation specific field.', null=True)), @@ -125,6 +129,8 @@ class Migration(migrations.Migration): name='FilterList', fields=[ 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), ('name', models.CharField(help_text='The unique name of this list.', max_length=50)), ('list_type', models.IntegerField(choices=[(1, 'Allow'), (0, 'Deny')], help_text='Whether this list is an allowlist or denylist')), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 7398f8a0..1ea21a48 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -5,6 +5,7 @@ from django.db.models import UniqueConstraint # Must be imported that way to avoid circular imports from .infraction import Infraction +from pydis_site.apps.api.models.mixins import ModelTimestampMixin, ModelReprMixin class FilterListType(models.IntegerChoices): @@ -14,7 +15,7 @@ class FilterListType(models.IntegerChoices): DENY = 0 -class FilterList(models.Model): +class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): """Represent a list in its allow or deny form.""" name = models.CharField(max_length=50, help_text="The unique name of this list.") @@ -119,7 +120,7 @@ class FilterList(models.Model): return f"Filter {FilterListType(self.list_type).label}list {self.name!r}" -class FilterBase(models.Model): +class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): """One specific trigger of a list.""" content = models.CharField(max_length=100, help_text="The definition of this filter.") @@ -247,7 +248,7 @@ class Filter(FilterBase): UniqueConstraint( fields=tuple( [field.name for field in FilterBase._meta.fields - if field.name != "id" and field.name != "description"] + if field.name not in ("id", "description", "created_at", "updated_at")] ), name="unique_filters"), ) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index a902523e..d6bae2cb 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -181,8 +181,8 @@ ALLOW_EMPTY_SETTINGS = ( ) # Required fields for custom JSON representation purposes -BASE_FILTER_FIELDS = ('id', 'content', 'description', 'additional_field') -BASE_FILTERLIST_FIELDS = ('id', 'name', 'list_type') +BASE_FILTER_FIELDS = ('id', 'created_at', 'updated_at', 'content', 'description', 'additional_field') +BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( "bypass_roles", "filter_dm", @@ -253,7 +253,7 @@ class FilterSerializer(ModelSerializer): model = Filter fields = ( - 'id', 'content', 'description', 'additional_field', 'filter_list' + 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field', 'filter_list' ) + SETTINGS_FIELDS extra_kwargs = _create_filter_meta_extra_kwargs() @@ -331,7 +331,7 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'name', 'list_type', 'filters') + SETTINGS_FIELDS + fields = ('id', 'created_at', 'updated_at', 'name', 'list_type', 'filters') + SETTINGS_FIELDS extra_kwargs = _create_filter_list_meta_extra_kwargs() # Ensure there can only be one filter list with the same name and type. 
-- cgit v1.2.3 From 5144f83c5cbd0571d1ca46f19da61860745d5f7e Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 6 Nov 2022 02:20:11 +0100 Subject: add thread_id column to the nomination table --- .../api/migrations/0085_add_thread_id_to_nomination.py | 18 ++++++++++++++++++ pydis_site/apps/api/models/bot/nomination.py | 5 +++++ 2 files changed, 23 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py new file mode 100644 index 00000000..cb216a62 --- /dev/null +++ b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.2 on 2022-11-05 23:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0084_infraction_last_applied'), + ] + + operations = [ + migrations.AddField( + model_name='nomination', + name='thread_id', + field=models.BigIntegerField(blank=True, help_text="The nomination vote thread's id", null=True), + ), + ] diff --git a/pydis_site/apps/api/models/bot/nomination.py b/pydis_site/apps/api/models/bot/nomination.py index 221d8534..e96177d3 100644 --- a/pydis_site/apps/api/models/bot/nomination.py +++ b/pydis_site/apps/api/models/bot/nomination.py @@ -35,6 +35,11 @@ class Nomination(ModelReprMixin, models.Model): default=False, help_text="Whether a review was made." ) + thread_id = models.BigIntegerField( + help_text="The nomination vote thread's id.", + null=True, + blank=True + ) def __str__(self): """Representation that makes the target and state of the nomination immediately evident.""" -- cgit v1.2.3 From bb160ae0700ef3c739b9c622c3b90a26e576ac96 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 6 Nov 2022 02:29:39 +0100 Subject: add thread_id to serializer's fields --- pydis_site/apps/api/serializers.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 9228c1f4..4303e7d0 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -432,7 +432,15 @@ class NominationSerializer(ModelSerializer): model = Nomination fields = ( - 'id', 'active', 'user', 'inserted_at', 'end_reason', 'ended_at', 'reviewed', 'entries' + 'id', + 'active', + 'user', + 'inserted_at', + 'end_reason', + 'ended_at', + 'reviewed', + 'entries', + 'thread_id' ) -- cgit v1.2.3 From bf348379b8c825a8c2025894caa43d9611442fb7 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:15:49 +0100 Subject: delete buggy migration --- .../api/migrations/0085_add_thread_id_to_nomination.py | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py deleted file mode 100644 index cb216a62..00000000 --- a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 4.1.2 on 2022-11-05 23:53 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0084_infraction_last_applied'), - ] - - operations 
= [ - migrations.AddField( - model_name='nomination', - name='thread_id', - field=models.BigIntegerField(blank=True, help_text="The nomination vote thread's id", null=True), - ), - ] -- cgit v1.2.3 From 5c2b8c2ccb234dad6f31e83144d2e7879889e8aa Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:18:03 +0100 Subject: fix grammar in thread_id's help text --- .../api/migrations/0085_add_thread_id_to_nomination.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py new file mode 100644 index 00000000..ee9707f0 --- /dev/null +++ b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.2 on 2022-11-07 17:16 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0084_infraction_last_applied'), + ] + + operations = [ + migrations.AddField( + model_name='nomination', + name='thread_id', + field=models.BigIntegerField(blank=True, help_text="The nomination vote's thread id.", null=True), + ), + ] -- cgit v1.2.3 From 8bc05d1c7def6b5558f554b0d5305bd346783583 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:20:10 +0100 Subject: push forgotten model update --- pydis_site/apps/api/models/bot/nomination.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/nomination.py b/pydis_site/apps/api/models/bot/nomination.py index e96177d3..9146c1d0 100644 --- a/pydis_site/apps/api/models/bot/nomination.py +++ b/pydis_site/apps/api/models/bot/nomination.py @@ -36,7 +36,7 @@ class Nomination(ModelReprMixin, models.Model): help_text="Whether a review was made." ) thread_id = models.BigIntegerField( - help_text="The nomination vote thread's id.", + help_text="The nomination vote's thread id.", null=True, blank=True ) -- cgit v1.2.3 From 1f22c1410afa7965b1f1f2e4dffd37f540bc0d5a Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:26:51 +0100 Subject: add validation criteria for thread_id modification --- pydis_site/apps/api/viewsets/bot/nomination.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index 6af42bcb..ecbf217e 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ -273,6 +273,11 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge {'reviewed': ['This field cannot be set while you are ending a nomination.']} ) + if 'thread_id' in request.data: + raise ValidationError( + {'thread_id': ['This field cannot be set when ending a nomination.']} + ) + instance.ended_at = timezone.now() elif 'active' in data: @@ -289,6 +294,13 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge {'reviewed': ['This field cannot be set if the nomination is inactive.']} ) + elif 'thread_id' in request.data: + # 5. We are altering the thread_id of the nomination. 
+ if not instance.active: + raise ValidationError( + {'thread_id': ['This field cannot be set if the nomination is inactive.']} + ) + if 'reason' in request.data: if 'actor' not in request.data: raise ValidationError( -- cgit v1.2.3 From af1a11d05f716c7891ae3b661c470d2bbac44ee6 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 18:28:24 +0100 Subject: add tests for new thread_id validation flow --- pydis_site/apps/api/tests/test_nominations.py | 32 +++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index 62b2314c..b3742cdd 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -524,3 +524,35 @@ class NominationTests(AuthenticatedAPITestCase): self.assertEqual(response.json(), { 'actor': ["The actor doesn't exist or has not nominated the user."] }) + + def test_patch_nomination_set_thread_id_of_active_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210} + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 200) + + def test_patch_nomination_set_thread_id_and_reviewed_of_active_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210, "reviewed": True} + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 200) + + def test_modifying_thread_id_when_ending_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.active_nomination.id,)) + data = {'thread_id': 9876543210, 'active': False, 'end_reason': "What?"} + + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json(), { + 'thread_id': ['This field cannot be set when ending a nomination.'] + }) + + def test_patch_thread_id_for_inactive_nomination(self): + url = reverse('api:bot:nomination-detail', args=(self.inactive_nomination.id,)) + data = {'thread_id': 9876543210} + + response = self.client.patch(url, data=data) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json(), { + 'thread_id': ['This field cannot be set if the nomination is inactive.'] + }) -- cgit v1.2.3 From d33310037146a7e8988a27238ff475b659ef625c Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 21:24:46 +0100 Subject: refactor nomination validation flow --- pydis_site/apps/api/viewsets/bot/nomination.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index ecbf217e..49c40e7c 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ -287,19 +287,15 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge ) # This is actually covered, but for some reason coverage don't think so. - elif 'reviewed' in request.data: # pragma: no cover - # 4. We are altering the reviewed state of the nomination. 
- if not instance.active: - raise ValidationError( - {'reviewed': ['This field cannot be set if the nomination is inactive.']} - ) + elif not instance.active and 'reviewed' in request.data: + raise ValidationError( + {'reviewed': ['This field cannot be set if the nomination is inactive.']} + ) - elif 'thread_id' in request.data: - # 5. We are altering the thread_id of the nomination. - if not instance.active: - raise ValidationError( - {'thread_id': ['This field cannot be set if the nomination is inactive.']} - ) + elif not instance.active and 'thread_id' in request.data: + raise ValidationError( + {'thread_id': ['This field cannot be set if the nomination is inactive.']} + ) if 'reason' in request.data: if 'actor' not in request.data: -- cgit v1.2.3 From 1411ad02a7b98c358b215191c7ff1b3846824c9d Mon Sep 17 00:00:00 2001 From: mbaruh Date: Mon, 7 Nov 2022 22:41:50 +0200 Subject: Disable everyone filter in code jam categories This is in line with what already existed. --- pydis_site/apps/api/migrations/0087_unique_filter_list.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0087_unique_filter_list.py b/pydis_site/apps/api/migrations/0087_unique_filter_list.py index 9db966fb..96c2b17a 100644 --- a/pydis_site/apps/api/migrations/0087_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0087_unique_filter_list.py @@ -43,6 +43,7 @@ def create_unique_list(apps: Apps, _): "Please don't try to ping `@everyone` or `@here`. Your message has been removed. " "If you believe this was a mistake, please let staff know!" ), + disabled_categories=["CODE JAM"] ) everyone.save() -- cgit v1.2.3 From ae180bb6a3028f9d0ec05ebfa940265004eb76cd Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Mon, 7 Nov 2022 22:07:32 +0100 Subject: remove useless coverage related commented --- pydis_site/apps/api/viewsets/bot/nomination.py | 1 - 1 file changed, 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py index 49c40e7c..78687e0e 100644 --- a/pydis_site/apps/api/viewsets/bot/nomination.py +++ b/pydis_site/apps/api/viewsets/bot/nomination.py @@ -286,7 +286,6 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge {'active': ['This field can only be used to end a nomination']} ) - # This is actually covered, but for some reason coverage don't think so. 
elif not instance.active and 'reviewed' in request.data: raise ValidationError( {'reviewed': ['This field cannot be set if the nomination is inactive.']} -- cgit v1.2.3 From c39ae63d407663f47bf2d824a259335234066801 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Wed, 9 Nov 2022 21:27:26 +0200 Subject: Rename delete_messages to the more generic remove_context --- pydis_site/apps/api/migrations/0085_new_filter_schema.py | 8 ++++---- .../apps/api/migrations/0086_unique_constraint_filters.py | 2 +- pydis_site/apps/api/migrations/0087_unique_filter_list.py | 8 ++++---- pydis_site/apps/api/migrations/0088_antispam_filter_list.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 8 ++++---- pydis_site/apps/api/serializers.py | 4 ++-- pydis_site/apps/api/tests/test_filters.py | 8 ++++---- pydis_site/apps/api/viewsets/bot/filters.py | 12 ++++++------ 8 files changed, 26 insertions(+), 26 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_new_filter_schema.py b/pydis_site/apps/api/migrations/0085_new_filter_schema.py index d902be7f..96d03bf4 100644 --- a/pydis_site/apps/api/migrations/0085_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0085_new_filter_schema.py @@ -41,7 +41,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: guild_pings=(["Moderators"] if name != "FILE_FORMAT" else []), filter_dm=True, dm_pings=[], - delete_messages=(True if name != "FILTER_TOKEN" else False), + remove_context=(True if name != "FILTER_TOKEN" else False), bypass_roles=["Helpers"], enabled=True, dm_content=dm_content, @@ -68,7 +68,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: guild_pings=None, filter_dm=None, dm_pings=None, - delete_messages=None, + remove_context=None, bypass_roles=None, enabled=None, dm_content=None, @@ -109,7 +109,7 @@ class Migration(migrations.Migration): ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), - ('delete_messages', models.BooleanField(help_text='Whether this filter should delete messages triggering it.', null=True)), + ('remove_context', models.BooleanField(help_text='Whether this filter should remove the context (such as a message) triggering it.', null=True)), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None, null=True)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.', null=True)), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, null=True, blank=True)), @@ -136,7 +136,7 @@ class Migration(migrations.Migration): ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.')), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None)), - ('delete_messages', 
models.BooleanField(help_text='Whether this filter should delete messages triggering it.')), + ('remove_context', models.BooleanField(help_text='Whether this filter should remove the context (such as a message) triggering it.')), ('bypass_roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Roles and users who can bypass this filter.', size=None)), ('enabled', models.BooleanField(help_text='Whether this filter is currently enabled.')), ('dm_content', models.CharField(help_text='The DM to send to a user triggering this filter.', max_length=1000, blank=True)), diff --git a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py index 6fa99e9e..b83e395c 100644 --- a/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0086_unique_constraint_filters.py @@ -25,7 +25,7 @@ class Migration(migrations.Migration): 'guild_pings', 'filter_dm', 'dm_pings', - 'delete_messages', + 'remove_context', 'bypass_roles', 'enabled', 'send_alert', diff --git a/pydis_site/apps/api/migrations/0087_unique_filter_list.py b/pydis_site/apps/api/migrations/0087_unique_filter_list.py index 96c2b17a..b8087d9c 100644 --- a/pydis_site/apps/api/migrations/0087_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0087_unique_filter_list.py @@ -17,7 +17,7 @@ def create_unique_list(apps: Apps, _): guild_pings=[], filter_dm=True, dm_pings=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=True, dm_content="", @@ -37,7 +37,7 @@ def create_unique_list(apps: Apps, _): content="everyone", filter_list=list_, description="", - delete_messages=True, + remove_context=True, bypass_roles=["Helpers"], dm_content=( "Please don't try to ping `@everyone` or `@here`. Your message has been removed. " @@ -51,7 +51,7 @@ def create_unique_list(apps: Apps, _): content="webhook", filter_list=list_, description="", - delete_messages=True, + remove_context=True, dm_content=( "Looks like you posted a Discord webhook URL. " "Therefore, your message has been removed, and your webhook has been deleted. " @@ -74,7 +74,7 @@ def create_unique_list(apps: Apps, _): content="discord_token", filter_list=list_, filter_dm=False, - delete_messages=True, + remove_context=True, dm_content=( "I noticed you posted a seemingly valid Discord API " "token in your message and have removed your message. 
" diff --git a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py index 354e4520..fcb56781 100644 --- a/pydis_site/apps/api/migrations/0088_antispam_filter_list.py +++ b/pydis_site/apps/api/migrations/0088_antispam_filter_list.py @@ -17,7 +17,7 @@ def create_antispam_list(apps: Apps, _): guild_pings=["Moderators"], filter_dm=False, dm_pings=[], - delete_messages=True, + remove_context=True, bypass_roles=["Helpers"], enabled=True, dm_content="", diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 1ea21a48..4d8a4025 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -75,8 +75,8 @@ class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Who to ping when this filter triggers on a DM.", null=False ) - delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it.", + remove_context = models.BooleanField( + help_text="Whether this filter should remove the context (such as a message) triggering it.", null=False ) bypass_roles = ArrayField( @@ -185,8 +185,8 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Who to ping when this filter triggers on a DM.", null=True ) - delete_messages = models.BooleanField( - help_text="Whether this filter should delete messages triggering it.", + remove_context = models.BooleanField( + help_text="Whether this filter should remove the context (such as a message) triggering it.", null=True ) bypass_roles = ArrayField( diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index d6bae2cb..eabca66e 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -154,7 +154,7 @@ SETTINGS_FIELDS = ( 'guild_pings', 'filter_dm', 'dm_pings', - 'delete_messages', + 'remove_context', 'send_alert', 'bypass_roles', 'enabled', @@ -187,7 +187,7 @@ BASE_SETTINGS_FIELDS = ( "bypass_roles", "filter_dm", "enabled", - "delete_messages", + "remove_context", "send_alert" ) INFRACTION_AND_NOTIFICATION_FIELDS = ( diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 5f40c6f9..f3afdaeb 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -49,7 +49,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: ping_type=[], filter_dm=False, dm_ping_type=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=False, default_action=FilterAction( @@ -76,7 +76,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "ping_type": ["onduty"], "filter_dm": True, "dm_ping_type": ["123456"], - "delete_messages": True, + "remove_context": True, "bypass_roles": [123456], "enabled": True, "default_action": FilterAction( @@ -130,7 +130,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: ping_type=[], filter_dm=False, dm_ping_type=[], - delete_messages=False, + remove_context=False, bypass_roles=[], enabled=False, default_action=FilterAction( @@ -157,7 +157,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: "ping_type": ["everyone"], "filter_dm": False, "dm_ping_type": ["here"], - "delete_messages": False, + "remove_context": False, "bypass_roles": [9876], "enabled": True, "filter_action": None, diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index dd9a7d87..1eb05053 100644 --- 
a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -36,7 +36,7 @@ class FilterListViewSet(ModelViewSet): ... "filter_dm": None, ... "enabled": None ... "send_alert": True, - ... "delete_messages": None + ... "remove_context": None ... "infraction_and_notification": { ... "infraction_type": None, ... "infraction_reason": "", @@ -64,7 +64,7 @@ class FilterListViewSet(ModelViewSet): ... ], ... "filter_dm": True, ... "enabled": True - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction_and_notification": { ... "infraction_type": "", @@ -111,7 +111,7 @@ class FilterListViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": None, + ... "remove_context": None, ... "send_alert": None ... "infraction_and_notification": { ... "infraction_type": None, @@ -140,7 +140,7 @@ class FilterListViewSet(ModelViewSet): ... ], ... "filter_dm": True, ... "enabled": True - ... "delete_messages": True + ... "remove_context": True ... "send_alert": True ... "infraction_and_notification": { ... "infraction_type": "", @@ -198,7 +198,7 @@ class FilterViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction": { ... "infraction_type": None, @@ -237,7 +237,7 @@ class FilterViewSet(ModelViewSet): ... "bypass_roles": None ... "filter_dm": None, ... "enabled": None - ... "delete_messages": True, + ... "remove_context": True, ... "send_alert": True ... "infraction": { ... "infraction_type": None, -- cgit v1.2.3 From 285f48f4e0fd903f296921640f710fe6459a7154 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sat, 12 Nov 2022 15:53:25 +0100 Subject: remove useless blank constraint This updates the migration as well --- .../api/migrations/0085_add_thread_id_to_nomination.py | 18 ------------------ .../migrations/0085_add_thread_id_to_nominations.py | 18 ++++++++++++++++++ pydis_site/apps/api/models/bot/nomination.py | 1 - 3 files changed, 18 insertions(+), 19 deletions(-) delete mode 100644 pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py create mode 100644 pydis_site/apps/api/migrations/0085_add_thread_id_to_nominations.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py deleted file mode 100644 index ee9707f0..00000000 --- a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nomination.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 4.1.2 on 2022-11-07 17:16 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0084_infraction_last_applied'), - ] - - operations = [ - migrations.AddField( - model_name='nomination', - name='thread_id', - field=models.BigIntegerField(blank=True, help_text="The nomination vote's thread id.", null=True), - ), - ] diff --git a/pydis_site/apps/api/migrations/0085_add_thread_id_to_nominations.py b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nominations.py new file mode 100644 index 00000000..56a24cc3 --- /dev/null +++ b/pydis_site/apps/api/migrations/0085_add_thread_id_to_nominations.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.2 on 2022-11-12 14:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', 
'0084_infraction_last_applied'), + ] + + operations = [ + migrations.AddField( + model_name='nomination', + name='thread_id', + field=models.BigIntegerField(help_text="The nomination vote's thread id.", null=True), + ), + ] diff --git a/pydis_site/apps/api/models/bot/nomination.py b/pydis_site/apps/api/models/bot/nomination.py index 9146c1d0..58e70a83 100644 --- a/pydis_site/apps/api/models/bot/nomination.py +++ b/pydis_site/apps/api/models/bot/nomination.py @@ -38,7 +38,6 @@ class Nomination(ModelReprMixin, models.Model): thread_id = models.BigIntegerField( help_text="The nomination vote's thread id.", null=True, - blank=True ) def __str__(self): -- cgit v1.2.3 From 09b69ba789be11fda24493fce671b5bc37912382 Mon Sep 17 00:00:00 2001 From: wookie184 Date: Thu, 17 Nov 2022 19:58:36 +0000 Subject: Include users with no messages in response, and simplify response format --- pydis_site/apps/api/tests/test_users.py | 4 ++-- pydis_site/apps/api/viewsets/bot/user.py | 19 ++++++++----------- 2 files changed, 10 insertions(+), 13 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index 9c0fa6ba..d86e80bb 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -505,7 +505,7 @@ class UserMetricityTests(AuthenticatedAPITestCase): def test_metricity_activity_data(self): # Given self.mock_no_metricity_user() # Other functions shouldn't be used. - self.metricity.total_messages_in_past_n_days.return_value = [[0, 10]] + self.metricity.total_messages_in_past_n_days.return_value = [(0, 10)] # When url = reverse("api:bot:user-metricity-activity-data") @@ -518,7 +518,7 @@ class UserMetricityTests(AuthenticatedAPITestCase): # Then self.assertEqual(response.status_code, 200) self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10) - self.assertEqual(response.json(), [{"id": 0, "message_count": 10}]) + self.assertEqual(response.json(), {"0": 10, "1": 0}) def test_metricity_activity_data_invalid_days(self): # Given diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index f803b3f6..db73a83c 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -140,10 +140,8 @@ class UserViewSet(ModelViewSet): - 404: if a user with the given `snowflake` could not be found ### POST /bot/users/metricity_activity_data - Gets the number of messages sent on the server in a given period. - - Users with no messages in the specified period or who do not - exist are not included in the result. + Returns a mapping of user ID to message count in a given period for + the given user IDs. #### Required Query Parameters - days: how many days into the past to count message from. @@ -155,9 +153,10 @@ class UserViewSet(ModelViewSet): ... ] #### Response format - >>> [ - ... {"id": 409107086526644234, "message_count": 54} - ... ] + >>> { + ... "409107086526644234": 54, + ... "493839819168808962": 0 + ... 
} #### Status codes - 200: returned on success @@ -351,8 +350,6 @@ class UserViewSet(ModelViewSet): with Metricity() as metricity: data = metricity.total_messages_in_past_n_days(user_ids, days) - response_data = [ - {"id": int(d[0]), "message_count": d[1]} - for d in data - ] + default_data = {user_id: 0 for user_id in user_ids} + response_data = default_data | dict(data) return Response(response_data, status=status.HTTP_200_OK) -- cgit v1.2.3 From d3eec93b36bd57c521e70b4001c74cb9756caf23 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 27 Jan 2023 22:19:28 +0200 Subject: Fix filter serializers validation to account for filterlist settings --- pydis_site/apps/api/serializers.py | 40 +++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 20 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index eabca66e..83ab4584 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -1,4 +1,6 @@ """Converters from Django models to data interchange formats and back.""" +from typing import Any + from django.db.models.query import QuerySet from django.db.utils import IntegrityError from rest_framework.exceptions import NotFound @@ -220,31 +222,29 @@ def _create_filter_meta_extra_kwargs() -> dict[str, dict[str, bool]]: return extra_kwargs +def get_field_value(data: dict, field_name: str) -> Any: + """Get the value directly from the key, or from the filter list if it's missing or is None.""" + if data.get(field_name): + return data[field_name] + return getattr(data["filter_list"], field_name) + + class FilterSerializer(ModelSerializer): """A class providing (de-)serialization of `Filter` instances.""" def validate(self, data: dict) -> dict: - """Perform infraction data + allow and disallowed lists validation.""" - if ( - data.get('infraction_reason') or data.get('infraction_duration') - ) and not data.get('infraction_type'): - raise ValidationError("Infraction type is required with infraction duration or reason") - + """Perform infraction data + allowed and disallowed lists validation.""" if ( - data.get('disabled_channels') is not None - and data.get('enabled_channels') is not None + (get_field_value(data, "infraction_reason") or get_field_value(data, "infraction_duration")) + and get_field_value(data, "infraction_type") == "NONE" ): - channels_collection = data['disabled_channels'] + data['enabled_channels'] - if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Enabled and Disabled channels lists contain duplicates.") + raise ValidationError("Infraction type is required with infraction duration or reason.") - if ( - data.get('disabled_categories') is not None - and data.get('enabled_categories') is not None - ): - categories_collection = data['disabled_categories'] + data['enabled_categories'] - if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Enabled and Disabled categories lists contain duplicates.") + if set(get_field_value(data, "disabled_channels")) & set(get_field_value(data, "enabled_channels")): + raise ValidationError("You can't have the same value in both enabled and disabled channels lists.") + + if set(get_field_value(data, "disabled_categories")) & set(get_field_value(data, "enabled_categories")): + raise ValidationError("You can't have the same value in both enabled and disabled categories lists.") return data @@ -318,8 +318,8 @@ class FilterListSerializer(ModelSerializer): raise 
ValidationError("Enabled and Disabled channels lists contain duplicates.") if ( - data.get('disabled_categories') is not None - and data.get('enabled_categories') is not None + data.get('disabled_categories') is not None + and data.get('enabled_categories') is not None ): categories_collection = data['disabled_categories'] + data['enabled_categories'] if len(categories_collection) != len(set(categories_collection)): -- cgit v1.2.3 From 3862e051407061186609dbeaab23ec53aeca2f94 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 27 Jan 2023 22:20:10 +0200 Subject: Update tests --- pydis_site/apps/api/models/__init__.py | 1 - pydis_site/apps/api/tests/test_filters.py | 309 ++++++++++++++++-------------- pydis_site/apps/api/tests/test_models.py | 3 - 3 files changed, 170 insertions(+), 143 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/__init__.py b/pydis_site/apps/api/models/__init__.py index 580c95a0..fee4c8d5 100644 --- a/pydis_site/apps/api/models/__init__.py +++ b/pydis_site/apps/api/models/__init__.py @@ -6,7 +6,6 @@ from .bot import ( BumpedThread, DocumentationLink, DeletedMessage, - FilterList, Infraction, Message, MessageDeletionContext, diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f3afdaeb..cae78cd6 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -1,19 +1,13 @@ import contextlib from dataclasses import dataclass +from datetime import timedelta from typing import Any, Dict, Tuple, Type from django.db.models import Model -from django_hosts import reverse +from django.urls import reverse -from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order - FilterList, - FilterSettings, - FilterAction, - ChannelRange, - Filter, - FilterOverride -) -from pydis_site.apps.api.tests.base import APISubdomainTestCase +from pydis_site.apps.api.models.bot.filters import FilterList, Filter +from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @dataclass() @@ -21,99 +15,76 @@ class TestSequence: model: Type[Model] route: str object: Dict[str, Any] - ignored_fields: Tuple[str] = () + ignored_fields: Tuple[str, ...] 
= () def url(self, detail: bool = False) -> str: - return reverse(f'bot:{self.route}-{"detail" if detail else "list"}', host='api') + return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}') -FK_FIELDS: Dict[Type[Model], Tuple[str]] = { - FilterList: ("default_settings",), - FilterSettings: ("default_action", "default_range"), - FilterAction: (), - ChannelRange: (), +FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = { + FilterList: (), Filter: ("filter_list",), - FilterOverride: ("filter_action", "filter_range") } def get_test_sequences() -> Dict[str, TestSequence]: + filter_list1_deny_dict = { + "name": "testname", + "list_type": 0, + "guild_pings": [], + "filter_dm": True, + "dm_pings": [], + "remove_context": False, + "bypass_roles": [], + "enabled": True, + "dm_content": "", + "dm_embed": "", + "infraction_type": "NONE", + "infraction_reason": "", + "infraction_duration": timedelta(seconds=0), + "infraction_channel": 0, + "disabled_channels": [], + "disabled_categories": [], + "enabled_channels": [], + "enabled_categories": [], + "send_alert": True + } + filter_list1_allow_dict = filter_list1_deny_dict.copy() + filter_list1_allow_dict["list_type"] = 1 + filter_list1_allow = FilterList(**filter_list1_allow_dict) + return { - "filter_list": TestSequence( + "filter_list1": TestSequence( FilterList, "filterlist", - { - "name": "testname", - "list_type": 0, - "default_settings": FilterSettings( - ping_type=[], - filter_dm=False, - dm_ping_type=[], - remove_context=False, - bypass_roles=[], - enabled=False, - default_action=FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - default_range=ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - ) - }, - ignored_fields=("filters",) + filter_list1_deny_dict, + ignored_fields=("filters", "created_at", "updated_at") ), - "filter_settings": TestSequence( - FilterSettings, - "filtersettings", + "filter_list2": TestSequence( + FilterList, + "filterlist", { - "ping_type": ["onduty"], - "filter_dm": True, - "dm_ping_type": ["123456"], + "name": "testname2", + "list_type": 1, + "guild_pings": ["Moderators"], + "filter_dm": False, + "dm_pings": ["here"], "remove_context": True, - "bypass_roles": [123456], - "enabled": True, - "default_action": FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - "default_range": ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - } - ), - "filter_action": TestSequence( - FilterAction, - "filteraction", - { - "dm_content": "This is a DM message.", - "infraction_type": "Mute", - "infraction_reason": "Too long beard", - "infraction_duration": "1 02:03:00" - } - ), - "channel_range": TestSequence( - ChannelRange, - "channelrange", - { - "disallowed_channels": [1234], - "disallowed_categories": [5678], - "allowed_channels": [9101], - "allowed_categories": [1121], - "default": True - } + "bypass_roles": ["123456"], + "enabled": False, + "dm_content": "testing testing", + "dm_embed": "one two three", + "infraction_type": "MUTE", + "infraction_reason": "stop testing", + "infraction_duration": timedelta(seconds=10.5), + "infraction_channel": 123, + "disabled_channels": ["python-general"], + "disabled_categories": ["CODE JAM"], + "enabled_channels": ["mighty-mice"], + "enabled_categories": ["Lobby"], + "send_alert": False + }, + 
ignored_fields=("filters", "created_at", "updated_at") ), "filter": TestSequence( Filter, @@ -121,58 +92,35 @@ def get_test_sequences() -> Dict[str, TestSequence]: { "content": "bad word", "description": "This is a really bad word.", - "additional_field": None, - "override": None, - "filter_list": FilterList( - name="testname", - list_type=0, - default_settings=FilterSettings( - ping_type=[], - filter_dm=False, - dm_ping_type=[], - remove_context=False, - bypass_roles=[], - enabled=False, - default_action=FilterAction( - dm_content=None, - infraction_type=None, - infraction_reason="", - infraction_duration=None - ), - default_range=ChannelRange( - disallowed_channels=[], - disallowed_categories=[], - allowed_channels=[], - allowed_categories=[], - default=False - ) - ) - ) - } + "additional_field": "{'hi': 'there'}", + "guild_pings": None, + "filter_dm": None, + "dm_pings": None, + "remove_context": None, + "bypass_roles": None, + "enabled": None, + "dm_content": None, + "dm_embed": None, + "infraction_type": None, + "infraction_reason": None, + "infraction_duration": None, + "infraction_channel": None, + "disabled_channels": None, + "disabled_categories": None, + "enabled_channels": None, + "enabled_categories": None, + "send_alert": None, + "filter_list": filter_list1_allow + }, + ignored_fields=("created_at", "updated_at") ), - "filter_override": TestSequence( - FilterOverride, - "filteroverride", - { - "ping_type": ["everyone"], - "filter_dm": False, - "dm_ping_type": ["here"], - "remove_context": False, - "bypass_roles": [9876], - "enabled": True, - "filter_action": None, - "filter_range": None - } - ) } def save_nested_objects(object_: Model, save_root: bool = True) -> None: - for field in FK_FIELDS[object_.__class__]: + for field in FK_FIELDS.get(object_.__class__, ()): value = getattr(object_, field) - - if value is not None: - save_nested_objects(value) + save_nested_objects(value) if save_root: object_.save() @@ -182,6 +130,8 @@ def clean_test_json(json: dict) -> dict: for key, value in json.items(): if isinstance(value, Model): json[key] = value.id + elif isinstance(value, timedelta): + json[key] = str(value.total_seconds()) return json @@ -194,7 +144,22 @@ def clean_api_json(json: dict, sequence: TestSequence) -> dict: return json -class GenericFilterTest(APISubdomainTestCase): +def flatten_settings(json: dict) -> dict: + settings = json.pop("settings", {}) + flattened_settings = {} + for entry, value in settings.items(): + if isinstance(value, dict): + flattened_settings.update(value) + else: + flattened_settings[entry] = value + + json.update(flattened_settings) + + return json + + +class GenericFilterTests(AuthenticatedAPITestCase): + def test_cannot_read_unauthenticated(self) -> None: for name, sequence in get_test_sequences().items(): with self.subTest(name=name): @@ -222,7 +187,7 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.get(sequence.url()) self.assertDictEqual( clean_test_json(sequence.object), - clean_api_json(response.json()[0], sequence) + clean_api_json(flatten_settings(response.json()[0]), sequence) ) def test_fetch_by_id(self) -> None: @@ -236,7 +201,7 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.get(f"{sequence.url()}/{saved.id}") self.assertDictEqual( clean_test_json(sequence.object), - clean_api_json(response.json(), sequence) + clean_api_json(flatten_settings(response.json()), sequence) ) def test_fetch_non_existing(self) -> None: @@ -259,14 +224,15 @@ class GenericFilterTest(APISubdomainTestCase): 
self.assertEqual(response.status_code, 201) self.assertDictEqual( - clean_api_json(response.json(), sequence), + clean_api_json(flatten_settings(response.json()), sequence), clean_test_json(sequence.object) ) def test_creation_missing_field(self) -> None: for name, sequence in get_test_sequences().items(): with self.subTest(name=name): - save_nested_objects(sequence.model(**sequence.object), False) + saved = sequence.model(**sequence.object) + save_nested_objects(saved) data = clean_test_json(sequence.object.copy()) for field in sequence.model._meta.get_fields(): @@ -296,3 +262,68 @@ class GenericFilterTest(APISubdomainTestCase): response = self.client.delete(f"{sequence.url()}/42") self.assertEqual(response.status_code, 404) + + +class FilterValidationTests(AuthenticatedAPITestCase): + + def test_filter_validation(self) -> None: + test_sequences = get_test_sequences() + base_filter = test_sequences["filter"] + base_filter_list = test_sequences["filter_list1"] + cases = ( + ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), + ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), + ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "MUTE"}, 200), + ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), + ({"enabled_categories": ["CODE JAM"]}, {}, 200), ({"disabled_categories": ["CODE JAM"]}, {}, 200), + ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, {}, 400), + ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, {}, 400), + ({"enabled_channels": ["admins"]}, {"disabled_channels": ["123", "admins"]}, 400), + ({"enabled_categories": ["admins"]}, {"disabled_categories": ["123", "admins"]}, 400), + ) + + for filter_settings, filter_list_settings, response_code in cases: + with self.subTest(f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code): + base_filter.model.objects.all().delete() + base_filter_list.model.objects.all().delete() + + case_filter_dict = base_filter.object.copy() + case_fl_dict = base_filter_list.object.copy() + case_fl_dict.update(filter_list_settings) + + case_fl = base_filter_list.model(**case_fl_dict) + case_filter_dict["filter_list"] = case_fl + case_filter = base_filter.model(**case_filter_dict) + save_nested_objects(case_filter) + + filter_settings["filter_list"] = case_fl + response = self.client.patch( + f"{base_filter.url()}/{case_filter.id}", data=clean_test_json(filter_settings) + ) + self.assertEqual(response.status_code, response_code) + + def test_filter_list_validation(self) -> None: + test_sequences = get_test_sequences() + base_filter_list = test_sequences["filter_list1"] + cases = ( + ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), + ({"infraction_duration": timedelta(seconds=10), "infraction_type": "MUTE"}, 200), + ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), + ({"enabled_categories": ["CODE JAM"]}, 200), ({"disabled_categories": ["CODE JAM"]}, 200), + ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, 400), + ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, 400), + ) + + for filter_list_settings, response_code in cases: + with self.subTest(fl_settings=filter_list_settings, response=response_code): + base_filter_list.model.objects.all().delete() + + case_fl_dict = 
base_filter_list.object.copy() + case_fl = base_filter_list.model(**case_fl_dict) + save_nested_objects(case_fl) + + response = self.client.patch( + f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) + ) + self.assertEqual(response.status_code, response_code) diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index b9b14a84..25d771cc 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -8,7 +8,6 @@ from pydis_site.apps.api.models import ( DocumentationLink, Filter, FilterList, - FilterSettings, Infraction, MessageDeletionContext, Nomination, @@ -110,13 +109,11 @@ class StringDunderMethodTests(SimpleTestCase): FilterList( name="forbidden_duckies", list_type=0, - default_settings=FilterSettings() ), Filter( content="ducky_nsfw", description="This ducky is totally inappropriate!", additional_field=None, - override=None ), OffensiveMessage( id=602951077675139072, -- cgit v1.2.3 From 5f538e9e876adc7f4a459fe230b46bdde61b3f64 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Fri, 27 Jan 2023 23:37:26 +0200 Subject: Make filter unique constraint use NULLS NOT DISTINCT The existing constraint was ineffective as null values were considered distinct, and so two filters with the same content and no overrides were considered different. This change uses a new PSQL 15 feature unsupported in django currently, and so it is added with raw SQL. --- .../migrations/0087_unique_constraint_filters.py | 39 ++++++++-------------- pydis_site/apps/api/serializers.py | 9 +++++ pydis_site/apps/api/tests/test_filters.py | 12 +++++++ 3 files changed, 34 insertions(+), 26 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py index b2ff91f1..910e7b1b 100644 --- a/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0087_unique_constraint_filters.py @@ -1,5 +1,3 @@ -# Generated by Django 3.1.14 on 2022-03-22 16:31 - from django.db import migrations, models @@ -10,29 +8,18 @@ class Migration(migrations.Migration): ] operations = [ - migrations.AddConstraint( - model_name='filter', - constraint=models.UniqueConstraint(fields=( - 'content', - 'additional_field', - 'filter_list', - 'dm_content', - 'dm_embed', - 'infraction_type', - 'infraction_reason', - 'infraction_duration', - 'infraction_channel', - 'guild_pings', - 'filter_dm', - 'dm_pings', - 'remove_context', - 'bypass_roles', - 'enabled', - 'send_alert', - 'enabled_channels', - 'disabled_channels', - 'enabled_categories', - 'disabled_categories' - ), name='unique_filters'), + migrations.RunSQL( + "ALTER TABLE api_filter " + "ADD CONSTRAINT unique_filters UNIQUE NULLS NOT DISTINCT " + "(content, additional_field, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", + state_operations=[ + migrations.AddConstraint( + model_name='filter', + constraint=models.UniqueConstraint( + fields=('content', 'additional_field', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 
'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), + name='unique_filters' + ), + ), + ], ), ] diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 7f9461ec..8da47802 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -257,6 +257,15 @@ class FilterSerializer(ModelSerializer): ) + SETTINGS_FIELDS extra_kwargs = _create_filter_meta_extra_kwargs() + def create(self, validated_data: dict) -> User: + """Override the create method to catch violations of the custom uniqueness constraint.""" + try: + return super().create(validated_data) + except IntegrityError: + raise ValidationError( + "Check if a filter with this combination of content and settings already exists in this filter list." + ) + def to_representation(self, instance: Filter) -> dict: """ Provides a custom JSON representation to the Filter Serializers. diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index cae78cd6..73c8e0d9 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -327,3 +327,15 @@ class FilterValidationTests(AuthenticatedAPITestCase): f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) ) self.assertEqual(response.status_code, response_code) + + def test_filter_unique_constraint(self) -> None: + test_filter = get_test_sequences()["filter"] + test_filter.model.objects.all().delete() + test_filter_object = test_filter.model(**test_filter.object) + save_nested_objects(test_filter_object, False) + + response = self.client.post(test_filter.url(), data=clean_test_json(test_filter.object)) + self.assertEqual(response.status_code, 201) + + response = self.client.post(test_filter.url(), data=clean_test_json(test_filter.object)) + self.assertEqual(response.status_code, 400) -- cgit v1.2.3 From d52a8c955aceccd719dd1511700aac9f2a564b0a Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 28 Jan 2023 16:24:58 +0200 Subject: Update viewsets, fix linting --- .../apps/api/migrations/0088_unique_filter_list.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 12 +- pydis_site/apps/api/serializers.py | 50 +- pydis_site/apps/api/tests/test_filters.py | 24 +- pydis_site/apps/api/viewsets/bot/filters.py | 532 ++++++++++++++------- 5 files changed, 428 insertions(+), 192 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_unique_filter_list.py b/pydis_site/apps/api/migrations/0088_unique_filter_list.py index 3f3a34bb..98d14e2b 100644 --- a/pydis_site/apps/api/migrations/0088_unique_filter_list.py +++ b/pydis_site/apps/api/migrations/0088_unique_filter_list.py @@ -99,4 +99,4 @@ class Migration(migrations.Migration): code=create_unique_list, reverse_code=None ), - ] \ No newline at end of file + ] diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 4d8a4025..1eab79ba 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -4,8 +4,8 @@ from django.db import models from django.db.models import UniqueConstraint # Must be imported that way to avoid circular imports +from pydis_site.apps.api.models.mixins import ModelReprMixin, ModelTimestampMixin from .infraction import Infraction -from pydis_site.apps.api.models.mixins import ModelTimestampMixin, ModelReprMixin class FilterListType(models.IntegerChoices): @@ -76,7 +76,10 @@ class 
FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): null=False ) remove_context = models.BooleanField( - help_text="Whether this filter should remove the context (such as a message) triggering it.", + help_text=( + "Whether this filter should remove the context (such as a message) " + "triggering it." + ), null=False ) bypass_roles = ArrayField( @@ -186,7 +189,10 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): null=True ) remove_context = models.BooleanField( - help_text="Whether this filter should remove the context (such as a message) triggering it.", + help_text=( + "Whether this filter should remove the context (such as a message) " + "triggering it." + ), null=True ) bypass_roles = ArrayField( diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 8da47802..a6328eff 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -21,9 +21,9 @@ from .models import ( BumpedThread, DeletedMessage, DocumentationLink, - Infraction, - FilterList, Filter, + FilterList, + Infraction, MessageDeletionContext, Nomination, NominationEntry, @@ -183,7 +183,9 @@ ALLOW_EMPTY_SETTINGS = ( ) # Required fields for custom JSON representation purposes -BASE_FILTER_FIELDS = ('id', 'created_at', 'updated_at', 'content', 'description', 'additional_field') +BASE_FILTER_FIELDS = ( + 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field' +) BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( "bypass_roles", @@ -235,16 +237,31 @@ class FilterSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allowed and disallowed lists validation.""" if ( - (get_field_value(data, "infraction_reason") or get_field_value(data, "infraction_duration")) + ( + get_field_value(data, "infraction_reason") + or get_field_value(data, "infraction_duration") + ) and get_field_value(data, "infraction_type") == "NONE" ): - raise ValidationError("Infraction type is required with infraction duration or reason.") + raise ValidationError( + "Infraction type is required with infraction duration or reason." + ) - if set(get_field_value(data, "disabled_channels")) & set(get_field_value(data, "enabled_channels")): - raise ValidationError("You can't have the same value in both enabled and disabled channels lists.") + if ( + set(get_field_value(data, "disabled_channels")) + & set(get_field_value(data, "enabled_channels")) + ): + raise ValidationError( + "You can't have the same value in both enabled and disabled channels lists." + ) - if set(get_field_value(data, "disabled_categories")) & set(get_field_value(data, "enabled_categories")): - raise ValidationError("You can't have the same value in both enabled and disabled categories lists.") + if ( + set(get_field_value(data, "disabled_categories")) + & set(get_field_value(data, "enabled_categories")) + ): + raise ValidationError( + "You can't have the same value in both enabled and disabled categories lists." 
+ ) return data @@ -253,7 +270,13 @@ class FilterSerializer(ModelSerializer): model = Filter fields = ( - 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field', 'filter_list' + 'id', + 'created_at', + 'updated_at', + 'content', + 'description', + 'additional_field', + 'filter_list' ) + SETTINGS_FIELDS extra_kwargs = _create_filter_meta_extra_kwargs() @@ -263,7 +286,8 @@ class FilterSerializer(ModelSerializer): return super().create(validated_data) except IntegrityError: raise ValidationError( - "Check if a filter with this combination of content and settings already exists in this filter list." + "Check if a filter with this combination of content " + "and settings already exists in this filter list." ) def to_representation(self, instance: Filter) -> dict: @@ -340,7 +364,9 @@ class FilterListSerializer(ModelSerializer): """Metadata defined for the Django REST Framework.""" model = FilterList - fields = ('id', 'created_at', 'updated_at', 'name', 'list_type', 'filters') + SETTINGS_FIELDS + fields = ( + 'id', 'created_at', 'updated_at', 'name', 'list_type', 'filters' + ) + SETTINGS_FIELDS extra_kwargs = _create_filter_list_meta_extra_kwargs() # Ensure there can only be one filter list with the same name and type. diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 73c8e0d9..62de23c4 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -6,7 +6,7 @@ from typing import Any, Dict, Tuple, Type from django.db.models import Model from django.urls import reverse -from pydis_site.apps.api.models.bot.filters import FilterList, Filter +from pydis_site.apps.api.models.bot.filters import Filter, FilterList from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @@ -271,11 +271,14 @@ class FilterValidationTests(AuthenticatedAPITestCase): base_filter = test_sequences["filter"] base_filter_list = test_sequences["filter_list1"] cases = ( - ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), + ({"infraction_reason": "hi"}, {}, 400), + ({"infraction_duration": timedelta(seconds=10)}, {}, 400), ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "MUTE"}, 200), - ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), - ({"enabled_categories": ["CODE JAM"]}, {}, 200), ({"disabled_categories": ["CODE JAM"]}, {}, 200), + ({"enabled_channels": ["admins"]}, {}, 200), + ({"disabled_channels": ["123"]}, {}, 200), + ({"enabled_categories": ["CODE JAM"]}, {}, 200), + ({"disabled_categories": ["CODE JAM"]}, {}, 200), ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, {}, 400), ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, {}, 400), ({"enabled_channels": ["admins"]}, {"disabled_channels": ["123", "admins"]}, 400), @@ -283,7 +286,9 @@ class FilterValidationTests(AuthenticatedAPITestCase): ) for filter_settings, filter_list_settings, response_code in cases: - with self.subTest(f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code): + with self.subTest( + f_settings=filter_settings, fl_settings=filter_list_settings, response=response_code + ): base_filter.model.objects.all().delete() base_filter_list.model.objects.all().delete() @@ -306,11 +311,13 @@ class FilterValidationTests(AuthenticatedAPITestCase): test_sequences = 
get_test_sequences() base_filter_list = test_sequences["filter_list1"] cases = ( - ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_reason": "hi"}, 400), + ({"infraction_duration": timedelta(seconds=10)}, 400), ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10), "infraction_type": "MUTE"}, 200), ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), - ({"enabled_categories": ["CODE JAM"]}, 200), ({"disabled_categories": ["CODE JAM"]}, 200), + ({"enabled_categories": ["CODE JAM"]}, 200), + ({"disabled_categories": ["CODE JAM"]}, 200), ({"enabled_channels": ["admins"], "disabled_channels": ["123", "admins"]}, 400), ({"enabled_categories": ["admins"], "disabled_categories": ["123", "admins"]}, 400), ) @@ -324,7 +331,8 @@ class FilterValidationTests(AuthenticatedAPITestCase): save_nested_objects(case_fl) response = self.client.patch( - f"{base_filter_list.url()}/{case_fl.id}", data=clean_test_json(filter_list_settings) + f"{base_filter_list.url()}/{case_fl.id}", + data=clean_test_json(filter_list_settings) ) self.assertEqual(response.status_code, response_code) diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 1eb05053..8e677612 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -22,69 +22,79 @@ class FilterListViewSet(ModelViewSet): >>> [ ... { ... "id": 1, - ... "name": "invites", + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", ... "list_type": 1, ... "filters": [ ... { ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "filter_list": 1 + ... "filter_list": 1, ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "send_alert": True, - ... "remove_context": None - ... "infraction_and_notification": { - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None - ... "dm_content": None, - ... "dm_embed": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } - ... + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... }, ... ... ... ], - ... "settings": { - ... "bypass_roles": [ - ... "staff" - ... ], - ... "filter_dm": True, - ... "enabled": True - ... "remove_context": True, - ... "send_alert": True - ... "infraction_and_notification": { - ... "infraction_type": "", - ... "infraction_reason": "", - ... "infraction_duration": "0.0", - ... "dm_content": "", - ... "dm_embed": "" - ... 
} - ... "channel_scope": { + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { ... "disabled_channels": [], - ... "disabled_categories": [], - ... "enabled_channels": [] - ... } - ... "mentions": { - ... "ping_type": [ - ... "onduty" - ... ] - ... "dm_ping_type": [] - ... } - ... }, + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... }, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } + ... }, ... ... ... ] @@ -97,75 +107,205 @@ class FilterListViewSet(ModelViewSet): #### Response format >>> { - ... "id": 1, - ... "name": "invites", - ... "list_type": 1, - ... "filters": [ - ... { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": None, - ... "send_alert": None - ... "infraction_and_notification": { - ... "infraction_type": None, - ... "infraction_reason": "", - ... "infraction_duration": None - ... "dm_content": None, - ... "dm_embed": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } - ... - ... }, - ... ... + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", + ... "list_type": 1, + ... "filters": [ + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } + ... }, + ... ... + ... ], + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" ... ], - ... "settings": { - ... "bypass_roles": [ - ... "staff" - ... ], - ... "filter_dm": True, - ... "enabled": True - ... "remove_context": True - ... "send_alert": True - ... "infraction_and_notification": { - ... "infraction_type": "", - ... "infraction_reason": "", - ... "infraction_duration": "0.0", - ... "dm_content": "", - ... "dm_embed": "" - ... } - ... "channel_scope": { - ... "disabled_channels": [], - ... "disabled_categories": [], - ... "enabled_channels": [] - ... } - ... "mentions": { - ... 
"ping_type": [ - ... "onduty" - ... ] - ... "dm_ping_type": [] - ... } + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... }, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } ... } #### Status codes - 200: returned on success - 404: returned if the id was not found. + ### POST /bot/filter/filter_lists + Adds a single FilterList item to the database. + + #### Request body + >>> { + ... "name": "invite", + ... "list_type": 1, + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "", + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + + #### Status codes + - 201: returned on success + - 400: if one of the given fields is invalid + + ### PATCH /bot/filter/filter_lists/ + Updates a specific FilterList item from the database. + + #### Response format + >>> { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.027293Z", + ... "updated_at": "2023-01-27T21:26:34.027308Z", + ... "name": "invite", + ... "list_type": 1, + ... "filters": [ + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } + ... }, + ... ... + ... ], + ... "settings": { + ... "bypass_roles": [ + ... "Helpers" + ... ], + ... "filter_dm": True, + ... "enabled": True, + ... "remove_context": True, + ... "send_alert": True, + ... "infraction_and_notification": { + ... "infraction_type": "NONE", + ... "infraction_reason": "", + ... "infraction_duration": "0.0", + ... "infraction_channel": 0, + ... "dm_content": "Per Rule 6, your invite link has been removed...", + ... "dm_embed": "" + ... }, + ... "channel_scope": { + ... "disabled_channels": [], + ... "disabled_categories": [ + ... "CODE JAM" + ... ], + ... "enabled_channels": [], + ... "enabled_categories": [] + ... 
}, + ... "mentions": { + ... "guild_pings": [ + ... "Moderators" + ... ], + ... "dm_pings": [] + ... } + ... } + ... } + + #### Status codes + - 200: returned on success + - 400: if one of the given fields is invalid + ### DELETE /bot/filter/filter_lists/ Deletes the FilterList item with the given `id`. @@ -188,33 +328,39 @@ class FilterViewSet(ModelViewSet): #### Response format >>> [ - ... { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": True, - ... "send_alert": True - ... "infraction": { - ... "infraction_type": None, - ... "infraction_reason": None, - ... "infraction_duration": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None - ... } - ... "mentions": { - ... "ping_type": None, - ... "dm_ping_type": None - ... } - ... } + ... { + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... }, ... ... ... ] @@ -228,32 +374,38 @@ class FilterViewSet(ModelViewSet): #### Response format >>> { - ... "id": 1, - ... "filter_list": 1 - ... "content": "267624335836053506", - ... "description": "Python Discord", - ... "additional_field": None, - ... "settings": { - ... "bypass_roles": None - ... "filter_dm": None, - ... "enabled": None - ... "remove_context": True, - ... "send_alert": True - ... "infraction": { - ... "infraction_type": None, - ... "infraction_reason": None, - ... "infraction_duration": None - ... }, - ... "channel_scope": { - ... "disabled_channels": None, - ... "disabled_categories": None, - ... "enabled_channels": None, - ... } - ... "mentions": { - ... "ping_type": None - ... "dm_ping_type": None - ... } - ... } + ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", + ... "content": "267624335836053506", + ... "description": "Python Discord", + ... "additional_field": None, + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... 
"dm_pings": None + ... } + ... } ... } #### Status codes @@ -265,10 +417,27 @@ class FilterViewSet(ModelViewSet): #### Request body >>> { + ... "filter_list": 1, ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": False, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... "guild_pings": None, + ... "dm_pings": None ... } #### Status codes @@ -281,10 +450,37 @@ class FilterViewSet(ModelViewSet): #### Response format >>> { ... "id": 1, + ... "created_at": "2023-01-27T21:26:34.029539Z", + ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", ... "additional_field": None, - ... "override": 1 + ... "filter_list": 1, + ... "settings": { + ... "bypass_roles": None, + ... "filter_dm": None, + ... "enabled": None, + ... "remove_context": None, + ... "send_alert": None, + ... "infraction_and_notification": { + ... "infraction_type": None, + ... "infraction_reason": None, + ... "infraction_duration": None, + ... "infraction_channel": None, + ... "dm_content": None, + ... "dm_embed": None + ... }, + ... "channel_scope": { + ... "disabled_channels": None, + ... "disabled_categories": None, + ... "enabled_channels": None, + ... "enabled_categories": None + ... }, + ... "mentions": { + ... "guild_pings": None, + ... "dm_pings": None + ... } + ... } ... 
} #### Status codes -- cgit v1.2.3 From 4eecda92e16ffe97fabb5d2e07790357140f7bbb Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 11 Feb 2023 13:32:46 +0200 Subject: Fix documentation and import Co-authored-by: wookie184 --- pydis_site/apps/api/migrations/0086_new_filter_schema.py | 8 ++++---- pydis_site/apps/api/models/bot/filters.py | 5 ++--- pydis_site/apps/api/viewsets/bot/filters.py | 4 ++-- 3 files changed, 8 insertions(+), 9 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0086_new_filter_schema.py b/pydis_site/apps/api/migrations/0086_new_filter_schema.py index 9067a380..5da3a3b1 100644 --- a/pydis_site/apps/api/migrations/0086_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0086_new_filter_schema.py @@ -1,4 +1,4 @@ -# Modified migration file to migrate existing filters to the new one +"""Modified migration file to migrate existing filters to the new system.""" from datetime import timedelta import django.contrib.postgres.fields @@ -8,7 +8,7 @@ from django.db import migrations, models import django.db.models.deletion from django.db.backends.base.schema import BaseDatabaseSchemaEditor -import pydis_site.apps.api.models.bot.filters +import pydis_site.apps.api.models OLD_LIST_NAMES = (('GUILD_INVITE', True), ('GUILD_INVITE', False), ('FILE_FORMAT', True), ('DOMAIN_NAME', False), ('FILTER_TOKEN', False), ('REDIRECT', False)) change_map = { @@ -116,7 +116,7 @@ class Migration(migrations.Migration): ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, null=True, blank=True)), ('infraction_type', models.CharField(choices=[('NONE', 'None'), ('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10, null=True)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), - ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.', null=True)), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
0 for permanent.', null=True)), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", null=True, size=None)), @@ -143,7 +143,7 @@ class Migration(migrations.Migration): ('dm_embed', models.CharField(help_text='The content of the DM embed', max_length=2000, blank=True)), ('infraction_type', models.CharField(choices=[('NONE', 'None'), ('NOTE', 'Note'), ('WARNING', 'Warning'), ('WATCH', 'Watch'), ('MUTE', 'Mute'), ('KICK', 'Kick'), ('BAN', 'Ban'), ('SUPERSTAR', 'Superstar'), ('VOICE_BAN', 'Voice Ban'), ('VOICE_MUTE', 'Voice Mute')], help_text='The infraction to apply to this user.', max_length=10)), ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, blank=True)), - ('infraction_duration', models.DurationField(help_text='The duration of the infraction. Null if permanent.')), + ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 0 for permanent.')), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.")), ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 1eab79ba..584ee726 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -52,7 +52,7 @@ class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): ) infraction_duration = models.DurationField( null=False, - help_text="The duration of the infraction. Null if permanent." + help_text="The duration of the infraction. 0 for permanent." ) infraction_channel = models.BigIntegerField( validators=( @@ -165,7 +165,7 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): ) infraction_duration = models.DurationField( null=True, - help_text="The duration of the infraction. Null if permanent." + help_text="The duration of the infraction. 0 for permanent." ) infraction_channel = models.BigIntegerField( validators=( @@ -209,7 +209,6 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): null=True ) - # Check FilterList model for information about these properties. 
enabled_channels = ArrayField( models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index 8e677612..c84da909 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -311,7 +311,7 @@ class FilterListViewSet(ModelViewSet): #### Status codes - 204: returned on success - - 404: if a tag with the given `id` does not exist + - 404: if a FilterList with the given `id` does not exist """ serializer_class = FilterListSerializer @@ -492,7 +492,7 @@ class FilterViewSet(ModelViewSet): #### Status codes - 204: returned on success - - 404: if a tag with the given `id` does not exist + - 404: if a Filter with the given `id` does not exist """ serializer_class = FilterSerializer -- cgit v1.2.3 From be854fa3d34dac7b4b9e96b3736dd61d972f1b79 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 11 Feb 2023 13:45:31 +0200 Subject: Fix filter serializers for false-y values Co-authored-by: GDWR --- pydis_site/apps/api/serializers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index a6328eff..f4d64ad0 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -226,7 +226,7 @@ def _create_filter_meta_extra_kwargs() -> dict[str, dict[str, bool]]: def get_field_value(data: dict, field_name: str) -> Any: """Get the value directly from the key, or from the filter list if it's missing or is None.""" - if data.get(field_name): + if data.get(field_name) is not None: return data[field_name] return getattr(data["filter_list"], field_name) -- cgit v1.2.3 From 8a954029a2f0f22cde599afee3ff8195680e621e Mon Sep 17 00:00:00 2001 From: vivekashok1221 Date: Fri, 17 Feb 2023 11:40:29 +0530 Subject: Add jump_url field to infraction model --- .../apps/api/migrations/0086_infraction_jump_url.py | 18 ++++++++++++++++++ pydis_site/apps/api/models/bot/infraction.py | 8 ++++++++ pydis_site/apps/api/serializers.py | 3 ++- pydis_site/apps/api/viewsets/bot/infraction.py | 8 +++++--- 4 files changed, 33 insertions(+), 4 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0086_infraction_jump_url.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0086_infraction_jump_url.py b/pydis_site/apps/api/migrations/0086_infraction_jump_url.py new file mode 100644 index 00000000..e32219c8 --- /dev/null +++ b/pydis_site/apps/api/migrations/0086_infraction_jump_url.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.6 on 2023-02-13 22:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0085_add_thread_id_to_nominations'), + ] + + operations = [ + migrations.AddField( + model_name='infraction', + name='jump_url', + field=models.CharField(default='', help_text='The jump url to message invoking the infraction.', max_length=88), + ), + ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index 218ee5ec..ea0277c3 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -69,6 +69,14 @@ class Infraction(ModelReprMixin, models.Model): help_text="Whether a DM was sent to the user when infraction was applied." 
) + jump_url = models.CharField( + default='', + max_length=88, + help_text=( + "The jump url to message invoking the infraction." + ) + ) + def __str__(self): """Returns some info on the current infraction, for display purposes.""" s = f"#{self.id}: {self.type} on {self.user_id}" diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index 4303e7d0..e74ca102 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -184,7 +184,8 @@ class InfractionSerializer(ModelSerializer): 'type', 'reason', 'hidden', - 'dm_sent' + 'dm_sent', + 'jump_url' ) def validate(self, attrs: dict) -> dict: diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 93d29391..9c21733b 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -72,7 +72,8 @@ class InfractionViewSet( ... 'type': 'ban', ... 'reason': 'He terk my jerb!', ... 'hidden': True, - ... 'dm_sent': True + ... 'dm_sent': True, + ... 'jump_url': '' ... } ... ] @@ -103,7 +104,8 @@ class InfractionViewSet( ... 'type': 'ban', ... 'reason': 'He terk my jerb!', ... 'user': 172395097705414656, - ... 'dm_sent': False + ... 'dm_sent': False, + ... 'jump_url': ''x ... } #### Response format @@ -138,7 +140,7 @@ class InfractionViewSet( #### Status codes - 204: returned on success - - 404: if a infraction with the given `id` does not exist + - 404: if an infraction with the given `id` does not exist ### Expanded routes All routes support expansion of `user` and `actor` in responses. To use an expanded route, -- cgit v1.2.3 From 19afe0de583da9e90ff8627a6ece8503cd736136 Mon Sep 17 00:00:00 2001 From: vivekashok1221 Date: Fri, 17 Feb 2023 11:47:42 +0530 Subject: Remove typo --- pydis_site/apps/api/viewsets/bot/infraction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py index 9c21733b..ec8b83a1 100644 --- a/pydis_site/apps/api/viewsets/bot/infraction.py +++ b/pydis_site/apps/api/viewsets/bot/infraction.py @@ -105,7 +105,7 @@ class InfractionViewSet( ... 'reason': 'He terk my jerb!', ... 'user': 172395097705414656, ... 'dm_sent': False, - ... 'jump_url': ''x + ... 'jump_url': '' ... 
} #### Response format -- cgit v1.2.3 From 548131617b93482c9591e4d5b9ebe78aed36d88b Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 4 Mar 2023 20:04:41 +0200 Subject: Migrate infraction type `mute` to `timeout` --- .../api/migrations/0086_alter_mute_to_timeout.py | 25 ++++++++++++++++++++++ pydis_site/apps/api/models/bot/infraction.py | 2 +- pydis_site/apps/api/tests/test_infractions.py | 24 ++++++++++----------- 3 files changed, 38 insertions(+), 13 deletions(-) create mode 100644 pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py b/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py new file mode 100644 index 00000000..8eb3ff6d --- /dev/null +++ b/pydis_site/apps/api/migrations/0086_alter_mute_to_timeout.py @@ -0,0 +1,25 @@ +from django.apps.registry import Apps +from django.db import migrations, models + +import pydis_site.apps.api.models + + +def rename_type(apps: Apps, _) -> None: + infractions: pydis_site.apps.api.models.Infraction = apps.get_model("api", "Infraction") + infractions.objects.filter(type="mute").update(type="timeout") + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0085_add_thread_id_to_nominations'), + ] + + operations = [ + migrations.AlterField( + model_name='infraction', + name='type', + field=models.CharField(choices=[('note', 'Note'), ('warning', 'Warning'), ('watch', 'Watch'), ('timeout', 'Timeout'), ('kick', 'Kick'), ('ban', 'Ban'), ('superstar', 'Superstar'), ('voice_ban', 'Voice Ban'), ('voice_mute', 'Voice Mute')], help_text='The type of the infraction.', max_length=10), + ), + migrations.RunPython(rename_type, migrations.RunPython.noop) + ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index 218ee5ec..fcf8651e 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -12,7 +12,7 @@ class Infraction(ModelReprMixin, models.Model): ("note", "Note"), ("warning", "Warning"), ("watch", "Watch"), - ("mute", "Mute"), + ("timeout", "Timeout"), ("kick", "Kick"), ("ban", "Ban"), ("superstar", "Superstar"), diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index 89ee4e23..ceb5591b 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -68,10 +68,10 @@ class InfractionTests(AuthenticatedAPITestCase): active=False, inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc), ) - cls.mute_permanent = Infraction.objects.create( + cls.timeout_permanent = Infraction.objects.create( user_id=cls.user.id, actor_id=cls.user.id, - type='mute', + type='timeout', reason='He has a filthy mouth and I am his soap.', active=True, inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc), @@ -107,7 +107,7 @@ class InfractionTests(AuthenticatedAPITestCase): self.assertEqual(len(infractions), 5) self.assertEqual(infractions[0]['id'], self.voiceban_expires_later.id) self.assertEqual(infractions[1]['id'], self.superstar_expires_soon.id) - self.assertEqual(infractions[2]['id'], self.mute_permanent.id) + self.assertEqual(infractions[2]['id'], self.timeout_permanent.id) self.assertEqual(infractions[3]['id'], self.ban_inactive.id) self.assertEqual(infractions[4]['id'], self.ban_hidden.id) @@ -134,7 +134,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_permanent_false(self): url = 
reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?type=mute&permanent=false') + response = self.client.get(f'{url}?type=timeout&permanent=false') self.assertEqual(response.status_code, 200) infractions = response.json() @@ -143,12 +143,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_permanent_true(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?type=mute&permanent=true') + response = self.client.get(f'{url}?type=timeout&permanent=true') self.assertEqual(response.status_code, 200) infractions = response.json() - self.assertEqual(infractions[0]['id'], self.mute_permanent.id) + self.assertEqual(infractions[0]['id'], self.timeout_permanent.id) def test_filter_after(self): url = reverse('api:bot:infraction-list') @@ -241,7 +241,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_manytypes(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?types=mute,ban') + response = self.client.get(f'{url}?types=timeout,ban') self.assertEqual(response.status_code, 200) infractions = response.json() @@ -249,7 +249,7 @@ class InfractionTests(AuthenticatedAPITestCase): def test_types_type_invalid(self): url = reverse('api:bot:infraction-list') - response = self.client.get(f'{url}?types=mute,ban&type=superstar') + response = self.client.get(f'{url}?types=timeout,ban&type=superstar') self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -519,7 +519,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_for_second_active_infraction_of_the_same_type(self): """Test if the API rejects a second active infraction of the same type for a given user.""" url = reverse('api:bot:infraction-list') - active_infraction_types = ('mute', 'ban', 'superstar') + active_infraction_types = ('timeout', 'ban', 'superstar') for infraction_type in active_infraction_types: with self.subTest(infraction_type=infraction_type): @@ -562,7 +562,7 @@ class CreationTests(AuthenticatedAPITestCase): first_active_infraction = { 'user': self.user.id, 'actor': self.user.id, - 'type': 'mute', + 'type': 'timeout', 'reason': 'Be silent!', 'hidden': True, 'active': True, @@ -649,9 +649,9 @@ class CreationTests(AuthenticatedAPITestCase): Infraction.objects.create( user=self.user, actor=self.user, - type="mute", + type="timeout", active=True, - reason="The first active mute" + reason="The first active timeout" ) def test_unique_constraint_accepts_active_infractions_for_different_users(self): -- cgit v1.2.3 From ed3577652d620057cc87bbc74775548066cfdc8f Mon Sep 17 00:00:00 2001 From: vivekashok1221 Date: Fri, 10 Mar 2023 23:11:35 +0530 Subject: Replace CharField with URLField and set default to None --- pydis_site/apps/api/migrations/0086_infraction_jump_url.py | 4 ++-- pydis_site/apps/api/models/bot/infraction.py | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0086_infraction_jump_url.py b/pydis_site/apps/api/migrations/0086_infraction_jump_url.py index e32219c8..7ae65751 100644 --- a/pydis_site/apps/api/migrations/0086_infraction_jump_url.py +++ b/pydis_site/apps/api/migrations/0086_infraction_jump_url.py @@ -1,4 +1,4 @@ -# Generated by Django 4.1.6 on 2023-02-13 22:23 +# Generated by Django 4.1.7 on 2023-03-10 17:25 from django.db import migrations, models @@ -13,6 +13,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='infraction', name='jump_url', - 
field=models.CharField(default='', help_text='The jump url to message invoking the infraction.', max_length=88), + field=models.URLField(default=None, help_text='The jump url to message invoking the infraction.', max_length=88, null=True), ), ] diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index ea0277c3..660f1cb4 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -69,8 +69,9 @@ class Infraction(ModelReprMixin, models.Model): help_text="Whether a DM was sent to the user when infraction was applied." ) - jump_url = models.CharField( - default='', + jump_url = models.URLField( + default=None, + null=True, max_length=88, help_text=( "The jump url to message invoking the infraction." -- cgit v1.2.3 From ca8a7a6f06db8e73a41f4bcae19b704b11c2c98f Mon Sep 17 00:00:00 2001 From: shtlrs Date: Tue, 21 Mar 2023 16:03:57 +0100 Subject: add rule 10 to the list --- pydis_site/apps/api/views.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 34167a38..20431a61 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -171,6 +171,10 @@ class RulesView(APIView): "Do not offer or ask for paid work of any kind.", ["paid", "work", "money"] ), + ( + "Do not copy and paste answers from ChatGPT or similar AI tools.", + ["gpt", "chatgpt", "gpt3", "ai"] + ), ]) -- cgit v1.2.3 From 138506045acd1cf464dc63d4465d36208801a3ab Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Mar 2023 18:15:56 +0300 Subject: Make the unique constraint reversible Co-authored-by: Amrou --- pydis_site/apps/api/migrations/0089_unique_constraint_filters.py | 1 + 1 file changed, 1 insertion(+) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py index d6f32342..0bcfd8a3 100644 --- a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py @@ -12,6 +12,7 @@ class Migration(migrations.Migration): "ALTER TABLE api_filter " "ADD CONSTRAINT unique_filters UNIQUE NULLS NOT DISTINCT " "(content, additional_field, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", + reverse_sql="ALTER TABLE api_filter DROP CONSTRAINT unique_filters", state_operations=[ migrations.AddConstraint( model_name='filter', -- cgit v1.2.3 From d6565845ec07b9d76949da36505b3f53e402d230 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Mar 2023 18:29:31 +0300 Subject: Documentation improvements and fixes Co-authored-by: Amrou --- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- pydis_site/apps/api/migrations/0091_antispam_filter_list.py | 2 +- pydis_site/apps/api/models/bot/filters.py | 5 +++-- 3 files changed, 6 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 46756781..1506e4d7 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -118,7 +118,7 @@ class 
Migration(migrations.Migration): ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, null=True, blank=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 0 for permanent.', null=True)), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.", null=True)), - ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", null=True, size=None)), + ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter even if it's enabled in the category.", null=True, size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", null=True, size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", null=True, size=None)), ('enabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="The only categories in which to run the filter.", null=True, size=None)), @@ -145,7 +145,7 @@ class Migration(migrations.Migration): ('infraction_reason', models.CharField(help_text='The reason to give for the infraction.', max_length=1000, blank=True)), ('infraction_duration', models.DurationField(help_text='The duration of the infraction. 
0 for permanent.')), ('infraction_channel', models.BigIntegerField(validators=(MinValueValidator(limit_value=0, message="Channel IDs cannot be negative."),), help_text="Channel in which to send the infraction.")), - ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter.", size=None)), + ('disabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to not run the filter even if it's enabled in the category.", size=None)), ('disabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Categories in which to not run the filter.", size=None)), ('enabled_channels', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="Channels in which to run the filter even if it's disabled in the category.", size=None)), ('enabled_categories', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text="The only categories in which to run the filter.", size=None)), diff --git a/pydis_site/apps/api/migrations/0091_antispam_filter_list.py b/pydis_site/apps/api/migrations/0091_antispam_filter_list.py index 58ffa4a4..7c233142 100644 --- a/pydis_site/apps/api/migrations/0091_antispam_filter_list.py +++ b/pydis_site/apps/api/migrations/0091_antispam_filter_list.py @@ -7,7 +7,7 @@ import pydis_site.apps.api.models.bot.filters def create_antispam_list(apps: Apps, _): - """Create the 'unique' FilterList and its related Filters.""" + """Create the 'antispam' FilterList and its related Filters.""" filter_list: pydis_site.apps.api.models.FilterList = apps.get_model("api", "FilterList") filter_: pydis_site.apps.api.models.Filter = apps.get_model("api", "Filter") diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 584ee726..60ae394b 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -101,7 +101,7 @@ class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): ) disabled_channels = ArrayField( models.CharField(max_length=100), - help_text="Channels in which to not run the filter." + help_text="Channels in which to not run the filter even if it's enabled in the category." 
) enabled_categories = ArrayField( models.CharField(max_length=100), @@ -216,7 +216,8 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): ) disabled_channels = ArrayField( models.CharField(max_length=100), - help_text="Channels in which to not run the filter.", null=True + help_text="Channels in which to not run the filter even if it's enabled in the category.", + null=True ) enabled_categories = ArrayField( models.CharField(max_length=100), -- cgit v1.2.3 From 43913623f87329b51cd2a6793e843c30368698aa Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Mar 2023 18:51:24 +0300 Subject: Merge the extra kwargs creation functions Co-authored-by: Amrou --- pydis_site/apps/api/serializers.py | 23 +++++------------------ 1 file changed, 5 insertions(+), 18 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index fe3c1dd2..e8c5869f 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -211,11 +211,11 @@ CHANNEL_SCOPE_FIELDS = ( MENTIONS_FIELDS = ("guild_pings", "dm_pings") -def _create_filter_meta_extra_kwargs() -> dict[str, dict[str, bool]]: - """Create the extra kwargs of the Filter serializer's Meta class.""" +def _create_meta_extra_kwargs(*, for_filter: bool) -> dict[str, dict[str, bool]]: + """Create the extra kwargs for the Meta classes of the Filter and FilterList serializers.""" extra_kwargs = {} for field in SETTINGS_FIELDS: - field_args = {'required': False, 'allow_null': True} + field_args = {'required': False, 'allow_null': True} if for_filter else {} if field in ALLOW_BLANK_SETTINGS: field_args['allow_blank'] = True if field in ALLOW_EMPTY_SETTINGS: @@ -278,7 +278,7 @@ class FilterSerializer(ModelSerializer): 'additional_field', 'filter_list' ) + SETTINGS_FIELDS - extra_kwargs = _create_filter_meta_extra_kwargs() + extra_kwargs = _create_meta_extra_kwargs(for_filter=True) def create(self, validated_data: dict) -> User: """Override the create method to catch violations of the custom uniqueness constraint.""" @@ -317,19 +317,6 @@ class FilterSerializer(ModelSerializer): return schema -def _create_filter_list_meta_extra_kwargs() -> dict[str, dict[str, bool]]: - """Create the extra kwargs of the FilterList serializer's Meta class.""" - extra_kwargs = {} - for field in SETTINGS_FIELDS: - field_args = {} - if field in ALLOW_BLANK_SETTINGS: - field_args['allow_blank'] = True - if field in ALLOW_EMPTY_SETTINGS: - field_args['allow_empty'] = True - extra_kwargs[field] = field_args - return extra_kwargs - - class FilterListSerializer(ModelSerializer): """A class providing (de-)serialization of `FilterList` instances.""" @@ -367,7 +354,7 @@ class FilterListSerializer(ModelSerializer): fields = ( 'id', 'created_at', 'updated_at', 'name', 'list_type', 'filters' ) + SETTINGS_FIELDS - extra_kwargs = _create_filter_list_meta_extra_kwargs() + extra_kwargs = _create_meta_extra_kwargs(for_filter=False) # Ensure there can only be one filter list with the same name and type. 
validators = [ -- cgit v1.2.3 From 36bca58ff336f9d4b797a2c76f08775f9de7e9a7 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Mar 2023 21:26:07 +0300 Subject: Specify the common elements in the validation errors Co-authored-by: Amrou --- pydis_site/apps/api/serializers.py | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index e8c5869f..bfad18ab 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -247,20 +247,24 @@ class FilterSerializer(ModelSerializer): "Infraction type is required with infraction duration or reason." ) - if ( + common_channels = ( set(get_field_value(data, "disabled_channels")) & set(get_field_value(data, "enabled_channels")) - ): + ) + if common_channels: raise ValidationError( - "You can't have the same value in both enabled and disabled channels lists." + "You can't have the same value in both enabled and disabled channels lists:" + f" {', '.join(repr(channel) for channel in common_channels)}." ) - if ( + common_categories = ( set(get_field_value(data, "disabled_categories")) & set(get_field_value(data, "enabled_categories")) - ): + ) + if common_categories: raise ValidationError( - "You can't have the same value in both enabled and disabled categories lists." + "You can't have the same value in both enabled and disabled categories lists:" + f" {', '.join(repr(category) for category in common_categories)}." ) return data @@ -333,17 +337,23 @@ class FilterListSerializer(ModelSerializer): data.get('disabled_channels') is not None and data.get('enabled_channels') is not None ): - channels_collection = data['disabled_channels'] + data['enabled_channels'] - if len(channels_collection) != len(set(channels_collection)): - raise ValidationError("Enabled and Disabled channels lists contain duplicates.") + common_channels = set(data['disabled_channels']) & set(data['enabled_channels']) + if common_channels: + raise ValidationError( + "You can't have the same value in both enabled and disabled channels lists:" + f" {', '.join(repr(channel) for channel in common_channels)}." + ) if ( data.get('disabled_categories') is not None and data.get('enabled_categories') is not None ): - categories_collection = data['disabled_categories'] + data['enabled_categories'] - if len(categories_collection) != len(set(categories_collection)): - raise ValidationError("Enabled and Disabled categories lists contain duplicates.") + common_categories = set(data['disabled_categories']) & set(data['enabled_categories']) + if common_categories: + raise ValidationError( + "You can't have the same value in both enabled and disabled categories lists:" + f" {', '.join(repr(category) for category in common_categories)}." 
+ ) return data -- cgit v1.2.3 From 91b89475913210400cf39884efe37ab5552efbf7 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sat, 25 Mar 2023 21:35:49 +0300 Subject: Use consistent quoting style Co-authored-by: Johannes Christ --- pydis_site/apps/api/models/bot/filters.py | 4 +- pydis_site/apps/api/serializers.py | 68 +++++++++++++++---------------- 2 files changed, 36 insertions(+), 36 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 60ae394b..c6f6f851 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -253,8 +253,8 @@ class Filter(FilterBase): constraints = ( UniqueConstraint( fields=tuple( - [field.name for field in FilterBase._meta.fields - if field.name not in ("id", "description", "created_at", "updated_at")] + field.name for field in FilterBase._meta.fields + if field.name not in ("id", "description", "created_at", "updated_at") ), name="unique_filters"), ) diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index bfad18ab..da02c837 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -188,27 +188,27 @@ BASE_FILTER_FIELDS = ( ) BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( - "bypass_roles", - "filter_dm", - "enabled", - "remove_context", - "send_alert" + 'bypass_roles', + 'filter_dm', + 'enabled', + 'remove_context', + 'send_alert' ) INFRACTION_AND_NOTIFICATION_FIELDS = ( - "infraction_type", - "infraction_reason", - "infraction_duration", - "infraction_channel", - "dm_content", - "dm_embed" + 'infraction_type', + 'infraction_reason', + 'infraction_duration', + 'infraction_channel', + 'dm_content', + 'dm_embed' ) CHANNEL_SCOPE_FIELDS = ( - "disabled_channels", - "disabled_categories", - "enabled_channels", - "enabled_categories" + 'disabled_channels', + 'disabled_categories', + 'enabled_channels', + 'enabled_categories' ) -MENTIONS_FIELDS = ("guild_pings", "dm_pings") +MENTIONS_FIELDS = ('guild_pings', 'dm_pings') def _create_meta_extra_kwargs(*, for_filter: bool) -> dict[str, dict[str, bool]]: @@ -228,7 +228,7 @@ def get_field_value(data: dict, field_name: str) -> Any: """Get the value directly from the key, or from the filter list if it's missing or is None.""" if data.get(field_name) is not None: return data[field_name] - return getattr(data["filter_list"], field_name) + return getattr(data['filter_list'], field_name) class FilterSerializer(ModelSerializer): @@ -238,18 +238,18 @@ class FilterSerializer(ModelSerializer): """Perform infraction data + allowed and disallowed lists validation.""" if ( ( - get_field_value(data, "infraction_reason") - or get_field_value(data, "infraction_duration") + get_field_value(data, 'infraction_reason') + or get_field_value(data, 'infraction_duration') ) - and get_field_value(data, "infraction_type") == "NONE" + and get_field_value(data, 'infraction_type') == 'NONE' ): raise ValidationError( "Infraction type is required with infraction duration or reason." 
) common_channels = ( - set(get_field_value(data, "disabled_channels")) - & set(get_field_value(data, "enabled_channels")) + set(get_field_value(data, 'disabled_channels')) + & set(get_field_value(data, 'enabled_channels')) ) if common_channels: raise ValidationError( @@ -258,8 +258,8 @@ class FilterSerializer(ModelSerializer): ) common_categories = ( - set(get_field_value(data, "disabled_categories")) - & set(get_field_value(data, "enabled_categories")) + set(get_field_value(data, 'disabled_categories')) + & set(get_field_value(data, 'enabled_categories')) ) if common_categories: raise ValidationError( @@ -305,19 +305,19 @@ class FilterSerializer(ModelSerializer): into a sub-field called `settings`. """ settings = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - settings["infraction_and_notification"] = { + settings['infraction_and_notification'] = { name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS } - settings["channel_scope"] = { + settings['channel_scope'] = { name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS } - settings["mentions"] = { + settings['mentions'] = { name: getattr(instance, name) for name in MENTIONS_FIELDS } schema = {name: getattr(instance, name) for name in BASE_FILTER_FIELDS} - schema["filter_list"] = instance.filter_list.id - schema["settings"] = settings + schema['filter_list'] = instance.filter_list.id + schema['settings'] = settings return schema @@ -388,19 +388,19 @@ class FilterListSerializer(ModelSerializer): into a sub-field called `settings`. """ schema = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} - schema["filters"] = [ + schema['filters'] = [ FilterSerializer(many=False).to_representation(instance=item) for item in Filter.objects.filter(filter_list=instance.id) ] settings = {name: getattr(instance, name) for name in BASE_SETTINGS_FIELDS} - settings["infraction_and_notification"] = { + settings['infraction_and_notification'] = { name: getattr(instance, name) for name in INFRACTION_AND_NOTIFICATION_FIELDS } - settings["channel_scope"] = {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} - settings["mentions"] = {name: getattr(instance, name) for name in MENTIONS_FIELDS} + settings['channel_scope'] = {name: getattr(instance, name) for name in CHANNEL_SCOPE_FIELDS} + settings['mentions'] = {name: getattr(instance, name) for name in MENTIONS_FIELDS} - schema["settings"] = settings + schema['settings'] = settings return schema # endregion -- cgit v1.2.3 From 0f40b114940164c65b10d1312b5a419ce025c799 Mon Sep 17 00:00:00 2001 From: mbaruh Date: Sun, 26 Mar 2023 23:12:34 +0300 Subject: Rename additional_field to additional_settings --- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- .../apps/api/migrations/0089_unique_constraint_filters.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 4 +++- pydis_site/apps/api/serializers.py | 9 +++++---- pydis_site/apps/api/tests/test_filters.py | 2 +- pydis_site/apps/api/tests/test_models.py | 2 +- pydis_site/apps/api/viewsets/bot/filters.py | 14 +++++++------- 7 files changed, 21 insertions(+), 18 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 1506e4d7..2e1d78c9 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -64,7 +64,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> 
None: updated_at=object_.updated_at, filter_list=list_, description=object_.comment, - additional_field=None, + additional_settings=None, guild_pings=None, filter_dm=None, dm_pings=None, @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True)), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_field', models.JSONField(help_text='Implementation specific field.', null=True)), + ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', null=True)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), diff --git a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py index 0bcfd8a3..cb230a27 100644 --- a/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py +++ b/pydis_site/apps/api/migrations/0089_unique_constraint_filters.py @@ -11,13 +11,13 @@ class Migration(migrations.Migration): migrations.RunSQL( "ALTER TABLE api_filter " "ADD CONSTRAINT unique_filters UNIQUE NULLS NOT DISTINCT " - "(content, additional_field, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", + "(content, additional_settings, filter_list_id, dm_content, dm_embed, infraction_type, infraction_reason, infraction_duration, infraction_channel, guild_pings, filter_dm, dm_pings, remove_context, bypass_roles, enabled, send_alert, enabled_channels, disabled_channels, enabled_categories, disabled_categories)", reverse_sql="ALTER TABLE api_filter DROP CONSTRAINT unique_filters", state_operations=[ migrations.AddConstraint( model_name='filter', constraint=models.UniqueConstraint( - fields=('content', 'additional_field', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), + fields=('content', 'additional_settings', 'filter_list', 'dm_content', 'dm_embed', 'infraction_type', 'infraction_reason', 'infraction_duration', 'infraction_channel', 'guild_pings', 'filter_dm', 'dm_pings', 'remove_context', 'bypass_roles', 'enabled', 'send_alert', 'enabled_channels', 'disabled_channels', 'enabled_categories', 'disabled_categories'), name='unique_filters' ), ), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index c6f6f851..aadb39aa 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -131,7 +131,9 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): max_length=200, help_text="Why this filter has 
been added.", null=True ) - additional_field = models.JSONField(null=True, help_text="Implementation specific field.") + additional_settings = models.JSONField( + null=True, help_text="Additional settings which are specific to this filter." + ) filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", help_text="The filter list containing this filter." diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index da02c837..a3779094 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -184,7 +184,7 @@ ALLOW_EMPTY_SETTINGS = ( # Required fields for custom JSON representation purposes BASE_FILTER_FIELDS = ( - 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_field' + 'id', 'created_at', 'updated_at', 'content', 'description', 'additional_settings' ) BASE_FILTERLIST_FIELDS = ('id', 'created_at', 'updated_at', 'name', 'list_type') BASE_SETTINGS_FIELDS = ( @@ -279,7 +279,7 @@ class FilterSerializer(ModelSerializer): 'updated_at', 'content', 'description', - 'additional_field', + 'additional_settings', 'filter_list' ) + SETTINGS_FIELDS extra_kwargs = _create_meta_extra_kwargs(for_filter=True) @@ -382,9 +382,10 @@ class FilterListSerializer(ModelSerializer): Provides a custom JSON representation to the FilterList Serializers. This representation restructures how the Filter is represented. - It groups the Infraction, Channel and Mention related fields into their own separated group. + It groups the Infraction, Channel, and Mention related fields + into their own separated groups. - Furthermore, it puts the fields that meant to represent FilterList settings, + Furthermore, it puts the fields that are meant to represent FilterList settings, into a sub-field called `settings`. """ schema = {name: getattr(instance, name) for name in BASE_FILTERLIST_FIELDS} diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index f36e0617..3d3be51e 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -92,7 +92,7 @@ def get_test_sequences() -> Dict[str, TestSequence]: { "content": "bad word", "description": "This is a really bad word.", - "additional_field": "{'hi': 'there'}", + "additional_settings": "{'hi': 'there'}", "guild_pings": None, "filter_dm": None, "dm_pings": None, diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index 25d771cc..d3341b35 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -113,7 +113,7 @@ class StringDunderMethodTests(SimpleTestCase): Filter( content="ducky_nsfw", description="This ducky is totally inappropriate!", - additional_field=None, + additional_settings=None, ), OffensiveMessage( id=602951077675139072, diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index c84da909..d6c2d18c 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -33,7 +33,7 @@ class FilterListViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -119,7 +119,7 @@ class FilterListViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... 
"content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -239,7 +239,7 @@ class FilterListViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -334,7 +334,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -379,7 +379,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, @@ -420,7 +420,7 @@ class FilterViewSet(ModelViewSet): ... "filter_list": 1, ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "bypass_roles": None, ... "filter_dm": None, ... "enabled": False, @@ -454,7 +454,7 @@ class FilterViewSet(ModelViewSet): ... "updated_at": "2023-01-27T21:26:34.030532Z", ... "content": "267624335836053506", ... "description": "Python Discord", - ... "additional_field": None, + ... "additional_settings": None, ... "filter_list": 1, ... "settings": { ... "bypass_roles": None, -- cgit v1.2.3 From 4c923fa1cd6f1f5144036317b116aac745b3c345 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Wed, 5 Apr 2023 03:10:05 +0300 Subject: Add maximum auto-timeout duration validation --- pydis_site/apps/api/serializers.py | 31 +++++++++++++++++++++++++------ pydis_site/apps/api/tests/test_filters.py | 2 ++ 2 files changed, 27 insertions(+), 6 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py index a3779094..2186b02c 100644 --- a/pydis_site/apps/api/serializers.py +++ b/pydis_site/apps/api/serializers.py @@ -1,4 +1,5 @@ """Converters from Django models to data interchange formats and back.""" +from datetime import timedelta from typing import Any from django.db.models.query import QuerySet @@ -210,6 +211,8 @@ CHANNEL_SCOPE_FIELDS = ( ) MENTIONS_FIELDS = ('guild_pings', 'dm_pings') +MAX_TIMEOUT_DURATION = timedelta(days=28) + def _create_meta_extra_kwargs(*, for_filter: bool) -> dict[str, dict[str, bool]]: """Create the extra kwargs for the Meta classes of the Filter and FilterList serializers.""" @@ -236,17 +239,24 @@ class FilterSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allowed and disallowed lists validation.""" + infraction_type = get_field_value(data, 'infraction_type') + infraction_duration = get_field_value(data, 'infraction_duration') if ( - ( - get_field_value(data, 'infraction_reason') - or get_field_value(data, 'infraction_duration') - ) - and get_field_value(data, 'infraction_type') == 'NONE' + (get_field_value(data, 'infraction_reason') or infraction_duration) + and infraction_type == 'NONE' ): raise ValidationError( "Infraction type is required with infraction duration or reason." 
) + if ( + infraction_type == 'TIMEOUT' + and (not infraction_duration or infraction_duration > MAX_TIMEOUT_DURATION) + ): + raise ValidationError( + f"A timeout cannot be longer than {MAX_TIMEOUT_DURATION.days} days." + ) + common_channels = ( set(get_field_value(data, 'disabled_channels')) & set(get_field_value(data, 'enabled_channels')) @@ -328,8 +338,9 @@ class FilterListSerializer(ModelSerializer): def validate(self, data: dict) -> dict: """Perform infraction data + allow and disallowed lists validation.""" + infraction_duration = data.get('infraction_duration') if ( - data.get('infraction_reason') or data.get('infraction_duration') + data.get('infraction_reason') or infraction_duration ) and not data.get('infraction_type'): raise ValidationError("Infraction type is required with infraction duration or reason") @@ -344,6 +355,14 @@ class FilterListSerializer(ModelSerializer): f" {', '.join(repr(channel) for channel in common_channels)}." ) + if ( + data.get('infraction_type') == 'TIMEOUT' + and (not infraction_duration or infraction_duration > MAX_TIMEOUT_DURATION) + ): + raise ValidationError( + f"A timeout cannot be longer than {MAX_TIMEOUT_DURATION.days} days." + ) + if ( data.get('disabled_categories') is not None and data.get('enabled_categories') is not None diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 3d3be51e..ebc4a2cf 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -274,6 +274,7 @@ class FilterValidationTests(AuthenticatedAPITestCase): ({"infraction_reason": "hi"}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {}, 400), ({"infraction_reason": "hi"}, {"infraction_type": "NOTE"}, 200), + ({"infraction_type": "TIMEOUT", "infraction_duration": timedelta(days=30)}, {}, 400), ({"infraction_duration": timedelta(seconds=10)}, {"infraction_type": "TIMEOUT"}, 200), ({"enabled_channels": ["admins"]}, {}, 200), ({"disabled_channels": ["123"]}, {}, 200), @@ -313,6 +314,7 @@ class FilterValidationTests(AuthenticatedAPITestCase): cases = ( ({"infraction_reason": "hi"}, 400), ({"infraction_duration": timedelta(seconds=10)}, 400), + ({"infraction_type": "TIMEOUT", "infraction_duration": timedelta(days=30)}, 400), ({"infraction_reason": "hi", "infraction_type": "NOTE"}, 200), ({"infraction_duration": timedelta(seconds=10), "infraction_type": "TIMEOUT"}, 200), ({"enabled_channels": ["admins"]}, 200), ({"disabled_channels": ["123"]}, 200), -- cgit v1.2.3 From b8ddedc31d54f46bb86a7e7d200c163ea8806ee0 Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 6 Apr 2023 01:09:09 +0300 Subject: Make additional_settings non-null with dict default This makes sure that the value in the DB is always a valid JSON, ensuring the unique constraint will work properly. 
--- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 2e1d78c9..675fdcec 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -64,7 +64,7 @@ def forward(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: updated_at=object_.updated_at, filter_list=list_, description=object_.comment, - additional_settings=None, + additional_settings={}, guild_pings=None, filter_dm=None, dm_pings=None, @@ -105,7 +105,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True)), ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), - ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', null=True)), + ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', default=dict)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), ('dm_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers on a DM.', size=None, null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index aadb39aa..71f8771f 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -132,7 +132,7 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): help_text="Why this filter has been added.", null=True ) additional_settings = models.JSONField( - null=True, help_text="Additional settings which are specific to this filter." 
+ help_text="Additional settings which are specific to this filter.", default=dict ) filter_list = models.ForeignKey( FilterList, models.CASCADE, related_name="filters", diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index ebc4a2cf..5059d651 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -230,6 +230,7 @@ class GenericFilterTests(AuthenticatedAPITestCase): def test_creation_missing_field(self) -> None: for name, sequence in get_test_sequences().items(): + ignored_fields = sequence.ignored_fields + ("id", "additional_settings") with self.subTest(name=name): saved = sequence.model(**sequence.object) save_nested_objects(saved) @@ -237,7 +238,7 @@ class GenericFilterTests(AuthenticatedAPITestCase): for field in sequence.model._meta.get_fields(): with self.subTest(field=field): - if field.null or field.name in sequence.ignored_fields + ("id",): + if field.null or field.name in ignored_fields: continue test_data = data.copy() -- cgit v1.2.3 From c91bbccd85f64333a720a594435f9c7d33d9889d Mon Sep 17 00:00:00 2001 From: Boris Muratov <8bee278@gmail.com> Date: Thu, 6 Apr 2023 19:38:20 +0300 Subject: Fix contents and descriptions being too long for their field type --- pydis_site/apps/api/migrations/0088_new_filter_schema.py | 4 ++-- pydis_site/apps/api/models/bot/filters.py | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/migrations/0088_new_filter_schema.py b/pydis_site/apps/api/migrations/0088_new_filter_schema.py index 675fdcec..9bc40779 100644 --- a/pydis_site/apps/api/migrations/0088_new_filter_schema.py +++ b/pydis_site/apps/api/migrations/0088_new_filter_schema.py @@ -103,8 +103,8 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), - ('content', models.CharField(help_text='The definition of this filter.', max_length=100)), - ('description', models.CharField(help_text='Why this filter has been added.', max_length=200, null=True)), + ('content', models.TextField(help_text='The definition of this filter.')), + ('description', models.TextField(help_text='Why this filter has been added.', null=True)), ('additional_settings', models.JSONField(help_text='Additional settings which are specific to this filter.', default=dict)), ('guild_pings', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), help_text='Who to ping when this filter triggers.', size=None, null=True)), ('filter_dm', models.BooleanField(help_text='Whether DMs should be filtered.', null=True)), diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 71f8771f..620031dc 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -126,9 +126,8 @@ class FilterList(ModelTimestampMixin, ModelReprMixin, models.Model): class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): """One specific trigger of a list.""" - content = models.CharField(max_length=100, help_text="The definition of this filter.") - description = models.CharField( - max_length=200, + content = models.TextField(help_text="The definition of this filter.") + description = models.TextField( help_text="Why this filter has been added.", null=True ) 
additional_settings = models.JSONField( -- cgit v1.2.3 From 6dcdcf519b8a178b4884797d85ad6b2e1b9582c3 Mon Sep 17 00:00:00 2001 From: Amrou Bellalouna Date: Sun, 16 Apr 2023 22:48:08 +0100 Subject: update rule 5 (#943) --- pydis_site/apps/api/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 20431a61..54fbf809 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -149,9 +149,9 @@ class RulesView(APIView): ["english", "language"] ), ( - "Do not provide or request help on projects that may break laws, " - "breach terms of services, or are malicious or inappropriate.", - ["infraction", "tos", "breach", "malicious", "inappropriate"] + "Do not provide or request help on projects that may violate terms of service, " + "or that may be deemed inappropriate, malicious, or illegal.", + ["infraction", "tos", "breach", "malicious", "inappropriate", "illegal"] ), ( "Do not post unapproved advertising.", -- cgit v1.2.3 From cbc67702bb58a3a2d3c80521f21e1fd6f7f203e9 Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Fri, 21 Apr 2023 22:21:13 +0200 Subject: Crosscheck rules between API and static rules --- pydis_site/apps/api/tests/test_rules.py | 38 ++++++++++++++++++++++++++++++ pydis_site/apps/api/views.py | 2 +- pydis_site/apps/content/resources/rules.md | 2 +- 3 files changed, 40 insertions(+), 2 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py index d08c5fae..3ee2d4e0 100644 --- a/pydis_site/apps/api/tests/test_rules.py +++ b/pydis_site/apps/api/tests/test_rules.py @@ -1,3 +1,7 @@ +import itertools +import re +from pathlib import Path + from django.urls import reverse from .base import AuthenticatedAPITestCase @@ -33,3 +37,37 @@ class RuleAPITests(AuthenticatedAPITestCase): url = reverse('api:rules') response = self.client.get(url + '?link_format=unknown') self.assertEqual(response.status_code, 400) + + +class RuleCorrectnessTests(AuthenticatedAPITestCase): + """Verifies that the rules from the API and by the static rules in the content app match.""" + + @classmethod + def setUpTestData(cls): + cls.markdown_rule_re = re.compile(r'^> \d+\. 
(.*)$') + + def test_rules_in_markdown_file_roughly_equal_api_rules(self) -> None: + url = reverse('api:rules') + api_response = self.client.get(url + '?link_format=md') + api_rules = tuple(rule for (rule, _tags) in api_response.json()) + + markdown_rules_path = ( + Path(__file__).parent.parent.parent / 'content' / 'resources' / 'rules.md' + ) + + markdown_rules = [] + for line in markdown_rules_path.read_text().splitlines(): + matches = self.markdown_rule_re.match(line) + if matches is not None: + markdown_rules.append(matches.group(1)) + + zipper = itertools.zip_longest(api_rules, markdown_rules) + for idx, (api_rule, markdown_rule) in enumerate(zipper): + with self.subTest(f"Rule {idx}"): + self.assertIsNotNone( + markdown_rule, f"The API has more rules than {markdown_rules_path}" + ) + self.assertIsNotNone( + api_rule, f"{markdown_rules_path} has more rules than the API endpoint" + ) + self.assertEqual(markdown_rule, api_rule) diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index 54fbf809..b1b7dc0f 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -120,7 +120,7 @@ class RulesView(APIView): link_format ) discord_tos = self._format_link( - 'Terms Of Service', + 'Terms of Service', 'https://discordapp.com/terms', link_format ) diff --git a/pydis_site/apps/content/resources/rules.md b/pydis_site/apps/content/resources/rules.md index a0bdb38d..e55c6715 100644 --- a/pydis_site/apps/content/resources/rules.md +++ b/pydis_site/apps/content/resources/rules.md @@ -5,7 +5,7 @@ icon: fab fa-discord --- We have a small but strict set of rules on our server. Please read over them and take them on board. If you don't understand a rule or need to report an incident, please send a direct message to @ModMail! -> 1. Follow the [Python Discord Code of Conduct](/pages/code-of-conduct/). +> 1. Follow the [Python Discord Code of Conduct](https://pythondiscord.com/pages/code-of-conduct/). > 2. Follow the [Discord Community Guidelines](https://discordapp.com/guidelines) and [Terms of Service](https://discordapp.com/terms). > 3. Respect staff members and listen to their instructions. > 4. Use English to the best of your ability. Be polite if someone speaks English imperfectly. 
-- cgit v1.2.3 From 07855963a1eedd80c410ab2dd51fcae1200c9cee Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Wed, 10 May 2023 12:30:57 +0200 Subject: Switch to ruff for linting --- .github/workflows/lint-test.yaml | 17 +- .pre-commit-config.yaml | 12 +- poetry.lock | 700 ++++++--------------- pydis_site/apps/api/admin.py | 29 +- pydis_site/apps/api/github_utils.py | 13 +- pydis_site/apps/api/models/bot/message.py | 7 +- pydis_site/apps/api/models/bot/metricity.py | 19 +- pydis_site/apps/api/tests/base.py | 3 +- pydis_site/apps/api/tests/test_bumped_threads.py | 2 +- pydis_site/apps/api/tests/test_deleted_messages.py | 11 +- .../apps/api/tests/test_documentation_links.py | 2 +- pydis_site/apps/api/tests/test_filters.py | 12 +- pydis_site/apps/api/tests/test_github_utils.py | 41 +- pydis_site/apps/api/tests/test_infractions.py | 110 ++-- pydis_site/apps/api/tests/test_models.py | 4 +- pydis_site/apps/api/tests/test_nominations.py | 2 +- .../apps/api/tests/test_off_topic_channel_names.py | 2 +- .../apps/api/tests/test_offensive_message.py | 10 +- pydis_site/apps/api/tests/test_reminders.py | 4 +- pydis_site/apps/api/tests/test_roles.py | 2 +- pydis_site/apps/api/tests/test_rules.py | 2 +- pydis_site/apps/api/tests/test_users.py | 29 +- pydis_site/apps/api/tests/test_validators.py | 4 +- pydis_site/apps/api/views.py | 4 +- pydis_site/apps/api/viewsets/bot/filters.py | 4 +- .../api/viewsets/bot/off_topic_channel_name.py | 7 +- pydis_site/apps/api/viewsets/bot/user.py | 5 +- pydis_site/apps/content/models/tag.py | 3 +- .../guides/pydis-guides/contributing/linting.md | 2 +- pydis_site/apps/content/tests/helpers.py | 20 +- pydis_site/apps/content/urls.py | 2 +- pydis_site/apps/content/utils.py | 16 +- pydis_site/apps/content/views/tags.py | 3 +- pydis_site/apps/events/urls.py | 2 +- pydis_site/apps/events/views/page.py | 3 +- .../apps/home/tests/test_repodata_helpers.py | 2 +- pydis_site/apps/home/views.py | 13 +- pydis_site/apps/redirect/urls.py | 25 +- pydis_site/apps/redirect/views.py | 3 +- pydis_site/apps/resources/views.py | 7 +- .../staff/templatetags/deletedmessage_filters.py | 3 +- .../staff/tests/test_deletedmessage_filters.py | 2 +- pyproject.toml | 39 +- static-builds/netlify_build.py | 6 +- 44 files changed, 445 insertions(+), 763 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index 1c744470..a12fb650 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -24,20 +24,11 @@ jobs: # We will not run `flake8` here, as we will use a separate flake8 # action. - name: Run pre-commit hooks - run: SKIP=flake8 pre-commit run --all-files + run: SKIP=ruff pre-commit run --all-files - # Run flake8 and have it format the linting errors in the format of - # the GitHub Workflow command to register error annotations. This - # means that our flake8 output is automatically added as an error - # annotation to both the run result and in the "Files" tab of a - # pull request. - # - # Format used: - # ::error file={filename},line={line},col={col}::{message} - - name: Run flake8 - run: "flake8 \ - --format='::error file=%(path)s,line=%(row)d,col=%(col)d::\ - [flake8] %(code)s: %(text)s'" + # Run `ruff` using github formatting to enable automatic inline annotations. + - name: Run ruff + run: "ruff check --format=github ." 
- name: Migrations and run tests with coverage.py run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2a03559..700dd0a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,15 +10,11 @@ repos: args: [--fix=lf] - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.5.1 - hooks: - - id: python-check-blanket-noqa - repo: local hooks: - - id: flake8 - name: Flake8 - description: This hook runs flake8 within our project's environment. - entry: poetry run flake8 + - id: ruff + name: ruff + description: This hook runs ruff within our project's environment. + entry: poetry run ruff language: system types: [python] diff --git a/poetry.lock b/poetry.lock index d6e09cbe..4d9a87de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "anyio" @@ -36,58 +36,16 @@ files = [ [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "bandit" -version = "1.7.4" -description = "Security oriented static analyser for python code." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -181,100 +139,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = "*" -files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + 
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -355,35 +300,31 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "39.0.1" +version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"}, - {file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"}, - {file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"}, - {file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"}, - {file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"}, - {file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5caeb8188c24888c90b5108a441c106f7faa4c4c075a2bcae438c6e8ca73cef"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4789d1e3e257965e960232345002262ede4d094d1a19f4d3b52e48d4d8f3b885"}, - {file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"}, - {file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"}, - {file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, ] [package.dependencies] @@ -392,10 +333,10 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = 
["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] @@ -536,193 +477,19 @@ pytz = "*" [[package]] name = "filelock" -version = "3.9.0" +version = "3.12.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-annotations" -version = "3.0.1" -description = "Flake8 Type Annotation Checks" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8_annotations-3.0.1-py3-none-any.whl", hash = "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade"}, - {file = "flake8_annotations-3.0.1.tar.gz", hash = "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a"}, -] - -[package.dependencies] -attrs = ">=21.4" -flake8 = ">=5.0" - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -description = "Automated security testing with bandit and flake8." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = ">=5.0.0" - -[[package]] -name = "flake8-bugbear" -version = "23.5.9" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, - {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-import-order" -version = "0.18.2" -description = "Flake8 and pylama plugin that checks the ordering of import statements." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-import-order-0.18.2.tar.gz", hash = "sha256:e23941f892da3e0c09d711babbb0c73bc735242e9b216b726616758a920d900e"}, - {file = "flake8_import_order-0.18.2-py2.py3-none-any.whl", hash = "sha256:82ed59f1083b629b030ee9d3928d9e06b6213eb196fe745b3a7d4af2168130df"}, -] - -[package.dependencies] -pycodestyle = "*" -setuptools = "*" - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-tidy-imports" -version = "4.8.0" -description = "A flake8 plugin that helps you write tidier imports." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-tidy-imports-4.8.0.tar.gz", hash = "sha256:df44f9c841b5dfb3a7a1f0da8546b319d772c2a816a1afefcce43e167a593d83"}, - {file = "flake8_tidy_imports-4.8.0-py3-none-any.whl", hash = "sha256:25bd9799358edefa0e010ce2c587b093c3aba942e96aeaa99b6d0500ae1bf09c"}, -] - -[package.dependencies] -flake8 = ">=3.8.0" - -[[package]] -name = "flake8-todo" -version = "0.7" -description = "TODO notes checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, -] - -[package.dependencies] -pycodestyle = ">=2.0.0,<3.0.0" - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.31" -description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "gunicorn" @@ -759,14 +526,14 @@ files = [ [[package]] name = "httpcore" -version = "0.16.3" +version = "0.17.0" description = "A minimal low-level HTTP client." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, - {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, + {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, + {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, ] [package.dependencies] @@ -805,14 +572,14 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" -version = "2.5.18" +version = "2.5.24" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, - {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, ] [package.extras] @@ -860,18 +627,6 @@ files = [ [package.extras] testing = ["coverage", "pyyaml"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mslex" version = "0.3.0" @@ -899,48 +654,21 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -category = "dev" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, - {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, + {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" @@ -978,26 +706,26 @@ twisted = ["twisted"] [[package]] name = "psutil" -version = "5.9.4" +version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, ] [package.extras] @@ -1075,18 +803,6 @@ files = [ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, ] -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -1099,36 +815,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pyjwt" version = "2.7.0" @@ -1202,14 +888,14 @@ test = ["pyaml", "pytest", "toml"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -1264,26 +950,53 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.30.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.0.265" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.265-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:30ddfe22de6ce4eb1260408f4480bbbce998f954dbf470228a21a9b2c45955e4"}, + {file = "ruff-0.0.265-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a11bd0889e88d3342e7bc514554bb4461bf6cc30ec115821c2425cfaac0b1b6a"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9b38bdb40a998cbc677db55b6225a6c4fadcf8819eb30695e1b8470942426b"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8b44a245b60512403a6a03a5b5212da274d33862225c5eed3bcf12037eb19bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b279fa55ea175ef953208a6d8bfbcdcffac1c39b38cdb8c2bfafe9222add70bb"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5028950f7af9b119d43d91b215d5044976e43b96a0d1458d193ef0dd3c587bf8"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4057eb539a1d88eb84e9f6a36e0a999e0f261ed850ae5d5817e68968e7b89ed9"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d586e69ab5cbf521a1910b733412a5735936f6a610d805b89d35b6647e2a66aa"}, + {file = "ruff-0.0.265-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa17b13cd3f29fc57d06bf34c31f21d043735cc9a681203d634549b0e41047d1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9ac13b11d9ad3001de9d637974ec5402a67cefdf9fffc3929ab44c2fcbb850a1"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:62a9578b48cfd292c64ea3d28681dc16b1aa7445b7a7709a2884510fc0822118"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0f9967f84da42d28e3d9d9354cc1575f96ed69e6e40a7d4b780a7a0418d9409"}, + {file = "ruff-0.0.265-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1d5a8de2fbaf91ea5699451a06f4074e7a312accfa774ad9327cde3e4fda2081"}, + {file = "ruff-0.0.265-py3-none-win32.whl", hash = "sha256:9e9db5ccb810742d621f93272e3cc23b5f277d8d00c4a79668835d26ccbe48dd"}, + {file = "ruff-0.0.265-py3-none-win_amd64.whl", hash = "sha256:f54facf286103006171a00ce20388d88ed1d6732db3b49c11feb9bf3d46f90e9"}, + {file = "ruff-0.0.265-py3-none-win_arm64.whl", hash = "sha256:c78470656e33d32ddc54e8482b1b0fc6de58f1195586731e5ff1405d74421499"}, + {file = "ruff-0.0.265.tar.gz", hash = "sha256:53c17f0dab19ddc22b254b087d1381b601b155acfa8feed514f0d6a413d0ab3a"}, +] + [[package]] name = "sentry-sdk" version = "1.22.2" @@ -1328,14 +1041,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.4.0" +version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, + {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, ] [package.extras] @@ -1343,18 +1056,6 @@ docs = ["furo", "jaraco.packaging (>=9)", 
"jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - [[package]] name = "sniffio" version = "1.3.0" @@ -1367,18 +1068,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "sqlparse" version = "0.4.4" @@ -1396,21 +1085,6 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] -[[package]] -name = "stevedore" -version = "5.0.0" -description = "Manage dynamic plugins for Python applications" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, - {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "taskipy" version = "1.10.4" @@ -1443,26 +1117,26 @@ files = [ [[package]] name = "tzdata" -version = "2022.7" +version = "2023.3" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"}, - {file = "tzdata-2022.7.tar.gz", hash = "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa"}, + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1472,24 +1146,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.19.0" +version = "20.23.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, - {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, + {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, + {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] [[package]] name = "whitenoise" @@ -1509,4 +1183,4 @@ brotli = ["Brotli"] [metadata] lock-version = "2.0" python-versions = "3.10.*" -content-hash = "a89b60823f9d7717b78c1071654c0c1f237768669a8e37e76175730de209e804" +content-hash = "f9076a1d72b610e77d0c389a4992c814a4c81028232a1ec4f060f77f2cb9125c" diff --git a/pydis_site/apps/api/admin.py b/pydis_site/apps/api/admin.py index e123d150..f3cc0405 100644 --- a/pydis_site/apps/api/admin.py +++ b/pydis_site/apps/api/admin.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Iterable, Optional, Tuple +from collections.abc import Iterable from django import urls from django.contrib import admin @@ -62,16 +62,16 @@ class InfractionActorFilter(admin.SimpleListFilter): title = "Actor" parameter_name = "actor" - def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]: + def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]: """Selectable values for viewer to filter by.""" 
         actor_ids = Infraction.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
 
         return queryset.filter(actor__id=self.value())
 
@@ -149,7 +149,7 @@ class DeletedMessageAdmin(admin.ModelAdmin):
 
     list_display = ("id", "author", "channel_id")
 
-    def embed_data(self, message: DeletedMessage) -> Optional[str]:
+    def embed_data(self, message: DeletedMessage) -> str | None:
         """Format embed data in a code block for better readability."""
         if message.embeds:
             return format_html(
@@ -157,6 +157,7 @@ class DeletedMessageAdmin(admin.ModelAdmin):
                 "{0}",
                 json.dumps(message.embeds, indent=4)
             )
+        return None
 
     embed_data.short_description = "Embeds"
 
@@ -229,16 +230,16 @@ class NominationActorFilter(admin.SimpleListFilter):
     title = "Actor"
     parameter_name = "actor"
 
-    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]:
+    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]:
         """Selectable values for viewer to filter by."""
         actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
         nomination_ids = NominationEntry.objects.filter(
             actor__id=self.value()
         ).values_list("nomination_id").distinct()
@@ -292,16 +293,16 @@ class NominationEntryActorFilter(admin.SimpleListFilter):
     title = "Actor"
     parameter_name = "actor"
 
-    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[Tuple[int, str]]:
+    def lookups(self, request: HttpRequest, model: NominationAdmin) -> Iterable[tuple[int, str]]:
         """Selectable values for viewer to filter by."""
         actor_ids = NominationEntry.objects.order_by().values_list("actor").distinct()
         actors = User.objects.filter(id__in=actor_ids)
         return ((a.id, a.username) for a in actors)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
 
         return queryset.filter(actor__id=self.value())
 
@@ -425,15 +426,15 @@ class UserRoleFilter(admin.SimpleListFilter):
     title = "Role"
     parameter_name = "role"
 
-    def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[Tuple[str, str]]:
+    def lookups(self, request: HttpRequest, model: UserAdmin) -> Iterable[tuple[str, str]]:
         """Selectable values for viewer to filter by."""
         roles = Role.objects.all()
         return ((r.name, r.name) for r in roles)
 
-    def queryset(self, request: HttpRequest, queryset: QuerySet) -> Optional[QuerySet]:
+    def queryset(self, request: HttpRequest, queryset: QuerySet) -> QuerySet | None:
         """Query to filter the list of Users against."""
         if not self.value():
-            return
+            return None
 
         role = Role.objects.get(name=self.value())
         return queryset.filter(roles__contains=[role.id])
diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
index 44c571c3..af659195 100644
--- a/pydis_site/apps/api/github_utils.py
+++ b/pydis_site/apps/api/github_utils.py
@@ -82,7 +82,7 @@ def generate_token() -> str:
     Refer to:
     https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app
     """
-    now = datetime.datetime.now()
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
     return jwt.encode(
         {
             "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()),  # Issued at
@@ -145,8 +145,12 @@ def authorize(owner: str, repo: str) -> httpx.Client:
 
 def check_run_status(run: WorkflowRun) -> str:
     """Check if the provided run has been completed, otherwise raise an exception."""
-    created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
-    run_time = datetime.datetime.utcnow() - created_at
+    created_at = (
+        datetime.datetime
+        .strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
+        .replace(tzinfo=datetime.timezone.utc)
+    )
+    run_time = datetime.datetime.now(tz=datetime.timezone.utc) - created_at
 
     if run.status != "completed":
         if run_time <= MAX_RUN_TIME:
@@ -154,8 +158,7 @@ def check_run_status(run: WorkflowRun) -> str:
                 f"The requested run is still pending. It was created "
                 f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago."
             )
-        else:
-            raise RunTimeoutError("The requested workflow was not ready in time.")
+        raise RunTimeoutError("The requested workflow was not ready in time.")
 
     if run.conclusion != "success":
         # The action failed, or did not run
diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py
index 89ae27e4..d8147cd4 100644
--- a/pydis_site/apps/api/models/bot/message.py
+++ b/pydis_site/apps/api/models/bot/message.py
@@ -61,9 +61,10 @@ class Message(ModelReprMixin, models.Model):
     @property
     def timestamp(self) -> datetime.datetime:
         """Attribute that represents the message timestamp as derived from the snowflake id."""
-        return datetime.datetime.utcfromtimestamp(
-            ((self.id >> 22) + 1420070400000) / 1000
-        ).replace(tzinfo=datetime.timezone.utc)
+        return datetime.datetime.fromtimestamp(
+            ((self.id >> 22) + 1420070400000) / 1000,
+            tz=datetime.timezone.utc,
+        )
 
     class Meta:
         """Metadata provided for Django's ORM."""
diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py
index f53dd33c..a55f5e5b 100644
--- a/pydis_site/apps/api/models/bot/metricity.py
+++ b/pydis_site/apps/api/models/bot/metricity.py
@@ -1,4 +1,3 @@
-from typing import List, Tuple
 
 from django.db import connections
 
@@ -10,10 +9,9 @@ EXCLUDE_CHANNELS = (
 )
 
 
-class NotFoundError(Exception):  # noqa: N818
+class NotFoundError(Exception):
     """Raised when an entity cannot be found."""
 
-    pass
 
 
 class Metricity:
@@ -31,15 +29,14 @@ class Metricity:
     def user(self, user_id: str) -> dict:
         """Query a user's data."""
         # TODO: Swap this back to some sort of verified at date
-        columns = ["joined_at"]
-        query = f"SELECT {','.join(columns)} FROM users WHERE id = '%s'"
+        query = "SELECT joined_at FROM users WHERE id = '%s'"
         self.cursor.execute(query, [user_id])
         values = self.cursor.fetchone()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
-        return dict(zip(columns, values))
+        return {'joined_at': values[0]}
 
     def total_messages(self, user_id: str) -> int:
         """Query total number of messages for a user."""
@@ -58,7 +55,7 @@ class Metricity:
         values = self.cursor.fetchone()
 
         if not values:
-            raise NotFoundError()
+            raise NotFoundError
 
         return
values[0] @@ -88,11 +85,11 @@ class Metricity: values = self.cursor.fetchone() if not values: - raise NotFoundError() + raise NotFoundError return values[0] - def top_channel_activity(self, user_id: str) -> List[Tuple[str, int]]: + def top_channel_activity(self, user_id: str) -> list[tuple[str, int]]: """ Query the top three channels in which the user is most active. @@ -127,7 +124,7 @@ class Metricity: values = self.cursor.fetchall() if not values: - raise NotFoundError() + raise NotFoundError return values diff --git a/pydis_site/apps/api/tests/base.py b/pydis_site/apps/api/tests/base.py index c9f3cb7e..704b22cf 100644 --- a/pydis_site/apps/api/tests/base.py +++ b/pydis_site/apps/api/tests/base.py @@ -61,6 +61,7 @@ class AuthenticatedAPITestCase(APITestCase): ... self.assertEqual(response.status_code, 200) """ - def setUp(self): + def setUp(self) -> None: + """Bootstrap the user and authenticate it.""" super().setUp() self.client.force_authenticate(test_user) diff --git a/pydis_site/apps/api/tests/test_bumped_threads.py b/pydis_site/apps/api/tests/test_bumped_threads.py index 316e3f0b..2e3892c7 100644 --- a/pydis_site/apps/api/tests/test_bumped_threads.py +++ b/pydis_site/apps/api/tests/test_bumped_threads.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import BumpedThread +from pydis_site.apps.api.models import BumpedThread class UnauthedBumpedThreadAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_deleted_messages.py b/pydis_site/apps/api/tests/test_deleted_messages.py index 1eb535d8..62d17e58 100644 --- a/pydis_site/apps/api/tests/test_deleted_messages.py +++ b/pydis_site/apps/api/tests/test_deleted_messages.py @@ -1,10 +1,9 @@ -from datetime import datetime +from datetime import datetime, timezone from django.urls import reverse -from django.utils import timezone from .base import AuthenticatedAPITestCase -from ..models import MessageDeletionContext, User +from pydis_site.apps.api.models import MessageDeletionContext, User class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): @@ -18,7 +17,7 @@ class DeletedMessagesWithoutActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': None, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -58,7 +57,7 @@ class DeletedMessagesWithActorTests(AuthenticatedAPITestCase): cls.data = { 'actor': cls.actor.id, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(tz=timezone.utc).isoformat(), 'deletedmessage_set': [ { 'author': cls.author.id, @@ -90,7 +89,7 @@ class DeletedMessagesLogURLTests(AuthenticatedAPITestCase): cls.deletion_context = MessageDeletionContext.objects.create( actor=cls.actor, - creation=timezone.now() + creation=datetime.now(tz=timezone.utc), ) def test_valid_log_url(self): diff --git a/pydis_site/apps/api/tests/test_documentation_links.py b/pydis_site/apps/api/tests/test_documentation_links.py index 4e238cbb..f4a332cb 100644 --- a/pydis_site/apps/api/tests/test_documentation_links.py +++ b/pydis_site/apps/api/tests/test_documentation_links.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import DocumentationLink +from pydis_site.apps.api.models import DocumentationLink class UnauthedDocumentationLinkAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_filters.py b/pydis_site/apps/api/tests/test_filters.py index 
5059d651..4cef1c8f 100644 --- a/pydis_site/apps/api/tests/test_filters.py +++ b/pydis_site/apps/api/tests/test_filters.py @@ -1,7 +1,7 @@ import contextlib from dataclasses import dataclass from datetime import timedelta -from typing import Any, Dict, Tuple, Type +from typing import Any from django.db.models import Model from django.urls import reverse @@ -12,22 +12,22 @@ from pydis_site.apps.api.tests.base import AuthenticatedAPITestCase @dataclass() class TestSequence: - model: Type[Model] + model: type[Model] route: str - object: Dict[str, Any] - ignored_fields: Tuple[str, ...] = () + object: dict[str, Any] + ignored_fields: tuple[str, ...] = () def url(self, detail: bool = False) -> str: return reverse(f'api:bot:{self.route}-{"detail" if detail else "list"}') -FK_FIELDS: Dict[Type[Model], Tuple[str, ...]] = { +FK_FIELDS: dict[type[Model], tuple[str, ...]] = { FilterList: (), Filter: ("filter_list",), } -def get_test_sequences() -> Dict[str, TestSequence]: +def get_test_sequences() -> dict[str, TestSequence]: filter_list1_deny_dict = { "name": "testname", "list_type": 0, diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py index 95bafec0..34fae875 100644 --- a/pydis_site/apps/api/tests/test_github_utils.py +++ b/pydis_site/apps/api/tests/test_github_utils.py @@ -12,7 +12,7 @@ import rest_framework.test from django.urls import reverse from pydis_site import settings -from .. import github_utils +from pydis_site.apps.api import github_utils class GeneralUtilityTests(unittest.TestCase): @@ -39,7 +39,8 @@ class GeneralUtilityTests(unittest.TestCase): delta = datetime.timedelta(minutes=10) self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds()) - self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp()) + then = datetime.datetime.now(tz=datetime.timezone.utc) + delta + self.assertLess(decoded["exp"], then.timestamp()) class CheckRunTests(unittest.TestCase): @@ -50,7 +51,7 @@ class CheckRunTests(unittest.TestCase): "head_sha": "sha", "status": "completed", "conclusion": "success", - "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + "created_at": datetime.datetime.now(tz=datetime.timezone.utc).strftime(settings.GITHUB_TIMESTAMP_FORMAT), "artifacts_url": "url", } @@ -74,7 +75,8 @@ class CheckRunTests(unittest.TestCase): # Set the creation time to well before the MAX_RUN_TIME # to guarantee the right conclusion kwargs["created_at"] = ( - datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) + datetime.datetime.now(tz=datetime.timezone.utc) + - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10) ).strftime(settings.GITHUB_TIMESTAMP_FORMAT) with self.assertRaises(github_utils.RunTimeoutError): @@ -103,29 +105,26 @@ def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> htt "account": {"login": "VALID_OWNER"}, "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL" }]) - else: - return httpx.Response( - 401, json={"error": "auth app/installations"}, request=request - ) + return httpx.Response( + 401, json={"error": "auth app/installations"}, request=request + ) - elif path == "/installation/repositories": + elif path == "/installation/repositories": # noqa: RET505 if auth == "bearer app access token": return httpx.Response(200, request=request, json={ "repositories": [{ "name": "VALID_REPO" }] }) - else: # pragma: no cover - return httpx.Response( - 401, json={"error": "auth 
installation/repositories"}, request=request - ) + return httpx.Response( # pragma: no cover + 401, json={"error": "auth installation/repositories"}, request=request + ) - elif request.method == "POST": + elif request.method == "POST": # noqa: RET505 if path == "/ACCESS_TOKEN_URL": if auth == "bearer JWT initial token": return httpx.Response(200, request=request, json={"token": "app access token"}) - else: # pragma: no cover - return httpx.Response(401, json={"error": "auth access_token"}, request=request) + return httpx.Response(401, json={"error": "auth access_token"}, request=request) # pragma: no cover # Reaching this point means something has gone wrong return httpx.Response(500, request=request) # pragma: no cover @@ -138,7 +137,7 @@ class AuthorizeTests(unittest.TestCase): def test_invalid_apps_auth(self): """Test that an exception is raised if authorization was attempted with an invalid token.""" - with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): + with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"): # noqa: SIM117 with self.assertRaises(httpx.HTTPStatusError) as error: github_utils.authorize("VALID_OWNER", "VALID_REPO") @@ -179,7 +178,11 @@ class ArtifactFetcherTests(unittest.TestCase): run = github_utils.WorkflowRun( name="action_name", head_sha="action_sha", - created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT), + created_at=( + datetime.datetime + .now(tz=datetime.timezone.utc) + .strftime(settings.GITHUB_TIMESTAMP_FORMAT) + ), status="completed", conclusion="success", artifacts_url="artifacts_url" @@ -187,7 +190,7 @@ class ArtifactFetcherTests(unittest.TestCase): return httpx.Response( 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]} ) - elif path == "/artifact_url": + elif path == "/artifact_url": # noqa: RET505 return httpx.Response( 200, request=request, json={"artifacts": [{ "name": "artifact_name", diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py index ceb5591b..71611ee9 100644 --- a/pydis_site/apps/api/tests/test_infractions.py +++ b/pydis_site/apps/api/tests/test_infractions.py @@ -8,8 +8,8 @@ from django.db.utils import IntegrityError from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Infraction, User -from ..serializers import InfractionSerializer +from pydis_site.apps.api.models import Infraction, User +from pydis_site.apps.api.serializers import InfractionSerializer class UnauthenticatedTests(AuthenticatedAPITestCase): @@ -152,8 +152,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_after(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?type=superstar&expires_after={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get(url, {'type': 'superstar', 'expires_after': target_time.isoformat()}) self.assertEqual(response.status_code, 200) infractions = response.json() @@ -161,8 +161,8 @@ class InfractionTests(AuthenticatedAPITestCase): def test_filter_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?type=superstar&expires_before={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + 
datetime.timedelta(hours=5) + response = self.client.get(url, {'type': 'superstar', 'expires_before': target_time.isoformat()}) self.assertEqual(response.status_code, 200) infractions = response.json() @@ -185,11 +185,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_before_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=4) - target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=4) + target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) response = self.client.get( - f'{url}?expires_before={target_time_late.isoformat()}' - f'&expires_after={target_time.isoformat()}' + url, + {'expires_before': target_time_late.isoformat(), + 'expires_after': target_time.isoformat()}, ) self.assertEqual(response.status_code, 200) @@ -198,11 +199,12 @@ class InfractionTests(AuthenticatedAPITestCase): def test_after_after_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - target_time_late = datetime.datetime.utcnow() + datetime.timedelta(hours=9) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + target_time_late = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=9) response = self.client.get( - f'{url}?expires_before={target_time.isoformat()}' - f'&expires_after={target_time_late.isoformat()}' + url, + {'expires_before': target_time.isoformat(), + 'expires_after': target_time_late.isoformat()}, ) self.assertEqual(response.status_code, 400) @@ -212,8 +214,11 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_after_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?permanent=true&expires_after={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get( + url, + {'permanent': 'true', 'expires_after': target_time.isoformat()}, + ) self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -221,8 +226,11 @@ class InfractionTests(AuthenticatedAPITestCase): def test_permanent_before_invalid(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=5) - response = self.client.get(f'{url}?permanent=true&expires_before={target_time.isoformat()}') + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=5) + response = self.client.get( + url, + {'permanent': 'true', 'expires_before': target_time.isoformat()}, + ) self.assertEqual(response.status_code, 400) errors = list(response.json()) @@ -230,9 +238,10 @@ class InfractionTests(AuthenticatedAPITestCase): def test_nonpermanent_before(self): url = reverse('api:bot:infraction-list') - target_time = datetime.datetime.utcnow() + datetime.timedelta(hours=6) + target_time = datetime.datetime.now(tz=timezone.utc) + datetime.timedelta(hours=6) response = self.client.get( - f'{url}?permanent=false&expires_before={target_time.isoformat()}' + url, + {'permanent': 'false', 'expires_before': target_time.isoformat()}, ) self.assertEqual(response.status_code, 200) @@ -522,39 +531,38 @@ class CreationTests(AuthenticatedAPITestCase): active_infraction_types = ('timeout', 'ban', 'superstar') for 
infraction_type in active_infraction_types: - with self.subTest(infraction_type=infraction_type): - with transaction.atomic(): - first_active_infraction = { - 'user': self.user.id, - 'actor': self.user.id, - 'type': infraction_type, - 'reason': 'Take me on!', - 'active': True, - 'expires_at': '2019-10-04T12:52:00+00:00' - } + with self.subTest(infraction_type=infraction_type), transaction.atomic(): + first_active_infraction = { + 'user': self.user.id, + 'actor': self.user.id, + 'type': infraction_type, + 'reason': 'Take me on!', + 'active': True, + 'expires_at': '2019-10-04T12:52:00+00:00' + } + + # Post the first active infraction of a type and confirm it's accepted. + first_response = self.client.post(url, data=first_active_infraction) + self.assertEqual(first_response.status_code, 201) - # Post the first active infraction of a type and confirm it's accepted. - first_response = self.client.post(url, data=first_active_infraction) - self.assertEqual(first_response.status_code, 201) - - second_active_infraction = { - 'user': self.user.id, - 'actor': self.user.id, - 'type': infraction_type, - 'reason': 'Take on me!', - 'active': True, - 'expires_at': '2019-10-04T12:52:00+00:00' + second_active_infraction = { + 'user': self.user.id, + 'actor': self.user.id, + 'type': infraction_type, + 'reason': 'Take on me!', + 'active': True, + 'expires_at': '2019-10-04T12:52:00+00:00' + } + second_response = self.client.post(url, data=second_active_infraction) + self.assertEqual(second_response.status_code, 400) + self.assertEqual( + second_response.json(), + { + 'non_field_errors': [ + 'This user already has an active infraction of this type.' + ] } - second_response = self.client.post(url, data=second_active_infraction) - self.assertEqual(second_response.status_code, 400) - self.assertEqual( - second_response.json(), - { - 'non_field_errors': [ - 'This user already has an active infraction of this type.' 
- ] - } - ) + ) def test_returns_201_for_second_active_infraction_of_different_type(self): """Test if the API accepts a second active infraction of a different type than the first.""" diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py index d3341b35..1cca133d 100644 --- a/pydis_site/apps/api/tests/test_models.py +++ b/pydis_site/apps/api/tests/test_models.py @@ -118,7 +118,7 @@ class StringDunderMethodTests(SimpleTestCase): OffensiveMessage( id=602951077675139072, channel_id=291284109232308226, - delete_date=dt(3000, 1, 1) + delete_date=dt(3000, 1, 1, tzinfo=timezone.utc) ), OffTopicChannelName(name='bob-the-builders-playground'), Role( @@ -132,7 +132,7 @@ class StringDunderMethodTests(SimpleTestCase): name='shawn', discriminator=555, ), - creation=dt.utcnow() + creation=dt.now(tz=timezone.utc) ), User( id=5, diff --git a/pydis_site/apps/api/tests/test_nominations.py b/pydis_site/apps/api/tests/test_nominations.py index b3742cdd..ee6b1fbd 100644 --- a/pydis_site/apps/api/tests/test_nominations.py +++ b/pydis_site/apps/api/tests/test_nominations.py @@ -3,7 +3,7 @@ from datetime import datetime as dt, timedelta, timezone from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Nomination, NominationEntry, User +from pydis_site.apps.api.models import Nomination, NominationEntry, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_off_topic_channel_names.py b/pydis_site/apps/api/tests/test_off_topic_channel_names.py index 34098c92..315f707d 100644 --- a/pydis_site/apps/api/tests/test_off_topic_channel_names.py +++ b/pydis_site/apps/api/tests/test_off_topic_channel_names.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffTopicChannelName +from pydis_site.apps.api.models import OffTopicChannelName class UnauthenticatedTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_offensive_message.py b/pydis_site/apps/api/tests/test_offensive_message.py index 3cf95b75..53f9cb48 100644 --- a/pydis_site/apps/api/tests/test_offensive_message.py +++ b/pydis_site/apps/api/tests/test_offensive_message.py @@ -3,13 +3,13 @@ import datetime from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import OffensiveMessage +from pydis_site.apps.api.models import OffensiveMessage class CreationTests(AuthenticatedAPITestCase): def test_accept_valid_data(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -32,7 +32,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_non_future_date(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() - datetime.timedelta(days=1) + delete_at = datetime.datetime.now() - datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': '291284109232308226', @@ -46,7 +46,7 @@ class CreationTests(AuthenticatedAPITestCase): def test_returns_400_on_negative_id_or_channel_id(self): url = reverse('api:bot:offensivemessage-list') - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 data = { 'id': '602951077675139072', 'channel_id': 
'291284109232308226', @@ -72,7 +72,7 @@ class CreationTests(AuthenticatedAPITestCase): class ListTests(AuthenticatedAPITestCase): @classmethod def setUpTestData(cls): - delete_at = datetime.datetime.now() + datetime.timedelta(days=1) + delete_at = datetime.datetime.now() + datetime.timedelta(days=1) # noqa: DTZ005 aware_delete_at = delete_at.replace(tzinfo=datetime.timezone.utc) cls.messages = [ diff --git a/pydis_site/apps/api/tests/test_reminders.py b/pydis_site/apps/api/tests/test_reminders.py index e17569f0..9bb5fe4d 100644 --- a/pydis_site/apps/api/tests/test_reminders.py +++ b/pydis_site/apps/api/tests/test_reminders.py @@ -4,7 +4,7 @@ from django.forms.models import model_to_dict from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Reminder, User +from pydis_site.apps.api.models import Reminder, User class UnauthedReminderAPITests(AuthenticatedAPITestCase): @@ -59,7 +59,7 @@ class ReminderCreationTests(AuthenticatedAPITestCase): data = { 'author': self.author.id, 'content': 'Remember to...wait what was it again?', - 'expiration': datetime.utcnow().isoformat(), + 'expiration': datetime.now(tz=timezone.utc).isoformat(), 'jump_url': "https://www.google.com", 'channel_id': 123, 'mentions': [8888, 9999], diff --git a/pydis_site/apps/api/tests/test_roles.py b/pydis_site/apps/api/tests/test_roles.py index 73c80c77..d3031990 100644 --- a/pydis_site/apps/api/tests/test_roles.py +++ b/pydis_site/apps/api/tests/test_roles.py @@ -1,7 +1,7 @@ from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Role, User +from pydis_site.apps.api.models import Role, User class CreationTests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_rules.py b/pydis_site/apps/api/tests/test_rules.py index 3ee2d4e0..662fb8e9 100644 --- a/pydis_site/apps/api/tests/test_rules.py +++ b/pydis_site/apps/api/tests/test_rules.py @@ -5,7 +5,7 @@ from pathlib import Path from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..views import RulesView +from pydis_site.apps.api.views import RulesView class RuleAPITests(AuthenticatedAPITestCase): diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py index d86e80bb..cff4a825 100644 --- a/pydis_site/apps/api/tests/test_users.py +++ b/pydis_site/apps/api/tests/test_users.py @@ -4,9 +4,9 @@ from unittest.mock import Mock, patch from django.urls import reverse from .base import AuthenticatedAPITestCase -from ..models import Infraction, Role, User -from ..models.bot.metricity import NotFoundError -from ..viewsets.bot.user import UserListPagination +from pydis_site.apps.api.models import Infraction, Role, User +from pydis_site.apps.api.models.bot.metricity import NotFoundError +from pydis_site.apps.api.viewsets.bot.user import UserListPagination class UnauthedUserAPITests(AuthenticatedAPITestCase): @@ -469,18 +469,17 @@ class UserMetricityTests(AuthenticatedAPITestCase): with self.subTest( voice_infractions=case['voice_infractions'], voice_gate_blocked=case['voice_gate_blocked'] - ): - with patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") as p: - p.return_value = case['voice_infractions'] - - url = reverse('api:bot:user-metricity-data', args=[0]) - response = self.client.get(url) - - self.assertEqual(response.status_code, 200) - self.assertEqual( - response.json()["voice_gate_blocked"], - case["voice_gate_blocked"] - ) + ), patch("pydis_site.apps.api.viewsets.bot.user.Infraction.objects.filter") 
as p: + p.return_value = case['voice_infractions'] + + url = reverse('api:bot:user-metricity-data', args=[0]) + response = self.client.get(url) + + self.assertEqual(response.status_code, 200) + self.assertEqual( + response.json()["voice_gate_blocked"], + case["voice_gate_blocked"] + ) def test_metricity_review_data(self): # Given diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py index 8c46fcbc..a7ec6e38 100644 --- a/pydis_site/apps/api/tests/test_validators.py +++ b/pydis_site/apps/api/tests/test_validators.py @@ -3,8 +3,8 @@ from datetime import datetime, timezone from django.core.exceptions import ValidationError from django.test import TestCase -from ..models.bot.bot_setting import validate_bot_setting_name -from ..models.bot.offensive_message import future_date_validator +from pydis_site.apps.api.models.bot.bot_setting import validate_bot_setting_name +from pydis_site.apps.api.models.bot.offensive_message import future_date_validator REQUIRED_KEYS = ( diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py index b1b7dc0f..32f41667 100644 --- a/pydis_site/apps/api/views.py +++ b/pydis_site/apps/api/views.py @@ -93,7 +93,7 @@ class RulesView(APIView): """ if target == 'html': return f'{description}' - elif target == 'md': + elif target == 'md': # noqa: RET505 return f'[{description}]({link})' else: raise ValueError( @@ -101,7 +101,7 @@ class RulesView(APIView): ) # `format` here is the result format, we have a link format here instead. - def get(self, request, format=None): # noqa: D102,ANN001,ANN201 + def get(self, request, format=None): # noqa: ANN001, ANN201 """ Returns a list of our community rules coupled with their keywords. diff --git a/pydis_site/apps/api/viewsets/bot/filters.py b/pydis_site/apps/api/viewsets/bot/filters.py index d6c2d18c..9c9e8338 100644 --- a/pydis_site/apps/api/viewsets/bot/filters.py +++ b/pydis_site/apps/api/viewsets/bot/filters.py @@ -1,10 +1,10 @@ from rest_framework.viewsets import ModelViewSet -from pydis_site.apps.api.models.bot.filters import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.models.bot.filters import ( # - Preserving the filter order FilterList, Filter ) -from pydis_site.apps.api.serializers import ( # noqa: I101 - Preserving the filter order +from pydis_site.apps.api.serializers import ( # - Preserving the filter order FilterListSerializer, FilterSerializer, ) diff --git a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py index d0519e86..1774004c 100644 --- a/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py +++ b/pydis_site/apps/api/viewsets/bot/off_topic_channel_name.py @@ -85,10 +85,9 @@ class OffTopicChannelNameViewSet(ModelViewSet): serializer.save() return Response(create_data, status=HTTP_201_CREATED) - else: - raise ParseError(detail={ - 'name': ["This query parameter is required."] - }) + raise ParseError(detail={ + 'name': ["This query parameter is required."] + }) def list(self, request: Request, *args, **kwargs) -> Response: """ diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py index db73a83c..88fa3415 100644 --- a/pydis_site/apps/api/viewsets/bot/user.py +++ b/pydis_site/apps/api/viewsets/bot/user.py @@ -1,4 +1,3 @@ -import typing from collections import OrderedDict from django.db.models import Q @@ -24,14 +23,14 @@ class UserListPagination(PageNumberPagination): page_size = 2500 page_size_query_param = 
"page_size" - def get_next_page_number(self) -> typing.Optional[int]: + def get_next_page_number(self) -> int | None: """Get the next page number.""" if not self.page.has_next(): return None page_number = self.page.next_page_number() return page_number - def get_previous_page_number(self) -> typing.Optional[int]: + def get_previous_page_number(self) -> int | None: """Get the previous page number.""" if not self.page.has_previous(): return None diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py index 1a20d775..7c49902f 100644 --- a/pydis_site/apps/content/models/tag.py +++ b/pydis_site/apps/content/models/tag.py @@ -30,8 +30,7 @@ class Commit(models.Model): def lines(self) -> collections.abc.Iterable[str]: """Return each line in the commit message.""" - for line in self.message.split("\n"): - yield line + yield from self.message.split("\n") def format_authors(self) -> collections.abc.Iterable[str]: """Return a nice representation of the author(s)' name and email.""" diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md index f6f8a5f2..b634f513 100644 --- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md +++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md @@ -4,7 +4,7 @@ description: A guide for linting and setting up pre-commit. --- Your commit will be rejected by the build server if it fails to lint. -On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base. +On most of our projects, we use `ruff` and `pre-commit` to ensure that the code style is consistent across the code base. `pre-commit` is a powerful tool that helps you automatically lint before you commit. If the linter complains, the commit is aborted so that you can fix the linting errors before committing again. diff --git a/pydis_site/apps/content/tests/helpers.py b/pydis_site/apps/content/tests/helpers.py index fad91050..0e7562e8 100644 --- a/pydis_site/apps/content/tests/helpers.py +++ b/pydis_site/apps/content/tests/helpers.py @@ -62,19 +62,19 @@ class MockPagesTestCase(TestCase): ├── not_a_page.md ├── tmp.md ├── tmp - |   ├── _info.yml - |   └── category - |    ├── _info.yml - |      └── subcategory_without_info + | ├── _info.yml + | └── category + | ├── _info.yml + | └── subcategory_without_info └── category -    ├── _info.yml -    ├── with_metadata.md -    └── subcategory -    ├── with_metadata.md -       └── without_metadata.md + ├── _info.yml + ├── with_metadata.md + └── subcategory + ├── with_metadata.md + └── without_metadata.md """ - def setUp(self): + def setUp(self) -> None: """Create the fake filesystem.""" Path(f"{BASE_PATH}/_info.yml").write_text(CATEGORY_INFO) Path(f"{BASE_PATH}/root.md").write_text(MARKDOWN_WITH_METADATA) diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py index a7695a27..baae154d 100644 --- a/pydis_site/apps/content/urls.py +++ b/pydis_site/apps/content/urls.py @@ -8,7 +8,7 @@ from . 
import utils, views app_name = "content" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and markdown files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py index c12893ef..347640dd 100644 --- a/pydis_site/apps/content/utils.py +++ b/pydis_site/apps/content/utils.py @@ -151,8 +151,11 @@ def set_tag_commit(tag: Tag) -> None: commit = data["commit"] author, committer = commit["author"], commit["committer"] - date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) - date = date.replace(tzinfo=datetime.timezone.utc) + date = ( + datetime.datetime + .strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT) + .replace(tzinfo=datetime.timezone.utc) + ) if author["email"] == committer["email"]: authors = [author] @@ -212,9 +215,8 @@ def get_tags() -> list[Tag]: record_tags(tags) return tags - else: - # Get tags from database - return list(Tag.objects.all()) + + return list(Tag.objects.all()) def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: @@ -242,13 +244,13 @@ def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]: if tag.last_commit is None and not skip_sync: set_tag_commit(tag) return tag - elif tag.group == name and group is None: + elif tag.group == name and group is None: # noqa: RET505 matches.append(tag) if matches: return matches - raise Tag.DoesNotExist() + raise Tag.DoesNotExist def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]: diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py index 4f4bb5a2..8d3e3321 100644 --- a/pydis_site/apps/content/views/tags.py +++ b/pydis_site/apps/content/views/tags.py @@ -1,5 +1,4 @@ import re -import typing import frontmatter import markdown @@ -22,7 +21,7 @@ COMMAND_REGEX = re.compile(r"`*!tags? 
(?P[\w-]+)(?P [\w-]+)?`*") class TagView(TemplateView): """Handles tag pages.""" - tag: typing.Union[Tag, list[Tag]] + tag: Tag | list[Tag] is_group: bool def setup(self, *args, **kwargs) -> None: diff --git a/pydis_site/apps/events/urls.py b/pydis_site/apps/events/urls.py index 7ea65a31..6121d264 100644 --- a/pydis_site/apps/events/urls.py +++ b/pydis_site/apps/events/urls.py @@ -8,7 +8,7 @@ from pydis_site.apps.events.views import IndexView, PageView app_name = "events" -def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]: +def __get_all_files(root: Path, folder: Path | None = None) -> list[str]: """Find all folders and HTML files recursively starting from `root`.""" if not folder: folder = root diff --git a/pydis_site/apps/events/views/page.py b/pydis_site/apps/events/views/page.py index 1622ad70..adf9e952 100644 --- a/pydis_site/apps/events/views/page.py +++ b/pydis_site/apps/events/views/page.py @@ -1,4 +1,3 @@ -from typing import List from django.conf import settings from django.http import Http404 @@ -8,7 +7,7 @@ from django.views.generic import TemplateView class PageView(TemplateView): """Handles event pages showing.""" - def get_template_names(self) -> List[str]: + def get_template_names(self) -> list[str]: """Get specific template names.""" path: str = self.kwargs['path'] page_path = settings.EVENTS_PAGES_PATH / path diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index a963f733..acf4a817 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -22,7 +22,7 @@ def mocked_requests_get(*args, **kwargs) -> "MockResponse": # noqa: F821 if args[0] == HomeView.github_api: json_path = Path(__file__).resolve().parent / "mock_github_api_response.json" - with open(json_path, 'r') as json_file: + with open(json_path) as json_file: mock_data = json.load(json_file) return MockResponse(mock_data, 200) diff --git a/pydis_site/apps/home/views.py b/pydis_site/apps/home/views.py index 8a165682..bfa9e02d 100644 --- a/pydis_site/apps/home/views.py +++ b/pydis_site/apps/home/views.py @@ -1,5 +1,4 @@ import logging -from typing import Dict, List import httpx from django.core.handlers.wsgi import WSGIRequest @@ -45,7 +44,7 @@ class HomeView(View): else: self.headers = {} - def _get_api_data(self) -> Dict[str, Dict[str, str]]: + def _get_api_data(self) -> dict[str, dict[str, str]]: """ Call the GitHub API and get information about our repos. @@ -54,7 +53,7 @@ class HomeView(View): repo_dict = {} try: # Fetch the data from the GitHub API - api_data: List[dict] = httpx.get( + api_data: list[dict] = httpx.get( self.github_api, headers=self.headers, timeout=settings.TIMEOUT_PERIOD @@ -89,7 +88,7 @@ class HomeView(View): return repo_dict - def _get_repo_data(self) -> List[RepositoryMetadata]: + def _get_repo_data(self) -> list[RepositoryMetadata]: """Build a list of RepositoryMetadata objects that we can use to populate the front page.""" # First off, load the timestamp of the least recently updated entry. if settings.STATIC_BUILD: @@ -121,8 +120,7 @@ class HomeView(View): if settings.STATIC_BUILD: return data - else: - return RepositoryMetadata.objects.bulk_create(data) + return RepositoryMetadata.objects.bulk_create(data) # If the data is stale, we should refresh it. 
if (timezone.now() - last_update).seconds > self.repository_cache_ttl: @@ -149,8 +147,7 @@ class HomeView(View): return database_repositories # Otherwise, if the data is fresher than 2 minutes old, we should just return it. - else: - return RepositoryMetadata.objects.all() + return RepositoryMetadata.objects.all() def get(self, request: WSGIRequest) -> HttpResponse: """Collect repo data and render the homepage view.""" diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py index 067cccc3..a221ea12 100644 --- a/pydis_site/apps/redirect/urls.py +++ b/pydis_site/apps/redirect/urls.py @@ -83,22 +83,21 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]: return paths + redirect_path_name = "pages" if new_app_name == "content" else new_app_name + if len(data.redirect_arguments) > 0: + redirect_arg = data.redirect_arguments[0] else: - redirect_path_name = "pages" if new_app_name == "content" else new_app_name - if len(data.redirect_arguments) > 0: - redirect_arg = data.redirect_arguments[0] - else: - redirect_arg = "resources/" - new_redirect = f"/{redirect_path_name}/{redirect_arg}" + redirect_arg = "resources/" + new_redirect = f"/{redirect_path_name}/{redirect_arg}" - if new_redirect == "/resources/resources/": - new_redirect = "/resources/" + if new_redirect == "/resources/resources/": + new_redirect = "/resources/" - return [distill_path( - data.original_path, - lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), - name=name, - )] + return [distill_path( + data.original_path, + lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)), + name=name, + )] urlpatterns = [] diff --git a/pydis_site/apps/redirect/views.py b/pydis_site/apps/redirect/views.py index 21180cdf..374daf2b 100644 --- a/pydis_site/apps/redirect/views.py +++ b/pydis_site/apps/redirect/views.py @@ -1,4 +1,3 @@ -import typing as t from django.views.generic import RedirectView @@ -15,7 +14,7 @@ class CustomRedirectView(RedirectView): """Overwrites original as_view to add static args.""" return super().as_view(**initkwargs) - def get_redirect_url(self, *args, **kwargs) -> t.Optional[str]: + def get_redirect_url(self, *args, **kwargs) -> str | None: """Extends default behaviour to use static args.""" args = self.static_args + args + tuple(kwargs.values()) if self.prefix_redirect: diff --git a/pydis_site/apps/resources/views.py b/pydis_site/apps/resources/views.py index 2375f722..a2cd8d0c 100644 --- a/pydis_site/apps/resources/views.py +++ b/pydis_site/apps/resources/views.py @@ -1,5 +1,4 @@ import json -import typing as t from pathlib import Path import yaml @@ -22,7 +21,7 @@ class ResourceView(View): """Sort a tuple by its key alphabetically, disregarding 'the' as a prefix.""" name, resource = tuple_ name = name.casefold() - if name.startswith("the ") or name.startswith("the_"): + if name.startswith(("the ", "the_")): return name[4:] return name @@ -48,7 +47,7 @@ class ResourceView(View): } for resource_name, resource in self.resources.items(): css_classes = [] - for tag_type in resource_tags.keys(): + for tag_type in resource_tags: # Store the tags into `resource_tags` tags = resource.get("tags", {}).get(tag_type, []) for tag in tags: @@ -102,7 +101,7 @@ class ResourceView(View): "difficulty": [to_kebabcase(tier) for tier in self.filters["Difficulty"]["filters"]], } - def get(self, request: WSGIRequest, resource_type: t.Optional[str] = None) -> HttpResponse: + def get(self, request: WSGIRequest, resource_type: str | None = None) -> HttpResponse: """List 
out all the resources, and any filtering options from the URL.""" # Add type filtering if the request is made to somewhere like /resources/video. # We also convert all spaces to dashes, so they'll correspond with the filters. diff --git a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py index 9d8f1819..c6638a3b 100644 --- a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py +++ b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import Union from django import template @@ -7,7 +6,7 @@ register = template.Library() @register.filter -def hex_colour(colour: Union[str, int]) -> str: +def hex_colour(colour: str | int) -> str: """ Converts the given representation of a colour to its RGB hex string. diff --git a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py index 31215784..5e49f103 100644 --- a/pydis_site/apps/staff/tests/test_deletedmessage_filters.py +++ b/pydis_site/apps/staff/tests/test_deletedmessage_filters.py @@ -3,7 +3,7 @@ import enum from django.test import TestCase from django.utils import timezone -from ..templatetags import deletedmessage_filters +from pydis_site.apps.staff.templatetags import deletedmessage_filters class Colour(enum.IntEnum): diff --git a/pyproject.toml b/pyproject.toml index 40461f0b..9019efb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,18 +29,9 @@ whitenoise = "6.4.0" [tool.poetry.group.dev.dependencies] python-dotenv = "1.0.0" taskipy = "1.10.4" +ruff = "^0.0.265" [tool.poetry.group.lint.dependencies] -flake8 = "6.0.0" -flake8-annotations = "3.0.1" -flake8-bandit = "4.1.1" -flake8-bugbear = "23.5.9" -flake8-docstrings = "1.7.0" -flake8-import-order = "0.18.2" -flake8-tidy-imports = "4.8.0" -flake8-string-format = "0.3.0" -flake8-todo = "0.7" -pep8-naming = "0.13.3" pre-commit = "3.3.1" [tool.poetry.group.test.dependencies] @@ -50,6 +41,34 @@ coverage = "7.2.5" requires = ["poetry-core>=1.2.0"] build-backend = "poetry.core.masonry.api" +[tool.ruff] +target-version = "py310" +extend-exclude = [".cache"] +ignore = [ + "ANN002", "ANN003", "ANN101", "ANN102", "ANN204", "ANN206", "ANN401", + "B904", + "C401", "C408", + "D100", "D104", "D105", "D107", "D203", "D212", "D214", "D215", "D301", + "D400", "D401", "D402", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "D417", + "E731", + "RET504", + "RUF005", + "S311", + "SIM102", "SIM108", +] +line-length = 120 +select = ["ANN", "B", "C4", "D", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"] + +[tool.ruff.per-file-ignores] +"pydis_site/apps/**/migrations/*.py" = ["ALL"] +"manage.py" = ["T201"] +"pydis_site/apps/api/tests/base.py" = ["S106"] +"pydis_site/apps/**/tests/test_*.py" = ["ANN", "D"] +"static-builds/netlify_build.py" = ["T201"] +"pydis_site/apps/api/tests/test_off_topic_channel_names.py" = ["RUF001"] +"gunicorn.conf.py" = ["ANN", "D"] +"pydis_site/apps/api/models/bot/off_topic_channel_name.py" = ["RUF001"] + [tool.taskipy.tasks] start = "python manage.py run --debug" makemigrations = "python manage.py makemigrations" diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py index 36520c28..2d311a11 100644 --- a/static-builds/netlify_build.py +++ b/static-builds/netlify_build.py @@ -14,15 +14,15 @@ from pathlib import Path from urllib import parse import 
httpx +import contextlib def raise_response(response: httpx.Response) -> None: """Raise an exception from a response if necessary.""" if response.status_code // 100 != 2: - try: + with contextlib.suppress(json.JSONDecodeError): print(response.json()) - except json.JSONDecodeError: - pass + response.raise_for_status() -- cgit v1.2.3 From 5eada1e9801c158b9d90b673ee5fd504b4806eeb Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Wed, 10 May 2023 13:37:39 +0200 Subject: Add Django-specific rules for ruff --- pydis_site/apps/api/models/bot/documentation_link.py | 8 ++++---- pydis_site/apps/api/models/bot/filters.py | 6 +++--- pydis_site/apps/api/models/bot/infraction.py | 18 +++++++++--------- pydis_site/apps/api/models/bot/message.py | 10 +++++----- .../apps/api/models/bot/message_deletion_context.py | 10 +++++----- pydis_site/apps/api/models/bot/nomination.py | 10 +++++----- pydis_site/apps/api/models/bot/role.py | 10 +++++----- pyproject.toml | 3 ++- 8 files changed, 38 insertions(+), 37 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/documentation_link.py b/pydis_site/apps/api/models/bot/documentation_link.py index 9941907c..7f3b4ca5 100644 --- a/pydis_site/apps/api/models/bot/documentation_link.py +++ b/pydis_site/apps/api/models/bot/documentation_link.py @@ -37,11 +37,11 @@ class DocumentationLink(ModelReprMixin, models.Model): help_text="The URL at which the Sphinx inventory is available for this package." ) - def __str__(self): - """Returns the package and URL for the current documentation link, for display purposes.""" - return f"{self.package} - {self.base_url}" - class Meta: """Defines the meta options for the documentation link model.""" ordering = ['package'] + + def __str__(self): + """Returns the package and URL for the current documentation link, for display purposes.""" + return f"{self.package} - {self.base_url}" diff --git a/pydis_site/apps/api/models/bot/filters.py b/pydis_site/apps/api/models/bot/filters.py index 620031dc..6d5188e4 100644 --- a/pydis_site/apps/api/models/bot/filters.py +++ b/pydis_site/apps/api/models/bot/filters.py @@ -231,14 +231,14 @@ class FilterBase(ModelTimestampMixin, ModelReprMixin, models.Model): null=True ) - def __str__(self) -> str: - return f"Filter {self.content!r}" - class Meta: """Metaclass for FilterBase to make it abstract model.""" abstract = True + def __str__(self) -> str: + return f"Filter {self.content!r}" + class Filter(FilterBase): """ diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py index 381b5b9d..b304c6d4 100644 --- a/pydis_site/apps/api/models/bot/infraction.py +++ b/pydis_site/apps/api/models/bot/infraction.py @@ -78,15 +78,6 @@ class Infraction(ModelReprMixin, models.Model): ) ) - def __str__(self): - """Returns some info on the current infraction, for display purposes.""" - s = f"#{self.id}: {self.type} on {self.user_id}" - if self.expires_at: - s += f" until {self.expires_at}" - if self.hidden: - s += " (hidden)" - return s - class Meta: """Defines the meta options for the infraction model.""" @@ -98,3 +89,12 @@ class Infraction(ModelReprMixin, models.Model): name="unique_active_infraction_per_type_per_user" ), ) + + def __str__(self): + """Returns some info on the current infraction, for display purposes.""" + s = f"#{self.id}: {self.type} on {self.user_id}" + if self.expires_at: + s += f" until {self.expires_at}" + if self.hidden: + s += " (hidden)" + return s diff --git a/pydis_site/apps/api/models/bot/message.py 
b/pydis_site/apps/api/models/bot/message.py index d8147cd4..fb3c47fc 100644 --- a/pydis_site/apps/api/models/bot/message.py +++ b/pydis_site/apps/api/models/bot/message.py @@ -58,6 +58,11 @@ class Message(ModelReprMixin, models.Model): help_text="Attachments attached to this message." ) + class Meta: + """Metadata provided for Django's ORM.""" + + abstract = True + @property def timestamp(self) -> datetime.datetime: """Attribute that represents the message timestamp as derived from the snowflake id.""" @@ -65,8 +70,3 @@ class Message(ModelReprMixin, models.Model): ((self.id >> 22) + 1420070400000) / 1000, tz=datetime.timezone.utc, ) - - class Meta: - """Metadata provided for Django's ORM.""" - - abstract = True diff --git a/pydis_site/apps/api/models/bot/message_deletion_context.py b/pydis_site/apps/api/models/bot/message_deletion_context.py index 25741266..207bc4bc 100644 --- a/pydis_site/apps/api/models/bot/message_deletion_context.py +++ b/pydis_site/apps/api/models/bot/message_deletion_context.py @@ -30,12 +30,12 @@ class MessageDeletionContext(ModelReprMixin, models.Model): help_text="When this deletion took place." ) - @property - def log_url(self) -> str: - """Create the url for the deleted message logs.""" - return reverse('staff:logs', args=(self.id,)) - class Meta: """Set the ordering for list views to newest first.""" ordering = ("-creation",) + + @property + def log_url(self) -> str: + """Create the url for the deleted message logs.""" + return reverse('staff:logs', args=(self.id,)) diff --git a/pydis_site/apps/api/models/bot/nomination.py b/pydis_site/apps/api/models/bot/nomination.py index 58e70a83..2f8e305c 100644 --- a/pydis_site/apps/api/models/bot/nomination.py +++ b/pydis_site/apps/api/models/bot/nomination.py @@ -40,16 +40,16 @@ class Nomination(ModelReprMixin, models.Model): null=True, ) - def __str__(self): - """Representation that makes the target and state of the nomination immediately evident.""" - status = "active" if self.active else "ended" - return f"Nomination of {self.user} ({status})" - class Meta: """Set the ordering of nominations to most recent first.""" ordering = ("-inserted_at",) + def __str__(self): + """Representation that makes the target and state of the nomination immediately evident.""" + status = "active" if self.active else "ended" + return f"Nomination of {self.user} ({status})" + class NominationEntry(ModelReprMixin, models.Model): """A nomination entry created by a single staff member.""" diff --git a/pydis_site/apps/api/models/bot/role.py b/pydis_site/apps/api/models/bot/role.py index 733a8e08..e37f3ccd 100644 --- a/pydis_site/apps/api/models/bot/role.py +++ b/pydis_site/apps/api/models/bot/role.py @@ -51,6 +51,11 @@ class Role(ModelReprMixin, models.Model): help_text="The position of the role in the role hierarchy of the Discord Guild." 
) + class Meta: + """Set role ordering from highest to lowest position.""" + + ordering = ("-position",) + def __str__(self) -> str: """Returns the name of the current role, for display purposes.""" return self.name @@ -62,8 +67,3 @@ class Role(ModelReprMixin, models.Model): def __le__(self, other: Role) -> bool: """Compares the roles based on their position in the role hierarchy of the guild.""" return self.position <= other.position - - class Meta: - """Set role ordering from highest to lowest position.""" - - ordering = ("-position",) diff --git a/pyproject.toml b/pyproject.toml index 9019efb0..fb82aa2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,13 +51,14 @@ ignore = [ "D100", "D104", "D105", "D107", "D203", "D212", "D214", "D215", "D301", "D400", "D401", "D402", "D404", "D405", "D406", "D407", "D408", "D409", "D410", "D411", "D412", "D413", "D414", "D416", "D417", "E731", + "DJ001", "DJ008", "RET504", "RUF005", "S311", "SIM102", "SIM108", ] line-length = 120 -select = ["ANN", "B", "C4", "D", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"] +select = ["ANN", "B", "C4", "D", "DJ", "DTZ", "E", "F", "ISC", "INT", "N", "PGH", "PIE", "RET", "RSE", "RUF", "S", "SIM", "T20", "TID", "UP", "W"] [tool.ruff.per-file-ignores] "pydis_site/apps/**/migrations/*.py" = ["ALL"] -- cgit v1.2.3 From 625f57488c9856b5dce0d16bb192119dd2cb3a1c Mon Sep 17 00:00:00 2001 From: Johannes Christ Date: Fri, 12 May 2023 10:29:34 +0200 Subject: Fix psycopg3 compatibility in metricity --- pydis_site/apps/api/models/bot/metricity.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) (limited to 'pydis_site/apps/api') diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py index a55f5e5b..f1277b21 100644 --- a/pydis_site/apps/api/models/bot/metricity.py +++ b/pydis_site/apps/api/models/bot/metricity.py @@ -3,10 +3,11 @@ from django.db import connections BLOCK_INTERVAL = 10 * 60 # 10 minute blocks -EXCLUDE_CHANNELS = ( +# This needs to be a list due to psycopg3 type adaptions. +EXCLUDE_CHANNELS = [ "267659945086812160", # Bot commands "607247579608121354" # SeasonalBot commands -) +] class NotFoundError(Exception): @@ -48,7 +49,7 @@ class Metricity: WHERE author_id = '%s' AND NOT is_deleted - AND channel_id NOT IN %s + AND channel_id != ANY(%s) """, [user_id, EXCLUDE_CHANNELS] ) @@ -76,7 +77,7 @@ class Metricity: WHERE author_id='%s' AND NOT is_deleted - AND channel_id NOT IN %s + AND channel_id != ANY(%s) GROUP BY interval ) block_query; """, @@ -144,13 +145,13 @@ class Metricity: author_id, COUNT(*) FROM messages WHERE - author_id IN %s + author_id = ANY(%s) AND NOT is_deleted - AND channel_id NOT IN %s + AND channel_id != ANY(%s) AND created_at > now() - interval '%s days' GROUP BY author_id """, - [tuple(user_ids), EXCLUDE_CHANNELS, days] + [user_ids, EXCLUDE_CHANNELS, days] ) values = self.cursor.fetchall() -- cgit v1.2.3
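
A note on the last change above: psycopg 3 adapts a Python list to a PostgreSQL array (while a tuple is adapted as a row value), so array-membership filters are written as `column = ANY(%s)` with a list parameter instead of the psycopg2-era `column IN %s` with a tuple. The standalone sketch below only illustrates that binding pattern, assuming hypothetical details: the DSN, the example user IDs, and running the query directly through psycopg rather than Django's "metricity" connection are all illustrative, while the `messages` table and its `author_id`/`is_deleted` columns mirror the queries touched by the patch.

    import psycopg  # psycopg 3

    # A list, not a tuple, so psycopg 3 binds it as a PostgreSQL array for ANY(%s).
    user_ids = ["123456789012345678", "234567890123456789"]  # example Discord IDs

    # Hypothetical DSN; the site itself routes this through Django's "metricity" database alias.
    with psycopg.connect("dbname=metricity") as conn, conn.cursor() as cur:
        cur.execute(
            """
            SELECT author_id, COUNT(*)
            FROM messages
            WHERE author_id = ANY(%s)
              AND NOT is_deleted
            GROUP BY author_id
            """,
            [user_ids],  # one placeholder, one parameter: the whole list becomes an array
        )
        counts = dict(cur.fetchall())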