author      Xithrius <[email protected]>    2022-12-09 22:27:08 -0800
committer   GitHub <[email protected]>       2022-12-09 22:27:08 -0800
commit      aeeec9d11307b75ed117dabf7c04698bd9186d77 (patch)
tree        a23267fa619b5f98187ec5e027d3701c6e34f231 /pydis_site
parent      Appeased the requests from reviews. (diff)
parent      Merge pull request #811 from python-discord/dependabot/pip/flake8-bugbear-22.... (diff)
Merge branch 'main' into 695-setting-different-statuses-on-your-bot
Diffstat (limited to 'pydis_site')
-rw-r--r-- pydis_site/apps/api/__init__.py | 1
-rw-r--r-- pydis_site/apps/api/github_utils.py | 207
-rw-r--r-- pydis_site/apps/api/migrations/0013_specialsnake_image.py | 3
-rw-r--r-- pydis_site/apps/api/migrations/0019_deletedmessage.py | 2
-rw-r--r-- pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py | 3
-rw-r--r-- pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py | 3
-rw-r--r-- pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py | 19
-rw-r--r-- pydis_site/apps/api/migrations/0083_remove_embed_validation.py | 19
-rw-r--r-- pydis_site/apps/api/migrations/0084_infraction_last_applied.py | 26
-rw-r--r-- pydis_site/apps/api/models/bot/infraction.py | 6
-rw-r--r-- pydis_site/apps/api/models/bot/message.py | 16
-rw-r--r-- pydis_site/apps/api/models/bot/metricity.py | 28
-rw-r--r-- pydis_site/apps/api/models/bot/off_topic_channel_name.py | 2
-rw-r--r-- pydis_site/apps/api/models/utils.py | 172
-rw-r--r-- pydis_site/apps/api/pagination.py | 5
-rw-r--r-- pydis_site/apps/api/serializers.py | 1
-rw-r--r-- pydis_site/apps/api/tests/migrations/__init__.py | 1
-rw-r--r-- pydis_site/apps/api/tests/migrations/base.py | 102
-rw-r--r-- pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py | 496
-rw-r--r-- pydis_site/apps/api/tests/migrations/test_base.py | 135
-rw-r--r-- pydis_site/apps/api/tests/test_filterlists.py | 4
-rw-r--r-- pydis_site/apps/api/tests/test_github_utils.py | 286
-rw-r--r-- pydis_site/apps/api/tests/test_infractions.py | 15
-rw-r--r-- pydis_site/apps/api/tests/test_models.py | 12
-rw-r--r-- pydis_site/apps/api/tests/test_users.py | 84
-rw-r--r-- pydis_site/apps/api/tests/test_validators.py | 229
-rw-r--r-- pydis_site/apps/api/urls.py | 9
-rw-r--r-- pydis_site/apps/api/views.py | 96
-rw-r--r-- pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py | 2
-rw-r--r-- pydis_site/apps/api/viewsets/bot/aoc_link.py | 2
-rw-r--r-- pydis_site/apps/api/viewsets/bot/infraction.py | 19
-rw-r--r-- pydis_site/apps/api/viewsets/bot/nomination.py | 2
-rw-r--r-- pydis_site/apps/api/viewsets/bot/reminder.py | 2
-rw-r--r-- pydis_site/apps/api/viewsets/bot/user.py | 59
-rw-r--r-- pydis_site/apps/content/apps.py | 2
-rw-r--r-- pydis_site/apps/content/migrations/0001_add_tags.py | 35
-rw-r--r-- pydis_site/apps/content/migrations/__init__.py | 0
-rw-r--r-- pydis_site/apps/content/models/__init__.py | 3
-rw-r--r-- pydis_site/apps/content/models/tag.py | 80
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/asking-good-questions.md | 2
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing.md | 90
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md | 53
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/commit-messages.md | 15
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines.md | 25
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/_info.yml | 2
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/supplemental-information.md | 99
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md | 14
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/logging.md | 31
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/pull-requests.md | 40
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/sir-lancebot.md | 33
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md | 30
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md | 21
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/contributing/working-with-git.md | 6
-rw-r--r-- pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md | 2
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/discord-messages-with-colors.md | 68
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/discordpy_help_command.md | 66
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md | 323
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md | 23
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md | 29
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/proper-error-handling.md | 70
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/vps-services.md | 41
-rw-r--r-- pydis_site/apps/content/resources/guides/python-guides/why-not-json-as-database.md | 28
-rw-r--r-- pydis_site/apps/content/resources/server-info/roles.md | 8
-rw-r--r-- pydis_site/apps/content/resources/tags/_info.yml | 3
-rw-r--r-- pydis_site/apps/content/tests/test_utils.py | 289
-rw-r--r-- pydis_site/apps/content/tests/test_views.py | 222
-rw-r--r-- pydis_site/apps/content/urls.py | 27
-rw-r--r-- pydis_site/apps/content/utils.py | 282
-rw-r--r-- pydis_site/apps/content/views/__init__.py | 3
-rw-r--r-- pydis_site/apps/content/views/page_category.py | 18
-rw-r--r-- pydis_site/apps/content/views/tags.py | 124
-rw-r--r-- pydis_site/apps/events/apps.py | 2
-rw-r--r-- pydis_site/apps/home/tests/test_repodata_helpers.py | 18
-rw-r--r-- pydis_site/apps/home/views/home.py | 14
-rw-r--r-- pydis_site/apps/redirect/apps.py | 2
-rw-r--r-- pydis_site/apps/redirect/urls.py | 7
-rw-r--r-- pydis_site/apps/resources/apps.py | 2
-rw-r--r-- pydis_site/apps/resources/resources/atom.yaml | 14
-rw-r--r-- pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml | 2
-rw-r--r-- pydis_site/apps/resources/resources/pycharm.yaml | 1
-rw-r--r-- pydis_site/apps/resources/resources/python_morsels.yaml | 1
-rw-r--r-- pydis_site/apps/resources/resources/the_algorithms_github.yaml | 17
-rw-r--r-- pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml | 2
-rw-r--r-- pydis_site/apps/resources/templatetags/get_category_icon.py | 1
-rw-r--r-- pydis_site/apps/staff/apps.py | 2
-rw-r--r-- pydis_site/apps/staff/templatetags/deletedmessage_filters.py | 10
-rw-r--r-- pydis_site/apps/staff/tests/test_logs_view.py | 12
-rw-r--r-- pydis_site/settings.py | 79
-rw-r--r-- pydis_site/static/css/content/color.css | 7
-rw-r--r-- pydis_site/static/css/content/tag.css | 13
-rw-r--r-- pydis_site/static/css/events/base.css | 8
-rw-r--r-- pydis_site/static/css/home/index.css | 10
-rw-r--r-- pydis_site/static/css/staff/logs.css | 3
-rw-r--r-- pydis_site/static/images/content/contributing/pull_request.png | bin 0 -> 10190 bytes
-rw-r--r-- pydis_site/static/images/content/discord_colored_messages/ansi-colors.png | bin 0 -> 43004 bytes
-rw-r--r-- pydis_site/static/images/content/discord_colored_messages/result.png | bin 0 -> 13740 bytes
-rw-r--r-- pydis_site/static/images/content/fix-ssl-certificate/pem.png | bin 0 -> 11619 bytes
-rw-r--r-- pydis_site/static/images/content/regenerating_token.jpg | bin 0 -> 180570 bytes
-rw-r--r-- pydis_site/static/images/events/Replit.png | bin 0 -> 12114 bytes
-rw-r--r-- pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/live_now.png | bin 0 -> 406158 bytes
-rw-r--r-- pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/qualifier_release.png | bin 0 -> 470631 bytes
-rw-r--r-- pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/sign_up.png | bin 0 -> 432495 bytes
-rw-r--r-- pydis_site/static/images/events/summer_code_jam_2022/site_banner.png | bin 0 -> 301066 bytes
-rw-r--r-- pydis_site/static/images/navbar/discord.svg | 327
-rw-r--r-- pydis_site/static/js/content/listing.js | 41
-rw-r--r-- pydis_site/templates/base/navbar.html | 3
-rw-r--r-- pydis_site/templates/content/base.html | 6
-rw-r--r-- pydis_site/templates/content/listing.html | 27
-rw-r--r-- pydis_site/templates/content/page.html | 8
-rw-r--r-- pydis_site/templates/content/tag.html | 40
-rw-r--r-- pydis_site/templates/events/index.html | 7
-rw-r--r-- pydis_site/templates/events/pages/code-jams/9/_index.html | 127
-rw-r--r-- pydis_site/templates/events/pages/code-jams/9/frameworks.html | 148
-rw-r--r-- pydis_site/templates/events/pages/code-jams/9/rules.html | 80
-rw-r--r-- pydis_site/templates/events/pages/code-jams/_index.html | 2
-rw-r--r-- pydis_site/templates/events/sidebar/code-jams/9.html | 21
-rw-r--r-- pydis_site/templates/events/sidebar/code-jams/ongoing-code-jam.html | 6
-rw-r--r-- pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html | 1
-rw-r--r-- pydis_site/templates/events/sidebar/events-list.html | 17
-rw-r--r-- pydis_site/templates/events/sidebar/ongoing-event.html | 6
-rw-r--r-- pydis_site/templates/home/index.html | 21
-rw-r--r-- pydis_site/templates/staff/logs.html | 4
-rw-r--r-- pydis_site/urls.py | 4
123 files changed, 3692 insertions, 1706 deletions
diff --git a/pydis_site/apps/api/__init__.py b/pydis_site/apps/api/__init__.py
index afa5b4d5..e69de29b 100644
--- a/pydis_site/apps/api/__init__.py
+++ b/pydis_site/apps/api/__init__.py
@@ -1 +0,0 @@
-default_app_config = 'pydis_site.apps.api.apps.ApiConfig'
diff --git a/pydis_site/apps/api/github_utils.py b/pydis_site/apps/api/github_utils.py
new file mode 100644
index 00000000..44c571c3
--- /dev/null
+++ b/pydis_site/apps/api/github_utils.py
@@ -0,0 +1,207 @@
+"""Utilities for working with the GitHub API."""
+import dataclasses
+import datetime
+import math
+import typing
+
+import httpx
+import jwt
+
+from pydis_site import settings
+
+MAX_RUN_TIME = datetime.timedelta(minutes=10)
+"""The maximum time allowed before an action is declared timed out."""
+
+
+class ArtifactProcessingError(Exception):
+ """Base exception for other errors related to processing a GitHub artifact."""
+
+ status: int
+
+
+class UnauthorizedError(ArtifactProcessingError):
+ """The application does not have permission to access the requested repo."""
+
+ status = 401
+
+
+class NotFoundError(ArtifactProcessingError):
+ """The requested resource could not be found."""
+
+ status = 404
+
+
+class ActionFailedError(ArtifactProcessingError):
+ """The requested workflow did not conclude successfully."""
+
+ status = 400
+
+
+class RunTimeoutError(ArtifactProcessingError):
+ """The requested workflow run was not ready in time."""
+
+ status = 408
+
+
+class RunPendingError(ArtifactProcessingError):
+ """The requested workflow run is still pending, try again later."""
+
+ status = 202
+
+
+@dataclasses.dataclass(frozen=True)
+class WorkflowRun:
+ """
+ A workflow run from the GitHub API.
+
+ https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run
+ """
+
+ name: str
+ head_sha: str
+ created_at: str
+ status: str
+ conclusion: str
+ artifacts_url: str
+
+ @classmethod
+ def from_raw(cls, data: dict[str, typing.Any]):
+ """Create an instance using the raw data from the API, discarding unused fields."""
+ return cls(**{
+ key.name: data[key.name] for key in dataclasses.fields(cls)
+ })
+
+
+def generate_token() -> str:
+ """
+ Generate a JWT token to access the GitHub API.
+
+ The token is valid for roughly 10 minutes after generation, before the API starts
+ returning 401s.
+
+ Refer to:
+ https://docs.github.com/en/developers/apps/building-github-apps/authenticating-with-github-apps#authenticating-as-a-github-app
+ """
+ now = datetime.datetime.now()
+ return jwt.encode(
+ {
+ "iat": math.floor((now - datetime.timedelta(seconds=60)).timestamp()), # Issued at
+ "exp": math.floor((now + datetime.timedelta(minutes=9)).timestamp()), # Expires at
+ "iss": settings.GITHUB_APP_ID,
+ },
+ settings.GITHUB_APP_KEY,
+ algorithm="RS256"
+ )
+
+
+def authorize(owner: str, repo: str) -> httpx.Client:
+ """
+ Get an access token for the requested repository.
+
+ The process is roughly:
+ - GET app/installations to get a list of all app installations
+ - POST <app_access_token> to get a token to access the given app
+ - GET installation/repositories and check if the requested one is part of those
+ """
+ client = httpx.Client(
+ base_url=settings.GITHUB_API,
+ headers={"Authorization": f"bearer {generate_token()}"},
+ timeout=10,
+ )
+
+ try:
+ # Get a list of app installations we have access to
+ apps = client.get("app/installations")
+ apps.raise_for_status()
+
+ for app in apps.json():
+ # Look for an installation with the right owner
+ if app["account"]["login"] != owner:
+ continue
+
+ # Get the repositories of the specified owner
+ app_token = client.post(app["access_tokens_url"])
+ app_token.raise_for_status()
+ client.headers["Authorization"] = f"bearer {app_token.json()['token']}"
+
+ repos = client.get("installation/repositories")
+ repos.raise_for_status()
+
+            # Search for the requested repository
+ for accessible_repo in repos.json()["repositories"]:
+ if accessible_repo["name"] == repo:
+ # We've found the correct repository, and it's accessible with the current auth
+ return client
+
+ raise NotFoundError(
+ "Could not find the requested repository. Make sure the application can access it."
+ )
+
+ except BaseException as e:
+ # Close the client if we encountered an unexpected exception
+ client.close()
+ raise e
+
+
+def check_run_status(run: WorkflowRun) -> str:
+ """Check if the provided run has been completed, otherwise raise an exception."""
+ created_at = datetime.datetime.strptime(run.created_at, settings.GITHUB_TIMESTAMP_FORMAT)
+ run_time = datetime.datetime.utcnow() - created_at
+
+ if run.status != "completed":
+ if run_time <= MAX_RUN_TIME:
+ raise RunPendingError(
+ f"The requested run is still pending. It was created "
+ f"{run_time.seconds // 60}:{run_time.seconds % 60 :>02} minutes ago."
+ )
+ else:
+ raise RunTimeoutError("The requested workflow was not ready in time.")
+
+ if run.conclusion != "success":
+ # The action failed, or did not run
+ raise ActionFailedError(f"The requested workflow ended with: {run.conclusion}")
+
+ # The requested action is ready
+ return run.artifacts_url
+
+
+def get_artifact(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str:
+ """Get a download URL for a build artifact."""
+ client = authorize(owner, repo)
+
+ try:
+ # Get the workflow runs for this repository
+ runs = client.get(f"/repos/{owner}/{repo}/actions/runs", params={"per_page": 100})
+ runs.raise_for_status()
+ runs = runs.json()
+
+ # Filter the runs for the one associated with the given SHA
+ for run in runs["workflow_runs"]:
+ run = WorkflowRun.from_raw(run)
+ if run.name == action_name and sha == run.head_sha:
+ break
+ else:
+ raise NotFoundError(
+ "Could not find a run matching the provided settings in the previous hundred runs."
+ )
+
+ # Check the workflow status
+ url = check_run_status(run)
+
+ # Filter the artifacts, and return the download URL
+ artifacts = client.get(url)
+ artifacts.raise_for_status()
+
+ for artifact in artifacts.json()["artifacts"]:
+ if artifact["name"] == artifact_name:
+ data = client.get(artifact["archive_download_url"])
+ if data.status_code == 302:
+ return str(data.next_request.url)
+
+ # The following line is left untested since it should in theory be impossible
+ data.raise_for_status() # pragma: no cover
+
+ raise NotFoundError("Could not find an artifact matching the provided name.")
+
+ finally:
+ client.close()
diff --git a/pydis_site/apps/api/migrations/0013_specialsnake_image.py b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
index a0d0d318..8ba3432f 100644
--- a/pydis_site/apps/api/migrations/0013_specialsnake_image.py
+++ b/pydis_site/apps/api/migrations/0013_specialsnake_image.py
@@ -2,7 +2,6 @@
import datetime
from django.db import migrations, models
-from django.utils.timezone import utc
class Migration(migrations.Migration):
@@ -15,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='specialsnake',
name='image',
- field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=utc)),
+ field=models.URLField(default=datetime.datetime(2018, 10, 23, 11, 51, 23, 703868, tzinfo=datetime.timezone.utc)),
preserve_default=False,
),
]
diff --git a/pydis_site/apps/api/migrations/0019_deletedmessage.py b/pydis_site/apps/api/migrations/0019_deletedmessage.py
index 6b848d64..25d04434 100644
--- a/pydis_site/apps/api/migrations/0019_deletedmessage.py
+++ b/pydis_site/apps/api/migrations/0019_deletedmessage.py
@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
('id', models.BigIntegerField(help_text='The message ID as taken from Discord.', primary_key=True, serialize=False, validators=[django.core.validators.MinValueValidator(limit_value=0, message='Message IDs cannot be negative.')])),
('channel_id', models.BigIntegerField(help_text='The channel ID that this message was sent in, taken from Discord.', validators=[django.core.validators.MinValueValidator(limit_value=0, message='Channel IDs cannot be negative.')])),
('content', models.CharField(help_text='The content of this message, taken from Discord.', max_length=2000)),
- ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), help_text='Embeds attached to this message.', size=None)),
+ ('embeds', django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), help_text='Embeds attached to this message.', size=None)),
('author', models.ForeignKey(help_text='The author of this message.', on_delete=django.db.models.deletion.CASCADE, to='api.User')),
('deletion_context', models.ForeignKey(help_text='The deletion context this message is part of.', on_delete=django.db.models.deletion.CASCADE, to='api.MessageDeletionContext')),
],
diff --git a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
index 124c6a57..622f21d1 100644
--- a/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
+++ b/pydis_site/apps/api/migrations/0051_allow_blank_message_embeds.py
@@ -3,7 +3,6 @@
import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
from django.db import migrations
-import pydis_site.apps.api.models.utils
class Migration(migrations.Migration):
@@ -16,6 +15,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='deletedmessage',
name='embeds',
- field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None),
+ field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.jsonb.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None),
),
]
diff --git a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py
index 9e8f2fb9..95ef5850 100644
--- a/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py
+++ b/pydis_site/apps/api/migrations/0077_use_generic_jsonfield.py
@@ -2,7 +2,6 @@
import django.contrib.postgres.fields
from django.db import migrations, models
-import pydis_site.apps.api.models.utils
class Migration(migrations.Migration):
@@ -20,6 +19,6 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='deletedmessage',
name='embeds',
- field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[pydis_site.apps.api.models.utils.validate_embed]), blank=True, help_text='Embeds attached to this message.', size=None),
+ field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None),
),
]
diff --git a/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py b/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py
new file mode 100644
index 00000000..abbb98ec
--- /dev/null
+++ b/pydis_site/apps/api/migrations/0082_otn_allow_big_solidus.py
@@ -0,0 +1,19 @@
+# Generated by Django 3.1.14 on 2022-04-21 23:29
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0081_bumpedthread'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='offtopicchannelname',
+ name='name',
+ field=models.CharField(help_text='The actual channel name that will be used on our Discord server.', max_length=96, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(regex="^[a-z0-9\\U0001d5a0-\\U0001d5b9-ǃ?’'<>⧹⧸]+$")]),
+ ),
+ ]
diff --git a/pydis_site/apps/api/migrations/0083_remove_embed_validation.py b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py
new file mode 100644
index 00000000..e835bb66
--- /dev/null
+++ b/pydis_site/apps/api/migrations/0083_remove_embed_validation.py
@@ -0,0 +1,19 @@
+# Generated by Django 3.1.14 on 2022-06-30 09:41
+
+import django.contrib.postgres.fields
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0082_otn_allow_big_solidus'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='deletedmessage',
+ name='embeds',
+ field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(), blank=True, help_text='Embeds attached to this message.', size=None),
+ ),
+ ]
diff --git a/pydis_site/apps/api/migrations/0084_infraction_last_applied.py b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py
new file mode 100644
index 00000000..7704ddb8
--- /dev/null
+++ b/pydis_site/apps/api/migrations/0084_infraction_last_applied.py
@@ -0,0 +1,26 @@
+# Generated by Django 4.0.6 on 2022-07-27 20:32
+
+import django.utils.timezone
+from django.db import migrations, models
+from django.apps.registry import Apps
+
+
+def set_last_applied_to_inserted_at(apps: Apps, schema_editor):
+ Infractions = apps.get_model("api", "infraction")
+ Infractions.objects.all().update(last_applied=models.F("inserted_at"))
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0083_remove_embed_validation'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='infraction',
+ name='last_applied',
+ field=models.DateTimeField(default=django.utils.timezone.now, help_text='The date and time of when this infraction was last applied.'),
+ ),
+ migrations.RunPython(set_last_applied_to_inserted_at)
+ ]
diff --git a/pydis_site/apps/api/models/bot/infraction.py b/pydis_site/apps/api/models/bot/infraction.py
index c9303024..218ee5ec 100644
--- a/pydis_site/apps/api/models/bot/infraction.py
+++ b/pydis_site/apps/api/models/bot/infraction.py
@@ -23,6 +23,12 @@ class Infraction(ModelReprMixin, models.Model):
default=timezone.now,
help_text="The date and time of the creation of this infraction."
)
+ last_applied = models.DateTimeField(
+ # This default is for backwards compatibility with bot versions
+ # that don't explicitly give a value.
+ default=timezone.now,
+ help_text="The date and time of when this infraction was last applied."
+ )
expires_at = models.DateTimeField(
null=True,
help_text=(
diff --git a/pydis_site/apps/api/models/bot/message.py b/pydis_site/apps/api/models/bot/message.py
index bab3368d..89ae27e4 100644
--- a/pydis_site/apps/api/models/bot/message.py
+++ b/pydis_site/apps/api/models/bot/message.py
@@ -1,13 +1,11 @@
-from datetime import datetime
+import datetime
from django.contrib.postgres import fields as pgfields
from django.core.validators import MinValueValidator
from django.db import models
-from django.utils import timezone
from pydis_site.apps.api.models.bot.user import User
from pydis_site.apps.api.models.mixins import ModelReprMixin
-from pydis_site.apps.api.models.utils import validate_embed
class Message(ModelReprMixin, models.Model):
@@ -48,9 +46,7 @@ class Message(ModelReprMixin, models.Model):
blank=True
)
embeds = pgfields.ArrayField(
- models.JSONField(
- validators=(validate_embed,)
- ),
+ models.JSONField(),
blank=True,
help_text="Embeds attached to this message."
)
@@ -63,11 +59,11 @@ class Message(ModelReprMixin, models.Model):
)
@property
- def timestamp(self) -> datetime:
+ def timestamp(self) -> datetime.datetime:
"""Attribute that represents the message timestamp as derived from the snowflake id."""
- tz_naive_datetime = datetime.utcfromtimestamp(((self.id >> 22) + 1420070400000) / 1000)
- tz_aware_datetime = timezone.make_aware(tz_naive_datetime, timezone=timezone.utc)
- return tz_aware_datetime
+ return datetime.datetime.utcfromtimestamp(
+ ((self.id >> 22) + 1420070400000) / 1000
+ ).replace(tzinfo=datetime.timezone.utc)
class Meta:
"""Metadata provided for Django's ORM."""
diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py
index abd25ef0..f53dd33c 100644
--- a/pydis_site/apps/api/models/bot/metricity.py
+++ b/pydis_site/apps/api/models/bot/metricity.py
@@ -130,3 +130,31 @@ class Metricity:
raise NotFoundError()
return values
+
+ def total_messages_in_past_n_days(
+ self,
+ user_ids: list[str],
+ days: int
+ ) -> list[tuple[str, int]]:
+ """
+ Query activity by a list of users in the past `days` days.
+
+ Returns a list of (user_id, message_count) tuples.
+ """
+ self.cursor.execute(
+ """
+ SELECT
+ author_id, COUNT(*)
+ FROM messages
+ WHERE
+ author_id IN %s
+ AND NOT is_deleted
+ AND channel_id NOT IN %s
+ AND created_at > now() - interval '%s days'
+ GROUP BY author_id
+ """,
+ [tuple(user_ids), EXCLUDE_CHANNELS, days]
+ )
+ values = self.cursor.fetchall()
+
+ return values
diff --git a/pydis_site/apps/api/models/bot/off_topic_channel_name.py b/pydis_site/apps/api/models/bot/off_topic_channel_name.py
index e9fec114..b380efad 100644
--- a/pydis_site/apps/api/models/bot/off_topic_channel_name.py
+++ b/pydis_site/apps/api/models/bot/off_topic_channel_name.py
@@ -11,7 +11,7 @@ class OffTopicChannelName(ModelReprMixin, models.Model):
primary_key=True,
max_length=96,
validators=(
- RegexValidator(regex=r"^[a-z0-9\U0001d5a0-\U0001d5b9-ǃ?’'<>]+$"),
+ RegexValidator(regex=r"^[a-z0-9\U0001d5a0-\U0001d5b9-ǃ?’'<>⧹⧸]+$"),
),
help_text="The actual channel name that will be used on our Discord server."
)
diff --git a/pydis_site/apps/api/models/utils.py b/pydis_site/apps/api/models/utils.py
deleted file mode 100644
index 859394d2..00000000
--- a/pydis_site/apps/api/models/utils.py
+++ /dev/null
@@ -1,172 +0,0 @@
-from collections.abc import Mapping
-from typing import Any, Dict
-
-from django.core.exceptions import ValidationError
-from django.core.validators import MaxLengthValidator, MinLengthValidator
-
-
-def is_bool_validator(value: Any) -> None:
- """Validates if a given value is of type bool."""
- if not isinstance(value, bool):
- raise ValidationError(f"This field must be of type bool, not {type(value)}.")
-
-
-def validate_embed_fields(fields: dict) -> None:
- """Raises a ValidationError if any of the given embed fields is invalid."""
- field_validators = {
- 'name': (MaxLengthValidator(limit_value=256),),
- 'value': (MaxLengthValidator(limit_value=1024),),
- 'inline': (is_bool_validator,),
- }
-
- required_fields = ('name', 'value')
-
- for field in fields:
- if not isinstance(field, Mapping):
- raise ValidationError("Embed fields must be a mapping.")
-
- if not all(required_field in field for required_field in required_fields):
- raise ValidationError(
- f"Embed fields must contain the following fields: {', '.join(required_fields)}."
- )
-
- for field_name, value in field.items():
- if field_name not in field_validators:
- raise ValidationError(f"Unknown embed field field: {field_name!r}.")
-
- for validator in field_validators[field_name]:
- validator(value)
-
-
-def validate_embed_footer(footer: Dict[str, str]) -> None:
- """Raises a ValidationError if the given footer is invalid."""
- field_validators = {
- 'text': (
- MinLengthValidator(
- limit_value=1,
- message="Footer text must not be empty."
- ),
- MaxLengthValidator(limit_value=2048)
- ),
- 'icon_url': (),
- 'proxy_icon_url': ()
- }
-
- if not isinstance(footer, Mapping):
- raise ValidationError("Embed footer must be a mapping.")
-
- for field_name, value in footer.items():
- if field_name not in field_validators:
- raise ValidationError(f"Unknown embed footer field: {field_name!r}.")
-
- for validator in field_validators[field_name]:
- validator(value)
-
-
-def validate_embed_author(author: Any) -> None:
- """Raises a ValidationError if the given author is invalid."""
- field_validators = {
- 'name': (
- MinLengthValidator(
- limit_value=1,
- message="Embed author name must not be empty."
- ),
- MaxLengthValidator(limit_value=256)
- ),
- 'url': (),
- 'icon_url': (),
- 'proxy_icon_url': ()
- }
-
- if not isinstance(author, Mapping):
- raise ValidationError("Embed author must be a mapping.")
-
- for field_name, value in author.items():
- if field_name not in field_validators:
- raise ValidationError(f"Unknown embed author field: {field_name!r}.")
-
- for validator in field_validators[field_name]:
- validator(value)
-
-
-def validate_embed(embed: Any) -> None:
- """
- Validate a JSON document containing an embed as possible to send on Discord.
-
- This attempts to rebuild the validation used by Discord
- as well as possible by checking for various embed limits so we can
- ensure that any embed we store here will also be accepted as a
- valid embed by the Discord API.
-
- Using this directly is possible, although not intended - you usually
- stick this onto the `validators` keyword argument of model fields.
-
- Example:
-
- >>> from django.db import models
- >>> from pydis_site.apps.api.models.utils import validate_embed
- >>> class MyMessage(models.Model):
- ... embed = models.JSONField(
- ... validators=(
- ... validate_embed,
- ... )
- ... )
- ... # ...
- ...
-
- Args:
- embed (Any):
- A dictionary describing the contents of this embed.
- See the official documentation for a full reference
- of accepted keys by this dictionary:
- https://discordapp.com/developers/docs/resources/channel#embed-object
-
- Raises:
- ValidationError:
- In case the given embed is deemed invalid, a `ValidationError`
- is raised which in turn will allow Django to display errors
- as appropriate.
- """
- all_keys = {
- 'title', 'type', 'description', 'url', 'timestamp',
- 'color', 'footer', 'image', 'thumbnail', 'video',
- 'provider', 'author', 'fields'
- }
- one_required_of = {'description', 'fields', 'image', 'title', 'video'}
- field_validators = {
- 'title': (
- MinLengthValidator(
- limit_value=1,
- message="Embed title must not be empty."
- ),
- MaxLengthValidator(limit_value=256)
- ),
- 'description': (MaxLengthValidator(limit_value=4096),),
- 'fields': (
- MaxLengthValidator(limit_value=25),
- validate_embed_fields
- ),
- 'footer': (validate_embed_footer,),
- 'author': (validate_embed_author,)
- }
-
- if not embed:
- raise ValidationError("Tag embed must not be empty.")
-
- elif not isinstance(embed, Mapping):
- raise ValidationError("Tag embed must be a mapping.")
-
- elif not any(field in embed for field in one_required_of):
- raise ValidationError(f"Tag embed must contain one of the fields {one_required_of}.")
-
- for required_key in one_required_of:
- if required_key in embed and not embed[required_key]:
- raise ValidationError(f"Key {required_key!r} must not be empty.")
-
- for field_name, value in embed.items():
- if field_name not in all_keys:
- raise ValidationError(f"Unknown field name: {field_name!r}")
-
- if field_name in field_validators:
- for validator in field_validators[field_name]:
- validator(value)
diff --git a/pydis_site/apps/api/pagination.py b/pydis_site/apps/api/pagination.py
index 2a325460..61707d33 100644
--- a/pydis_site/apps/api/pagination.py
+++ b/pydis_site/apps/api/pagination.py
@@ -1,7 +1,6 @@
-import typing
-
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
+from rest_framework.utils.serializer_helpers import ReturnList
class LimitOffsetPaginationExtended(LimitOffsetPagination):
@@ -44,6 +43,6 @@ class LimitOffsetPaginationExtended(LimitOffsetPagination):
default_limit = 100
- def get_paginated_response(self, data: typing.Any) -> Response:
+ def get_paginated_response(self, data: ReturnList) -> Response:
"""Override to skip metadata i.e. `count`, `next`, and `previous`."""
return Response(data)
diff --git a/pydis_site/apps/api/serializers.py b/pydis_site/apps/api/serializers.py
index e53ccffa..9228c1f4 100644
--- a/pydis_site/apps/api/serializers.py
+++ b/pydis_site/apps/api/serializers.py
@@ -176,6 +176,7 @@ class InfractionSerializer(ModelSerializer):
fields = (
'id',
'inserted_at',
+ 'last_applied',
'expires_at',
'active',
'user',
diff --git a/pydis_site/apps/api/tests/migrations/__init__.py b/pydis_site/apps/api/tests/migrations/__init__.py
deleted file mode 100644
index 38e42ffc..00000000
--- a/pydis_site/apps/api/tests/migrations/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""This submodule contains tests for functions used in data migrations."""
diff --git a/pydis_site/apps/api/tests/migrations/base.py b/pydis_site/apps/api/tests/migrations/base.py
deleted file mode 100644
index 0c0a5bd0..00000000
--- a/pydis_site/apps/api/tests/migrations/base.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""Includes utilities for testing migrations."""
-from django.db import connection
-from django.db.migrations.executor import MigrationExecutor
-from django.test import TestCase
-
-
-class MigrationsTestCase(TestCase):
- """
- A `TestCase` subclass to test migration files.
-
- To be able to properly test a migration, we will need to inject data into the test database
- before the migrations we want to test are applied, but after the older migrations have been
- applied. This makes sure that we are testing "as if" we were actually applying this migration
- to a database in the state it was in before introducing the new migration.
-
- To set up a MigrationsTestCase, create a subclass of this class and set the following
- class-level attributes:
-
- - app: The name of the app that contains the migrations (e.g., `'api'`)
- - migration_prior: The name* of the last migration file before the migrations you want to test
- - migration_target: The name* of the last migration file we want to test
-
- *) Specify the file names without a path or the `.py` file extension.
-
- Additionally, overwrite the `setUpMigrationData` in the subclass to inject data into the
- database before the migrations we want to test are applied. Please read the docstring of the
- method for more information. An optional hook, `setUpPostMigrationData` is also provided.
- """
-
- # These class-level attributes should be set in classes that inherit from this base class.
- app = None
- migration_prior = None
- migration_target = None
-
- @classmethod
- def setUpTestData(cls):
- """
- Injects data into the test database prior to the migration we're trying to test.
-
- This class methods reverts the test database back to the state of the last migration file
- prior to the migrations we want to test. It will then allow the user to inject data into the
- test database by calling the `setUpMigrationData` hook. After the data has been injected, it
- will apply the migrations we want to test and call the `setUpPostMigrationData` hook. The
- user can now test if the migration correctly migrated the injected test data.
- """
- if not cls.app:
- raise ValueError("The `app` attribute was not set.")
-
- if not cls.migration_prior or not cls.migration_target:
- raise ValueError("Both ` migration_prior` and `migration_target` need to be set.")
-
- cls.migrate_from = [(cls.app, cls.migration_prior)]
- cls.migrate_to = [(cls.app, cls.migration_target)]
-
- # Reverse to database state prior to the migrations we want to test
- executor = MigrationExecutor(connection)
- executor.migrate(cls.migrate_from)
-
- # Call the data injection hook with the current state of the project
- old_apps = executor.loader.project_state(cls.migrate_from).apps
- cls.setUpMigrationData(old_apps)
-
- # Run the migrations we want to test
- executor = MigrationExecutor(connection)
- executor.loader.build_graph()
- executor.migrate(cls.migrate_to)
-
- # Save the project state so we're able to work with the correct model states
- cls.apps = executor.loader.project_state(cls.migrate_to).apps
-
- # Call `setUpPostMigrationData` to potentially set up post migration data used in testing
- cls.setUpPostMigrationData(cls.apps)
-
- @classmethod
- def setUpMigrationData(cls, apps):
- """
- Override this method to inject data into the test database before the migration is applied.
-
- This method will be called after setting up the database according to the migrations that
- come before the migration(s) we are trying to test, but before the to-be-tested migration(s)
- are applied. This allows us to simulate a database state just prior to the migrations we are
- trying to test.
-
- To make sure we're creating objects according to the state the models were in at this point
- in the migration history, use `apps.get_model(app_name: str, model_name: str)` to get the
- appropriate model, e.g.:
-
- >>> Infraction = apps.get_model('api', 'Infraction')
- """
- pass
-
- @classmethod
- def setUpPostMigrationData(cls, apps):
- """
- Set up additional test data after the target migration has been applied.
-
- Use `apps.get_model(app_name: str, model_name: str)` to get the correct instances of the
- model classes:
-
- >>> Infraction = apps.get_model('api', 'Infraction')
- """
- pass
diff --git a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py b/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py
deleted file mode 100644
index 8dc29b34..00000000
--- a/pydis_site/apps/api/tests/migrations/test_active_infraction_migration.py
+++ /dev/null
@@ -1,496 +0,0 @@
-"""Tests for the data migration in `filename`."""
-import logging
-from collections import ChainMap, namedtuple
-from datetime import timedelta
-from itertools import count
-from typing import Dict, Iterable, Type, Union
-
-from django.db.models import Q
-from django.forms.models import model_to_dict
-from django.utils import timezone
-
-from pydis_site.apps.api.models import Infraction, User
-from .base import MigrationsTestCase
-
-log = logging.getLogger(__name__)
-log.setLevel(logging.DEBUG)
-
-
-InfractionHistory = namedtuple('InfractionHistory', ("user_id", "infraction_history"))
-
-
-class InfractionFactory:
- """Factory that creates infractions for a User instance."""
-
- infraction_id = count(1)
- user_id = count(1)
- default_values = {
- 'active': True,
- 'expires_at': None,
- 'hidden': False,
- }
-
- @classmethod
- def create(
- cls,
- actor: User,
- infractions: Iterable[Dict[str, Union[str, int, bool]]],
- infraction_model: Type[Infraction] = Infraction,
- user_model: Type[User] = User,
- ) -> InfractionHistory:
- """
- Creates `infractions` for the `user` with the given `actor`.
-
- The `infractions` dictionary can contain the following fields:
- - `type` (required)
- - `active` (default: True)
- - `expires_at` (default: None; i.e, permanent)
- - `hidden` (default: False).
-
- The parameters `infraction_model` and `user_model` can be used to pass in an instance of
- both model classes from a different migration/project state.
- """
- user_id = next(cls.user_id)
- user = user_model.objects.create(
- id=user_id,
- name=f"Infracted user {user_id}",
- discriminator=user_id,
- avatar_hash=None,
- )
- infraction_history = []
-
- for infraction in infractions:
- infraction = dict(infraction)
- infraction["id"] = next(cls.infraction_id)
- infraction = ChainMap(infraction, cls.default_values)
- new_infraction = infraction_model.objects.create(
- user=user,
- actor=actor,
- type=infraction["type"],
- reason=f"`{infraction['type']}` infraction (ID: {infraction['id']} of {user}",
- active=infraction['active'],
- hidden=infraction['hidden'],
- expires_at=infraction['expires_at'],
- )
- infraction_history.append(new_infraction)
-
- return InfractionHistory(user_id=user_id, infraction_history=infraction_history)
-
-
-class InfractionFactoryTests(MigrationsTestCase):
- """Tests for the InfractionFactory."""
-
- app = "api"
- migration_prior = "0046_reminder_jump_url"
- migration_target = "0046_reminder_jump_url"
-
- @classmethod
- def setUpPostMigrationData(cls, apps):
- """Create a default actor for all infractions."""
- cls.infraction_model = apps.get_model('api', 'Infraction')
- cls.user_model = apps.get_model('api', 'User')
-
- cls.actor = cls.user_model.objects.create(
- id=9999,
- name="Unknown Moderator",
- discriminator=1040,
- avatar_hash=None,
- )
-
- def test_infraction_factory_total_count(self):
- """Does the test database hold as many infractions as we tried to create?"""
- InfractionFactory.create(
- actor=self.actor,
- infractions=(
- {'type': 'kick', 'active': False, 'hidden': False},
- {'type': 'ban', 'active': True, 'hidden': False},
- {'type': 'note', 'active': False, 'hidden': True},
- ),
- infraction_model=self.infraction_model,
- user_model=self.user_model,
- )
- database_count = Infraction.objects.all().count()
- self.assertEqual(3, database_count)
-
- def test_infraction_factory_multiple_users(self):
- """Does the test database hold as many infractions as we tried to create?"""
- for _user in range(5):
- InfractionFactory.create(
- actor=self.actor,
- infractions=(
- {'type': 'kick', 'active': False, 'hidden': True},
- {'type': 'ban', 'active': True, 'hidden': False},
- ),
- infraction_model=self.infraction_model,
- user_model=self.user_model,
- )
-
- # Check if infractions and users are recorded properly in the database
- database_count = Infraction.objects.all().count()
- self.assertEqual(database_count, 10)
-
- user_count = User.objects.all().count()
- self.assertEqual(user_count, 5 + 1)
-
- def test_infraction_factory_sets_correct_fields(self):
- """Does the InfractionFactory set the correct attributes?"""
- infractions = (
- {
- 'type': 'note',
- 'active': False,
- 'hidden': True,
- 'expires_at': timezone.now()
- },
- {'type': 'warning', 'active': False, 'hidden': False, 'expires_at': None},
- {'type': 'watch', 'active': False, 'hidden': True, 'expires_at': None},
- {'type': 'mute', 'active': True, 'hidden': False, 'expires_at': None},
- {'type': 'kick', 'active': True, 'hidden': True, 'expires_at': None},
- {'type': 'ban', 'active': True, 'hidden': False, 'expires_at': None},
- {
- 'type': 'superstar',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now()
- },
- )
-
- InfractionFactory.create(
- actor=self.actor,
- infractions=infractions,
- infraction_model=self.infraction_model,
- user_model=self.user_model,
- )
-
- for infraction in infractions:
- with self.subTest(**infraction):
- self.assertTrue(Infraction.objects.filter(**infraction).exists())
-
-
-class ActiveInfractionMigrationTests(MigrationsTestCase):
- """
- Tests the active infraction data migration.
-
- The active infraction data migration should do the following things:
-
- 1. migrates all active notes, warnings, and kicks to an inactive status;
- 2. migrates all users with multiple active infractions of a single type to have only one active
- infraction of that type. The infraction with the longest duration stays active.
- """
-
- app = "api"
- migration_prior = "0046_reminder_jump_url"
- migration_target = "0047_active_infractions_migration"
-
- @classmethod
- def setUpMigrationData(cls, apps):
- """Sets up an initial database state that contains the relevant test cases."""
- # Fetch the Infraction and User model in the current migration state
- cls.infraction_model = apps.get_model('api', 'Infraction')
- cls.user_model = apps.get_model('api', 'User')
-
- cls.created_infractions = {}
-
- # Moderator that serves as actor for all infractions
- cls.user_moderator = cls.user_model.objects.create(
- id=9999,
- name="Olivier de Vienne",
- discriminator=1040,
- avatar_hash=None,
- )
-
- # User #1: clean user with no infractions
- cls.created_infractions["no infractions"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=[],
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #2: One inactive note infraction
- cls.created_infractions["one inactive note"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'note', 'active': False, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #3: One active note infraction
- cls.created_infractions["one active note"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'note', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #4: One active and one inactive note infraction
- cls.created_infractions["one active and one inactive note"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'note', 'active': False, 'hidden': True},
- {'type': 'note', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #5: Once active note, one active kick, once active warning
- cls.created_infractions["active note, kick, warning"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'note', 'active': True, 'hidden': True},
- {'type': 'kick', 'active': True, 'hidden': True},
- {'type': 'warning', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #6: One inactive ban and one active ban
- cls.created_infractions["one inactive and one active ban"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'ban', 'active': False, 'hidden': True},
- {'type': 'ban', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #7: Two active permanent bans
- cls.created_infractions["two active perm bans"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'ban', 'active': True, 'hidden': True},
- {'type': 'ban', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #8: Multiple active temporary bans
- cls.created_infractions["multiple active temp bans"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=1)
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=10)
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=20)
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=5)
- },
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #9: One active permanent ban, two active temporary bans
- cls.created_infractions["active perm, two active temp bans"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=10)
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': None,
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=7)
- },
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #10: One inactive permanent ban, two active temporary bans
- cls.created_infractions["one inactive perm ban, two active temp bans"] = (
- InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=10)
- },
- {
- 'type': 'ban',
- 'active': False,
- 'hidden': True,
- 'expires_at': None,
- },
- {
- 'type': 'ban',
- 'active': True,
- 'hidden': True,
- 'expires_at': timezone.now() + timedelta(days=7)
- },
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
- )
-
- # User #11: Active ban, active mute, active superstar
- cls.created_infractions["active ban, mute, and superstar"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'ban', 'active': True, 'hidden': True},
- {'type': 'mute', 'active': True, 'hidden': True},
- {'type': 'superstar', 'active': True, 'hidden': True},
- {'type': 'watch', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- # User #12: Multiple active bans, active mutes, active superstars
- cls.created_infractions["multiple active bans, mutes, stars"] = InfractionFactory.create(
- actor=cls.user_moderator,
- infractions=(
- {'type': 'ban', 'active': True, 'hidden': True},
- {'type': 'ban', 'active': True, 'hidden': True},
- {'type': 'ban', 'active': True, 'hidden': True},
- {'type': 'mute', 'active': True, 'hidden': True},
- {'type': 'mute', 'active': True, 'hidden': True},
- {'type': 'mute', 'active': True, 'hidden': True},
- {'type': 'superstar', 'active': True, 'hidden': True},
- {'type': 'superstar', 'active': True, 'hidden': True},
- {'type': 'superstar', 'active': True, 'hidden': True},
- {'type': 'watch', 'active': True, 'hidden': True},
- {'type': 'watch', 'active': True, 'hidden': True},
- {'type': 'watch', 'active': True, 'hidden': True},
- ),
- infraction_model=cls.infraction_model,
- user_model=cls.user_model,
- )
-
- def test_all_never_active_types_became_inactive(self):
- """Are all infractions of a non-active type inactive after the migration?"""
- inactive_type_query = Q(type="note") | Q(type="warning") | Q(type="kick")
- self.assertFalse(
- self.infraction_model.objects.filter(inactive_type_query, active=True).exists()
- )
-
- def test_migration_left_clean_user_without_infractions(self):
- """Do users without infractions have no infractions after the migration?"""
- user_id, infraction_history = self.created_infractions["no infractions"]
- self.assertFalse(
- self.infraction_model.objects.filter(user__id=user_id).exists()
- )
-
- def test_migration_left_user_with_inactive_note_untouched(self):
- """Did the migration leave users with only an inactive note untouched?"""
- user_id, infraction_history = self.created_infractions["one inactive note"]
- inactive_note = infraction_history[0]
- self.assertTrue(
- self.infraction_model.objects.filter(**model_to_dict(inactive_note)).exists()
- )
-
- def test_migration_only_touched_active_field_of_active_note(self):
- """Does the migration only change the `active` field?"""
- user_id, infraction_history = self.created_infractions["one active note"]
- note = model_to_dict(infraction_history[0])
- note['active'] = False
- self.assertTrue(
- self.infraction_model.objects.filter(**note).exists()
- )
-
- def test_migration_only_touched_active_field_of_active_note_left_inactive_untouched(self):
- """Does the migration only change the `active` field of active notes?"""
- user_id, infraction_history = self.created_infractions["one active and one inactive note"]
- for note in infraction_history:
- with self.subTest(active=note.active):
- note = model_to_dict(note)
- note['active'] = False
- self.assertTrue(
- self.infraction_model.objects.filter(**note).exists()
- )
-
- def test_migration_migrates_all_nonactive_types_to_inactive(self):
- """Do we set the `active` field of all non-active infractions to `False`?"""
- user_id, infraction_history = self.created_infractions["active note, kick, warning"]
- self.assertFalse(
- self.infraction_model.objects.filter(user__id=user_id, active=True).exists()
- )
-
- def test_migration_leaves_user_with_one_active_ban_untouched(self):
- """Do we leave a user with one active and one inactive ban untouched?"""
- user_id, infraction_history = self.created_infractions["one inactive and one active ban"]
- for infraction in infraction_history:
- with self.subTest(active=infraction.active):
- self.assertTrue(
- self.infraction_model.objects.filter(**model_to_dict(infraction)).exists()
- )
-
- def test_migration_turns_double_active_perm_ban_into_single_active_perm_ban(self):
- """Does the migration turn two active permanent bans into one active permanent ban?"""
- user_id, infraction_history = self.created_infractions["two active perm bans"]
- active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count()
- self.assertEqual(active_count, 1)
-
- def test_migration_leaves_temporary_ban_with_longest_duration_active(self):
- """Does the migration turn two active permanent bans into one active permanent ban?"""
- user_id, infraction_history = self.created_infractions["multiple active temp bans"]
- active_ban = self.infraction_model.objects.get(user__id=user_id, active=True)
- self.assertEqual(active_ban.expires_at, infraction_history[2].expires_at)
-
- def test_migration_leaves_permanent_ban_active(self):
- """Does the migration leave the permanent ban active?"""
- user_id, infraction_history = self.created_infractions["active perm, two active temp bans"]
- active_ban = self.infraction_model.objects.get(user__id=user_id, active=True)
- self.assertIsNone(active_ban.expires_at)
-
- def test_migration_leaves_longest_temp_ban_active_with_inactive_permanent_ban(self):
- """Does the longest temp ban stay active, even with an inactive perm ban present?"""
- user_id, infraction_history = self.created_infractions[
- "one inactive perm ban, two active temp bans"
- ]
- active_ban = self.infraction_model.objects.get(user__id=user_id, active=True)
- self.assertEqual(active_ban.expires_at, infraction_history[0].expires_at)
-
- def test_migration_leaves_all_active_types_active_if_one_of_each_exists(self):
- """Do all active infractions stay active if only one of each is present?"""
- user_id, infraction_history = self.created_infractions["active ban, mute, and superstar"]
- active_count = self.infraction_model.objects.filter(user__id=user_id, active=True).count()
- self.assertEqual(active_count, 4)
-
- def test_migration_reduces_all_active_types_to_a_single_active_infraction(self):
- """Do we reduce all of the infraction types to one active infraction?"""
- user_id, infraction_history = self.created_infractions["multiple active bans, mutes, stars"]
- active_infractions = self.infraction_model.objects.filter(user__id=user_id, active=True)
- self.assertEqual(len(active_infractions), 4)
- types_observed = [infraction.type for infraction in active_infractions]
-
- for infraction_type in ('ban', 'mute', 'superstar', 'watch'):
- with self.subTest(type=infraction_type):
- self.assertIn(infraction_type, types_observed)
diff --git a/pydis_site/apps/api/tests/migrations/test_base.py b/pydis_site/apps/api/tests/migrations/test_base.py
deleted file mode 100644
index f69bc92c..00000000
--- a/pydis_site/apps/api/tests/migrations/test_base.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import logging
-from unittest.mock import call, patch
-
-from django.db.migrations.loader import MigrationLoader
-from django.test import TestCase
-
-from .base import MigrationsTestCase, connection
-
-log = logging.getLogger(__name__)
-
-
-class SpanishInquisition(MigrationsTestCase):
- app = "api"
- migration_prior = "scragly"
- migration_target = "kosa"
-
-
-@patch("pydis_site.apps.api.tests.migrations.base.MigrationExecutor")
-class MigrationsTestCaseNoSideEffectsTests(TestCase):
- """Tests the MigrationTestCase class with actual migration side effects disabled."""
-
- def setUp(self):
- """Set up an instance of MigrationsTestCase for use in tests."""
- self.test_case = SpanishInquisition()
-
- def test_missing_app_class_raises_value_error(self, _migration_executor):
- """A MigrationsTestCase subclass should set the class-attribute `app`."""
- class Spam(MigrationsTestCase):
- pass
-
- spam = Spam()
- with self.assertRaises(ValueError, msg="The `app` attribute was not set."):
- spam.setUpTestData()
-
- def test_missing_migration_class_attributes_raise_value_error(self, _migration_executor):
- """A MigrationsTestCase subclass should set both `migration_prior` and `migration_target`"""
- class Eggs(MigrationsTestCase):
- app = "api"
- migration_target = "lemon"
-
- class Bacon(MigrationsTestCase):
- app = "api"
- migration_prior = "mark"
-
- instances = (Eggs(), Bacon())
-
- exception_message = "Both ` migration_prior` and `migration_target` need to be set."
- for instance in instances:
- with self.subTest(
- migration_prior=instance.migration_prior,
- migration_target=instance.migration_target,
- ):
- with self.assertRaises(ValueError, msg=exception_message):
- instance.setUpTestData()
-
- @patch(f"{__name__}.SpanishInquisition.setUpMigrationData")
- @patch(f"{__name__}.SpanishInquisition.setUpPostMigrationData")
- def test_migration_data_hooks_are_called_once(self, pre_hook, post_hook, _migration_executor):
- """The `setUpMigrationData` and `setUpPostMigrationData` hooks should be called once."""
- self.test_case.setUpTestData()
- for hook in (pre_hook, post_hook):
- with self.subTest(hook=repr(hook)):
- hook.assert_called_once()
-
- def test_migration_executor_is_instantiated_twice(self, migration_executor):
- """The `MigrationExecutor` should be instantiated with the database connection twice."""
- self.test_case.setUpTestData()
-
- expected_args = [call(connection), call(connection)]
- self.assertEqual(migration_executor.call_args_list, expected_args)
-
- def test_project_state_is_loaded_for_correct_migration_files_twice(self, migration_executor):
- """The `project_state` should first be loaded with `migrate_from`, then `migrate_to`."""
- self.test_case.setUpTestData()
-
- expected_args = [call(self.test_case.migrate_from), call(self.test_case.migrate_to)]
- self.assertEqual(migration_executor().loader.project_state.call_args_list, expected_args)
-
- def test_loader_build_graph_gets_called_once(self, migration_executor):
- """We should rebuild the migration graph before applying the second set of migrations."""
- self.test_case.setUpTestData()
-
- migration_executor().loader.build_graph.assert_called_once()
-
- def test_migration_executor_migrate_method_is_called_correctly_twice(self, migration_executor):
- """The migrate method of the executor should be called twice with the correct arguments."""
- self.test_case.setUpTestData()
-
- self.assertEqual(migration_executor().migrate.call_count, 2)
- calls = [call([('api', 'scragly')]), call([('api', 'kosa')])]
- migration_executor().migrate.assert_has_calls(calls)
-
-
-class LifeOfBrian(MigrationsTestCase):
- app = "api"
- migration_prior = "0046_reminder_jump_url"
- migration_target = "0048_add_infractions_unique_constraints_active"
-
- @classmethod
- def log_last_migration(cls):
- """Parses the applied migrations dictionary to log the last applied migration."""
- loader = MigrationLoader(connection)
- api_migrations = [
- migration for app, migration in loader.applied_migrations if app == cls.app
- ]
- last_migration = max(api_migrations, key=lambda name: int(name[:4]))
- log.info(f"The last applied migration: {last_migration}")
-
- @classmethod
- def setUpMigrationData(cls, apps):
- """Method that logs the last applied migration at this point."""
- cls.log_last_migration()
-
- @classmethod
- def setUpPostMigrationData(cls, apps):
- """Method that logs the last applied migration at this point."""
- cls.log_last_migration()
-
-
-class MigrationsTestCaseMigrationTest(TestCase):
- """Tests if `MigrationsTestCase` travels to the right points in the migration history."""
-
- def test_migrations_test_case_travels_to_correct_migrations_in_history(self):
- """The test case should first revert to `migration_prior`, then go to `migration_target`."""
- brian = LifeOfBrian()
-
- with self.assertLogs(log, level=logging.INFO) as logs:
- brian.setUpTestData()
-
- self.assertEqual(len(logs.records), 2)
-
- for time_point, record in zip(("migration_prior", "migration_target"), logs.records):
- with self.subTest(time_point=time_point):
- message = f"The last applied migration: {getattr(brian, time_point)}"
- self.assertEqual(record.getMessage(), message)
diff --git a/pydis_site/apps/api/tests/test_filterlists.py b/pydis_site/apps/api/tests/test_filterlists.py
index 5a5bca60..9959617e 100644
--- a/pydis_site/apps/api/tests/test_filterlists.py
+++ b/pydis_site/apps/api/tests/test_filterlists.py
@@ -64,8 +64,8 @@ class FetchTests(AuthenticatedAPITestCase):
self.assertEqual(response.status_code, 200)
for api_type, model_type in zip(response.json(), FilterList.FilterListType.choices):
- self.assertEquals(api_type[0], model_type[0])
- self.assertEquals(api_type[1], model_type[1])
+ self.assertEqual(api_type[0], model_type[0])
+ self.assertEqual(api_type[1], model_type[1])
class CreationTests(AuthenticatedAPITestCase):
diff --git a/pydis_site/apps/api/tests/test_github_utils.py b/pydis_site/apps/api/tests/test_github_utils.py
new file mode 100644
index 00000000..95bafec0
--- /dev/null
+++ b/pydis_site/apps/api/tests/test_github_utils.py
@@ -0,0 +1,286 @@
+import dataclasses
+import datetime
+import typing
+import unittest
+from unittest import mock
+
+import django.test
+import httpx
+import jwt
+import rest_framework.response
+import rest_framework.test
+from django.urls import reverse
+
+from pydis_site import settings
+from .. import github_utils
+
+
+class GeneralUtilityTests(unittest.TestCase):
+ """Test the utility methods which do not fit in another class."""
+
+ def test_token_generation(self):
+ """Test that the a valid JWT token is generated."""
+ def encode(payload: dict, _: str, algorithm: str, *args, **kwargs) -> str:
+ """
+ Intercept the encode method.
+
+ The result is encoded with an algorithm which does not require a PEM key, as it may
+ not be available in testing environments.
+ """
+ self.assertEqual("RS256", algorithm, "The GitHub App JWT must be signed using RS256.")
+ return original_encode(
+ payload, "secret-encoding-key", *args, algorithm="HS256", **kwargs
+ )
+
+ original_encode = jwt.encode
+ with mock.patch("jwt.encode", new=encode):
+ token = github_utils.generate_token()
+ decoded = jwt.decode(token, "secret-encoding-key", algorithms=["HS256"])
+
+ delta = datetime.timedelta(minutes=10)
+ self.assertAlmostEqual(decoded["exp"] - decoded["iat"], delta.total_seconds())
+ self.assertLess(decoded["exp"], (datetime.datetime.now() + delta).timestamp())
+
+
+class CheckRunTests(unittest.TestCase):
+ """Tests the check_run_status utility."""
+
+ run_kwargs: typing.Mapping = {
+ "name": "run_name",
+ "head_sha": "sha",
+ "status": "completed",
+ "conclusion": "success",
+ "created_at": datetime.datetime.utcnow().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+ "artifacts_url": "url",
+ }
+
+ def test_completed_run(self):
+ """Test that an already completed run returns the correct URL."""
+ final_url = "some_url_string_1234"
+
+ kwargs = dict(self.run_kwargs, artifacts_url=final_url)
+ result = github_utils.check_run_status(github_utils.WorkflowRun(**kwargs))
+ self.assertEqual(final_url, result)
+
+ def test_pending_run(self):
+ """Test that a pending run raises the proper exception."""
+ kwargs = dict(self.run_kwargs, status="pending")
+ with self.assertRaises(github_utils.RunPendingError):
+ github_utils.check_run_status(github_utils.WorkflowRun(**kwargs))
+
+ def test_timeout_error(self):
+ """Test that a timeout is declared after a certain duration."""
+ kwargs = dict(self.run_kwargs, status="pending")
+ # Set the creation time to well before the MAX_RUN_TIME
+ # to guarantee the right conclusion
+ kwargs["created_at"] = (
+ datetime.datetime.utcnow() - github_utils.MAX_RUN_TIME - datetime.timedelta(minutes=10)
+ ).strftime(settings.GITHUB_TIMESTAMP_FORMAT)
+
+ with self.assertRaises(github_utils.RunTimeoutError):
+ github_utils.check_run_status(github_utils.WorkflowRun(**kwargs))
+
+ def test_failed_run(self):
+ """Test that a failed run raises the proper exception."""
+ kwargs = dict(self.run_kwargs, conclusion="failed")
+ with self.assertRaises(github_utils.ActionFailedError):
+ github_utils.check_run_status(github_utils.WorkflowRun(**kwargs))
+
+
+def get_response_authorize(_: httpx.Client, request: httpx.Request, **__) -> httpx.Response:
+ """
+ Helper method for the authorize tests.
+
+ Requests are intercepted before being sent out, and the appropriate responses are returned.
+ """
+ path = request.url.path
+ auth = request.headers.get("Authorization")
+
+ if request.method == "GET":
+ if path == "/app/installations":
+ if auth == "bearer JWT initial token":
+ return httpx.Response(200, request=request, json=[{
+ "account": {"login": "VALID_OWNER"},
+ "access_tokens_url": "https://example.com/ACCESS_TOKEN_URL"
+ }])
+ else:
+ return httpx.Response(
+ 401, json={"error": "auth app/installations"}, request=request
+ )
+
+ elif path == "/installation/repositories":
+ if auth == "bearer app access token":
+ return httpx.Response(200, request=request, json={
+ "repositories": [{
+ "name": "VALID_REPO"
+ }]
+ })
+ else: # pragma: no cover
+ return httpx.Response(
+ 401, json={"error": "auth installation/repositories"}, request=request
+ )
+
+ elif request.method == "POST":
+ if path == "/ACCESS_TOKEN_URL":
+ if auth == "bearer JWT initial token":
+ return httpx.Response(200, request=request, json={"token": "app access token"})
+ else: # pragma: no cover
+ return httpx.Response(401, json={"error": "auth access_token"}, request=request)
+
+ # Reaching this point means something has gone wrong
+ return httpx.Response(500, request=request) # pragma: no cover
+
+
[email protected]("httpx.Client.send", new=get_response_authorize)
[email protected](github_utils, "generate_token", new=mock.Mock(return_value="JWT initial token"))
+class AuthorizeTests(unittest.TestCase):
+ """Test the authorize utility."""
+
+ def test_invalid_apps_auth(self):
+ """Test that an exception is raised if authorization was attempted with an invalid token."""
+ with mock.patch.object(github_utils, "generate_token", return_value="Invalid token"):
+ with self.assertRaises(httpx.HTTPStatusError) as error:
+ github_utils.authorize("VALID_OWNER", "VALID_REPO")
+
+ exception: httpx.HTTPStatusError = error.exception
+ self.assertEqual(401, exception.response.status_code)
+ self.assertEqual("auth app/installations", exception.response.json()["error"])
+
+ def test_missing_repo(self):
+ """Test that an exception is raised when the selected owner or repo are not available."""
+ with self.assertRaises(github_utils.NotFoundError):
+ github_utils.authorize("INVALID_OWNER", "VALID_REPO")
+ with self.assertRaises(github_utils.NotFoundError):
+ github_utils.authorize("VALID_OWNER", "INVALID_REPO")
+
+ def test_valid_authorization(self):
+ """Test that an accessible repository can be accessed."""
+ client = github_utils.authorize("VALID_OWNER", "VALID_REPO")
+ self.assertEqual("bearer app access token", client.headers.get("Authorization"))
+
+
+class ArtifactFetcherTests(unittest.TestCase):
+ """Test the get_artifact utility."""
+
+ @staticmethod
+ def get_response_get_artifact(request: httpx.Request, **_) -> httpx.Response:
+ """
+ Helper method for the get_artifact tests.
+
+ Requests are intercepted before being sent out, and the appropriate responses are returned.
+ """
+ path = request.url.path
+
+ if "force_error" in path:
+ return httpx.Response(404, request=request)
+
+ if request.method == "GET":
+ if path == "/repos/owner/repo/actions/runs":
+ run = github_utils.WorkflowRun(
+ name="action_name",
+ head_sha="action_sha",
+ created_at=datetime.datetime.now().strftime(settings.GITHUB_TIMESTAMP_FORMAT),
+ status="completed",
+ conclusion="success",
+ artifacts_url="artifacts_url"
+ )
+ return httpx.Response(
+ 200, request=request, json={"workflow_runs": [dataclasses.asdict(run)]}
+ )
+ elif path == "/artifact_url":
+ return httpx.Response(
+ 200, request=request, json={"artifacts": [{
+ "name": "artifact_name",
+ "archive_download_url": "artifact_download_url"
+ }]}
+ )
+ elif path == "/artifact_download_url":
+ response = httpx.Response(302, request=request)
+ response.next_request = httpx.Request(
+ "GET",
+ httpx.URL("https://final_download.url")
+ )
+ return response
+
+ # Reaching this point means something has gone wrong
+ return httpx.Response(500, request=request) # pragma: no cover
+
+ def setUp(self) -> None:
+ self.call_args = ["owner", "repo", "action_sha", "action_name", "artifact_name"]
+ self.client = httpx.Client(base_url="https://example.com")
+
+ self.patchers = [
+ mock.patch.object(self.client, "send", new=self.get_response_get_artifact),
+ mock.patch.object(github_utils, "authorize", return_value=self.client),
+ mock.patch.object(github_utils, "check_run_status", return_value="artifact_url"),
+ ]
+
+ for patcher in self.patchers:
+ patcher.start()
+
+ def tearDown(self) -> None:
+ for patcher in self.patchers:
+ patcher.stop()
+
+ def test_client_closed_on_errors(self):
+ """Test that the client is terminated even if an error occurs at some point."""
+ self.call_args[0] = "force_error"
+ with self.assertRaises(httpx.HTTPStatusError):
+ github_utils.get_artifact(*self.call_args)
+ self.assertTrue(self.client.is_closed)
+
+ def test_missing(self):
+ """Test that an exception is raised if the requested artifact was not found."""
+ cases = (
+ "invalid sha",
+ "invalid action name",
+ "invalid artifact name",
+ )
+ for i, name in enumerate(cases, 2):
+ with self.subTest(f"Test {name} raises an error"):
+ new_args = self.call_args.copy()
+ new_args[i] = name
+
+ with self.assertRaises(github_utils.NotFoundError):
+ github_utils.get_artifact(*new_args)
+
+ def test_valid(self):
+ """Test that the correct download URL is returned for valid requests."""
+ url = github_utils.get_artifact(*self.call_args)
+ self.assertEqual("https://final_download.url", url)
+ self.assertTrue(self.client.is_closed)
+
+
[email protected](github_utils, "get_artifact")
+class GitHubArtifactViewTests(django.test.TestCase):
+ """Test the GitHub artifact fetch API view."""
+
+ def setUp(self):
+ self.kwargs = {
+ "owner": "test_owner",
+ "repo": "test_repo",
+ "sha": "test_sha",
+ "action_name": "test_action",
+ "artifact_name": "test_artifact",
+ }
+ self.url = reverse("api:github-artifacts", kwargs=self.kwargs)
+
+ def test_correct_artifact(self, artifact_mock: mock.Mock):
+ """Test a proper response is returned with proper input."""
+ artifact_mock.return_value = "final download url"
+ result = self.client.get(self.url)
+
+ self.assertIsInstance(result, rest_framework.response.Response)
+ self.assertEqual({"url": artifact_mock.return_value}, result.data)
+
+ def test_failed_fetch(self, artifact_mock: mock.Mock):
+ """Test that a proper error is returned when the request fails."""
+ artifact_mock.side_effect = github_utils.NotFoundError("Test error message")
+ result = self.client.get(self.url)
+
+ self.assertIsInstance(result, rest_framework.response.Response)
+ self.assertEqual({
+ "error_type": github_utils.NotFoundError.__name__,
+ "error": "Test error message",
+ "requested_resource": "/".join(self.kwargs.values())
+ }, result.data)
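
For orientation, the tests in `GeneralUtilityTests` above pin down the observable behaviour of `github_utils.generate_token`: it must return a GitHub App JWT signed with RS256 whose expiry lies roughly ten minutes after its issue time. The snippet below is only a sketch reconstructed from those assertions, not the module's actual implementation; the `app_id` and `signing_key` parameters are placeholders for whatever configuration the real function reads.

```python
# Sketch inferred from the assertions in GeneralUtilityTests; not the actual
# implementation in pydis_site.apps.api.github_utils.
import datetime

import jwt


def generate_token_sketch(app_id: str, signing_key: str) -> str:
    """Build a short-lived GitHub App JWT (RS256, ~10 minute lifetime)."""
    now = datetime.datetime.now(tz=datetime.timezone.utc)
    payload = {
        "iat": int(now.timestamp()),                                     # issued at
        "exp": int((now + datetime.timedelta(minutes=10)).timestamp()),  # expires in 10 minutes
        "iss": app_id,                                                   # the GitHub App ID
    }
    # RS256 needs the App's private PEM key; the tests above swap it for HS256
    # precisely because such a key is not available in testing environments.
    return jwt.encode(payload, signing_key, algorithm="RS256")
```
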
diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py
index f1107734..89ee4e23 100644
--- a/pydis_site/apps/api/tests/test_infractions.py
+++ b/pydis_site/apps/api/tests/test_infractions.py
@@ -56,15 +56,17 @@ class InfractionTests(AuthenticatedAPITestCase):
type='ban',
reason='He terk my jerb!',
hidden=True,
+ inserted_at=dt(2020, 10, 10, 0, 0, 0, tzinfo=timezone.utc),
expires_at=dt(5018, 11, 20, 15, 52, tzinfo=timezone.utc),
- active=True
+ active=True,
)
cls.ban_inactive = Infraction.objects.create(
user_id=cls.user.id,
actor_id=cls.user.id,
type='ban',
reason='James is an ass, and we won\'t be working with him again.',
- active=False
+ active=False,
+ inserted_at=dt(2020, 10, 10, 0, 1, 0, tzinfo=timezone.utc),
)
cls.mute_permanent = Infraction.objects.create(
user_id=cls.user.id,
@@ -72,7 +74,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='mute',
reason='He has a filthy mouth and I am his soap.',
active=True,
- expires_at=None
+ inserted_at=dt(2020, 10, 10, 0, 2, 0, tzinfo=timezone.utc),
+ expires_at=None,
)
cls.superstar_expires_soon = Infraction.objects.create(
user_id=cls.user.id,
@@ -80,7 +83,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='superstar',
reason='This one doesn\'t matter anymore.',
active=True,
- expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5)
+ inserted_at=dt(2020, 10, 10, 0, 3, 0, tzinfo=timezone.utc),
+ expires_at=dt.now(timezone.utc) + datetime.timedelta(hours=5),
)
cls.voiceban_expires_later = Infraction.objects.create(
user_id=cls.user.id,
@@ -88,7 +92,8 @@ class InfractionTests(AuthenticatedAPITestCase):
type='voice_ban',
reason='Jet engine mic',
active=True,
- expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5)
+ inserted_at=dt(2020, 10, 10, 0, 4, 0, tzinfo=timezone.utc),
+ expires_at=dt.now(timezone.utc) + datetime.timedelta(days=5),
)
def test_list_all(self):
diff --git a/pydis_site/apps/api/tests/test_models.py b/pydis_site/apps/api/tests/test_models.py
index 0fad467c..c07d59cd 100644
--- a/pydis_site/apps/api/tests/test_models.py
+++ b/pydis_site/apps/api/tests/test_models.py
@@ -7,7 +7,6 @@ from pydis_site.apps.api.models import (
DeletedMessage,
DocumentationLink,
Infraction,
- Message,
MessageDeletionContext,
Nomination,
NominationEntry,
@@ -116,17 +115,6 @@ class StringDunderMethodTests(SimpleTestCase):
colour=0x5, permissions=0,
position=10,
),
- Message(
- id=45,
- author=User(
- id=444,
- name='bill',
- discriminator=5,
- ),
- channel_id=666,
- content="wooey",
- embeds=[]
- ),
MessageDeletionContext(
actor=User(
id=5555,
diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py
index 5d10069d..d86e80bb 100644
--- a/pydis_site/apps/api/tests/test_users.py
+++ b/pydis_site/apps/api/tests/test_users.py
@@ -502,6 +502,90 @@ class UserMetricityTests(AuthenticatedAPITestCase):
"total_messages": total_messages
})
+ def test_metricity_activity_data(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+ self.metricity.total_messages_in_past_n_days.return_value = [(0, 10)]
+
+ # When
+ url = reverse("api:bot:user-metricity-activity-data")
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 200)
+ self.metricity.total_messages_in_past_n_days.assert_called_once_with(["0", "1"], 10)
+ self.assertEqual(response.json(), {"0": 10, "1": 0})
+
+ def test_metricity_activity_data_invalid_days(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse("api:bot:user-metricity-activity-data")
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ QUERY_STRING="days=fifty",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {"days": ["This query parameter must be an integer."]})
+
+ def test_metricity_activity_data_no_days(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ data=[0, 1],
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {'days': ["This query parameter is required."]})
+
+ def test_metricity_activity_data_no_users(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), ['Expected a list of items but got type "dict".'])
+
+ def test_metricity_activity_data_invalid_users(self):
+ # Given
+ self.mock_no_metricity_user() # Other functions shouldn't be used.
+
+ # When
+ url = reverse('api:bot:user-metricity-activity-data')
+ response = self.client.post(
+ url,
+ data=[123, 'username'],
+ QUERY_STRING="days=10",
+ )
+
+ # Then
+ self.assertEqual(response.status_code, 400)
+ self.metricity.total_messages_in_past_n_days.assert_not_called()
+ self.assertEqual(response.json(), {'1': ['A valid integer is required.']})
+
def mock_metricity_user(self, joined_at, total_messages, total_blocks, top_channel_activity):
patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity")
self.metricity = patcher.start()
diff --git a/pydis_site/apps/api/tests/test_validators.py b/pydis_site/apps/api/tests/test_validators.py
index 551cc2aa..8c46fcbc 100644
--- a/pydis_site/apps/api/tests/test_validators.py
+++ b/pydis_site/apps/api/tests/test_validators.py
@@ -5,7 +5,6 @@ from django.test import TestCase
from ..models.bot.bot_setting import validate_bot_setting_name
from ..models.bot.offensive_message import future_date_validator
-from ..models.utils import validate_embed
REQUIRED_KEYS = (
@@ -22,234 +21,6 @@ class BotSettingValidatorTests(TestCase):
validate_bot_setting_name('bad name')
-class TagEmbedValidatorTests(TestCase):
- def test_rejects_non_mapping(self):
- with self.assertRaises(ValidationError):
- validate_embed('non-empty non-mapping')
-
- def test_rejects_missing_required_keys(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'unknown': "key"
- })
-
- def test_rejects_one_correct_one_incorrect(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'provider': "??",
- 'title': ""
- })
-
- def test_rejects_empty_required_key(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': ''
- })
-
- def test_rejects_list_as_embed(self):
- with self.assertRaises(ValidationError):
- validate_embed([])
-
- def test_rejects_required_keys_and_unknown_keys(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "the duck walked up to the lemonade stand",
- 'and': "he said to the man running the stand"
- })
-
- def test_rejects_too_long_title(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': 'a' * 257
- })
-
- def test_rejects_too_many_fields(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [{} for _ in range(26)]
- })
-
- def test_rejects_too_long_description(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'description': 'd' * 4097
- })
-
- def test_allows_valid_embed(self):
- validate_embed({
- 'title': "My embed",
- 'description': "look at my embed, my embed is amazing"
- })
-
- def test_allows_unvalidated_fields(self):
- validate_embed({
- 'title': "My embed",
- 'provider': "what am I??"
- })
-
- def test_rejects_fields_as_list_of_non_mappings(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': ['abc']
- })
-
- def test_rejects_fields_with_unknown_fields(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [
- {
- 'what': "is this field"
- }
- ]
- })
-
- def test_rejects_fields_with_too_long_name(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [
- {
- 'name': "a" * 257
- }
- ]
- })
-
- def test_rejects_one_correct_one_incorrect_field(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [
- {
- 'name': "Totally valid",
- 'value': "LOOK AT ME"
- },
- {
- 'name': "Totally valid",
- 'value': "LOOK AT ME",
- 'oh': "what is this key?"
- }
- ]
- })
-
- def test_rejects_missing_required_field_field(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [
- {
- 'name': "Totally valid",
- 'inline': True,
- }
- ]
- })
-
- def test_rejects_invalid_inline_field_field(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'fields': [
- {
- 'name': "Totally valid",
- 'value': "LOOK AT ME",
- 'inline': "Totally not a boolean",
- }
- ]
- })
-
- def test_allows_valid_fields(self):
- validate_embed({
- 'fields': [
- {
- 'name': "valid",
- 'value': "field",
- },
- {
- 'name': "valid",
- 'value': "field",
- 'inline': False,
- },
- {
- 'name': "valid",
- 'value': "field",
- 'inline': True,
- },
- ]
- })
-
- def test_rejects_footer_as_non_mapping(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'footer': []
- })
-
- def test_rejects_footer_with_unknown_fields(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'footer': {
- 'duck': "quack"
- }
- })
-
- def test_rejects_footer_with_empty_text(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'footer': {
- 'text': ""
- }
- })
-
- def test_allows_footer_with_proper_values(self):
- validate_embed({
- 'title': "whatever",
- 'footer': {
- 'text': "django good"
- }
- })
-
- def test_rejects_author_as_non_mapping(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'author': []
- })
-
- def test_rejects_author_with_unknown_field(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'author': {
- 'field': "that is unknown"
- }
- })
-
- def test_rejects_author_with_empty_name(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'author': {
- 'name': ""
- }
- })
-
- def test_rejects_author_with_one_correct_one_incorrect(self):
- with self.assertRaises(ValidationError):
- validate_embed({
- 'title': "whatever",
- 'author': {
- # Relies on "dictionary insertion order remembering" (D.I.O.R.) behaviour
- 'url': "bobswebsite.com",
- 'name': ""
- }
- })
-
- def test_allows_author_with_proper_values(self):
- validate_embed({
- 'title': "whatever",
- 'author': {
- 'name': "Bob"
- }
- })
-
-
class OffensiveMessageValidatorsTests(TestCase):
def test_accepts_future_date(self):
future_date_validator(datetime(3000, 1, 1, tzinfo=timezone.utc))
diff --git a/pydis_site/apps/api/urls.py b/pydis_site/apps/api/urls.py
index 1e564b29..2757f176 100644
--- a/pydis_site/apps/api/urls.py
+++ b/pydis_site/apps/api/urls.py
@@ -1,7 +1,7 @@
from django.urls import include, path
from rest_framework.routers import DefaultRouter
-from .views import HealthcheckView, RulesView
+from .views import GitHubArtifactsView, HealthcheckView, RulesView
from .viewsets import (
AocAccountLinkViewSet,
AocCompletionistBlockViewSet,
@@ -86,5 +86,10 @@ urlpatterns = (
# from django_hosts.resolvers import reverse
path('bot/', include((bot_router.urls, 'api'), namespace='bot')),
path('healthcheck', HealthcheckView.as_view(), name='healthcheck'),
- path('rules', RulesView.as_view(), name='rules')
+ path('rules', RulesView.as_view(), name='rules'),
+ path(
+ 'github/artifact/<str:owner>/<str:repo>/<str:sha>/<str:action_name>/<str:artifact_name>',
+ GitHubArtifactsView.as_view(),
+ name="github-artifacts"
+ ),
)
diff --git a/pydis_site/apps/api/views.py b/pydis_site/apps/api/views.py
index 816463f6..34167a38 100644
--- a/pydis_site/apps/api/views.py
+++ b/pydis_site/apps/api/views.py
@@ -1,7 +1,10 @@
from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
+from . import github_utils
+
class HealthcheckView(APIView):
"""
@@ -34,12 +37,14 @@ class RulesView(APIView):
## Routes
### GET /rules
- Returns a JSON array containing the server's rules:
+ Returns a JSON array containing the server's rules
+ and keywords relating to each rule.
+ Example response:
>>> [
- ... "Eat candy.",
- ... "Wake up at 4 AM.",
- ... "Take your medicine."
+ ... ["Eat candy.", ["candy", "sweets"]],
+ ... ["Wake up at 4 AM.", ["wake_up", "early", "early_bird"]],
+ ... ["Take your medicine.", ["medicine", "health"]]
... ]
    Since some of the rules require links, this view
@@ -97,6 +102,12 @@ class RulesView(APIView):
# `format` here is the result format, we have a link format here instead.
def get(self, request, format=None): # noqa: D102,ANN001,ANN201
+ """
+ Returns a list of our community rules coupled with their keywords.
+
+        Each item in the returned list is a tuple with the rule as the first item
+        and a list of keywords that match that rule as the second item.
+ """
link_format = request.query_params.get('link_format', 'md')
if link_format not in ('html', 'md'):
raise ParseError(
@@ -121,34 +132,93 @@ class RulesView(APIView):
return Response([
(
- f"Follow the {pydis_coc}."
+ f"Follow the {pydis_coc}.",
+ ["coc", "conduct", "code"]
),
(
- f"Follow the {discord_community_guidelines} and {discord_tos}."
+ f"Follow the {discord_community_guidelines} and {discord_tos}.",
+ ["discord", "guidelines", "discord_tos"]
),
(
- "Respect staff members and listen to their instructions."
+ "Respect staff members and listen to their instructions.",
+ ["respect", "staff", "instructions"]
),
(
"Use English to the best of your ability. "
- "Be polite if someone speaks English imperfectly."
+ "Be polite if someone speaks English imperfectly.",
+ ["english", "language"]
),
(
"Do not provide or request help on projects that may break laws, "
- "breach terms of services, or are malicious or inappropriate."
+ "breach terms of services, or are malicious or inappropriate.",
+ ["infraction", "tos", "breach", "malicious", "inappropriate"]
),
(
- "Do not post unapproved advertising."
+ "Do not post unapproved advertising.",
+ ["ad", "ads", "advert", "advertising"]
),
(
"Keep discussions relevant to the channel topic. "
- "Each channel's description tells you the topic."
+ "Each channel's description tells you the topic.",
+ ["off-topic", "topic", "relevance"]
),
(
"Do not help with ongoing exams. When helping with homework, "
- "help people learn how to do the assignment without doing it for them."
+ "help people learn how to do the assignment without doing it for them.",
+ ["exam", "exams", "assignment", "assignments", "homework"]
),
(
- "Do not offer or ask for paid work of any kind."
+ "Do not offer or ask for paid work of any kind.",
+ ["paid", "work", "money"]
),
])
+
+
+class GitHubArtifactsView(APIView):
+ """
+ Provides utilities for interacting with the GitHub API and obtaining action artifacts.
+
+ ## Routes
+    ### GET /github/artifact/<owner>/<repo>/<sha>/<action_name>/<artifact_name>
+ Returns a download URL for the artifact requested.
+
+ {
+ 'url': 'https://pipelines.actions.githubusercontent.com/...'
+ }
+
+ ### Exceptions
+ In case of an error, the following body will be returned:
+
+ {
+ "error_type": "<error class name>",
+ "error": "<error description>",
+ "requested_resource": "<owner>/<repo>/<sha>/<artifact_name>"
+ }
+
+ ## Authentication
+    Does not require any authentication or permissions.
+ """
+
+ authentication_classes = ()
+ permission_classes = ()
+
+ def get(
+ self,
+ request: Request,
+ *,
+ owner: str,
+ repo: str,
+ sha: str,
+ action_name: str,
+ artifact_name: str
+ ) -> Response:
+ """Return a download URL for the requested artifact."""
+ try:
+ url = github_utils.get_artifact(owner, repo, sha, action_name, artifact_name)
+ return Response({"url": url})
+ except github_utils.ArtifactProcessingError as e:
+ return Response({
+ "error_type": e.__class__.__name__,
+ "error": str(e),
+ "requested_resource": f"{owner}/{repo}/{sha}/{action_name}/{artifact_name}"
+ }, status=e.status)
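
To make the new endpoint concrete, here is a hypothetical client-side call. Only the path shape and the response keys are taken from the docstring and the `urls.py` entry above; the host name is a placeholder.

```python
# Hypothetical consumer of the github-artifacts endpoint; the base URL is a placeholder.
import httpx

API_BASE = "https://example.com/api"  # placeholder host


def fetch_artifact_url(owner: str, repo: str, sha: str, action_name: str, artifact_name: str) -> str:
    """Return the artifact download URL, or raise with the error body on failure."""
    path = f"/github/artifact/{owner}/{repo}/{sha}/{action_name}/{artifact_name}"
    response = httpx.get(API_BASE + path)
    data = response.json()
    if response.status_code == 200:
        return data["url"]
    # Error responses carry error_type, error, and requested_resource.
    raise RuntimeError(f"{data['error_type']}: {data['error']} ({data['requested_resource']})")
```
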
diff --git a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
index 3a4cec60..97efb63c 100644
--- a/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
+++ b/pydis_site/apps/api/viewsets/bot/aoc_completionist_block.py
@@ -70,4 +70,4 @@ class AocCompletionistBlockViewSet(
serializer_class = AocCompletionistBlockSerializer
queryset = AocCompletionistBlock.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ("user__id", "is_blocked")
+ filterset_fields = ("user__id", "is_blocked")
diff --git a/pydis_site/apps/api/viewsets/bot/aoc_link.py b/pydis_site/apps/api/viewsets/bot/aoc_link.py
index 9f22c1a1..3cdc342d 100644
--- a/pydis_site/apps/api/viewsets/bot/aoc_link.py
+++ b/pydis_site/apps/api/viewsets/bot/aoc_link.py
@@ -68,4 +68,4 @@ class AocAccountLinkViewSet(
serializer_class = AocAccountLinkSerializer
queryset = AocAccountLink.objects.all()
filter_backends = (DjangoFilterBackend,)
- filter_fields = ("user__id",)
+ filterset_fields = ("user__id", "aoc_username")
diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py
index 7f31292f..93d29391 100644
--- a/pydis_site/apps/api/viewsets/bot/infraction.py
+++ b/pydis_site/apps/api/viewsets/bot/infraction.py
@@ -1,9 +1,8 @@
-from datetime import datetime
+import datetime
from django.db import IntegrityError
from django.db.models import QuerySet
from django.http.request import HttpRequest
-from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@@ -154,7 +153,7 @@ class InfractionViewSet(
queryset = Infraction.objects.all()
pagination_class = LimitOffsetPaginationExtended
filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter)
- filter_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type')
+ filterset_fields = ('user__id', 'actor__id', 'active', 'hidden', 'type')
search_fields = ('$reason',)
frozen_fields = ('id', 'inserted_at', 'type', 'user', 'actor', 'hidden')
@@ -185,23 +184,21 @@ class InfractionViewSet(
filter_expires_after = self.request.query_params.get('expires_after')
if filter_expires_after:
try:
- expires_after_parsed = datetime.fromisoformat(filter_expires_after)
+ expires_after_parsed = datetime.datetime.fromisoformat(filter_expires_after)
except ValueError:
raise ValidationError({'expires_after': ['failed to convert to datetime']})
- additional_filters['expires_at__gte'] = timezone.make_aware(
- expires_after_parsed,
- timezone=timezone.utc,
+ additional_filters['expires_at__gte'] = expires_after_parsed.replace(
+ tzinfo=datetime.timezone.utc
)
filter_expires_before = self.request.query_params.get('expires_before')
if filter_expires_before:
try:
- expires_before_parsed = datetime.fromisoformat(filter_expires_before)
+ expires_before_parsed = datetime.datetime.fromisoformat(filter_expires_before)
except ValueError:
raise ValidationError({'expires_before': ['failed to convert to datetime']})
- additional_filters['expires_at__lte'] = timezone.make_aware(
- expires_before_parsed,
- timezone=timezone.utc,
+ additional_filters['expires_at__lte'] = expires_before_parsed.replace(
+ tzinfo=datetime.timezone.utc
)
if 'expires_at__lte' in additional_filters and 'expires_at__gte' in additional_filters:
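
One behavioural nuance of swapping `timezone.make_aware(parsed, timezone=timezone.utc)` for `parsed.replace(tzinfo=datetime.timezone.utc)` above: both attach UTC to a naive `fromisoformat` result, but an input string that already carries an offset is silently re-labelled by `.replace`, whereas `make_aware` would raise for an already-aware value. A small illustration:

```python
import datetime

# Naive input: both the old make_aware call and the new replace() attach UTC.
naive = datetime.datetime.fromisoformat("2022-12-01T12:00:00")
assert naive.replace(tzinfo=datetime.timezone.utc).utcoffset() == datetime.timedelta(0)

# Input with an explicit offset: replace() overwrites the offset instead of raising.
offset_aware = datetime.datetime.fromisoformat("2022-12-01T12:00:00+05:00")
relabelled = offset_aware.replace(tzinfo=datetime.timezone.utc)
assert relabelled.hour == 12 and relabelled.utcoffset() == datetime.timedelta(0)
```
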
diff --git a/pydis_site/apps/api/viewsets/bot/nomination.py b/pydis_site/apps/api/viewsets/bot/nomination.py
index 144daab0..6af42bcb 100644
--- a/pydis_site/apps/api/viewsets/bot/nomination.py
+++ b/pydis_site/apps/api/viewsets/bot/nomination.py
@@ -172,7 +172,7 @@ class NominationViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge
serializer_class = NominationSerializer
queryset = Nomination.objects.all()
filter_backends = (DjangoFilterBackend, SearchFilter, OrderingFilter)
- filter_fields = ('user__id', 'active')
+ filterset_fields = ('user__id', 'active')
frozen_fields = ('id', 'inserted_at', 'user', 'ended_at')
frozen_on_create = ('ended_at', 'end_reason', 'active', 'inserted_at', 'reviewed')
diff --git a/pydis_site/apps/api/viewsets/bot/reminder.py b/pydis_site/apps/api/viewsets/bot/reminder.py
index 78d7cb3b..5f997052 100644
--- a/pydis_site/apps/api/viewsets/bot/reminder.py
+++ b/pydis_site/apps/api/viewsets/bot/reminder.py
@@ -125,4 +125,4 @@ class ReminderViewSet(
serializer_class = ReminderSerializer
queryset = Reminder.objects.prefetch_related('author')
filter_backends = (DjangoFilterBackend, SearchFilter)
- filter_fields = ('active', 'author__id')
+ filterset_fields = ('active', 'author__id')
diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py
index 3318b2b9..db73a83c 100644
--- a/pydis_site/apps/api/viewsets/bot/user.py
+++ b/pydis_site/apps/api/viewsets/bot/user.py
@@ -3,8 +3,9 @@ from collections import OrderedDict
from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend
-from rest_framework import status
+from rest_framework import fields, status
from rest_framework.decorators import action
+from rest_framework.exceptions import ParseError
from rest_framework.pagination import PageNumberPagination
from rest_framework.request import Request
from rest_framework.response import Response
@@ -138,6 +139,29 @@ class UserViewSet(ModelViewSet):
- 200: returned on success
- 404: if a user with the given `snowflake` could not be found
+ ### POST /bot/users/metricity_activity_data
+ Returns a mapping of user ID to message count in a given period for
+ the given user IDs.
+
+ #### Required Query Parameters
+    - days: how many days into the past to count messages from.
+
+ #### Request Format
+ >>> [
+ ... 409107086526644234,
+ ... 493839819168808962
+ ... ]
+
+ #### Response format
+ >>> {
+ ... "409107086526644234": 54,
+ ... "493839819168808962": 0
+ ... }
+
+ #### Status codes
+ - 200: returned on success
+ - 400: if request body or query parameters were missing or invalid
+
### POST /bot/users
Adds a single or multiple new users.
The roles attached to the user(s) must be roles known by the site.
@@ -237,7 +261,7 @@ class UserViewSet(ModelViewSet):
queryset = User.objects.all().order_by("id")
pagination_class = UserListPagination
filter_backends = (DjangoFilterBackend,)
- filter_fields = ('name', 'discriminator')
+ filterset_fields = ('name', 'discriminator')
def get_serializer(self, *args, **kwargs) -> ModelSerializer:
"""Set Serializer many attribute to True if request body contains a list."""
@@ -298,3 +322,34 @@ class UserViewSet(ModelViewSet):
except NotFoundError:
return Response(dict(detail="User not found in metricity"),
status=status.HTTP_404_NOT_FOUND)
+
+ @action(detail=False, methods=["POST"])
+ def metricity_activity_data(self, request: Request) -> Response:
+ """Request handler for metricity_activity_data endpoint."""
+ if "days" in request.query_params:
+ try:
+ days = int(request.query_params["days"])
+ except ValueError:
+ raise ParseError(detail={
+ "days": ["This query parameter must be an integer."]
+ })
+ else:
+ raise ParseError(detail={
+ "days": ["This query parameter is required."]
+ })
+
+ user_id_list_validator = fields.ListField(
+ child=fields.IntegerField(min_value=0),
+ allow_empty=False
+ )
+ user_ids = [
+ str(user_id) for user_id in
+ user_id_list_validator.run_validation(request.data)
+ ]
+
+ with Metricity() as metricity:
+ data = metricity.total_messages_in_past_n_days(user_ids, days)
+
+ default_data = {user_id: 0 for user_id in user_ids}
+ response_data = default_data | dict(data)
+ return Response(response_data, status=status.HTTP_200_OK)
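
For reference, a bot-side caller of the new action could look roughly like this. The host and authentication header are placeholders; the query parameter, request body, and response shape follow the route documentation above.

```python
# Hypothetical caller of POST /bot/users/metricity_activity_data; host and token are placeholders.
import httpx

response = httpx.post(
    "https://example.com/api/bot/users/metricity_activity_data",
    params={"days": 10},
    json=[409107086526644234, 493839819168808962],
    headers={"Authorization": "Token <api token>"},  # placeholder credentials
)
response.raise_for_status()
# Users with no recorded messages default to 0, e.g.
# {"409107086526644234": 54, "493839819168808962": 0}
message_counts = response.json()
```
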
diff --git a/pydis_site/apps/content/apps.py b/pydis_site/apps/content/apps.py
index 1e300a48..96019e1c 100644
--- a/pydis_site/apps/content/apps.py
+++ b/pydis_site/apps/content/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
class ContentConfig(AppConfig):
"""Django AppConfig for content app."""
- name = 'content'
+ name = 'pydis_site.apps.content'
diff --git a/pydis_site/apps/content/migrations/0001_add_tags.py b/pydis_site/apps/content/migrations/0001_add_tags.py
new file mode 100644
index 00000000..2c31e4c1
--- /dev/null
+++ b/pydis_site/apps/content/migrations/0001_add_tags.py
@@ -0,0 +1,35 @@
+# Generated by Django 4.0.6 on 2022-08-23 09:06
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Commit',
+ fields=[
+ ('sha', models.CharField(help_text='The SHA hash of this commit.', max_length=40, primary_key=True, serialize=False)),
+ ('message', models.TextField(help_text='The commit message.')),
+ ('date', models.DateTimeField(help_text='The date and time the commit was created.')),
+ ('authors', models.TextField(help_text='The person(s) who created the commit. This is a serialized JSON object. Refer to the GitHub documentation on the commit endpoint (schema/commit.author & schema/commit.committer) for more info. https://docs.github.com/en/rest/commits/commits#get-a-commit')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Tag',
+ fields=[
+ ('last_updated', models.DateTimeField(auto_now=True, help_text='The date and time this data was last fetched.')),
+ ('sha', models.CharField(help_text="The tag's hash, as calculated by GitHub.", max_length=40)),
+ ('name', models.CharField(help_text="The tag's name.", max_length=50, primary_key=True, serialize=False)),
+ ('group', models.CharField(help_text='The group the tag belongs to.', max_length=50, null=True)),
+ ('body', models.TextField(help_text='The content of the tag.')),
+ ('last_commit', models.ForeignKey(help_text='The commit this file was last touched in.', null=True, on_delete=django.db.models.deletion.CASCADE, to='content.commit')),
+ ],
+ ),
+ ]
diff --git a/pydis_site/apps/content/migrations/__init__.py b/pydis_site/apps/content/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/pydis_site/apps/content/migrations/__init__.py
diff --git a/pydis_site/apps/content/models/__init__.py b/pydis_site/apps/content/models/__init__.py
new file mode 100644
index 00000000..60007e27
--- /dev/null
+++ b/pydis_site/apps/content/models/__init__.py
@@ -0,0 +1,3 @@
+from .tag import Commit, Tag
+
+__all__ = ["Commit", "Tag"]
diff --git a/pydis_site/apps/content/models/tag.py b/pydis_site/apps/content/models/tag.py
new file mode 100644
index 00000000..1a20d775
--- /dev/null
+++ b/pydis_site/apps/content/models/tag.py
@@ -0,0 +1,80 @@
+import collections.abc
+import json
+
+from django.db import models
+
+
+class Commit(models.Model):
+ """A git commit from the Python Discord Bot project."""
+
+ URL_BASE = "https://github.com/python-discord/bot/commit/"
+
+ sha = models.CharField(
+ help_text="The SHA hash of this commit.",
+ primary_key=True,
+ max_length=40,
+ )
+ message = models.TextField(help_text="The commit message.")
+ date = models.DateTimeField(help_text="The date and time the commit was created.")
+ authors = models.TextField(help_text=(
+ "The person(s) who created the commit. This is a serialized JSON object. "
+ "Refer to the GitHub documentation on the commit endpoint "
+ "(schema/commit.author & schema/commit.committer) for more info. "
+ "https://docs.github.com/en/rest/commits/commits#get-a-commit"
+ ))
+
+ @property
+ def url(self) -> str:
+ """The URL to the commit on GitHub."""
+ return self.URL_BASE + self.sha
+
+ def lines(self) -> collections.abc.Iterable[str]:
+ """Return each line in the commit message."""
+ for line in self.message.split("\n"):
+ yield line
+
+ def format_authors(self) -> collections.abc.Iterable[str]:
+ """Return a nice representation of the author(s)' name and email."""
+ for author in json.loads(self.authors):
+ yield f"{author['name']} <{author['email']}>"
+
+
+class Tag(models.Model):
+ """A tag from the python-discord bot repository."""
+
+ URL_BASE = "https://github.com/python-discord/bot/tree/main/bot/resources/tags"
+
+ last_updated = models.DateTimeField(
+ help_text="The date and time this data was last fetched.",
+ auto_now=True,
+ )
+ sha = models.CharField(
+ help_text="The tag's hash, as calculated by GitHub.",
+ max_length=40,
+ )
+ last_commit = models.ForeignKey(
+ Commit,
+ help_text="The commit this file was last touched in.",
+ null=True,
+ on_delete=models.CASCADE,
+ )
+ name = models.CharField(
+ help_text="The tag's name.",
+ primary_key=True,
+ max_length=50,
+ )
+ group = models.CharField(
+ help_text="The group the tag belongs to.",
+ null=True,
+ max_length=50,
+ )
+ body = models.TextField(help_text="The content of the tag.")
+
+ @property
+ def url(self) -> str:
+ """Get the URL of the tag on GitHub."""
+ url = Tag.URL_BASE
+ if self.group:
+ url += f"/{self.group}"
+ url += f"/{self.name}.md"
+ return url
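
As a quick orientation for the new models, the snippet below builds in-memory instances and exercises the helper properties; the values are illustrative and nothing is saved to the database.

```python
# Illustrative use of the Commit and Tag models defined above (values are made up).
import datetime
import json

from pydis_site.apps.content.models import Commit, Tag

commit = Commit(
    sha="a" * 40,
    message="Add an example tag\n\nSecond line of the commit message.",
    date=datetime.datetime(2022, 8, 23, 9, 6, tzinfo=datetime.timezone.utc),
    authors=json.dumps([{"name": "Example Author", "email": "author@example.com"}]),
)
tag = Tag(name="example-tag", group="python", sha="b" * 40, body="Tag body.", last_commit=commit)

print(commit.url)                     # https://github.com/python-discord/bot/commit/aaaa...
print(list(commit.format_authors()))  # ['Example Author <author@example.com>']
print(tag.url)                        # .../bot/resources/tags/python/example-tag.md
```
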
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/asking-good-questions.md b/pydis_site/apps/content/resources/guides/pydis-guides/asking-good-questions.md
index 971989a9..b08ba7c6 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/asking-good-questions.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/asking-good-questions.md
@@ -26,7 +26,7 @@ If none of the above steps help you or you're not sure how to do some of the abo
# A Good Question
-When you're ready to ask a question, there's a few things you should have to hand before forming a query.
+When you're ready to ask a question, there are a few things you should have to hand before forming a query.
* A code example that illustrates your problem
* If possible, make this a minimal example rather than an entire application
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
index 4013962c..2822d046 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing.md
@@ -4,7 +4,7 @@ description: A guide to contributing to our open source projects.
icon: fab fa-github
---
-Our projects on Python Discord are open source and [available on Github](https://github.com/python-discord). If you would like to contribute, consider one of the following projects:
+Our projects on Python Discord are open source and [available on GitHub](https://github.com/python-discord). If you would like to contribute, consider one of the following projects:
<!-- Project cards -->
<div class="columns is-multiline is-centered is-3 is-variable">
@@ -19,11 +19,7 @@ Our projects on Python Discord are open source and [available on Github](https:/
</div>
<div class="card-content">
<div class="content">
- Our community-driven Discord bot.
- </div>
- <div class="tags has-addons">
- <span class="tag is-dark">Difficulty</span>
- <span class="tag is-primary">Beginner</span>
+ Sir Lancebot has a collection of self-contained, for-fun features. If you're new to Discord bots or contributing, this is a great place to start!
</div>
</div>
<div class="card-footer">
@@ -46,11 +42,7 @@ Our projects on Python Discord are open source and [available on Github](https:/
</div>
<div class="card-content">
<div class="content">
- The community and moderation Discord bot.
- </div>
- <div class="tags has-addons">
- <span class="tag is-dark">Difficulty</span>
- <span class="tag is-warning">Intermediate</span>
+ Called @Python on the server, this bot handles moderation tools, help channels, and other critical features for our community.
</div>
</div>
<div class="card-footer">
@@ -73,11 +65,7 @@ Our projects on Python Discord are open source and [available on Github](https:/
</div>
<div class="card-content">
<div class="content">
- The website, subdomains and API.
- </div>
- <div class="tags has-addons">
- <span class="tag is-dark">Difficulty</span>
- <span class="tag is-danger">Advanced</span>
+ This website itself! This project is built with Django and includes our API, which is used by various services such as @Python.
</div>
</div>
<div class="card-footer">
@@ -91,26 +79,64 @@ Our projects on Python Discord are open source and [available on Github](https:/
</div>
</div>
-If you don't understand anything or need clarification, feel free to ask any staff member with the **@PyDis Core Developers** role in the server. We're always happy to help!
+# How do I start contributing?
+Unsure of what contributing to open source projects involves? Have questions about how to use GitHub? Just need to know about our contribution etiquette? Completing these steps will have you ready to make your first contribution no matter your starting point.
+
+Feel free to skip any steps you're already familiar with, but please make sure not to miss the [Contributing Guidelines](#3-read-our-contributing-guidelines).
+
+If you are here looking for the answer to a specific question, check out the sub-articles in the top right of the page to see a list of our guides.
+
+**Note:** We use Git to keep track of changes to the files in our projects. Git allows you to make changes to your local code and then distribute those changes to the other people working on the project. You'll use Git in a couple of steps of the contributing process. You can refer to this [**guide on using Git**](./working-with-git/).
+{: .notification }
+
+### 1. Fork and clone the repo
+GitHub is a website based on Git that stores project files in the cloud. We use GitHub as a central place for sending changes, reviewing others' changes, and communicating with each other. You'll need to create a copy under your own GitHub account, a.k.a. "fork" it. You'll make your changes to this copy, which can then later be merged into the Python Discord repository.
+
+*Note: Members of the Python Discord staff can create feature branches directly on the repo without forking it.*
+
+Check out our [**guide on forking a GitHub repo**](./forking-repository/).
+
+Now that you have your own fork, you need to be able to make changes to the code. You can clone the repo to your local machine, commit changes to it there, and then push those changes to GitHub.
+
+Check out our [**guide on cloning a GitHub repo**](./cloning-repository/).
+
+### 2. Set up the project
+You have the source code on your local computer, now how do you actually run it? We have detailed guides on setting up the environment for each of our main projects:
+
+* [**Sir Lancebot**](./sir-lancebot/)
+
+* [**Python Bot**](./bot/)
+
+* [**Site**](./site/)
+
+### 3. Read our Contributing Guidelines
+We have a few short rules that all contributors must follow. Make sure you read and follow them while working on our projects.
+
+[**Contributing Guidelines**](./contributing-guidelines/).
+
+As mentioned in the Contributing Guidelines, we have a simple style guide for our projects based on PEP 8. Give it a read to keep your code consistent with the rest of the codebase.
+
+[**Style Guide**](./style-guide/)
+
+### 4. Create an issue
+The first step to any new contribution is an issue describing a problem with the current codebase or proposing a new feature. All the open issues are viewable on the GitHub repositories; for instance, here is the [issues page for Sir Lancebot](https://github.com/python-discord/sir-lancebot/issues). If you have something that you want to implement, open a new issue to present your idea. Otherwise, you can browse the unassigned issues and ask to be assigned to one that you're interested in, either in the comments on the issue or in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel on Discord.
+
+[**How to write a good issue**](./issues/)
-### Useful Resources
+Don't move forward until your issue is approved by a Core Developer. Issues are not guaranteed to be approved, so your work may be wasted.
+{: .notification .is-warning }
-[Guidelines](./contributing-guidelines/) - General guidelines you should follow when contributing to our projects.<br>
-[Style Guide](./style-guide/) - Information regarding the code styles you should follow when working on our projects.<br>
-[Review Guide](../code-reviews-primer/) - A guide to get you started on doing code reviews.
+### 5. Make changes
+Now it is time to make the changes to fulfill your approved issue. You should create a new Git branch for your feature; that way you can keep your main branch up to date with ours and even work on multiple features at once in separate branches.
-## Contributors Community
-We are very happy to have many members in our community that contribute to [our open source projects](https://github.com/python-discord/).
-Whether it's writing code, reviewing pull requests, or contributing graphics for our events, it’s great to see so many people being motivated to help out.
-As a token of our appreciation, those who have made significant contributions to our projects will receive a special **@Contributors** role on our server that makes them stand out from other members.
-That way, they can also serve as guides to others who are looking to start contributing to our open source projects or open source in general.
+This is a good time to review [how to write good commit messages](./contributing-guidelines/commit-messages) if you haven't already.
-#### Guidelines for the @Contributors Role
+### 6. Open a pull request
+After your issue has been approved and you've written your code and tested it, it's time to open a pull request. Pull requests are a feature in GitHub; you can think of them as asking the project maintainers to accept your changes. This gives other contributors a chance to review your code and make any needed changes before it's merged into the main branch of the project.
-One question we get a lot is what the requirements for the **@Contributors** role are.
-As it’s difficult to precisely quantify contributions, we’ve come up with the following guidelines for the role:
+Check out our [**Pull Request Guide**](./pull-requests/) for help with opening a pull request and going through the review process.
-- The member has made several significant contributions to our projects.
-- The member has a positive influence in our contributors subcommunity.
+Check out our [**Code Review Guide**](../code-reviews-primer/) to learn how to be a star reviewer. Reviewing PRs is a vital part of open source development, and we always need more reviewers!
-The role will be assigned at the discretion of the Admin Team in consultation with the Core Developers Team.
+### That's it!
+Thank you for contributing to our community projects. If there's anything you don't understand or you just want to discuss with other contributors, come visit the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel to ask questions. Keep an eye out for staff members with the **@PyDis Core Developers** role in the server; we're always happy to help!
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
index 2aa10aa3..02316bca 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/bot.md
@@ -5,34 +5,9 @@ icon: fab fa-github
toc: 3
---
The purpose of this guide is to get you a running local version of [the Python bot](https://github.com/python-discord/bot).
-This page will focus on the quickest steps one can take, with mentions of alternatives afterwards.
-
-### Clone The Repository
-First things first, to run the bot's code and make changes to it, you need a local version of it (on your computer).
+You should have already forked the repository and cloned it to your local machine. If not, check out our [detailed walkthrough](../#1-fork-and-clone-the-repo).
-<div class="card">
- <button type="button" class="card-header collapsible">
- <span class="card-header-title subtitle is-6 my-2 ml-2">Getting started with Git and GitHub</span>
- <span class="card-header-icon">
- <i class="fas fa-fw fa-angle-down title is-5" aria-hidden="true"></i>
- </span>
- </button>
- <div class="collapsible-content collapsed">
- <div class="card-content">
- <p>If you don't have Git on your computer already, <a href="https://git-scm.com/downloads">install it</a>. You can additionally install a Git GUI such as <a href="https://www.gitkraken.com/download">GitKraken</a>, or the <a href="https://cli.github.com/manual/installation">GitHub CLI</a>.</p>
- <p>To learn more about Git, you can look into <a href="../working-with-git">our guides</a>, as well as <a href="https://education.github.com/git-cheat-sheet-education.pdf">this cheatsheet</a>, <a href="https://learngitbranching.js.org">Learn Git Branching</a>, and otherwise any guide you can find on the internet. Once you got the basic idea though, the best way to learn Git is to use it.</p>
- <p>Creating a copy of a repository under your own account is called a <em>fork</em>. This is where all your changes and commits will be pushed to, and from where your pull requests will originate from.</p>
- <p><strong><a href="../forking-repository">Learn about forking a project</a></strong>.</p>
- </div>
- </div>
-</div>
-<br>
-
-You will need to create a fork of [the project](https://github.com/python-discord/bot), and clone the fork.
-Once this is done, you will have completed the first step towards having a running version of the bot.
-
-#### Working on the Repository Directly
-If you are a member of the organisation (a member of [this list](https://github.com/orgs/python-discord/people), or in our particular case, server staff), you can clone the project repository without creating a fork, and work on a feature branch instead.
+This page will focus on the quickest steps one can take, with mentions of alternatives afterwards.
---
@@ -113,6 +88,7 @@ urls:
# Snekbox
snekbox_eval_api: "http://localhost:8060/eval"
+ snekbox_311_eval_api: "http://localhost:8065/eval"
##### << Replace the following � characters with the channel IDs in your test server >> #####
# This assumes the template was used: https://discord.new/zmHtscpYN9E3
@@ -506,10 +482,14 @@ You are now almost ready to run the Python bot. The simplest way to do so is wit
In your `config.yml` file:
* Set `urls.site` to `"web:8000"`.
-* If you wish to work with snekbox set `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"`.
+* If you wish to work with snekbox, set the following:
+ * `urls.snekbox_eval_api` to `"http://snekbox:8060/eval"`
+ * `urls.snekbox_311_eval_api` to `"http://snekbox-311:8060/eval"`.
Assuming you have Docker installed **and running**, enter the cloned repo in the command line and type `docker-compose up`.
+If working with snekbox, you can run `docker-compose --profile 3.10 up` to also start a 3.10 snekbox container in addition to the default 3.11 container!
+
After pulling the images and building the containers, your bot will start. Enter your server and type `!help` (or whatever prefix you chose instead of `!`).
Your bot is now running, but this method makes debugging with an IDE a fairly involved process. For additional running methods, continue reading the following sections.
@@ -519,12 +499,13 @@ The advantage of this method is that you can run the bot's code in your preferre
* Append the following line to your `.env` file: `BOT_API_KEY=badbot13m0n8f570f942013fc818f234916ca531`.
* In your `config.yml` file, set `urls.site` to `"localhost:8000"`. If you wish to keep using `web:8000`, then [COMPOSE_PROJECT_NAME](../docker/#compose-project-names) has to be set.
-* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"`
+* To work with snekbox, set `urls.snekbox_eval_api` to `"http://localhost:8060/eval"` and `urls.snekbox_311_eval_api` to `"http://localhost:8065/eval"`.
You will need to start the services separately, but if you got the previous section with Docker working, that's pretty simple:
* `docker-compose up web` to start the site container. This is required.
* `docker-compose up snekbox` to start the snekbox container. You only need this if you're planning on working on the snekbox cog.
+* `docker-compose up snekbox-311` to start the snekbox 3.11 container. You only need this if you're planning on working on the snekbox cog.
* `docker-compose up redis` to start the Redis container. You only need this if you're not using fakeredis. For more info refer to [Working with Redis](#optional-working-with-redis).
You can start several services together: `docker-compose up web snekbox redis`.
@@ -532,7 +513,7 @@ You can start several services together: `docker-compose up web snekbox redis`.
##### Setting Up a Development Environment
The bot's code is Python code like any other. To run it locally, you will need the right version of Python with the necessary packages installed:
-1. Make sure you have [Python 3.9](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version.
+1. Make sure you have [Python 3.10](https://www.python.org/downloads/) installed. It helps if it is your system's default Python version.
2. [Install Poetry](https://github.com/python-poetry/poetry#installation).
3. [Install the dependencies](../installing-project-dependencies).
@@ -570,10 +551,7 @@ Now that you have everything setup, it is finally time to make changes to the bo
#### Working with Git
-If you have not yet [read the contributing guidelines](../contributing-guidelines), now is a good time.
-Contributions that do not adhere to the guidelines may be rejected.
-
-Notably, version control of our projects is done using Git and Github.
+Version control of our projects is done using Git and Github.
It can be intimidating at first, so feel free to ask for any help in the server.
[**Click here to see the basic Git workflow when contributing to one of our projects.**](../working-with-git/)
@@ -664,4 +642,11 @@ The following is a list of all available environment variables used by the bot:
| `METABASE_USERNAME` | When you wish to interact with Metabase | The username for a Metabase admin account.
| `METABASE_PASSWORD` | When you wish to interact with Metabase | The password for a Metabase admin account.
+---
+
+# Next steps
+Now that you have everything set up, it is finally time to make changes to the bot! If you have not yet read the [contributing guidelines](../contributing-guidelines/), now is a good time. Contributions that do not adhere to the guidelines may be rejected.
+
+If you're not sure where to go from here, our [detailed walkthrough](../#2-set-up-the-project) is for you.
+
Have fun!
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/commit-messages.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/commit-messages.md
new file mode 100644
index 00000000..ba476b65
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/commit-messages.md
@@ -0,0 +1,15 @@
+---
+title: Writing Good Commit Messages
+description: Information about writing good commit messages in our projects.
+---
+
+A well-structured git log is key to a project's maintainability; it provides insight into when and *why* things were done for future maintainers of the project.
+
+Commits should be as narrow in scope as possible.
+Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow.
+After about a week they'll probably be hard for you to follow, too.
+
+Please also avoid making minor commits for fixing typos or linting errors.
+[Don’t forget to lint before you push!](https://soundcloud.com/lemonsaurusrex/lint-before-you-push)
+
+A more in-depth guide to writing great commit messages can be found in Chris Beam's [How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/).
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines.md
index de1777f2..d1e4250d 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines.md
@@ -4,22 +4,15 @@ description: Guidelines to adhere to when contributing to our projects.
---
Thank you for your interest in our projects!
+This page contains the golden rules to follow when contributing. If you have questions about how to get started contributing, check out our [in-depth walkthrough](../../contributing/).
-If you are interested in contributing, **this page contains the golden rules to follow when contributing.**
-Supplemental information [can be found here](./supplemental-information/).
-Do note that failing to comply with our guidelines may lead to a rejection of the contribution.
-
-If you are confused by any of these rules, feel free to ask us in the `#dev-contrib` channel in our [Discord server.](https://discord.gg/python)
-
-# The Golden Rules of Contributing
-
-1. **Lint before you push.** We have simple but strict style rules that are enforced through linting.
-You must always lint your code before committing or pushing.
-[Using tools](./supplemental-information/#linting-and-pre-commit) such as `flake8` and `pre-commit` can make this easier.
-Make sure to follow our [style guide](../style-guide/) when contributing.
+1. **Lint before you push.**
+We have simple but strict style rules that are enforced through linting.
+[Set up a pre-commit hook](../linting/) to lint your code when you commit it.
+Not all of the style rules are enforced by linting, so make sure to read the [style guide](../style-guide/) as well.
2. **Make great commits.**
Great commits should be atomic, with a commit message explaining what and why.
-More on that can be found in [this section](./supplemental-information/#writing-good-commit-messages).
+Check out [Writing Good Commit Messages](../commit-messages/) for details.
3. **Do not open a pull request if you aren't assigned to the issue.**
If someone is already working on it, consider offering to collaborate with that person.
4. **Use assets licensed for public use.**
@@ -28,4 +21,8 @@ Whenever the assets are images, audio or even code, they must have a license com
We aim to foster a welcoming and friendly environment on our open source projects.
We take violations of our Code of Conduct very seriously, and may respond with moderator action.
-Welcome to our projects!
+<br/>
+
+Failing to comply with our guidelines may lead to a rejection of the contribution.
+If you have questions about any of the rules, feel free to ask us in the [`#dev-contrib`](https://discord.gg/2h3qBv8Xaa) channel in our [Discord server](https://discord.gg/python).
+{: .notification .is-warning }
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/_info.yml b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/_info.yml
deleted file mode 100644
index 80c8e772..00000000
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/_info.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-title: Contributing Guidelines
-description: Guidelines to adhere to when contributing to our projects.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/supplemental-information.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/supplemental-information.md
deleted file mode 100644
index e64e4fc6..00000000
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/contributing-guidelines/supplemental-information.md
+++ /dev/null
@@ -1,99 +0,0 @@
----
-title: Supplemental Information
-description: Additional information related to our contributing guidelines.
----
-
-This page contains additional information concerning a specific part of our development pipeline.
-
-## Writing Good Commit Messages
-
-A well-structured git log is key to a project's maintainability; it provides insight into when and *why* things were done for future maintainers of the project.
-
-Commits should be as narrow in scope as possible.
-Commits that span hundreds of lines across multiple unrelated functions and/or files are very hard for maintainers to follow.
-After about a week they'll probably be hard for you to follow, too.
-
-Please also avoid making minor commits for fixing typos or linting errors.
-*[Don’t forget to lint before you push!](https://soundcloud.com/lemonsaurusrex/lint-before-you-push)*
-
-A more in-depth guide to writing great commit messages can be found in Chris Beam's *[How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/).*
-
-## Code Style
-
-All of our projects have a certain project-wide style that contributions should attempt to maintain consistency with.
-During PR review, it's not unusual for style adjustments to be requested.
-
-[This page](../../style-guide/) will reference the differences between our projects and what is recommended by [PEP 8.](https://www.python.org/dev/peps/pep-0008/)
-
-## Linting and Pre-commit
-
-On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base.
-
-Running `flake8` will warn you about any potential style errors in your contribution.
-You must always check it **before pushing**.
-Your commit will be rejected by the build server if it fails to lint.
-
-**Some style rules are not enforced by flake8. Make sure to read the [style guide](../../style-guide/).**
-
-`pre-commit` is a powerful tool that helps you automatically lint before you commit.
-If the linter complains, the commit is aborted so that you can fix the linting errors before committing again.
-That way, you never commit the problematic code in the first place!
-
-Please refer to the project-specific documentation to see how to setup and run those tools.
-In most cases, you can install pre-commit using `poetry run task precommit`, and lint using `poetry run task lint`.
-
-## Type Hinting
-
-[PEP 484](https://www.python.org/dev/peps/pep-0484/) formally specifies type hints for Python functions, added to the Python Standard Library in version 3.5.
-Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types.
-
-For example:
-
-```python
-import typing
-
-def foo(input_1: int, input_2: typing.Dict[str, str]) -> bool:
- ...
-```
-
-This tells us that `foo` accepts an `int` and a `dict`, with `str` keys and values, and returns a `bool`.
-
-If the project is running Python 3.9 or above, you can use `dict` instead of `typing.Dict`.
-See [PEP 585](https://www.python.org/dev/peps/pep-0585/) for more information.
-
-All function declarations should be type hinted in code contributed to the PyDis organization.
-
-## Logging
-
-Instead of using `print` statements for logging, we use the built-in [`logging`](https://docs.python.org/3/library/logging.html) module.
-Here is an example usage:
-
-```python
-import logging
-
-log = logging.getLogger(__name__) # Get a logger bound to the module name.
-# This line is usually placed under the import statements at the top of the file.
-
-log.trace("This is a trace log.")
-log.warning("BEEP! This is a warning.")
-log.critical("It is about to go down!")
-```
-
-Print statements should be avoided when possible.
-Our projects currently defines logging levels as follows, from lowest to highest severity:
-
-- **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code.
-- **Note:** This is a PyDis-implemented logging level. It may not be available on every project.
-- **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going while workig on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity.
-- **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up.
-- **WARNING:** These events are out of the ordinary and should be fixed, but can cause a failure.
-- **ERROR:** These events can cause a failure in a specific part of the application and require urgent attention.
-- **CRITICAL:** These events can cause the whole application to fail and require immediate intervention.
-
-Any logging above the **INFO** level will trigger a [Sentry](https://sentry.io) issue and alert the Core Developer team.
-
-## Draft Pull Requests
-
-Github [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a Draft when opening it. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review.
-
-This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md
new file mode 100644
index 00000000..f6f8a5f2
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/linting.md
@@ -0,0 +1,14 @@
+---
+title: Linting
+description: A guide for linting and setting up pre-commit.
+---
+
+Your commit will be rejected by the build server if it fails to lint.
+On most of our projects, we use `flake8` and `pre-commit` to ensure that the code style is consistent across the code base.
+
+`pre-commit` is a powerful tool that helps you automatically lint before you commit.
+If the linter complains, the commit is aborted so that you can fix the linting errors before committing again.
+That way, you never commit the problematic code in the first place!
+
+Please refer to the project-specific documentation to see how to set up and run those tools.
+In most cases, you can install pre-commit using `poetry run task precommit`, and lint using `poetry run task lint` in the console.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/logging.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/logging.md
new file mode 100644
index 00000000..1291a7a4
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/logging.md
@@ -0,0 +1,31 @@
+---
+title: Logging
+description: Information about logging in our projects.
+---
+
+Instead of using `print` statements for logging, we use the built-in [`logging`](https://docs.python.org/3/library/logging.html) module.
+Here is an example usage:
+
+```python
+import logging
+
+log = logging.getLogger(__name__) # Get a logger bound to the module name.
+# This line is usually placed under the import statements at the top of the file.
+
+log.trace("This is a trace log.")
+log.warning("BEEP! This is a warning.")
+log.critical("It is about to go down!")
+```
+
+Print statements should be avoided when possible.
+Our projects currently define logging levels as follows, from lowest to highest severity:
+
+- **TRACE:** These events should be used to provide a *verbose* trace of every step of a complex process. This is essentially the `logging` equivalent of sprinkling `print` statements throughout the code.
+    - **Note:** This is a PyDis-implemented logging level. It may not be available on every project.
+- **DEBUG:** These events should add context to what's happening in a development setup to make it easier to follow what's going on while working on a project. This is in the same vein as **TRACE** logging but at a much lower level of verbosity.
+- **INFO:** These events are normal and don't need direct attention but are worth keeping track of in production, like checking which cogs were loaded during a start-up.
+- **WARNING:** These events are out of the ordinary and should be fixed, but have not caused a failure.
+- **ERROR:** These events can cause a failure in a specific part of the application and require urgent attention.
+- **CRITICAL:** These events can cause the whole application to fail and require immediate intervention.
+
+Any logging above the **INFO** level will trigger a [Sentry](https://sentry.io) issue and alert the Core Developer team.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/pull-requests.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/pull-requests.md
new file mode 100644
index 00000000..d193a455
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/pull-requests.md
@@ -0,0 +1,40 @@
+---
+title: Pull Requests
+description: A guide for opening pull requests.
+---
+
+As stated in our [Contributing Guidelines](../contributing-guidelines/), do not open a pull request if you aren't assigned to an approved issue. You can check out our [Issues Guide](../issues/) for help with opening an issue or getting assigned to an existing one.
+{: .notification .is-warning }
+
+Before opening a pull request you should have:
+
+1. Committed your changes to your local repository
+2. [Linted](../linting/) your code
+3. Tested your changes
+4. Pushed the branch to your fork of the project on GitHub
+
+## Opening a Pull Request
+
+Navigate to your fork on GitHub and make sure you're on the branch with your changes. Click on `Contribute` and then `Open pull request`:
+
+![Pull Request UI](/static/images/content/contributing/pull_request.png)
+
+On the page that opens, write an overview of the changes you made and why. This should explain how you resolved the issue that spawned this PR and highlight any differences from the proposed implementation. You should also [link the issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
+
+At this stage you can also request reviews from individual contributors. If someone showed interest in the issue or has specific knowledge about it, they may be a good reviewer. It isn't necessary to request reviewers; someone will review your PR either way.
+
+## The Review Process
+
+Before your changes are merged, your PR needs to be reviewed by other contributors. They will read the issue and your description of your PR, look at your code, test it, and then leave comments on the PR if they find any problems, possibly with suggested changes. Sometimes this can feel intrusive or insulting, but remember that the reviewers are there to help you make your code better.
+
+#### If the PR is already open, how do I make changes to it?
+
+A pull request is between a source branch and a target branch. Updating the source branch with new commits will automatically update the PR to include those commits; they'll even show up in the comment thread of the PR. Sometimes, for small changes, the reviewer will write the suggested code themselves, in which case you can simply accept the suggestion with the click of a button.
+
+If you truly disagree with a reviewer's suggestion, leave a reply in the thread explaining why or proposing an alternative change. Also feel free to ask questions if you want clarification about suggested changes or just want to discuss them further.
+
+## Draft Pull Requests
+
+GitHub [provides a PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) that allows the PR author to mark it as a draft when opening it. This provides both a visual and functional indicator that the contents of the PR are in a draft state and not yet ready for formal review. This is helpful when you want people to see the changes you're making before you're ready for the final pull request.
+
+This feature should be utilized in place of the traditional method of prepending `[WIP]` to the PR title.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/sir-lancebot.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/sir-lancebot.md
index e3cd8f0c..c9566d23 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/sir-lancebot.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/sir-lancebot.md
@@ -5,10 +5,11 @@ icon: fab fa-github
toc: 1
---
-> Before contributing, please ensure you read the [contributing guidelines](../contributing-guidelines) in full.
+You should have already forked the [`sir-lancebot`](https://github.com/python-discord/sir-lancebot) repository and cloned it to your local machine. If not, check out our [detailed walkthrough](../#1-fork-and-clone-the-repo).
----
-# Requirements
+Make sure you have read the [contributing guidelines](../contributing-guidelines) in full before you start contributing.
+
+### Requirements
- [Python 3.9](https://www.python.org/downloads/)
- [Poetry](https://github.com/python-poetry/poetry#installation)
- [Git](https://git-scm.com/downloads)
@@ -16,10 +17,12 @@ toc: 1
- [MacOS Installer](https://git-scm.com/download/mac) or `brew install git`
- [Linux](https://git-scm.com/download/linux)
+---
+
## Using Gitpod
Sir Lancebot can be edited and tested on Gitpod. Gitpod will automatically install the correct dependencies and Python version, so you can get straight to coding.
-To do this, you will need a Gitpod account, which you can get [here](https://www.gitpod.io/#get-started), and a fork of Sir Lancebot. This guide covers forking the repository [here](#fork-the-project).
+To do this, you will need a Gitpod account, which you can get [here](https://www.gitpod.io/#get-started), and a fork of Sir Lancebot. This guide covers forking the repository [here](../forking-repository).
Afterwards, click on [this link](https://gitpod.io/#/github.com/python-discord/sir-lancebot) to spin up a new workspace for Sir Lancebot. Then run the following commands in the terminal after the existing tasks have finished running:
```sh
@@ -41,19 +44,8 @@ The requirements for Docker are:
* This is only a required step for linux. Docker comes bundled with docker-compose on Mac OS and Windows.
---
-
-# Fork the Project
-You will need your own remote (online) copy of the project repository, known as a *fork*.
-
-- [**Learn how to create a fork of the repository here.**](../forking-repository)
-
-You will do all your work in the fork rather than directly in the main repository.
-
----
-
# Development Environment
-1. Once you have your fork, you will need to [**clone the repository to your computer**](../cloning-repository).
-2. After cloning, proceed to [**install the project's dependencies**](../installing-project-dependencies). (This is not required if using Docker)
+If you aren't using Docker, you will need to [install the project's dependencies](../installing-project-dependencies) yourself.
---
# Test Server and Bot Account
@@ -120,14 +112,11 @@ After installing project dependencies use the poetry command `poetry run task st
```shell
$ poetry run task start
```
-
---
-# Working with Git
-Now that you have everything setup, it is finally time to make changes to the bot! If you have not yet [read the contributing guidelines](https://github.com/python-discord/sir-lancebot/blob/main/CONTRIBUTING.md), now is a good time. Contributions that do not adhere to the guidelines may be rejected.
-
-Notably, version control of our projects is done using Git and Github. It can be intimidating at first, so feel free to ask for any help in the server.
+# Next steps
+Now that you have everything set up, it is finally time to make changes to the bot! If you have not yet read the [contributing guidelines](../contributing-guidelines/), now is a good time. Contributions that do not adhere to the guidelines may be rejected.
-[**Click here to see the basic Git workflow when contributing to one of our projects.**](../working-with-git/)
+If you're not sure where to go from here, our [detailed walkthrough](../#2-set-up-the-project) is for you.
Have fun!
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
index f2c3bd95..9786698b 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/site.md
@@ -5,9 +5,11 @@ icon: fab fa-github
toc: 1
---
-# Requirements
+You should have already forked the [`site`](https://github.com/python-discord/site) repository and cloned it to your local machine. If not, check out our [detailed walkthrough](../#1-fork-and-clone-the-repo).
-- [Python 3.9](https://www.python.org/downloads/)
+### Requirements
+
+- [Python 3.10](https://www.python.org/downloads/)
- [Poetry](https://python-poetry.org/docs/#installation)
- `pip install poetry`
- [Git](https://git-scm.com/downloads)
@@ -27,22 +29,9 @@ Without Docker:
- Note that if you wish, the webserver can run on the host and still use Docker for PostgreSQL.
---
-# Fork the project
-
-You will need access to a copy of the git repository of your own that will allow you to edit the code and push your commits to.
-Creating a copy of a repository under your own account is called a _fork_.
-
-- [Learn how to create a fork of the repository here.](../forking-repository/)
-
-This is where all your changes and commits will be pushed to, and from where your PRs will originate from.
-
-For any Core Developers, since you have write permissions already to the original repository, you can just create a feature branch to push your commits to instead.
-
----
# Development environment
-1. [Clone your fork to a local project directory](../cloning-repository/)
-2. [Install the project's dependencies](../installing-project-dependencies/)
+[Install the project's dependencies](../installing-project-dependencies/)
## Without Docker
@@ -178,3 +167,12 @@ The website is configured through the following environment variables:
- **`STATIC_ROOT`**: The root in which `python manage.py collectstatic`
collects static files. Optional, defaults to `/app/staticfiles` for the
standard Docker deployment.
+
+---
+
+# Next steps
+Now that you have everything set up, it is finally time to make changes to the site! If you have not yet read the [contributing guidelines](../contributing-guidelines/), now is a good time. Contributions that do not adhere to the guidelines may be rejected.
+
+If you're not sure where to go from here, our [detailed walkthrough](../#2-set-up-the-project) is for you.
+
+Have fun!
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
index f9962990..b26c467c 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/style-guide.md
@@ -191,21 +191,14 @@ Present tense defines that the work being done is now, in the present, rather th
**Use:** "Build an information embed."<br>
**Don't use:** "Built an information embed." or "Will build an information embed."
-# Type Annotations
-Functions are required to have type annotations as per the style defined in [PEP 484](https://www.python.org/dev/peps/pep-0484/).
+# Type Hinting
+Functions are required to have type annotations as per the style defined in [PEP 484](https://www.python.org/dev/peps/pep-0484/). Type hints are recognized by most modern code editing tools and provide useful insight into both the input and output types of a function, preventing the user from having to go through the codebase to determine these types.
-A function without annotations might look like:
-```py
-def divide(a, b):
- """Divide the two given arguments."""
- return a / b
-```
-
-With annotations, the arguments and the function are annotated with their respective types:
-```py
-def divide(a: int, b: int) -> float:
- """Divide the two given arguments."""
- return a / b
+A function with type hints looks like:
+```python
+def foo(input_1: int, input_2: dict[str, int]) -> bool:
+ ...
```
+This tells us that `foo` accepts an `int` and a `dict`, with `str` keys and `int` values, and returns a `bool`.
In previous examples, we have purposely omitted annotations to keep focus on the specific points they represent.
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/working-with-git.md b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/working-with-git.md
index 26c89b56..59c57859 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/contributing/working-with-git.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/contributing/working-with-git.md
@@ -19,5 +19,7 @@ Below are links to regular workflows for working with Git using PyCharm or the C
**Resources to learn Git**
* [The Git Book](https://git-scm.com/book)
-* [Corey Schafer's Youtube Tutorials](https://www.youtube.com/watch?v=HVsySz-h9r4&list=PL-osiE80TeTuRUfjRe54Eea17-YfnOOAx)
-* [GitHub Git Resources Portal](https://try.github.io/)
+* [Corey Schafer's YouTube tutorials](https://www.youtube.com/watch?v=HVsySz-h9r4&list=PL-osiE80TeTuRUfjRe54Eea17-YfnOOAx)
+* [GitHub Git resources portal](https://try.github.io/)
+* [Git cheatsheet](https://education.github.com/git-cheat-sheet-education.pdf)
+* [Learn Git branching](https://learngitbranching.js.org)
diff --git a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
index f8031834..5e785cd9 100644
--- a/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
+++ b/pydis_site/apps/content/resources/guides/pydis-guides/off-topic-etiquette.md
@@ -5,7 +5,7 @@ icon: fab fa-discord
---
## Why do we need off-topic etiquette?
-Everyone wants to have good conversations in our off-topic channels, but with tens of thousands of members, this might mean different things to different people.
+Everyone wants to have good conversations in our off-topic channels, but with hundreds of thousands of members, this might mean different things to different people.
To facilitate the best experience for everyone, here are some guidelines on conversation etiquette.
## Three things you shouldn't do
diff --git a/pydis_site/apps/content/resources/guides/python-guides/discord-messages-with-colors.md b/pydis_site/apps/content/resources/guides/python-guides/discord-messages-with-colors.md
new file mode 100644
index 00000000..62ff61f9
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/discord-messages-with-colors.md
@@ -0,0 +1,68 @@
+---
+title: Discord Messages with Colors
+description: A guide on how to add colors to your codeblocks on Discord
+---
+
+Discord is now slowly rolling out the ability to send colored text within code blocks. This is done using ANSI color codes, which are also how you print colored text in your terminal.
+
+To send colored text in a code block, you need to first specify the `ansi` language and use a prefix similar to the one below:
+```ansi
+\u001b[{format};{color}m
+```
+*`\u001b` is the Unicode escape for ESCAPE/ESC, meant to be used in the source of your bot (see <http://www.unicode-symbol.com/u/001B.html>).* ***If you wish to send colored text without using your bot, you need to copy the character from the website.***
+
+After you've written this prefix, you can type the text you wish to color. If you want to reset the color back to normal, use the `\u001b[0m` reset prefix.
+
+Here is the list of values you can use to replace `{format}`:
+
+* 0: Normal
+* 1: **Bold**
+* 4: <ins>Underline</ins>
+
+Here is the list of values you can use to replace `{color}`:
+
+*The following values will change the **text** color.*
+
+* 30: Gray
+* 31: Red
+* 32: Green
+* 33: Yellow
+* 34: Blue
+* 35: Pink
+* 36: Cyan
+* 37: White
+
+*The following values will change the **text background** color.*
+
+* 40: Firefly dark blue
+* 41: Orange
+* 42: Marble blue
+* 43: Greyish turquoise
+* 44: Gray
+* 45: Indigo
+* 46: Light gray
+* 47: White
+
+Let's take an example: say we want bold green text on a very dark blue background.
+We simply use `\u001b[0;40m` (background color) and `\u001b[1;32m` (text color) as prefixes. Note that the order is **important**: first you give the background color and then the text color.
+
+Alternatively, you can combine them into a single prefix like `\u001b[1;40;32m`, and you can also use multiple format values: something like `\u001b[1;40;4;32m` would make the text bold, underlined, and green on a dark blue background.
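+
+As a rough sketch of how this might look in your bot's source (the variable names and the send call here are illustrative, not part of any particular library):
+
+```python
+ESC = "\u001b"                  # the ESCAPE character itself
+prefix = f"{ESC}[1;40;32m"      # 1 = bold, 40 = dark blue background, 32 = green text
+reset = f"{ESC}[0m"             # 0 = reset all formatting
+
+fence = "`" * 3                 # avoids writing a literal code fence inside this example
+message = f"{fence}ansi\n{prefix}That's some cool formatted text right?{reset}\n{fence}"
+# Send `message` with your bot, e.g. `await channel.send(message)`.
+```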
+
+Raw message:
+````nohighlight
+```ansi
+\u001b[0;40m\u001b[1;32mThat's some cool formatted text right?
+or
+\u001b[1;40;32mThat's some cool formatted text right?
+```
+````
+
+Result:
+
+![Background and text color result](/static/images/content/discord_colored_messages/result.png)
+
+The way the colors look like on Discord is shown in the image below:
+
+![ANSI Colors](/static/images/content/discord_colored_messages/ansi-colors.png)
+
+Note: If the change has not been rolled out to you or other users yet, you can use other code blocks in the meantime to get colored text. See **[this gist](https://gist.github.com/matthewzring/9f7bbfd102003963f9be7dbcf7d40e51)**.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/discordpy_help_command.md b/pydis_site/apps/content/resources/guides/python-guides/discordpy_help_command.md
new file mode 100644
index 00000000..4b475146
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/discordpy_help_command.md
@@ -0,0 +1,66 @@
+---
+title: Custom Help Command
+description: "Overwrite discord.py's help command to implement custom functionality"
+---
+
+First, a basic walkthrough can be found [here](https://gist.github.com/InterStella0/b78488fb28cadf279dfd3164b9f0cf96) by Stella#2000 on subclassing the HelpCommand. It will provide some foundational knowledge that is required before attempting a more customizable help command.
+
+## Custom Subclass of Help Command
+If the built-in HelpCommand classes do not fit your needs, you can subclass `HelpCommand` and override its methods to customize the output. Below is a simple demonstration using the following methods, which can also be found in the documentation:
+
+- [filter_commands](https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.HelpCommand.filter_commands)
+
+- [send_bot_help](https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.HelpCommand.send_bot_help)
+
+- [send_command_help](https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.HelpCommand.send_command_help)
+
+- [send_group_help](https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.HelpCommand.send_group_help)
+
+- [send_error_message](https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.HelpCommand.send_error_message)
+
+```python
+import discord
+from discord.ext import commands
+
+
+class MyHelp(commands.HelpCommand):
+
+ async def send_bot_help(self, mapping):
+ """
+ This is triggered when !help is invoked.
+
+ This example demonstrates how to list the commands that the member invoking the help command can run.
+ """
+ filtered = await self.filter_commands(self.context.bot.commands, sort=True) # returns a list of command objects
+ names = [command.name for command in filtered] # iterating through the commands objects getting names
+ available_commands = "\n".join(names) # joining the list of names by a new line
+        embed = discord.Embed(description=available_commands)
+ await self.context.send(embed=embed)
+
+ async def send_command_help(self, command):
+ """This is triggered when !help <command> is invoked."""
+ await self.context.send("This is the help page for a command")
+
+ async def send_group_help(self, group):
+ """This is triggered when !help <group> is invoked."""
+ await self.context.send("This is the help page for a group command")
+
+ async def send_cog_help(self, cog):
+ """This is triggered when !help <cog> is invoked."""
+ await self.context.send("This is the help page for a cog")
+
+ async def send_error_message(self, error):
+ """If there is an error, send a embed containing the error."""
+ channel = self.get_destination() # this defaults to the command context channel
+ await channel.send(error)
+
+# Create the bot as you normally would, then attach the custom help command.
+bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())
+bot.help_command = MyHelp()
+```
+
+You can handle the case when a user does not pass a command name when invoking the help command and build a fancy, customized embed; a page that describes the bot and shows a list of commands is generally used here. However, if a command is passed in, you can display detailed information about that command, as in the sketch after the list below. The following references from the documentation can be utilised:
+
+- [Get the command object](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.ext.commands.Bot.get_command)
+
+- [Get the command name](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.ext.commands.Command.name)
+
+- [Get the command aliases](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.ext.commands.Command.aliases)
+
+- [Get the command brief](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.ext.commands.Command.brief)
+
+- [Get the command usage](https://discordpy.readthedocs.io/en/latest/ext/commands/api.html#discord.ext.commands.Command.usage)
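+
+For instance, here is a minimal, illustrative sketch of how `send_command_help` could combine these attributes into an embed. The class name, field labels, and fallback strings are placeholders, not a prescribed implementation:
+
+```python
+import discord
+from discord.ext import commands
+
+
+class DetailedHelp(commands.HelpCommand):
+    """A hypothetical subclass showing one way to use the attributes listed above."""
+
+    async def send_command_help(self, command):
+        embed = discord.Embed(title=f"Help for `{command.name}`")
+        embed.add_field(name="Description", value=command.brief or "No description provided.", inline=False)
+        embed.add_field(name="Usage", value=command.usage or "No usage specified.", inline=False)
+        if command.aliases:
+            embed.add_field(name="Aliases", value=", ".join(command.aliases), inline=False)
+        await self.get_destination().send(embed=embed)
+```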
diff --git a/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md b/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md
new file mode 100644
index 00000000..57d86e99
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/docker-hosting-guide.md
@@ -0,0 +1,323 @@
+---
+title: How to host a bot with Docker and GitHub Actions on an Ubuntu VPS
+description: This guide shows how to host a bot with Docker and GitHub Actions on an Ubuntu VPS.
+---
+
+## Contents
+
+1. [You will learn how to](#you-will-learn-how-to)
+2. [Introduction](#introduction)
+3. [Installing Docker](#installing-docker)
+4. [Creating Dockerfile](#creating-dockerfile)
+5. [Building Image and Running Container](#building-image-and-running-container)
+6. [Using Docker Compose](#using-docker-compose)
+7. [Creating Volumes](#creating-volumes)
+8. [Using GitHub Actions for full automation](#using-github-actions-for-full-automation)
+
+## You will learn how to
+
+- write a Dockerfile
+- build a Docker image and run the container
+- use Docker Compose
+- make Docker keep files across container runs
+- pass environment variables into the container
+- use GitHub Actions for automation
+- set up a self-hosted runner
+- use runner secrets
+
+## Introduction
+
+Let's say you have a nice Discord bot written in Python and a VPS to host it on. Now the only question is
+how to run it 24/7. You might have been advised to use a *screen multiplexer*, but that approach has some disadvantages:
+
+1. Every time you update the bot you have to SSH to your server, attach to the screen session, shut down the bot, run `git pull` and
+   run the bot again. You might have good extension management that allows you to update the bot without restarting it,
+   but there are some other cons as well
+2. If you update the bot's dependencies, you have to install them on the server manually
+3. The bot doesn't run in an isolated environment, which is not good for security.
+
+But there's a nice and easy solution to these problems - **Docker**! Docker is a containerization utility that automates
+things like dependency updates and running the application in the background. So let's get started.
+
+## Installing Docker
+
+The best way to install Docker is to use
+the [convenience script](https://docs.docker.com/engine/install/ubuntu/#install-using-the-convenience-script) provided
+by Docker developers themselves. You just need 2 lines:
+
+```shell
+$ curl -fsSL https://get.docker.com -o get-docker.sh
+$ sudo sh get-docker.sh
+```
+
+## Creating Dockerfile
+
+To tell Docker what it has to do to run the application, we need to create a file named `Dockerfile` in our project's
+root.
+
+1. First we need to specify the *base image*, which provides the operating system the container will run in. Choosing this
+   image also installs the tools we need to run our bot, for
+   example the Python interpreter
+
+```dockerfile
+FROM python:3.10-bullseye
+```
+
+2. Next, we need to copy our Python project's dependency list (`requirements.txt`) to some directory *inside the container*. Let's call
+   it `/app`
+
+```dockerfile
+COPY requirements.txt /app/
+```
+
+3. Now we need to set that directory as the working directory and install the requirements
+
+```dockerfile
+WORKDIR /app
+RUN pip install -r requirements.txt
+```
+
+4. The only thing left to do is to copy the rest of the project's files and run the main executable
+
+```dockerfile
+COPY . .
+CMD ["python3", "main.py"]
+```
+
+The final version of Dockerfile looks like this:
+
+```dockerfile
+FROM python:3.10-bullseye
+COPY requirements.txt /app/
+WORKDIR /app
+RUN pip install -r requirements.txt
+COPY . .
+CMD ["python3", "main.py"]
+```
+
+## Building Image and Running Container
+
+Now update the project on your VPS, so we can run the bot with Docker.
+
+1. Build the image (dot at the end is very important)
+
+```shell
+$ docker build -t mybot .
+```
+
+- the `-t` flag specifies a **tag** that will be assigned to the image. With it, we can easily run the image that the
+ tag was assigned to.
+- the dot at the end is basically the path to search for Dockerfile. The dot means current directory (`./`)
+
+2. Run the container
+
+```shell
+$ docker run -d --name mybot mybot:latest
+```
+
+- the `-d` flag tells Docker to run the container in detached mode, meaning it runs the container in the background of
+  your terminal and doesn't give us any output from it. If we don't provide it, `docker run` keeps giving us the output
+  until the application exits. Discord bots aren't supposed to exit after a certain time, so we do need this flag
+- `--name` assigns a name to the container. By default, a container is identified by an ID that is not human-readable.
+  Assigning a name lets us refer to the container conveniently when needed
+- `mybot:latest` means "latest version of `mybot` image"
+
+3. Read the bot's logs (keep in mind that this utility only allows reading STDERR)
+
+```shell
+$ docker logs -f mybot
+```
+
+- the `-f` flag tells Docker to keep reading logs as they appear in the container and is called "follow mode". To exit,
+  press `CTRL + C`.
+
+If everything went successfully, your bot will go online and will keep running!
+
+## Using Docker Compose
+
+Just 2 commands to run a container is cool, but we can shorten it down to just 1 simple command. For that, create
+a `docker-compose.yml` file in the project's root and fill it with the following contents:
+
+```yml
+version: "3.8"
+services:
+ main:
+ build: .
+ container_name: mybot
+```
+
+- `version` tells Docker what version of Compose to use. You may check all the
+ versions [here](https://docs.docker.com/compose/compose-file/compose-versioning/)
+- `services` contains services to build and run. Read more about
+ services [here](https://docs.docker.com/compose/compose-file/#services-top-level-element)
+- `main` is a service. We can call it whatever we would like to, not necessarily `main`
+- `build: .` is a path to search for Dockerfile, just like `docker build` command's dot
+- `container_name: mybot` is a container name to use for a bot, just like `docker run --name mybot`
+
+Update the project on the VPS, remove the previous container with `docker rm -f mybot`, and run this command
+
+```shell
+docker compose up -d --build
+```
+
+Now Docker will automatically build the image for you and run the container.
+
+### Why docker-compose
+
+The main purpose of Compose is to run several services at once; we mostly
+don't need this for Discord bots, however.
+For us, it has the following benefits:
+
+- we can build and run the container with just one command
+- if we need to pass some environment variables or volumes (more about them later in the tutorial), our run command without Compose would
+  look like this
+
+```shell
+$ docker run -d --name mybot -e TOKEN=... -e WEATHER_API_APIKEY=... -e SOME_USEFUL_ENVIRONMENT_VARIABLE=... --net=host -v /home/exenifix/bot-data/data:/app/data -v /home/exenifix/bot-data/images:/app/data/images
+```
+
+This is pretty long and unreadable. Compose allows us to move those flags into a single config file and still
+use just one short command to run the container.
+
+## Creating Volumes
+
+The files created during a container's run are destroyed when the container is recreated. To prevent some files from getting
+destroyed, we need to use *volumes*, which save files from a directory inside the container somewhere on the host's drive.
+
+1. Create a new directory somewhere and copy the path to it
+
+```shell
+$ mkdir mybot-data
+$ echo $(pwd)/mybot-data
+```
+
+My path is `/home/exenifix/mybot-data`; yours is most likely **different**!
+
+2. In your project, store the files that need to be persistent in a separate directory (e.g. `data`)
+3. Add `volumes` to `docker-compose.yaml` so it looks like this:
+
+```yml
+version: "3.8"
+services:
+ main:
+ build: .
+ container_name: mybot
+ volumes:
+ - /home/exenifix/mybot-data:/app/data
+```
+
+The path before the colon `:` is the directory *on the server's drive, outside of the container*, and the second path is the
+directory *inside the container*.
+All files saved in that directory inside the container will be saved in the drive's directory as well, and Docker will be
+accessing them *from the drive*.
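+
+As a small, hypothetical illustration of what this means for the bot's code, anything written under the mapped directory survives a rebuild; the file name and counter below are made up for the example:
+
+```python
+from pathlib import Path
+
+# `data/` inside the container is `/app/data`, which the volume maps to the host,
+# so anything written here survives the container being rebuilt.
+DATA_DIR = Path("data")
+DATA_DIR.mkdir(exist_ok=True)
+
+counter_file = DATA_DIR / "run_count.txt"
+runs = int(counter_file.read_text()) if counter_file.exists() else 0
+counter_file.write_text(str(runs + 1))
+print(f"The bot has been started {runs + 1} time(s).")
+```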
+
+## Using GitHub Actions for full automation
+
+Now it's time to fully automate the process and make Docker update the bot automatically on every commit or release. For
+that, we will use a **GitHub Actions workflow**, which basically runs some commands when we need to. You may read more
+about them [here](https://docs.github.com/en/actions/using-workflows).
+
+### Create repository secret
+
+We will not have the ability to use `.env` files with the workflow, so it's better to store the environment variables
+as **Actions secrets**. Let's add your Discord bot's token as a secret
+
+1. Head to your repository page -> Settings -> Secrets -> Actions
+2. Press `New repository secret`
+3. Give it a name like `TOKEN` and paste the token.
+   Now we will be able to access its value in the workflow as `${{ secrets.TOKEN }}`. However, we also need to pass the
+   variable into the container (a sketch of how the bot reads it follows the config below). Edit `docker-compose.yml` so it looks like this:
+
+```yml
+version: "3.8"
+services:
+ main:
+ build: .
+ container_name: mybot
+ volumes:
+ - /home/exenifix/mybot-data:/app/data
+ environment:
+ - TOKEN
+```
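+
+For completeness, here is a minimal sketch of how the bot's entry point might read that variable, assuming a discord.py-style bot; the prefix and intents are placeholders:
+
+```python
+import os
+
+import discord
+from discord.ext import commands
+
+bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())
+
+# TOKEN is injected into the container by docker compose, which in turn
+# receives it from the GitHub Actions secret.
+bot.run(os.environ["TOKEN"])
+```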
+
+### Setup self-hosted runner
+
+To run the workflow on our VPS, we will need to register it as a *self-hosted runner*.
+
+1. Head to Settings -> Actions -> Runners
+2. Press `New self-hosted runner`
+3. Select runner image and architecture
+4. Follow the instructions but don't run the runner
+5. Instead, create a service
+
+```shell
+$ sudo ./svc.sh install
+$ sudo ./svc.sh start
+```
+
+Now we have registered our VPS as a self-hosted runner and can run the workflow on it.
+
+### Write a workflow
+
+Create a new file `.github/workflows/runner.yml` and paste the following content into it. Please pay attention to
+the `branches` instruction.
+GitHub's standard main branch name is `main`; however, it may be named `master` or something else if you edited its
+name. Make sure to put
+the correct branch name, otherwise the workflow won't work. More about GitHub workflow
+syntax can be found [here](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions).
+
+```yml
+name: Docker Runner
+
+on:
+ push:
+ branches: [ main ]
+
+jobs:
+ run:
+ runs-on: self-hosted
+ environment: production
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Run Container
+ run: docker compose up -d --build
+ env:
+ TOKEN: ${{ secrets.TOKEN }}
+
+ - name: Cleanup Unused Images
+ run: docker image prune -f
+```
+
+Run `docker rm -f mybot` (it only needs to be done once) and push to GitHub. Now if you open the `Actions` tab on your
+repository, you should see a workflow running your bot. Congratulations!
+
+### Displaying logs in actions terminal
+
+There's a nice utility for reading a Docker container's logs and stopping upon meeting a certain phrase, and it might be
+useful for you as well.
+
+1. Install the utility on your VPS with
+
+```shell
+$ pip install exendlr
+```
+
+2. Add a step to your workflow that shows the logs until it meets the `"ready"` phrase. I recommend putting it before
+   the cleanup.
+
+```yml
+- name: Display Logs
+ run: python3 -m exendlr mybot "ready"
+```
+
+Now you should see the logs of your bot until the stop phrase is met.
+
+**WARNING**
+> The utility only reads from STDERR and redirects to STDERR. If you are using STDOUT for logs, it will not work and
+> will wait for the stop phrase forever. The utility automatically exits if the bot's container is stopped (e.g. an error
+> occurred during startup) or if a log line contains the stop phrase. Make sure that your bot always displays the stop phrase
+> when it's ready, otherwise your workflow will get stuck.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
new file mode 100644
index 00000000..096e3a90
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/fix-ssl-certificate.md
@@ -0,0 +1,23 @@
+---
+title: Fixing an SSL Certificate Verification Error
+description: A guide on fixing verification of an SSL certificate.
+---
+
+We're fixing the error Python specifies as [ssl.SSLCertVerificationError](https://docs.python.org/3/library/ssl.html#ssl.SSLCertVerificationError).
+
+# How to fix SSL Certificate issue on Windows
+
+Firstly, try updating your OS; it wouldn't hurt to try.
+
+Now, if you're still having an issue, you will need to download the SSL certificate.
+
+Sectigo (the certificate vendor) provides a download link for the [SSL certificate](https://crt.sh/?id=2835394). You should find it in the bottom left corner of the page, as shown in the picture below:
+
+![location of certificate](/static/images/content/fix-ssl-certificate/pem.png)
+
+You have to set up the certificate yourself. To do that, you can just click on it, or if that doesn't work, refer to [this link](https://portal.threatpulse.com/docs/sol/Solutions/ManagePolicy/SSL/ssl_chrome_cert_ta.htm)
+
+# How to fix SSL Certificate issue on Mac
+
+Navigate to your `Applications/Python 3.x/` folder and double-click the `Install Certificates.command` to fix this.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md b/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md
new file mode 100644
index 00000000..9d523b4b
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/keeping-tokens-safe.md
@@ -0,0 +1,29 @@
+---
+title: Keeping Discord Bot Tokens Safe
+description: How to keep your bot tokens safe and safety measures you can take.
+---
+It's **very** important to keep a bot token safe,
+primarily because anyone who has the bot token can do whatever they want with the bot --
+such as destroying servers your bot has been added to and getting your bot banned from the API.
+
+# How to Avoid Leaking your Token
+To help prevent leaking your token,
+you should ensure that you don't upload it to an open source program/website,
+such as Replit or GitHub, as they show your code publicly.
+The best practice for storing tokens is generally utilising `.env` files
+([click here](https://vcokltfre.dev/tips/tokens/) for more information on storing tokens safely).
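+
+For example, here is a minimal sketch of loading a token from a `.env` file, assuming the `python-dotenv` package is installed and the file contains a line like `TOKEN=your-token-here`:
+
+```python
+import os
+
+from dotenv import load_dotenv
+
+load_dotenv()               # reads variables from a local .env file into the environment
+TOKEN = os.getenv("TOKEN")  # fetch the token without hard-coding it in the source
+
+# Pass TOKEN to your bot, e.g. `bot.run(TOKEN)`, rather than pasting the token into the code.
+```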
+
+# What should I do if my token does get leaked?
+
+If for whatever reason your token gets leaked, you should immediately follow these steps:
+- Go to the list of [Discord Bot Applications](https://discord.com/developers/applications) you have and select the bot application that had the token leaked.
+- Select the Bot (1) tab on the left-hand side, next to a small image of a puzzle piece. After doing so, you should see a small section named TOKEN (under your bot's USERNAME and next to its avatar image)
+- Press the Regenerate button to regenerate your bot token and invalidate the old one.
+
+![Steps to Take to Reset your Discord Bot](/static/images/content/regenerating_token.jpg)
+
+Following these steps will create a new token for your bot, making it secure again and terminating any connections from the leaked token.
+The old token will stop working though, so make sure to replace the old token with the new one in your code if you haven't already.
+
+# Summary
+Make sure you keep your token secure by storing it safely, not sending it to anyone you don't trust, and regenerating your token if it does get leaked.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/proper-error-handling.md b/pydis_site/apps/content/resources/guides/python-guides/proper-error-handling.md
new file mode 100644
index 00000000..74b0f59b
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/proper-error-handling.md
@@ -0,0 +1,70 @@
+---
+title: Proper error handling in discord.py
+description: Are you not getting any errors? This might be why!
+---
+If you're not receiving any errors in your console, even though you know you should be, try this:
+
+# With bot subclass:
+```py
+import discord
+from discord.ext import commands
+
+import traceback
+import sys
+
+class MyBot(commands.Bot):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ async def on_command_error(self, ctx: commands.Context, error):
+ # Handle your errors here
+ if isinstance(error, commands.MemberNotFound):
+ await ctx.send("I could not find member '{error.argument}'. Please try again")
+
+ elif isinstance(error, commands.MissingRequiredArgument):
+ await ctx.send(f"'{error.param.name}' is a required argument.")
+ else:
+ # All unhandled errors will print their original traceback
+ print(f'Ignoring exception in command {ctx.command}:', file=sys.stderr)
+ traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
+
+bot = MyBot(command_prefix="!", intents=discord.Intents.default())
+
+bot.run("token")
+```
+
+# Without bot subclass
+```py
+import discord
+from discord.ext import commands
+
+import traceback
+import sys
+
+async def on_command_error(ctx: commands.Context, error):
+ # Handle your errors here
+ if isinstance(error, commands.MemberNotFound):
+ await ctx.send("I could not find member '{error.argument}'. Please try again")
+
+ elif isinstance(error, commands.MissingRequiredArgument):
+ await ctx.send(f"'{error.param.name}' is a required argument.")
+ else:
+ # All unhandled errors will print their original traceback
+ print(f'Ignoring exception in command {ctx.command}:', file=sys.stderr)
+ traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
+
+bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())
+bot.on_command_error = on_command_error
+
+bot.run("token")
+```
+
+
+Make sure to import `traceback` and `sys`!
+
+-------------------------------------------------------------------------------------------------------------
+
+Useful Links:
+- [FAQ](https://discordpy.readthedocs.io/en/latest/faq.html)
+- [Simple Error Handling](https://gist.github.com/EvieePy/7822af90858ef65012ea500bcecf1612)
diff --git a/pydis_site/apps/content/resources/guides/python-guides/vps-services.md b/pydis_site/apps/content/resources/guides/python-guides/vps-services.md
index 0acd3e55..710fd914 100644
--- a/pydis_site/apps/content/resources/guides/python-guides/vps-services.md
+++ b/pydis_site/apps/content/resources/guides/python-guides/vps-services.md
@@ -1,9 +1,12 @@
---
-title: VPS Services
-description: On different VPS services
+title: VPS and Free Hosting Services for Discord Bots
+description: This article lists recommended VPS services and covers the disadvantages of utilising a free hosting service to run a Discord bot.
+toc: 2
---
-If you need to run your bot 24/7 (with no downtime), you should consider using a virtual private server (VPS). This is a list of VPS services that are sufficient for running Discord bots.
+## Recommended VPS services
+
+If you need to run your bot 24/7 (with no downtime), you should consider using a virtual private server (VPS). Here is a list of VPS services that are sufficient for running Discord bots.
* Europe
* [netcup](https://www.netcup.eu/)
@@ -25,7 +28,31 @@ If you need to run your bot 24/7 (with no downtime), you should consider using a
* [OVHcloud](https://www.ovhcloud.com/)
* [Vultr](https://www.vultr.com/)
----
-# Free hosts
-There are no reliable free options for VPS hosting. If you would rather not pay for a hosting service, you can consider self-hosting.
-Any modern hardware should be sufficient for running a bot. An old computer with a few GB of ram could be suitable, or a Raspberry Pi.
+
+## Why not use free hosting services for bots?
+While free hosting services may seem appealing, they come with more caveats than you might think. The drawbacks of the most common free hosting services for Discord bots are discussed below.
+
+### Replit
+
+- The machines are super underpowered, resulting in your bot lagging a lot as it gets bigger.
+
+- You need to run a webserver alongside your bot to prevent it from being shut off. This uses extra machine power.
+
+- Replit uses an ephemeral file system. This means any file your bot saves will be overwritten the next time it launches.
+
+- They use a shared IP for everything running on the service.
+This one is important: if someone running a user bot on the same IP gets banned, everyone on that IP will be banned, including you.
+
+### Heroku
+- Bots are not what the platform is designed for. Heroku is designed to provide web servers (like Django, Flask, etc.). This is why they give you a domain name and open a port on their local emulator.
+
+- Heroku's environment is heavily containerized, making it significantly underpowered for a standard use case.
+
+- Heroku's environment is volatile. In order to handle the huge number of users trying to use it for their own applications, Heroku will dispose of your environment every time your application dies, unless you pay.
+
+- Heroku has minimal system dependency control. If any of your Python requirements need C bindings (such as PyNaCl
+ binding to libsodium, or lxml binding to libxml), they are unlikely to function properly, if at all, in a native
+ environment. As such, you often need to resort to adding third-party buildpacks to facilitate otherwise normal
+  CPython extension functionality. (This is the reason why voice doesn't work natively on Heroku.)
+
+- Heroku only offers a limited amount of time on their free programme for your applications. If you exceed this limit, which you probably will, they'll shut down your application until your free credit resets.
diff --git a/pydis_site/apps/content/resources/guides/python-guides/why-not-json-as-database.md b/pydis_site/apps/content/resources/guides/python-guides/why-not-json-as-database.md
new file mode 100644
index 00000000..ae34c2b4
--- /dev/null
+++ b/pydis_site/apps/content/resources/guides/python-guides/why-not-json-as-database.md
@@ -0,0 +1,28 @@
+---
+title: Why JSON is unsuitable as a database
+description: The many reasons why you shouldn't use JSON as a database, and instead opt for SQL.
+relevant_links:
+ Tips on Storing Data: https://tutorial.vcokltfre.dev/tips/storage/
+---
+
+JSON, quite simply, is not a database. It's not designed to be a data storage format,
+but rather a way of transmitting data over a network. It's also often used as a format for program configuration files.
+
+There is no redundancy built into JSON. JSON is just a format, and Python has libraries for it,
+such as json and ujson, that let you load and dump it, sometimes to files, but that's all they do: write data to a file.
+There is no DBMS (Database Management System), which means there is no sophistication in how the data is stored,
+no built-in way to keep it safe and backed up, and no built-in encryption either. Bear in mind that
+in larger applications, encryption may be necessary for compliance with the GDPR or other relevant data protection regulations.
+
+JSON, unlike relational databases, has no way to store relational data,
+which is a very commonly needed way of storing data.
+Relational data, as the name may suggest, is data that relates to other data.
+For example, if you have a table of users and a table of servers, the server table will probably have an owner field,
+where you'd reference a user from the users table. (**This drawback is only relevant if your data is relational.**)
+
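+As a rough sketch of that users/servers example, here is what the relation looks like with Python's built-in `sqlite3` module (the table and column names are made up for illustration):
+
+```py
+import sqlite3
+
+con = sqlite3.connect(":memory:")
+con.executescript("""
+    CREATE TABLE users   (id INTEGER PRIMARY KEY, name TEXT);
+    CREATE TABLE servers (
+        id INTEGER PRIMARY KEY,
+        name TEXT,
+        owner_id INTEGER REFERENCES users(id)  -- the relation: each server points at its owner
+    );
+""")
+con.execute("INSERT INTO users VALUES (1, 'lemon')")
+con.execute("INSERT INTO servers VALUES (10, 'Python', 1)")
+
+# Join through the relation to answer "who owns this server?"
+row = con.execute(
+    "SELECT servers.name, users.name FROM servers JOIN users ON users.id = servers.owner_id"
+).fetchone()
+print(row)  # ('Python', 'lemon')
+```
+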
+JSON is primarily a KV (key-value) format, for example `{"a": "b"}` where `a` is the key and `b` is the value,
+but what if you want to search not by that key but by a sub-key? Well, instead of being able to quickly use `var[key]`,
+which in a Python dictionary is a constant-time lookup (for more info, look up hash tables),
+you now have to iterate through every object in the dictionary and compare to find what you're looking for.
+Most relational database systems, like MySQL, MariaDB, and PostgreSQL, have ways of indexing secondary fields
+apart from the primary key so that you can easily search by multiple attributes.
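+
+To make the point concrete, here is a small sketch (the data and names are invented for illustration) of the difference between looking up a known key and searching by a sub-key, and of how a relational database sidesteps the problem with an index on the secondary field:
+
+```py
+import sqlite3
+
+users = {
+    "123": {"name": "lemon", "points": 40},
+    "456": {"name": "aperture", "points": 90},
+}
+
+users["123"]  # constant-time: we already know the key
+
+# Searching by a sub-key means walking every entry and comparing:
+matches = [uid for uid, data in users.items() if data["name"] == "aperture"]
+print(matches)  # ['456']
+
+# A relational database can index that secondary field instead, e.g. with SQLite:
+con = sqlite3.connect(":memory:")
+con.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, points INTEGER)")
+con.executemany("INSERT INTO users VALUES (?, ?, ?)", [(123, "lemon", 40), (456, "aperture", 90)])
+con.execute("CREATE INDEX idx_users_name ON users (name)")
+
+# The index lets the engine find this row without scanning the whole table.
+print(con.execute("SELECT id, points FROM users WHERE name = ?", ("aperture",)).fetchone())
+```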
diff --git a/pydis_site/apps/content/resources/server-info/roles.md b/pydis_site/apps/content/resources/server-info/roles.md
index edc02066..409e037e 100644
--- a/pydis_site/apps/content/resources/server-info/roles.md
+++ b/pydis_site/apps/content/resources/server-info/roles.md
@@ -28,8 +28,12 @@ There are multiple requirements listed there for getting the role.
This includes writing pull requests for open issues, and also for reviewing open pull requests (**we really need reviewers!**)
**How to get it:** Contribute to the projects!
-There is no minimum requirements, but the role is **not** assigned for every single contribution.
-Read more about this in the [Guidelines for the Contributors Role](/pages/contributing/#guidelines-for-the-contributors-role) on the Contributing page.
+It’s difficult to precisely quantify contributions, but we’ve come up with the following guidelines for the role:
+
+- The member has made several significant contributions to our projects.
+- The member has a positive influence in our contributors subcommunity.
+
+The role will be assigned at the discretion of the Admin Team in consultation with the Core Developers Team. Check out our [walkthrough](/pages/contributing/) to get started contributing.
---
diff --git a/pydis_site/apps/content/resources/tags/_info.yml b/pydis_site/apps/content/resources/tags/_info.yml
new file mode 100644
index 00000000..054125ec
--- /dev/null
+++ b/pydis_site/apps/content/resources/tags/_info.yml
@@ -0,0 +1,3 @@
+title: Tags
+description: Useful snippets that are often used in the server.
+icon: fas fa-tags
diff --git a/pydis_site/apps/content/tests/test_utils.py b/pydis_site/apps/content/tests/test_utils.py
index be5ea897..462818b5 100644
--- a/pydis_site/apps/content/tests/test_utils.py
+++ b/pydis_site/apps/content/tests/test_utils.py
@@ -1,12 +1,34 @@
+import datetime
+import json
+import tarfile
+import tempfile
+import textwrap
from pathlib import Path
+from unittest import mock
+import httpx
+import markdown
from django.http import Http404
+from django.test import TestCase
-from pydis_site.apps.content import utils
+from pydis_site import settings
+from pydis_site.apps.content import models, utils
from pydis_site.apps.content.tests.helpers import (
BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA
)
+_time = datetime.datetime(2022, 10, 10, 10, 10, 10, tzinfo=datetime.timezone.utc)
+_time_str = _time.strftime(settings.GITHUB_TIMESTAMP_FORMAT)
+TEST_COMMIT_KWARGS = {
+ "sha": "123",
+ "message": "Hello world\n\nThis is a commit message",
+ "date": _time,
+ "authors": json.dumps([
+ {"name": "Author 1", "email": "[email protected]", "date": _time_str},
+ {"name": "Author 2", "email": "[email protected]", "date": _time_str},
+ ]),
+}
+
class GetCategoryTests(MockPagesTestCase):
"""Tests for the get_category function."""
@@ -96,3 +118,268 @@ class GetPageTests(MockPagesTestCase):
def test_get_nonexistent_page_returns_404(self):
with self.assertRaises(Http404):
utils.get_page(Path(BASE_PATH, "invalid"))
+
+
+class TagUtilsTests(TestCase):
+ """Tests for the tag-related utilities."""
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.commit = models.Commit.objects.create(**TEST_COMMIT_KWARGS)
+
+ @mock.patch.object(utils, "fetch_tags")
+ def test_static_fetch(self, fetch_mock: mock.Mock):
+        """Test that the static fetch function is called at most once during static builds."""
+ tags = [models.Tag(name="Name", body="body")]
+ fetch_mock.return_value = tags
+ result = utils.get_tags_static()
+ second_result = utils.get_tags_static()
+
+ fetch_mock.assert_called_once()
+ self.assertEqual(tags, result)
+ self.assertEqual(tags, second_result)
+
+ @mock.patch("httpx.Client.get")
+ def test_mocked_fetch(self, get_mock: mock.Mock):
+ """Test that proper data is returned from fetch, but with a mocked API response."""
+ fake_request = httpx.Request("GET", "https://google.com")
+
+ # Metadata requests
+ returns = [httpx.Response(
+ request=fake_request,
+ status_code=200,
+ json=[
+ {"type": "file", "name": "first_tag.md", "sha": "123"},
+ {"type": "file", "name": "second_tag.md", "sha": "456"},
+ {"type": "dir", "name": "some_group", "sha": "789", "url": "/some_group"},
+ ]
+ ), httpx.Response(
+ request=fake_request,
+ status_code=200,
+ json=[{"type": "file", "name": "grouped_tag.md", "sha": "789123"}]
+ )]
+
+ # Main content request
+ bodies = (
+ "This is the first tag!",
+ textwrap.dedent("""
+ ---
+ frontmatter: empty
+ ---
+ This tag has frontmatter!
+ """),
+ "This is a grouped tag!",
+ )
+
+ # Generate a tar archive with a few tags
+ with tempfile.TemporaryDirectory() as tar_folder:
+ tar_folder = Path(tar_folder)
+ with tempfile.TemporaryDirectory() as folder:
+ folder = Path(folder)
+ (folder / "ignored_file.md").write_text("This is an ignored file.")
+ tags_folder = folder / "bot/resources/tags"
+ tags_folder.mkdir(parents=True)
+
+ (tags_folder / "first_tag.md").write_text(bodies[0])
+ (tags_folder / "second_tag.md").write_text(bodies[1])
+
+ group_folder = tags_folder / "some_group"
+ group_folder.mkdir()
+ (group_folder / "grouped_tag.md").write_text(bodies[2])
+
+ with tarfile.open(tar_folder / "temp.tar", "w") as file:
+ file.add(folder, recursive=True)
+
+ body = (tar_folder / "temp.tar").read_bytes()
+
+ returns.append(httpx.Response(
+ status_code=200,
+ content=body,
+ request=fake_request,
+ ))
+
+ get_mock.side_effect = returns
+ result = utils.fetch_tags()
+
+ def sort(_tag: models.Tag) -> str:
+ return _tag.name
+
+ self.assertEqual(sorted([
+ models.Tag(name="first_tag", body=bodies[0], sha="123"),
+            models.Tag(name="second_tag", body=bodies[1], sha="456"),
+ models.Tag(name="grouped_tag", body=bodies[2], group=group_folder.name, sha="789123"),
+ ], key=sort), sorted(result, key=sort))
+
+ def test_get_real_tag(self):
+ """Test that a single tag is returned if it exists."""
+ tag = models.Tag.objects.create(name="real-tag", last_commit=self.commit)
+ result = utils.get_tag("real-tag")
+
+ self.assertEqual(tag, result)
+
+ def test_get_grouped_tag(self):
+ """Test fetching a tag from a group."""
+ tag = models.Tag.objects.create(
+ name="real-tag", group="real-group", last_commit=self.commit
+ )
+ result = utils.get_tag("real-group/real-tag")
+
+ self.assertEqual(tag, result)
+
+ def test_get_group(self):
+ """Test fetching a group of tags."""
+ included = [
+ models.Tag.objects.create(name="tag-1", group="real-group"),
+ models.Tag.objects.create(name="tag-2", group="real-group"),
+ models.Tag.objects.create(name="tag-3", group="real-group"),
+ ]
+
+ models.Tag.objects.create(name="not-included-1")
+ models.Tag.objects.create(name="not-included-2", group="other-group")
+
+ result = utils.get_tag("real-group")
+ self.assertListEqual(included, result)
+
+ def test_get_tag_404(self):
+ """Test that an error is raised when we fetch a non-existing tag."""
+ models.Tag.objects.create(name="real-tag")
+ with self.assertRaises(models.Tag.DoesNotExist):
+ utils.get_tag("fake")
+
+ @mock.patch.object(utils, "get_tag_category")
+ def test_category_pages(self, get_mock: mock.Mock):
+ """Test that the category pages function calls the correct method for tags."""
+ tag = models.Tag.objects.create(name="tag")
+ get_mock.return_value = tag
+ result = utils.get_category_pages(settings.CONTENT_PAGES_PATH / "tags")
+ self.assertEqual(tag, result)
+ get_mock.assert_called_once_with(collapse_groups=True)
+
+ def test_get_category_root(self):
+ """Test that all tags are returned and formatted properly for the tag root page."""
+ body = "normal body"
+ base = {"description": markdown.markdown(body), "icon": "fas fa-tag"}
+
+        models.Tag.objects.create(name="tag-1", body=body)
+        models.Tag.objects.create(name="tag-2", body=body)
+        models.Tag.objects.create(name="tag-3", body=body)
+
+ models.Tag.objects.create(name="tag-4", body=body, group="tag-group")
+ models.Tag.objects.create(name="tag-5", body=body, group="tag-group")
+
+ result = utils.get_tag_category(collapse_groups=True)
+
+ self.assertDictEqual({
+ "tag-1": {**base, "title": "tag-1"},
+ "tag-2": {**base, "title": "tag-2"},
+ "tag-3": {**base, "title": "tag-3"},
+ "tag-group": {
+ "title": "tag-group",
+ "description": "Contains the following tags: tag-4, tag-5",
+ "icon": "fas fa-tags"
+ }
+ }, result)
+
+ def test_get_category_group(self):
+ """Test the function for a group root page."""
+ body = "normal body"
+ base = {"description": markdown.markdown(body), "icon": "fas fa-tag"}
+
+ included = [
+ models.Tag.objects.create(name="tag-1", body=body, group="group"),
+ models.Tag.objects.create(name="tag-2", body=body, group="group"),
+ ]
+ models.Tag.objects.create(name="not-included", body=body)
+
+ result = utils.get_tag_category(included, collapse_groups=False)
+ self.assertDictEqual({
+ "tag-1": {**base, "title": "tag-1"},
+ "tag-2": {**base, "title": "tag-2"},
+ }, result)
+
+ def test_tag_url(self):
+ """Test that tag URLs are generated correctly."""
+ cases = [
+ ({"name": "tag"}, f"{models.Tag.URL_BASE}/tag.md"),
+ ({"name": "grouped", "group": "abc"}, f"{models.Tag.URL_BASE}/abc/grouped.md"),
+ ]
+
+ for options, url in cases:
+ tag = models.Tag(**options)
+ with self.subTest(tag=tag):
+ self.assertEqual(url, tag.url)
+
+ @mock.patch("httpx.Client.get")
+ def test_get_tag_commit(self, get_mock: mock.Mock):
+ """Test the get commit function with a normal tag."""
+ tag = models.Tag.objects.create(name="example")
+
+ authors = json.loads(self.commit.authors)
+
+ get_mock.return_value = httpx.Response(
+ request=httpx.Request("GET", "https://google.com"),
+ status_code=200,
+ json=[{
+ "sha": self.commit.sha,
+ "commit": {
+ "message": self.commit.message,
+ "author": authors[0],
+ "committer": authors[1],
+ }
+ }]
+ )
+
+ result = utils.get_tag(tag.name)
+ self.assertEqual(tag, result)
+
+ get_mock.assert_called_once()
+ call_params = get_mock.call_args[1]["params"]
+
+ self.assertEqual({"path": "/bot/resources/tags/example.md"}, call_params)
+ self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit)
+
+ @mock.patch("httpx.Client.get")
+ def test_get_group_tag_commit(self, get_mock: mock.Mock):
+ """Test the get commit function with a group tag."""
+ tag = models.Tag.objects.create(name="example", group="group-name")
+
+ authors = json.loads(self.commit.authors)
+ authors.pop()
+ self.commit.authors = json.dumps(authors)
+ self.commit.save()
+
+ get_mock.return_value = httpx.Response(
+ request=httpx.Request("GET", "https://google.com"),
+ status_code=200,
+ json=[{
+ "sha": self.commit.sha,
+ "commit": {
+ "message": self.commit.message,
+ "author": authors[0],
+ "committer": authors[0],
+ }
+ }]
+ )
+
+ utils.set_tag_commit(tag)
+
+ get_mock.assert_called_once()
+ call_params = get_mock.call_args[1]["params"]
+
+ self.assertEqual({"path": "/bot/resources/tags/group-name/example.md"}, call_params)
+ self.assertEqual(self.commit, models.Tag.objects.get(name=tag.name).last_commit)
+
+ @mock.patch.object(utils, "set_tag_commit")
+    def test_existing_commit(self, set_commit_mock: mock.Mock):
+        """Test that the existing commit is kept when the tag data has not changed."""
+ tag = models.Tag.objects.create(name="tag-name", body="old body", last_commit=self.commit)
+
+ # This is only applied to the object, not to the database
+ tag.last_commit = None
+
+ utils.record_tags([tag])
+ self.assertEqual(self.commit, tag.last_commit)
+
+ result = utils.get_tag("tag-name")
+ self.assertEqual(tag, result)
+ set_commit_mock.assert_not_called()
diff --git a/pydis_site/apps/content/tests/test_views.py b/pydis_site/apps/content/tests/test_views.py
index eadad7e3..3ef9bcc4 100644
--- a/pydis_site/apps/content/tests/test_views.py
+++ b/pydis_site/apps/content/tests/test_views.py
@@ -1,12 +1,18 @@
+import textwrap
from pathlib import Path
from unittest import TestCase
+import django.test
+import markdown
from django.http import Http404
from django.test import RequestFactory, SimpleTestCase, override_settings
+from django.urls import reverse
+from pydis_site.apps.content.models import Commit, Tag
from pydis_site.apps.content.tests.helpers import (
BASE_PATH, MockPagesTestCase, PARSED_CATEGORY_INFO, PARSED_HTML, PARSED_METADATA
)
+from pydis_site.apps.content.tests.test_utils import TEST_COMMIT_KWARGS
from pydis_site.apps.content.views import PageOrCategoryView
@@ -172,7 +178,7 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase):
for item in context["breadcrumb_items"]:
item["path"] = Path(item["path"])
- self.assertEquals(
+ self.assertEqual(
context["breadcrumb_items"],
[
{"name": PARSED_CATEGORY_INFO["title"], "path": Path(".")},
@@ -180,3 +186,217 @@ class PageOrCategoryViewTests(MockPagesTestCase, SimpleTestCase, TestCase):
{"name": PARSED_CATEGORY_INFO["title"], "path": Path("category/subcategory")},
]
)
+
+
+class TagViewTests(django.test.TestCase):
+ """Tests for the TagView class."""
+
+ def setUp(self):
+ """Set test helpers, then set up fake filesystem."""
+ super().setUp()
+ self.commit = Commit.objects.create(**TEST_COMMIT_KWARGS)
+
+ def test_routing(self):
+ """Test that the correct template is returned for each route."""
+ Tag.objects.create(name="example", last_commit=self.commit)
+ Tag.objects.create(name="grouped-tag", group="group-name", last_commit=self.commit)
+
+ cases = [
+ ("/pages/tags/example/", "content/tag.html"),
+ ("/pages/tags/group-name/", "content/listing.html"),
+ ("/pages/tags/group-name/grouped-tag/", "content/tag.html"),
+ ]
+
+ for url, template in cases:
+ with self.subTest(url=url):
+ response = self.client.get(url)
+ self.assertEqual(200, response.status_code)
+ self.assertTemplateUsed(response, template)
+
+ def test_valid_tag_returns_200(self):
+ """Test that a page is returned for a valid tag."""
+ Tag.objects.create(name="example", body="This is the tag body.", last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(200, response.status_code)
+ self.assertIn("This is the tag body", response.content.decode("utf-8"))
+ self.assertTemplateUsed(response, "content/tag.html")
+
+ def test_invalid_tag_404(self):
+ """Test that a tag which doesn't exist raises a 404."""
+ response = self.client.get("/pages/tags/non-existent/")
+ self.assertEqual(404, response.status_code)
+
+ def test_context_tag(self):
+ """Test that the context contains the required data for a tag."""
+ body = textwrap.dedent("""
+ ---
+ unused: frontmatter
+ ----
+ Tag content here.
+ """)
+
+ tag = Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ expected = {
+ "page_title": "example",
+ "page": markdown.markdown("Tag content here."),
+ "tag": tag,
+ "breadcrumb_items": [
+ {"name": "Pages", "path": "."},
+ {"name": "Tags", "path": "tags"},
+ ]
+ }
+ for key in expected:
+ self.assertEqual(
+ expected[key], response.context.get(key), f"context.{key} did not match"
+ )
+
+ def test_context_grouped_tag(self):
+ """
+ Test the context for a tag in a group.
+
+ The only difference between this and a regular tag are the breadcrumbs,
+ so only those are checked.
+ """
+ Tag.objects.create(
+ name="example", body="Body text", group="group-name", last_commit=self.commit
+ )
+ response = self.client.get("/pages/tags/group-name/example/")
+ self.assertListEqual([
+ {"name": "Pages", "path": "."},
+ {"name": "Tags", "path": "tags"},
+ {"name": "group-name", "path": "tags/group-name"},
+ ], response.context.get("breadcrumb_items"))
+
+ def test_group_page(self):
+ """Test rendering of a group's root page."""
+ Tag.objects.create(name="tag-1", body="Body 1", group="group-name", last_commit=self.commit)
+ Tag.objects.create(name="tag-2", body="Body 2", group="group-name", last_commit=self.commit)
+ Tag.objects.create(name="not-included", last_commit=self.commit)
+
+ response = self.client.get("/pages/tags/group-name/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML("<div class='level-left'>group-name</div>", content)
+ self.assertInHTML(
+ f"<a class='level-item fab fa-github' href='{Tag.URL_BASE}/group-name'>",
+ content
+ )
+ self.assertIn(">tag-1</span>", content)
+ self.assertIn(">tag-2</span>", content)
+ self.assertNotIn(
+ ">not-included</span>",
+ content,
+ "Tags not in this group shouldn't be rendered."
+ )
+
+ self.assertInHTML("<p>Body 1</p>", content)
+
+ def test_markdown(self):
+ """Test that markdown content is rendered properly."""
+ body = textwrap.dedent("""
+ ```py
+ Hello world!
+ ```
+
+ **This text is in bold**
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML('<code class="language-py">Hello world!</code>', content)
+ self.assertInHTML("<strong>This text is in bold</strong>", content)
+
+ def test_embed(self):
+ """Test that an embed from the frontmatter is treated correctly."""
+ body = textwrap.dedent("""
+ ---
+ embed:
+ title: Embed title
+ image:
+ url: https://google.com
+ ---
+ Tag body.
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ content = response.content.decode("utf-8")
+
+ self.assertInHTML('<img alt="Embed title" src="https://google.com"/>', content)
+ self.assertInHTML("<p>Tag body.</p>", content)
+
+ def test_embed_title(self):
+ """Test that the page title gets set to the embed title."""
+ body = textwrap.dedent("""
+ ---
+ embed:
+ title: Embed title
+ ---
+ """)
+
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ "Embed title",
+ response.context.get("page_title"),
+ "The page title must match the embed title."
+ )
+
+ def test_hyperlinked_item(self):
+ """Test hyperlinking of tags works as intended."""
+ filler_before, filler_after = "empty filler text\n\n", "more\nfiller"
+ body = filler_before + "`!tags return`" + filler_after
+ Tag.objects.create(name="example", body=body, last_commit=self.commit)
+
+ other_url = reverse("content:tag", kwargs={"location": "return"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(filler_before + f"[`!tags return`]({other_url})" + filler_after),
+ response.context.get("page")
+ )
+
+ def test_hyperlinked_group(self):
+ """Test hyperlinking with a group works as intended."""
+ Tag.objects.create(
+ name="example", body="!tags group-name grouped-tag", last_commit=self.commit
+ )
+ Tag.objects.create(name="grouped-tag", group="group-name")
+
+ other_url = reverse("content:tag", kwargs={"location": "group-name/grouped-tag"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(f"[!tags group-name grouped-tag]({other_url})"),
+ response.context.get("page")
+ )
+
+ def test_hyperlinked_extra_text(self):
+ """Test hyperlinking when a tag is followed by extra, unrelated text."""
+ Tag.objects.create(
+ name="example", body="!tags other unrelated text", last_commit=self.commit
+ )
+ Tag.objects.create(name="other")
+
+ other_url = reverse("content:tag", kwargs={"location": "other"})
+ response = self.client.get("/pages/tags/example/")
+ self.assertEqual(
+ markdown.markdown(f"[!tags other]({other_url}) unrelated text"),
+ response.context.get("page")
+ )
+
+ def test_tag_root_page(self):
+ """Test the root tag page which lists all tags."""
+ Tag.objects.create(name="tag-1", last_commit=self.commit)
+ Tag.objects.create(name="tag-2", last_commit=self.commit)
+ Tag.objects.create(name="tag-3", last_commit=self.commit)
+
+ response = self.client.get("/pages/tags/")
+ content = response.content.decode("utf-8")
+
+ self.assertTemplateUsed(response, "content/listing.html")
+ self.assertInHTML('<div class="level-left">Tags</div>', content)
+
+ for tag_number in range(1, 4):
+ self.assertIn(f"tag-{tag_number}</span>", content)
diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py
index f8496095..a7695a27 100644
--- a/pydis_site/apps/content/urls.py
+++ b/pydis_site/apps/content/urls.py
@@ -3,7 +3,7 @@ from pathlib import Path
from django_distill import distill_path
-from . import views
+from . import utils, views
app_name = "content"
@@ -29,15 +29,38 @@ def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[st
return results
-def get_all_pages() -> typing.Iterator[dict[str, str]]:
+DISTILL_RETURN = typing.Iterator[dict[str, str]]
+
+
+def get_all_pages() -> DISTILL_RETURN:
"""Yield a dict of all page categories."""
for location in __get_all_files(Path("pydis_site", "apps", "content", "resources")):
yield {"location": location}
+def get_all_tags() -> DISTILL_RETURN:
+ """Return all tag names and groups in static builds."""
+    # We seed the set with None so that removing it later always works,
+    # whether or not it was also added in the loop
+ groups = {None}
+ for tag in utils.get_tags_static():
+ groups.add(tag.group)
+ yield {"location": (f"{tag.group}/" if tag.group else "") + tag.name}
+
+ groups.remove(None)
+ for group in groups:
+ yield {"location": group}
+
+
urlpatterns = [
distill_path("", views.PageOrCategoryView.as_view(), name='pages'),
distill_path(
+ "tags/<path:location>/",
+ views.TagView.as_view(),
+ name="tag",
+ distill_func=get_all_tags
+ ),
+ distill_path(
"<path:location>/",
views.PageOrCategoryView.as_view(),
name='page_category',
diff --git a/pydis_site/apps/content/utils.py b/pydis_site/apps/content/utils.py
index d3f270ff..c12893ef 100644
--- a/pydis_site/apps/content/utils.py
+++ b/pydis_site/apps/content/utils.py
@@ -1,14 +1,41 @@
+import datetime
+import functools
+import json
+import tarfile
+import tempfile
+from io import BytesIO
from pathlib import Path
-from typing import Dict, Tuple
import frontmatter
+import httpx
import markdown
import yaml
from django.http import Http404
+from django.utils import timezone
from markdown.extensions.toc import TocExtension
+from pydis_site import settings
+from .models import Commit, Tag
-def get_category(path: Path) -> Dict[str, str]:
+TAG_CACHE_TTL = datetime.timedelta(hours=1)
+
+
+def github_client(**kwargs) -> httpx.Client:
+ """Get a client to access the GitHub API with important settings pre-configured."""
+ client = httpx.Client(
+ base_url=settings.GITHUB_API,
+ follow_redirects=True,
+ timeout=settings.TIMEOUT_PERIOD,
+ **kwargs
+ )
+ if settings.GITHUB_TOKEN: # pragma: no cover
+ if not client.headers.get("Authorization"):
+ client.headers = {"Authorization": f"token {settings.GITHUB_TOKEN}"}
+
+ return client
+
+
+def get_category(path: Path) -> dict[str, str]:
"""Load category information by name from _info.yml."""
if not path.is_dir():
raise Http404("Category not found.")
@@ -16,7 +43,7 @@ def get_category(path: Path) -> Dict[str, str]:
return yaml.safe_load(path.joinpath("_info.yml").read_text(encoding="utf-8"))
-def get_categories(path: Path) -> Dict[str, Dict]:
+def get_categories(path: Path) -> dict[str, dict]:
"""Get information for all categories."""
categories = {}
@@ -27,8 +54,253 @@ def get_categories(path: Path) -> Dict[str, Dict]:
return categories
-def get_category_pages(path: Path) -> Dict[str, Dict]:
+def get_tags_static() -> list[Tag]:
+ """
+ Fetch tag information in static builds.
+
+ This also includes some fake tags to preview the tag groups feature.
+ This will return a cached value, so it should only be used for static builds.
+ """
+ tags = fetch_tags()
+ for tag in tags[3:5]: # pragma: no cover
+ tag.group = "very-cool-group"
+ return tags
+
+
+def fetch_tags() -> list[Tag]:
+ """
+ Fetch tag data from the GitHub API.
+
+ The entire repository is downloaded and extracted locally because
+ getting file content would require one request per file, and can get rate-limited.
+ """
+ with github_client() as client:
+ # Grab metadata
+ metadata = client.get("/repos/python-discord/bot/contents/bot/resources")
+ metadata.raise_for_status()
+
+ hashes = {}
+ for entry in metadata.json():
+ if entry["type"] == "dir":
+ # Tag group
+ files = client.get(entry["url"])
+ files.raise_for_status()
+ files = files.json()
+ else:
+ files = [entry]
+
+ for file in files:
+ hashes[file["name"]] = file["sha"]
+
+ # Download the files
+ tar_file = client.get("/repos/python-discord/bot/tarball")
+ tar_file.raise_for_status()
+
+ tags = []
+ with tempfile.TemporaryDirectory() as folder:
+ with tarfile.open(fileobj=BytesIO(tar_file.content)) as repo:
+ included = []
+ for file in repo.getmembers():
+ if "/bot/resources/tags" in file.path:
+ included.append(file)
+ repo.extractall(folder, included)
+
+ for tag_file in Path(folder).rglob("*.md"):
+ name = tag_file.name
+ group = None
+ if tag_file.parent.name != "tags":
+ # Tags in sub-folders are considered part of a group
+ group = tag_file.parent.name
+
+ tags.append(Tag(
+ name=name.removesuffix(".md"),
+ sha=hashes[name],
+ group=group,
+ body=tag_file.read_text(encoding="utf-8"),
+ last_commit=None,
+ ))
+
+ return tags
+
+
+def set_tag_commit(tag: Tag) -> None:
+ """Fetch commit information from the API, and save it for the tag."""
+ if settings.STATIC_BUILD: # pragma: no cover
+ # Static builds request every page during build, which can ratelimit it.
+ # Instead, we return some fake data.
+ tag.last_commit = Commit(
+ sha="68da80efc00d9932a209d5cccd8d344cec0f09ea",
+ message="Initial Commit\n\nTHIS IS FAKE DEMO DATA",
+ date=datetime.datetime(2018, 2, 3, 12, 20, 26, tzinfo=datetime.timezone.utc),
+ authors=json.dumps([{"name": "Joseph", "email": "[email protected]"}]),
+ )
+ return
+
+ path = "/bot/resources/tags"
+ if tag.group:
+ path += f"/{tag.group}"
+ path += f"/{tag.name}.md"
+
+ # Fetch and set the commit
+ with github_client() as client:
+ data = client.get("/repos/python-discord/bot/commits", params={"path": path})
+ data.raise_for_status()
+ data = data.json()[0]
+
+ commit = data["commit"]
+ author, committer = commit["author"], commit["committer"]
+
+ date = datetime.datetime.strptime(committer["date"], settings.GITHUB_TIMESTAMP_FORMAT)
+ date = date.replace(tzinfo=datetime.timezone.utc)
+
+ if author["email"] == committer["email"]:
+ authors = [author]
+ else:
+ authors = [author, committer]
+
+ commit_obj, _ = Commit.objects.get_or_create(
+ sha=data["sha"],
+ message=commit["message"],
+ date=date,
+ authors=json.dumps(authors),
+ )
+ tag.last_commit = commit_obj
+ tag.save()
+
+
+def record_tags(tags: list[Tag]) -> None:
+ """Sync the database with an updated set of tags."""
+ # Remove entries which no longer exist
+ Tag.objects.exclude(name__in=[tag.name for tag in tags]).delete()
+
+ # Insert/update the tags
+ for new_tag in tags:
+ try:
+ old_tag = Tag.objects.get(name=new_tag.name)
+ except Tag.DoesNotExist:
+ # The tag is not in the database yet,
+            # pretend its previous state is the current state
+ old_tag = new_tag
+
+ if old_tag.sha == new_tag.sha and old_tag.last_commit is not None:
+ # We still have an up-to-date commit entry
+ new_tag.last_commit = old_tag.last_commit
+
+ new_tag.save()
+
+ # Drop old, unused commits
+ Commit.objects.filter(tag__isnull=True).delete()
+
+
+def get_tags() -> list[Tag]:
+ """Return a list of all tags visible to the application, from the cache or API."""
+ if settings.STATIC_BUILD: # pragma: no cover
+ last_update = None
+ else:
+ last_update = (
+ Tag.objects.values_list("last_updated", flat=True)
+ .order_by("last_updated").first()
+ )
+
+ if last_update is None or timezone.now() >= (last_update + TAG_CACHE_TTL):
+ # Stale or empty cache
+ if settings.STATIC_BUILD: # pragma: no cover
+ tags = get_tags_static()
+ else:
+ tags = fetch_tags()
+ record_tags(tags)
+
+ return tags
+ else:
+ # Get tags from database
+ return list(Tag.objects.all())
+
+
+def get_tag(path: str, *, skip_sync: bool = False) -> Tag | list[Tag]:
+ """
+ Return a tag based on the search location.
+
+ If certain tag data is out of sync (for instance a commit date is missing),
+ an extra request will be made to sync the information.
+
+ The tag name and group must match. If only one argument is provided in the path,
+ it's assumed to either be a group name, or a no-group tag name.
+
+ If it's a group name, a list of tags which belong to it is returned.
+ """
+ path = path.split("/")
+ if len(path) == 2:
+ group, name = path
+ else:
+ name = path[0]
+ group = None
+
+ matches = []
+ for tag in get_tags():
+ if tag.name == name and tag.group == group:
+ if tag.last_commit is None and not skip_sync:
+ set_tag_commit(tag)
+ return tag
+ elif tag.group == name and group is None:
+ matches.append(tag)
+
+ if matches:
+ return matches
+
+ raise Tag.DoesNotExist()
+
+
+def get_tag_category(tags: list[Tag] | None = None, *, collapse_groups: bool) -> dict[str, dict]:
+ """
+ Generate context data for `tags`, or all tags if None.
+
+ If `tags` is None, `get_tag` is used to populate the data.
+ If `collapse_groups` is True, tags with parent groups are not included in the list,
+    and instead the parent itself is included as a single entry with its sub-tags
+ in the description.
+ """
+ if not tags:
+ tags = get_tags()
+
+ data = []
+ groups = {}
+
+ # Create all the metadata for the tags
+ for tag in tags:
+ if tag.group is None or not collapse_groups:
+ content = frontmatter.parse(tag.body)[1]
+ data.append({
+ "title": tag.name,
+ "description": markdown.markdown(content, extensions=["pymdownx.superfences"]),
+ "icon": "fas fa-tag",
+ })
+ else:
+ if tag.group not in groups:
+ groups[tag.group] = {
+ "title": tag.group,
+ "description": [tag.name],
+ "icon": "fas fa-tags",
+ }
+ else:
+ groups[tag.group]["description"].append(tag.name)
+
+ # Flatten group description into a single string
+ for group in groups.values():
+ # If the following string is updated, make sure to update it in the frontend JS as well
+ group["description"] = "Contains the following tags: " + ", ".join(group["description"])
+ data.append(group)
+
+ # Sort the tags, and return them in the proper format
+ return {tag["title"]: tag for tag in sorted(data, key=lambda tag: tag["title"].casefold())}
+
+
+def get_category_pages(path: Path) -> dict[str, dict]:
"""Get all page names and their metadata at a category path."""
+ # Special handling for tags
+ if path == Path(__file__).parent / "resources/tags":
+ return get_tag_category(collapse_groups=True)
+
pages = {}
for item in path.glob("*.md"):
@@ -39,7 +311,7 @@ def get_category_pages(path: Path) -> Dict[str, Dict]:
return pages
-def get_page(path: Path) -> Tuple[str, Dict]:
+def get_page(path: Path) -> tuple[str, dict]:
"""Get one specific page."""
if not path.is_file():
raise Http404("Page not found.")
diff --git a/pydis_site/apps/content/views/__init__.py b/pydis_site/apps/content/views/__init__.py
index 70ea1c7a..a969b1dc 100644
--- a/pydis_site/apps/content/views/__init__.py
+++ b/pydis_site/apps/content/views/__init__.py
@@ -1,3 +1,4 @@
from .page_category import PageOrCategoryView
+from .tags import TagView
-__all__ = ["PageOrCategoryView"]
+__all__ = ["PageOrCategoryView", "TagView"]
diff --git a/pydis_site/apps/content/views/page_category.py b/pydis_site/apps/content/views/page_category.py
index 5af77aff..062c2bc1 100644
--- a/pydis_site/apps/content/views/page_category.py
+++ b/pydis_site/apps/content/views/page_category.py
@@ -1,18 +1,17 @@
-import typing as t
from pathlib import Path
import frontmatter
from django.conf import settings
-from django.http import Http404
+from django.http import Http404, HttpRequest, HttpResponse
from django.views.generic import TemplateView
-from pydis_site.apps.content import utils
+from pydis_site.apps.content import models, utils
class PageOrCategoryView(TemplateView):
"""Handles pages and page categories."""
- def dispatch(self, request: t.Any, *args, **kwargs) -> t.Any:
+ def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Conform URL path location to the filesystem path."""
self.location = Path(kwargs.get("location", ""))
@@ -25,7 +24,7 @@ class PageOrCategoryView(TemplateView):
return super().dispatch(request, *args, **kwargs)
- def get_template_names(self) -> t.List[str]:
+ def get_template_names(self) -> list[str]:
"""Checks if the view uses the page template or listing template."""
if self.page_path.is_file():
template_name = "content/page.html"
@@ -36,7 +35,7 @@ class PageOrCategoryView(TemplateView):
return [template_name]
- def get_context_data(self, **kwargs) -> t.Dict[str, t.Any]:
+ def get_context_data(self, **kwargs) -> dict[str, any]:
"""Assign proper context variables based on what resource user requests."""
context = super().get_context_data(**kwargs)
@@ -73,7 +72,7 @@ class PageOrCategoryView(TemplateView):
return context
@staticmethod
- def _get_page_context(path: Path) -> t.Dict[str, t.Any]:
+ def _get_page_context(path: Path) -> dict[str, any]:
page, metadata = utils.get_page(path)
return {
"page": page,
@@ -84,7 +83,7 @@ class PageOrCategoryView(TemplateView):
}
@staticmethod
- def _get_category_context(path: Path) -> t.Dict[str, t.Any]:
+ def _get_category_context(path: Path) -> dict[str, any]:
category = utils.get_category(path)
return {
"categories": utils.get_categories(path),
@@ -92,4 +91,7 @@ class PageOrCategoryView(TemplateView):
"page_title": category["title"],
"page_description": category["description"],
"icon": category.get("icon"),
+ "app_name": "content:page_category",
+ "is_tag_listing": "/resources/tags" in path.as_posix(),
+ "tag_url": models.Tag.URL_BASE,
}
diff --git a/pydis_site/apps/content/views/tags.py b/pydis_site/apps/content/views/tags.py
new file mode 100644
index 00000000..4f4bb5a2
--- /dev/null
+++ b/pydis_site/apps/content/views/tags.py
@@ -0,0 +1,124 @@
+import re
+import typing
+
+import frontmatter
+import markdown
+from django.conf import settings
+from django.http import Http404
+from django.urls import reverse
+from django.views.generic import TemplateView
+
+from pydis_site.apps.content import utils
+from pydis_site.apps.content.models import Tag
+
+# The following regex tries to parse a tag command
+# It'll read up to two words seperated by spaces
+# If the command does not include a group, the tag name will be in the `first` group
+# If there's a second word after the command, or if there's a tag group, extra logic
+# is necessary to determine whether it's a tag with a group, or a tag with text after it
+COMMAND_REGEX = re.compile(r"`*!tags? (?P<first>[\w-]+)(?P<second> [\w-]+)?`*")
+
+
+class TagView(TemplateView):
+ """Handles tag pages."""
+
+ tag: typing.Union[Tag, list[Tag]]
+ is_group: bool
+
+ def setup(self, *args, **kwargs) -> None:
+ """Look for a tag, and configure the view."""
+ super().setup(*args, **kwargs)
+
+ try:
+ self.tag = utils.get_tag(kwargs.get("location"))
+ self.is_group = isinstance(self.tag, list)
+ except Tag.DoesNotExist:
+ raise Http404
+
+ def get_template_names(self) -> list[str]:
+ """Either return the tag page template, or the listing."""
+ if self.is_group:
+ template_name = "content/listing.html"
+ else:
+ template_name = "content/tag.html"
+
+ return [template_name]
+
+ def get_context_data(self, **kwargs) -> dict:
+ """Get the relevant context for this tag page or group."""
+ context = super().get_context_data(**kwargs)
+ context["breadcrumb_items"] = [{
+ "name": utils.get_category(settings.CONTENT_PAGES_PATH / location)["title"],
+ "path": location,
+ } for location in (".", "tags")]
+
+ if self.is_group:
+ self._set_group_context(context, self.tag)
+ else:
+ self._set_tag_context(context, self.tag)
+
+ return context
+
+ @staticmethod
+ def _set_tag_context(context: dict[str, any], tag: Tag) -> None:
+ """Update the context with the information for a tag page."""
+ context.update({
+ "page_title": tag.name,
+ "tag": tag,
+ })
+
+ if tag.group:
+ # Add group names to the breadcrumbs
+ context["breadcrumb_items"].append({
+ "name": tag.group,
+ "path": f"tags/{tag.group}",
+ })
+
+ # Clean up tag body
+ body = frontmatter.parse(tag.body)
+ content = body[1]
+
+ # Check for tags which can be hyperlinked
+ def sub(match: re.Match) -> str:
+ first, second = match.groups()
+ location = first
+ text, extra = match.group(), ""
+
+ if second is not None:
+ # Possibly a tag group
+ try:
+ new_location = f"{first}/{second.strip()}"
+ utils.get_tag(new_location, skip_sync=True)
+ location = new_location
+ except Tag.DoesNotExist:
+ # Not a group, remove the second argument from the link
+ extra = text[text.find(second):]
+ text = text[:text.find(second)]
+
+ link = reverse("content:tag", kwargs={"location": location})
+ return f"[{text}]({link}){extra}"
+ content = COMMAND_REGEX.sub(sub, content)
+
+ # Add support for some embed elements
+ if embed := body[0].get("embed"):
+ context["page_title"] = embed["title"]
+ if image := embed.get("image"):
+ content = f"![{embed['title']}]({image['url']})\n\n" + content
+
+ # Insert the content
+ context["page"] = markdown.markdown(content, extensions=["pymdownx.superfences"])
+
+ @staticmethod
+ def _set_group_context(context: dict[str, any], tags: list[Tag]) -> None:
+ """Update the context with the information for a group of tags."""
+ group = tags[0].group
+ context.update({
+ "categories": {},
+ "pages": utils.get_tag_category(tags, collapse_groups=False),
+ "page_title": group,
+            "icon": "fas fa-tags",
+ "is_tag_listing": True,
+ "app_name": "content:tag",
+ "path": f"{group}/",
+ "tag_url": f"{tags[0].URL_BASE}/{group}"
+ })
diff --git a/pydis_site/apps/events/apps.py b/pydis_site/apps/events/apps.py
index a1cf09ef..70762bc2 100644
--- a/pydis_site/apps/events/apps.py
+++ b/pydis_site/apps/events/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
class EventsConfig(AppConfig):
"""Django AppConfig for events app."""
- name = 'events'
+ name = 'pydis_site.apps.events'
diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py
index d43bd28e..a963f733 100644
--- a/pydis_site/apps/home/tests/test_repodata_helpers.py
+++ b/pydis_site/apps/home/tests/test_repodata_helpers.py
@@ -36,13 +36,13 @@ class TestRepositoryMetadataHelpers(TestCase):
"""Executed before each test method."""
self.home_view = HomeView()
- @mock.patch('requests.get', side_effect=mocked_requests_get)
+ @mock.patch('httpx.get', side_effect=mocked_requests_get)
def test_returns_metadata(self, _: mock.MagicMock):
"""Test if the _get_repo_data helper actually returns what it should."""
metadata = self.home_view._get_repo_data()
self.assertIsInstance(metadata[0], RepositoryMetadata)
- self.assertEquals(len(metadata), len(self.home_view.repos))
+ self.assertEqual(len(metadata), len(self.home_view.repos))
def test_returns_cached_metadata(self):
"""Test if the _get_repo_data helper returns cached data when available."""
@@ -59,7 +59,7 @@ class TestRepositoryMetadataHelpers(TestCase):
self.assertIsInstance(metadata[0], RepositoryMetadata)
self.assertIsInstance(str(metadata[0]), str)
- @mock.patch('requests.get', side_effect=mocked_requests_get)
+ @mock.patch('httpx.get', side_effect=mocked_requests_get)
def test_refresh_stale_metadata(self, _: mock.MagicMock):
"""Test if the _get_repo_data helper will refresh when the data is stale."""
repo_data = RepositoryMetadata(
@@ -75,18 +75,18 @@ class TestRepositoryMetadataHelpers(TestCase):
self.assertIsInstance(metadata[0], RepositoryMetadata)
- @mock.patch('requests.get', side_effect=mocked_requests_get)
+ @mock.patch('httpx.get', side_effect=mocked_requests_get)
def test_returns_api_data(self, _: mock.MagicMock):
"""Tests if the _get_api_data helper returns what it should."""
api_data = self.home_view._get_api_data()
repo = self.home_view.repos[0]
self.assertIsInstance(api_data, dict)
- self.assertEquals(len(api_data), len(self.home_view.repos))
+ self.assertEqual(len(api_data), len(self.home_view.repos))
self.assertIn(repo, api_data.keys())
self.assertIn("stargazers_count", api_data[repo])
- @mock.patch('requests.get', side_effect=mocked_requests_get)
+ @mock.patch('httpx.get', side_effect=mocked_requests_get)
def test_mocked_requests_get(self, mock_get: mock.MagicMock):
"""Tests if our mocked_requests_get is returning what it should."""
success_data = mock_get(HomeView.github_api)
@@ -98,7 +98,7 @@ class TestRepositoryMetadataHelpers(TestCase):
self.assertIsNotNone(success_data.json_data)
self.assertIsNone(fail_data.json_data)
- @mock.patch('requests.get')
+ @mock.patch('httpx.get')
def test_falls_back_to_database_on_error(self, mock_get: mock.MagicMock):
"""Tests that fallback to the database is performed when we get garbage back."""
repo_data = RepositoryMetadata(
@@ -117,7 +117,7 @@ class TestRepositoryMetadataHelpers(TestCase):
[item] = metadata
self.assertEqual(item, repo_data)
- @mock.patch('requests.get')
+ @mock.patch('httpx.get')
def test_falls_back_to_database_on_error_without_entries(self, mock_get: mock.MagicMock):
"""Tests that fallback to the database is performed when we get garbage back."""
mock_get.return_value.json.return_value = ['garbage']
@@ -126,7 +126,7 @@ class TestRepositoryMetadataHelpers(TestCase):
with self.assertLogs():
metadata = self.home_view._get_repo_data()
- self.assertEquals(len(metadata), 0)
+ self.assertEqual(len(metadata), 0)
def test_cleans_up_stale_metadata(self):
"""Tests that we clean up stale metadata when we start the HomeView."""
diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py
index 69e706c5..8a165682 100644
--- a/pydis_site/apps/home/views/home.py
+++ b/pydis_site/apps/home/views/home.py
@@ -1,7 +1,7 @@
import logging
from typing import Dict, List
-import requests
+import httpx
from django.core.handlers.wsgi import WSGIRequest
from django.http import HttpResponse
from django.shortcuts import render
@@ -32,9 +32,7 @@ class HomeView(View):
def __init__(self):
"""Clean up stale RepositoryMetadata."""
- self._static_build = settings.env("STATIC_BUILD")
-
- if not self._static_build:
+ if not settings.STATIC_BUILD:
RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()
# If no token is defined (for example in local development), then
@@ -56,12 +54,12 @@ class HomeView(View):
repo_dict = {}
try:
# Fetch the data from the GitHub API
- api_data: List[dict] = requests.get(
+ api_data: List[dict] = httpx.get(
self.github_api,
headers=self.headers,
timeout=settings.TIMEOUT_PERIOD
).json()
- except requests.exceptions.Timeout:
+ except httpx.TimeoutException:
log.error("Request to fetch GitHub repository metadata timed out!")
return repo_dict
@@ -94,7 +92,7 @@ class HomeView(View):
def _get_repo_data(self) -> List[RepositoryMetadata]:
"""Build a list of RepositoryMetadata objects that we can use to populate the front page."""
# First off, load the timestamp of the least recently updated entry.
- if self._static_build:
+ if settings.STATIC_BUILD:
last_update = None
else:
last_update = (
@@ -121,7 +119,7 @@ class HomeView(View):
for api_data in api_repositories.values()
]
- if settings.env("STATIC_BUILD"):
+ if settings.STATIC_BUILD:
return data
else:
return RepositoryMetadata.objects.bulk_create(data)
diff --git a/pydis_site/apps/redirect/apps.py b/pydis_site/apps/redirect/apps.py
index 9b70d169..0234bc93 100644
--- a/pydis_site/apps/redirect/apps.py
+++ b/pydis_site/apps/redirect/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
class RedirectConfig(AppConfig):
"""AppConfig instance for Redirect app."""
- name = 'redirect'
+ name = 'pydis_site.apps.redirect'
diff --git a/pydis_site/apps/redirect/urls.py b/pydis_site/apps/redirect/urls.py
index f7ddf45b..067cccc3 100644
--- a/pydis_site/apps/redirect/urls.py
+++ b/pydis_site/apps/redirect/urls.py
@@ -3,6 +3,7 @@ import re
import yaml
from django import conf
+from django.http import HttpResponse
from django.urls import URLPattern, path
from django_distill import distill_path
@@ -31,7 +32,7 @@ class Redirect:
def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
"""Return a pattern using the Redirects app, or a static HTML redirect for static builds."""
- if not settings.env("STATIC_BUILD"):
+ if not settings.STATIC_BUILD:
# Normal dynamic redirect
return [path(
data.original_path,
@@ -53,7 +54,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
class RedirectFunc:
def __init__(self, new_url: str, _name: str):
- self.result = REDIRECT_TEMPLATE.format(url=new_url)
+ self.result = HttpResponse(REDIRECT_TEMPLATE.format(url=new_url))
self.__qualname__ = _name
def __call__(self, *args, **kwargs):
@@ -95,7 +96,7 @@ def map_redirect(name: str, data: Redirect) -> list[URLPattern]:
return [distill_path(
data.original_path,
- lambda *args: REDIRECT_TEMPLATE.format(url=new_redirect),
+ lambda *args: HttpResponse(REDIRECT_TEMPLATE.format(url=new_redirect)),
name=name,
)]
diff --git a/pydis_site/apps/resources/apps.py b/pydis_site/apps/resources/apps.py
index e0c235bd..93117654 100644
--- a/pydis_site/apps/resources/apps.py
+++ b/pydis_site/apps/resources/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
class ResourcesConfig(AppConfig):
"""AppConfig instance for Resources app."""
- name = 'resources'
+ name = 'pydis_site.apps.resources'
diff --git a/pydis_site/apps/resources/resources/atom.yaml b/pydis_site/apps/resources/resources/atom.yaml
deleted file mode 100644
index 26e125b1..00000000
--- a/pydis_site/apps/resources/resources/atom.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-description: A free Electron-based editor, a "hackable text editor for the 21st century", maintained
- by the GitHub team.
-name: Atom
-title_url: https://atom.io/
-tags:
- topics:
- - general
- payment_tiers:
- - free
- difficulty:
- - beginner
- - intermediate
- type:
- - tool
diff --git a/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml b/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml
index c4ad1e1b..26e88cb9 100644
--- a/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml
+++ b/pydis_site/apps/resources/resources/neural_networks_from_scratch_in_python.yaml
@@ -2,7 +2,7 @@ description: '"Neural Networks From Scratch" is a book intended to teach you how
without any libraries, so you can better understand deep learning and how all of the elements work.
This is so you can go out and do new/novel things with deep learning as well as to become more successful with even more basic models.
This book is to accompany the usual free tutorial videos and sample code from youtube.com/sentdex.'
-name: Neural Networks from Scratch in Python
+name: Neural Networks from Scratch
title_url: https://nnfs.io/
urls:
- icon: branding/goodreads
diff --git a/pydis_site/apps/resources/resources/pycharm.yaml b/pydis_site/apps/resources/resources/pycharm.yaml
index 574158bc..e8c787e6 100644
--- a/pydis_site/apps/resources/resources/pycharm.yaml
+++ b/pydis_site/apps/resources/resources/pycharm.yaml
@@ -1,6 +1,7 @@
description: The very best Python IDE, with a wealth of advanced features and convenience
functions.
name: PyCharm
+title_image: https://resources.jetbrains.com/storage/products/pycharm/img/meta/pycharm_logo_300x300.png
title_url: https://www.jetbrains.com/pycharm/
tags:
topics:
diff --git a/pydis_site/apps/resources/resources/python_morsels.yaml b/pydis_site/apps/resources/resources/python_morsels.yaml
index bbc8133b..4cdff36b 100644
--- a/pydis_site/apps/resources/resources/python_morsels.yaml
+++ b/pydis_site/apps/resources/resources/python_morsels.yaml
@@ -17,3 +17,4 @@ tags:
- intermediate
type:
- interactive
+ - video
diff --git a/pydis_site/apps/resources/resources/the_algorithms_github.yaml b/pydis_site/apps/resources/resources/the_algorithms_github.yaml
new file mode 100644
index 00000000..30a0a5da
--- /dev/null
+++ b/pydis_site/apps/resources/resources/the_algorithms_github.yaml
@@ -0,0 +1,17 @@
+description: A git repository of Python implementations of many of the algorithms taught in algorithm
+  and data structure courses, as well as algorithms for neural networks, blockchains, and compression. This is
+ a great resource for students wanting to see algorithms implemented in a familiar language.
+name: The Algorithms
+title_url: https://github.com/TheAlgorithms/Python
+tags:
+ topics:
+ - algorithms and data structures
+ - data science
+ - security
+ payment_tiers:
+ - free
+ difficulty:
+ - beginner
+ - intermediate
+ type:
+ - tutorial
diff --git a/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml b/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml
index 61a7b6f6..12f2a154 100644
--- a/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml
+++ b/pydis_site/apps/resources/resources/vcokltfre_discord_bot_tutorial.yaml
@@ -1,4 +1,4 @@
-description: This tutorial, written by Python Discord staff member vcokltfre,
+description: This tutorial, written by vcokltfre,
will walk you through all the aspects of creating your own Discord bot,
starting from creating the bot user itself.
name: vcokltfre's Discord Bot Tutorial
diff --git a/pydis_site/apps/resources/templatetags/get_category_icon.py b/pydis_site/apps/resources/templatetags/get_category_icon.py
index 71f1393f..30bc4eaa 100644
--- a/pydis_site/apps/resources/templatetags/get_category_icon.py
+++ b/pydis_site/apps/resources/templatetags/get_category_icon.py
@@ -21,6 +21,7 @@ _ICONS = {
"Paid": "fa-dollar-sign",
"Podcast": "fa-microphone-alt",
"Project Ideas": "fa-lightbulb-o",
+ "Security": "fa-solid fa-lock",
"Software Design": "fa-paint-brush",
"Subscription": "fa-credit-card",
"Testing": "fa-vial",
diff --git a/pydis_site/apps/staff/apps.py b/pydis_site/apps/staff/apps.py
index 70a15f40..d68a80c3 100644
--- a/pydis_site/apps/staff/apps.py
+++ b/pydis_site/apps/staff/apps.py
@@ -4,4 +4,4 @@ from django.apps import AppConfig
class StaffConfig(AppConfig):
"""Django AppConfig for the staff app."""
- name = 'staff'
+ name = 'pydis_site.apps.staff'
diff --git a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
index 8e14ced6..5026068e 100644
--- a/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
+++ b/pydis_site/apps/staff/templatetags/deletedmessage_filters.py
@@ -1,4 +1,5 @@
from datetime import datetime
+from typing import Union
from django import template
@@ -6,13 +7,16 @@ register = template.Library()
@register.filter
-def hex_colour(color: int) -> str:
+def hex_colour(colour: Union[str, int]) -> str:
"""
- Converts an integer representation of a colour to the RGB hex value.
+ Converts the given representation of a colour to its RGB hex string.
As we are using a Discord dark theme analogue, black colours are returned as white instead.
"""
- colour = f"#{color:0>6X}"
+ if isinstance(colour, str):
+ colour = colour if colour.startswith("#") else f"#{colour}"
+ else:
+ colour = f"#{colour:0>6X}"
return colour if colour != "#000000" else "#FFFFFF"
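
For context, a rough sketch of the behaviour the reworked filter now has, using the same colour values the tests below add (illustrative only, not part of this diff):

    from pydis_site.apps.staff.templatetags.deletedmessage_filters import hex_colour

    assert hex_colour(0xE74C3C) == "#E74C3C"   # integer input is zero-padded and "#"-prefixed
    assert hex_colour("#e74c3c") == "#e74c3c"  # an already-prefixed string passes through unchanged
    assert hex_colour("e74c3c") == "#e74c3c"   # a bare hex string gains the "#" prefix
    assert hex_colour(0x000000) == "#FFFFFF"   # black is swapped for white to suit the dark theme
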
diff --git a/pydis_site/apps/staff/tests/test_logs_view.py b/pydis_site/apps/staff/tests/test_logs_view.py
index 45e9ce8f..3e5726cd 100644
--- a/pydis_site/apps/staff/tests/test_logs_view.py
+++ b/pydis_site/apps/staff/tests/test_logs_view.py
@@ -95,12 +95,22 @@ class TestLogsView(TestCase):
"description": "This embed is way too cool to be seen in public channels.",
}
+ cls.embed_three = {
+ "description": "This embed is way too cool to be seen in public channels.",
+ "color": "#e74c3c",
+ }
+
+ cls.embed_four = {
+ "description": "This embed is way too cool to be seen in public channels.",
+ "color": "e74c3c",
+ }
+
cls.deleted_message_two = DeletedMessage.objects.create(
author=cls.author,
id=614444836291870750,
channel_id=1984,
content='Does that mean this thing will halt?',
- embeds=[cls.embed_one, cls.embed_two],
+ embeds=[cls.embed_one, cls.embed_two, cls.embed_three, cls.embed_four],
attachments=['https://http.cat/100', 'https://http.cat/402'],
deletion_context=cls.deletion_context,
)
diff --git a/pydis_site/settings.py b/pydis_site/settings.py
index 17f220f3..e9e0ba67 100644
--- a/pydis_site/settings.py
+++ b/pydis_site/settings.py
@@ -21,7 +21,6 @@ import environ
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
-
env = environ.Env(
DEBUG=(bool, False),
SITE_DSN=(str, ""),
@@ -30,17 +29,31 @@ env = environ.Env(
GIT_SHA=(str, 'development'),
TIMEOUT_PERIOD=(int, 5),
GITHUB_TOKEN=(str, None),
+ GITHUB_APP_ID=(str, None),
+ GITHUB_APP_KEY=(str, None),
)
GIT_SHA = env("GIT_SHA")
+GITHUB_API = "https://api.github.com"
GITHUB_TOKEN = env("GITHUB_TOKEN")
-
-sentry_sdk.init(
- dsn=env('SITE_DSN'),
- integrations=[DjangoIntegration()],
- send_default_pii=True,
- release=f"site@{GIT_SHA}"
-)
+GITHUB_APP_ID = env("GITHUB_APP_ID")
+GITHUB_APP_KEY = env("GITHUB_APP_KEY")
+GITHUB_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+"""The datetime string format GitHub uses."""
+
+STATIC_BUILD: bool = env("STATIC_BUILD")
+
+if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file():
+ # Allow the OAuth key to be loaded from a file
+ GITHUB_APP_KEY = key_file.read_text(encoding="utf-8")
+
+if not STATIC_BUILD:
+ sentry_sdk.init(
+ dsn=env('SITE_DSN'),
+ integrations=[DjangoIntegration()],
+ send_default_pii=True,
+ release=f"site@{GIT_SHA}"
+ )
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -91,7 +104,7 @@ else:
NON_STATIC_APPS = [
'pydis_site.apps.api',
'pydis_site.apps.staff',
-] if not env("STATIC_BUILD") else []
+] if not STATIC_BUILD else []
INSTALLED_APPS = [
*NON_STATIC_APPS,
@@ -120,25 +133,29 @@ INSTALLED_APPS = [
if not env("BUILDING_DOCKER"):
INSTALLED_APPS.append("django_prometheus")
-NON_STATIC_MIDDLEWARE = [
- 'django_prometheus.middleware.PrometheusBeforeMiddleware',
-] if not env("STATIC_BUILD") else []
-
-# Ensure that Prometheus middlewares are first and last here.
-MIDDLEWARE = [
- *NON_STATIC_MIDDLEWARE,
-
- 'django.middleware.security.SecurityMiddleware',
- 'whitenoise.middleware.WhiteNoiseMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
-
- 'django_prometheus.middleware.PrometheusAfterMiddleware'
-]
+if STATIC_BUILD:
+ # The only middleware required during static builds
+ MIDDLEWARE = [
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ ]
+else:
+ # Ensure that Prometheus middlewares are first and last here.
+ MIDDLEWARE = [
+ 'django_prometheus.middleware.PrometheusBeforeMiddleware',
+
+ 'django.middleware.security.SecurityMiddleware',
+ 'whitenoise.middleware.WhiteNoiseMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+
+ 'django_prometheus.middleware.PrometheusAfterMiddleware'
+ ]
ROOT_URLCONF = 'pydis_site.urls'
@@ -167,7 +184,7 @@ WSGI_APPLICATION = 'pydis_site.wsgi.application'
DATABASES = {
'default': env.db(),
'metricity': env.db('METRICITY_DB_URL'),
-} if not env("STATIC_BUILD") else {}
+} if not STATIC_BUILD else {}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
@@ -192,7 +209,6 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
-USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
@@ -219,6 +235,9 @@ if DEBUG:
else:
PARENT_HOST = env('PARENT_HOST', default='pythondiscord.com')
+# Django Model Configuration
+DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
+
# Django REST framework
# https://www.django-rest-framework.org
REST_FRAMEWORK = {
diff --git a/pydis_site/static/css/content/color.css b/pydis_site/static/css/content/color.css
new file mode 100644
index 00000000..f4801c28
--- /dev/null
+++ b/pydis_site/static/css/content/color.css
@@ -0,0 +1,7 @@
+.content .fa-github {
+ color: black;
+}
+
+.content .fa-github:hover {
+ color: #7289DA;
+}
diff --git a/pydis_site/static/css/content/tag.css b/pydis_site/static/css/content/tag.css
new file mode 100644
index 00000000..79795f9e
--- /dev/null
+++ b/pydis_site/static/css/content/tag.css
@@ -0,0 +1,13 @@
+.content a * {
+ /* This is the original color, but propagated down the chain */
+ /* which allows for elements inside links, such as codeblocks */
+ color: #7289DA;
+}
+
+.content a *:hover {
+ color: dimgray;
+}
+
+span.update-time {
+ text-decoration: black underline dotted;
+}
diff --git a/pydis_site/static/css/events/base.css b/pydis_site/static/css/events/base.css
index 266bca1d..9e244ed9 100644
--- a/pydis_site/static/css/events/base.css
+++ b/pydis_site/static/css/events/base.css
@@ -10,3 +10,11 @@ pre {
*/
background-color: #282c34;
}
+
+.panel .panel-heading {
+ /*
+ * Remove whitespace between the panel heading and the first item in a panel,
+ * since it makes the first panel item taller than the others.
+ */
+ margin-bottom: 0 !important
+}
diff --git a/pydis_site/static/css/home/index.css b/pydis_site/static/css/home/index.css
index 7ec8af74..e117a35b 100644
--- a/pydis_site/static/css/home/index.css
+++ b/pydis_site/static/css/home/index.css
@@ -49,11 +49,16 @@ h1 {
margin: auto auto;
}
-#wave-hero-right img{
+#wave-hero-right img {
border-radius: 10px;
box-shadow: 0 1px 6px rgba(0,0,0,0.16), 0 1px 6px rgba(0,0,0,0.23);
margin-top: 1em;
text-align: right;
+ transition: all 0.3s cubic-bezier(.25,.8,.25,1);
+}
+
+#wave-hero-right img:hover {
+ box-shadow: 0 14px 28px rgba(0,0,0,0.25), 0 10px 10px rgba(0,0,0,0.22);
}
#wave-hero .wave {
@@ -121,8 +126,7 @@ h1 {
margin: 0 4% 0 4%;
background-color: #3EB2EF;
color: white;
- font-size: 15px;
- line-height: 33px;
+ line-height: 31px;
border:none;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.3s cubic-bezier(.25,.8,.25,1);
diff --git a/pydis_site/static/css/staff/logs.css b/pydis_site/static/css/staff/logs.css
index acf4f1f7..56a12380 100644
--- a/pydis_site/static/css/staff/logs.css
+++ b/pydis_site/static/css/staff/logs.css
@@ -25,7 +25,10 @@ main.site-content {
.discord-message:first-child {
border-top: 1px;
+}
+.discord-message-content {
+ overflow-wrap: break-word;
}
.discord-message-header {
diff --git a/pydis_site/static/images/content/contributing/pull_request.png b/pydis_site/static/images/content/contributing/pull_request.png
new file mode 100644
index 00000000..87b7ffbe
--- /dev/null
+++ b/pydis_site/static/images/content/contributing/pull_request.png
Binary files differ
diff --git a/pydis_site/static/images/content/discord_colored_messages/ansi-colors.png b/pydis_site/static/images/content/discord_colored_messages/ansi-colors.png
new file mode 100644
index 00000000..d7176393
--- /dev/null
+++ b/pydis_site/static/images/content/discord_colored_messages/ansi-colors.png
Binary files differ
diff --git a/pydis_site/static/images/content/discord_colored_messages/result.png b/pydis_site/static/images/content/discord_colored_messages/result.png
new file mode 100644
index 00000000..a666804e
--- /dev/null
+++ b/pydis_site/static/images/content/discord_colored_messages/result.png
Binary files differ
diff --git a/pydis_site/static/images/content/fix-ssl-certificate/pem.png b/pydis_site/static/images/content/fix-ssl-certificate/pem.png
new file mode 100644
index 00000000..face520f
--- /dev/null
+++ b/pydis_site/static/images/content/fix-ssl-certificate/pem.png
Binary files differ
diff --git a/pydis_site/static/images/content/regenerating_token.jpg b/pydis_site/static/images/content/regenerating_token.jpg
new file mode 100644
index 00000000..7b2588dc
--- /dev/null
+++ b/pydis_site/static/images/content/regenerating_token.jpg
Binary files differ
diff --git a/pydis_site/static/images/events/Replit.png b/pydis_site/static/images/events/Replit.png
new file mode 100644
index 00000000..a8202641
--- /dev/null
+++ b/pydis_site/static/images/events/Replit.png
Binary files differ
diff --git a/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/live_now.png b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/live_now.png
new file mode 100644
index 00000000..eb30bf7e
--- /dev/null
+++ b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/live_now.png
Binary files differ
diff --git a/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/qualifier_release.png b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/qualifier_release.png
new file mode 100644
index 00000000..1e45024b
--- /dev/null
+++ b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/qualifier_release.png
Binary files differ
diff --git a/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/sign_up.png b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/sign_up.png
new file mode 100644
index 00000000..f807418e
--- /dev/null
+++ b/pydis_site/static/images/events/summer_code_jam_2022/front_page_banners/sign_up.png
Binary files differ
diff --git a/pydis_site/static/images/events/summer_code_jam_2022/site_banner.png b/pydis_site/static/images/events/summer_code_jam_2022/site_banner.png
new file mode 100644
index 00000000..30b3dfbc
--- /dev/null
+++ b/pydis_site/static/images/events/summer_code_jam_2022/site_banner.png
Binary files differ
diff --git a/pydis_site/static/images/navbar/discord.svg b/pydis_site/static/images/navbar/discord.svg
index 406e3836..2cf3d6cc 100644
--- a/pydis_site/static/images/navbar/discord.svg
+++ b/pydis_site/static/images/navbar/discord.svg
@@ -1,165 +1,244 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="120mm"
height="30mm"
viewBox="0 0 120 30"
version="1.1"
id="svg8"
- inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
- sodipodi:docname="discord.svg">
- <defs
- id="defs2">
- <rect
+ inkscape:version="1.2 (dc2aedaf03, 2022-05-15)"
+ sodipodi:docname="discord.svg"
+ xml:space="preserve"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/"><defs
+ id="defs2"><rect
x="75.819944"
y="98.265513"
width="25.123336"
height="7.8844509"
- id="rect953" />
- <rect
+ id="rect953" /><rect
x="75.819946"
y="98.265511"
width="25.123337"
height="7.8844509"
- id="rect953-0" />
- <rect
+ id="rect953-0" /><rect
x="75.819946"
y="98.265511"
width="25.123337"
height="7.8844509"
- id="rect968" />
- </defs>
- <sodipodi:namedview
+ id="rect968" /><clipPath
+ id="clip0"><rect
+ width="71"
+ height="55"
+ fill="white"
+ id="rect716" /></clipPath><clipPath
+ id="clip0-9"><rect
+ width="71"
+ height="55"
+ fill="white"
+ id="rect852" /></clipPath><clipPath
+ id="clip0-6"><rect
+ width="292"
+ height="56.4706"
+ fill="white"
+ transform="translate(0 11.7646)"
+ id="rect159" /></clipPath><clipPath
+ id="clip1"><rect
+ width="292"
+ height="56.4706"
+ fill="white"
+ transform="translate(0 11.7646)"
+ id="rect162" /></clipPath><clipPath
+ id="clip0-65"><rect
+ width="292"
+ height="56.4706"
+ fill="white"
+ transform="translate(0 11.7646)"
+ id="rect338" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath405"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect407"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath409"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect411"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath413"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect415"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath417"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect419"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath421"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect423"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath425"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect427"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath429"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect431"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath433"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect435"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath><clipPath
+ clipPathUnits="userSpaceOnUse"
+ id="clipPath437"><rect
+ width="292"
+ height="56.470596"
+ fill="#ffffff"
+ id="rect439"
+ x="-8.4424901e-06"
+ y="11.764597"
+ style="stroke-width:1" /></clipPath></defs><sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
- inkscape:pageopacity="0.0"
+ inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:zoom="2.8"
- inkscape:cx="194.44623"
- inkscape:cy="53.152927"
+ inkscape:cx="226.07143"
+ inkscape:cy="53.035714"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="false"
- inkscape:window-width="2560"
- inkscape:window-height="1413"
- inkscape:window-x="4880"
- inkscape:window-y="677"
+ inkscape:window-width="1920"
+ inkscape:window-height="1001"
+ inkscape:window-x="-9"
+ inkscape:window-y="-9"
inkscape:window-maximized="1"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
- inkscape:document-rotation="0" />
- <metadata
- id="metadata5">
- <rdf:RDF>
- <cc:Work
- rdf:about="">
- <dc:format>image/svg+xml</dc:format>
- <dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
- <dc:title />
- </cc:Work>
- </rdf:RDF>
- </metadata>
- <g
+ inkscape:document-rotation="0"
+ inkscape:pagecheckerboard="false"
+ inkscape:showpageshadow="2"
+ inkscape:deskcolor="#d1d1d1" /><metadata
+ id="metadata5"><rdf:RDF><cc:Work
+ rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
- transform="translate(-52.233408,-75.88169)">
- <rect
+ transform="translate(-52.233408,-75.88169)"><rect
style="fill:#ffffff;fill-opacity:1;stroke-width:0.137677;paint-order:stroke fill markers;stop-color:#000000"
id="rect832"
width="61.511906"
height="30"
x="52.23341"
- y="75.881691" />
- <g
- id="g910"
- transform="matrix(0.90000009,0,0,0.90000009,17.445516,9.7980333)">
- <g
- id="g850"
- transform="matrix(0.06491223,0,0,0.06491223,109.76284,82.07218)">
- <path
- class="st0"
- d="m 142.8,120.1 c -5.7,0 -10.2,4.9 -10.2,11 0,6.1 4.6,11 10.2,11 5.7,0 10.2,-4.9 10.2,-11 0,-6.1 -4.6,-11 -10.2,-11 z m -36.5,0 c -5.7,0 -10.2,4.9 -10.2,11 0,6.1 4.6,11 10.2,11 5.7,0 10.2,-4.9 10.2,-11 0.1,-6.1 -4.5,-11 -10.2,-11 z"
- id="path836" />
- <path
- class="st0"
- d="m 191.4,36.9 h -134 c -11.3,0 -20.5,9.2 -20.5,20.5 v 134 c 0,11.3 9.2,20.5 20.5,20.5 h 113.4 l -5.3,-18.3 12.8,11.8 12.1,11.1 21.6,18.7 V 57.4 C 211.9,46.1 202.7,36.9 191.4,36.9 Z m -38.6,129.5 c 0,0 -3.6,-4.3 -6.6,-8 13.1,-3.7 18.1,-11.8 18.1,-11.8 -4.1,2.7 -8,4.6 -11.5,5.9 -5,2.1 -9.8,3.4 -14.5,4.3 -9.6,1.8 -18.4,1.3 -25.9,-0.1 -5.7,-1.1 -10.6,-2.6 -14.7,-4.3 -2.3,-0.9 -4.8,-2 -7.3,-3.4 -0.3,-0.2 -0.6,-0.3 -0.9,-0.5 -0.2,-0.1 -0.3,-0.2 -0.4,-0.2 -1.8,-1 -2.8,-1.7 -2.8,-1.7 0,0 4.8,7.9 17.5,11.7 -3,3.8 -6.7,8.2 -6.7,8.2 C 75,165.8 66.6,151.4 66.6,151.4 66.6,119.5 81,93.6 81,93.6 95.4,82.9 109,83.2 109,83.2 l 1,1.2 c -18,5.1 -26.2,13 -26.2,13 0,0 2.2,-1.2 5.9,-2.8 10.7,-4.7 19.2,-5.9 22.7,-6.3 0.6,-0.1 1.1,-0.2 1.7,-0.2 6.1,-0.8 13,-1 20.2,-0.2 9.5,1.1 19.7,3.9 30.1,9.5 0,0 -7.9,-7.5 -24.9,-12.6 l 1.4,-1.6 c 0,0 13.7,-0.3 28,10.4 0,0 14.4,25.9 14.4,57.8 0,-0.1 -8.4,14.3 -30.5,15 z m 151,-86.7 H 270.6 V 117 l 22.1,19.9 v -36.2 h 11.8 c 7.5,0 11.2,3.6 11.2,9.4 v 27.7 c 0,5.8 -3.5,9.7 -11.2,9.7 h -34 v 21.1 h 33.2 c 17.8,0.1 34.5,-8.8 34.5,-29.2 V 109.6 C 338.3,88.8 321.6,79.7 303.8,79.7 Z m 174,59.7 v -30.6 c 0,-11 19.8,-13.5 25.8,-2.5 l 18.3,-7.4 c -7.2,-15.8 -20.3,-20.4 -31.2,-20.4 -17.8,0 -35.4,10.3 -35.4,30.3 v 30.6 c 0,20.2 17.6,30.3 35,30.3 11.2,0 24.6,-5.5 32,-19.9 l -19.6,-9 c -4.8,12.3 -24.9,9.3 -24.9,-1.4 z M 417.3,113 c -6.9,-1.5 -11.5,-4 -11.8,-8.3 0.4,-10.3 16.3,-10.7 25.6,-0.8 l 14.7,-11.3 c -9.2,-11.2 -19.6,-14.2 -30.3,-14.2 -16.3,0 -32.1,9.2 -32.1,26.6 0,16.9 13,26 27.3,28.2 7.3,1 15.4,3.9 15.2,8.9 -0.6,9.5 -20.2,9 -29.1,-1.8 l -14.2,13.3 c 8.3,10.7 19.6,16.1 30.2,16.1 16.3,0 34.4,-9.4 35.1,-26.6 1,-21.7 -14.8,-27.2 -30.6,-30.1 z m -67,55.5 h 22.4 V 79.7 H 350.3 Z M 728,79.7 H 694.8 V 117 l 22.1,19.9 v -36.2 h 11.8 c 7.5,0 11.2,3.6 11.2,9.4 v 27.7 c 0,5.8 -3.5,9.7 -11.2,9.7 h -34 v 21.1 H 728 c 17.8,0.1 34.5,-8.8 34.5,-29.2 V 109.6 C 762.5,88.8 745.8,79.7 728,79.7 Z M 565.1,78.5 c -18.4,0 -36.7,10 -36.7,30.5 v 30.3 c 0,20.3 18.4,30.5 36.9,30.5 18.4,0 36.7,-10.2 36.7,-30.5 V 109 C 602,88.6 583.5,78.5 565.1,78.5 Z m 14.4,60.8 c 0,6.4 -7.2,9.7 -14.3,9.7 -7.2,0 -14.4,-3.1 -14.4,-9.7 V 109 c 0,-6.5 7,-10 14,-10 7.3,0 14.7,3.1 14.7,10 z M 682.4,109 c -0.5,-20.8 -14.7,-29.2 -33,-29.2 h -35.5 v 88.8 h 22.7 v -28.2 h 4 l 20.6,28.2 h 28 L 665,138.1 c 10.7,-3.4 17.4,-12.7 17.4,-29.1 z m -32.6,12 h -13.2 v -20.3 h 13.2 c 14.1,0 14.1,20.3 0,20.3 z"
- id="path838" />
- </g>
- <path
- id="path4789-6"
- class=""
- d="m 167.72059,90.383029 -3.19204,3.19205 c -0.15408,0.15408 -0.40352,0.15408 -0.55746,0 l -0.37229,-0.37231 c -0.15368,-0.15369 -0.15408,-0.40277 -4.9e-4,-0.55681 l 2.52975,-2.54167 -2.52975,-2.54164 c -0.15329,-0.15408 -0.15309,-0.40312 4.9e-4,-0.55681 l 0.37229,-0.37228 c 0.15408,-0.15408 0.40353,-0.15408 0.55746,0 l 3.19204,3.19201 c 0.15408,0.15407 0.15408,0.40354 0,0.55746 z"
- inkscape:connector-curvature="0"
- style="fill:#ffffff;fill-opacity:1;stroke-width:0.0164247" />
- </g>
- <g
- id="g904"
- transform="matrix(0.90000009,0,0,0.90000009,10.464254,9.7980333)">
- <g
- id="g850-3"
- transform="matrix(0.06491223,0,0,0.06491223,52.083661,82.07218)">
- <path
- class="st0"
- d="m 142.8,120.1 c -5.7,0 -10.2,4.9 -10.2,11 0,6.1 4.6,11 10.2,11 5.7,0 10.2,-4.9 10.2,-11 0,-6.1 -4.6,-11 -10.2,-11 z m -36.5,0 c -5.7,0 -10.2,4.9 -10.2,11 0,6.1 4.6,11 10.2,11 5.7,0 10.2,-4.9 10.2,-11 0.1,-6.1 -4.5,-11 -10.2,-11 z"
- id="path836-5"
- style="fill:#7289da;fill-opacity:1" />
- <path
- class="st0"
- d="m 191.4,36.9 h -134 c -11.3,0 -20.5,9.2 -20.5,20.5 v 134 c 0,11.3 9.2,20.5 20.5,20.5 h 113.4 l -5.3,-18.3 12.8,11.8 12.1,11.1 21.6,18.7 V 57.4 C 211.9,46.1 202.7,36.9 191.4,36.9 Z m -38.6,129.5 c 0,0 -3.6,-4.3 -6.6,-8 13.1,-3.7 18.1,-11.8 18.1,-11.8 -4.1,2.7 -8,4.6 -11.5,5.9 -5,2.1 -9.8,3.4 -14.5,4.3 -9.6,1.8 -18.4,1.3 -25.9,-0.1 -5.7,-1.1 -10.6,-2.6 -14.7,-4.3 -2.3,-0.9 -4.8,-2 -7.3,-3.4 -0.3,-0.2 -0.6,-0.3 -0.9,-0.5 -0.2,-0.1 -0.3,-0.2 -0.4,-0.2 -1.8,-1 -2.8,-1.7 -2.8,-1.7 0,0 4.8,7.9 17.5,11.7 -3,3.8 -6.7,8.2 -6.7,8.2 C 75,165.8 66.6,151.4 66.6,151.4 66.6,119.5 81,93.6 81,93.6 95.4,82.9 109,83.2 109,83.2 l 1,1.2 c -18,5.1 -26.2,13 -26.2,13 0,0 2.2,-1.2 5.9,-2.8 10.7,-4.7 19.2,-5.9 22.7,-6.3 0.6,-0.1 1.1,-0.2 1.7,-0.2 6.1,-0.8 13,-1 20.2,-0.2 9.5,1.1 19.7,3.9 30.1,9.5 0,0 -7.9,-7.5 -24.9,-12.6 l 1.4,-1.6 c 0,0 13.7,-0.3 28,10.4 0,0 14.4,25.9 14.4,57.8 0,-0.1 -8.4,14.3 -30.5,15 z"
- id="path838-6"
- style="fill:#7289da;fill-opacity:1"
- sodipodi:nodetypes="sssssccccccscccccccccccccccccccccccccccc" />
- </g>
- <path
- id="path4789-6-2"
- class=""
- d="m 107.16039,90.382629 -3.19204,3.19205 c -0.15408,0.15408 -0.40352,0.15408 -0.55746,0 l -0.37229,-0.37231 c -0.15368,-0.15369 -0.15408,-0.40277 -5.3e-4,-0.55681 l 2.52975,-2.54167 -2.52975,-2.54164 c -0.15329,-0.15408 -0.15309,-0.40312 5.3e-4,-0.55681 l 0.37229,-0.37228 c 0.15408,-0.15408 0.40353,-0.15408 0.55746,0 l 3.19204,3.19201 c 0.15408,0.15407 0.15408,0.40354 0,0.55746 z"
- inkscape:connector-curvature="0"
- style="fill:#7289da;fill-opacity:1;stroke-width:0.0164247" />
- <g
- aria-label="JOIN US"
- transform="matrix(1.2501707,0,0,1.2501707,-25.160061,-36.966352)"
- id="text951"
- style="font-style:normal;font-weight:normal;font-size:6.35px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;white-space:pre;shape-inside:url(#rect953-0);fill:#7289da;fill-opacity:1;stroke:none">
- <path
- d="m 75.839362,102.56309 c 0.127,0.9525 0.89535,1.3843 1.67005,1.3843 0.85725,0 1.7145,-0.55245 1.7145,-1.53035 v -3.028953 h -2.1463 v 1.028703 h 1.02235 v 2.00025 c 0,0.26035 -0.2667,0.4318 -0.5461,0.4318 -0.2794,0 -0.57785,-0.14605 -0.64135,-0.508 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path850" />
- <path
- d="m 79.795412,102.40434 c 0,1.0287 0.93345,1.54305 1.8669,1.54305 0.93345,0 1.86055,-0.51435 1.86055,-1.54305 v -1.5367 c 0,-1.028703 -0.93345,-1.543053 -1.8669,-1.543053 -0.93345,0 -1.86055,0.508 -1.86055,1.543053 z m 1.13665,-1.5367 c 0,-0.3302 0.3556,-0.508 0.7112,-0.508 0.3683,0 0.74295,0.15875 0.74295,0.508 v 1.5367 c 0,0.32385 -0.36195,0.48895 -0.7239,0.48895 -0.36195,0 -0.73025,-0.15875 -0.73025,-0.48895 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path852" />
- <path
- d="m 85.262755,99.388087 h -1.13665 v 4.495803 h 1.13665 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path854" />
- <path
- d="m 85.973945,103.88389 h 1.13665 v -1.79705 l -0.14605,-0.86995 0.03175,-0.006 0.3937,0.9017 1.016,1.77165 h 1.14935 v -4.495803 h -1.1303 v 2.038353 c 0.0063,0 0.12065,0.7747 0.127,0.7747 l -0.03175,0.006 -0.381,-0.9017 -1.08585,-1.917703 h -1.0795 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path856" />
- <path
- d="m 92.546182,99.388087 h -1.14935 v 2.990853 c -0.0063,2.1082 3.5814,2.1082 3.58775,0 v -2.990853 h -1.14935 v 2.990853 c -0.0064,0.7239 -1.28905,0.7239 -1.28905,0 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path858" />
- <path
- d="m 95.44178,103.13459 c 0.4191,0.53975 0.9906,0.8128 1.53035,0.8128 0.8255,0 1.7399,-0.47625 1.778,-1.3462 0.0508,-1.1049 -0.7493,-1.3843 -1.5494,-1.53035 -0.34925,-0.0762 -0.5842,-0.2032 -0.5969,-0.4191 0.01905,-0.5207 0.8255,-0.53975 1.2954,-0.0381 l 0.74295,-0.5715 c -0.46355,-0.565153 -0.9906,-0.717553 -1.5367,-0.717553 -0.8255,0 -1.6256,0.46355 -1.6256,1.346203 0,0.85725 0.6604,1.31445 1.3843,1.42875 0.3683,0.0508 0.78105,0.19685 0.76835,0.45085 -0.03175,0.4826 -1.02235,0.4572 -1.4732,-0.0889 z"
- style="font-style:normal;font-variant:normal;font-weight:900;font-stretch:normal;font-family:'Uni Sans';-inkscape-font-specification:'Uni Sans Heavy';fill:#7289da;fill-opacity:1"
- id="path860" />
- </g>
- </g>
- </g>
- <style
- id="style834">.st0{fill:#FFFFFF;}</style>
-</svg>
+ y="75.881691" /><path
+ id="path4789-6"
+ class=""
+ d="m 168.39406,91.142768 -2.87283,2.872845 c -0.13868,0.138672 -0.36317,0.138672 -0.50172,0 l -0.33506,-0.335079 c -0.13831,-0.138321 -0.13867,-0.362493 -4.4e-4,-0.501129 l 2.27677,-2.287503 -2.27677,-2.287477 c -0.13796,-0.138672 -0.13778,-0.362808 4.4e-4,-0.501129 l 0.33506,-0.335052 c 0.13867,-0.138672 0.36318,-0.138672 0.50172,0 l 2.87283,2.872809 c 0.13867,0.138663 0.13867,0.363187 0,0.501715 z"
+ inkscape:connector-curvature="0"
+ style="fill:#ffffff;fill-opacity:1;stroke-width:0.0147822" /><path
+ id="path4789-6-2"
+ class=""
+ d="m 106.90861,91.142408 -2.87283,2.872845 c -0.13867,0.138672 -0.36317,0.138672 -0.50172,0 L 103.199,93.680174 c -0.13831,-0.138321 -0.13867,-0.362493 -4.7e-4,-0.501129 l 2.27677,-2.287503 -2.27677,-2.287477 c -0.13796,-0.138672 -0.13778,-0.362808 4.7e-4,-0.501129 l 0.33506,-0.335052 c 0.13868,-0.138672 0.36318,-0.138672 0.50172,0 l 2.87283,2.872809 c 0.13868,0.138663 0.13868,0.363187 0,0.501715 z"
+ inkscape:connector-curvature="0"
+ style="fill:#7289da;fill-opacity:1;stroke-width:0.0147822" /><g
+ style="fill:none"
+ id="g196"
+ transform="matrix(0.14732984,0,0,0.14732984,118.63341,84.998511)"><g
+ clip-path="url(#clip1)"
+ id="g155"><path
+ d="m 61.7958,16.494 c -4.7222,-2.2094 -9.7714,-3.8151 -15.0502,-4.7294 -0.6483,1.1721 -1.4057,2.7486 -1.9279,4.0027 -5.6115,-0.8439 -11.1714,-0.8439 -16.6797,0 -0.5221,-1.2541 -1.2967,-2.8306 -1.9508,-4.0027 -5.2845,0.9143 -10.3395,2.5259 -15.0617,4.7411 C 1.60078,30.8988 -0.981215,44.9344 0.309785,58.7707 6.62708,63.4883 12.7493,66.3541 18.7682,68.2294 c 1.4861,-2.0453 2.8115,-4.2195 3.9533,-6.5109 -2.1746,-0.8263 -4.2574,-1.846 -6.2254,-3.0298 0.5221,-0.3868 1.0328,-0.7912 1.5262,-1.2073 12.0034,5.6143 25.0454,5.6143 36.9054,0 0.4992,0.4161 1.0098,0.8205 1.5262,1.2073 -1.9738,1.1896 -4.0623,2.2093 -6.2369,3.0357 1.1418,2.2855 2.4615,4.4656 3.9533,6.5108 6.0247,-1.8753 12.1526,-4.741 18.4699,-9.4645 C 74.155,42.7309 70.0525,28.8242 61.7958,16.494 Z m -37.439,33.7675 c -3.6033,0 -6.5583,-3.3639 -6.5583,-7.4603 0,-4.0964 2.8919,-7.4661 6.5583,-7.4661 3.6665,0 6.6214,3.3638 6.5583,7.4661 0.0057,4.0964 -2.8918,7.4603 -6.5583,7.4603 z m 24.2364,0 c -3.6033,0 -6.5583,-3.3639 -6.5583,-7.4603 0,-4.0964 2.8918,-7.4661 6.5583,-7.4661 3.6664,0 6.6214,3.3638 6.5583,7.4661 0,4.0964 -2.8919,7.4603 -6.5583,7.4603 z"
+ fill="#ffffff"
+ id="path137" /><path
+ d="m 98.0293,26.1707 h 15.6637 c 3.776,0 6.966,0.6036 9.583,1.805 2.61,1.2013 4.567,2.8774 5.864,5.0223 1.296,2.1449 1.95,4.6004 1.95,7.3665 0,2.7075 -0.677,5.163 -2.031,7.3606 -1.354,2.2035 -3.414,3.9441 -6.185,5.2275 -2.771,1.2834 -6.203,1.928 -10.305,1.928 H 98.0293 Z m 14.3787,21.4138 c 2.542,0 4.499,-0.6505 5.864,-1.9457 1.366,-1.301 2.049,-3.0708 2.049,-5.3153 0,-2.0805 -0.609,-3.739 -1.825,-4.9814 -1.216,-1.2424 -3.058,-1.8694 -5.52,-1.8694 h -4.9 v 14.1118 z"
+ fill="#ffffff"
+ id="path139" /><path
+ d="m 154.541,54.8456 c -2.169,-0.5743 -4.126,-1.4065 -5.864,-2.5024 v -6.8097 c 1.314,1.0372 3.075,1.8929 5.284,2.5668 2.209,0.6681 4.344,1.0021 6.409,1.0021 0.964,0 1.693,-0.1289 2.186,-0.3868 0.494,-0.2578 0.741,-0.5684 0.741,-0.9259 0,-0.4102 -0.132,-0.7501 -0.402,-1.0256 -0.27,-0.2754 -0.792,-0.504 -1.566,-0.6974 l -4.82,-1.1076 c -2.76,-0.6563 -4.717,-1.5647 -5.881,-2.7309 -1.165,-1.1604 -1.745,-2.6841 -1.745,-4.5711 0,-1.5882 0.505,-2.9653 1.527,-4.1433 1.015,-1.1779 2.461,-2.0863 4.337,-2.7251 1.877,-0.6446 4.068,-0.9669 6.587,-0.9669 2.249,0 4.309,0.2461 6.186,0.7384 1.876,0.4923 3.425,1.1193 4.659,1.887 v 6.4406 c -1.263,-0.7677 -2.709,-1.3713 -4.361,-1.8285 -1.647,-0.4512 -3.339,-0.6739 -5.084,-0.6739 -2.519,0 -3.775,0.4395 -3.775,1.3127 0,0.4103 0.195,0.715 0.585,0.9201 0.39,0.2051 1.107,0.4161 2.146,0.6388 l 4.016,0.7384 c 2.623,0.463 4.579,1.2776 5.864,2.4379 1.286,1.1604 1.928,2.8775 1.928,5.1513 0,2.4906 -1.061,4.4656 -3.19,5.9307 -2.129,1.4651 -5.147,2.1976 -9.06,2.1976 -2.301,-0.0058 -4.538,-0.293 -6.707,-0.8673 z"
+ fill="#ffffff"
+ id="path141" /><path
+ d="m 182.978,53.9839 c -2.3,-1.1487 -4.039,-2.7075 -5.198,-4.6766 -1.159,-1.9691 -1.744,-4.1843 -1.744,-6.6457 0,-2.4613 0.602,-4.6648 1.807,-6.6046 1.205,-1.9398 2.972,-3.4635 5.302,-4.5711 2.329,-1.1076 5.112,-1.6585 8.354,-1.6585 4.016,0 7.35,0.8615 10.001,2.5844 v 7.5072 c -0.935,-0.6564 -2.026,-1.1897 -3.271,-1.5999 -1.245,-0.4102 -2.576,-0.6154 -3.999,-0.6154 -2.49,0 -4.435,0.463 -5.841,1.3948 -1.406,0.9318 -2.111,2.1449 -2.111,3.651 0,1.4768 0.682,2.6841 2.048,3.6335 1.366,0.9435 3.345,1.4182 5.944,1.4182 1.337,0 2.657,-0.1993 3.959,-0.5919 1.297,-0.3985 2.416,-0.8849 3.351,-1.4593 v 7.261 c -2.943,1.805 -6.357,2.7075 -10.242,2.7075 -3.27,-0.0117 -6.059,-0.586 -8.36,-1.7346 z"
+ fill="#ffffff"
+ id="path143" /><path
+ d="m 211.518,53.9841 c -2.318,-1.1486 -4.085,-2.7192 -5.302,-4.7176 -1.216,-1.9984 -1.83,-4.2253 -1.83,-6.6867 0,-2.4613 0.608,-4.659 1.83,-6.587 1.222,-1.9281 2.978,-3.4401 5.285,-4.536 2.3,-1.0959 5.049,-1.6409 8.233,-1.6409 3.185,0 5.933,0.545 8.234,1.6409 2.301,1.0959 4.057,2.5962 5.262,4.5125 1.205,1.9164 1.807,4.114 1.807,6.6047 0,2.4613 -0.602,4.6883 -1.807,6.6866 -1.205,1.9984 -2.967,3.569 -5.285,4.7176 -2.318,1.1487 -5.055,1.723 -8.216,1.723 -3.162,0 -5.899,-0.5685 -8.211,-1.7171 z m 12.204,-7.2786 c 0.976,-0.9962 1.469,-2.3148 1.469,-3.9557 0,-1.6409 -0.488,-2.9478 -1.469,-3.9148 -0.975,-0.9728 -2.307,-1.4592 -3.993,-1.4592 -1.716,0 -3.059,0.4864 -4.04,1.4592 -0.975,0.9729 -1.463,2.2739 -1.463,3.9148 0,1.6409 0.488,2.9595 1.463,3.9557 0.976,0.9963 2.324,1.5003 4.04,1.5003 1.686,-0.0059 3.018,-0.504 3.993,-1.5003 z"
+ fill="#ffffff"
+ id="path145" /><path
+ d="m 259.17,31.3395 v 8.8609 c -1.021,-0.6857 -2.341,-1.0256 -3.976,-1.0256 -2.141,0 -3.793,0.6623 -4.941,1.9867 -1.153,1.3245 -1.727,3.3873 -1.727,6.1768 v 7.5482 h -9.84 V 30.8883 h 9.64 v 7.6302 c 0.533,-2.7896 1.4,-4.8465 2.593,-6.1769 1.188,-1.3244 2.725,-1.9866 4.596,-1.9866 1.417,0 2.634,0.3282 3.655,0.9845 z"
+ fill="#ffffff"
+ id="path147" /><path
+ d="m 291.864,25.3503 v 29.5363 h -9.841 v -5.3739 c -0.832,2.0218 -2.094,3.5631 -3.792,4.6179 -1.699,1.0491 -3.799,1.5765 -6.289,1.5765 -2.226,0 -4.165,-0.5509 -5.824,-1.6585 -1.658,-1.1076 -2.937,-2.6254 -3.838,-4.5535 -0.895,-1.9281 -1.349,-4.1081 -1.349,-6.546 -0.028,-2.5141 0.448,-4.7704 1.429,-6.7688 0.976,-1.9984 2.358,-3.5572 4.137,-4.6766 1.779,-1.1193 3.81,-1.6819 6.088,-1.6819 4.688,0 7.832,2.0804 9.438,6.2354 V 25.3503 Z m -11.309,21.1912 c 1.004,-0.9963 1.503,-2.2914 1.503,-3.8737 0,-1.5296 -0.488,-2.7779 -1.463,-3.7331 -0.976,-0.9552 -2.313,-1.4358 -3.994,-1.4358 -1.658,0 -2.983,0.4864 -3.976,1.4592 -0.993,0.9729 -1.486,2.2328 -1.486,3.7917 0,1.5589 0.493,2.8306 1.486,3.8151 0.993,0.9845 2.301,1.4768 3.936,1.4768 1.658,-0.0058 2.989,-0.504 3.994,-1.5002 z"
+ fill="#ffffff"
+ id="path149" /><path
+ d="m 139.382,33.4432 c 2.709,0 4.906,-2.0151 4.906,-4.5008 0,-2.4857 -2.197,-4.5007 -4.906,-4.5007 -2.71,0 -4.906,2.015 -4.906,4.5007 0,2.4857 2.196,4.5008 4.906,4.5008 z"
+ fill="#ffffff"
+ id="path151" /><path
+ d="m 134.472,36.5435 c 3.006,1.3244 6.736,1.383 9.811,0 v 18.4719 h -9.811 z"
+ fill="#ffffff"
+ id="path153" /></g></g><path
+ d="m 61.7958,16.494 c -4.7222,-2.2094 -9.7714,-3.8151 -15.0502,-4.7294 -0.6483,1.1721 -1.4057,2.7486 -1.9279,4.0027 -5.6115,-0.8439 -11.1714,-0.8439 -16.6797,0 -0.5221,-1.2541 -1.2967,-2.8306 -1.9508,-4.0027 -5.2845,0.9143 -10.3395,2.5259 -15.0617,4.7411 C 1.60078,30.8988 -0.981215,44.9344 0.309785,58.7707 6.62708,63.4883 12.7493,66.3541 18.7682,68.2294 c 1.4861,-2.0453 2.8115,-4.2195 3.9533,-6.5109 -2.1746,-0.8263 -4.2574,-1.846 -6.2254,-3.0298 0.5221,-0.3868 1.0328,-0.7912 1.5262,-1.2073 12.0034,5.6143 25.0454,5.6143 36.9054,0 0.4992,0.4161 1.0098,0.8205 1.5262,1.2073 -1.9738,1.1896 -4.0623,2.2093 -6.2369,3.0357 1.1418,2.2855 2.4615,4.4656 3.9533,6.5108 6.0247,-1.8753 12.1526,-4.741 18.4699,-9.4645 C 74.155,42.7309 70.0525,28.8242 61.7958,16.494 Z m -37.439,33.7675 c -3.6033,0 -6.5583,-3.3639 -6.5583,-7.4603 0,-4.0964 2.8919,-7.4661 6.5583,-7.4661 3.6665,0 6.6214,3.3638 6.5583,7.4661 0.0057,4.0964 -2.8918,7.4603 -6.5583,7.4603 z m 24.2364,0 c -3.6033,0 -6.5583,-3.3639 -6.5583,-7.4603 0,-4.0964 2.8918,-7.4661 6.5583,-7.4661 3.6664,0 6.6214,3.3638 6.5583,7.4661 0,4.0964 -2.8919,7.4603 -6.5583,7.4603 z"
+ fill="#7289da"
+ id="path316"
+ transform="matrix(0.14732889,0,0,0.1473333,59.747728,84.998373)"
+ clip-path="url(#clipPath437)"
+ style="fill:#7289da;fill-opacity:1" /><g
+ aria-label="Join us"
+ id="text232"
+ style="font-weight:bold;font-stretch:ultra-expanded;font-size:5.98733px;font-family:'ABC Ginto Nord Bold';-inkscape-font-specification:'ABC Ginto Nord Bold, Bold Ultra-Expanded';fill:#7289da;stroke-width:0.264147"><path
+ d="m 75.055954,93.217428 c 1.059757,0 1.802186,-0.484974 1.802186,-1.616579 v -2.71226 h -1.496832 v 2.37697 c 0,0.484973 -0.305354,0.736441 -0.772366,0.736441 -0.347265,0 -0.562809,-0.131721 -0.69453,-0.24548 v 1.125618 c 0.185607,0.167645 0.60472,0.33529 1.161542,0.33529 z"
+ id="path892" /><path
+ d="m 79.624282,93.199466 c 1.454921,0 2.281172,-0.844213 2.281172,-1.915945 0,-1.07772 -0.826251,-1.86206 -2.281172,-1.86206 -1.454922,0 -2.28716,0.790327 -2.28716,1.86206 0,1.065744 0.832238,1.915945 2.28716,1.915945 z m 0,-1.095681 c -0.514911,0 -0.820265,-0.323316 -0.820265,-0.796315 0,-0.472999 0.305354,-0.78434 0.820265,-0.78434 0.508923,0 0.814277,0.311341 0.814277,0.78434 0,0.472999 -0.305354,0.796315 -0.814277,0.796315 z"
+ id="path894" /><path
+ d="m 83.144829,89.595094 c 0.461024,0 0.78434,-0.245481 0.78434,-0.598733 0,-0.341278 -0.323316,-0.586759 -0.78434,-0.586759 -0.467012,0 -0.802303,0.245481 -0.802303,0.586759 0,0.353252 0.335291,0.598733 0.802303,0.598733 z m 0.724466,3.484626 V 89.816625 H 82.4024 v 3.263095 z"
+ id="path896" /><path
+ d="m 87.407809,89.421461 c -0.730454,0 -1.173516,0.329303 -1.418997,1.029821 v -0.87415 h -1.448934 v 3.502588 h 1.460909 v -1.646516 c 0,-0.544847 0.24548,-0.826252 0.724467,-0.826252 0.431088,0 0.658606,0.275417 0.658606,0.802302 v 1.670466 h 1.466896 v -1.981807 c 0,-1.101668 -0.496948,-1.676452 -1.442947,-1.676452 z"
+ id="path898" /><path
+ d="m 93.832213,91.301483 c 0,0.532872 -0.239493,0.808289 -0.700517,0.808289 -0.44905,0 -0.652619,-0.24548 -0.652619,-0.778353 v -1.754287 h -1.466896 v 2.113527 c 0,0.969947 0.496948,1.514794 1.436959,1.514794 0.664594,0 1.149567,-0.293379 1.383073,-0.8502 v 0.724467 h 1.466896 v -3.502588 h -1.466896 z"
+ id="path900" /><path
+ d="m 97.729969,93.199466 c 1.191478,0 1.826135,-0.478986 1.826135,-1.185491 0,-0.69453 -0.419113,-0.975935 -1.161542,-1.107656 l -0.598733,-0.107772 c -0.287391,-0.05987 -0.407138,-0.0958 -0.407138,-0.227519 0,-0.119746 0.161658,-0.191594 0.562809,-0.191594 0.53886,0 1.047783,0.15567 1.407022,0.365227 V 89.80465 c -0.341277,-0.209556 -0.910074,-0.383189 -1.616579,-0.383189 -1.137592,0 -1.856072,0.44905 -1.856072,1.14358 0,0.532872 0.293379,0.868163 1.137593,1.065745 l 0.718479,0.161658 c 0.245481,0.05987 0.29338,0.143696 0.29338,0.251467 0,0.0958 -0.125734,0.191595 -0.437076,0.191595 -0.610707,0 -1.347149,-0.215544 -1.742313,-0.520898 v 0.993897 c 0.520898,0.317329 1.185492,0.490961 1.874035,0.490961 z"
+ id="path902" /></g></g><style
+ id="style834">.st0{fill:#FFFFFF;}</style></svg>
diff --git a/pydis_site/static/js/content/listing.js b/pydis_site/static/js/content/listing.js
new file mode 100644
index 00000000..4b722632
--- /dev/null
+++ b/pydis_site/static/js/content/listing.js
@@ -0,0 +1,41 @@
+/**
+ * Trim a tag listing to only show a few lines of content.
+ */
+function trimTag() {
+ const containers = document.getElementsByClassName("tag-container");
+ for (const container of containers) {
+ if (container.textContent.startsWith("Contains the following tags:")) {
+ // Tag group, no need to trim
+ continue;
+ }
+
+ // Remove every element after the first two paragraphs
+ while (container.children.length > 2) {
+ container.removeChild(container.lastChild);
+ }
+
+ // Trim down the elements if they are too long
+ const containerLength = container.textContent.length;
+ if (containerLength > 300) {
+ if (containerLength - container.firstChild.textContent.length > 300) {
+ // The first element alone takes up more than 300 characters
+ container.removeChild(container.lastChild);
+ }
+
+ let last = container.lastChild.lastChild;
+ while (container.textContent.length > 300 && container.lastChild.childNodes.length > 0) {
+ last = container.lastChild.lastChild;
+ last.remove();
+ }
+
+ if (container.textContent.length > 300 && (last instanceof HTMLElement && last.tagName !== "CODE")) {
+ // Add back the final element (up to a period if possible)
+ const stop = last.textContent.indexOf(".");
+ last.textContent = last.textContent.slice(0, stop > 0 ? stop + 1: null);
+ container.lastChild.appendChild(last);
+ }
+ }
+ }
+}
+
+trimTag();
diff --git a/pydis_site/templates/base/navbar.html b/pydis_site/templates/base/navbar.html
index d7fb4f4c..931693c8 100644
--- a/pydis_site/templates/base/navbar.html
+++ b/pydis_site/templates/base/navbar.html
@@ -67,6 +67,9 @@
<a class="navbar-item" href="{% url "resources:index" %}">
Resources
</a>
+ <a class="navbar-item" href="{% url "content:pages" %}">
+ Content
+ </a>
<a class="navbar-item" href="{% url "events:index" %}">
Events
</a>
diff --git a/pydis_site/templates/content/base.html b/pydis_site/templates/content/base.html
index 4a19a275..2fd721a3 100644
--- a/pydis_site/templates/content/base.html
+++ b/pydis_site/templates/content/base.html
@@ -8,6 +8,10 @@
<meta property="og:description" content="{{ page_description }}" />
<link rel="stylesheet" href="{% static "css/content/page.css" %}">
<link rel="stylesheet" href="{% static "css/collapsibles.css" %}">
+ <link rel="stylesheet"
+ href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css">
+ <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script>
+ <script>hljs.highlightAll();</script>
<script src="{% static "js/collapsibles.js" %}"></script>
{% endblock %}
@@ -35,7 +39,7 @@
<section class="section">
<div class="container">
<div class="content">
- <h1 class="title">{{ page_title }}</h1>
+ <h1 class="title">{% block title_element %}{{ page_title }}{% endblock %}</h1>
{% block page_content %}{% endblock %}
</div>
</div>
diff --git a/pydis_site/templates/content/listing.html b/pydis_site/templates/content/listing.html
index ef0ef919..934b95f6 100644
--- a/pydis_site/templates/content/listing.html
+++ b/pydis_site/templates/content/listing.html
@@ -1,6 +1,22 @@
+{# Base navigation screen for resources #}
{% extends 'content/base.html' %}
+{% load static %}
+
+{# Show a GitHub button on tag pages #}
+{% block title_element %}
+{% if is_tag_listing %}
+ <link rel="stylesheet" href="{% static "css/content/color.css" %}">
+ <div class="level">
+ <div class="level-left">{{ block.super }}</div>
+ <div class="level-right">
+ <a class="level-item fab fa-github" href="{{ tag_url }}"></a>
+ </div>
+ </div>
+{% endif %}
+{% endblock %}
{% block page_content %}
+ {# Nested Categories #}
{% for category, data in categories.items %}
<div class="box" style="max-width: 800px;">
<span class="icon is-size-4 is-medium">
@@ -13,15 +29,22 @@
<p class="is-italic">{{ data.description }}</p>
</div>
{% endfor %}
+
+ {# Single Pages #}
{% for page, data in pages.items %}
<div class="box" style="max-width: 800px;">
<span class="icon is-size-4 is-medium">
<i class="{{ data.icon|default:"fab fa-python" }} is-size-3 is-black has-icon-padding" aria-hidden="true"></i>
</span>
- <a href="{% url "content:page_category" location=path|add:page %}">
+ <a href="{% url app_name location=path|add:page %}">
<span class="is-size-4 has-text-weight-bold">{{ data.title }}</span>
</a>
- <p class="is-italic">{{ data.description }}</p>
+ {% if is_tag_listing %}
+ <div class="tag-container">{{ data.description | safe }}</div>
+ {% else %}
+ <p class="is-italic">{{ data.description }}</p>
+ {% endif %}
</div>
{% endfor %}
+ <script src="{% static 'js/content/listing.js' %}"></script>
{% endblock %}
diff --git a/pydis_site/templates/content/page.html b/pydis_site/templates/content/page.html
index 759286f6..679ecec6 100644
--- a/pydis_site/templates/content/page.html
+++ b/pydis_site/templates/content/page.html
@@ -1,13 +1,5 @@
{% extends 'content/base.html' %}
-{% block head %}
- {{ block.super }}
- <link rel="stylesheet"
- href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/styles/atom-one-dark.min.css">
- <script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.1/highlight.min.js"></script>
- <script>hljs.initHighlightingOnLoad();</script>
-{% endblock %}
-
{% block page_content %}
{% if relevant_links or toc %}
<div class="columns is-variable is-8">
diff --git a/pydis_site/templates/content/tag.html b/pydis_site/templates/content/tag.html
new file mode 100644
index 00000000..fa9e44f5
--- /dev/null
+++ b/pydis_site/templates/content/tag.html
@@ -0,0 +1,40 @@
+{% extends "content/page.html" %}
+{% load static %}
+
+{% block head %}
+ {{ block.super }}
+ <link rel="stylesheet" href="{% static 'css/content/color.css' %}"/>
+ <link rel="stylesheet" href="{% static 'css/content/tag.css' %}"/>
+ <title>{{ tag.name }}</title>
+{% endblock %}
+
+{% block title_element %}
+ <div class="level mb-2">
+ <div class="level-left">{{ block.super }}</div>
+ <div class="level-right">
+ <a class="level-item fab fa-github" href="{{ tag.url }}"></a>
+ </div>
+ </div>
+
+ <div class="dropdown is-size-6 is-hoverable">
+ <div class="dropdown-trigger ">
+ <a aria-haspopup="menu" href="{{ tag.last_commit.url }}">
+ <span class="update-time">
+ Last Updated: {{ tag.last_commit.date | date:"F j, Y g:i A e" }}
+ </span>
+ </a>
+ </div>
+ <div class="dropdown-menu">
+ <div class="dropdown-content">
+ <div class="dropdown-item">Last edited by:</div>
+ {% for user in tag.last_commit.format_authors %}
+ <div class="dropdown-item">{{ user }}</div>
+ {% endfor %}
+ <div class="dropdown-divider"></div>
+ {% for line in tag.last_commit.lines %}
+ <div class="dropdown-item">{{ line }}</div>
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+{% endblock %}
diff --git a/pydis_site/templates/events/index.html b/pydis_site/templates/events/index.html
index 158ec56b..640682d0 100644
--- a/pydis_site/templates/events/index.html
+++ b/pydis_site/templates/events/index.html
@@ -8,8 +8,11 @@
{% block event_content %}
<div class="box">
- <h2 class="title is-4">Code Jams</h2>
- <p>Each year, we organize at least one code jam, one during the summer and sometimes one during the winter. During these events, members of our community will work together in teams to create something amazing using a technology we picked for them. One such technology that was picked for the Summer 2021 Code Jam was text user interfaces (TUIS), where teams could pick from a pre-approved list of frameworks.</p>
+ <h2 class="title is-4"><a href="{% url "events:page" path="code-jams" %}">Code Jams</a></h2>
+ <div class="notification is-success">
+        <a href="{% url "events:page" path="code-jams/9" %}">The <b>2022 Summer Code Jam</b> is underway!</a>
+ </div>
+ <p>Every year we hold a community-wide Summer Code Jam. For this event, members of our community are assigned to teams to collaborate and create something amazing using a technology we picked for them. One such technology that was picked for the Summer 2021 Code Jam was text user interfaces (TUIs), where teams could pick from a pre-approved list of frameworks.</p>
<p>To help fuel the creative process, we provide a specific theme, like <strong>Think Inside the Box</strong> or <strong>Early Internet</strong>. At the end of the Code Jam, the projects are judged by Python Discord server staff members and guest judges from the larger Python community. The judges will consider creativity, code quality, teamwork, and adherence to the theme.</p>
<p>If you want to read more about Code Jams, visit our <a href="{% url "events:page" path="code-jams" %}">Code Jam info page</a> or watch this video showcasing the best projects created during the <strong>Winter Code Jam 2020: Ancient Technology</strong>:</p>
<iframe width="560" height="315" src="https://www.youtube.com/embed/8fbZsGrqBzo" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen=""></iframe>
diff --git a/pydis_site/templates/events/pages/code-jams/9/_index.html b/pydis_site/templates/events/pages/code-jams/9/_index.html
new file mode 100644
index 00000000..ca7c4f90
--- /dev/null
+++ b/pydis_site/templates/events/pages/code-jams/9/_index.html
@@ -0,0 +1,127 @@
+{% extends "events/base_sidebar.html" %}
+
+{% load static %}
+
+{% block title %}Summer Code Jam 2022{% endblock %}
+
+{% block breadcrumb %}
+ <li><a href="{% url "events:index" %}">Events</a></li>
+ <li><a href="{% url "events:page" path="code-jams" %}">Code Jams</a></li>
+ <li class="is-active"><a href="#">Summer Code Jam 2022</a></li>
+{% endblock %}
+
+{% block event_content %}
+ <p>Once a year we host a code jam for members of our server to participate in. The code jam is an event where we place you
+ in a team with 5 other random server members. You then have 11 days to code some sort of application or program in Python.
+ Your program must use the specified technology/framework and incorporate the theme chosen by the server.
+ </p>
+ <p>
+ After the 11 days are complete, your team has 4 days to finish documentation and create a video presentation showcasing
+ and walking through the program that your team has created. More details and specifics of this will be released within the next 2 weeks.
+ </p>
+
+ <h3 id="important-dates"><a href="#important-dates">Important Dates</a></h3>
+ <ul>
+ <li><strike>Saturday, June 18 - Form to submit theme suggestions opens</strike></li>
+ <li><strike>Wednesday, June 29 - The Qualifier is released</strike></li>
+ <li><strike>Wednesday, July 6 - Voting for the theme opens</strike></li>
+ <li><strike>Wednesday, July 13 - The Qualifier closes</strike></li>
+ <li><strike>Thursday, July 21 - Code Jam Begins</strike></li>
+ <li><strike>Sunday, July 31 - Coding portion of the jam ends</strike></li>
+        <li><strike>Thursday, August 4 - Code Jam submissions are closed</strike></li>
+ </ul>
+
+    <h3 id="qualifier"><a href="#qualifier">The Qualifier</a></h3>
+ <p>
+ The qualifier is a coding challenge that you are required to complete before registering for the code jam.
+        This is meant as a basic assessment of your skills to ensure you have enough Python knowledge to contribute effectively in a team environment.
+ </p>
+ <p class="has-text-centered"><a class="button is-link" href="https://github.com/python-discord/code-jam-qualifier-9/" target="_blank" rel="noopener">View the Qualifier</a></p>
+ <p>
+ Please note the requirements for the qualifier.
+ <ul>
+ <li>The qualifier must be completed using Python 3.10</li>
+ <li>No external modules are allowed, only those available through the standard library.</li>
+ <li>The Qualifier must be submitted through the Code Jam sign-up form.</li>
+ </ul>
+ </p>
+
+ <h3 id="technology"><a href="#technology">Technology</a></h3>
+ <p>
+ The chosen technology/tech stack for this year is <strong>WebSockets</strong>.
+ Each team must make use of <a href="{% url "events:page" path="code-jams/9/frameworks" %}">the approved frameworks</a> to create a WebSockets-based app.
+        For more information on WebSockets, check out <a href="https://en.wikipedia.org/wiki/WebSocket" target="_blank" rel="noopener">this Wikipedia article</a>.
+ </p>
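
As an illustration of the kind of app teams are expected to build, here is a minimal sketch (not part of the event pages, and assuming a recent release of the approved websockets framework in which handlers receive just the connection) of a WebSocket echo server:

    import asyncio

    import websockets

    async def echo(websocket):
        # Send every message received over the WebSocket straight back to the client.
        async for message in websocket:
            await websocket.send(f"echo: {message}")

    async def main():
        # Serve the handler on localhost:8765 until the process is stopped.
        async with websockets.serve(echo, "localhost", 8765):
            await asyncio.Future()

    asyncio.run(main())
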
+
+ <h3 id="prizes"><a href="#prizes">Prizes</a></h3>
+ <p>
+ Our Code Jam Sponsors have provided prizes for the winners of the code jam.
+ Also, thanks to our Patreon patrons supporting this server, we are able to send members of the winning teams
+ Python Discord t-shirts and possibly other goodies.
+ </p>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="media">
+ <div class="media-left" style="max-width:150px">
+ <img src="{% static "images/events/DO_Logo_Vertical_Blue.png" %}" alt="Digital Ocean">
+ </div>
+ <div class="media-content">
+ <p class="subtitle has-link"><a href="https://www.digitalocean.com/" target="_blank" rel="noopener">DigitalOcean</a></p>
+ <p class="is-italic">
+ Scalable compute platform with add-on storage, security, and monitoring capabilities.
+ We make it simple to launch in the cloud and scale up as you grow—whether you’re running one virtual machine or ten thousand.
+ </p>
+ <p><strong>Prizes</strong><br>
+ DigitalOcean credits to the members of a winning team.</p>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="media">
+ <div class="media-left" style="max-width:150px">
+ <img src="{% static "images/sponsors/jetbrains.png" %}" alt="JetBrains">
+ </div>
+ <div class="media-content">
+ <p class="subtitle has-link"><a href="https://www.jetbrains.com/" target="_blank" rel="noopener">JetBrains</a></p>
+ <p class="is-italic">
+ Whatever platform or language you work with, JetBrains has a development tool for you.
+ We help developers work faster by automating common, repetitive tasks to enable them to stay focused on code design and the big picture.
+ We provide tools to explore and familiarize with code bases faster. Our products make it easy for you to take care of quality during all stages of development and spend less time on maintenance tasks.
+ </p>
+ <p><strong>Prizes</strong><br>
+                    1-year JetBrains licenses to the members of a winning team.</p>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ <div class="card mb">
+ <div class="card-content">
+ <div class="media">
+ <div class="media-left" style="max-width:150px">
+ <img src="{% static "images/events/Replit.png" %}" alt="Replit">
+ </div>
+ <div class="media-content">
+ <p class="subtitle has-link"><a href="https://www.replit.com" target="_blank" rel="noopener">Replit</a></p>
+ <p class="is-italic">Start coding instantly, right from your browser.
+ With GitHub integration and support for nearly every major programming language, Replit is the best place to code.
+ Our mission is to bring the next billion software creators online.
+ We build powerful, simple tools and platforms for learners, educators, and developers.
+ </p>
+ <p><strong>Prizes</strong><br>
+ Three months of the Replit hacker plan to the members of a winning team.</p>
+ </div>
+ </div>
+ </div>
+ </div>
+{% endblock %}
+
+{% block sidebar %}
+
+ {% include "events/sidebar/code-jams/9.html" %}
+
+{% endblock %}
diff --git a/pydis_site/templates/events/pages/code-jams/9/frameworks.html b/pydis_site/templates/events/pages/code-jams/9/frameworks.html
new file mode 100644
index 00000000..b462c733
--- /dev/null
+++ b/pydis_site/templates/events/pages/code-jams/9/frameworks.html
@@ -0,0 +1,148 @@
+{% extends "events/base_sidebar.html" %}
+
+{% load static %}
+
+{% block title %}Summer Code Jam 2022{% endblock %}
+
+{% block breadcrumb %}
+ <li><a href="{% url "events:index" %}">Events</a></li>
+ <li><a href="{% url "events:page" path="code-jams" %}">Code Jams</a></li>
+ <li><a href="{% url "events:page" path="code-jams/9" %}">Summer Code Jam 2022</a></li>
+ <li class="is-active"><a href="#">Approved Frameworks</a></li>
+{% endblock %}
+
+{% block event_content %}
+ <p>Below is the list of approved frameworks that you can use for the code jam.
+    Please work with your team to choose a library that everyone can and wants to develop with.
+ If there is a library not listed below that you think should be here, you're welcome to discuss it with the Events Team over at <a href="https://discord.gg/HnGd3znxhJ">the server</a>.
+ </p>
+
+ <div class="notification is-info is-light">
+    <p>Most of the frameworks below implement what is called the ASGI Specification.
+    This specification documents how the frameworks should interact with ASGI servers.
+    You are also allowed to <strong>work with the ASGI specification directly</strong> without a framework, if your team so chooses; a bare-bones sketch of that approach follows this note.
+ Refer to the <a href="https://asgi.readthedocs.io/en/latest/">specification online</a>.
+ </p>
+ </div>
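
For teams considering that route, a bare-bones sketch (again not part of the event pages) of a framework-free WebSocket echo app written directly against the ASGI specification, runnable under any ASGI server such as uvicorn:

    async def application(scope, receive, send):
        # ASGI hands the app a connection scope plus receive/send callables.
        assert scope["type"] == "websocket"
        while True:
            event = await receive()
            if event["type"] == "websocket.connect":
                await send({"type": "websocket.accept"})
            elif event["type"] == "websocket.receive":
                # Echo text frames back to the client.
                await send({"type": "websocket.send", "text": event.get("text") or ""})
            elif event["type"] == "websocket.disconnect":
                break
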
+
+ <h3 id="approved-frameworks"><a href="#approved-frameworks">Approved Frameworks</a></h3>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">FastAPI</p>
+ <p>FastAPI is a modern web framework great for WebSockets based on standard Python type hints which provides great editor support.</p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://fastapi.tiangolo.com/advanced/websockets" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/tiangolo/fastapi" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">Starlette</p>
+ <p>Starlette is a lightweight ASGI framework/toolkit, which is ideal for building async web services in Python.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://www.starlette.io/websockets" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/encode/starlette" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">websockets</p>
+            <p>websockets is a library for building both WebSocket clients and servers, with a focus on simplicity and performance.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://websockets.readthedocs.io/en/stable" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/aaugustin/websockets" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">aiohttp</p>
+ <p>aiohttp provides both a client and server WebSocket implementation, while avoiding callback-hell.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://docs.aiohttp.org/en/stable/client_quickstart.html#websockets" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/aio-libs/aiohttp" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
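On the server side, an aiohttp sketch of the same echo idea could look like this (handler and route names are illustrative):

```python
from aiohttp import WSMsgType, web

async def websocket_handler(request: web.Request) -> web.WebSocketResponse:
    ws = web.WebSocketResponse()
    await ws.prepare(request)

    # Iterate over incoming messages until the client closes the connection.
    async for msg in ws:
        if msg.type == WSMsgType.TEXT:
            await ws.send_str(msg.data)

    return ws

app = web.Application()
app.add_routes([web.get("/ws", websocket_handler)])

if __name__ == "__main__":
    web.run_app(app)
```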
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">Django Channels</p>
+ <p>Django Channels adds WebSocket support to Django, built on ASGI like the other frameworks listed here.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://channels.readthedocs.io/en/stable" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/django/channels" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
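A rough sketch of an echo consumer with Django Channels, assuming a Django project already set up for Channels (module and route names are illustrative):

```python
# consumers.py (file and route names are illustrative)
from channels.generic.websocket import AsyncWebsocketConsumer
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import path


class EchoConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        # Accept the WebSocket handshake.
        await self.accept()

    async def receive(self, text_data=None, bytes_data=None):
        # Echo text frames back to the client.
        if text_data is not None:
            await self.send(text_data=text_data)


# asgi.py: route WebSocket connections to the consumer
# (an "http" entry pointing at Django's ASGI app would normally sit alongside this).
application = ProtocolTypeRouter({
    "websocket": URLRouter([path("ws/echo/", EchoConsumer.as_asgi())]),
})
```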
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">Starlite</p>
+ <p>Starlite is a light and flexible ASGI API framework, using Starlette and Pydantic as foundations.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://starlite-api.github.io/starlite" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/starlite-api/starlite" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">Sanic</p>
+ <p>Sanic is an ASGI-compliant web framework designed for speed and simplicity.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://sanic.dev/en/guide/advanced/websockets.html" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/sanic-org/sanic" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
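A Sanic echo handler, sketched along the lines of the examples in its documentation (app name and route are illustrative; close and error handling are omitted for brevity):

```python
from sanic import Sanic

app = Sanic("EchoApp")  # the app name is illustrative

@app.websocket("/ws")
async def echo(request, ws):
    # Echo every received message back to the client.
    while True:
        data = await ws.recv()
        await ws.send(data)

if __name__ == "__main__":
    app.run(host="127.0.0.1", port=8000)
```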
+
+ <div class="card mb-4">
+ <div class="card-content">
+ <div class="content">
+ <p class="subtitle">wsproto</p>
+ <p>wsproto is a pure-Python WebSocket protocol stack, written to be as flexible as possible by leaving the bridge to the I/O layer up to the user.
+ </p>
+ </div>
+ </div>
+ <div class="card-footer">
+ <a href="https://python-hyper.org/projects/wsproto/en/stable" class="card-footer-item"><i class="fas fa-book"></i>&ensp;Documentation</a>
+ <a href="https://github.com/python-hyper/wsproto" class="card-footer-item"><i class="fab fa-github"></i>&ensp;GitHub</a>
+ </div>
+ </div>
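Because wsproto is sans-I/O, you feed it raw bytes from a socket you manage yourself and write back whatever bytes it returns. A rough sketch of the server-side event handling, with the actual socket code left to you (the helper name is illustrative; check the linked docs for the exact event API):

```python
from wsproto import ConnectionType, WSConnection
from wsproto.events import AcceptConnection, CloseConnection, Request, TextMessage

ws = WSConnection(ConnectionType.SERVER)

def handle_incoming(data: bytes) -> bytes:
    """Feed bytes read from your own socket; return the bytes to write back."""
    out = b""
    ws.receive_data(data)
    for event in ws.events():
        if isinstance(event, Request):
            out += ws.send(AcceptConnection())            # complete the handshake
        elif isinstance(event, TextMessage):
            out += ws.send(TextMessage(data=event.data))  # echo the text frame
        elif isinstance(event, CloseConnection):
            out += ws.send(event.response())              # acknowledge the close
    return out
```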
+
+
+{% endblock %}
+
+{% block sidebar %}
+
+ {% include "events/sidebar/code-jams/9.html" %}
+
+{% endblock %}
diff --git a/pydis_site/templates/events/pages/code-jams/9/rules.html b/pydis_site/templates/events/pages/code-jams/9/rules.html
new file mode 100644
index 00000000..9a28852f
--- /dev/null
+++ b/pydis_site/templates/events/pages/code-jams/9/rules.html
@@ -0,0 +1,80 @@
+{% extends "events/base_sidebar.html" %}
+
+{% block title %}Summer Code Jam 2022{% endblock %}
+
+{% block breadcrumb %}
+ <li><a href="{% url "events:index" %}">Events</a></li>
+ <li><a href="{% url "events:page" path="code-jams" %}">Code Jams</a></li>
+ <li><a href="{% url "events:page" path="code-jams/9" %}">Summer Code Jam 2022</a></li>
+ <li class="is-active"><a href="#">Rules</a></li>
+{% endblock %}
+
+{% block event_content %}
+<ol>
+ <li><p>Your solution must use one of the approved frameworks. It is not permitted to circumvent this rule by e.g. using the approved framework as a wrapper for another framework.</p></li>
+ <li>
+ <p>
+ <strong>The core of your project must use WebSockets as its communication protocol.</strong>
+ This means you may use other methods of communication where WebSockets cannot be implemented; however, those should make up only a small portion of your project.
+ For example, serving static files for a website cannot be done over WebSockets and does not constitute a significant portion of a project, so it is allowed.
+ </p>
+
+ <p>This rule does not apply to databases and files when used for <i>storage purposes</i> even though that may be a significant portion of your project. Working with subprocesses (through stdin/stdout or <code>multiprocessing.Pool()</code>/<code>concurrent.futures.ProcessPoolExecutor()</code>) is also exempt from this rule.</p>
+
+ <p>If you're unsure about your use of non-WebSocket communication, please reach out to the events team.</p>
+ </li>
+ <li><p>Your solution should be platform agnostic. For example, if you use filepaths in your submission, use <code>pathlib</code> to create platform-agnostic <code>Path</code> objects instead of hardcoding the paths.</p></li>
+ <li>
+ <p>
+ You must document precisely how to install and run your project.
+ This should be as easy as possible, which means you should consider using dependency managers like <code>pipenv</code> or <code>poetry</code>.
+ We would also encourage you to use <code>docker</code> and <code>docker-compose</code> to containerize your project, but this isn't a requirement.
+ </p>
+ </li>
+ <li>
+ You must get contributions from every member of your team. If you have an issue with someone on your team, please contact a member of the administration team.
+ These contributions do not necessarily have to be code; for example, it's absolutely fine for someone to contribute management, documentation, graphics, or audio.
+ <strong>
+ Team members that do not contribute will be removed from the Code Jam, and will not receive their share of any prizes the team may win.
+ They may also be barred from entering future events.
+ </strong>
+ </li>
+ <li><p>You must use GitHub as source control.</p></li>
+ <li>
+ <p>
+ All code and assets must be compatible with the <a href="https://en.wikipedia.org/wiki/MIT_License">MIT license</a>.
+ This is because we will be merging your submission into our <code>summer-code-jam-2022</code> repo at the end of the jam,
+ and this repo is licensed with the MIT license.
+ <strong>Projects that include assets that are incompatible with this license may be disqualified.</strong>
+ </p>
+ </li>
+ <li><p>All code must be written and committed within the time constraints of the jam. Late commits may be reverted, so make sure you leave enough time to test your program for bugs.</p></li>
+ <li>
+ <p>
+ Use English as the main language for your project, including names, comments, documentation, and commit messages.
+ Any text displayed in your application should also be in English,
+ although you are allowed to provide the user with options for internationalisation and translation.
+ </p>
+ </li>
+ <li>
+ <p>
+ Once the coding portion of the code jam is complete, your team must create a video presentation that showcases and explains your final product.
+ It must be uploaded somewhere the judges can view it (e.g. an unlisted YouTube video, Vimeo, etc.).
+ The video can be as simple as a screen recording with annotated text.
+ Teams who do not submit a final video presentation may be disqualified.
+ </p>
+ </li>
+</ol>
+
+<blockquote>
+ Please note that our regular
+ <a href="/pages/rules">community rules</a> and <a href="/pages/code-of-conduct">code of conduct</a>
+ also apply during the event and that we reserve the right to make changes to these rules at any time.
+</blockquote>
+{% endblock %}
+
+{% block sidebar %}
+
+ {% include "events/sidebar/code-jams/9.html" %}
+
+{% endblock %}
diff --git a/pydis_site/templates/events/pages/code-jams/_index.html b/pydis_site/templates/events/pages/code-jams/_index.html
index 207d4b9a..c7975679 100644
--- a/pydis_site/templates/events/pages/code-jams/_index.html
+++ b/pydis_site/templates/events/pages/code-jams/_index.html
@@ -31,7 +31,7 @@
<h2 class="title is-4" id="how-often-do-these-happen"><a href="#how-often-do-these-happen">How often do these happen?</a></h2>
<p>
- Our Code Jams happen twice a year. We have a Winter Jam and a Summer Jam.
+ Our Code Jams happen once a year every summer.
</p>
<h2 class="title is-4" id="what-happens-if-i-have-to-drop-out"><a href="#what-happens-if-i-have-to-drop-out">What happens if I have to drop out?</a></h2>
diff --git a/pydis_site/templates/events/sidebar/code-jams/9.html b/pydis_site/templates/events/sidebar/code-jams/9.html
new file mode 100644
index 00000000..2351973f
--- /dev/null
+++ b/pydis_site/templates/events/sidebar/code-jams/9.html
@@ -0,0 +1,21 @@
+{% load static %}
+<div class="panel">
+ <p class="panel-heading">Important Links</p>
+ <a class="panel-block has-text-link" href="{% url "events:page" path="code-jams/9/rules" %}">Rules</a>
+ <a class="panel-block has-text-link" href="{% url "events:page" path="code-jams/9/frameworks" %}">Approved Frameworks</a>
+ <a class="panel-block has-text-link" href="{% url "events:page" path="code-jams/code-style-guide" %}">The Code Style Guide</a>
+</div>
+<div class="box">
+ <img src="{% static "images/events/summer_code_jam_2022/site_banner.png" %}" alt="Summer Code Jam 2022">
+ <h4 class="menu-label">Our Sponsors</h4>
+ <a href="https://www.digitalocean.com/" target="_blank">
+ <img src="{% static "images/events/DO_Logo_Vertical_Blue.png" %}" alt="Digital Ocean">
+ </a>
+ <a href="https://jetbrains.com" target="_blank">
+ <img src="{% static "images/sponsors/jetbrains.png" %}" alt="JetBrains">
+ </a>
+ <a href="https://replit.com/" target="_blank">
+ <img src="{% static "images/events/Replit.png" %}" alt="Replit">
+ </a>
+</div>
diff --git a/pydis_site/templates/events/sidebar/code-jams/ongoing-code-jam.html b/pydis_site/templates/events/sidebar/code-jams/ongoing-code-jam.html
index f4fa3a37..37569e57 100644
--- a/pydis_site/templates/events/sidebar/code-jams/ongoing-code-jam.html
+++ b/pydis_site/templates/events/sidebar/code-jams/ongoing-code-jam.html
@@ -1,8 +1,8 @@
{% load static %}
<div class="box">
- <h4 class="menu-label">Ongoing Code Jam</h4>
- <a href="{% url "events:page" path="code-jams/8" %}">
- <img src="{% static "images/events/summer_code_jam_2021/banner.png" %}" alt="Summer Code Jam 2021">
+ <h4 class="menu-label">Upcoming Code Jam</h4>
+ <a href="{% url "events:page" path="code-jams/9" %}">
+ <img src="{% static "images/events/summer_code_jam_2022/banner.png" %}" alt="Summer Code Jam 2022">
</a>
</div>
diff --git a/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html b/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
index 21b2ccb4..28412c53 100644
--- a/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
+++ b/pydis_site/templates/events/sidebar/code-jams/previous-code-jams.html
@@ -1,6 +1,7 @@
<div class="box">
<p class="menu-label">Previous Code Jams</p>
<ul class="menu-list">
+ <li><a class="has-text-link" href="{% url "events:page" path="code-jams/9" %}">Code Jam 9: It's Not A Bug, It's A Feature</a></li>
<li><a class="has-text-link" href="{% url "events:page" path="code-jams/8" %}">Code Jam 8: Think Inside the Box</a></li>
<li><a class="has-text-link" href="{% url "events:page" path="code-jams/7" %}">Code Jam 7: Early Internet</a></li>
<li><a class="has-text-link" href="{% url "events:page" path="code-jams/6" %}">Code Jam 6: Ancient Technology</a></li>
diff --git a/pydis_site/templates/events/sidebar/events-list.html b/pydis_site/templates/events/sidebar/events-list.html
index 5dfe5dc2..8deac80e 100644
--- a/pydis_site/templates/events/sidebar/events-list.html
+++ b/pydis_site/templates/events/sidebar/events-list.html
@@ -1,10 +1,17 @@
<div class="box">
- <p class="menu-label">Event Calendar 2021</p>
+ <p class="menu-label">Event Calendar 2022</p>
<ul class="menu-list">
- <li><a class="has-text-link" href="https://pyweek.org/31/" target="_blank" rel="noopener">March: PyWeek 31</a></li>
- <li><a class="has-text-black" style="cursor: default;">May: Pixels</a></li>
- <li><a class="has-text-link" href="{% url "events:page" path="code-jams/8" %}">July: Summer Code Jam</a></li>
- <li><a class="has-text-link" href="https://pyweek.org/32/" target="_blank" rel="noopener">September: PyWeek 32</a></li>
+ <li><a class="has-text-link" href="https://pyweek.org/33/" target="_blank" rel="noopener">March: PyWeek 33</a></li>
+ <li><a class="has-text-link" href="{% url "events:page" path="code-jams/9" %}">July: Summer Code Jam</a></li>
+ <li><a class="has-text-link" href="https://pyweek.org/34/" target="_blank" rel="noopener">September: PyWeek 34</a></li>
+ <li><a class="has-text-black" style="cursor: default;">October: Pixels</a></li>
<li><a class="has-text-black" style="cursor: default;">December: Advent of Code</a></li>
</ul>
</div>
+
+<div class="box">
+ <p class="menu-label">Related Links</p>
+ <ul class="menu-list">
+ <li><a class="has-text-link" href="{% url "events:page" path="code-jams" %}">Code Jams</a></li>
+ </ul>
+</div>
diff --git a/pydis_site/templates/events/sidebar/ongoing-event.html b/pydis_site/templates/events/sidebar/ongoing-event.html
index 37dfdf77..e375fa38 100644
--- a/pydis_site/templates/events/sidebar/ongoing-event.html
+++ b/pydis_site/templates/events/sidebar/ongoing-event.html
@@ -1,8 +1,8 @@
{% load static %}
<div class="box">
- <p class="menu-label">Ongoing Event</p>
- <a href="{% url "events:page" path="code-jams/8" %}">
- <img src="{% static "images/events/summer_code_jam_2021/banner.png" %}" alt="Summer Code Jam 2021">
+ <p class="menu-label">Upcoming Event</p>
+ <a href="{% url "events:page" path="code-jams/9" %}">
+ <img src="{% static "images/events/summer_code_jam_2022/banner.png" %}" alt="Summer Code Jam 2022">
</a>
</div>
diff --git a/pydis_site/templates/home/index.html b/pydis_site/templates/home/index.html
index c7350cac..cf6ff8cd 100644
--- a/pydis_site/templates/home/index.html
+++ b/pydis_site/templates/home/index.html
@@ -9,6 +9,13 @@
{% block content %}
{% include "base/navbar.html" %}
+ <!-- Mobile-only Code Jam Banner -->
+ <section id="mobile-notice" class="is-primary is-hidden-tablet">
+ <a href="/events/code-jams/9/">
+ <img src="{% static "images/events/summer_code_jam_2022/site_banner.png" %}" alt="Summer Code Jam 2022">
+ </a>
+ </section>
+
<!-- Wave Hero -->
<section id="wave-hero" class="section is-hidden-mobile">
@@ -37,7 +44,15 @@
></iframe>
</div>
</div>
+
+ {# Code Jam Banner #}
+ <div id="wave-hero-right" class="column is-half">
+ <a href="/events/code-jams/9/">
+ <img src="{% static "images/events/summer_code_jam_2022/site_banner.png" %}" alt="Summer Code Jam 2022">
+ </a>
+ </div>
</div>
+
</div>
{# Animated wave elements #}
@@ -84,9 +99,9 @@
<div class="mini-timeline">
<i class="fa fa-asterisk"></i>
<i class="fa fa-code"></i>
- <i class="fab fa-python"></i>
- <i class="fa fa-alien-monster"></i>
- <i class="fa fa-duck"></i>
+ <i class="fab fa-lg fa-python"></i>
+ <i class="fab fa-discord"></i>
+ <i class="fa fa-sm fa-terminal"></i>
<i class="fa fa-bug"></i>
</div>
diff --git a/pydis_site/templates/staff/logs.html b/pydis_site/templates/staff/logs.html
index 7bd6ba29..5e2a200b 100644
--- a/pydis_site/templates/staff/logs.html
+++ b/pydis_site/templates/staff/logs.html
@@ -14,7 +14,7 @@
<li>Date: {{ deletion_context.creation }}</li>
</ul>
<div class="is-divider has-small-margin"></div>
- {% for message in deletion_context.deletedmessage_set.all %}
+ {% for message in deletion_context.deletedmessage_set.all reversed %}
<div class="discord-message">
<div class="discord-message-header">
<span class="discord-username"
@@ -22,7 +22,7 @@
</span>
<span class="discord-message-metadata has-text-grey">
User ID: {{ message.author.id }}<br>
- {{ message.timestamp }} (Channel ID: {{ message.channel_id }})
+ {{ message.timestamp }} (Channel ID-Message ID: {{ message.channel_id }}-{{ message.id }})
</span>
</div>
<div class="discord-message-content">
diff --git a/pydis_site/urls.py b/pydis_site/urls.py
index 6cd31f26..0f2f6aeb 100644
--- a/pydis_site/urls.py
+++ b/pydis_site/urls.py
@@ -12,7 +12,7 @@ NON_STATIC_PATTERNS = [
path('pydis-api/', include('pydis_site.apps.api.urls', namespace='internal_api')),
path('', include('django_prometheus.urls')),
-] if not settings.env("STATIC_BUILD") else []
+] if not settings.STATIC_BUILD else []
urlpatterns = (
@@ -29,7 +29,7 @@ urlpatterns = (
)
-if not settings.env("STATIC_BUILD"):
+if not settings.STATIC_BUILD:
urlpatterns += (
path('staff/', include('pydis_site.apps.staff.urls', namespace='staff')),
)