Diffstat (limited to 'pydis_site/apps')
-rw-r--r--  pydis_site/apps/api/models/bot/metricity.py | 53
-rw-r--r--  pydis_site/apps/api/tests/test_infractions.py | 30
-rw-r--r--  pydis_site/apps/api/tests/test_users.py | 16
-rw-r--r--  pydis_site/apps/api/viewsets/bot/infraction.py | 16
-rw-r--r--  pydis_site/apps/api/viewsets/bot/user.py | 7
-rw-r--r--  pydis_site/apps/home/migrations/0002_auto_now_on_repository_metadata.py | 18
-rw-r--r--  pydis_site/apps/home/models/repository_metadata.py | 15
-rw-r--r--  pydis_site/apps/home/tests/mock_github_api_response.json | 2
-rw-r--r--  pydis_site/apps/home/tests/test_repodata_helpers.py | 42
-rw-r--r--  pydis_site/apps/home/views/home.py | 155
10 files changed, 254 insertions, 100 deletions
diff --git a/pydis_site/apps/api/models/bot/metricity.py b/pydis_site/apps/api/models/bot/metricity.py
index 25b42fa2..cae630f1 100644
--- a/pydis_site/apps/api/models/bot/metricity.py
+++ b/pydis_site/apps/api/models/bot/metricity.py
@@ -1,5 +1,12 @@
from django.db import connections
+BLOCK_INTERVAL = 10 * 60 # 10 minute blocks
+
+EXCLUDE_CHANNELS = [
+ "267659945086812160", # Bot commands
+ "607247579608121354" # SeasonalBot commands
+]
+
class NotFound(Exception):
"""Raised when an entity cannot be found."""
@@ -21,7 +28,8 @@ class Metricity:
def user(self, user_id: str) -> dict:
"""Query a user's data."""
- columns = ["verified_at"]
+ # TODO: Swap this back to some sort of verified at date
+ columns = ["joined_at"]
query = f"SELECT {','.join(columns)} FROM users WHERE id = '%s'"
self.cursor.execute(query, [user_id])
values = self.cursor.fetchone()
@@ -33,7 +41,48 @@ class Metricity:
def total_messages(self, user_id: str) -> int:
"""Query total number of messages for a user."""
- self.cursor.execute("SELECT COUNT(*) FROM messages WHERE author_id = '%s'", [user_id])
+ self.cursor.execute(
+ """
+ SELECT
+ COUNT(*)
+ FROM messages
+ WHERE
+ author_id = '%s'
+ AND NOT is_deleted
+ AND NOT %s::varchar[] @> ARRAY[channel_id]
+ """,
+ [user_id, EXCLUDE_CHANNELS]
+ )
+ values = self.cursor.fetchone()
+
+ if not values:
+ raise NotFound()
+
+ return values[0]
+
+ def total_message_blocks(self, user_id: str) -> int:
+ """
+ Query number of 10 minute blocks during which the user has been active.
+
+        This metric prevents users from reaching the total-message threshold simply by spamming.
+ """
+ self.cursor.execute(
+ """
+ SELECT
+ COUNT(*)
+ FROM (
+ SELECT
+ (floor((extract('epoch' from created_at) / %s )) * %s) AS interval
+ FROM messages
+ WHERE
+ author_id='%s'
+ AND NOT is_deleted
+ AND NOT %s::varchar[] @> ARRAY[channel_id]
+ GROUP BY interval
+ ) block_query;
+ """,
+ [BLOCK_INTERVAL, BLOCK_INTERVAL, user_id, EXCLUDE_CHANNELS]
+ )
values = self.cursor.fetchone()
if not values:
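
As a rough illustration of the block query above (a sketch only, not part of the patch; it assumes the message timestamps have already been fetched into Python), the bucketing boils down to flooring each timestamp to a 10-minute boundary and counting the distinct buckets:

    from datetime import datetime, timezone
    from typing import List

    BLOCK_INTERVAL = 10 * 60  # seconds, mirroring the constant added above

    def activity_blocks(created_ats: List[datetime]) -> int:
        """Count the distinct 10-minute blocks that contain at least one message."""
        return len({int(ts.timestamp() // BLOCK_INTERVAL) for ts in created_ats})

    # Three messages inside the same 10-minute window still count as a single block.
    msgs = [datetime(2020, 1, 1, 12, m, tzinfo=timezone.utc) for m in (0, 4, 9)]
    assert activity_blocks(msgs) == 1
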
diff --git a/pydis_site/apps/api/tests/test_infractions.py b/pydis_site/apps/api/tests/test_infractions.py
index 93ef8171..82b497aa 100644
--- a/pydis_site/apps/api/tests/test_infractions.py
+++ b/pydis_site/apps/api/tests/test_infractions.py
@@ -512,6 +512,36 @@ class CreationTests(APISubdomainTestCase):
)
+class InfractionDeletionTests(APISubdomainTestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.user = User.objects.create(
+ id=9876,
+ name='Unknown user',
+ discriminator=9876,
+ )
+
+ cls.warning = Infraction.objects.create(
+ user_id=cls.user.id,
+ actor_id=cls.user.id,
+ type='warning',
+ active=False
+ )
+
+ def test_delete_unknown_infraction_returns_404(self):
+ url = reverse('bot:infraction-detail', args=('something',), host='api')
+ response = self.client.delete(url)
+
+ self.assertEqual(response.status_code, 404)
+
+ def test_delete_known_infraction_returns_204(self):
+ url = reverse('bot:infraction-detail', args=(self.warning.id,), host='api')
+ response = self.client.delete(url)
+
+ self.assertEqual(response.status_code, 204)
+ self.assertRaises(Infraction.DoesNotExist, Infraction.objects.get, id=self.warning.id)
+
+
class ExpandedTests(APISubdomainTestCase):
@classmethod
def setUpTestData(cls):
diff --git a/pydis_site/apps/api/tests/test_users.py b/pydis_site/apps/api/tests/test_users.py
index 72ffcb3c..69bbfefc 100644
--- a/pydis_site/apps/api/tests/test_users.py
+++ b/pydis_site/apps/api/tests/test_users.py
@@ -407,9 +407,10 @@ class UserMetricityTests(APISubdomainTestCase):
def test_get_metricity_data(self):
# Given
- verified_at = "foo"
+ joined_at = "foo"
total_messages = 1
- self.mock_metricity_user(verified_at, total_messages)
+ total_blocks = 1
+ self.mock_metricity_user(joined_at, total_messages, total_blocks)
# When
url = reverse('bot:user-metricity-data', args=[0], host='api')
@@ -418,9 +419,10 @@ class UserMetricityTests(APISubdomainTestCase):
# Then
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), {
- "verified_at": verified_at,
+ "joined_at": joined_at,
"total_messages": total_messages,
"voice_banned": False,
+ "activity_blocks": total_blocks
})
def test_no_metricity_user(self):
@@ -440,7 +442,7 @@ class UserMetricityTests(APISubdomainTestCase):
{'exception': ObjectDoesNotExist, 'voice_banned': False},
]
- self.mock_metricity_user("foo", 1)
+ self.mock_metricity_user("foo", 1, 1)
for case in cases:
with self.subTest(exception=case['exception'], voice_banned=case['voice_banned']):
@@ -453,13 +455,14 @@ class UserMetricityTests(APISubdomainTestCase):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()["voice_banned"], case["voice_banned"])
- def mock_metricity_user(self, verified_at, total_messages):
+ def mock_metricity_user(self, joined_at, total_messages, total_blocks):
patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity")
self.metricity = patcher.start()
self.addCleanup(patcher.stop)
self.metricity = self.metricity.return_value.__enter__.return_value
- self.metricity.user.return_value = dict(verified_at=verified_at)
+ self.metricity.user.return_value = dict(joined_at=joined_at)
self.metricity.total_messages.return_value = total_messages
+ self.metricity.total_message_blocks.return_value = total_blocks
def mock_no_metricity_user(self):
patcher = patch("pydis_site.apps.api.viewsets.bot.user.Metricity")
@@ -468,3 +471,4 @@ class UserMetricityTests(APISubdomainTestCase):
self.metricity = self.metricity.return_value.__enter__.return_value
self.metricity.user.side_effect = NotFound()
self.metricity.total_messages.side_effect = NotFound()
+ self.metricity.total_message_blocks.side_effect = NotFound()
diff --git a/pydis_site/apps/api/viewsets/bot/infraction.py b/pydis_site/apps/api/viewsets/bot/infraction.py
index edec0a1e..423e806e 100644
--- a/pydis_site/apps/api/viewsets/bot/infraction.py
+++ b/pydis_site/apps/api/viewsets/bot/infraction.py
@@ -5,6 +5,7 @@ from rest_framework.exceptions import ValidationError
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.mixins import (
CreateModelMixin,
+ DestroyModelMixin,
ListModelMixin,
RetrieveModelMixin
)
@@ -18,7 +19,13 @@ from pydis_site.apps.api.serializers import (
)
-class InfractionViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet):
+class InfractionViewSet(
+ CreateModelMixin,
+ RetrieveModelMixin,
+ ListModelMixin,
+ GenericViewSet,
+ DestroyModelMixin
+):
"""
View providing CRUD operations on infractions for Discord users.
@@ -108,6 +115,13 @@ class InfractionViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, Ge
- 400: if a field in the request body is invalid or disallowed
- 404: if an infraction with the given `id` could not be found
+ ### DELETE /bot/infractions/<id:int>
+ Delete the infraction with the given `id`.
+
+ #### Status codes
+ - 204: returned on success
+    - 404: if an infraction with the given `id` does not exist
+
### Expanded routes
All routes support expansion of `user` and `actor` in responses. To use an expanded route,
append `/expanded` to the end of the route e.g. `GET /bot/infractions/expanded`.
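
For illustration only (the host name and token header below are assumptions, not taken from this diff), the new DELETE route behaves like any other DRF detail route:

    import requests

    # Hypothetical deployment URL and credentials; substitute the real API host and token.
    response = requests.delete(
        "https://api.example.org/bot/infractions/42",
        headers={"Authorization": "Token <api-token>"},
    )
    print(response.status_code)  # 204 if the infraction existed and was deleted, 404 otherwise
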
diff --git a/pydis_site/apps/api/viewsets/bot/user.py b/pydis_site/apps/api/viewsets/bot/user.py
index 5205dc97..829e2694 100644
--- a/pydis_site/apps/api/viewsets/bot/user.py
+++ b/pydis_site/apps/api/viewsets/bot/user.py
@@ -109,8 +109,10 @@ class UserViewSet(ModelViewSet):
#### Response format
>>> {
- ... "verified_at": "2020-10-06T21:54:23.540766",
- ... "total_messages": 2
+ ... "joined_at": "2020-10-06T21:54:23.540766",
+ ... "total_messages": 2,
+ ... "voice_banned": False,
+ ... "activity_blocks": 1
...}
#### Status codes
@@ -255,6 +257,7 @@ class UserViewSet(ModelViewSet):
data = metricity.user(user.id)
data["total_messages"] = metricity.total_messages(user.id)
data["voice_banned"] = voice_banned
+ data["activity_blocks"] = metricity.total_message_blocks(user.id)
return Response(data, status=status.HTTP_200_OK)
except NotFound:
return Response(dict(detail="User not found in metricity"),
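
Likewise, a minimal sketch of a client reading the updated metricity payload (the URL below is an assumption based on the `bot:user-metricity-data` route name used in the tests; host and auth are placeholders):

    import requests

    data = requests.get(
        "https://api.example.org/bot/users/123456789012345678/metricity_data",
        headers={"Authorization": "Token <api-token>"},
    ).json()

    # joined_at and activity_blocks are the fields introduced by this change.
    print(data["joined_at"], data["total_messages"], data["activity_blocks"], data["voice_banned"])
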
diff --git a/pydis_site/apps/home/migrations/0002_auto_now_on_repository_metadata.py b/pydis_site/apps/home/migrations/0002_auto_now_on_repository_metadata.py
new file mode 100644
index 00000000..7e78045b
--- /dev/null
+++ b/pydis_site/apps/home/migrations/0002_auto_now_on_repository_metadata.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.0.11 on 2020-12-21 22:57
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('home', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='repositorymetadata',
+ name='last_updated',
+ field=models.DateTimeField(auto_now=True, help_text='The date and time this data was last fetched.'),
+ ),
+ ]
diff --git a/pydis_site/apps/home/models/repository_metadata.py b/pydis_site/apps/home/models/repository_metadata.py
index 92d2404d..00a83cd7 100644
--- a/pydis_site/apps/home/models/repository_metadata.py
+++ b/pydis_site/apps/home/models/repository_metadata.py
@@ -1,32 +1,31 @@
from django.db import models
-from django.utils import timezone
class RepositoryMetadata(models.Model):
"""Information about one of our repos fetched from the GitHub API."""
last_updated = models.DateTimeField(
- default=timezone.now,
- help_text="The date and time this data was last fetched."
+ help_text="The date and time this data was last fetched.",
+ auto_now=True,
)
repo_name = models.CharField(
primary_key=True,
max_length=40,
- help_text="The full name of the repo, e.g. python-discord/site"
+ help_text="The full name of the repo, e.g. python-discord/site",
)
description = models.CharField(
max_length=400,
- help_text="The description of the repo."
+ help_text="The description of the repo.",
)
forks = models.IntegerField(
- help_text="The number of forks of this repo"
+ help_text="The number of forks of this repo",
)
stargazers = models.IntegerField(
- help_text="The number of stargazers for this repo"
+ help_text="The number of stargazers for this repo",
)
language = models.CharField(
max_length=20,
- help_text="The primary programming language used for this repo."
+ help_text="The primary programming language used for this repo.",
)
def __str__(self):
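
A brief aside on the auto_now switch above (sketch only, not part of the patch): Django writes an auto_now field on every save(), overriding any value assigned in Python, so last_updated now tracks the most recent refresh automatically:

    from pydis_site.apps.home.models import RepositoryMetadata

    repo = RepositoryMetadata.objects.get(repo_name="python-discord/site")
    repo.stargazers += 1
    repo.save()  # auto_now overwrites last_updated with the current time on every save
    print(repo.last_updated)
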
diff --git a/pydis_site/apps/home/tests/mock_github_api_response.json b/pydis_site/apps/home/tests/mock_github_api_response.json
index 10be4f99..ddbffed8 100644
--- a/pydis_site/apps/home/tests/mock_github_api_response.json
+++ b/pydis_site/apps/home/tests/mock_github_api_response.json
@@ -35,7 +35,7 @@
"forks_count": 31
},
{
- "full_name": "python-discord/seasonalbot",
+ "full_name": "python-discord/sir-lancebot",
"description": "test",
"stargazers_count": 97,
"language": "Python",
diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py
index 77b1a68d..5634bc9b 100644
--- a/pydis_site/apps/home/tests/test_repodata_helpers.py
+++ b/pydis_site/apps/home/tests/test_repodata_helpers.py
@@ -123,10 +123,38 @@ class TestRepositoryMetadataHelpers(TestCase):
mock_get.return_value.json.return_value = ['garbage']
metadata = self.home_view._get_repo_data()
- self.assertEquals(len(metadata), len(self.home_view.repos))
- for item in metadata:
- with self.subTest(item=item):
- self.assertEqual(item.description, "Not available.")
- self.assertEqual(item.forks, 999)
- self.assertEqual(item.stargazers, 999)
- self.assertEqual(item.language, "Python")
+ self.assertEquals(len(metadata), 0)
+
+ def test_cleans_up_stale_metadata(self):
+ """Tests that we clean up stale metadata when we start the HomeView."""
+ repo_data = RepositoryMetadata(
+ repo_name="python-discord/INVALID",
+ description="testrepo",
+ forks=42,
+ stargazers=42,
+ language="English",
+ last_updated=timezone.now() - timedelta(seconds=HomeView.repository_cache_ttl + 1),
+ )
+ repo_data.save()
+ self.home_view.__init__()
+ cached_repos = RepositoryMetadata.objects.all()
+ cached_names = [repo.repo_name for repo in cached_repos]
+
+ self.assertNotIn("python-discord/INVALID", cached_names)
+
+ def test_dont_clean_up_unstale_metadata(self):
+ """Tests that we don't clean up good metadata when we start the HomeView."""
+ repo_data = RepositoryMetadata(
+ repo_name="python-discord/site",
+ description="testrepo",
+ forks=42,
+ stargazers=42,
+ language="English",
+ last_updated=timezone.now() - timedelta(seconds=HomeView.repository_cache_ttl + 1),
+ )
+ repo_data.save()
+ self.home_view.__init__()
+ cached_repos = RepositoryMetadata.objects.all()
+ cached_names = [repo.repo_name for repo in cached_repos]
+
+ self.assertIn("python-discord/site", cached_names)
diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py
index 09969f1d..e77772fb 100644
--- a/pydis_site/apps/home/views/home.py
+++ b/pydis_site/apps/home/views/home.py
@@ -1,4 +1,4 @@
-import datetime
+import logging
from typing import Dict, List
import requests
@@ -10,11 +10,13 @@ from django.views import View
from pydis_site.apps.home.models import RepositoryMetadata
+log = logging.getLogger(__name__)
+
class HomeView(View):
"""The main landing page for the website."""
- github_api = "https://api.github.com/users/python-discord/repos"
+ github_api = "https://api.github.com/users/python-discord/repos?per_page=100"
repository_cache_ttl = 3600
# Which of our GitHub repos should be displayed on the front page, and in which order?
@@ -22,82 +24,98 @@ class HomeView(View):
"python-discord/site",
"python-discord/bot",
"python-discord/snekbox",
- "python-discord/seasonalbot",
+ "python-discord/sir-lancebot",
"python-discord/metricity",
"python-discord/django-simple-bulma",
]
+ def __init__(self):
+ """Clean up stale RepositoryMetadata."""
+ RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()
+
def _get_api_data(self) -> Dict[str, Dict[str, str]]:
- """Call the GitHub API and get information about our repos."""
- repo_dict: Dict[str, dict] = {repo_name: {} for repo_name in self.repos}
+ """
+ Call the GitHub API and get information about our repos.
+
+ If we're unable to get that info for any reason, return an empty dict.
+ """
+ repo_dict = {}
# Fetch the data from the GitHub API
api_data: List[dict] = requests.get(self.github_api).json()
# Process the API data into our dict
for repo in api_data:
- full_name = repo["full_name"]
-
- if full_name in self.repos:
- repo_dict[full_name] = {
- "full_name": repo["full_name"],
- "description": repo["description"],
- "language": repo["language"],
- "forks_count": repo["forks_count"],
- "stargazers_count": repo["stargazers_count"],
- }
+ try:
+ full_name = repo["full_name"]
+
+ if full_name in self.repos:
+ repo_dict[full_name] = {
+ "full_name": repo["full_name"],
+ "description": repo["description"],
+ "language": repo["language"],
+ "forks_count": repo["forks_count"],
+ "stargazers_count": repo["stargazers_count"],
+ }
+ # Something is not right about the API data we got back from GitHub.
+ except (TypeError, ConnectionError, KeyError) as e:
+ log.error(
+ "Unable to parse the GitHub repository metadata from response!",
+ extra={
+ 'api_data': api_data,
+ 'error': e
+ }
+ )
+ continue
return repo_dict
def _get_repo_data(self) -> List[RepositoryMetadata]:
"""Build a list of RepositoryMetadata objects that we can use to populate the front page."""
- # Try to get site data from the cache
- try:
- repo_data = RepositoryMetadata.objects.get(repo_name="python-discord/site")
+ database_repositories = []
- # If the data is stale, we should refresh it.
- if (timezone.now() - repo_data.last_updated).seconds > self.repository_cache_ttl:
+ # First, let's see if we have any metadata cached.
+ cached_data = RepositoryMetadata.objects.all()
- # Try to get new data from the API. If it fails, return the cached data.
- try:
- api_repositories = self._get_api_data()
- except (TypeError, ConnectionError):
- return RepositoryMetadata.objects.all()
- database_repositories = []
-
- # Update or create all RepoData objects in self.repos
- for repo_name, api_data in api_repositories.items():
- try:
- repo_data = RepositoryMetadata.objects.get(repo_name=repo_name)
- repo_data.description = api_data["description"]
- repo_data.language = api_data["language"]
- repo_data.forks = api_data["forks_count"]
- repo_data.stargazers = api_data["stargazers_count"]
- except RepositoryMetadata.DoesNotExist:
- repo_data = RepositoryMetadata(
- repo_name=api_data["full_name"],
- description=api_data["description"],
- forks=api_data["forks_count"],
- stargazers=api_data["stargazers_count"],
- language=api_data["language"],
- )
- repo_data.save()
- database_repositories.append(repo_data)
- return database_repositories
-
- # Otherwise, if the data is fresher than 2 minutes old, we should just return it.
- else:
- return RepositoryMetadata.objects.all()
+ # If we don't, we have to create some!
+ if not cached_data:
- # If this is raised, the database has no repodata at all, we will create them all.
- except RepositoryMetadata.DoesNotExist:
- database_repositories = []
- try:
- # Get new data from API
- api_repositories = self._get_api_data()
+ # Try to get new data from the API. If it fails, we'll return an empty list.
+ # In this case, we simply don't display our projects on the site.
+ api_repositories = self._get_api_data()
+
+ # Create all the repodata records in the database.
+ for api_data in api_repositories.values():
+ repo_data = RepositoryMetadata(
+ repo_name=api_data["full_name"],
+ description=api_data["description"],
+ forks=api_data["forks_count"],
+ stargazers=api_data["stargazers_count"],
+ language=api_data["language"],
+ )
+
+ repo_data.save()
+ database_repositories.append(repo_data)
- # Create all the repodata records in the database.
- for api_data in api_repositories.values():
+ return database_repositories
+
+ # If the data is stale, we should refresh it.
+ if (timezone.now() - cached_data[0].last_updated).seconds > self.repository_cache_ttl:
+ # Try to get new data from the API. If it fails, return the cached data.
+ api_repositories = self._get_api_data()
+
+ if not api_repositories:
+ return RepositoryMetadata.objects.all()
+
+ # Update or create all RepoData objects in self.repos
+ for repo_name, api_data in api_repositories.items():
+ try:
+ repo_data = RepositoryMetadata.objects.get(repo_name=repo_name)
+ repo_data.description = api_data["description"]
+ repo_data.language = api_data["language"]
+ repo_data.forks = api_data["forks_count"]
+ repo_data.stargazers = api_data["stargazers_count"]
+ except RepositoryMetadata.DoesNotExist:
repo_data = RepositoryMetadata(
repo_name=api_data["full_name"],
description=api_data["description"],
@@ -105,23 +123,14 @@ class HomeView(View):
stargazers=api_data["stargazers_count"],
language=api_data["language"],
)
- repo_data.save()
- database_repositories.append(repo_data)
- except TypeError:
- for repo_name in self.repos:
- repo_data = RepositoryMetadata(
- last_updated=timezone.now() - datetime.timedelta(minutes=50),
- repo_name=repo_name,
- description="Not available.",
- forks=999,
- stargazers=999,
- language="Python",
- )
- repo_data.save()
- database_repositories.append(repo_data)
-
+ repo_data.save()
+ database_repositories.append(repo_data)
return database_repositories
+        # Otherwise, the cached data is still within the TTL, so just return it.
+ else:
+ return RepositoryMetadata.objects.all()
+
def get(self, request: WSGIRequest) -> HttpResponse:
"""Collect repo data and render the homepage view."""
repo_data = self._get_repo_data()