aboutsummaryrefslogtreecommitdiffstats
path: root/pydis_site/apps
diff options
context:
space:
mode:
authorGravatar Leon Sandøy <[email protected]>2019-04-16 17:58:29 +0200
committerGravatar Leon Sandøy <[email protected]>2019-04-16 17:58:29 +0200
commit1583b86a6215c879a667f304512d4708c77e9f82 (patch)
tree6e7c87375b3ce291267667be453eaedaab5ccb39 /pydis_site/apps
parentmerging in changes from the main branch (diff)
makemigrations pipeline created, RepoData model created, and a really naive implementation of the API stuff which makes 6 requests and takes forever to load. I've found a better approach so I'm gonna switch to that next.
Diffstat (limited to 'pydis_site/apps')
-rw-r--r--pydis_site/apps/main/models/__init__.py3
-rw-r--r--pydis_site/apps/main/models/repo_data.py33
-rw-r--r--pydis_site/apps/main/views/home.py94
3 files changed, 94 insertions, 36 deletions
diff --git a/pydis_site/apps/main/models/__init__.py b/pydis_site/apps/main/models/__init__.py
new file mode 100644
index 00000000..7a2cbb0b
--- /dev/null
+++ b/pydis_site/apps/main/models/__init__.py
@@ -0,0 +1,3 @@
+from .repo_data import RepoData
+
+__all__ = ["RepoData"]
diff --git a/pydis_site/apps/main/models/repo_data.py b/pydis_site/apps/main/models/repo_data.py
new file mode 100644
index 00000000..40540410
--- /dev/null
+++ b/pydis_site/apps/main/models/repo_data.py
@@ -0,0 +1,33 @@
+from django.db import models
+from django.utils import timezone
+
+
class RepoData(models.Model):
    """Cached metadata about one of our repos, fetched from the GitHub API."""

    # When this row was last refreshed; callers compare it against the
    # current time to decide whether the cached data is stale.
    last_updated = models.DateTimeField(
        default=timezone.now,
        help_text="The date and time this data was last fetched."
    )
    repo_name = models.CharField(
        primary_key=True,
        max_length=40,
        help_text="The full name of the repo, e.g. python-discord/site"
    )
    # GitHub returns null for repos without a description; allow NULL here
    # so saving such a repo does not violate the NOT NULL constraint.
    description = models.CharField(
        max_length=400,
        null=True,
        help_text="The description of the repo."
    )
    forks = models.IntegerField(
        help_text="The number of forks of this repo"
    )
    stargazers = models.IntegerField(
        help_text="The number of stargazers for this repo"
    )
    # "language" is null for repos with no detected source code.
    language = models.CharField(
        max_length=20,
        null=True,
        help_text="The primary programming language used for this repo."
    )

    def __str__(self):
        """Use the repo's full name as the human-readable representation."""
        return self.repo_name
diff --git a/pydis_site/apps/main/views/home.py b/pydis_site/apps/main/views/home.py
index 8f45b912..883177bb 100644
--- a/pydis_site/apps/main/views/home.py
+++ b/pydis_site/apps/main/views/home.py
@@ -1,47 +1,69 @@
import requests
+
from django.shortcuts import render
+from django.utils import timezone
from django.views import View
+from pydis_site.apps.main.models import RepoData
# Base URL for single-repository lookups on the GitHub REST API.
GITHUB_API = "https://api.github.com/repos"

# The repositories shown on the home page, as GitHub "owner/repo" full names.
REPOS = [
    "python-discord/site",
    "python-discord/bot",
    "python-discord/snekbox",
    "python-discord/seasonalbot",
    "python-discord/django-simple-bulma",
    "python-discord/django-crispy-bulma",
]
-
-class Home(View):
-
- projects = [
- "site",
- "bot",
- "snekbox",
- "seasonalbot",
- "django-simple-bulma",
- "django-crispy-bulma",
- ]
-
- def _get_repo_data(self):
- """
- This will get language, stars and forks for the projects listed in Home.projects.
-
- Returns a dictionary with the data, in a template-friendly manner. The rate limit for
- this particular endpoint is 30 requests per minute. This should be plenty for now,
- but if we ever run into rate limiting issues, we should implement some form of caching
- for this data.
- """
-
- # Gotta authenticate, or we get terrible rate limits.
-
- # We need to query the Search API https://developer.github.com/v3/search/, using a single
- # query to query for all of the projects at the same time, and making sure we cache that data
- # and make the request no more often than once per minute or something reasonable
- # like that.
-
- endpoint = "https://api.github.com/search/repositories?q=" + "repo+name+separated+by+pluses"
-
- # And finally
-
-
-
# NOTE: https://api.github.com/users/python-discord/repos returns all of this
# data in a single request — switching to it would cut the per-page API calls
# from len(REPOS) down to one.
class Home(View):
    """Renders the home page, with GitHub repository data cached in the DB."""

    # How long cached repo data stays fresh, in seconds.
    CACHE_TTL = 120

    def _fetch_api_data(self, repo_name):
        """Fetch fresh metadata for `repo_name` from the GitHub API as a dict."""
        # Timeout so a slow/unreachable GitHub can't hang the whole page render.
        response = requests.get(f"{GITHUB_API}/{repo_name}", timeout=10)
        return response.json()

    def _get_repo_data(self, repo_name):
        """
        Return a RepoData row for `repo_name`, refreshing from GitHub if stale.

        Language, stars, forks and description are cached in the database and
        only re-fetched once the row is older than CACHE_TTL seconds.
        """
        try:
            repo_data = RepoData.objects.get(repo_name=repo_name)
        except RepoData.DoesNotExist:
            # No cached row yet — fetch from the API and create one.
            api_data = self._fetch_api_data(repo_name)
            repo_data = RepoData(
                # BUGFIX: the primary key was previously never set, so the row
                # saved under an empty key and the cache lookup never hit.
                repo_name=repo_name,
                description=api_data["description"],
                forks=api_data["forks_count"],
                stargazers=api_data["stargazers_count"],
                language=api_data["language"],
            )
            repo_data.save()
            return repo_data

        # BUGFIX: use total_seconds() — timedelta.seconds wraps every 24 hours,
        # so day-old data would have looked fresh again with `.seconds`.
        age = (timezone.now() - repo_data.last_updated).total_seconds()
        if age > self.CACHE_TTL:
            # Stale: refresh the row from the GitHub API.
            api_data = self._fetch_api_data(repo_name)
            repo_data.description = api_data["description"]
            repo_data.language = api_data["language"]
            repo_data.forks = api_data["forks_count"]
            repo_data.stargazers = api_data["stargazers_count"]
            # BUGFIX: reset the timestamp — it was previously left untouched,
            # so once the first TTL expired every request re-hit the API.
            repo_data.last_updated = timezone.now()
            repo_data.save()

        return repo_data

    def get(self, request):
        """Render the home page with (possibly cached) data for every repo."""
        repo_data = [self._get_repo_data(repo) for repo in REPOS]
        return render(request, "home/index.html", {"repo_data": repo_data})