about summary refs log tree commit diff stats
path: root/pydis_site/apps/main/views/home.py
diff options
context:
space:
mode:
author    Leon Sandøy <[email protected]>  2019-04-17 23:57:14 +0200
committer Leon Sandøy <[email protected]>  2019-04-17 23:57:14 +0200
commit  cf1fc2870326c431d0c4dc3fe46f2ae41c2fd5aa (patch)
tree    f26b61c866912c7007260f2d946ebbb3e5454585 /pydis_site/apps/main/views/home.py
parentmakemigrations pipeline created, RepoData model created, and a really naive i... (diff)
The GitHub API stuff is done now, including data caching. Just a couple of minor bugfixes left before this is PR-ready.
Diffstat (limited to 'pydis_site/apps/main/views/home.py')
-rw-r--r--  pydis_site/apps/main/views/home.py | 119
1 file changed, 78 insertions(+), 41 deletions(-)
diff --git a/pydis_site/apps/main/views/home.py b/pydis_site/apps/main/views/home.py
index 883177bb..f3f9d726 100644
--- a/pydis_site/apps/main/views/home.py
+++ b/pydis_site/apps/main/views/home.py
@@ -3,67 +3,104 @@ import requests
from django.shortcuts import render
from django.utils import timezone
from django.views import View
+from django.conf import settings
from pydis_site.apps.main.models import RepoData
-GITHUB_API = "https://api.github.com/repos"
-REPOS = [
- "python-discord/site",
- "python-discord/bot",
- "python-discord/snekbox",
- "python-discord/seasonalbot",
- "python-discord/django-simple-bulma",
- "python-discord/django-crispy-bulma",
-]
-
-# https://api.github.com/users/python-discord/repos gets all the data in one query.
+GITHUB_API = "https://api.github.com/users/python-discord/repos"
class Home(View):
- def _get_repo_data(self, repo_name):
- """This will get language, stars and forks for the requested GitHub repo."""
- # Try to get the data from the cache
+ def _get_api_data(self):
+ """Call the GitHub API and get information about our repos."""
+ repo_dict = {repo_name: {} for repo_name in settings.HOMEPAGE_REPOS}
+
+ # Fetch the data from the GitHub API
+ api_data = requests.get(GITHUB_API)
+ api_data = api_data.json()
+
+ # Process the API data into our dict
+ print(f"repo_dict = {repo_dict}")
+ for repo in api_data:
+ full_name = repo["full_name"]
+
+ if full_name in settings.HOMEPAGE_REPOS:
+ repo_dict[full_name] = {
+ "full_name": repo["full_name"],
+ "description": repo["description"],
+ "language": repo["language"],
+ "forks_count": repo["forks_count"],
+ "stargazers_count": repo["stargazers_count"],
+ }
+ print(f"repo_dict after processing = {repo_dict}")
+ return repo_dict
+
+ def _get_repo_data(self):
+ """Build a list of RepoData objects that we can use to populate the front page."""
+
+ # Try to get site data from the cache
try:
- repo_data = RepoData.objects.get(repo_name=repo_name)
+ repo_data = RepoData.objects.get(repo_name="python-discord/site")
- # If the data is older than 2 minutes, we should refresh it
+ # If the data is older than 2 minutes, we should refresh it. THIS PROBABLY ALWAYS FAILS?
if (timezone.now() - repo_data.last_updated).seconds > 120:
- # Fetch the data from the GitHub API
- api_data = requests.get(f"{GITHUB_API}/{repo_name}")
- api_data = api_data.json()
+ diff = (timezone.now() - repo_data.last_updated).seconds
+ print(f"okay baby, it's old! the seconds difference comes to: {diff}")
- # Update the current object, and save it.
- repo_data.description = api_data["description"]
- repo_data.language = api_data["language"]
- repo_data.forks = api_data["forks_count"]
- repo_data.stargazers = api_data["stargazers_count"]
- repo_data.save()
- return repo_data
+ # Get new data from API
+ api_data_container = self._get_api_data()
+ repo_data_container = []
+
+ # Update or create all RepoData objects in settings.HOMEPAGE_REPOS
+ for repo_name, api_data in api_data_container.items():
+ try:
+ repo_data = RepoData.objects.get(repo_name=repo_name)
+ repo_data.description = api_data["description"]
+ repo_data.language = api_data["language"]
+ repo_data.forks = api_data["forks_count"]
+ repo_data.stargazers = api_data["stargazers_count"]
+ except RepoData.DoesNotExist:
+ repo_data = RepoData(
+ repo_name=api_data["full_name"],
+ description=api_data["description"],
+ forks=api_data["forks_count"],
+ stargazers=api_data["stargazers_count"],
+ language=api_data["language"],
+ )
+ repo_data.save()
+ repo_data_container.append(repo_data)
+ return repo_data_container
# Otherwise, if the data is fresher than 2 minutes old, we should just return it.
else:
- return repo_data
+ return list(RepoData.objects.all())
- # If this is raised, the data isn't there at all, so we'll need to create it.
+ # If this is raised, the database has no repodata at all, we will create them all.
except RepoData.DoesNotExist:
- api_data = requests.get(f"{GITHUB_API}/{repo_name}")
- api_data = api_data.json()
- repo_data = RepoData(
- description=api_data["description"],
- forks=api_data["forks_count"],
- stargazers=api_data["stargazers_count"],
- language=api_data["language"],
- )
- repo_data.save()
- return repo_data
+
+ # Get new data from API
+ api_data_container = self._get_api_data()
+ repo_data_container = []
+
+ # Create all the repodata records in the database.
+ for repo_name, api_data in api_data_container.items():
+ repo_data = RepoData(
+ repo_name=api_data["full_name"],
+ description=api_data["description"],
+ forks=api_data["forks_count"],
+ stargazers=api_data["stargazers_count"],
+ language=api_data["language"],
+ )
+ repo_data.save()
+ repo_data_container.append(repo_data)
+
+ return repo_data_container
def get(self, request):
# Collect the repo data
- repo_data = []
- for repo in REPOS:
- repo_data.append(self._get_repo_data(repo))
+ repo_data = self._get_repo_data()
# Call the GitHub API and ask it for some data
return render(request, "home/index.html", {"repo_data": repo_data})