author      2021-09-05 07:43:44 -0400
committer   2021-09-05 07:43:44 -0400
commit      12c09e08ca50a271372adeaf0cb21d6c4ca9ccea (patch)
tree        1ac76fddbc476001133aac2f55e414269de47744 /pydis_site/apps/home
parent      Add `tutorial` and `video` tags for RealPython. (diff)
parent      Merge pull request #581 from python-discord/Pin-platform-in-Dockerfile (diff)
Merge with main, resolve conflicts.
Diffstat (limited to 'pydis_site/apps/home')
-rw-r--r--   pydis_site/apps/home/urls.py       |  1
-rw-r--r--   pydis_site/apps/home/views/home.py | 80
2 files changed, 46 insertions, 35 deletions
diff --git a/pydis_site/apps/home/urls.py b/pydis_site/apps/home/urls.py
index 1e2af8f3..bb77220b 100644
--- a/pydis_site/apps/home/urls.py
+++ b/pydis_site/apps/home/urls.py
@@ -7,6 +7,7 @@ app_name = 'home'
 urlpatterns = [
     path('', HomeView.as_view(), name='home'),
     path('', include('pydis_site.apps.redirect.urls')),
+    path('', include('django_prometheus.urls')),
     path('admin/', admin.site.urls),
     path('resources/', include('pydis_site.apps.resources.urls')),
     path('pages/', include('pydis_site.apps.content.urls')),
diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py
index b3767d37..bbb4b815 100644
--- a/pydis_site/apps/home/views/home.py
+++ b/pydis_site/apps/home/views/home.py
@@ -9,7 +9,7 @@ from django.utils import timezone
 from django.views import View

 from pydis_site.apps.home.models import RepositoryMetadata
-from pydis_site.constants import GITHUB_TOKEN
+from pydis_site.constants import GITHUB_TOKEN, TIMEOUT_PERIOD

 log = logging.getLogger(__name__)

@@ -19,7 +19,6 @@ class HomeView(View):

     github_api = "https://api.github.com/users/python-discord/repos?per_page=100"
     repository_cache_ttl = 3600
-    headers = {"Authorization": f"token {GITHUB_TOKEN}"}

     # Which of our GitHub repos should be displayed on the front page, and in which order?
     repos = [
@@ -35,6 +34,16 @@ class HomeView(View):
         """Clean up stale RepositoryMetadata."""
         RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()

+        # If no token is defined (for example in local development), then
+        # it does not make sense to pass the Authorization header. More
+        # specifically, GitHub will reject any requests from us due to the
+        # invalid header. We can make a limited number of anonymous requests
+        # though, which is useful for testing.
+        if GITHUB_TOKEN:
+            self.headers = {"Authorization": f"token {GITHUB_TOKEN}"}
+        else:
+            self.headers = {}
+
     def _get_api_data(self) -> Dict[str, Dict[str, str]]:
         """
         Call the GitHub API and get information about our repos.
@@ -42,9 +51,16 @@ class HomeView(View):
         If we're unable to get that info for any reason, return an empty dict.
         """
         repo_dict = {}
-
-        # Fetch the data from the GitHub API
-        api_data: List[dict] = requests.get(self.github_api, headers=self.headers).json()
+        try:
+            # Fetch the data from the GitHub API
+            api_data: List[dict] = requests.get(
+                self.github_api,
+                headers=self.headers,
+                timeout=TIMEOUT_PERIOD
+            ).json()
+        except requests.exceptions.Timeout:
+            log.error("Request to fetch GitHub repository metadata for timed out!")
+            return repo_dict

         # Process the API data into our dict
         for repo in api_data:
@@ -74,35 +90,33 @@ class HomeView(View):

     def _get_repo_data(self) -> List[RepositoryMetadata]:
         """Build a list of RepositoryMetadata objects that we can use to populate the front page."""
-        database_repositories = []
-
-        # First, let's see if we have any metadata cached.
-        cached_data = RepositoryMetadata.objects.all()
+        # First off, load the timestamp of the least recently updated entry.
+        last_update = (
+            RepositoryMetadata.objects.values_list("last_updated", flat=True)
+            .order_by("last_updated").first()
+        )

-        # If we don't, we have to create some!
-        if not cached_data:
+        # If we did not retrieve any results here, we should import them!
+        if last_update is None:

             # Try to get new data from the API. If it fails, we'll return an empty list.
             # In this case, we simply don't display our projects on the site.
             api_repositories = self._get_api_data()

             # Create all the repodata records in the database.
-            for api_data in api_repositories.values():
-                repo_data = RepositoryMetadata(
+            return RepositoryMetadata.objects.bulk_create(
+                RepositoryMetadata(
                     repo_name=api_data["full_name"],
                     description=api_data["description"],
                     forks=api_data["forks_count"],
                     stargazers=api_data["stargazers_count"],
                     language=api_data["language"],
                 )
-
-                repo_data.save()
-                database_repositories.append(repo_data)
-
-            return database_repositories
+                for api_data in api_repositories.values()
+            )

         # If the data is stale, we should refresh it.
-        if (timezone.now() - cached_data[0].last_updated).seconds > self.repository_cache_ttl:
+        if (timezone.now() - last_update).seconds > self.repository_cache_ttl:
             # Try to get new data from the API. If it fails, return the cached data.
             api_repositories = self._get_api_data()

@@ -110,22 +124,18 @@ class HomeView(View):
                 return RepositoryMetadata.objects.all()

             # Update or create all RepoData objects in self.repos
-            for repo_name, api_data in api_repositories.items():
-                try:
-                    repo_data = RepositoryMetadata.objects.get(repo_name=repo_name)
-                    repo_data.description = api_data["description"]
-                    repo_data.language = api_data["language"]
-                    repo_data.forks = api_data["forks_count"]
-                    repo_data.stargazers = api_data["stargazers_count"]
-                except RepositoryMetadata.DoesNotExist:
-                    repo_data = RepositoryMetadata(
-                        repo_name=api_data["full_name"],
-                        description=api_data["description"],
-                        forks=api_data["forks_count"],
-                        stargazers=api_data["stargazers_count"],
-                        language=api_data["language"],
-                    )
-                repo_data.save()
+            database_repositories = []
+            for api_data in api_repositories.values():
+                repo_data, _created = RepositoryMetadata.objects.update_or_create(
+                    repo_name=api_data["full_name"],
+                    defaults={
+                        'repo_name': api_data["full_name"],
+                        'description': api_data["description"],
+                        'forks': api_data["forks_count"],
+                        'stargazers': api_data["stargazers_count"],
+                        'language': api_data["language"],
+                    }
+                )
                 database_repositories.append(repo_data)

         return database_repositories
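
Note on the new path('', include('django_prometheus.urls')) route in urls.py: django-prometheus ships a urlconf that exposes a metrics view, so mounting it at the site root makes scrapes available at /metrics. The rest of the django-prometheus wiring (app and middleware registration) is not part of this diff and is presumably configured elsewhere in the project; a minimal settings sketch, following the library's documented setup, would look like:

    # settings.py sketch -- standard django-prometheus registration, not shown in this diff.
    INSTALLED_APPS = [
        # ... existing apps ...
        "django_prometheus",
    ]

    MIDDLEWARE = [
        "django_prometheus.middleware.PrometheusBeforeMiddleware",
        # ... existing middleware ...
        "django_prometheus.middleware.PrometheusAfterMiddleware",
    ]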
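The new import pulls TIMEOUT_PERIOD from pydis_site.constants, but that constant's definition lies outside the paths shown in this diff. A hedged sketch of how such a constant might be declared is below; the environment variable name and the 5-second default are assumptions, not taken from the repository:

    # pydis_site/constants.py (illustrative sketch only; real values not shown in this diff)
    import os

    GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")

    # Seconds requests.get() waits for the GitHub API before raising
    # requests.exceptions.Timeout; the default here is an assumption.
    TIMEOUT_PERIOD = int(os.environ.get("TIMEOUT_PERIOD", 5))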
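The rewritten refresh loop leans on QuerySet.update_or_create(), which folds the old get-then-update / DoesNotExist-then-create branches into one call: the keyword arguments outside defaults form the lookup, and defaults is applied whether the row already existed or had to be created. A standalone illustration against the same RepositoryMetadata model (field values are placeholders), for example from python manage.py shell:

    from pydis_site.apps.home.models import RepositoryMetadata

    # Placeholder values for illustration only.
    repo, created = RepositoryMetadata.objects.update_or_create(
        repo_name="python-discord/site",
        defaults={
            "description": "Example description",
            "forks": 0,
            "stargazers": 0,
            "language": "Python",
        },
    )
    print(created)  # True when the row is first created, False on later refreshes.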