From 862a982115bc6feb46c082484740765b0eac5caa Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 5 Dec 2020 15:40:04 +0000 Subject: Update gunicorn configuration options --- manage.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/manage.py b/manage.py index 446b1af3..36442eab 100755 --- a/manage.py +++ b/manage.py @@ -163,7 +163,10 @@ class SiteManager: "-b", "0.0.0.0:8000", "pydis_site.wsgi:application", "--threads", "8", - "-w", "4" + "-w", "4", + "--max-requests-jitter", "1000", + "--statsd_host", "graphite.default.svc.cluster.local:8125", + "--statsd_prefix", "site", ] # Run gunicorn for the production server. -- cgit v1.2.3 From 702afad1ee0973c8b46b641bba1665dc7c490b06 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sat, 5 Dec 2020 15:43:21 +0000 Subject: Switch underscores for dashes in gunicorn CLI options --- manage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manage.py b/manage.py index 36442eab..3dee15c0 100755 --- a/manage.py +++ b/manage.py @@ -165,8 +165,8 @@ class SiteManager: "--threads", "8", "-w", "4", "--max-requests-jitter", "1000", - "--statsd_host", "graphite.default.svc.cluster.local:8125", - "--statsd_prefix", "site", + "--statsd-host", "graphite.default.svc.cluster.local:8125", + "--statsd-prefix", "site", ] # Run gunicorn for the production server. 
-- cgit v1.2.3 From 4e0ec7b5d7a7fe9cce28f795ef376435ed01c944 Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sun, 6 Dec 2020 12:43:50 +0000 Subject: Update manage.py Co-authored-by: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> --- manage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/manage.py b/manage.py index 3dee15c0..a025e7b1 100755 --- a/manage.py +++ b/manage.py @@ -164,7 +164,8 @@ class SiteManager: "pydis_site.wsgi:application", "--threads", "8", "-w", "4", - "--max-requests-jitter", "1000", + "--max-requests", "1000", + "--max-requests-jitter", "50", "--statsd-host", "graphite.default.svc.cluster.local:8125", "--statsd-prefix", "site", ] -- cgit v1.2.3 From 2c9c31c887cb1cb6de8fc7d65a11440467376bad Mon Sep 17 00:00:00 2001 From: Joe Banks Date: Sun, 6 Dec 2020 14:15:20 +0000 Subject: Create review-policy.yml --- .github/review-policy.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/review-policy.yml diff --git a/.github/review-policy.yml b/.github/review-policy.yml new file mode 100644 index 00000000..421b30f8 --- /dev/null +++ b/.github/review-policy.yml @@ -0,0 +1,3 @@ +remote: python-discord/.github +path: review-policies/core-developers.yml +ref: main -- cgit v1.2.3 From abccf0f021e88cd2437f666222de7736a167749d Mon Sep 17 00:00:00 2001 From: Dennis Pham Date: Sun, 6 Dec 2020 19:43:04 -0500 Subject: Update CODEOWNERS for @Den4200 --- .github/CODEOWNERS | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index dd8eb4f3..0ba2c55b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,18 +1,15 @@ -# Request Dennis for any PR -* @Den4200 - # Infractions API pydis_site/apps/api/models/bot/infraction.py @MarkKoz pydis_site/apps/api/viewsets/bot/infraction.py @MarkKoz # Django ORM **/migrations/** @Akarys42 -**/models/** @Akarys42 +**/models/** @Akarys42 @Den4200 # CI & Docker -.github/workflows/** @MarkKoz @Akarys42 
@SebastiaanZ -Dockerfile @MarkKoz @Akarys42 -docker-compose.yml @MarkKoz @Akarys42 +.github/workflows/** @MarkKoz @Akarys42 @SebastiaanZ @Den4200 +Dockerfile @MarkKoz @Akarys42 @Den4200 +docker-compose.yml @MarkKoz @Akarys42 @Den4200 # Tools Pipfile* @Akarys42 -- cgit v1.2.3 From cf790ace675867bb8f1a5c5f4d8c9b912ad5b973 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Thu, 10 Dec 2020 23:19:15 +0100 Subject: Send an enhanced workflow status embed to discord Similar to Sir Lancebot and bot, I've added enhanced GitHub Actions status embeds to our workflow sequence. This means we get fewer embeds, but with more information! --- .github/workflows/lint-test.yaml | 22 +++++++++++ .github/workflows/status_embed.yaml | 78 +++++++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) create mode 100644 .github/workflows/status_embed.yaml diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index 668c888d..b75e3b8e 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -118,3 +118,25 @@ jobs: - name: Tear down docker-compose containers run: docker-compose stop if: ${{ always() }} + + # Prepare the Pull Request Payload artifact. If this fails, we + # fail silently using the `continue-on-error` option. It's + # nice if this succeeds, but if it fails for any reason, it + # does not mean that our lint-test checks failed. + - name: Prepare Pull Request Payload artifact + id: prepare-artifact + if: always() && github.event_name == 'pull_request' + continue-on-error: true + run: cat $GITHUB_EVENT_PATH | jq '.pull_request' > pull_request_payload.json + + # This only makes sense if the previous step succeeded. To + # get the original outcome of the previous step before the + # `continue-on-error` conclusion is applied, we use the + # `.outcome` value. This step also fails silently. 
+ - name: Upload a Build Artifact + if: steps.prepare-artifact.outcome == 'success' + continue-on-error: true + uses: actions/upload-artifact@v2 + with: + name: pull-request-payload + path: pull_request_payload.json diff --git a/.github/workflows/status_embed.yaml b/.github/workflows/status_embed.yaml new file mode 100644 index 00000000..b6a71b88 --- /dev/null +++ b/.github/workflows/status_embed.yaml @@ -0,0 +1,78 @@ +name: Status Embed + +on: + workflow_run: + workflows: + - Lint & Test + - Build + - Deploy + types: + - completed + +jobs: + status_embed: + # We need to send a status embed whenever the workflow + # sequence we're running terminates. There are a number + # of situations in which that happens: + # + # 1. We reach the end of the Deploy workflow, without + # it being skipped. + # + # 2. A `pull_request` triggered a Lint & Test workflow, + # as the sequence always terminates with one run. + # + # 3. If any workflow ends in failure or was cancelled. + if: >- + (github.event.workflow_run.name == 'Deploy' && github.event.workflow_run.conclusion != 'skipped') || + github.event.workflow_run.event == 'pull_request' || + github.event.workflow_run.conclusion == 'failure' || + github.event.workflow_run.conclusion == 'cancelled' + name: Send Status Embed to Discord + runs-on: ubuntu-latest + + steps: + # A workflow_run event does not contain all the information + # we need for a PR embed. That's why we upload an artifact + # with that information in the Lint workflow. 
+ - name: Get Pull Request Information + id: pr_info + if: github.event.workflow_run.event == 'pull_request' + run: | + curl -s -H "Authorization: token $GITHUB_TOKEN" ${{ github.event.workflow_run.artifacts_url }} > artifacts.json + DOWNLOAD_URL=$(cat artifacts.json | jq -r '.artifacts[] | select(.name == "pull-request-payload") | .archive_download_url') + [ -z "$DOWNLOAD_URL" ] && exit 1 + wget --quiet --header="Authorization: token $GITHUB_TOKEN" -O pull_request_payload.zip $DOWNLOAD_URL || exit 2 + unzip -p pull_request_payload.zip > pull_request_payload.json + [ -s pull_request_payload.json ] || exit 3 + echo "::set-output name=pr_author_login::$(jq -r '.user.login // empty' pull_request_payload.json)" + echo "::set-output name=pr_number::$(jq -r '.number // empty' pull_request_payload.json)" + echo "::set-output name=pr_title::$(jq -r '.title // empty' pull_request_payload.json)" + echo "::set-output name=pr_source::$(jq -r '.head.label // empty' pull_request_payload.json)" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Send an informational status embed to Discord instead of the + # standard embeds that Discord sends. This embed will contain + # more information and we can fine tune when we actually want + # to send an embed. 
+ - name: GitHub Actions Status Embed for Discord + uses: SebastiaanZ/github-status-embed-for-discord@v0.2.1 + with: + # Our GitHub Actions webhook + webhook_id: '784184528997842985' + webhook_token: ${{ secrets.GHA_WEBHOOK_TOKEN }} + + # Workflow information + workflow_name: ${{ github.event.workflow_run.name }} + run_id: ${{ github.event.workflow_run.id }} + run_number: ${{ github.event.workflow_run.run_number }} + status: ${{ github.event.workflow_run.conclusion }} + actor: ${{ github.actor }} + repository: ${{ github.repository }} + ref: ${{ github.ref }} + sha: ${{ github.event.workflow_run.head_sha }} + + pr_author_login: ${{ steps.pr_info.outputs.pr_author_login }} + pr_number: ${{ steps.pr_info.outputs.pr_number }} + pr_title: ${{ steps.pr_info.outputs.pr_title }} + pr_source: ${{ steps.pr_info.outputs.pr_source }} -- cgit v1.2.3 From 58a4b5b090f75fd5b91067f67b5924e97d701276 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Fri, 11 Dec 2020 11:40:24 +0100 Subject: Make sure to always upload PR artifact We need to make sure that we always upload our PR artifact, even if linting/testing fails. As GitHub Actions inserts an implicit "success" status check if you don't add any explicit status checks, this means that we have to include an "always" status check, even if this step has a condition relying on another "always" step. --- .github/workflows/lint-test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index b75e3b8e..397c2085 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -134,7 +134,7 @@ jobs: # `continue-on-error` conclusion is applied, we use the # `.outcome` value. This step also fails silently. 
- name: Upload a Build Artifact - if: steps.prepare-artifact.outcome == 'success' + if: always() && steps.prepare-artifact.outcome == 'success' continue-on-error: true uses: actions/upload-artifact@v2 with: -- cgit v1.2.3 From aeaa100bfe623b34b5742900d0571eea57c38ddb Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Fri, 11 Dec 2020 22:33:43 +0100 Subject: Update README to trigger redeploy. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index daaa041d..134b8439 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Python Discord: Site -[![Discord](https://img.shields.io/static/v1?label=Python%20Discord&logo=discord&message=%3E100k%20members&color=%237289DA&logoColor=white)](https://discord.gg/2B963hn) +[![Discord](https://img.shields.io/static/v1?label=Python%20Discord&logo=discord&message=%3E115k%20members&color=%237289DA&logoColor=white)](https://discord.gg/2B963hn) [![Lint & Test][1]][2] [![Build & Deploy][3]][4] [![Coverage Status][5]][6] -- cgit v1.2.3 From e2e4222ecc52a8743a3829454261ce549bd2408c Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sat, 12 Dec 2020 00:37:55 +0100 Subject: Change env var from SITE_SENTRY_DSN to SITE_DSN The name of the environment variable was shortened during our refactor of the Sentry secrets. Changed `settings.py` to reflect that change. 
--- pydis_site/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 204ce58f..2b1bfa58 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -28,7 +28,7 @@ if typing.TYPE_CHECKING: env = environ.Env( DEBUG=(bool, False), - SITE_SENTRY_DSN=(str, "") + SITE_DSN=(str, "") ) sentry_sdk.init( -- cgit v1.2.3 From 233c7904e1e276d78d96dd1c26594f19c162f1f0 Mon Sep 17 00:00:00 2001 From: Sebastiaan Zeeff <33516116+SebastiaanZ@users.noreply.github.com> Date: Sat, 12 Dec 2020 00:42:36 +0100 Subject: Fix incorrectly specified environment variable When I changed the name of SITE_SENTRY_DSN to SITE_DSN, I accidentally left an old reference in place. --- pydis_site/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydis_site/settings.py b/pydis_site/settings.py index 2b1bfa58..449a343f 100644 --- a/pydis_site/settings.py +++ b/pydis_site/settings.py @@ -32,7 +32,7 @@ env = environ.Env( ) sentry_sdk.init( - dsn=env('SITE_SENTRY_DSN'), + dsn=env('SITE_DSN'), integrations=[DjangoIntegration()], send_default_pii=True, release=f"pydis-site@{GIT_SHA}" -- cgit v1.2.3 From bf06cc2c64b8c991070e6364c9ddc05b7da243f8 Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 14:06:05 +0100 Subject: Add some sanity to frontpage projects. If no API data is available, the front page shouldn't crash and burn. This commit will ensure the front page will just render without the projects it can't get API data for instead of crashing the whole thing. 
--- pydis_site/apps/home/views/home.py | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index c1c2055c..e9ed0df0 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -1,4 +1,5 @@ import datetime +import logging from typing import Dict, List import requests @@ -10,6 +11,8 @@ from django.views import View from pydis_site.apps.home.models import RepositoryMetadata +log = logging.getLogger(__name__) + class HomeView(View): """The main landing page for the website.""" @@ -67,22 +70,25 @@ class HomeView(View): # Update or create all RepoData objects in self.repos for repo_name, api_data in api_repositories.items(): - try: - repo_data = RepositoryMetadata.objects.get(repo_name=repo_name) - repo_data.description = api_data["description"] - repo_data.language = api_data["language"] - repo_data.forks = api_data["forks_count"] - repo_data.stargazers = api_data["stargazers_count"] - except RepositoryMetadata.DoesNotExist: - repo_data = RepositoryMetadata( - repo_name=api_data["full_name"], - description=api_data["description"], - forks=api_data["forks_count"], - stargazers=api_data["stargazers_count"], - language=api_data["language"], - ) - repo_data.save() - database_repositories.append(repo_data) + if api_data: + try: + repo_data = RepositoryMetadata.objects.get(repo_name=repo_name) + repo_data.description = api_data["description"] + repo_data.language = api_data["language"] + repo_data.forks = api_data["forks_count"] + repo_data.stargazers = api_data["stargazers_count"] + except RepositoryMetadata.DoesNotExist: + repo_data = RepositoryMetadata( + repo_name=api_data["full_name"], + description=api_data["description"], + forks=api_data["forks_count"], + stargazers=api_data["stargazers_count"], + language=api_data["language"], + ) + repo_data.save() + database_repositories.append(repo_data) + else: + 
log.error(f"No API data is available for {repo_name}!") return database_repositories # Otherwise, if the data is fresher than 2 minutes old, we should just return it. -- cgit v1.2.3 From 99fa3ae5806dadb73a49ceca4a4a2e82c3519b4e Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 14:49:45 +0100 Subject: Completely remove Projects section if no data. Instead of showing an empty Projects section, we'll just omit it if we can't resolve any of our projects. Otherwise, we'll show the projects we can resolve. --- pydis_site/templates/home/index.html | 98 ++++++++++++++++++------------------ 1 file changed, 50 insertions(+), 48 deletions(-) diff --git a/pydis_site/templates/home/index.html b/pydis_site/templates/home/index.html index 72a5f67c..a98613a3 100644 --- a/pydis_site/templates/home/index.html +++ b/pydis_site/templates/home/index.html @@ -130,57 +130,59 @@ -
-
-

Projects

- -
- - {# Generate project data from HomeView.repos #} - {% for repo in repo_data %} - -
-
+ + + {% endif %}
-- cgit v1.2.3 From d2d31b41d6122ee32fd8efebc0a3c9d33e713275 Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 14:52:51 +0100 Subject: Add even more sanity for the HomeView. This rewrite will ensure we always return whichever repositories we're able to resolve, and none of the others. It also shortens the code, shrinks all the try/excepts, and generally makes things a little more readable. --- pydis_site/apps/home/views/home.py | 153 ++++++++++++++++++++----------------- 1 file changed, 81 insertions(+), 72 deletions(-) diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index e9ed0df0..ddbfc4b4 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -1,4 +1,3 @@ -import datetime import logging from typing import Dict, List @@ -31,79 +30,59 @@ class HomeView(View): ] def _get_api_data(self) -> Dict[str, Dict[str, str]]: - """Call the GitHub API and get information about our repos.""" - repo_dict: Dict[str, dict] = {repo_name: {} for repo_name in self.repos} + """ + Call the GitHub API and get information about our repos. + + If we're unable to get that info for any reason, return an empty dict. 
+ """ + repo_dict = {} # Fetch the data from the GitHub API api_data: List[dict] = requests.get(self.github_api).json() # Process the API data into our dict for repo in api_data: - full_name = repo["full_name"] - - if full_name in self.repos: - repo_dict[full_name] = { - "full_name": repo["full_name"], - "description": repo["description"], - "language": repo["language"], - "forks_count": repo["forks_count"], - "stargazers_count": repo["stargazers_count"], - } + try: + full_name = repo["full_name"] + + if full_name in self.repos: + repo_dict[full_name] = { + "full_name": repo["full_name"], + "description": repo["description"], + "language": repo["language"], + "forks_count": repo["forks_count"], + "stargazers_count": repo["stargazers_count"], + } + # Something is not right about the API data we got back from GitHub. + except (TypeError, ConnectionError, KeyError) as e: + log.error( + "Unable to parse the GitHub repository metadata from response!", + extra={ + 'api_data': api_data, + 'error': e + } + ) + continue return repo_dict def _get_repo_data(self) -> List[RepositoryMetadata]: """Build a list of RepositoryMetadata objects that we can use to populate the front page.""" - # Try to get site data from the cache - try: - repo_data = RepositoryMetadata.objects.get(repo_name="python-discord/site") + database_repositories = [] - # If the data is stale, we should refresh it. - if (timezone.now() - repo_data.last_updated).seconds > self.repository_cache_ttl: + # First, let's see if we have any metadata cached. + cached_data = RepositoryMetadata.objects.all() - # Try to get new data from the API. If it fails, return the cached data. 
- try: - api_repositories = self._get_api_data() - except (TypeError, ConnectionError): - return RepositoryMetadata.objects.all() - database_repositories = [] - - # Update or create all RepoData objects in self.repos - for repo_name, api_data in api_repositories.items(): - if api_data: - try: - repo_data = RepositoryMetadata.objects.get(repo_name=repo_name) - repo_data.description = api_data["description"] - repo_data.language = api_data["language"] - repo_data.forks = api_data["forks_count"] - repo_data.stargazers = api_data["stargazers_count"] - except RepositoryMetadata.DoesNotExist: - repo_data = RepositoryMetadata( - repo_name=api_data["full_name"], - description=api_data["description"], - forks=api_data["forks_count"], - stargazers=api_data["stargazers_count"], - language=api_data["language"], - ) - repo_data.save() - database_repositories.append(repo_data) - else: - log.error(f"No API data is available for {repo_name}!") - return database_repositories - - # Otherwise, if the data is fresher than 2 minutes old, we should just return it. - else: - return RepositoryMetadata.objects.all() + # If we don't, we have to create some! + if not cached_data: - # If this is raised, the database has no repodata at all, we will create them all. - except RepositoryMetadata.DoesNotExist: - database_repositories = [] - try: - # Get new data from API - api_repositories = self._get_api_data() + # Try to get new data from the API. If it fails, return an empty list. + # In this case, we simply don't display our projects on the site. + api_repositories = self._get_api_data() - # Create all the repodata records in the database. - for api_data in api_repositories.values(): + # Create all the repodata records in the database. 
+ for api_data in api_repositories.values(): + try: repo_data = RepositoryMetadata( repo_name=api_data["full_name"], description=api_data["description"], @@ -111,23 +90,53 @@ class HomeView(View): stargazers=api_data["stargazers_count"], language=api_data["language"], ) - repo_data.save() - database_repositories.append(repo_data) - except TypeError: - for repo_name in self.repos: - repo_data = RepositoryMetadata( - last_updated=timezone.now() - datetime.timedelta(minutes=50), - repo_name=repo_name, - description="Not available.", - forks=999, - stargazers=999, - language="Python", + # This error indicates there's something not quite right about the api_data types. + # In that case, just skip this repo. + except TypeError: + log.error( + "Encountered a TypeError while processing RepositoryMetadata " + "from the GitHub API.", + extra=api_data ) - repo_data.save() - database_repositories.append(repo_data) + continue + + repo_data.save() + database_repositories.append(repo_data) return database_repositories + # If the data is stale, we should refresh it. + if (timezone.now() - cached_data[0].last_updated).seconds > self.repository_cache_ttl: + # Try to get new data from the API. If it fails, return the cached data. 
+ api_repositories = self._get_api_data() + + if not api_repositories: + return RepositoryMetadata.objects.all() + + # Update or create all RepoData objects in self.repos + for repo_name, api_data in api_repositories.items(): + try: + repo_data = RepositoryMetadata.objects.get(repo_name=repo_name) + repo_data.description = api_data["description"] + repo_data.language = api_data["language"] + repo_data.forks = api_data["forks_count"] + repo_data.stargazers = api_data["stargazers_count"] + except RepositoryMetadata.DoesNotExist: + repo_data = RepositoryMetadata( + repo_name=api_data["full_name"], + description=api_data["description"], + forks=api_data["forks_count"], + stargazers=api_data["stargazers_count"], + language=api_data["language"], + ) + repo_data.save() + database_repositories.append(repo_data) + return database_repositories + + # Otherwise, if the data is fresher than 2 minutes old, we should just return it. + else: + return RepositoryMetadata.objects.all() + def get(self, request: WSGIRequest) -> HttpResponse: """Collect repo data and render the homepage view.""" repo_data = self._get_repo_data() -- cgit v1.2.3 From d513270273d988fc3ecbfd609f3187889d0a0ea3 Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 14:53:28 +0100 Subject: Fix test for HomeView. Instead of building "fake repos", we're now just omitting them, so this test needs to be updated. 
--- pydis_site/apps/home/tests/test_repodata_helpers.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/pydis_site/apps/home/tests/test_repodata_helpers.py b/pydis_site/apps/home/tests/test_repodata_helpers.py index 77b1a68d..34bbdcde 100644 --- a/pydis_site/apps/home/tests/test_repodata_helpers.py +++ b/pydis_site/apps/home/tests/test_repodata_helpers.py @@ -123,10 +123,4 @@ class TestRepositoryMetadataHelpers(TestCase): mock_get.return_value.json.return_value = ['garbage'] metadata = self.home_view._get_repo_data() - self.assertEquals(len(metadata), len(self.home_view.repos)) - for item in metadata: - with self.subTest(item=item): - self.assertEqual(item.description, "Not available.") - self.assertEqual(item.forks, 999) - self.assertEqual(item.stargazers, 999) - self.assertEqual(item.language, "Python") + self.assertEquals(len(metadata), 0) -- cgit v1.2.3 From 9f5e2cd4a46349d161d5ec22c65d212a52215c47 Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 15:07:03 +0100 Subject: Add a test repo with bad types. This should restore 100% coverage. 
--- pydis_site/apps/home/tests/mock_github_api_response.json | 7 +++++++ pydis_site/apps/home/views/home.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/pydis_site/apps/home/tests/mock_github_api_response.json b/pydis_site/apps/home/tests/mock_github_api_response.json index ddbffed8..00f2840d 100644 --- a/pydis_site/apps/home/tests/mock_github_api_response.json +++ b/pydis_site/apps/home/tests/mock_github_api_response.json @@ -34,6 +34,13 @@ "language": "Python", "forks_count": 31 }, + { + "full_name": "python-discord/sir-lancebot", + "description": 42, + "stargazers_count": "bad types", + "language": ["not", "the", "right", "type"], + "forks_count": "31" + }, { "full_name": "python-discord/sir-lancebot", "description": "test", diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index ddbfc4b4..77496121 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -76,7 +76,7 @@ class HomeView(View): # If we don't, we have to create some! if not cached_data: - # Try to get new data from the API. If it fails, return an empty list. + # Try to get new data from the API. If it fails, we'll return an empty list. # In this case, we simply don't display our projects on the site. api_repositories = self._get_api_data() -- cgit v1.2.3 From fa9ef1d0192a372c56a172484654c4103463ea7c Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 15:25:08 +0100 Subject: Remove pointless try/except. I don't think we need to check for TypeError here, so that saves us the trouble of testing these lines to appease coverage. 
--- .../apps/home/tests/mock_github_api_response.json | 7 ------- pydis_site/apps/home/views/home.py | 24 +++++++--------------- 2 files changed, 7 insertions(+), 24 deletions(-) diff --git a/pydis_site/apps/home/tests/mock_github_api_response.json b/pydis_site/apps/home/tests/mock_github_api_response.json index 00f2840d..ddbffed8 100644 --- a/pydis_site/apps/home/tests/mock_github_api_response.json +++ b/pydis_site/apps/home/tests/mock_github_api_response.json @@ -34,13 +34,6 @@ "language": "Python", "forks_count": 31 }, - { - "full_name": "python-discord/sir-lancebot", - "description": 42, - "stargazers_count": "bad types", - "language": ["not", "the", "right", "type"], - "forks_count": "31" - }, { "full_name": "python-discord/sir-lancebot", "description": "test", diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index 77496121..97253a0c 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -82,23 +82,13 @@ class HomeView(View): # Create all the repodata records in the database. for api_data in api_repositories.values(): - try: - repo_data = RepositoryMetadata( - repo_name=api_data["full_name"], - description=api_data["description"], - forks=api_data["forks_count"], - stargazers=api_data["stargazers_count"], - language=api_data["language"], - ) - # This error indicates there's something not quite right about the api_data types. - # In that case, just skip this repo. 
- except TypeError: - log.error( - "Encountered a TypeError while processing RepositoryMetadata " - "from the GitHub API.", - extra=api_data - ) - continue + repo_data = RepositoryMetadata( + repo_name=api_data["full_name"], + description=api_data["description"], + forks=api_data["forks_count"], + stargazers=api_data["stargazers_count"], + language=api_data["language"], + ) repo_data.save() database_repositories.append(repo_data) -- cgit v1.2.3 From aaf733373e7f3dc59263df79a6a19cc87db4b27c Mon Sep 17 00:00:00 2001 From: Leon Sandøy Date: Sun, 13 Dec 2020 18:58:19 +0100 Subject: Increase per_page to 100 for GitHub API call. Snekbox was being banished to page 2 and we were not iterating pages, so it was not appearing in the data we got from our call to /repos. This commit changes the request to use `per_page=100`, which will work at least until we have >100 repos in our organisation. --- pydis_site/apps/home/views/home.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py index 97253a0c..0e5d4edf 100644 --- a/pydis_site/apps/home/views/home.py +++ b/pydis_site/apps/home/views/home.py @@ -16,7 +16,7 @@ log = logging.getLogger(__name__) class HomeView(View): """The main landing page for the website.""" - github_api = "https://api.github.com/users/python-discord/repos" + github_api = "https://api.github.com/users/python-discord/repos?per_page=100" repository_cache_ttl = 3600 # Which of our GitHub repos should be displayed on the front page, and in which order? -- cgit v1.2.3