aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorGravatar Joe Banks <[email protected]>2021-10-10 20:00:35 +0100
committerGravatar GitHub <[email protected]>2021-10-10 20:00:35 +0100
commit3f8854ebd7daa35f9e896b95efa3e4ff7d23185d (patch)
tree5842c9d3d1d366cd14794dba70412ddb5ee497fc
parentMerge pull request #603 from python-discord/decrease-batch-size-for-user-list... (diff)
parentUses Nightly To Download Artifacts (diff)
Merge pull request #609 from python-discord/static-previews
Static Previews
-rw-r--r--.github/workflows/static-preview.yaml77
-rw-r--r--.gitignore3
-rw-r--r--Dockerfile6
-rwxr-xr-xmanage.py33
-rw-r--r--poetry.lock17
-rw-r--r--pydis_site/apps/content/urls.py43
-rw-r--r--pydis_site/apps/events/urls.py35
-rw-r--r--pydis_site/apps/home/urls.py6
-rw-r--r--pydis_site/apps/home/views/home.py26
-rw-r--r--pydis_site/apps/resources/urls.py22
-rw-r--r--pydis_site/settings.py21
-rw-r--r--pydis_site/urls.py11
-rw-r--r--pyproject.toml2
-rw-r--r--static-builds/README.md48
-rw-r--r--static-builds/netlify_build.py122
15 files changed, 445 insertions, 27 deletions
diff --git a/.github/workflows/static-preview.yaml b/.github/workflows/static-preview.yaml
new file mode 100644
index 00000000..52d7df5a
--- /dev/null
+++ b/.github/workflows/static-preview.yaml
@@ -0,0 +1,77 @@
+name: Build & Publish Static Preview
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+jobs:
+ build:
+ name: Build Static Preview
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+
+ # Create a commit SHA-based tag for the container repositories
+ - name: Create SHA Container Tag
+ id: sha_tag
+ run: |
+ tag=$(cut -c 1-7 <<< $GITHUB_SHA)
+ echo "::set-output name=tag::$tag"
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+
+ - name: Login to Github Container Registry
+ uses: docker/login-action@v1
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ # Build the container, including an inline cache manifest to
+ # allow us to use the registry as a cache source.
+ - name: Build Docker Image (Main)
+ uses: docker/build-push-action@v2
+ if: github.ref == 'refs/heads/main'
+ with:
+ context: .
+ push: true
+ cache-from: type=registry,ref=ghcr.io/python-discord/static-site:latest
+ cache-to: type=inline
+ tags: |
+ ghcr.io/python-discord/static-site:latest
+ ghcr.io/python-discord/static-site:${{ steps.sha_tag.outputs.tag }}
+ build-args: |
+ git_sha=${{ github.sha }}
+ STATIC_BUILD=TRUE
+
+ - name: Extract Build From Docker Image (Main)
+ if: github.ref == 'refs/heads/main'
+ run: |
+ mkdir docker_build \
+ && docker run --entrypoint /bin/echo --name site \
+ ghcr.io/python-discord/static-site:${{ steps.sha_tag.outputs.tag }} \
+ && docker cp site:/app docker_build/
+
+ # Build directly to a local folder
+ - name: Build Docker Image (PR)
+ uses: docker/build-push-action@v2
+ if: github.ref != 'refs/heads/main'
+ with:
+ context: .
+ push: false
+ cache-from: type=registry,ref=ghcr.io/python-discord/static-site:latest
+ outputs: type=local,dest=docker_build/
+ build-args: |
+ git_sha=${{ github.sha }}
+ STATIC_BUILD=TRUE
+
+ - name: Upload Build
+ uses: actions/upload-artifact@v2
+ with:
+ name: static-build
+ path: docker_build/app/build/
+ if-no-files-found: error
diff --git a/.gitignore b/.gitignore
index e4ad2c19..08d257bc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -126,3 +126,6 @@ staticfiles/
*.js.tmp
log.*
+
+# Local Netlify folder
+.netlify
diff --git a/Dockerfile b/Dockerfile
index 046e7f80..2b039fab 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -36,6 +36,12 @@ RUN \
METRICITY_DB_URL=postgres://localhost \
python manage.py collectstatic --noinput --clear
+# Build static files if we are doing a static build
+ARG STATIC_BUILD=false
+RUN if [ $STATIC_BUILD = "TRUE" ] ; \
+ then SECRET_KEY=dummy_value python manage.py distill-local build --traceback --force ; \
+fi
+
# Run web server through custom manager
ENTRYPOINT ["python", "manage.py"]
CMD ["run"]
diff --git a/manage.py b/manage.py
index 578f4748..90912da3 100755
--- a/manage.py
+++ b/manage.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python
import os
+import platform
import sys
+from pathlib import Path
import django
from django.contrib.auth import get_user_model
@@ -147,6 +149,22 @@ class SiteManager:
gunicorn.app.wsgiapp.run()
+def clean_up_static_files(build_folder: Path) -> None:
+ """Recursively loop over the build directory and fix links."""
+ for file in build_folder.iterdir():
+ if file.is_dir():
+ clean_up_static_files(file)
+ elif file.name.endswith(".html"):
+ # Fix parent host url
+ new = file.read_text(encoding="utf-8").replace(f"//{os.getenv('PARENT_HOST')}", "")
+
+ # Fix windows paths if on windows
+ if platform.system() == "Windows":
+ new = new.replace("%5C", "/")
+
+ file.write_text(new, encoding="utf-8")
+
+
def main() -> None:
"""Entry point for Django management script."""
# Use the custom site manager for launching the server
@@ -155,8 +173,23 @@ def main() -> None:
# Pass any others directly to standard management commands
else:
+ _static_build = "distill" in sys.argv[1]
+
+ if _static_build:
+ # Build a static version of the site with no databases and API support
+ os.environ["STATIC_BUILD"] = "True"
+ if not os.getenv("PARENT_HOST"):
+ os.environ["PARENT_HOST"] = "REPLACE_THIS.HOST"
+
execute_from_command_line(sys.argv)
+ if _static_build:
+ # Clean up parent host in generated files
+ for arg in sys.argv[2:]:
+ if not arg.startswith("-"):
+ clean_up_static_files(Path(arg))
+ break
+
if __name__ == '__main__':
main()
diff --git a/poetry.lock b/poetry.lock
index c6724cfc..eac58fdb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -138,6 +138,18 @@ argon2 = ["argon2-cffi (>=16.1.0)"]
bcrypt = ["bcrypt"]
[[package]]
+name = "django-distill"
+version = "2.9.0"
+description = "Static site renderer and publisher for Django."
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+django = "*"
+requests = "*"
+
+[[package]]
name = "django-environ"
version = "0.4.5"
description = "Django-environ allows you to utilize 12factor inspired environment variables to configure your Django application."
@@ -757,7 +769,7 @@ brotli = ["brotli"]
[metadata]
lock-version = "1.1"
python-versions = "3.9.*"
-content-hash = "ed7da8dbc905d4f2c47e01301b49c4aed0083bee269da0ee5ebcc3abee4ab1a0"
+content-hash = "9f0c069c14e2dbff63d58474702693f0c02b8cfd30e5af38303975a73b71bcfd"
[metadata.files]
asgiref = [
@@ -858,6 +870,9 @@ django = [
{file = "Django-3.0.14-py3-none-any.whl", hash = "sha256:9bc7aa619ed878fedba62ce139abe663a147dccfd20e907725ec11e02a1ca225"},
{file = "Django-3.0.14.tar.gz", hash = "sha256:d58d8394036db75a81896037d757357e79406e8f68816c3e8a28721c1d9d4c11"},
]
+django-distill = [
+ {file = "django-distill-2.9.0.tar.gz", hash = "sha256:08f31dcde2e79e73c0bc4f36941830603a811cc89472be11f79f14affb460d84"},
+]
django-environ = [
{file = "django-environ-0.4.5.tar.gz", hash = "sha256:6c9d87660142608f63ec7d5ce5564c49b603ea8ff25da595fd6098f6dc82afde"},
{file = "django_environ-0.4.5-py2.py3-none-any.whl", hash = "sha256:c57b3c11ec1f319d9474e3e5a79134f40174b17c7cc024bbb2fad84646b120c4"},
diff --git a/pydis_site/apps/content/urls.py b/pydis_site/apps/content/urls.py
index c11b222a..fe7c2852 100644
--- a/pydis_site/apps/content/urls.py
+++ b/pydis_site/apps/content/urls.py
@@ -1,9 +1,46 @@
-from django.urls import path
+import typing
+from pathlib import Path
+
+from django_distill import distill_path
from . import views
app_name = "content"
+
+
+def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]:
+ """Find all folders and markdown files recursively starting from `root`."""
+ if not folder:
+ folder = root
+
+ results = []
+
+ for item in folder.iterdir():
+ name = item.relative_to(root).__str__().replace("\\", "/")
+
+ if item.is_dir():
+ results.append(name)
+ results.extend(__get_all_files(root, item))
+ else:
+ path, extension = name.rsplit(".", maxsplit=1)
+ if extension == "md":
+ results.append(path)
+
+ return results
+
+
+def get_all_pages() -> typing.Iterator[dict[str, str]]:
+    """Yield a dict of all page categories."""
+ for location in __get_all_files(Path("pydis_site", "apps", "content", "resources")):
+ yield {"location": location}
+
+
urlpatterns = [
- path("", views.PageOrCategoryView.as_view(), name='pages'),
- path("<path:location>/", views.PageOrCategoryView.as_view(), name='page_category'),
+ distill_path("", views.PageOrCategoryView.as_view(), name='pages'),
+ distill_path(
+ "<path:location>/",
+ views.PageOrCategoryView.as_view(),
+ name='page_category',
+ distill_func=get_all_pages
+ ),
]
diff --git a/pydis_site/apps/events/urls.py b/pydis_site/apps/events/urls.py
index 9a65cf1f..7ea65a31 100644
--- a/pydis_site/apps/events/urls.py
+++ b/pydis_site/apps/events/urls.py
@@ -1,9 +1,38 @@
-from django.urls import path
+import typing
+from pathlib import Path
+
+from django_distill import distill_path
from pydis_site.apps.events.views import IndexView, PageView
app_name = "events"
+
+
+def __get_all_files(root: Path, folder: typing.Optional[Path] = None) -> list[str]:
+ """Find all folders and HTML files recursively starting from `root`."""
+ if not folder:
+ folder = root
+
+ results = []
+
+ for sub_folder in folder.iterdir():
+ results.append(
+ sub_folder.relative_to(root).__str__().replace("\\", "/").replace(".html", "")
+ )
+
+ if sub_folder.is_dir():
+ results.extend(__get_all_files(root, sub_folder))
+
+ return results
+
+
+def get_all_events() -> typing.Iterator[dict[str, str]]:
+ """Yield a dict of all event pages."""
+ for file in __get_all_files(Path("pydis_site", "templates", "events", "pages")):
+ yield {"path": file}
+
+
urlpatterns = [
- path("", IndexView.as_view(), name="index"),
- path("<path:path>/", PageView.as_view(), name="page"),
+ distill_path("", IndexView.as_view(), name="index"),
+ distill_path("<path:path>/", PageView.as_view(), name="page", distill_func=get_all_events),
]
diff --git a/pydis_site/apps/home/urls.py b/pydis_site/apps/home/urls.py
index 57abc942..30321ece 100644
--- a/pydis_site/apps/home/urls.py
+++ b/pydis_site/apps/home/urls.py
@@ -1,9 +1,9 @@
-from django.urls import path
+from django_distill import distill_path
from .views import HomeView, timeline
app_name = 'home'
urlpatterns = [
- path('', HomeView.as_view(), name='home'),
- path('timeline/', timeline, name="timeline"),
+ distill_path('', HomeView.as_view(), name='home'),
+ distill_path('timeline/', timeline, name="timeline"),
]
diff --git a/pydis_site/apps/home/views/home.py b/pydis_site/apps/home/views/home.py
index 401c768f..e28a3a00 100644
--- a/pydis_site/apps/home/views/home.py
+++ b/pydis_site/apps/home/views/home.py
@@ -8,6 +8,7 @@ from django.shortcuts import render
from django.utils import timezone
from django.views import View
+from pydis_site import settings
from pydis_site.apps.home.models import RepositoryMetadata
from pydis_site.constants import GITHUB_TOKEN, TIMEOUT_PERIOD
@@ -32,7 +33,10 @@ class HomeView(View):
def __init__(self):
"""Clean up stale RepositoryMetadata."""
- RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()
+ self._static_build = settings.env("STATIC_BUILD")
+
+ if not self._static_build:
+ RepositoryMetadata.objects.exclude(repo_name__in=self.repos).delete()
# If no token is defined (for example in local development), then
# it does not make sense to pass the Authorization header. More
@@ -91,10 +95,13 @@ class HomeView(View):
def _get_repo_data(self) -> List[RepositoryMetadata]:
"""Build a list of RepositoryMetadata objects that we can use to populate the front page."""
# First off, load the timestamp of the least recently updated entry.
- last_update = (
- RepositoryMetadata.objects.values_list("last_updated", flat=True)
- .order_by("last_updated").first()
- )
+ if self._static_build:
+ last_update = None
+ else:
+ last_update = (
+ RepositoryMetadata.objects.values_list("last_updated", flat=True)
+ .order_by("last_updated").first()
+ )
# If we did not retrieve any results here, we should import them!
if last_update is None:
@@ -104,7 +111,7 @@ class HomeView(View):
api_repositories = self._get_api_data()
# Create all the repodata records in the database.
- return RepositoryMetadata.objects.bulk_create(
+ data = [
RepositoryMetadata(
repo_name=api_data["full_name"],
description=api_data["description"],
@@ -113,7 +120,12 @@ class HomeView(View):
language=api_data["language"],
)
for api_data in api_repositories.values()
- )
+ ]
+
+ if settings.env("STATIC_BUILD"):
+ return data
+ else:
+ return RepositoryMetadata.objects.bulk_create(data)
# If the data is stale, we should refresh it.
if (timezone.now() - last_update).seconds > self.repository_cache_ttl:
diff --git a/pydis_site/apps/resources/urls.py b/pydis_site/apps/resources/urls.py
index 19142081..10eda132 100644
--- a/pydis_site/apps/resources/urls.py
+++ b/pydis_site/apps/resources/urls.py
@@ -1,9 +1,25 @@
-from django.urls import path
+import typing
+from pathlib import Path
+
+from django_distill import distill_path
from pydis_site.apps.resources import views
app_name = "resources"
+
+
+def get_all_resources() -> typing.Iterator[dict[str, str]]:
+ """Yield a dict of all resource categories."""
+ for category in Path("pydis_site", "apps", "resources", "resources").iterdir():
+ yield {"category": category.name}
+
+
urlpatterns = [
- path("", views.ResourcesView.as_view(), name="index"),
- path("<str:category>/", views.ResourcesListView.as_view(), name="resources")
+ distill_path("", views.ResourcesView.as_view(), name="index"),
+ distill_path(
+ "<str:category>/",
+ views.ResourcesListView.as_view(),
+ name="resources",
+ distill_func=get_all_resources
+ ),
]
diff --git a/pydis_site/settings.py b/pydis_site/settings.py
index d7b87f33..d38c298b 100644
--- a/pydis_site/settings.py
+++ b/pydis_site/settings.py
@@ -25,7 +25,8 @@ from pydis_site.constants import GIT_SHA
env = environ.Env(
DEBUG=(bool, False),
SITE_DSN=(str, ""),
- BUILDING_DOCKER=(bool, False)
+ BUILDING_DOCKER=(bool, False),
+ STATIC_BUILD=(bool, False),
)
sentry_sdk.init(
@@ -65,10 +66,14 @@ else:
SECRET_KEY = env('SECRET_KEY')
# Application definition
-INSTALLED_APPS = [
+NON_STATIC_APPS = [
'pydis_site.apps.api',
- 'pydis_site.apps.home',
'pydis_site.apps.staff',
+] if not env("STATIC_BUILD") else []
+
+INSTALLED_APPS = [
+ *NON_STATIC_APPS,
+ 'pydis_site.apps.home',
'pydis_site.apps.resources',
'pydis_site.apps.content',
'pydis_site.apps.events',
@@ -86,14 +91,20 @@ INSTALLED_APPS = [
'django_simple_bulma',
'rest_framework',
'rest_framework.authtoken',
+
+ 'django_distill',
]
if not env("BUILDING_DOCKER"):
INSTALLED_APPS.append("django_prometheus")
+NON_STATIC_MIDDLEWARE = [
+ 'django_prometheus.middleware.PrometheusBeforeMiddleware',
+] if not env("STATIC_BUILD") else []
+
# Ensure that Prometheus middlewares are first and last here.
MIDDLEWARE = [
- 'django_prometheus.middleware.PrometheusBeforeMiddleware',
+ *NON_STATIC_MIDDLEWARE,
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
@@ -134,7 +145,7 @@ WSGI_APPLICATION = 'pydis_site.wsgi.application'
DATABASES = {
'default': env.db(),
'metricity': env.db('METRICITY_DB_URL'),
-}
+} if not env("STATIC_BUILD") else {}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
diff --git a/pydis_site/urls.py b/pydis_site/urls.py
index 891dbdcc..51ef4214 100644
--- a/pydis_site/urls.py
+++ b/pydis_site/urls.py
@@ -1,8 +1,9 @@
from django.contrib import admin
from django.urls import include, path
+from pydis_site import settings
-urlpatterns = (
+NON_STATIC_PATTERNS = [
path('admin/', admin.site.urls),
# External API ingress (over the net)
@@ -14,9 +15,15 @@ urlpatterns = (
# from wildcard matching all requests to `pages/...`.
path('', include('pydis_site.apps.redirect.urls')),
path('', include('django_prometheus.urls')),
+
+ path('staff/', include('pydis_site.apps.staff.urls', namespace='staff')),
+] if not settings.env("STATIC_BUILD") else []
+
+
+urlpatterns = (
+ *NON_STATIC_PATTERNS,
path('pages/', include('pydis_site.apps.content.urls', namespace='content')),
path('resources/', include('pydis_site.apps.resources.urls')),
path('events/', include('pydis_site.apps.events.urls', namespace='events')),
- path('staff/', include('pydis_site.apps.staff.urls', namespace='staff')),
path('', include('pydis_site.apps.home.urls', namespace='home')),
)
diff --git a/pyproject.toml b/pyproject.toml
index d0beb632..2f1322e3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,6 +21,7 @@ sentry-sdk = "~=0.19"
markdown = "~=3.3.4"
python-frontmatter = "~=1.0"
django-prometheus = "~=2.1"
+django-distill = "~=2.9.0"
[tool.poetry.dev-dependencies]
coverage = "~=5.0"
@@ -53,3 +54,4 @@ test = "coverage run manage.py test"
report = "coverage report -m"
lint = "pre-commit run --all-files"
precommit = "pre-commit install"
+static = "python manage.py distill-local build --traceback --force"
diff --git a/static-builds/README.md b/static-builds/README.md
new file mode 100644
index 00000000..b5cba896
--- /dev/null
+++ b/static-builds/README.md
@@ -0,0 +1,48 @@
+# Static Builds
+This directory includes all the needed information to build and deploy static previews of the site.
+
+Static deployments use [django-distill](https://github.com/meeb/django-distill) to build the static content.
+The content is built in GitHub Actions, and is fetched and deployed by Netlify.
+
+
+## Instructions
+These are the configuration instructions to get started with static deployments.
+They are split into two parts:
+
+- [Building The Site](#building-the-site)
+- [Deploying To Netlify](#deploying-to-netlify)
+
+
+### Building The Site
+To get started with building, you can use the following command:
+
+```shell
+python -m pip install httpx==0.19.0
+python manage.py distill-local build --traceback --force --collectstatic
+```
+
+Alternatively, you can use the [Dockerfile](/Dockerfile) and extract the build.
+
+Both output their builds to a `build/` directory.
+
+> Warning: If you are modifying the [build script](./netlify_build.py), make sure it is compatible with Python 3.8.
+
+Note: The build script uses [nightly.link](https://github.com/oprypin/nightly.link)
+to fetch the artifact with no verification.
+
+### Deploying To Netlify
+To deploy to Netlify, link your site's GitHub repository to a Netlify site, and use the following settings:
+
+Build Command:
+`python -m pip install httpx==0.19.0 && python static-builds/netlify_build.py`
+
+Publish Directory:
+`build`
+
+Environment Variables:
+- PYTHON_VERSION: 3.8
+
+
+Note that at this time, if you are deploying to netlify yourself, you won't have access to the
+fa-icons pack we are using, which will lead to many missing icons on your preview.
+You can either update the pack to one which will work on your domain, or you'll have to live with the missing icons.
diff --git a/static-builds/netlify_build.py b/static-builds/netlify_build.py
new file mode 100644
index 00000000..4e1e6106
--- /dev/null
+++ b/static-builds/netlify_build.py
@@ -0,0 +1,122 @@
+"""Build script to deploy project on netlify."""
+
+# WARNING: This file must remain compatible with python 3.8
+
+# This script performs all the actions required to build and deploy our project on netlify
+# It depends on the following packages, which are set in the netlify UI:
+# httpx == 0.19.0
+
+import os
+import time
+import typing
+import zipfile
+from pathlib import Path
+from urllib import parse
+
+import httpx
+
+API_URL = "https://api.github.com"
+NIGHTLY_URL = "https://nightly.link"
+OWNER, REPO = parse.urlparse(os.getenv("REPOSITORY_URL")).path.lstrip("/").split("/")[0:2]
+
+
+def get_build_artifact() -> typing.Tuple[int, str]:
+ """
+ Search for a build artifact, and return the result.
+
+ The return is a tuple of the check suite ID, and the URL to the artifacts.
+ """
+ print("Fetching build URL.")
+
+ if os.getenv("PULL_REQUEST").lower() == "true":
+ print(f"Fetching data for PR #{os.getenv('REVIEW_ID')}")
+
+ pull_url = f"{API_URL}/repos/{OWNER}/{REPO}/pulls/{os.getenv('REVIEW_ID')}"
+ pull_request = httpx.get(pull_url)
+ pull_request.raise_for_status()
+
+ commit_sha = pull_request.json()["head"]["sha"]
+
+ workflows_params = parse.urlencode({
+ "event": "pull_request",
+ "per_page": 100
+ })
+
+ else:
+ commit_sha = os.getenv("COMMIT_REF")
+
+ workflows_params = parse.urlencode({
+ "event": "push",
+ "per_page": 100
+ })
+
+ print(f"Fetching action data for commit {commit_sha}")
+
+ workflows = httpx.get(f"{API_URL}/repos/{OWNER}/{REPO}/actions/runs?{workflows_params}")
+ workflows.raise_for_status()
+
+ for run in workflows.json()["workflow_runs"]:
+ if run["name"] == "Build & Publish Static Preview" and commit_sha == run["head_sha"]:
+ print(f"Found action for this commit: {run['id']}\n{run['html_url']}")
+ break
+ else:
+ raise Exception("Could not find the workflow run for this event.")
+
+ polls = 0
+ while polls <= 20:
+ if run["status"] != "completed":
+ print("Action isn't ready, sleeping for 10 seconds.")
+ polls += 1
+ time.sleep(10)
+
+ elif run["conclusion"] != "success":
+ print("Aborting build due to a failure in a previous CI step.")
+ exit(0)
+
+ else:
+ print(f"Found artifact URL:\n{run['artifacts_url']}")
+ return run["check_suite_id"], run["artifacts_url"]
+
+ _run = httpx.get(run["url"])
+ _run.raise_for_status()
+ run = _run.json()
+
+ raise Exception("Polled for the artifact workflow, but it was not ready in time.")
+
+
+def download_artifact(suite_id: int, url: str) -> None:
+ """Download a build artifact from `url`, and unzip the content."""
+ print("Fetching artifact data.")
+
+ artifacts = httpx.get(url)
+ artifacts.raise_for_status()
+ artifacts = artifacts.json()
+
+ if artifacts["total_count"] == "0":
+ raise Exception(f"No artifacts were found for this build, aborting.\n{url}")
+
+ for artifact in artifacts["artifacts"]:
+ if artifact["name"] == "static-build":
+ print("Found artifact with build.")
+ break
+ else:
+ raise Exception("Could not find an artifact with the expected name.")
+
+ artifact_url = f"{NIGHTLY_URL}/{OWNER}/{REPO}/suites/{suite_id}/artifacts/{artifact['id']}"
+ zipped_content = httpx.get(artifact_url)
+ zipped_content.raise_for_status()
+
+ zip_file = Path("temp.zip")
+ zip_file.write_bytes(zipped_content.read())
+
+ with zipfile.ZipFile(zip_file, "r") as zip_ref:
+ zip_ref.extractall("build")
+
+ zip_file.unlink(missing_ok=True)
+
+ print("Wrote artifact content to target directory.")
+
+
+if __name__ == "__main__":
+ print("Build started")
+ download_artifact(*get_build_artifact())