author    Sebastiaan Zeeff <[email protected]>  2020-11-15 00:24:51 +0100
committer Sebastiaan Zeeff <[email protected]>  2020-11-15 01:19:42 +0100
commit    f4a65fc28e223c907ac78b3b31011d2532f93407 (patch)
tree      1318ae57414fa28fb339b5b780e0fbe8fadfdbed
parent    Merge pull request #422 from python-discord/get_rid_of_uwsgi (diff)
Migrate to GH Actions & GH Container Registry
I've migrated the site to GitHub Actions and the GitHub Container Registry. This also means that coverage results will be pushed to coveralls.io. This commit also removes the pretty useless CodeQL analysis action.
-rw-r--r--  .flake8                                   2
-rw-r--r--  .github/workflows/codeql-analysis.yml    32
-rw-r--r--  .github/workflows/lint-test-build.yaml  157
-rw-r--r--  Pipfile                                    2
-rw-r--r--  Pipfile.lock                              56
-rw-r--r--  azure-pipelines.yml                      103
6 files changed, 209 insertions, 143 deletions
diff --git a/.flake8 b/.flake8
index bcd26d9e..a8d6036f 100644
--- a/.flake8
+++ b/.flake8
@@ -3,7 +3,7 @@ max-line-length=100
docstring-convention=all
import-order-style=pycharm
application_import_names=pydis_site
-exclude=__pycache__, venv, .venv, **/migrations/**
+exclude=__pycache__, venv, .venv, **/migrations/**, .cache/**
ignore=
B311,W503,E226,S311,T000
# Missing Docstrings
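
The new `.cache/**` entry keeps flake8 out of the cache directories that the workflow below creates inside the checkout (the PYTHONUSERBASE and PRE_COMMIT_HOME paths under .cache/). A minimal local sanity check, assuming flake8 is installed and run from the repository root, where it picks up this .flake8 automatically:

    # Hypothetical check: directories under .cache/ should no longer be linted
    # once this exclude is in place.
    mkdir -p .cache/py-user-base .cache/pre-commit-cache
    flake8    # lints the project, skipping venv, migrations and .cache
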
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
deleted file mode 100644
index 8760b35e..00000000
--- a/.github/workflows/codeql-analysis.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: "Code scanning - action"
-
-on:
- push:
- pull_request:
- schedule:
- - cron: '0 12 * * *'
-
-jobs:
- CodeQL-Build:
-
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v2
- with:
- fetch-depth: 2
-
- - run: git checkout HEAD^2
- if: ${{ github.event_name == 'pull_request' }}
-
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v1
- with:
- languages: python
-
- - name: Autobuild
- uses: github/codeql-action/autobuild@v1
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v1
diff --git a/.github/workflows/lint-test-build.yaml b/.github/workflows/lint-test-build.yaml
new file mode 100644
index 00000000..0d83c45e
--- /dev/null
+++ b/.github/workflows/lint-test-build.yaml
@@ -0,0 +1,157 @@
+name: Lint & Test
+
+on:
+ push:
+ branches:
+ - master
+ # We use pull_request_target as we get PRs from
+ # forks, but need to be able to add annotations
+ # for our flake8 step.
+ pull_request_target:
+
+
+jobs:
+ lint-test:
+ runs-on: ubuntu-latest
+ env:
+ # Configure pip to cache dependencies and do a user install
+ PIP_NO_CACHE_DIR: false
+ PIP_USER: 1
+
+ # Hide the graphical elements from pipenv's output
+ PIPENV_HIDE_EMOJIS: 1
+ PIPENV_NOSPIN: 1
+
+ # Make sure pipenv does not try to reuse an environment it's running in
+ PIPENV_IGNORE_VIRTUALENVS: 1
+
+ # Specify explicit paths for python dependencies and the pre-commit
+ # environment so we know which directories to cache
+ PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
+ PRE_COMMIT_HOME: ${{ github.workspace }}/.cache/pre-commit-cache
+
+ steps:
+ - name: Add custom PYTHONUSERBASE to PATH
+ run: echo '${{ env.PYTHONUSERBASE }}/bin/' >> $GITHUB_PATH
+
+ # We don't want to persist credentials, as our GitHub Action
+ # may be run when a PR is made from a fork.
+ - name: Checkout repository
+ uses: actions/checkout@v2
+ with:
+ persist-credentials: false
+
+ - name: Setup python
+ id: python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.9'
+
+ # This step caches our Python dependencies. To make sure we
+ # only restore a cache when the dependencies, the python version,
+ # the runner operating system, and the dependency location haven't
+ # changed, we create a cache key that is a composite of those states.
+ #
+ # Only when the context is exactly the same, we will restore the cache.
+ - name: Python Dependency Caching
+ uses: actions/cache@v2
+ id: python_cache
+ with:
+ path: ${{ env.PYTHONUSERBASE }}
+ key: "python-0-${{ runner.os }}-${{ env.PYTHONUSERBASE }}-\
+ ${{ steps.python.outputs.python-version }}-\
+ ${{ hashFiles('./Pipfile', './Pipfile.lock') }}"
+
+ # Install our dependencies if we did not restore a dependency cache
+ - name: Install dependencies using pipenv
+ if: steps.python_cache.outputs.cache-hit != 'true'
+ run: |
+ pip install pipenv
+ pipenv install --dev --deploy --system
+
+ # This step caches our pre-commit environment. To make sure we
+ # do create a new environment when our pre-commit setup changes,
+ # we create a cache key based on relevant factors.
+ - name: Pre-commit Environment Caching
+ uses: actions/cache@v2
+ with:
+ path: ${{ env.PRE_COMMIT_HOME }}
+ key: "precommit-0-${{ runner.os }}-${{ env.PRE_COMMIT_HOME }}-\
+ ${{ steps.python.outputs.python-version }}-\
+ ${{ hashFiles('./.pre-commit-config.yaml') }}"
+
+ # We will not run `flake8` here, as we will use a separate flake8
+ # action. As pre-commit does not support user installs, we set
+ # PIP_USER=0 to not do a user install.
+ - name: Run pre-commit hooks
+ run: export PIP_USER=0; SKIP=flake8 pre-commit run --all-files
+
+ # This step requires `pull_request_target`, as adding annotations
+ # requires "write" permissions to the repo.
+ - name: Run flake8
+ uses: julianwachholz/flake8-action@v1
+ with:
+ checkName: lint-test
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Run database using docker-compose
+ run: docker-compose run -d -p 7777:5432 --name pydis_web postgres
+
+ - name: Migrations and run tests with coverage.py
+ run: |
+ python manage.py makemigrations --check
+ python manage.py migrate
+ coverage run manage.py test --no-input
+ coverage report -m
+ env:
+ CI: GHA
+ DATABASE_URL: postgres://pysite:pysite@localhost:7777/pysite
+ METRICITY_DB_URL: postgres://pysite:pysite@localhost:7777/metricity
+
+ # This step will publish the coverage reports to coveralls.io and
+ # print a "job" link in the output of the GitHub Action
+ - name: Publish coverage report to coveralls.io
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: coveralls
+
+ - name: Tear down docker-compose containers
+ run: docker-compose stop
+ if: ${{ always() }}
+
+ build-and-push:
+ needs: lint-test
+ if: github.event_name != 'pull_request_target' && github.ref == 'refs/heads/master'
+ runs-on: ubuntu-latest
+
+ steps:
+ # Create a commit SHA-based tag for the container repositories
+ - name: Create SHA Container Tag
+ id: sha_tag
+ run: |
+ tag=$(cut -c 1-7 <<< $GITHUB_SHA)
+ echo "::set-output name=tag::$tag"
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1
+
+ - name: Login to Github Container Registry
+ uses: docker/login-action@v1
+ with:
+ registry: ghcr.io
+ username: ${{ secrets.GHCR_USER }}
+ password: ${{ secrets.GHCR_TOKEN }}
+
+ - name: Build and push
+ uses: docker/build-push-action@v2
+ with:
+ context: .
+ file: ./Dockerfile
+ push: true
+ cache-from: type=registry,ref=ghcr.io/python-discord/site:latest
+ tags: |
+ ghcr.io/python-discord/site:latest
+ ghcr.io/python-discord/site:${{ steps.sha_tag.outputs.tag }}
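
The database and test steps of the lint-test job translate directly to a local run. A sketch, assuming docker-compose and the pipenv-managed dev dependencies are already installed, and that the postgres service in docker-compose.yml uses the pysite/pysite credentials the workflow expects:

    # Sketch of the workflow's database and test steps, run by hand.
    docker-compose run -d -p 7777:5432 --name pydis_web postgres

    export DATABASE_URL=postgres://pysite:pysite@localhost:7777/pysite
    export METRICITY_DB_URL=postgres://pysite:pysite@localhost:7777/metricity

    python manage.py makemigrations --check   # fail fast on missing migrations
    python manage.py migrate
    coverage run manage.py test --no-input
    coverage report -m

    docker-compose stop                       # tear the database back down
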
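For reference, the "Create SHA Container Tag" step simply takes the first seven characters of the commit SHA. Using this very commit's SHA as a stand-in for the GITHUB_SHA value that Actions injects at runtime, the step behaves like this:

    # Illustration only: GITHUB_SHA is provided by GitHub Actions; here we
    # substitute the SHA of this commit to show the resulting tag.
    GITHUB_SHA=f4a65fc28e223c907ac78b3b31011d2532f93407
    tag=$(cut -c 1-7 <<< "$GITHUB_SHA")
    echo "$tag"    # -> f4a65fc

The build-and-push job then publishes the image as both ghcr.io/python-discord/site:latest and ghcr.io/python-discord/site:f4a65fc.
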
diff --git a/Pipfile b/Pipfile
index 4436117b..6bca3eb9 100644
--- a/Pipfile
+++ b/Pipfile
@@ -35,7 +35,7 @@ flake8-todo = "~=0.7"
mccabe = "~=0.6.1"
pep8-naming = "~=0.9"
pre-commit = "~=2.1"
-unittest-xml-reporting = "~=3.0"
+coveralls = "~=2.1"
[requires]
python_version = "3.9"
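
Swapping unittest-xml-reporting for coveralls matches the CI change: there is no Azure test-results publisher to feed JUnit XML to any more, and the coveralls client uploads the coverage.py data instead. Roughly what the workflow's publish step amounts to, assuming a .coverage file already exists from the test run:

    # Sketch of the coverage publish step; in CI the token comes from
    # secrets.GITHUB_TOKEN in the workflow above.
    pip install pipenv
    pipenv install --dev --deploy --system   # pulls in coveralls ~=2.1 from the Pipfile
    export GITHUB_TOKEN="..."                # repository token
    coveralls                                # uploads .coverage results to coveralls.io
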
diff --git a/Pipfile.lock b/Pipfile.lock
index dbc81fbc..f1572032 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "7db8961e0ecd3e2f643fb79e56f84354c263e6944cc1dbdb114bef8a134540ac"
+ "sha256": "8def72734d22e0ecbf17dfa6f30b829b7fd556f2bff088b9bda63d7e1887beee"
},
"pipfile-spec": 6,
"requires": {
@@ -538,6 +538,13 @@
],
"version": "==1.6.2"
},
+ "certifi": {
+ "hashes": [
+ "sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd",
+ "sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"
+ ],
+ "version": "==2020.11.8"
+ },
"cfgv": {
"hashes": [
"sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d",
@@ -546,6 +553,13 @@
"markers": "python_full_version >= '3.6.1'",
"version": "==3.2.0"
},
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
"coverage": {
"hashes": [
"sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516",
@@ -586,6 +600,14 @@
"index": "pypi",
"version": "==5.3"
},
+ "coveralls": {
+ "hashes": [
+ "sha256:4430b862baabb3cf090d36d84d331966615e4288d8a8c5957e0fd456d0dd8bd6",
+ "sha256:b3b60c17b03a0dee61952a91aed6f131e0b2ac8bd5da909389c53137811409e1"
+ ],
+ "index": "pypi",
+ "version": "==2.1.2"
+ },
"distlib": {
"hashes": [
"sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb",
@@ -593,6 +615,12 @@
],
"version": "==0.3.1"
},
+ "docopt": {
+ "hashes": [
+ "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
+ ],
+ "version": "==0.6.2"
+ },
"filelock": {
"hashes": [
"sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
@@ -701,6 +729,14 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.5.9"
},
+ "idna": {
+ "hashes": [
+ "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
+ "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
+ ],
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
+ "version": "==2.10"
+ },
"mccabe": {
"hashes": [
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
@@ -781,6 +817,14 @@
"index": "pypi",
"version": "==5.3.1"
},
+ "requests": {
+ "hashes": [
+ "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8",
+ "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"
+ ],
+ "index": "pypi",
+ "version": "==2.25.0"
+ },
"six": {
"hashes": [
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
@@ -820,13 +864,13 @@
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.2"
},
- "unittest-xml-reporting": {
+ "urllib3": {
"hashes": [
- "sha256:7bf515ea8cb244255a25100cd29db611a73f8d3d0aaf672ed3266307e14cc1ca",
- "sha256:984cebba69e889401bfe3adb9088ca376b3a1f923f0590d005126c1bffd1a695"
+ "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08",
+ "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"
],
- "index": "pypi",
- "version": "==3.0.4"
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
+ "version": "==1.26.2"
},
"virtualenv": {
"hashes": [
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index 730cffb6..00000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,103 +0,0 @@
-# https://aka.ms/yaml
-
-jobs:
- - job: test
- displayName: 'Test & Lint'
- pool:
- vmImage: ubuntu-16.04
-
- variables:
- PIP_CACHE_DIR: .cache/pip
- PRE_COMMIT_HOME: $(Pipeline.Workspace)/pre-commit-cache
-
- steps:
- - task: UsePythonVersion@0
- displayName: 'Set Python Version'
- name: PythonVersion
- inputs:
- versionSpec: '3.9.x'
- addToPath: true
-
- - task: DockerCompose@0
- displayName: 'Setup Database'
- inputs:
- action: Run a specific service
- dockerComposeFile: docker-compose.yml
- projectName: pydis_web
- serviceName: postgres
- ports: '7777:5432'
-
- - script: |
- pip install pipenv
- pipenv install --dev --system
- pip install flake8-formatter-junit-xml
- displayName: 'Install Project Environment'
-
- # Create an executable shell script which replaces the original pipenv binary.
- # The shell script ignores the first argument and executes the rest of the args as a command.
- # It makes the `pipenv run flake8` command in the pre-commit hook work by circumventing
- # pipenv entirely, which is too dumb to know it should use the system interpreter rather than
- # creating a new venv.
- - script: |
- printf '%s\n%s' '#!/bin/bash' '"${@:2}"' > $(PythonVersion.pythonLocation)/bin/pipenv \
- && chmod +x $(PythonVersion.pythonLocation)/bin/pipenv
- displayName: 'Mock pipenv binary'
-
- - task: Cache@2
- displayName: 'Restore pre-commit environment'
- inputs:
- key: pre-commit | "$(PythonVersion.pythonLocation)" | .pre-commit-config.yaml
- restoreKeys: |
- pre-commit | "$(PythonVersion.pythonLocation)"
- path: $(PRE_COMMIT_HOME)
-
- # flake8 runs so it can generate the XML output. pre-commit will run it again to show stdout.
- # flake8 standalone runs first to avoid any fixes pre-commit hooks may make.
- - script: flake8 --format junit-xml --output-file TEST-lint.xml; pre-commit run --all-files
- displayName: 'Run pre-commit hooks'
-
- - script: |
- python3 manage.py makemigrations --check
- python3 manage.py migrate
- coverage run \
- manage.py test \
- --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner \
- --no-input
- env:
- CI: azure
- DATABASE_URL: postgres://pysite:pysite@localhost:7777/pysite
- METRICITY_DB_URL: postgres://pysite:pysite@localhost:7777/metricity
- displayName: 'Run Tests'
-
- - script: coverage report -m && coverage xml
- displayName: 'Generate Coverage Reports'
-
- - task: PublishTestResults@2
- condition: succeededOrFailed()
- displayName: 'Publish Test & Linting Results'
- inputs:
- testResultsFiles: '**/TEST-*.xml'
- testRunTitle: 'Site Test Results'
-
- - task: PublishCodeCoverageResults@1
- displayName: 'Publish Coverage Results'
- condition: succeededOrFailed()
- inputs:
- codeCoverageTool: Cobertura
- summaryFileLocation: '**/coverage.xml'
-
- - job: build
- displayName: 'Build & Push Container'
- dependsOn: test
- condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.SourceBranch'], 'refs/heads/master'))
-
- steps:
- - task: Docker@2
- displayName: 'Build & Push Container'
- inputs:
- containerRegistry: 'DockerHub'
- repository: 'pythondiscord/site'
- command: 'buildAndPush'
- Dockerfile: 'docker/Dockerfile'
- buildContext: '.'
- tags: 'latest'
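
For the record, the deleted "Mock pipenv binary" step wrote a tiny replacement script over the real pipenv executable (reconstructed below with an explanatory comment added). That is why `pipenv run flake8` in the pre-commit hook still worked on Azure: argument 1 (`run`) is dropped and the remainder (`flake8`) runs against the system interpreter directly.

    #!/bin/bash
    # Ignore the first argument and execute the rest as a command,
    # so `pipenv run flake8` becomes just `flake8`.
    "${@:2}"
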