  cache:
    paths:
      - .cache/pip
      - .cache/pre-commit
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"
  before_script:
    - pip install pre-commit
  script:
    - pre-commit run -a
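
# Run the test suite through tox and publish a JUnit report (test-report.xml) that GitLab picks up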
  cache:
    paths:
      - .cache/pip
  artifacts:
    when: always
    reports:
      junit:
        - test-report.xml
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    ARKINDEX_API_SCHEMA_URL: schema.yml
  before_script:
    - pip install tox
    # Download OpenAPI schema from last backend build
    # Use internal S3 bucket URL to avoid cache issues
    - curl https://teklia-assets-release.s3.eu-west-3.amazonaws.com/arkindex/openapi.yml > schema.yml
  script:
    - tox -- --junitxml=test-report.xml --durations=50
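
# Generate a demo worker from the cookiecutter template (default values) and run its own linting and tests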
test-cookiecutter:
  stage: test
  cache:
    paths:
      - .cache/pip
      - .cache/pre-commit
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"
    ARKINDEX_API_SCHEMA_URL: schema.yml
  before_script:
    - pip install cookiecutter tox pre-commit
    # Configure git to be able to commit in the hook
    - git config --global user.email "crasher@teklia.com"
    - git config --global user.name "Crash Test"
  script:
    - cookiecutter --no-input .
    - cd worker-demo
    - find
    - tox
    - pre-commit run -a
  # Store demo build for later docker build
  artifacts:
    paths:
      - worker-demo/
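
# Check that the Docker image of the generated demo worker builds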
build-cookiecutter:
  image: docker:19.03.1
  stage: build
  services:
    - docker:dind
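  # The docker:dind service provides the Docker daemon, reached by the docker CLI through DOCKER_HOST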
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_HOST: tcp://docker:2375/
  # Ensure artifacts are available
  dependencies:
    - test-cookiecutter
  script:
    - cd worker-demo
    - docker build .
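
# Publish the package to PyPI on tagged releases only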
pypi-publication:
  image: python:3
  stage: release
  only:
    - tags
  environment:
    name: pypi
    url: https://pypi.org/project/arkindex-base-worker
  before_script:
    - pip install twine setuptools wheel
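    # The echo lines below assemble a ~/.pypirc equivalent to:
    #   [distutils]
    #   index-servers =
    #    pypi
    #   [pypi]
    #   repository=https://upload.pypi.org/legacy/
    #   username=<value of $PYPI_DEPLOY_USERNAME>
    #   password=<value of $PYPI_DEPLOY_PASSWORD>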
    - echo "[distutils]" > ~/.pypirc
    - echo "index-servers =" >> ~/.pypirc
    - echo " pypi" >> ~/.pypirc
    - echo "[pypi]" >> ~/.pypirc
    - echo "repository=https://upload.pypi.org/legacy/" >> ~/.pypirc
    - echo "username=$PYPI_DEPLOY_USERNAME" >> ~/.pypirc
    - echo "password=$PYPI_DEPLOY_PASSWORD" >> ~/.pypirc
  script:
    - python setup.py sdist bdist_wheel
    - twine upload dist/* -r pypi
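
# Hidden job template shared by the documentation jobs below through `extends: .docs`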
.docs:
  artifacts:
    paths:
      - public
  before_script:
    - pip install -e .[docs]
  script:

docs-build:
  extends: .docs
  stage: build
  # Test job outside of tags to ensure the docs still can build before merging
  # Does not use the `pages` name, therefore will be ignored by GitLab Pages
  except:
    - tags
    - schedules
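
# Publish the built documentation through GitLab Pages; only a job named `pages` is deployed by GitLab Pages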
pages:
  extends: .docs
  stage: release
  only:
    - tags

  # Deploy a preview of the documentation for this branch to surge.sh
  stage: release
  dependencies:
    - docs-build
  before_script:
    - npm install -g surge
  except:
    - master
    - tags
    - schedules
  environment:
    name: ${CI_COMMIT_REF_SLUG}
    url: https://${CI_COMMIT_REF_SLUG}-base-worker-arkindex.surge.sh
    on_stop: docs-stop-surge
  script:
    - surge public ${CI_ENVIRONMENT_URL}
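
# Tear down the surge.sh preview when its environment is stopped (e.g. the branch is deleted)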
docs-stop-surge:
  stage: release
  when: manual
  # Do not try to checkout the branch if it was deleted
  variables:
    GIT_STRATEGY: none
  except:
    - master
    - tags
    - schedules
  environment:
    name: ${CI_COMMIT_REF_SLUG}
    url: https://${CI_COMMIT_REF_SLUG}-base-worker-arkindex.surge.sh
    action: stop
  before_script:
    - npm install -g surge
  script:
    - surge teardown ${CI_ENVIRONMENT_URL}
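
# Generate release notes with the Teklia devops image when a tag is pushed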
release-notes:
  stage: release
  image: registry.gitlab.teklia.com/infra/devops:latest
  only:
    - tags
  script:
    - devops release-notes
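
# Update the pinned Python dependencies in the listed requirements files with the devops helper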
  image: registry.gitlab.teklia.com/infra/devops:latest
  script:
    - devops python-deps requirements.txt tests-requirements.txt docs-requirements.txt "worker-{{cookiecutter.slug}}/requirements.txt"