stages:
  - test
  - build
  - release

# Run pre-commit checks on the whole repository
lint:
  image: python:3

  cache:
    paths:
      - .cache/pip
      - .cache/pre-commit

  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"

  except:
    - schedules

  before_script:
    - pip install pre-commit

  script:
    - pre-commit run -a

test:
  image: python:3
  stage: test

  cache:
    paths:
      - .cache/pip

  artifacts:
    when: always
    reports:
      junit:
        - test-report.xml

  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    ARKINDEX_API_SCHEMA_URL: schema.yml

  except:
    - schedules

  before_script:
    - pip install tox
    # Download OpenAPI schema from last backend build
    # Use internal S3 bucket URL to avoid cache issues
    - curl https://teklia-assets-release.s3.eu-west-3.amazonaws.com/arkindex/openapi.yml > schema.yml

  script:
    - tox -e arkindex_worker -- --junitxml=test-report.xml --durations=50

# Generate a demo worker from the cookiecutter template and run its tests
test-cookiecutter:
  image: python:slim
  stage: test

  cache:
    paths:
      - .cache/pip
      - .cache/pre-commit

  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"
    ARKINDEX_API_SCHEMA_URL: schema.yml
    DEBIAN_FRONTEND: noninteractive

  except:
    - schedules

  before_script:
    - pip install cookiecutter tox pre-commit
    # Install curl and git
    - apt-get update -q -y && apt-get install -q -y --no-install-recommends curl git
    # Configure git to be able to commit in the hook
    - git config --global user.email "crasher@teklia.com"
    - git config --global user.name "Crash Test"

  script:
    - cookiecutter --no-input .
    - cd worker-demo
    # List the generated project files
    - find
    - tox
    - pre-commit run -a

  # Store demo build for later docker build
  artifacts:
    paths:
      - worker-demo/

# Build the Docker image of the generated demo worker
build-cookiecutter:
  image: docker:24.0.6
  stage: build
  services:
    - docker:dind

  except:
    - schedules

  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_HOST: tcp://docker:2375/

  # Ensure artifacts are available
  dependencies:
    - test-cookiecutter

  script:
    - cd worker-demo
    - docker build .
# Publish the package on PyPI when a tag is pushed
pypi-publication:
  image: python:3
  stage: release

  only:
    - tags

  environment:
    name: pypi
    url: https://pypi.org/project/arkindex-base-worker

  before_script:
    - pip install build twine

  script:
    - python -m build
    - twine upload dist/* -r pypi

# Hidden base job to build the documentation with tox
.docs:
  image: python:3

  artifacts:
    paths:
      - public

  before_script:
    - pip install tox

  script:
    - tox -e doc

docs-build:
  extends: .docs
  stage: build

  # Test job outside of tags to ensure the docs can still build before merging
  # Does not use the `pages` name, therefore will be ignored by GitLab Pages
  except:
    - tags
    - schedules

# Publish the documentation through GitLab Pages on master and tags
pages:
  extends: .docs
  stage: release

  only:
    - master
    - tags

  except:
    - schedules

# Deploy a documentation preview for branches on surge.sh
docs-deploy:
  image: node:18
  stage: release

  dependencies:
    - docs-build

  before_script:
    - npm install -g surge

  except:
    - master
    - tags
    - schedules

  environment:
    name: ${CI_COMMIT_REF_SLUG}
    url: https://${CI_COMMIT_REF_SLUG}-base-worker-arkindex.surge.sh
    on_stop: docs-stop-surge

  script:
    - surge public ${CI_ENVIRONMENT_URL}

# Tear down the surge.sh preview when its environment is stopped
docs-stop-surge:
  image: node:18
  stage: release
  when: manual

  # Do not try to checkout the branch if it was deleted
  variables:
    GIT_STRATEGY: none

  except:
    - master
    - tags
    - schedules

  environment:
    name: ${CI_COMMIT_REF_SLUG}
    url: https://${CI_COMMIT_REF_SLUG}-base-worker-arkindex.surge.sh
    action: stop

  before_script:
    - npm install -g surge

  script:
    - surge teardown ${CI_ENVIRONMENT_URL}

# Publish release notes on tags using the internal devops tooling
release-notes:
  stage: release
  image: registry.gitlab.teklia.com/infra/devops:latest

  only:
    - tags

  script:
    - devops release-notes

# Bump pinned Python dependencies on scheduled pipelines
bump-python-deps:
  stage: release
  image: registry.gitlab.teklia.com/infra/devops:latest

  only:
    - schedules

  script:
    - devops python-deps requirements.txt tests-requirements.txt docs-requirements.txt "worker-{{cookiecutter.slug}}/requirements.txt"