Commit 00ea286b authored by Yoann Schneider

Merge branch 'bootstrap' into 'master'

Bootstrap

Closes #1

See merge request !1
parents b778fc1a 1d4970fa
Pipeline #184761 passed
---
version: 2
workers:
  - slug: file-import
    name: File Import
    type: import
    description: descriptions/file-import.md
{
"slug": "file-import",
"name": "File Import",
"description": "Worker to import files in various formats in Arkindex",
"worker_type": "import",
"author": "Teklia",
"email": "contact@teklia.com"
}
.tox
.git
*.pyc
*.egg-info/
.tox/
stages:
  - test
  - build
  - release

# GitLab provides a template to ensure pipelines run only for branches and tags, not for merge requests.
# This prevents duplicate pipelines in merge requests.
# See https://docs.gitlab.com/ee/ci/troubleshooting.html#job-may-allow-multiple-pipelines-to-run-for-a-single-action
include:
  - template: 'Workflows/Branch-Pipelines.gitlab-ci.yml'

variables:
  VERSION: commit-$CI_COMMIT_SHORT_SHA
  DEBIAN_FRONTEND: non-interactive

test:
  image: python:slim
  stage: test
  cache:
    paths:
      - .cache/pip
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    ARKINDEX_API_SCHEMA_URL: schema.yml
  before_script:
    - pip install tox
    # Install curl
    - apt-get update -q -y && apt-get install -q -y --no-install-recommends curl
    # Download OpenAPI schema from last backend build
    - curl https://assets.teklia.com/arkindex/openapi.yml > schema.yml
  except:
    - schedules
  script:
    - tox -- --junitxml=test-report.xml --durations=50

lint:
  image: python:slim
  cache:
    paths:
      - .cache/pip
      - .cache/pre-commit
  variables:
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    PRE_COMMIT_HOME: "$CI_PROJECT_DIR/.cache/pre-commit"
  before_script:
    - pip install pre-commit
    # Install git
    - apt-get update -q -y && apt-get install -q -y --no-install-recommends git
  except:
    - schedules
  script:
    - pre-commit run -a

docker-build:
  stage: build
  image: docker:24.0.6
  services:
    - docker:dind
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_HOST: tcp://docker:2375/
  rules:
    # Never run on scheduled pipelines
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
      when: never
    # Use commit tag when running on tagged commit
    - if: $CI_COMMIT_TAG
      variables:
        VERSION: $CI_COMMIT_TAG
    - when: on_success
  script:
    - ci/build.sh

release-notes:
  stage: release
  image: registry.gitlab.teklia.com/infra/devops:latest
  # Only run on tags
  only:
    - tags
  script:
    - devops release-notes

bump-python-deps:
  stage: release
  image: registry.gitlab.teklia.com/infra/devops:latest
  only:
    - schedules
  script:
    - devops python-deps pyproject.toml

publish-worker:
  stage: release
  allow_failure: true
  image: registry.gitlab.teklia.com/arkindex/cli:latest
  script:
    - arkindex -p "$ARKINDEX_INSTANCE" --gitlab-secure-file arkindex-cli.yaml worker publish "$CI_REGISTRY_IMAGE:$VERSION"
  rules:
    # Never run on scheduled pipelines
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
      when: never
    # Use commit tag when running on tagged commit
    - if: $CI_COMMIT_TAG
      variables:
        VERSION: $CI_COMMIT_TAG
    - when: on_success
  parallel:
    matrix:
      - ARKINDEX_INSTANCE:
          # Publish worker on https://ce-preprod.arkindex.teklia.com
          - ce-preprod
          # Publish worker on https://preprod.arkindex.teklia.com
          - preprod
          # Publish worker on https://demo.arkindex.org
          - demo
          # Publish worker on https://arkindex.teklia.com
          - prod
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.3.1
    hooks:
      # Run the linter.
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
      # Run the formatter.
      - id: ruff-format
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: check-ast
      - id: check-docstring-first
      - id: check-executables-have-shebangs
      - id: check-merge-conflict
      - id: check-symlinks
      - id: debug-statements
      - id: trailing-whitespace
      - id: check-yaml
        args: [--allow-multiple-documents]
      - id: mixed-line-ending
      - id: name-tests-test
        args: ['--django']
      - id: check-json
      - id: check-toml
      - id: requirements-txt-fixer
  - repo: https://github.com/codespell-project/codespell
    rev: v2.2.6
    hooks:
      - id: codespell
        args: ['--write-changes']
  - repo: meta
    hooks:
      - id: check-useless-excludes
  - repo: https://github.com/shellcheck-py/shellcheck-py
    rev: v0.10.0.1
    hooks:
      - id: shellcheck
FROM python:3.11-slim
WORKDIR /src
# Install worker as a package
COPY worker_file_import worker_file_import
COPY pyproject.toml ./
RUN pip install . --no-cache-dir
Makefile 0 → 100644
.PHONY: release
release:
	# Grep the version from pyproject.toml, squeeze multiple spaces, delete double and single quotes, get 3rd val.
	# This command tolerates multiple whitespace sequences around the version number.
	$(eval version:=$(shell grep -m 1 version pyproject.toml | tr -s ' ' | tr -d '"' | tr -d "'" | cut -d' ' -f3))
	echo Releasing version $(version)
	git commit pyproject.toml -m "Version $(version)"
	git tag $(version)
	git push origin master $(version)
# File Import
Worker to import files in various formats in Arkindex
## Development
For development and testing purposes, it may be useful to install the worker as an editable package with pip.
```shell
pip3 install -e .
```
## Linter
Code syntax is analyzed before the code is submitted.\
To run the linter tool suite, you may use pre-commit.
```shell
pip install pre-commit
pre-commit run -a
```
## Run tests
Tests are executed with tox using [pytest](https://pytest.org).
```shell
pip install tox
tox
```
To recreate the tox virtual environment (e.g. after a dependency update), you may run `tox -r`.
#!/bin/sh -e
# Build the tasks Docker image.
# Requires CI_PROJECT_DIR and CI_REGISTRY_IMAGE to be set.
# Will automatically login to a registry if CI_REGISTRY, CI_REGISTRY_USER and CI_REGISTRY_PASSWORD are set.
# Will only push an image if those registry variables are set.
if [ -z "$VERSION" ] || [ -z "$CI_PROJECT_DIR" ] || [ -z "$CI_REGISTRY_IMAGE" ]; then
  echo Missing environment variables
  exit 1
fi

IMAGE_TAG="$CI_REGISTRY_IMAGE:$VERSION"

cd "$CI_PROJECT_DIR"

docker build -f Dockerfile . -t "$IMAGE_TAG"

if [ -n "$CI_REGISTRY" ] && [ -n "$CI_REGISTRY_USER" ] && [ -n "$CI_REGISTRY_PASSWORD" ]; then
  echo "$CI_REGISTRY_PASSWORD" | docker login -u "$CI_REGISTRY_USER" --password-stdin "$CI_REGISTRY"
  docker push "$IMAGE_TAG"
else
  echo "Missing environment variables to log in to the container registry…"
fi
Worker to import files in various formats in Arkindex
#### Configuration
##### Elements
<!-- Describe the elements that should be passed as input to this worker. -->
The **File Import** worker processes *folder|page|word|...* elements.
##### Parameters
The parameters of the configuration used by this worker are described in the table below. To learn more about the different types of parameters, head to the [workers documentation](https://workers.arkindex.org/contents/workers/yaml/#setting-up-user-configurable-parameters).
<!-- Port the content of the section in `.arkindex.yml`. You can add more details about the parameters in the 'Comment' column or below. -->
| Description | Type | Required | Comment |
| ----------- | ---- | :------: | ------- |
| xxxx | xxxx | ☒/☐ | xxxx |
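As a purely hypothetical illustration of how such a parameter could be consumed in the worker code: the parameter name below is made up, and the sketch assumes the base worker exposes the merged configuration as a dict-like `self.config` (check the arkindex-base-worker documentation for the version pinned in `pyproject.toml`).

```python
from arkindex_worker.models import Element
from arkindex_worker.worker import ElementsWorker


class FileImport(ElementsWorker):
    def process_element(self, element: Element) -> None:
        # "folder_type" is an invented example parameter, assumed to be declared
        # in the worker's user configuration and surfaced through self.config.
        folder_type = self.config.get("folder_type", "folder")
        ...
```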
#### Workflow
<!-- Describe what the worker does with the element, in what order, to what end. -->
#### Results
<!-- Describe what the worker produces on Arkindex. It may be ML results (transcriptions, elements, entities, classifications, ...), artifacts... -->
#### Additional resources
<!-- Any external links, e.g. - [Description of website](https://www.url.com/) -->
[build-system]
requires = ["setuptools >= 61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "worker_file_import"
version = "0.1.0"
description = "Worker to import files in various formats in Arkindex"
dependencies = [
    "arkindex-base-worker==0.3.7.post1",
]
authors = [
    { name = "Teklia", email = "contact@teklia.com" },
]
maintainers = [
    { name = "Teklia", email = "contact@teklia.com" },
]
requires-python = ">=3.10"
readme = { file = "README.md", content-type = "text/markdown" }
keywords = ["python"]
classifiers = [
    # Specify the Python versions you support here.
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
]

[project.scripts]
"worker-file-import" = "worker_file_import.worker:main"

[tool.setuptools.packages]
find = {}

[tool.ruff]
exclude = [".git", "__pycache__"]

[tool.ruff.lint]
ignore = ["E501"]
select = [
    # pycodestyle
    "E",
    "W",
    # Pyflakes
    "F",
    # Flake8 Debugger
    "T1",
    # Isort
    "I",
    # Implicit Optional
    "RUF013",
    # Invalid pyproject.toml
    "RUF200",
    # pyupgrade
    "UP",
    # flake8-bugbear
    "B",
    # flake8-simplify
    "SIM",
    # flake8-pytest-style
    "PT",
    # flake8-use-pathlib
    "PTH",
]

[tool.ruff.lint.per-file-ignores]
# Ignore `pytest-composite-assertion` rules of `flake8-pytest-style` linter for non-test files
"worker_file_import/**/*.py" = ["PT018"]

[tool.ruff.lint.isort]
known-first-party = ["arkindex", "arkindex_worker"]
known-third-party = ["pytest", "setuptools"]
import os

import pytest
from arkindex.mock import MockApiClient
from arkindex_worker.worker.base import BaseWorker


@pytest.fixture(autouse=True)
def _setup_environment(responses, monkeypatch) -> None:
    """Setup needed environment variables"""
    # Allow accessing remote API schemas
    # defaulting to the prod environment
    schema_url = os.environ.get(
        "ARKINDEX_API_SCHEMA_URL",
        "https://demo.arkindex.org/api/v1/openapi/?format=openapi-json",
    )
    responses.add_passthru(schema_url)

    # Set schema url in environment
    os.environ["ARKINDEX_API_SCHEMA_URL"] = schema_url
    # Setup a fake worker run ID
    os.environ["ARKINDEX_WORKER_RUN_ID"] = "1234-file-import"
    # Setup a fake corpus ID
    os.environ["ARKINDEX_CORPUS_ID"] = "1234-corpus-id"

    # Setup a mock api client instead of using a real one
    def mock_setup_api_client(self):
        self.api_client = MockApiClient()

    monkeypatch.setattr(BaseWorker, "setup_api_client", mock_setup_api_client)
import importlib


def test_dummy():
    assert True


def test_import():
    """Import our newly created module, through importlib to avoid parsing issues"""
    worker = importlib.import_module("worker_file_import.worker")
    assert hasattr(worker, "Demo")
    assert hasattr(worker.Demo, "process_element")
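

# A possible follow-up test, not part of this commit: it exercises
# process_element directly, relying on the mocked API client configured in
# conftest.py. It assumes an Element can be built from a plain dict and that
# constructing the worker does not need a live Arkindex API; verify both
# against the pinned arkindex-base-worker version before adopting it
# (imports are repeated here only to keep the sketch self-contained).
import logging

from arkindex_worker.models import Element

from worker_file_import.worker import Demo


def test_process_element_logs(caplog):
    caplog.set_level(logging.INFO)
    worker = Demo(description="test")
    worker.process_element(Element({"id": "1234-file-import"}))
    # The Demo worker only logs the element id, so the log output is the
    # observable behaviour to assert on.
    assert "1234-file-import" in caplog.text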
tox.ini 0 → 100644
[tox]
envlist = worker-file-import

[testenv]
passenv = ARKINDEX_API_SCHEMA_URL
commands =
  pytest {posargs}
deps =
  pytest
  pytest-responses
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s/%(name)s: %(message)s",
)
from logging import Logger, getLogger

from arkindex_worker.models import Element
from arkindex_worker.worker import ElementsWorker

logger: Logger = getLogger(__name__)


class Demo(ElementsWorker):
    def process_element(self, element: Element) -> None:
        logger.info(f"Demo processing element ({element.id})")


def main() -> None:
    Demo(description="Worker to import files in various formats in Arkindex").run()


if __name__ == "__main__":
    main()