From 0773855e0d78ab206d1932fbf89415446daafc5f Mon Sep 17 00:00:00 2001
From: Yoann Schneider <yschneider@teklia.com>
Date: Mon, 7 Aug 2023 08:00:43 +0000
Subject: [PATCH] Update the Arkindex URL and remove obsolete Arkindex-related
 requirements in unit tests

---
 .gitlab-ci.yml               |  4 ----
 docs/get_started/training.md |  2 +-
 docs/usage/datasets/index.md |  2 +-
 tests/conftest.py            | 17 -----------------
 tox.ini                      |  2 --
 5 files changed, 2 insertions(+), 25 deletions(-)
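
A minimal sketch of a local test run once this change is applied (assumes a
checkout with requirements.txt available; this note is not part of the patch):

    # No schema.yml download and no ARKINDEX_API_SCHEMA_URL export are needed
    # anymore: tox builds the teklia-dan env from requirements.txt and runs pytest.
    pip install tox
    tox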

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 30145d86..8d5fabc7 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -34,14 +34,10 @@ test:
 
   variables:
     PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
-    ARKINDEX_API_SCHEMA_URL: schema.yml
 
   before_script:
     - pip install tox
 
-    # Download OpenAPI schema from last backend build
-    - curl https://assets.teklia.com/arkindex/openapi.yml > schema.yml
-
     # Add system deps for opencv
     - apt-get update -q
     - apt-get install -q -y libgl1
diff --git a/docs/get_started/training.md b/docs/get_started/training.md
index b996f018..6684fd05 100644
--- a/docs/get_started/training.md
+++ b/docs/get_started/training.md
@@ -6,7 +6,7 @@ There are several steps to follow when training a DAN model.
 
 The data must be extracted and formatted for training. To extract the data, DAN uses an Arkindex export database in SQLite format. You will need to:
 
-1. Structure the data into folders (`train` / `val` / `test`) in [Arkindex](https://arkindex.teklia.com/).
+1. Structure the data into folders (`train` / `val` / `test`) in [Arkindex](https://demo.arkindex.org/).
 1. [Export the project](https://doc.arkindex.org/howto/export/) in SQLite format.
 1. Extract the data with the [extract command](../usage/datasets/extract.md).
 1. Format the data with the [format command](../usage/datasets/format.md).
diff --git a/docs/usage/datasets/index.md b/docs/usage/datasets/index.md
index a856f6c0..8e6a21cc 100644
--- a/docs/usage/datasets/index.md
+++ b/docs/usage/datasets/index.md
@@ -3,7 +3,7 @@
 Two operations are available through subcommands:
 
 `teklia-dan dataset extract`
-: To extract a dataset from Arkindex using its [Python API](https://arkindex.teklia.com/api-docs/). More details in [the dedicated section](./extract.md).
+: To extract a dataset from Arkindex using its [Python API](https://demo.arkindex.org/api-docs/). More details in [the dedicated section](./extract.md).
 
 `teklia-dan dataset format`
 : To format datasets for training. More details in [the dedicated section](./format.md).
diff --git a/tests/conftest.py b/tests/conftest.py
index f65fcb74..c73aac32 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import os
 from pathlib import Path
 
 import pytest
@@ -14,22 +13,6 @@ from dan.transforms import Preprocessing
 FIXTURES = Path(__file__).resolve().parent / "data"
 
 
-@pytest.fixture(autouse=True)
-def setup_environment(responses):
-    """Setup needed environment variables"""
-
-    # Allow accessing remote API schemas
-    # defaulting to the prod environment
-    schema_url = os.environ.get(
-        "ARKINDEX_API_SCHEMA_URL",
-        "https://arkindex.teklia.com/api/v1/openapi/?format=openapi-json",
-    )
-    responses.add_passthru(schema_url)
-
-    # Set schema url in environment
-    os.environ["ARKINDEX_API_SCHEMA_URL"] = schema_url
-
-
 @pytest.fixture
 def database_path():
     return FIXTURES / "export.sqlite"
diff --git a/tox.ini b/tox.ini
index aedc8cf0..0b48a6dd 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,11 +2,9 @@
 envlist = teklia-dan
 
 [testenv]
-passenv = ARKINDEX_API_SCHEMA_URL
 commands =
   pytest {posargs}
 
 deps =
   pytest
-  pytest-responses
   -rrequirements.txt
-- 
GitLab