Compare revisions: workers/base-worker
Commits on Source (5)
......@@ -270,7 +270,7 @@ def trim_polygon(polygon, image_width: int, image_height: int):
return updated_polygon
def revert_orientation(element, polygon):
def revert_orientation(element, polygon, reverse: bool = False):
"""
Update the coordinates of the polygon of a child element based on the orientation of
its parent.
......@@ -278,10 +278,15 @@ def revert_orientation(element, polygon):
This method should be called before sending any polygon to Arkindex, to undo the possible
orientation applied by :meth:`Element.open_image`.
In some cases, we want to apply the parent's orientation to the child's polygon instead. This is done
by setting `reverse=True`.
:param element: Parent element.
:type element: Element or CachedElement
:param polygon: Polygon corresponding to the child element.
:type polygon: list(list(int or float))
:param reverse: Whether to revert (`False`, the default) or apply (`True`) the parent's orientation.
:type reverse: bool
:return: A polygon with updated coordinates.
:rtype: list(list(int))
"""
......@@ -294,6 +299,7 @@ def revert_orientation(element, polygon):
assert polygon and isinstance(
polygon, list
), "polygon shouldn't be null and should be a list"
assert isinstance(reverse, bool), "reverse should be a bool"
# Rotating with Pillow can cause it to move the image around, as the image cannot have negative coordinates
# and must be a rectangle. This means the origin point of any coordinates from an image is invalid, and the
# center of the bounding box of the rotated image is different from the center of the element's bounding box.
......@@ -322,13 +328,24 @@ def revert_orientation(element, polygon):
ring = LinearRing(polygon)
# First undo the negative coordinates offset, since this is the last step of the original rotation
ring = translate(ring, xoff=offset_x, yoff=offset_y)
if element.rotation_angle:
ring = rotate(ring, -element.rotation_angle, origin=origin)
if element.mirrored:
ring = scale(ring, xfact=-1, origin=origin)
if reverse:
# Apply the parent's orientation on the child's polygon
# Apply mirroring
if element.mirrored:
ring = scale(ring, xfact=-1, origin=origin)
# Apply rotation
if element.rotation_angle:
ring = rotate(ring, element.rotation_angle, origin=origin)
# Finally, apply the negative coordinates offset
ring = translate(ring, xoff=-offset_x, yoff=-offset_y)
else:
# First undo the negative coordinates offset, since this is the last step of the original transformation
ring = translate(ring, xoff=offset_x, yoff=offset_y)
# Revert any rotation
if element.rotation_angle:
ring = rotate(ring, -element.rotation_angle, origin=origin)
# Revert any mirroring
if element.mirrored:
ring = scale(ring, xfact=-1, origin=origin)
return [[int(x), int(y)] for x, y in ring.coords]
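A minimal usage sketch of the new behaviour, assuming `parent` is an Element or CachedElement already retrieved by the worker and `child_polygon` is a list of [x, y] points (both names are placeholders, not defined in this MR):

from arkindex_worker.image import revert_orientation

# Default: undo the parent's orientation before sending the polygon to Arkindex.
restored = revert_orientation(parent, child_polygon)

# With reverse=True: apply the parent's orientation to the child's polygon instead,
# e.g. to project coordinates into the rotated/mirrored referential.
projected = revert_orientation(parent, child_polygon, reverse=True)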
......@@ -4,13 +4,15 @@ ElementsWorker methods for elements and element types.
"""
import uuid
from typing import Dict, Iterable, List, Optional, Union
from typing import Dict, Iterable, List, NamedTuple, Optional, Union
from peewee import IntegrityError
from arkindex_worker import logger
from arkindex_worker.cache import CachedElement, CachedImage
from arkindex_worker.models import Element
from arkindex_worker.models import Corpus, Element
ElementType = NamedTuple("ElementType", name=str, slug=str, is_folder=bool)
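For reference, the new ElementType tuple is instantiated positionally later in this MR; a hypothetical example:

# Illustrative only: a non-folder "page" type whose display name equals its slug.
page_type = ElementType("page", "page", False)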
class MissingTypeError(Exception):
......@@ -24,13 +26,34 @@ class ElementMixin(object):
Mixin for the :class:`ElementsWorker` to provide ``Element`` helpers.
"""
def check_required_types(self, corpus_id: str, *type_slugs: str) -> bool:
def create_required_types(self, corpus: Corpus, element_types: List[ElementType]):
"""Creates given element types in the corpus.
:param Corpus corpus: The corpus to create types on.
:param List[ElementType] element_types: The missing element types to create.
"""
for element_type in element_types:
self.request(
"CreateElementType",
body={
"slug": element_type.slug,
"display_name": element_type.name,
"folder": element_type.is_folder,
"corpus": corpus.id,
},
)
logger.info(f"Created a new element type with slug {element_type.slug}")
def check_required_types(
self, corpus_id: str, *type_slugs: str, create_missing: bool = False
) -> bool:
"""
Check that a corpus has a list of required element types,
and raise an exception if any of them are missing, unless `create_missing` is enabled.
:param str corpus_id: ID of the corpus to check types on.
:param str \\*type_slugs: Type slugs to look for.
:param bool create_missing: Whether missing types should be created instead of raising an error.
:returns bool: True if all of the specified type slugs exist or were created.
:raises MissingTypeError: If any of the specified type slugs were not found and `create_missing` is False.
"""
......@@ -42,14 +65,22 @@ class ElementMixin(object):
isinstance(slug, str) for slug in type_slugs
), "Element type slugs must be strings."
corpus = self.request("RetrieveCorpus", id=corpus_id)
available_slugs = {element_type["slug"] for element_type in corpus["types"]}
corpus = Corpus(self.request("RetrieveCorpus", id=corpus_id))
available_slugs = {element_type.slug for element_type in corpus.types}
missing_slugs = set(type_slugs) - available_slugs
if missing_slugs:
raise MissingTypeError(
f'Element type(s) {", ".join(sorted(missing_slugs))} were not found in the {corpus["name"]} corpus ({corpus["id"]}).'
)
if create_missing:
self.create_required_types(
corpus,
element_types=[
ElementType(slug, slug, False) for slug in missing_slugs
],
)
else:
raise MissingTypeError(
f'Element type(s) {", ".join(sorted(missing_slugs))} were not found in the {corpus.name} corpus ({corpus.id}).'
)
return True
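A hedged usage sketch of the new option, assuming `worker` is an already configured ElementsWorker and the corpus ID is a placeholder:

corpus_id = "11111111-1111-1111-1111-111111111111"

# Raises MissingTypeError if "page" or "text_line" is missing from the corpus.
worker.check_required_types(corpus_id, "page", "text_line")

# Creates each missing type (as a non-folder type whose display name is its slug)
# instead of raising, then returns True.
worker.check_required_types(corpus_id, "page", "text_line", create_missing=True)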
......
......@@ -104,3 +104,91 @@ class MetaDataMixin(object):
self.report.add_metadata(element.id, metadata["id"], type.value, name)
return metadata["id"]
def create_metadatas(
self,
element: Element,
metadatas: list,
):
"""
Create multiple metadatas on an existing element.
This method does not support cache.
:param Element element: The element to create multiple metadata on.
:param List[Dict] metadatas: The list of dicts whose keys are the following:
- type: MetaType
- name: str
- value: Union[str, int, float]
- entity_id: Optional[str]
assert element and isinstance(
element, Element
), "element shouldn't be null and should be of type Element"
assert metadatas and isinstance(
metadatas, list
), "type shouldn't be null and should be of type list of Dict"
# Make a copy to avoid modifiying the metadata_list argument
metas = []
for index, metadata in enumerate(metadatas):
assert isinstance(
metadata, dict
), f"Element at index {index} in metadata_list: Should be of type dict"
assert metadata.get("type") and isinstance(
metadata.get("type"), MetaType
), "type shouldn't be null and should be of type MetaType"
assert metadata.get("name") and isinstance(
metadata.get("name"), str
), "name shouldn't be null and should be of type str"
assert metadata.get("value") and isinstance(
metadata.get("value"), (str, float, int)
), "value shouldn't be null and should be of type (str or float or int)"
assert metadata.get("entity_id") is None or isinstance(
metadata.get("entity_id"), str
), "entity_id should be None or a str"
metas.append(
{
"type": metadata.get("type").value,
"name": metadata.get("name"),
"value": metadata.get("value"),
"entity_id": metadata.get("entity_id"),
}
)
if self.is_read_only:
logger.warning("Cannot create metadata as this worker is in read-only mode")
return
created_metadatas = self.request(
"CreateMetaDataBulk",
id=element.id,
body={
"worker_version": self.worker_version_id,
"worker_run_id": self.worker_run_id,
"metadata_list": metas,
},
)["metadata_list"]
for meta in created_metadatas:
self.report.add_metadata(element.id, meta["id"], meta["type"], meta["name"])
return created_metadatas
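For context, a sketch of calling the new bulk helper; `worker` and `element` are assumed to exist already, and the MetaType import path is an assumption (adjust to wherever the worker already imports it from):

from arkindex_worker.worker.metadata import MetaType  # assumed import path

created = worker.create_metadatas(
    element,
    [
        {"type": MetaType.Text, "name": "language", "value": "fra"},
        {"type": MetaType.Text, "name": "page_number", "value": 12, "entity_id": None},
    ],
)
# Each returned entry contains the created metadata's id, type, name, value and entity_id.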
def list_metadata(self, element: Element):
"""
List all metadata linked to an element.
This method does not support cache.
:param Element element: The element to list metadata on.
"""
assert element and isinstance(
element, Element
), "element shouldn't be null and should be of type Element"
return self.api_client.paginate("ListElementMetaData", id=element.id)
......@@ -87,6 +87,11 @@ class TrainingMixin(object):
This method creates a model archive and its associated hash,
in order to create a unique version that will be stored in a bucket and published on Arkindex.
"""
if self.is_read_only:
logger.warning(
"Cannot publish a new model version as this worker is in read-only mode"
)
return
# Create the zst archive, get its hash and size
with create_archive(path=model_path) as (
......@@ -131,6 +136,11 @@ class TrainingMixin(object):
- The version is in `Created` state: this version's details are used
- The version is in `Available` state: you cannot create the same version twice, an error is raised
"""
if self.is_read_only:
logger.warning(
"Cannot create a new model version as this worker is in read-only mode"
)
return
# Create a new model version with hash and size
try:
......@@ -171,6 +181,11 @@ class TrainingMixin(object):
"""
Upload the archive of the model's files to an Amazon S3-compatible storage
"""
if self.is_read_only:
logger.warning(
"Cannot upload this archive as this worker is in read-only mode"
)
return
s3_put_url = model_version_details.get("s3_put_url")
logger.info("Uploading to s3...")
......@@ -191,6 +206,11 @@ class TrainingMixin(object):
"""
Update the specified model version to the state `Available` using the given information
"""
if self.is_read_only:
logger.warning(
"Cannot update this model version as this worker is in read-only mode"
)
return
model_version_id = model_version_details.get("id")
logger.info(f"Updating model version ({model_version_id})")
......
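With these guards, every training helper now logs a warning and returns None in read-only mode, so callers should check the result before chaining the S3 upload. A hedged sketch (placeholder values; the call signatures match the tests updated in this MR):

def publish(worker, archive_path):
    model_version = worker.create_model_version(
        "fake_model_id", "model_hash", "30", "archive_hash", "tag", "description"
    )
    if model_version is None:
        # Read-only worker, or an Available version with this hash already exists.
        return
    worker.upload_to_s3(archive_path, model_version)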
......@@ -5,6 +5,7 @@ from uuid import UUID
import pytest
from apistar.exceptions import ErrorResponse
from responses import matchers
from arkindex_worker.cache import (
SQL_VERSION,
......@@ -33,11 +34,7 @@ def test_check_required_types_argument_types(mock_elements_worker):
assert str(e.value) == "Element type slugs must be strings."
def test_check_required_types(monkeypatch, tmp_path, mock_elements_worker, responses):
elements_path = tmp_path / "elements.json"
elements_path.write_text("[]")
monkeypatch.setenv("TASK_ELEMENTS", str(elements_path))
def test_check_required_types(responses):
corpus_id = "12341234-1234-1234-1234-123412341234"
responses.add(
responses.GET,
......@@ -49,7 +46,7 @@ def test_check_required_types(monkeypatch, tmp_path, mock_elements_worker, respo
},
)
worker = ElementsWorker()
worker.configure()
worker.setup_api_client()
assert worker.check_required_types(corpus_id, "page")
assert worker.check_required_types(corpus_id, "page", "folder")
......@@ -62,6 +59,54 @@ def test_check_required_types(monkeypatch, tmp_path, mock_elements_worker, respo
)
def test_create_missing_types(responses):
corpus_id = "12341234-1234-1234-1234-123412341234"
responses.add(
responses.GET,
f"http://testserver/api/v1/corpus/{corpus_id}/",
json={
"id": corpus_id,
"name": "Some Corpus",
"types": [{"slug": "folder"}, {"slug": "page"}],
},
)
responses.add(
responses.POST,
"http://testserver/api/v1/elements/type/",
match=[
matchers.json_params_matcher(
{
"slug": "text_line",
"display_name": "text_line",
"folder": False,
"corpus": corpus_id,
}
)
],
)
responses.add(
responses.POST,
"http://testserver/api/v1/elements/type/",
match=[
matchers.json_params_matcher(
{
"slug": "act",
"display_name": "act",
"folder": False,
"corpus": corpus_id,
}
)
],
)
worker = ElementsWorker()
worker.setup_api_client()
assert worker.check_required_types(
corpus_id, "page", "text_line", "act", create_missing=True
)
def test_list_elements_elements_list_arg_wrong_type(
monkeypatch, tmp_path, mock_elements_worker
):
......
......@@ -196,3 +196,225 @@ def test_create_metadata(responses, mock_elements_worker):
"worker_version": "12341234-1234-1234-1234-123412341234",
}
assert metadata_id == "12345678-1234-1234-1234-123456789123"
@pytest.mark.parametrize(
"metadatas",
[
([{"type": MetaType.Text, "name": "fake_name", "value": "fake_value"}]),
(
[
{
"type": MetaType.Text,
"name": "fake_name",
"value": "fake_value",
"entity_id": "fake_entity_id",
}
]
),
],
)
def test_create_metadatas(responses, mock_elements_worker, metadatas):
element = Element({"id": "12341234-1234-1234-1234-123412341234"})
responses.add(
responses.POST,
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
status=201,
json={
"worker_version": mock_elements_worker.worker_version_id,
"worker_run_id": mock_elements_worker.worker_run_id,
"metadata_list": [
{
"id": "fake_metadata_id",
"type": metadatas[0]["type"].value,
"name": metadatas[0]["name"],
"value": metadatas[0]["value"],
"dates": [],
"entity_id": metadatas[0].get("entity_id"),
}
],
},
)
created_metadatas = mock_elements_worker.create_metadatas(element, metadatas)
assert len(responses.calls) == len(BASE_API_CALLS) + 1
assert [
(call.request.method, call.request.url) for call in responses.calls
] == BASE_API_CALLS + [
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
]
assert json.loads(responses.calls[-1].request.body)["metadata_list"] == [
{
"type": metadatas[0]["type"].value,
"name": metadatas[0]["name"],
"value": metadatas[0]["value"],
"entity_id": metadatas[0].get("entity_id"),
}
]
assert created_metadatas == [
{
"id": "fake_metadata_id",
"type": metadatas[0]["type"].value,
"name": metadatas[0]["name"],
"value": metadatas[0]["value"],
"dates": [],
"entity_id": metadatas[0].get("entity_id"),
}
]
@pytest.mark.parametrize(
"wrong_element",
[
None,
"not_element_type",
1234,
12.5,
],
)
def test_create_metadatas_wrong_element(mock_elements_worker, wrong_element):
wrong_metadatas = [
{"type": MetaType.Text, "name": "fake_name", "value": "fake_value"}
]
with pytest.raises(AssertionError) as e:
mock_elements_worker.create_metadatas(
element=wrong_element, metadatas=wrong_metadatas
)
assert str(e.value) == "element shouldn't be null and should be of type Element"
@pytest.mark.parametrize(
"wrong_type",
[
None,
"not_metadata_type",
1234,
12.5,
],
)
def test_create_metadatas_wrong_type(mock_elements_worker, wrong_type):
element = Element({"id": "12341234-1234-1234-1234-123412341234"})
wrong_metadatas = [{"type": wrong_type, "name": "fake_name", "value": "fake_value"}]
with pytest.raises(AssertionError) as e:
mock_elements_worker.create_metadatas(
element=element, metadatas=wrong_metadatas
)
assert str(e.value) == "type shouldn't be null and should be of type MetaType"
@pytest.mark.parametrize("wrong_name", [(None), (1234), (12.5), ([1, 2, 3, 4])])
def test_create_metadatas_wrong_name(mock_elements_worker, wrong_name):
element = Element({"id": "fake_element_id"})
wrong_metadatas = [
{"type": MetaType.Text, "name": wrong_name, "value": "fake_value"}
]
with pytest.raises(AssertionError) as e:
mock_elements_worker.create_metadatas(
element=element, metadatas=wrong_metadatas
)
assert str(e.value) == "name shouldn't be null and should be of type str"
@pytest.mark.parametrize("wrong_value", [(None), ([1, 2, 3, 4])])
def test_create_metadatas_wrong_value(mock_elements_worker, wrong_value):
element = Element({"id": "fake_element_id"})
wrong_metadatas = [
{"type": MetaType.Text, "name": "fake_name", "value": wrong_value}
]
with pytest.raises(AssertionError) as e:
mock_elements_worker.create_metadatas(
element=element, metadatas=wrong_metadatas
)
assert (
str(e.value)
== "value shouldn't be null and should be of type (str or float or int)"
)
@pytest.mark.parametrize(
"wrong_entity",
[
[1, 2, 3, 4],
1234,
12.5,
],
)
def test_create_metadatas_wrong_entity(mock_elements_worker, wrong_entity):
element = Element({"id": "fake_element_id"})
wrong_metadatas = [
{
"type": MetaType.Text,
"name": "fake_name",
"value": "fake_value",
"entity_id": wrong_entity,
}
]
with pytest.raises(AssertionError) as e:
mock_elements_worker.create_metadatas(
element=element, metadatas=wrong_metadatas
)
assert str(e.value) == "entity_id should be None or a str"
def test_create_metadatas_api_error(responses, mock_elements_worker):
element = Element({"id": "12341234-1234-1234-1234-123412341234"})
metadatas = [
{
"type": MetaType.Text,
"name": "fake_name",
"value": "fake_value",
"entity_id": "fake_entity_id",
}
]
responses.add(
responses.POST,
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
status=500,
)
with pytest.raises(ErrorResponse):
mock_elements_worker.create_metadatas(element, metadatas)
assert len(responses.calls) == len(BASE_API_CALLS) + 5
assert [
(call.request.method, call.request.url) for call in responses.calls
] == BASE_API_CALLS + [
# The API call is retried 5 times
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
(
"POST",
"http://testserver/api/v1/element/12341234-1234-1234-1234-123412341234/metadata/bulk/",
),
]
def test_list_metadata(fake_dummy_worker):
element = Element({"id": "element_id"})
fake_dummy_worker.api_client.add_response(
"ListElementMetaData",
id=element.id,
response={"id": "metadata_id"},
)
assert fake_dummy_worker.list_metadata(element) == {"id": "metadata_id"}
assert len(fake_dummy_worker.api_client.history) == 1
assert len(fake_dummy_worker.api_client.responses) == 0
# -*- coding: utf-8 -*-
import os
import sys
import pytest
import responses
......@@ -17,6 +18,15 @@ class TrainingWorker(BaseWorker, TrainingMixin):
pass
@pytest.fixture
def mock_training_worker(monkeypatch):
monkeypatch.setattr(sys, "argv", ["worker"])
training_worker = TrainingWorker()
training_worker.api_client = MockApiClient()
training_worker.args = training_worker.parser.parse_args()
return training_worker
def test_create_archive(model_file_dir):
"""Create an archive when the model's file is in a folder"""
......@@ -46,13 +56,11 @@ def test_create_archive(model_file_dir):
(None, ""),
],
)
def test_create_model_version(tag, description):
def test_create_model_version(mock_training_worker, tag, description):
"""A new model version is returned"""
model_version_id = "fake_model_version_id"
model_id = "fake_model_id"
training = TrainingWorker()
training.api_client = MockApiClient()
model_hash = "hash"
archive_hash = "archive_hash"
size = "30"
......@@ -68,7 +76,7 @@ def test_create_model_version(tag, description):
"s3_put_url": "http://hehehe.com",
}
training.api_client.add_response(
mock_training_worker.api_client.add_response(
"CreateModelVersion",
id=model_id,
response=model_version_details,
......@@ -81,7 +89,7 @@ def test_create_model_version(tag, description):
},
)
assert (
training.create_model_version(
mock_training_worker.create_model_version(
model_id, model_hash, size, archive_hash, tag, description
)
== model_version_details
......@@ -126,7 +134,7 @@ def test_create_model_version(tag, description):
({"hash": ["A version for this model with this hash already exists."]}, 403),
],
)
def test_retrieve_created_model_version(content, status_code):
def test_retrieve_created_model_version(mock_training_worker, content, status_code):
"""
If there is an existing model version in the Created state,
a 400 is raised, but the model version is still returned in the error content.
......@@ -135,14 +143,12 @@ def test_retrieve_created_model_version(content, status_code):
"""
model_id = "fake_model_id"
training = TrainingWorker()
training.api_client = MockApiClient()
model_hash = "hash"
archive_hash = "archive_hash"
size = "30"
tag = "tag"
description = "description"
training.api_client.add_error_response(
mock_training_worker.api_client.add_error_response(
"CreateModelVersion",
id=model_id,
status_code=status_code,
......@@ -157,26 +163,24 @@ def test_retrieve_created_model_version(content, status_code):
)
if status_code == 400:
assert (
training.create_model_version(
mock_training_worker.create_model_version(
model_id, model_hash, size, archive_hash, tag, description
)
== content["hash"]
)
elif status_code == 403:
assert (
training.create_model_version(
mock_training_worker.create_model_version(
model_id, model_hash, size, archive_hash, tag, description
)
is None
)
def test_handle_s3_uploading_errors(model_file_dir):
training = TrainingWorker()
training.api_client = MockApiClient()
def test_handle_s3_uploading_errors(mock_training_worker, model_file_dir):
s3_endpoint_url = "http://s3.localhost.com"
responses.add_passthru(s3_endpoint_url)
responses.add(responses.Response(method="PUT", url=s3_endpoint_url, status=400))
file_path = model_file_dir / "model_file.pth"
with pytest.raises(Exception):
training.upload_to_s3(file_path, {"s3_put_url": s3_endpoint_url})
mock_training_worker.upload_to_s3(file_path, {"s3_put_url": s3_endpoint_url})
......@@ -10,8 +10,10 @@ from PIL import Image, ImageChops, ImageOps
from arkindex_worker.cache import CachedElement, create_tables, init_cache_db
from arkindex_worker.image import (
BoundingBox,
download_tiles,
open_image,
polygon_bounding_box,
revert_orientation,
trim_polygon,
)
......@@ -367,66 +369,114 @@ class TestTrimPolygon(unittest.TestCase):
@pytest.mark.parametrize(
"current_polygon,angle,mirrored,original_polygon",
"angle, mirrored, updated_bounds, reverse",
(
(
[[73, 40], [1013, 40], [1013, 178], [73, 178]],
0,
False,
[[73, 40], [1013, 40], [1013, 178], [73, 178], [73, 40]],
{"x": 295, "y": 11, "width": 111, "height": 47}, # upper right
True,
),
(
90,
False,
{"x": 510, "y": 295, "width": 47, "height": 111}, # lower right
True,
),
(
180,
False,
{"x": 9, "y": 510, "width": 111, "height": 47}, # lower left
True,
),
(
270,
False,
{"x": 11, "y": 9, "width": 47, "height": 111}, # upper left
True,
),
(
0,
True,
{"x": 9, "y": 11, "width": 111, "height": 47}, # upper left
True,
),
(
90,
True,
{"x": 510, "y": 9, "width": 47, "height": 111}, # upper right
True,
),
(
180,
True,
{"x": 295, "y": 510, "width": 111, "height": 47}, # lower right
True,
),
(
270,
True,
{"x": 11, "y": 295, "width": 47, "height": 111}, # lower left
True,
),
(
0,
False,
{"x": 295, "y": 11, "width": 111, "height": 47}, # upper right
False,
),
(
[[502, 73], [588, 73], [588, 1013], [502, 1013]],
90,
False,
[[73, 126], [73, 40], [1013, 40], [1013, 126], [73, 126]],
{"x": 11, "y": 162, "width": 47, "height": 111}, # upper left
False,
),
(
[[254, 205], [540, 205], [540, 327], [254, 327]],
180,
False,
[[785, 423], [499, 423], [499, 301], [785, 301], [785, 423]],
{"x": 9, "y": 510, "width": 111, "height": 47}, # lower left
False,
),
(
[[301, 139], [423, 139], [423, 540], [301, 540]],
270,
False,
[[900, 301], [900, 423], [499, 423], [499, 301], [900, 301]],
{"x": 357, "y": 295, "width": 47, "height": 111}, # lower right
False,
),
(
[[26, 40], [966, 40], [966, 191], [26, 191]],
0,
True,
[[1013, 40], [73, 40], [73, 191], [1013, 191], [1013, 40]],
{"x": 9, "y": 11, "width": 111, "height": 47}, # upper left
False,
),
(
[[502, 26], [588, 26], [588, 966], [502, 966]],
90,
True,
[[1013, 126], [1013, 40], [73, 40], [73, 126], [1013, 126]],
{"x": 357, "y": 162, "width": 47, "height": 111}, # lower left
False,
),
(
[[486, 89], [900, 89], [900, 311], [486, 311]],
180,
True,
[[486, 539], [900, 539], [900, 317], [486, 317], [486, 539]],
{"x": 295, "y": 510, "width": 111, "height": 47}, # lower right
False,
),
(
[[317, 486], [539, 486], [539, 900], [317, 900]],
270,
True,
[[486, 317], [486, 539], [900, 539], [900, 317], [486, 317]],
{"x": 11, "y": 295, "width": 47, "height": 111}, # upper right
False,
),
),
)
def test_revert_orientation(
current_polygon, angle, mirrored, original_polygon, tmp_path
):
def test_revert_orientation(angle, mirrored, updated_bounds, reverse, tmp_path):
"""Test cases, for both Elements and CachedElements:
- no rotation or orientation
- rotation with 3 different angles (90, 180, 270)
- rotation + mirror with 4 angles (0, 90, 180, 270)
"""
child_polygon = [[295, 11], [295, 58], [406, 58], [406, 11], [295, 11]]
# Setup cache db to test with CachedElements
db_path = f"{tmp_path}/db.sqlite"
init_cache_db(db_path)
......@@ -434,9 +484,9 @@ def test_revert_orientation(
image_polygon = [
[0, 0],
[0, 628],
[1039, 628],
[1039, 0],
[0, 568],
[415, 568],
[415, 0],
[0, 0],
]
element = Element(
......@@ -454,5 +504,10 @@ def test_revert_orientation(
rotation_angle=angle,
)
assert revert_orientation(element, current_polygon) == original_polygon
assert revert_orientation(cached_element, current_polygon) == original_polygon
assert polygon_bounding_box(
revert_orientation(element, child_polygon, reverse=reverse)
) == BoundingBox(**updated_bounds)
assert polygon_bounding_box(
revert_orientation(cached_element, child_polygon, reverse=reverse)
) == BoundingBox(**updated_bounds)