Commit 578c6f0d authored by Bastien Abadie

Merge branch 'error-report' into 'master'

DataImport errors

See merge request !73
parents d50a05d8 8bdc6697
Showing 454 additions and 74 deletions
@@ -20,7 +20,7 @@ backend-tests:
    DB_PORT: 5432
  before_script:
-    - apk --update add postgresql-dev libjpeg-turbo-dev gcc musl-dev zlib-dev libmagic libxml2-dev libxslt-dev
+    - apk --update add postgresql-dev libjpeg-turbo-dev gcc musl-dev zlib-dev libmagic libxml2-dev libxslt-dev git
    - pip install codecov
  script:
......
ROOT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
+TUNNEL_HOST:=panetios-dev
+TUNNEL_PORT:=8000
VERSION=$(shell git rev-parse --short HEAD)
TAG=arkindex-app
@@ -30,6 +32,9 @@ release: require-version
worker:
	celery worker -A arkindex.project -l INFO --purge

+tunnel:
+	ssh $(TUNNEL_HOST) -NR *:$(TUNNEL_PORT):localhost:$(TUNNEL_PORT)
+
test-fixtures:
	$(eval export PGPASSWORD=devdata)
	psql -h 127.0.0.1 -p 9100 -U devuser -c 'DROP DATABASE IF EXISTS arkindex_tmp_fixtures' template1
......
@@ -9,9 +9,9 @@ from rest_framework import status
from rest_framework.exceptions import ValidationError, NotAuthenticated, AuthenticationFailed
from arkindex.documents.models import Corpus
from arkindex.dataimport.models import \
-    DataImport, DataFile, DataImportState, DataImportMode, Repository, RepositorySource, Revision
+    DataImport, DataFile, DataImportState, DataImportMode, DataImportFailure, Repository, RepositorySource, Revision
from arkindex.dataimport.serializers import \
-    DataImportLightSerializer, DataImportSerializer, DataFileSerializer
+    DataImportLightSerializer, DataImportSerializer, DataImportFailureSerializer, DataFileSerializer
import hashlib
import magic
@@ -73,6 +73,20 @@ class DataImportDetails(RetrieveUpdateDestroyAPIView):
        super().perform_destroy(instance)


class DataImportFailures(ListAPIView):
    """
    List a single import's errors
    """
    permission_classes = (IsAuthenticated, )
    serializer_class = DataImportFailureSerializer

    def get_queryset(self):
        return DataImportFailure.objects.filter(dataimport_id=self.kwargs['pk']) \
            .prefetch_related('dataimport__revision__repo', 'element') \
            .order_by('path', 'line')
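
For reference, a minimal sketch of querying this endpoint (the route is registered in the project URLs below); it assumes an existing authenticated `user` and a `dataimport` instance, and results are paginated by DRF:

from rest_framework.test import APIClient

client = APIClient()
client.force_login(user)  # anonymous requests get a 403, as the tests below check
response = client.get('/api/imports/{}/failures'.format(dataimport.id))
for failure in response.json()['results']:
    print(failure['path'], failure['line'], failure['message'])
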
class DataFileList(ListAPIView):
    """
    List uploaded files
@@ -175,8 +189,8 @@ class GitRepositoryImportHook(APIView):
                repo=repo,
                hash=request.data['checkout_sha'],
                ref=request.data['ref'],
-                message=request.data['commits'][-1]['message'],
-                author=request.data['commits'][-1]['author']['name'],
+                message=request.data['commits'][0]['message'],
+                author=request.data['commits'][0]['author']['name'],
            )
        else:
            raise NotImplementedError
......
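
For context, a trimmed sketch of the push-hook payload fields the hook above reads; the values are hypothetical and real payloads carry many more keys:

# Hypothetical excerpt of a GitLab push event body
payload = {
    'checkout_sha': '8bdc6697',  # abbreviated hash for the example
    'ref': 'refs/heads/master',
    'commits': [
        {'message': 'DataImport errors', 'author': {'name': 'Bastien Abadie'}},
    ],
}
# The view stores checkout_sha as the revision hash and takes the message
# and author from commits[0].
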
# Generated by Django 2.0 on 2018-07-30 09:55
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('documents', '0020_metadata_revision'),
        ('dataimport', '0002_repository_revision'),
    ]

    operations = [
        migrations.CreateModel(
            name='DataImportFailure',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('path', models.CharField(max_length=250)),
                ('line', models.PositiveIntegerField(blank=True, null=True)),
                ('message', models.TextField()),
                ('context', models.TextField(blank=True, null=True)),
                ('dataimport', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='failures',
                    to='dataimport.DataImport',
                )),
                ('element', models.ForeignKey(
                    blank=True,
                    null=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='failures',
                    to='documents.Element',
                )),
            ],
        ),
    ]
@@ -10,6 +10,7 @@ from arkindex.project.models import IndexableModel
from arkindex.project.fields import ArrayField
import uuid
import os
+import re
import urllib.parse
@@ -123,6 +124,37 @@ class DataImport(IndexableModel):
            return
        return os.path.join(settings.LOCAL_MEDIA_ROOT, self.folder_name)

    def save_failures(self, failures):
        assert self.revision is not None
        assert all(isinstance(failure, DataImportFailure) for failure in failures)
        for failure in failures:
            # Strip the local clone directory so stored paths are repository-relative
            failure.path = re.sub(self.revision.repo.clone_dir, '', failure.path, count=1)
            failure.dataimport = self
        DataImportFailure.objects.bulk_create(failures)
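
To illustrate the path rewrite, a small sketch with hypothetical values; it assumes clone_dir (the local checkout location used by the import tasks) carries a trailing slash:

# Hypothetical: the repository was cloned under /data/clones/repo/
failure = DataImportFailure(path='/data/clones/repo/tei/volume1.xml', message='No witness')
dataimport.save_failures([failure])  # dataimport.revision must already be set
assert failure.path == 'tei/volume1.xml'  # clone prefix stripped, repo-relative
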
class DataImportFailure(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    dataimport = models.ForeignKey(
        'dataimport.DataImport', on_delete=models.CASCADE, related_name='failures')
    element = models.ForeignKey(
        'documents.Element', on_delete=models.CASCADE, blank=True, null=True, related_name='failures')
    path = models.CharField(max_length=250)
    line = models.PositiveIntegerField(blank=True, null=True)
    message = models.TextField()
    context = models.TextField(blank=True, null=True)

    @property
    def view_url(self):
        if not self.line or not self.dataimport.revision:
            return None
        return '{}/blob/{}/{}#L{}'.format(
            self.dataimport.revision.repo.url.rstrip('/'),
            self.dataimport.revision.hash,
            self.path,
            self.line,
        )
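
view_url follows the GitLab blob URL scheme, so a failure can be opened at the exact line in the repository browser; with hypothetical values:

# Hypothetical: repo url 'https://gitlab.example.com/corpus/tei',
# revision hash '8bdc669', path 'tei/volume1.xml', line 12:
#   failure.view_url
#   -> 'https://gitlab.example.com/corpus/tei/blob/8bdc669/tei/volume1.xml#L12'
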
class DataFile(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
......
from rest_framework import serializers
from rest_framework.utils import model_meta
from arkindex.project.serializer_fields import EnumField
-from arkindex.dataimport.models import DataImport, DataImportMode, DataImportState, DataFile, Revision
+from arkindex.dataimport.models import \
+    DataImport, DataImportMode, DataImportState, DataImportFailure, DataFile, Revision
+from arkindex.documents.serializers.light import ElementLightSerializer
import celery.states
@@ -80,6 +82,23 @@ class ImagesPayloadSerializer(serializers.Serializer):
    volume_name = serializers.CharField()


class RevisionSerializer(serializers.ModelSerializer):
    """
    Serialize a repository revision
    """

    class Meta:
        model = Revision
        fields = (
            'id',
            'hash',
            'ref',
            'message',
            'author',
            'commit_url',
        )
class DataImportSerializer(DataImportLightSerializer):
    """
    Serialize a data importing workflow with its payload
@@ -90,6 +109,7 @@ class DataImportSerializer(DataImportLightSerializer):
    payload = serializers.JSONField()
    tasks = TaskSerializer(many=True, read_only=True)
    task_count = serializers.IntegerField(read_only=True)
+    revision = RevisionSerializer(read_only=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -125,6 +145,7 @@ class DataImportSerializer(DataImportLightSerializer):
            'creator',
            'payload',
            'files',
+            'revision',
            'tasks',
            'task_count',
            'result',
@@ -147,18 +168,21 @@ class DataFileSerializer(serializers.ModelSerializer):
        read_only_fields = ('id', 'size', 'content_type', )


-class RevisionSerializer(serializers.ModelSerializer):
+class DataImportFailureSerializer(serializers.ModelSerializer):
    """
-    Serialize a repository revision
+    Serialize a data import error log
    """
+    element = ElementLightSerializer()

    class Meta:
-        model = Revision
+        model = DataImportFailure
        fields = (
            'id',
-            'hash',
-            'ref',
+            'element',
+            'path',
+            'line',
            'message',
-            'author',
-            'commit_url',
+            'context',
+            'view_url',
        )
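
For reference, one serialized failure could look like the following (a sketch with hypothetical values; element is null for file-level errors and view_url comes from the model property above):

{
    'id': '1f6e6e8a-0bfa-4f54-9d18-c23a3ede4e5c',  # hypothetical UUID
    'element': None,
    'path': 'tei/volume1.xml',
    'line': 12,
    'message': 'No witness in <TEI>',
    'context': '<teiHeader>\n  ...\n</teiHeader>',
    'view_url': 'https://gitlab.example.com/corpus/tei/blob/8bdc669/tei/volume1.xml#L12',
}
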
@@ -142,6 +142,7 @@ def import_repo(self, dataimport):
    parser = TeiParser(xml_file)
    parser.check()
    matches = parser.match_database(dataimport.corpus)
+    dataimport.save_failures(parser.match_errors)
    for db_elt, tei_elt in matches:
        with transaction.atomic():
......
from rest_framework.test import APITestCase
from rest_framework import status
from rest_framework.serializers import JSONField
from django.urls import reverse
-from arkindex.dataimport.models import DataImport, DataImportMode, DataImportState
+from arkindex.dataimport.models import \
+    DataImport, DataImportMode, DataImportState, DataImportFailure, DataFile, Revision, Repository
from arkindex.dataimport.serializers import DataImportSerializer, ImagesPayloadSerializer
from arkindex.documents.models import Corpus
+from arkindex.project.tests import RedisMockAPITestCase
from arkindex.users.models import User


-class TestImports(APITestCase):
+class TestImports(RedisMockAPITestCase):
    """
    Test data imports management
    """

    def setUp(self):
        super().setUp()
        self.corpus = Corpus.objects.create(id='test', name='Unit Tests')
        self.user = User.objects.create_user('test@test.test', 'testtest')
        self.dataimport = DataImport.objects.create(
@@ -43,3 +47,159 @@ class TestImports(APITestCase):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()
        self.assertEqual(data['id'], str(self.dataimport.id))
    def test_create_requires_login(self):
        response = self.client.post(reverse('api:import-list'), data={
            'corpus': self.corpus.id,
            'mode': DataImportMode.Images.value
        })
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_create(self):
        self.client.force_login(self.user)
        response = self.client.post(reverse('api:import-list'), data={
            'corpus': self.corpus.id,
            'mode': DataImportMode.Images.value
        })
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        data = response.json()
        dataimport = DataImport.objects.get(id=data['id'])
        self.assertEqual(dataimport.creator, self.user)
        self.assertEqual(dataimport.corpus, self.corpus)
        self.assertEqual(dataimport.mode, DataImportMode.Images)
        self.assertEqual(dataimport.state, DataImportState.Created)

    def test_payload_serializer(self):
        """
        Test the DataImportSerializer will use ImagesPayloadSerializer instead of JSONField for Images workflows
        """
        self.assertIsInstance(DataImportSerializer().fields['payload'], JSONField)
        self.assertIsInstance(
            DataImportSerializer(instance=self.dataimport).fields['payload'],
            ImagesPayloadSerializer,
        )

    def test_configure_requires_login(self):
        response = self.client.put(reverse('api:import-details', kwargs={'pk': self.dataimport.id}), data={
            'corpus': str(self.dataimport.corpus.id),
            'mode': DataImportMode.Images.value,
            'state': DataImportState.Created.value,
            'payload': {
                'folder_name': 'dir',
                'volume_name': 'vol',
            },
            'files': []
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_configure_no_files(self):
        """
        Test configuring an already created images workflow with no files
        """
        self.client.force_login(self.user)
        response = self.client.put(reverse('api:import-details', kwargs={'pk': self.dataimport.id}), data={
            'corpus': str(self.dataimport.corpus.id),
            'mode': DataImportMode.Images.value,
            'state': DataImportState.Created.value,
            'payload': {
                'folder_name': 'dir',
                'volume_name': 'vol',
            },
            'files': [],
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_configure(self):
        self.client.force_login(self.user)
        df = DataFile.objects.create(
            name='test.txt', size=42, hash='aaaa', content_type='text/plain', corpus=self.corpus)
        response = self.client.put(reverse('api:import-details', kwargs={'pk': self.dataimport.id}), data={
            'corpus': str(self.dataimport.corpus.id),
            'mode': DataImportMode.Images.value,
            'state': DataImportState.Created.value,
            'payload': {
                'folder_name': 'dir',
                'volume_name': 'vol',
            },
            'files': [df.id, ],
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.dataimport.refresh_from_db()
        self.assertEqual(self.dataimport.state, DataImportState.Configured)
        self.assertEqual(self.dataimport.payload['folder_name'], 'dir')
        self.assertEqual(self.dataimport.folder_name, 'dir')
        self.assertTrue(self.dataimport.iiif_path.endswith('dir'))
        self.assertEqual(self.dataimport.payload['volume_name'], 'vol')
        self.assertListEqual(list(self.dataimport.files.all()), [df])

    def test_start(self):
        self.client.force_login(self.user)
        df = DataFile.objects.create(
            name='test.txt', size=42, hash='aaaa', content_type='text/plain', corpus=self.corpus)
        self.dataimport.payload = {'folder_name': 'dir', 'volume_name': 'vol'}
        self.dataimport.files.add(df)
        self.dataimport.state = DataImportState.Configured
        self.dataimport.save()
        self.assertGreaterEqual(self.redis.llen('celery'), 0)
        response = self.client.put(reverse('api:import-details', kwargs={'pk': self.dataimport.id}), data={
            'corpus': str(self.dataimport.corpus.id),
            'mode': DataImportMode.Images.value,
            'state': DataImportState.Running.value,
            'payload': {
                'folder_name': 'dir',
                'volume_name': 'vol',
            },
            'files': [df.id, ],
        }, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.dataimport.refresh_from_db()
        self.assertIsNotNone(self.dataimport.root_id)
        self.assertGreaterEqual(self.redis.llen('celery'), 1)

    def test_failures_require_login(self):
        self.dataimport.failures.create(path='path/to/file', message='something')
        response = self.client.get(reverse('api:import-failures', kwargs={'pk': self.dataimport.id}))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_failures(self):
        self.client.force_login(self.user)
        failure = self.dataimport.failures.create(path='path/to/file', message='something')
        response = self.client.get(reverse('api:import-failures', kwargs={'pk': self.dataimport.id}))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()
        self.assertEqual(len(data['results']), 1)
        self.assertEqual(data['results'][0]['id'], str(failure.id))

    def test_save_failures(self):
        """
        Check DataImport properly saves failures
        """
        rev = Revision.objects.create(
            repo=Repository.objects.create(
                url='http://repo',
                hook_token='token',
                clone_user='user',
                clone_token='token',
                corpus=self.corpus,
                user=self.user,
            ),
            hash='42',
            ref='ref/heads/master',
            message='a',
            author='me',
        )
        self.dataimport.revision = rev
        self.dataimport.save_failures([
            DataImportFailure(
                path='somewhere/something',
                message="Test failure",
            )
        ])
        self.assertEqual(self.dataimport.failures.count(), 1)
        self.assertEqual(self.dataimport.failures.get().message, "Test failure")
from django.conf.urls import url
from arkindex.dataimport.views import \
-    DataImportsList, DataImportCreate, DataImportConfig, DataImportStatus, DataFileList
+    DataImportsList, DataImportCreate, DataImportConfig, DataImportStatus, DataImportFailures, DataFileList

urlpatterns = [
@@ -8,5 +8,6 @@ urlpatterns = [
    url(r'^new/?$', DataImportCreate.as_view(), name='import-create'),
    url(r'^(?P<pk>[\w\-]+)/config/?$', DataImportConfig.as_view(), name='import-config'),
    url(r'^(?P<pk>[\w\-]+)/status/?$', DataImportStatus.as_view(), name='import-status'),
+    url(r'^(?P<pk>[\w\-]+)/failures/?$', DataImportFailures.as_view(), name='import-failures'),
    url(r'^files/?$', DataFileList.as_view(), name='files'),
]
@@ -43,6 +43,19 @@ class DataImportStatus(LoginRequiredMixin, DetailView):
    )


class DataImportFailures(LoginRequiredMixin, DetailView):
    """
    View a data import workflow's failures
    """
    template_name = 'dataimport/failures.html'
    context_object_name = 'dataimport'

    def get_queryset(self):
        # Failures can only exist once an import has started, so imports
        # still in the Created or Configured states are excluded.
        return DataImport.objects.filter(creator=self.request.user).exclude(
            state__in=[DataImportState.Created, DataImportState.Configured],
        )


class DataFileList(LoginRequiredMixin, TemplateView):
    """
    View and manage uploaded files
......
@@ -2,6 +2,7 @@ from rest_framework import serializers
from arkindex.documents.models import \
    Element, ElementType, Page, PageType, PageDirection, Act, Corpus, MetaData, MetaType
from arkindex.images.serializers import ZoneSerializer, ImageSerializer
+from arkindex.documents.serializers.light import ElementLightSerializer
from arkindex.dataimport.serializers import RevisionSerializer
from arkindex.project.serializer_fields import EnumField, ViewerURLField
@@ -24,21 +25,6 @@ class MetaDataSerializer(serializers.ModelSerializer):
    )


-class ElementLightSerializer(serializers.ModelSerializer):
-    """
-    Serialises a Element
-    """
-    type = EnumField(ElementType)
-
-    class Meta:
-        model = Element
-        fields = (
-            'id',
-            'type',
-            'name',
-        )
-
-
class PageLightSerializer(serializers.ModelSerializer):
    """
    Serialises a Page
......
from rest_framework import serializers
from arkindex.documents.models import Element, ElementType
from arkindex.project.serializer_fields import EnumField


class ElementLightSerializer(serializers.ModelSerializer):
    """
    Serialises an Element
    """
    type = EnumField(ElementType)

    class Meta:
        model = Element
        fields = (
            'id',
            'type',
            'name',
        )
from django.conf import settings
from lxml import etree
-from arkindex.project.tools import find_closest
+from arkindex.project.tools import find_closest, read_file_range
from arkindex.documents.models import ElementType, Act, Element, MetaType
-from arkindex.dataimport.models import Revision
+from arkindex.dataimport.models import Revision, DataImportFailure
import logging
import sys
import os
import re
from django.conf import settings

NAMESPACES = {
@@ -269,15 +269,20 @@ class Corpus(TeiElement):


class TeiParser(object):

    def __init__(self, path):
        assert os.path.exists(path), \
            'Invalid path {}'.format(path)
        self.path = path
        root = etree.parse(path).getroot()
        # Some files only have xmlns="..." and not xmlns:tei="...", the former maps to None
        assert root.nsmap.get('tei') == NAMESPACES['tei'] or root.nsmap.get(None) == NAMESPACES['tei'], \
            'Missing tei XML namespace'
        # A list of DataImportFailure instances with no associated DataImport and absolute paths.
        self.match_errors = []
        # Start parsing
        self.corpus = Corpus(root)
@@ -286,6 +291,18 @@ class TeiParser(object):
        for tei in self.corpus.tei:
            logger.info('{} - completion {:.1%}'.format(tei, tei.completion))

    def _match_error(self, tei_elt, message, db_elt=None):
        failure = DataImportFailure(
            path=self.path,
            line=tei_elt.element.sourceline,
            element=db_elt,
            message=message,
        )
        if failure.line:
            # sourceline is 1-indexed while read_file_range is 0-indexed, so
            # this grabs five lines of context on each side of the offending line
            failure.context = read_file_range(self.path, failure.line - 6, failure.line + 5).rstrip('\n')
        self.match_errors.append(failure)
        logger.warning(message)
    def match_database(self, corpus):
        '''
        Link TEI elements with DB elements
@@ -298,13 +315,13 @@ class TeiParser(object):
        out = []
        for tei in self.corpus.tei:
            if not tei.witness:
-                logger.warning('No witness in {}'.format(str(tei)))
+                self._match_error(tei, 'No witness in {}'.format(str(tei)))
                continue
            tei_name = tei.witness.id or tei.witness.repository_id
            volume = find_closest(tei_name, volumes)
            if not volume:
-                logger.warning('No match for {}'.format(tei))
+                self._match_error(tei, 'No matching volume for {}'.format(tei))
                continue
            out.append((volume, tei))
@@ -313,13 +330,13 @@ class TeiParser(object):
            # Load volume acts
            volume_acts = Element.objects.get_descending(volume.id, type=ElementType.Act)
            if not volume_acts.exists():
-                logger.warning('No acts in DB for {}'.format(volume))
+                self._match_error(tei, 'No acts in DB for {}'.format(volume), volume)
                continue

            # Match acts
            for text in tei.texts:
                if text.witness is None:
-                    logger.warning('No witness on text, skipping.')
+                    self._match_error(text, 'No witness on text, skipping.')
                    continue
                act = Act.objects.filter(
@@ -330,7 +347,7 @@ class TeiParser(object):
                    out.append((act, text))
                    logger.info('Matched {} with {}'.format(act, text))
                else:
-                    logger.warning('No match for {}'.format(text))
+                    self._match_error(text, 'No match for {}'.format(text))
        return out
......
from fakeredis import FakeStrictRedis
from unittest import TestCase
from unittest.mock import patch
+from arkindex.project.tests import RedisMockMixin
from arkindex.documents.tasks import reindex_acts, reindex_transcriptions


-class TestTasks(TestCase):
+class TestTasks(RedisMockMixin, TestCase):
    """Tests for asynchronous tasks"""

-    def setUp(self):
-        self.patches = [patch(x) for x in [
-            'celery.backends.redis.RedisBackend._create_client',
-            'kombu.transport.redis.Channel._create_client',
-            'celery_once.backends.redis.get_redis',
-        ]]
-        self.mocked = [p.start() for p in self.patches]
-        self.redis = FakeStrictRedis()
-        for m in self.mocked:
-            m.return_value = self.redis
-
-    def tearDown(self):
-        for p in self.patches:
-            p.stop()
-        self.redis.flushall()
-
    def test_reindex_acts(self):
        reindex_acts.delay()
        reindex_acts.delay()
......
from lxml import etree
from arkindex.documents.models import Act
-from arkindex.documents.tei import Text
-from arkindex.dataimport.models import Repository, Revision
+from arkindex.documents.tei import Text, TeiParser
+from arkindex.dataimport.models import Repository, Revision, DataImportFailure
from arkindex.project.tests import FixtureTestCase
import os.path
@@ -13,6 +13,16 @@ FIXTURES = os.path.join(


class TestTeiElement(FixtureTestCase):

    def setUp(self):
        self.repo = Repository.objects.create(
            url='http://repo',
            hook_token='token',
            clone_user='user',
            clone_token='token',
            corpus=self.corpus,
            user=self.user,
        )

    def test_apply_xslt(self):
        tree = etree.parse(os.path.join(FIXTURES, 'arguments.xml'))
        te = Text(tree.getroot())
@@ -37,16 +47,8 @@ class TestTeiElement(FixtureTestCase):
        te_before = Text(tree_before.getroot())
        te_after = Text(tree_after.getroot())

-        repo = Repository.objects.create(
-            url='http://repo',
-            hook_token='token',
-            clone_user='user',
-            clone_token='token',
-            corpus=self.corpus,
-            user=self.user,
-        )
-        rev1 = Revision.objects.create(repo=repo, hash='42', ref='ref/heads/master', message='a', author='me')
-        rev2 = Revision.objects.create(repo=repo, hash='43', ref='ref/heads/master', message='b', author='me')
+        rev1 = Revision.objects.create(repo=self.repo, hash='42', ref='ref/heads/master', message='a', author='me')
+        rev2 = Revision.objects.create(repo=self.repo, hash='43', ref='ref/heads/master', message='b', author='me')

        te_before.save(act, rev1)
        location = act.metadatas.get(name="location")
@@ -67,3 +69,16 @@ class TestTeiElement(FixtureTestCase):
        self.assertEqual(persons.revision, rev2)
        self.assertFalse(act.metadatas.filter(name="places").exists())
        self.assertEqual(act.metadatas.get(name="subjects").value, 'Something')

    def test_match_error(self):
        xml_path = os.path.join(FIXTURES, 'arguments.xml')
        parser = TeiParser(xml_path)
        parser._match_error(parser.corpus, 'Some funny test message')
        self.assertEqual(len(parser.match_errors), 1)
        failure = parser.match_errors[0]
        self.assertIsInstance(failure, DataImportFailure)
        self.assertEqual(failure.path, xml_path)
        self.assertEqual(failure.line, 1)
        self.assertEqual(failure.message, 'Some funny test message')
        self.assertIsNone(failure.dataimport_id)
        self.assertIsNone(failure.element_id)
@@ -7,7 +7,8 @@ from arkindex.documents.api import \
    TranscriptionSearch, ActSearch, TranscriptionSearchAnnotationList, \
    ActEdit, TranscriptionCreate, TranscriptionBulk, SurfaceDetails
from arkindex.dataimport.api import \
-    DataImportsList, DataImportDetails, DataFileList, DataFileRetrieve, DataFileUpload, GitRepositoryImportHook
+    DataImportsList, DataImportDetails, DataImportFailures, \
+    DataFileList, DataFileRetrieve, DataFileUpload, GitRepositoryImportHook

api = [
@@ -70,6 +71,7 @@ api = [
    # Import workflows
    url(r'^imports/$', DataImportsList.as_view(), name='import-list'),
    url(r'^imports/(?P<pk>[\w\-]+)$', DataImportDetails.as_view(), name='import-details'),
+    url(r'^imports/(?P<pk>[\w\-]+)/failures$', DataImportFailures.as_view(), name='import-failures'),
    url(r'^imports/files/(?P<pk>[\w\-]+)$', DataFileList.as_view(), name='file-list'),
    url(r'^imports/file/(?P<pk>[\w\-]+)$', DataFileRetrieve.as_view(), name='file-retrieve'),
    url(r'^imports/upload/(?P<pk>[\w\-]+)$', DataFileUpload.as_view(), name='file-upload'),
......
from django.test import TestCase
+from fakeredis import FakeStrictRedis
from rest_framework.test import APITestCase
+from unittest.mock import patch
from arkindex.documents.models import Corpus
from arkindex.images.models import ImageServer
from arkindex.users.models import User


class FixtureMixin(object):
    """
    Add the database fixture to a test case
    """
    fixtures = ['data.json', ]

    @classmethod
@@ -17,8 +23,46 @@ class FixtureMixin(object):


class FixtureTestCase(FixtureMixin, TestCase):
-    pass
+    """
+    Django test case with the database fixture
+    """


class FixtureAPITestCase(FixtureMixin, APITestCase):
-    pass
+    """
+    Django REST Framework test case with the database fixture
+    """


class RedisMockMixin(object):
    """
    Add Redis mocking to a test case
    """

    def setUp(self):
        # Patch every place Celery and celery_once create a Redis client,
        # pointing them all at a single in-memory FakeStrictRedis instance
        self.patches = [patch(x) for x in [
            'celery.backends.redis.RedisBackend._create_client',
            'kombu.transport.redis.Channel._create_client',
            'celery_once.backends.redis.get_redis',
        ]]
        self.mocked = [p.start() for p in self.patches]
        self.redis = FakeStrictRedis()
        for m in self.mocked:
            m.return_value = self.redis

    def tearDown(self):
        for p in self.patches:
            p.stop()
        self.redis.flushall()


class RedisMockTestCase(RedisMockMixin, TestCase):
    """
    Django test case with Redis mocking
    """


class RedisMockAPITestCase(RedisMockMixin, APITestCase):
    """
    Django REST Framework test case with Redis mocking
    """
@@ -77,3 +77,20 @@ def build_absolute_url(element, request, name, id_argument='pk', **kwargs):
    """
    kwargs[id_argument] = str(element.id)
    return request.build_absolute_uri(reverse(name, kwargs=kwargs))


def read_file_range(path, start, end):
    """
    Read a specific range of lines from a file.
    Line numbers start at 0. Like with Python slices, start is inclusive and end is exclusive.
    """
    # TODO: Optimize this
    lines = []
    with open(path) as f:
        for i, line in enumerate(f):
            if i < start:
                continue
            elif i >= end:
                break
            lines.append(line)
    return ''.join(lines)
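
A quick worked example of the range semantics and how _match_error uses them to build failure contexts (a sketch with a hypothetical temporary file; lxml's sourceline is 1-indexed while read_file_range is 0-indexed):

import tempfile

# Hypothetical four-line file to illustrate the range semantics
with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
    f.write('a\nb\nc\nd\n')
assert read_file_range(f.name, 1, 3) == 'b\nc\n'  # 0-indexed, end exclusive

# For an error reported at 1-indexed line L, _match_error requests
# read_file_range(path, L - 6, L + 5): five lines of context on each
# side of the offending line (negative starts behave like 0).
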
{% extends 'base.html' %}

{% block content %}
<h1 class="title">Workflow errors</h1>
<h2 class="subtitle">View a workflow's errors</h2>
<div id="app">
    <Import-Failures id="{{ dataimport.id }}" />
</div>
{% endblock %}