From 830880a310600486194f9c1657057d00666391d2 Mon Sep 17 00:00:00 2001
From: Erwan Rouchet <rouchet@teklia.com>
Date: Tue, 4 Jun 2019 15:22:14 +0000
Subject: [PATCH] Remove non-breaking spaces

---
 arkindex/dataimport/filetypes.py                   |  2 +-
 arkindex/dataimport/iiif.py                        |  2 +-
 arkindex/documents/indexer.py                      |  2 +-
 .../documents/management/commands/delete_corpus.py |  2 +-
 arkindex/documents/surface.py                      |  2 +-
 arkindex/documents/surface_link.py                 | 14 +++++++-------
 arkindex/images/importer.py                        |  2 +-
 .../images/management/commands/check_images.py     |  4 ++--
 .../templates/admin/images/imageserver/merge.html  |  2 +-
 9 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/arkindex/dataimport/filetypes.py b/arkindex/dataimport/filetypes.py
index eaf39b8637..32b29ff457 100644
--- a/arkindex/dataimport/filetypes.py
+++ b/arkindex/dataimport/filetypes.py
@@ -56,7 +56,7 @@ class FileType(ABC):
         self.changed_objects = []
 
     def __str__(self):
-        return '{} - {} from {} to {}'.format(
+        return '{} - {} from {} to {}'.format(
             self.__class__.__name__,
             self.diff.type.name,
             self.diff.old_path,
diff --git a/arkindex/dataimport/iiif.py b/arkindex/dataimport/iiif.py
index 418b3e22c8..b7645015da 100644
--- a/arkindex/dataimport/iiif.py
+++ b/arkindex/dataimport/iiif.py
@@ -438,7 +438,7 @@ class ManifestParser(IIIFParser):
     def run(self):
         try:
             self.parse()
-            logger.info("Parsed volume {}: {} metadata in volume, {} pages ({} changed), {} new servers".format(
+            logger.info("Parsed volume {}: {} metadata in volume, {} pages ({} changed), {} new servers".format(
                 self.volume.name, len(self.metadata), len(self.pages), self.change_count, self.created_servers_count
             ))
 
diff --git a/arkindex/documents/indexer.py b/arkindex/documents/indexer.py
index 482453d74f..2d15a0e32a 100644
--- a/arkindex/documents/indexer.py
+++ b/arkindex/documents/indexer.py
@@ -82,7 +82,7 @@ class Indexer(object):
             self.elastic.indices.delete(index=index_name)
             logger.info("Dropped index {}".format(index_name))
         except NotFoundError:
-            logger.info("Could not drop index {} (does not exist)".format(index_name))
+            logger.info("Could not drop index {} (does not exist)".format(index_name))
 
     def run_index(self, index_name, index_type, queryset, bulk_size=400):
         """
diff --git a/arkindex/documents/management/commands/delete_corpus.py b/arkindex/documents/management/commands/delete_corpus.py
index c05450ff30..9f44728e32 100644
--- a/arkindex/documents/management/commands/delete_corpus.py
+++ b/arkindex/documents/management/commands/delete_corpus.py
@@ -87,7 +87,7 @@ class Command(PonosCommand):
             elts = Element.objects.filter(id__in=element_ids[i:i+batch_size])
             deleted += elts.count()
             elts.delete()
-            logger.info('Deleted {} elements out of {} ({: >3}%)'.format(
+            logger.info('Deleted {} elements out of {} ({: >3}%)'.format(
                 deleted,
                 element_count,
                 int(100.0 * deleted / element_count),
diff --git a/arkindex/documents/surface.py b/arkindex/documents/surface.py
index 459558d79c..98492a96bc 100644
--- a/arkindex/documents/surface.py
+++ b/arkindex/documents/surface.py
@@ -104,6 +104,6 @@ class SurfaceImporter(object):
             surfaces_count += region_count
             created_surfaces_count += created_count
 
-        logger.info("Parsed {} and created {} surfaces from {} XML files".format(
+        logger.info("Parsed {} and created {} surfaces from {} XML files".format(
             surfaces_count, created_surfaces_count, xml_count,
         ))
diff --git a/arkindex/documents/surface_link.py b/arkindex/documents/surface_link.py
index 341571c968..ba2fb99532 100644
--- a/arkindex/documents/surface_link.py
+++ b/arkindex/documents/surface_link.py
@@ -242,7 +242,7 @@ class SurfaceLinker(object):
         # The only trustable folio is the act starting folio - we start by finding all pages between this act's
         # starting folio and the next act's starting folio. Conflicts will be solved later.
         for index, act in enumerate(self.acts_raw):
-            logger.debug('Parsing act {} of volume {} with folio "{}"'.format(
+            logger.debug('Parsing act {} of volume {} with folio "{}"'.format(
                 act.number, self.volume.name, act.folio))
 
             # First find this act's folios
@@ -350,7 +350,7 @@ class SurfaceLinker(object):
 
     @transaction.atomic
     def save_act(self, act_raw):
-        logger.debug('Saving act {} of volume {} with folio "{}"'.format(
+        logger.debug('Saving act {} of volume {} with folio "{}"'.format(
             act_raw.number, self.volume.name, act_raw.folio))
 
         # Get the act
@@ -400,10 +400,10 @@ class SurfaceLinker(object):
     def print_stats(self, dry_run=False):
         logger.info('Ran in {}'.format(datetime.timedelta(seconds=self.end_time - self.start_time)))
         logger.info('Parsed {} acts'.format(self.parsed_acts))
-        logger.info('Found {} surfaces'.format(self.surfaces_count))
-        logger.info('{} {} surfaces to acts'.format(dry_run and 'Would link' or 'Linked', self.linked_surfaces))
+        logger.info('Found {} surfaces'.format(self.surfaces_count))
+        logger.info('{} {} surfaces to acts'.format(dry_run and 'Would link' or 'Linked', self.linked_surfaces))
         if not dry_run:
-            logger.info('Created {} acts'.format(self.created_acts))
+            logger.info('Created {} acts'.format(self.created_acts))
 
     def _pop_surfaces(self, folio, index=None):
         """
@@ -413,12 +413,12 @@ class SurfaceLinker(object):
         Will log warnings if folio does not exist or has no surfaces available.
         """
         if folio not in self.surfaces:
-            logger.warning('Folio {} not found in surfaces of volume {}'.format(
+            logger.warning('Folio {} not found in surfaces of volume {}'.format(
                 folio, self.volume.name))
             return []
 
         if not self.surfaces[folio]:
-            logger.warning('Ran out of surfaces for folio {} of volume {}'.format(
+            logger.warning('Ran out of surfaces for folio {} of volume {}'.format(
                 folio, self.volume.name))
             return []
 
diff --git a/arkindex/images/importer.py b/arkindex/images/importer.py
index bd62d08821..e0188479e7 100644
--- a/arkindex/images/importer.py
+++ b/arkindex/images/importer.py
@@ -273,7 +273,7 @@ class IndexImporter(object):
         for path in self.path.glob('**/*.idx.gz'):
             rel_path = path.relative_to(self.path)
             if len(rel_path.parts) <= 1:  # File is in root folder
-                logger.warning('File {} is not in a subfolder'.format(str(rel_path)))
+                logger.warning('File {} is not in a subfolder'.format(str(rel_path)))
                 continue
             volume = self.find_volume(rel_path.parts[0])
             if not volume:
diff --git a/arkindex/images/management/commands/check_images.py b/arkindex/images/management/commands/check_images.py
index 1d1e8923d2..5a660a4bca 100644
--- a/arkindex/images/management/commands/check_images.py
+++ b/arkindex/images/management/commands/check_images.py
@@ -71,7 +71,7 @@ class Command(PonosCommand):
             server_sample = server.images \
                 .filter(status=S3FileStatus.Checked) \
                 .order_by('?')[:sample]
-            logger.info('Re-checking {} images in server {}'.format(len(server_sample), server.display_name))
+            logger.info('Re-checking {} images in server {}'.format(len(server_sample), server.display_name))
             self.check(server_sample)
         self.check(images)
 
@@ -79,7 +79,7 @@
         successful, failed = 0, 0
 
         for image in images:
-            logger.info('Checking image {} at {}'.format(str(image.id), image.url))
+            logger.info('Checking image {} at {}'.format(str(image.id), image.url))
             image.perform_check(save=True)
             if image.status == S3FileStatus.Checked:
                 successful += 1
diff --git a/arkindex/templates/admin/images/imageserver/merge.html b/arkindex/templates/admin/images/imageserver/merge.html
index 3ac635f06f..cdd8fe33ce 100644
--- a/arkindex/templates/admin/images/imageserver/merge.html
+++ b/arkindex/templates/admin/images/imageserver/merge.html
@@ -20,7 +20,7 @@
 <h2>Summary</h2>
 <ul>
   <li>
-    Will delete {{ object }}
+    Will delete {{ object }}
  </li>
   {% if object.images.exists %}
   <li>Will merge {{ object.images.count }} image{{ object.images.count|pluralize }} into the destination server</li>
-- 
GitLab
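
Note (not part of the patch above): a small helper along the lines below could catch non-breaking spaces before they creep back into the sources. This is only an illustrative sketch; the script name, the scanned extensions and the command-line interface are assumptions, not something this commit adds.

#!/usr/bin/env python3
# check_nbsp.py -- hypothetical helper, not part of the commit above.
# Walks a source tree and reports every line that still contains a
# non-breaking space (U+00A0), the character this patch removes.
import sys
from pathlib import Path

NBSP = '\u00a0'
EXTENSIONS = {'.py', '.html'}  # assumption: the file types touched by the patch


def find_nbsp(root):
    """Yield (path, line number) pairs for lines containing a non-breaking space."""
    for path in Path(root).rglob('*'):
        if not path.is_file() or path.suffix not in EXTENSIONS:
            continue
        text = path.read_text(encoding='utf-8', errors='replace')
        for lineno, line in enumerate(text.splitlines(), start=1):
            if NBSP in line:
                yield path, lineno


if __name__ == '__main__':
    root = sys.argv[1] if len(sys.argv) > 1 else '.'
    hits = list(find_nbsp(root))
    for path, lineno in hits:
        print('{}:{}: non-breaking space'.format(path, lineno))
    sys.exit(1 if hits else 0)

Run for example as `python3 check_nbsp.py arkindex/` from the repository root; a non-zero exit status means at least one non-breaking space remains.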