Skip to content

Commit 7d70873

Browse files
committed
Admin clone/transform tool: log more export/import details into stored task log
1 parent b759030 commit 7d70873

File tree

3 files changed

+90
-85
lines changed

3 files changed

+90
-85
lines changed

django/applications/catmaid/control/annotationadmin.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -180,7 +180,8 @@ def async_project_copy_job(import_task_id) -> str:
180180

181181
# Export project data
182182
task_logger.info(f'Exporting data from project {p}')
183-
exporter = Exporter(p, export_options)
183+
exporter = Exporter(p, export_options,
184+
custom_logger=task_logger)
184185
project_data = exporter.export()
185186
precomputed_stats = exporter.get_precomputed_stats() \
186187
if scd['transfer_stats'] else dict()
@@ -208,7 +209,8 @@ def async_project_copy_job(import_task_id) -> str:
208209
task_logger.info(f'Importing into project {target_project}')
209210
override_user = None
210211
importer = GenericImporter(project_data, target_project,
211-
override_user, import_options)
212+
override_user, import_options,
213+
custom_logger=task_logger)
212214
with transaction.atomic():
213215
importer.import_data()
214216

django/applications/catmaid/control/exporter.py

Lines changed: 35 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ def has_more_weight(a, b):
114114

115115
class Exporter():
116116

117-
def __init__(self, project, options):
117+
def __init__(self, project, options, custom_logger=None):
118118
"""
119119
Create a new basic exporter that exports data from the passed in project
120120
with the provided options. Options are declared as a dictionary with
@@ -138,7 +138,8 @@ def __init__(self, project, options):
138138
exclusion_is_final: False
139139
}
140140
"""
141-
set_log_level(logger, options.get('verbosity', 1))
141+
self.logger = custom_logger or logger
142+
set_log_level(self.logger, options.get('verbosity', 1))
142143

143144
self.project = project
144145
self.options = options
@@ -223,7 +224,7 @@ def collect_data(self):
223224
neuron_info, num_total_records = get_annotated_entities(self.project.id,
224225
query_params, relations, classes, ['neuron'], with_skeletons=True)
225226

226-
logger.info(f"Found {num_total_records} neurons with the following exclusion annotations: {', '.join(self.excluded_annotations)}")
227+
self.logger.info(f"Found {num_total_records} neurons with the following exclusion annotations: {', '.join(self.excluded_annotations)}")
227228

228229
exclude_skeleton_id_constraints = set(chain.from_iterable(
229230
[n['skeleton_ids'] for n in neuron_info]))
@@ -263,7 +264,7 @@ def collect_data(self):
263264
settings_annotations, num_total_records = get_annotated_entities(self.project.id,
264265
query_params, relations, classes, ['annotation'])
265266

266-
logger.info(f"Found {num_total_records} annotations with the "
267+
self.logger.info(f"Found {num_total_records} annotations with the "
267268
f"following settings meta-annotations: {self.settings_meta_annotation}")
268269

269270
set_annotation_map = dict([(n['name'], n['id']) for n in settings_annotations])
@@ -278,7 +279,7 @@ def collect_data(self):
278279
settings_annotations_map = get_annotation_to_id_map(
279280
self.project.id, settings_annotation_names, relations, classes)
280281

281-
logger.info(f"Found {len(settings_annotations_map)} used "
282+
self.logger.info(f"Found {len(settings_annotations_map)} used "
282283
f"settings annotations: {', '.join(settings_annotations_map.keys())}")
283284

284285
# For each of these annotations, collect all annotated neurons and
@@ -292,7 +293,7 @@ def collect_data(self):
292293
settings_neuron_info, num_total_records = get_annotated_entities(self.project.id,
293294
query_params, relations, classes, ['neuron'], with_skeletons=True)
294295

295-
logger.info(f"Found {num_total_records} neurons with the "
296+
self.logger.info(f"Found {num_total_records} neurons with the "
296297
f"following settings meta-annotations: {se_name}")
297298

298299
skeleton_ids_with_settings:List = \
@@ -375,7 +376,7 @@ def collect_data(self):
375376
export_settings['connectors'][skeleton_id] = connector_settings
376377
n_updated_connectors += 1
377378

378-
logger.info(f'Updated export settings for set "{se_name}" '
379+
self.logger.info(f'Updated export settings for set "{se_name}" '
379380
f'({len(skeleton_ids_with_settings)} skeletons) based on annotations for '
380381
f'treenodes ({n_updated_treenodes} neurons), '
381382
f'tags ({n_updated_tags} neurons), '
@@ -401,7 +402,7 @@ def collect_data(self):
401402
neuron_info, num_total_records = get_annotated_entities(self.project.id,
402403
query_params, relations, classes, ['neuron'], with_skeletons=True)
403404

404-
logger.info(f"Found {num_total_records} neurons with the following annotations: {', '.join(self.required_annotations)}")
405+
self.logger.info(f"Found {num_total_records} neurons with the following annotations: {', '.join(self.required_annotations)}")
405406

406407
skeleton_id_constraints:Optional[List] = list(chain.from_iterable([n['skeleton_ids'] for n in neuron_info]))
407408
neuron_ids = [n['id'] for n in neuron_info]
@@ -469,7 +470,7 @@ def collect_data(self):
469470
if entities.count() == 0:
470471
raise CommandError("No matching neurons found")
471472

472-
logger.info("Will export %s neurons" % entities.count())
473+
self.logger.info("Will export %s neurons" % entities.count())
473474
if not self.run_noninteractive:
474475
start_export = ask_to_continue()
475476
if not start_export:
@@ -516,35 +517,35 @@ def collect_data(self):
516517
n_default_connector_skeletons = len(connector_skeletons)
517518

518519
connector_link_lists = []
519-
logger.info('Marking connector links for export for individual neuron sets')
520+
self.logger.info('Marking connector links for export for individual neuron sets')
520521
for se_name, se_id in set_annotation_map.items():
521522
settings_neuron_info = settings_neuron_infos.get(se_id)
522523
if settings_neuron_info is None:
523-
logger.error(f'Could not find export setting info for set annotation {se_name} with ID {se_id}')
524+
self.logger.error(f'Could not find export setting info for set annotation {se_name} with ID {se_id}')
524525
continue
525526

526527
skeleton_ids = settings_neuron_info['skeleton_ids']
527528
connector_setting = settings_neuron_info['connector_settings']
528529

529530
if connector_setting == ExportAnnotation.ConnectorsOnlyIntra:
530-
logger.info(f'Allowing only intra-set connector links export for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
531+
self.logger.info(f'Allowing only intra-set connector links export for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
531532
# Remove this neuron set from the general set of connector skeletons
532533
connector_skeletons = connector_skeletons - set(skeleton_ids)
533534
elif connector_setting == ExportAnnotation.ConnectorsNo:
534-
logger.info(f'Skipping connector link export for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
535+
self.logger.info(f'Skipping connector link export for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
535536
connector_skeletons = connector_skeletons - set(skeleton_ids)
536537
n_skeletons_ignored_for_connectors += len(skeleton_ids)
537538
continue
538539
elif connector_setting is None:
539-
logger.info(f'Applying no connector link constraints for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)}, Default: {self.connector_mode})')
540+
self.logger.info(f'Applying no connector link constraints for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)}, Default: {self.connector_mode})')
540541
continue
541542
else:
542-
logger.info(f'Allowing regular connectivity for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
543+
self.logger.info(f'Allowing regular connectivity for neuron set "{se_name}" (ID: {se_id}, # skeletons: {len(skeleton_ids)})')
543544

544545
connector_link_lists.append(TreenodeConnector.objects \
545546
.filter(project=self.project, skeleton_id__in=skeleton_ids) \
546547
.values_list('id', 'connector', 'treenode'))
547-
logger.info(f'Current number of connector links: {len(connector_link_lists[-1])}')
548+
self.logger.info(f'Current number of connector links: {len(connector_link_lists[-1])}')
548549

549550
# Add remaining export skeletons that didn't have any explicit constraint.
550551
connector_link_lists.append(TreenodeConnector.objects \
@@ -559,7 +560,7 @@ def collect_data(self):
559560
connector_ids = set(c for _,c,_ in connector_links)
560561
self.to_serialize.append(Connector.objects.filter(
561562
id__in=connector_ids))
562-
logger.info(f'Exporting {len(connector_ids)} connectors '
563+
self.logger.info(f'Exporting {len(connector_ids)} connectors '
563564
f'({n_skeletons_ignored_for_connectors} explicitly ignored) '
564565
f'with {len(connector_links)} links')
565566

@@ -663,10 +664,10 @@ def collect_data(self):
663664
if all_annotation_links:
664665
self.to_serialize.append(all_annotation_links)
665666

666-
logger.info(f"Exporting {len(all_annotations)} annotations " + \
667+
self.logger.info(f"Exporting {len(all_annotations)} annotations " + \
667668
f"and {len(all_annotation_links)} annotation links")#: {', '.join([a.name for a in all_annotations])}")
668669
if self.annotation_annotations:
669-
logger.info("Only annotations in hierarchy of the following " + \
670+
self.logger.info("Only annotations in hierarchy of the following " + \
670671
f"annotations are exported: {', '.join(self.annotation_annotations)}")
671672

672673
# Export tags
@@ -746,22 +747,22 @@ def collect_data(self):
746747
if tags:
747748
tag_names = sorted(set(t.name for t in tags))
748749
if self.allowed_tags is None:
749-
logger.info('All tags are allowed for export')
750+
self.logger.info('All tags are allowed for export')
750751
else:
751-
logger.info(f'Allowed tags: {", ".join(self.allowed_tags)}')
752-
logger.info(f"Exporting {len(tags)} tags, part of {tag_links.count()} links: {', '.join(tag_names)}")
752+
self.logger.info(f'Allowed tags: {", ".join(self.allowed_tags)}')
753+
self.logger.info(f"Exporting {len(tags)} tags, part of {tag_links.count()} links: {', '.join(tag_names)}")
753754

754755
self.to_serialize.append(tags)
755756
self.to_serialize.append(tag_links)
756757
self.to_serialize.append(tag_links_connectors)
757758
else:
758-
logger.info(f"Exporting {len(tags)} tags and {tag_links.count()} (skeleton) and {tag_links_connectors.count()} (connector) tag links")
759+
self.logger.info(f"Exporting {len(tags)} tags and {tag_links.count()} (skeleton) and {tag_links_connectors.count()} (connector) tag links")
759760

760761
# TODO: Export reviews
761762
else:
762763
# TODO: Add support for export annotations
763764
if n_export_settings > 0:
764-
logger.warn('Export settings are currently only supported for '
765+
self.logger.warn('Export settings are currently only supported for '
765766
f'annotation based exports. Found {n_export_settings} '
766767
'export setting annotations')
767768

@@ -820,7 +821,7 @@ def collect_data(self):
820821
relation=relations.get('model_of')).values_list('class_instance_b', flat=True)
821822
annotation_links = annotation_links.exclude(class_instance_a__in=exclude_neuron_ids)
822823

823-
logger.info('Exporting all annotations')
824+
self.logger.info('Exporting all annotations')
824825

825826
self.to_serialize.append(annotations)
826827
self.to_serialize.append(annotation_links)
@@ -847,7 +848,7 @@ def collect_data(self):
847848
neurons = set([link.class_instance_b_id for link in neuron_links])
848849

849850
exported_tids = set(treenodes.values_list('id', flat=True))
850-
logger.info(f"Exporting {len(exported_tids)} treenodes in {n_skeletons} skeletons and {len(neurons)} neurons")
851+
self.logger.info(f"Exporting {len(exported_tids)} treenodes in {n_skeletons} skeletons and {len(neurons)} neurons")
851852

852853
# Get current maximum concept ID
853854
cursor = connection.cursor()
@@ -869,7 +870,7 @@ def collect_data(self):
869870
extra_tids = connector_tids - exported_tids
870871

871872
connector_export_settings = export_settings['connectors']
872-
logger.info(f"Exporting {len(extra_tids)} placeholder nodes")
873+
self.logger.info(f"Exporting {len(extra_tids)} placeholder nodes")
873874

874875
placeholder_treenodes = Treenode.objects.prefetch_related(
875876
'treenodeconnector_set').filter(id__in=extra_tids)
@@ -960,7 +961,7 @@ def collect_data(self):
960961
original_placeholder_nodes.append(pt)
961962

962963
# Find treenodes
963-
logger.info(f"Exported {len(original_placeholder_nodes)} "
964+
self.logger.info(f"Exported {len(original_placeholder_nodes)} "
964965
"placeholder nodes with original context, and "
965966
f"{n_new_placeholder_context} placeholder nodes with a new "
966967
"context.")
@@ -1003,18 +1004,18 @@ def collect_data(self):
10031004
if volume_ids:
10041005
volumes = Volume.objects.filter(pk__in=volume_ids,
10051006
project_id=self.project.id)
1006-
logger.info("Exporting {} volumes: {}".format(
1007+
self.logger.info("Exporting {} volumes: {}".format(
10071008
len(volumes), ', '.join(v.name for v in volumes)))
10081009
self.to_serialize.append(volumes)
10091010
else:
1010-
logger.info("No volumes found to export")
1011+
self.logger.info("No volumes found to export")
10111012

10121013
# Public deep links
10131014
seen_deep_links = set()
10141015
if self.export_public_deep_links:
10151016
deep_links = DeepLink.objects.filter(project_id=self.project.id, is_public=True)
10161017
seen_deep_links.update([link.id for link in deep_links])
1017-
logger.info(f'Exporting {len(deep_links)} public deep links')
1018+
self.logger.info(f'Exporting {len(deep_links)} public deep links')
10181019
# FIXME: Include referenced skeletons, treenodes and connectors
10191020
self.to_serialize.append(deep_links)
10201021

@@ -1024,7 +1025,7 @@ def collect_data(self):
10241025
n_exportable_deep_links = len(deep_links)
10251026
deep_links = list(filter(lambda x: x.id not in seen_deep_links, deep_links))
10261027
seen_deep_links.update([link.id for link in deep_links])
1027-
logger.info(f'Exporting {n_exportable_deep_links} exportable deep links ({len(deep_links)} are not already prev. included)')
1028+
self.logger.info(f'Exporting {n_exportable_deep_links} exportable deep links ({len(deep_links)} are not already prev. included)')
10281029
# FIXME: Include referenced skeletons, treenodes and connectors
10291030
self.to_serialize.append(deep_links)
10301031

@@ -1044,7 +1045,7 @@ def collect_data(self):
10441045
date_joined=u.date_joined) \
10451046
for u in User.objects.filter(pk__in=seen_user_ids)]
10461047
if self.export_users:
1047-
logger.info("Exporting {} users: {}".format(len(users),
1048+
self.logger.info("Exporting {} users: {}".format(len(users),
10481049
", ".join([u.username for u in users])))
10491050
self.to_serialize.append(users)
10501051
else:
@@ -1054,7 +1055,7 @@ def collect_data(self):
10541055
reduced_user = ReducedInfoUser(id=u.id, username=u.username,
10551056
password=make_password(User.objects.make_random_password()))
10561057
reduced_users.append(reduced_user)
1057-
logger.info("Exporting {} users in reduced form with random passwords: {}".format(len(reduced_users),
1058+
self.logger.info("Exporting {} users in reduced form with random passwords: {}".format(len(reduced_users),
10581059
", ".join([u.username for u in reduced_users])))
10591060
self.to_serialize.append(reduced_users)
10601061

0 commit comments

Comments (0)