Skip to content

Commit 196728a

Browse files
committed
Import/export widget: export treenode/connector archives as multipage TIFFs
This can be changed back to single file per slice in the export dialog. Also, move the checkbox input in the front-end dialog in front of the label to make the options easier to understand. This is also reflected in the dimension preview in the front-end dialog.
1 parent f942f31 commit 196728a

File tree

3 files changed

+88
-42
lines changed

3 files changed

+88
-42
lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -252,6 +252,9 @@ Miscellaneous:
252252
is loaded. It won't be displayed anymore when simply the front page is opened
253253
without a project being loaded.
254254

255+
- The import/export widget now can export treenode archives and connector
256+
archives as multipage TIFF files.
257+
255258
### Bug fixes
256259

257260
- Neuron Search: neurons that don't have any annotations are now returned as
@@ -303,6 +306,9 @@ Miscellaneous:
303306
- Restoring saved stack layer settings: a UI hang-up with fixed, caused by
304307
endlessly switching mirrors to a non-existent mirror ID.
305308

309+
- Treenode/connector export: the X, Y and Z radius now properly limit the width
310+
and height of the image. Before, the maximum X, Y and Z was short one pixel.
311+
306312
## Maintenance updates
307313

308314
- Node distance measurements: computation of straight line distance has been

django/applications/catmaid/control/treenodeexport.py

Lines changed: 63 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from django.db.models import Count
1313

1414
from catmaid.control.authentication import requires_user_role
15-
from catmaid.control.common import get_relation_to_id_map, id_generator
15+
from catmaid.control.common import get_relation_to_id_map, id_generator, get_request_bool
1616
from catmaid.control.cropping import (collect_stack_mirros, CropJob,
1717
extract_substack, ImageRetrievalError)
1818
from catmaid.models import ClassInstanceClassInstance, TreenodeConnector, \
@@ -29,7 +29,7 @@ class SkeletonExportJob:
2929
""" A container with data needed for exporting things related to skeletons.
3030
"""
3131
def __init__(self, user, project_id, stack_id, skeleton_ids,
32-
x_radius, y_radius, z_radius, sample):
32+
x_radius, y_radius, z_radius, sample, one_file_per_slice):
3333
# Sanity checks
3434
if not skeleton_ids:
3535
raise Exception("Please specify at least on skeleton ID")
@@ -46,11 +46,10 @@ def __init__(self, user, project_id, stack_id, skeleton_ids,
4646
raise Exception("The x_radius, y_radius and z_radius parameters have " \
4747
"to be numbers!")
4848
try:
49-
# Expect a boolean or a number
50-
sample = bool(int(sample))
49+
# Expect a boolean
50+
sample = bool(sample)
5151
except (ValueError, TypeError) as e:
52-
raise ValueError("The sample parameter has to be a number or a" \
53-
"boolean!")
52+
raise ValueError("The sample parameter has to be a boolean!")
5453

5554
# Store data
5655
self.user = user
@@ -61,6 +60,7 @@ def __init__(self, user, project_id, stack_id, skeleton_ids,
6160
self.y_radius = y_radius
6261
self.z_radius = z_radius
6362
self.sample = sample
63+
self.one_file_per_slice = one_file_per_slice
6464

6565
class TreenodeExporter:
6666
def __init__(self, job):
@@ -153,34 +153,46 @@ def export_single_node(self, treenode) -> None:
153153
""" Exports a treenode. Expects the output path to exist
154154
and be writable.
155155
"""
156+
stack = Stack.objects.get(id=self.job.stack_id)
157+
156158
# Calculate bounding box for current connector
157159
x_min = treenode.location_x - self.job.x_radius
158-
x_max = treenode.location_x + self.job.x_radius
159160
y_min = treenode.location_y - self.job.y_radius
160-
y_max = treenode.location_y + self.job.y_radius
161161
z_min = treenode.location_z - self.job.z_radius
162-
z_max = treenode.location_z + self.job.z_radius
162+
# Because the max values are exclusive, we have to add one step in
163+
# physical coordinates.
164+
x_max = treenode.location_x + self.job.x_radius + stack.resolution.x
165+
y_max = treenode.location_y + self.job.y_radius + stack.resolution.y
166+
z_max = treenode.location_z + self.job.z_radius + stack.resolution.z
163167
rotation_cw = 0
164168
zoom_level = 0
165169

166-
# Create a single file for each section (instead of a mulipage TIFF)
167-
stack = Stack.objects.get(id=self.job.stack_id)
168170
stack_mirror_ids = collect_stack_mirros([self.job.stack_id])
169171
crop_self = CropJob(self.job.user, self.job.project_id,
170172
stack_mirror_ids, x_min, x_max, y_min, y_max, z_min, z_max,
171173
rotation_cw, zoom_level, single_channel=True)
172174
cropped_stack = extract_substack(crop_self)
173-
# Save each file in output path
175+
176+
# Create a single file for each section (instead of a mulipage TIFF)
174177
output_path = self.create_path(treenode)
175-
for i, img in enumerate(cropped_stack):
176-
# Save image in output path, named after the treenode ID and the
177-
# image center's coordinates, rounded to full integers.
178-
x = int(treenode.location_x + 0.5)
179-
y = int(treenode.location_y + 0.5)
180-
z = int(z_min + i * stack.resolution.z + 0.5)
178+
x = int(treenode.location_x + 0.5)
179+
y = int(treenode.location_y + 0.5)
180+
if self.job.one_file_per_slice:
181+
# Save each file in output path
182+
for i, img in enumerate(cropped_stack):
183+
# Save image in output path, named after the treenode ID and the
184+
# image center's coordinates, rounded to full integers.
185+
z = int(z_min + i * stack.resolution.z + 0.5)
186+
image_name = f"{treenode.id}-{x}-{y}-{z}.tiff"
187+
treenode_image_path = os.path.join(output_path, image_name)
188+
img.save(treenode_image_path)
189+
else:
190+
z = int(treenode.location_z + 0.5)
181191
image_name = f"{treenode.id}-{x}-{y}-{z}.tiff"
182192
treenode_image_path = os.path.join(output_path, image_name)
183-
img.save(treenode_image_path)
193+
metadata = crop_self.create_tiff_metadata(len(cropped_stack))
194+
cropped_stack[0].save(treenode_image_path, compression="raw", save_all=True,
195+
append_images=cropped_stack[1:], tiffinfo=metadata)
184196

185197
def post_process(self, nodes) -> None:
186198
""" Create a meta data file for all the nodes passed (usually all of the
@@ -330,34 +342,47 @@ def export_single_node(self, connector_link) -> None:
330342
and writable.
331343
"""
332344
connector = connector_link.connector
345+
stack = Stack.objects.get(id=self.job.stack_id)
333346

334347
# Calculate bounding box for current connector
335348
x_min = connector.location_x - self.job.x_radius
336-
x_max = connector.location_x + self.job.x_radius
337349
y_min = connector.location_y - self.job.y_radius
338-
y_max = connector.location_y + self.job.y_radius
339350
z_min = connector.location_z - self.job.z_radius
340-
z_max = connector.location_z + self.job.z_radius
351+
# Because the max values are exclusive, we have to add one step in
352+
# physical coordinates.
353+
x_max = connector.location_x + self.job.x_radius + stack.resolution.x
354+
y_max = connector.location_y + self.job.y_radius + stack.resolution.y
355+
z_max = connector.location_z + self.job.z_radius + stack.resolution.z
341356
rotation_cw = 0
342357
zoom_level = 0
343358

344359
# Create a single file for each section (instead of a mulipage TIFF)
345360
stack_mirror_ids = collect_stack_mirros([self.job.stack_id])
346-
crop_self = CropJob(self.job.user, stack_mirror_ids,
347-
self.job.stack_id, x_min, x_max, y_min, y_max, z_min, z_max,
361+
crop_self = CropJob(self.job.user, self.job.project_id, stack_mirror_ids,
362+
x_min, x_max, y_min, y_max, z_min, z_max,
348363
rotation_cw, zoom_level, single_channel=True)
349364
cropped_stack = extract_substack(crop_self)
350-
# Save each file in output path
351-
connector_path = self.create_path(connector_link)
352-
for i, img in enumerate(cropped_stack):
353-
# Save image in output path, named after the image center's coordinates,
354-
# rounded to full integers.
355-
x = int(connector.location_x + 0.5)
356-
y = int(connector.location_y + 0.5)
357-
z = int(z_min + i * crop_self.stacks[0].resolution.z + 0.5)
365+
366+
# Create a single file for each section (instead of a mulipage TIFF)
367+
output_path = self.create_path(connector_link)
368+
x = int(connector.location_x + 0.5)
369+
y = int(connector.location_y + 0.5)
370+
if self.job.one_file_per_slice:
371+
# Save each file in output path
372+
for i, img in enumerate(cropped_stack):
373+
# Save image in output path, named after the treenode ID and the
374+
# image center's coordinates, rounded to full integers.
375+
z = int(z_min + i * stack.resolution.z + 0.5)
376+
image_name = f"{x}_{y}_{z}.tiff"
377+
connector_image_path = os.path.join(output_path, image_name)
378+
img.save(connector_image_path)
379+
else:
380+
z = int(connector.location_z + 0.5)
358381
image_name = f"{x}_{y}_{z}.tiff"
359-
connector_image_path = os.path.join(connector_path, image_name)
360-
img.write(connector_image_path)
382+
connector_image_path = os.path.join(output_path, image_name)
383+
metadata = crop_self.create_tiff_metadata(len(cropped_stack))
384+
cropped_stack[0].save(connector_image_path, compression="raw", save_all=True,
385+
append_images=cropped_stack[1:], tiffinfo=metadata)
361386

362387
def post_process(self, nodes) -> None:
363388
pass
@@ -470,11 +495,13 @@ def create_request_based_export_job(request, project_id):
470495
y_radius = request.POST.get('y_radius', None)
471496
z_radius = request.POST.get('z_radius', None)
472497
# Determine if a sample should be created
473-
sample = request.POST.get('sample', None)
498+
sample = get_request_bool(request.POST, 'sample', False)
499+
# Whether to export one file per slice
500+
one_file_per_slice = get_request_bool(request.POST, 'one_file_per_slice', False)
474501

475502
# Create a new export job
476503
return SkeletonExportJob(request.user, project_id, stack_id, skeleton_ids,
477-
x_radius, y_radius, z_radius, sample)
504+
x_radius, y_radius, z_radius, sample, one_file_per_slice)
478505

479506
@requires_user_role(UserRole.Browse)
480507
def export_connectors(request:HttpRequest, project_id=None) -> JsonResponse:

django/applications/catmaid/static/js/widgets/import-export-widget.js

Lines changed: 19 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1280,6 +1280,7 @@ annotations, neuron name, connectors or partner neurons.
12801280
dialog.z_in_sections = true;
12811281
dialog.xy_radius = 100;
12821282
dialog.z_radius = connector_export ? 10 : 0;
1283+
dialog.one_file_per_slice = false;
12831284

12841285
// Add user interface
12851286
dialog.appendMessage('Please select a source from where to get the ' +
@@ -1340,6 +1341,17 @@ annotations, neuron name, connectors or partner neurons.
13401341
z_radius.parentNode.style.display = 'flex';
13411342
}
13421343

1344+
// Add checkbox to select if one file per slice should be exported.
1345+
var one_file_per_slice_cb_p = document.createElement('p');
1346+
var one_file_per_slice_cb_l = document.createElement('label');
1347+
var one_file_per_slice_cb = document.createElement('input');
1348+
one_file_per_slice_cb.setAttribute('type', 'checkbox');
1349+
one_file_per_slice_cb_l.appendChild(one_file_per_slice_cb);
1350+
one_file_per_slice_cb_l.appendChild(document.createTextNode(
1351+
'Export one file per slice (instead of one multi-page TIFF per node)'));
1352+
one_file_per_slice_cb_p.appendChild(one_file_per_slice_cb_l);
1353+
dialog.dialog.appendChild(one_file_per_slice_cb_p);
1354+
13431355
// Display total extent
13441356
var extent_info_p = document.createElement('p');
13451357
var extent_info = document.createTextNode('');
@@ -1349,19 +1361,19 @@ annotations, neuron name, connectors or partner neurons.
13491361
// Add checkbox to create sample data for one connector
13501362
var sample_cb_p = document.createElement('p');
13511363
var sample_cb_l = document.createElement('label');
1352-
sample_cb_l.appendChild(document.createTextNode(
1353-
'Create single ' + entity + ' sample: '));
13541364
var sample_cb = document.createElement('input');
13551365
sample_cb.setAttribute('type', 'checkbox');
13561366
sample_cb_l.appendChild(sample_cb);
1367+
sample_cb_l.appendChild(document.createTextNode(
1368+
'Create single ' + entity + ' sample: '));
13571369
sample_cb_p.appendChild(sample_cb_l);
13581370
dialog.dialog.appendChild(sample_cb_p);
13591371

13601372
// Updates info text line
13611373
var update_info = function() {
13621374
// Get XY extent
1363-
var xy_extent_px = 2 * dialog.xy_radius;
1364-
var xy_extent_nm = 2 * dialog.xy_radius;
1375+
var xy_extent_px = 2 * dialog.xy_radius + 1;
1376+
var xy_extent_nm = 2 * dialog.xy_radius + stack.resolution.x;
13651377
if (dialog.xy_in_px) {
13661378
// Round pixel extent up, if XY is in nm mode
13671379
xy_extent_nm = Math.round(xy_extent_px * stack.resolution.x);
@@ -1430,7 +1442,8 @@ annotations, neuron name, connectors or partner neurons.
14301442
x_radius: dialog.xy_radius,
14311443
y_radius: dialog.xy_radius,
14321444
z_radius: dialog.z_radius,
1433-
sample: sample_cb.checked ? 1 : 0,
1445+
sample: sample_cb.checked,
1446+
one_file_per_slice: one_file_per_slice_cb.checked,
14341447
};
14351448
if (dialog.xy_in_px) {
14361449
query_data.x_radius = Math.round(query_data.x_radius * stack.resolution.x);
@@ -1451,7 +1464,7 @@ annotations, neuron name, connectors or partner neurons.
14511464
.catch(CATMAID.handleError);
14521465
};
14531466

1454-
dialog.show(500, 'auto', true);
1467+
dialog.show(550, 'auto', true);
14551468
update_info();
14561469
}
14571470

0 commit comments

Comments
 (0)