Skip to content

Commit 3af4df3

Browse files
committed
doc: Add comments and add additional status prints
1 parent c978b29 commit 3af4df3

File tree

1 file changed

+38
-32
lines changed

1 file changed

+38
-32
lines changed

cellpose/contrib/cluster_script.py

Lines changed: 38 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,17 @@
88
from cellpose.contrib.distributed_segmentation import numpy_array_to_zarr
99

1010
from cellpose.contrib.distributed_segmentation import distributed_eval
11-
from cellpose.contrib.distributed_segmentation import SlurmCluster, janeliaLSFCluster, myLocalCluster
12-
13-
11+
from cellpose.contrib.distributed_segmentation import SlurmCluster, janeliaLSFCluster
1412

1513
def main():
1614
## PARAMETERS
17-
# Compute node accessible directory for test input zarr dataset and outputs
18-
output_dir = Path.home() / 'link_scratch'
15+
# Compute node-accessible directory for input zarr dataset and outputs
16+
output_dir = Path() / 'outputs'
17+
input_zarr_path = output_dir / 'input.zarr'
18+
output_zarr_path = output_dir / 'segmentation.zarr'
19+
output_bbox_pkl = output_dir / 'bboxes.pkl'
1920

20-
# Cluster parameters (here: https://docs.mpcdf.mpg.de/doc/computing/viper-gpu-user-guide.html)
21+
# Cluster parameters (example: https://docs.mpcdf.mpg.de/doc/computing/viper-gpu-user-guide.html)
2122
cluster = {
2223
'job_cpu': 2, # number of CPUs per GPU worker
2324
'ncpus':1, # threads requested per GPU worker
@@ -34,35 +35,38 @@ def main():
3435
}
3536
# * Ask your cluster support staff for assistance
3637

37-
input_zarr_path = output_dir / 'input.zarr'
38-
output_zarr_path = output_dir / 'segmentation.zarr'
39-
output_bbox_pkl = output_dir / 'bboxes.pkl'
38+
# Cellpose parameters
39+
model_kwargs = {'gpu':True}
40+
eval_kwargs = {
41+
'z_axis':0,
42+
'do_3D':True,
43+
}
44+
45+
# Optional: Crop data to reduce runtime for this test case
46+
crop = (slice(0, 221), slice(1024,2048), slice(1024,2048))
4047

4148

49+
## DATA PREPARATION
50+
# here: DAPI-stained human gastruloid by Zhiyuan Yu (https://zenodo.org/records/17590053)
4251
if not input_zarr_path.exists():
4352
print('Download test data')
4453
fname = retrieve(
4554
url="https://zenodo.org/records/17590053/files/2d_gastruloid.tif?download=1",
4655
known_hash="8ac2d944882268fbaebdfae5f7c18e4d20fdab024db2f9f02f4f45134b936872",
4756
path = Path.home() / '.cellpose' / 'data',
4857
progressbar=True,
49-
)
50-
#crop = (slice(None), slice(1024,2048), slice(1024,2048))
51-
data_numpy = imread(fname)#[crop]
58+
)
59+
data_numpy = imread(fname)[crop]
5260

53-
print('Save as 3D local zarr array')
61+
print(f'Convert to {data_numpy.shape} zarr array')
5462
data_zarr = numpy_array_to_zarr(input_zarr_path, data_numpy, chunks=(256, 256, 256))
63+
print(f'Input stored in {input_zarr_path}')
5564
del data_numpy
5665
else:
66+
print(f'Read input data from {input_zarr_path}')
5767
data_zarr = zarr.open(input_zarr_path)
5868

59-
# parameterize cellpose however you like
60-
model_kwargs = {'gpu':True}
61-
eval_kwargs = {
62-
'z_axis':0,
63-
'do_3D':True,
64-
}
65-
69+
## EVALUATION
6670
# Guess cluster type by checking for cluster submission commands
6771
if subprocess.getstatusoutput('sbatch -h')[0] == 0:
6872
print('Slurm sbatch command detected -> use SlurmCluster')
@@ -72,11 +76,13 @@ def main():
7276
cluster = janeliaLSFCluster(**cluster_kwargs)
7377
else:
7478
cluster = None
79+
## Note: in case you want to test without a cluster scheduler, use:
80+
#from cellpose.contrib.distributed_segmentation import myLocalCluster
7581
#cluster = myLocalCluster(**{
76-
#'n_workers':1, # if you only have 1 gpu, then 1 worker is the right choice
77-
#'ncpus':24,
78-
#'memory_limit':'64GB',
79-
#'threads_per_worker':1,
82+
# 'n_workers': 1, # if you only have 1 gpu, then 1 worker is the right choice
83+
# 'ncpus': 8,
84+
# 'memory_limit':'64GB',
85+
# 'threads_per_worker':1,
8086
#})
8187

8288
if cluster is None:
@@ -88,22 +94,22 @@ def main():
8894
"\n * or raise a feature request at https://github.com/MouseLand/cellpose/issues."
8995
)
9096

91-
# Start evaluation
97+
# Start computation
9298
segments, boxes = distributed_eval(
93-
input_zarr=data_zarr,
94-
blocksize=(256, 256, 256),
95-
write_path=str(output_zarr_path),
96-
model_kwargs=model_kwargs,
97-
eval_kwargs=eval_kwargs,
99+
input_zarr = data_zarr,
100+
blocksize = (256, 256, 256),
101+
write_path = str(output_zarr_path),
102+
model_kwargs = model_kwargs,
103+
eval_kwargs = eval_kwargs,
98104
cluster = cluster,
99105
)
100106

101-
# Save boxes on disk
107+
# Save bounding boxes on disk
102108
with open(output_bbox_pkl, 'wb') as f:
103109
pickle.dump(boxes, f)
104110

105111
print(f'Segmentation saved in {str(output_zarr_path)}')
106-
print(f'Object boxes saved in {str(output_bbox_pkl)}')
112+
print(f'Object bounding boxes saved in {str(output_bbox_pkl)}')
107113

108114
if __name__ == '__main__':
109115
main()

0 commit comments

Comments
 (0)