
Commit ef98d95

feat(import): simpler import + state restore logic
The pipeline code has been removed in favor of a simpler chain-of-responsibility approach built on `evaluateChain` and `asyncSelect`. `evaluateChain` evaluates a data source against a chain of import handlers until one of them returns a new data source. To keep supporting nested processing the way the old pipeline code did, `evaluateChain` is invoked inside a loop for every data source, and `asyncSelect` drives the loop by selecting `evaluateChain` promises as they complete. The state schema is updated to operate generically on serialized data sources: instead of special-casing remote files, the serialized DataSource type encodes that state.
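To illustrate the shape of that loop, here is a minimal sketch with hypothetical `DataSource`, handler, and helper signatures; the real `evaluateChain` and `asyncSelect` in this codebase differ in detail.

```ts
// Hypothetical sketch of the chain-of-responsibility loop described above.
// The DataSource shape, Handler type, and helper signatures are illustrative only.
type DataSource = { uriSrc?: { uri: string }; fileSrc?: { file: File } };

// A handler either transforms a source into a new one (e.g. uri -> downloaded file)
// or declines by returning null.
type Handler = (src: DataSource) => Promise<DataSource | null>;

// Evaluate a source against the chain until one handler produces a new source.
async function evaluateChain(
  src: DataSource,
  handlers: Handler[]
): Promise<DataSource | null> {
  for (const handler of handlers) {
    const next = await handler(src);
    if (next) return next;
  }
  return null; // no handler had anything left to do
}

// asyncSelect-style helper: resolves with the first settled value and the
// remaining, still-pending promises.
async function asyncSelect<T>(promises: Promise<T>[]): Promise<[T, Promise<T>[]]> {
  const tagged = promises.map((p, i) => p.then((value) => [value, i] as const));
  const [value, index] = await Promise.race(tagged);
  return [value, promises.filter((_, i) => i !== index)];
}

// Loop driver: every new data source is fed back through the chain until all
// sources settle, which is how nested processing keeps working.
async function processAll(initial: DataSource[], handlers: Handler[]) {
  const finished: DataSource[] = [];
  let pending = initial.map((src) =>
    evaluateChain(src, handlers).then((next) => ({ src, next }))
  );
  while (pending.length > 0) {
    const [{ src, next }, rest] = await asyncSelect(pending);
    pending = rest;
    if (next) {
      // a handler produced a new source; run it through the chain again
      pending.push(evaluateChain(next, handlers).then((n) => ({ src: next, next: n })));
    } else {
      finished.push(src); // fully processed
    }
  }
  return finished;
}
```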
1 parent 9b2e77b commit ef98d95

34 files changed, +863 -1319 lines changed

src/actions/importDicomChunks.ts (+1 -1)

@@ -23,5 +23,5 @@ export async function importDicomChunks(chunks: Chunk[]) {
     })
   );
 
-  return Object.keys(chunksByVolume);
+  return chunksByVolume;
 }

src/actions/loadUserFiles.ts (+33 -32)

@@ -10,23 +10,24 @@ import { useDatasetStore } from '@/src/store/datasets';
 import { useDICOMStore } from '@/src/store/datasets-dicom';
 import { useLayersStore } from '@/src/store/datasets-layers';
 import { useSegmentGroupStore } from '@/src/store/segmentGroups';
-import { wrapInArray, nonNullable } from '@/src/utils';
+import { wrapInArray, nonNullable, partition } from '@/src/utils';
 import { basename } from '@/src/utils/path';
 import { parseUrl } from '@/src/utils/url';
 import { logError } from '@/src/utils/loggers';
-import { PipelineResultSuccess, partitionResults } from '@/src/core/pipeline';
 import {
-  ImportDataSourcesResult,
   importDataSources,
   toDataSelection,
 } from '@/src/io/import/importDataSources';
 import {
+  ErrorResult,
   ImportResult,
   LoadableResult,
-  VolumeResult,
+  LoadableVolumeResult,
   isLoadableResult,
   isVolumeResult,
+  ImportDataSourcesResult,
 } from '@/src/io/import/common';
+import { isDicomImage } from '@/src/utils/dataSelection';
 
 // higher value priority is preferred for picking a primary selection
 const BASE_MODALITY_TYPES = {
@@ -38,8 +39,8 @@ const BASE_MODALITY_TYPES = {
 
 function findBaseDicom(loadableDataSources: Array<LoadableResult>) {
   // find dicom dataset for primary selection if available
-  const dicoms = loadableDataSources.filter(
-    ({ dataType }) => dataType === 'dicom'
+  const dicoms = loadableDataSources.filter(({ dataID }) =>
+    isDicomImage(dataID)
   );
   // prefer some modalities as base
   const dicomStore = useDICOMStore();
@@ -97,19 +98,15 @@ function findBaseImage(
 }
 
 // returns image and dicom sources, no config files
-function filterLoadableDataSources(
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
-) {
-  return succeeded.flatMap((result) => {
-    return result.data.filter(isLoadableResult);
-  });
+function filterLoadableDataSources(succeeded: Array<ImportResult>) {
+  return succeeded.filter(isLoadableResult);
 }
 
 // Returns list of dataSources with file names where the name has the extension argument
 // and the start of the file name matches the primary file name.
 function filterMatchingNames(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>,
   extension: string
 ) {
   const dicomStore = useDICOMStore();
@@ -141,7 +138,7 @@ function getStudyUID(volumeID: string) {
 }
 
 function findBaseDataSource(
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  succeeded: Array<ImportResult>,
   segmentGroupExtension: string
 ) {
   const loadableDataSources = filterLoadableDataSources(succeeded);
@@ -155,24 +152,24 @@ function findBaseDataSource(
 
 function filterOtherVolumesInStudy(
   volumeID: string,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
+  succeeded: Array<ImportResult>
 ) {
   const targetStudyUID = getStudyUID(volumeID);
   const dicomDataSources = filterLoadableDataSources(succeeded).filter(
-    ({ dataType }) => dataType === 'dicom'
+    ({ dataID }) => isDicomImage(dataID)
   );
   return dicomDataSources.filter((ds) => {
     const sourceStudyUID = getStudyUID(ds.dataID);
     return sourceStudyUID === targetStudyUID && ds.dataID !== volumeID;
-  }) as Array<VolumeResult>;
+  }) as Array<LoadableVolumeResult>;
 }
 
 // Layers a DICOM PET on a CT if found
 function loadLayers(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>
 ) {
-  if (primaryDataSource.dataType !== 'dicom') return;
+  if (!isDicomImage(primaryDataSource.dataID)) return;
   const otherVolumesInStudy = filterOtherVolumesInStudy(
     primaryDataSource.dataID,
     succeeded
@@ -198,8 +195,8 @@ function loadLayers(
 // - DICOM SEG modalities with matching StudyUIDs.
 // - DataSources that have a name like foo.segmentation.bar and the primary DataSource is named foo.baz
 function loadSegmentations(
-  primaryDataSource: VolumeResult,
-  succeeded: Array<PipelineResultSuccess<ImportResult>>,
+  primaryDataSource: LoadableVolumeResult,
+  succeeded: Array<ImportResult>,
   segmentGroupExtension: string
 ) {
   const matchingNames = filterMatchingNames(
@@ -237,13 +234,19 @@ function loadDataSources(sources: DataSource[]) {
 
   let results: ImportDataSourcesResult[];
   try {
-    results = await importDataSources(sources);
+    results = (await importDataSources(sources)).filter((result) =>
+      // only look at data and error results
+      ['data', 'error'].includes(result.type)
+    );
   } catch (error) {
     loadDataStore.setError(error as Error);
     return;
  }
 
-  const [succeeded, errored] = partitionResults(results);
+  const [succeeded, errored] = partition(
+    (result) => result.type !== 'error',
+    results
+  );
 
   if (!dataStore.primarySelection && succeeded.length) {
     const primaryDataSource = findBaseDataSource(
@@ -264,14 +267,12 @@ function loadDataSources(sources: DataSource[]) {
   }
 
   if (errored.length) {
-    const errorMessages = errored.map((errResult) => {
-      // pick first error
-      const [firstError] = errResult.errors;
-      // pick innermost dataset that errored
-      const name = getDataSourceName(firstError.inputDataStackTrace[0]);
+    const errorMessages = (errored as ErrorResult[]).map((errResult) => {
+      const { dataSource, error } = errResult;
+      const name = getDataSourceName(dataSource);
       // log error for debugging
-      logError(firstError.cause);
-      return `- ${name}: ${firstError.message}`;
+      logError(error);
+      return `- ${name}: ${error.message}`;
     });
     const failedError = new Error(
       `These files failed to load:\n${errorMessages.join('\n')}`
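The diff above swaps `partitionResults` for a generic `partition` helper imported from `@/src/utils`. A sketch consistent with how it is called here (predicate first, array second, matching items in the first slot of the returned tuple); the actual utility may be implemented differently.

```ts
// Sketch only; the real helper lives in @/src/utils.
// Splits items into [matching, nonMatching] according to the predicate.
function partition<T>(predicate: (item: T) => boolean, items: T[]): [T[], T[]] {
  const pass: T[] = [];
  const fail: T[] = [];
  items.forEach((item) => (predicate(item) ? pass : fail).push(item));
  return [pass, fail];
}

// Mirrors the call in loadDataSources: successes first, errors second.
// const [succeeded, errored] = partition((r) => r.type !== 'error', results);
```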

src/components/SampleDataBrowser.vue (+2 -2)

@@ -96,8 +96,8 @@ export default defineComponent({
       if (!loadResult) {
         throw new Error('Did not receive a load result');
       }
-      if (!loadResult.ok) {
-        throw loadResult.errors[0].cause;
+      if (loadResult.type === 'error') {
+        throw loadResult.error;
       }
 
       const selection = convertSuccessResultToDataSelection(loadResult);
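This change reflects the new discriminated result shape used across the diffs: results carry a `type` tag instead of an `ok` flag, error results expose `dataSource` and `error` directly, and loadable results carry a `dataID`. A hypothetical sketch consistent with those field accesses; the real definitions are in `@/src/io/import/common` and include more members and fields than shown here.

```ts
// Guessed shapes, based only on how results are used in these diffs.
type SerializedDataSourceLike = Record<string, unknown>; // placeholder

interface ErrorResult {
  type: 'error';
  dataSource: SerializedDataSourceLike;
  error: Error;
}

interface LoadableResult {
  type: 'data';
  dataSource: SerializedDataSourceLike;
  dataID: string; // e.g. checked with isDicomImage(dataID)
}

// Other result kinds exist as well: loadDataSources filters on
// ['data', 'error'].includes(result.type) before partitioning.
type ImportResultSketch = ErrorResult | LoadableResult;
```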
