Commit 59f1903

pre-commit-ci[bot] authored and lgray committed
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 41a8b0c commit 59f1903

File tree

1 file changed (+12, -13 lines)

src/coffea/dataset_tools/apply_processor.py

Lines changed: 12 additions & 13 deletions
@@ -139,26 +139,25 @@ def apply_to_dataset(
     out = None
     if parallelize_with_dask:
         (wired_events,) = _pack_meta_to_wire(events)
-        out = dask.delayed(
-            lambda: lz4.frame.compress(
-                cloudpickle.dumps(
-                    partial(_apply_analysis_wire, analysis, wired_events)()
-                ),
-                compression_level=6,
-            )
-        )()
+        out = (
+            dask.delayed(
+                lambda: lz4.frame.compress(
+                    cloudpickle.dumps(
+                        partial(_apply_analysis_wire, analysis, wired_events)()
+                    ),
+                    compression_level=6,
+                )
+            )(),
+        )
         dask.base.function_cache.clear()
     else:
         out = analysis(events)
+        if not isinstance(out, tuple):
+            out = (out,)
 
     if report is not None:
-<<<<<<< HEAD
-        return out, report
-    return (out,)
-=======
         return events, out, report
     return events, out
->>>>>>> aae802b3 (provide interface for serializing taskgraphs to/from disk)
 
 
 def apply_to_fileset(
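For readers skimming the hunk, here is a minimal standalone sketch of the two behaviors it converges on: the parallelize_with_dask branch defers cloudpickle serialization plus lz4 compression behind dask.delayed and wraps the resulting Delayed in a 1-tuple, while the eager branch normalizes any non-tuple output to a tuple so both paths return the same shape. The apply_sketch and run_analysis names below are hypothetical stand-ins, not coffea API; only the dask/cloudpickle/lz4/functools.partial pattern comes from the diff, and the report and _pack_meta_to_wire handling is omitted.

import functools

import cloudpickle
import dask
import lz4.frame


def run_analysis(events):
    # Hypothetical stand-in for the user-supplied analysis callable.
    return {"n_events": len(events)}


def apply_sketch(analysis, events, parallelize_with_dask=False):
    # Hypothetical wrapper mirroring the control flow in the hunk above.
    if parallelize_with_dask:
        # Defer "run the analysis, cloudpickle the result, lz4-compress the
        # bytes" behind dask.delayed, then wrap the Delayed in a 1-tuple.
        out = (
            dask.delayed(
                lambda: lz4.frame.compress(
                    cloudpickle.dumps(functools.partial(analysis, events)()),
                    compression_level=6,
                )
            )(),
        )
    else:
        out = analysis(events)
        # Normalize the eager branch so both paths return a tuple.
        if not isinstance(out, tuple):
            out = (out,)
    return out


# Eager path: a 1-tuple holding the analysis output directly.
(result,) = apply_sketch(run_analysis, ["ev1", "ev2"])

# Dask path: a 1-tuple holding a Delayed compressed payload; computing it
# and undoing the compression and pickling round-trips the same output.
(delayed_blob,) = apply_sketch(run_analysis, ["ev1", "ev2"], parallelize_with_dask=True)
payload = cloudpickle.loads(lz4.frame.decompress(delayed_blob.compute()))
assert payload == result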
