Commit a1a3f7d

s3-to-dc: catch OSError
Commit cd4f39b put the try in the wrong place (around building the document stream instead of the loop that consumes it), so move it.
1 parent a396906 commit a1a3f7d
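
Why the move matters: the document stream is built from lazy generators, so an OSError from a failed S3 read only surfaces once the stream is consumed in the indexing loop, not when document_stream is constructed. Below is a minimal sketch of that behaviour; the read_docs generator is a hypothetical stand-in, not part of s3_to_dc.py.

def read_docs(uris):
    # Hypothetical stand-in for the S3 fetchers: a lazy generator whose
    # body does not run until the caller iterates over it.
    for uri in uris:
        raise OSError(f"failed to read {uri}")
        yield uri

stream = None
try:
    # Wrapping only the construction (as cd4f39b did) catches nothing:
    # the generator body has not executed yet.
    stream = read_docs(["s3://bucket/a.json"])
except OSError:
    print("never reached")

try:
    # The OSError surfaces here, while the stream is consumed, so the
    # try/except has to wrap the consuming loop instead.
    for doc in stream:
        pass
except OSError as e:
    print(f"Error: {e}")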

1 file changed (+58 -59)

apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py

Lines changed: 58 additions & 59 deletions
@@ -223,23 +223,17 @@ def cli(
     )
     # Get a generator from supplied S3 Uri for candidate documents
     # Grab the URL from the resulting S3 item
-    try:
-        if is_glob:
-            fetcher = S3Fetcher(aws_unsigned=no_sign_request)
-            document_stream = fetcher(
-                url.url
-                for url in s3_find_glob(
-                    uris[0], skip_check=skip_check, s3=fetcher, **opts
-                )
-            )
-        else:
-            # if working with absolute URLs, no need for all the globbing logic
-            document_stream = SimpleFetcher(
-                aws_unsigned=no_sign_request, request_opts=opts
-            )(uris)
-    except OSError as e:
-        print(f"Error: {e}", file=sys.stderr)
-        sys.exit(1)
+    if is_glob:
+        fetcher = S3Fetcher(aws_unsigned=no_sign_request)
+        document_stream = fetcher(
+            url.url
+            for url in s3_find_glob(uris[0], skip_check=skip_check, s3=fetcher, **opts)
+        )
+    else:
+        # if working with absolute URLs, no need for all the globbing logic
+        document_stream = SimpleFetcher(
+            aws_unsigned=no_sign_request, request_opts=opts
+        )(uris)
 
     if url_string_replace:
         url_string_replace_tuple = tuple(url_string_replace.split(","))
@@ -264,48 +258,53 @@ def cli(
     failed = 0
     skipped = 0
     found_docs = False
-    for uri, dataset in parse_doc_stream(
-        ((doc.url, doc.data) for doc in document_stream), on_error=doc_error
-    ):
-        if dataset is None:
-            skipped += 1
-            continue
-        found_docs = True
-        if convert_bools:
-            for prop, val in dataset["properties"].items():
-                if val is True:
-                    dataset["properties"][prop] = "true"
-                elif val is False:
-                    dataset["properties"][prop] = "false"
-        stac_doc = None
-        if stac:
-            item = Item.from_dict(dataset)
-            dataset, new_uri, stac_doc = item_to_meta_uri(
-                item,
-                dc,
-                rename_product=rename_product,
-                url_string_replace=url_string_replace_tuple,
-            )
-            uri = new_uri or uri
-        try:
-            index_update_dataset(
-                dataset,
-                uri,
-                dc,
-                doc2ds,
-                update=update,
-                update_if_exists=update_if_exists,
-                allow_unsafe=allow_unsafe,
-                archive_less_mature=archive_less_mature,
-                publish_action=publish_action,
-                stac_doc=stac_doc,
-            )
-            added += 1
-        except IndexingError:
-            logging.exception("Failed to index dataset %s", uri)
-            failed += 1
-        except DatasetExists:
-            skipped += 1
+    try:
+        for uri, dataset in parse_doc_stream(
+            ((doc.url, doc.data) for doc in document_stream), on_error=doc_error
+        ):
+            if dataset is None:
+                skipped += 1
+                continue
+            found_docs = True
+            if convert_bools:
+                for prop, val in dataset["properties"].items():
+                    if val is True:
+                        dataset["properties"][prop] = "true"
+                    elif val is False:
+                        dataset["properties"][prop] = "false"
+            stac_doc = None
+            if stac:
+                item = Item.from_dict(dataset)
+                dataset, new_uri, stac_doc = item_to_meta_uri(
+                    item,
+                    dc,
+                    rename_product=rename_product,
+                    url_string_replace=url_string_replace_tuple,
+                )
+                uri = new_uri or uri
+            try:
+                index_update_dataset(
+                    dataset,
+                    uri,
+                    dc,
+                    doc2ds,
+                    update=update,
+                    update_if_exists=update_if_exists,
+                    allow_unsafe=allow_unsafe,
+                    archive_less_mature=archive_less_mature,
+                    publish_action=publish_action,
+                    stac_doc=stac_doc,
+                )
+                added += 1
+            except IndexingError:
+                logging.exception("Failed to index dataset %s", uri)
+                failed += 1
+            except DatasetExists:
+                skipped += 1
+    except OSError as e:
+        print(f"Error: {e}", file=sys.stderr)
+        sys.exit(1)
+
     if not found_docs:
         raise click.ClickException("Doc stream was empty")
