Skip to content

Commit 4886896

Browse files
committed
Let extensions deal with changes
1 parent 527b3d2 commit 4886896

File tree

2 files changed

+25
-27
lines changed

2 files changed

+25
-27
lines changed

ckanext/datapusher_plus/interfaces.py

Lines changed: 20 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,24 @@ def after_upload(
5454
"""
5555
pass
5656

57-
def datastore_column_changed(self, changes, old_headers, new_headers):
58-
""" While we are updating the datastore, we detect column changes """
57+
def datastore_before_update(self, existing_info, new_headers):
58+
""" We are about to update the datastore
59+
60+
:param existing_info: The existing information in the datastore.
61+
Empty if the resource is new.
62+
Something like:
63+
{
64+
'Header 1': {'label': 'Header 1'},
65+
'Header 2': {'label': 'Header 2'},
66+
...
67+
}
68+
69+
:param new_headers: The new headers that are to be added to the datastore.
70+
Something like:
71+
[
72+
{'id': 'Header 1', 'type': 'text', 'info': {'label': 'Header 1'}},
73+
{'id': 'Header 2', 'type': 'numeric', 'info': {'label': 'Header 2'}},
74+
...
75+
]
76+
"""
5977
pass

ckanext/datapusher_plus/jobs.py

Lines changed: 5 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ def send_resource_to_datastore(
153153
"""
154154
Stores records in CKAN datastore
155155
"""
156-
log.info(f"\n\nSending data to datastore {resource_id}\n{headers}\n")
156+
log.info(f"Sending data to datastore {resource_id}")
157157

158158
if resource_id:
159159
# used to create the "main" resource
@@ -1016,30 +1016,10 @@ def _push_to_datastore(task_id, input, dry_run=False, temp_dir=None):
10161016
# Maintain data dictionaries from matching column names
10171017
# if data dictionary already exists for this resource as
10181018
# we want to preserve the user's data dictionary curations
1019-
# Also, detect column changes
1020-
changed = []
1021-
if existing_info:
1022-
for h in headers_dicts:
1023-
if h["id"] in existing_info:
1024-
h["info"] = existing_info[h["id"]]
1025-
# create columns with types user requested
1026-
type_override = existing_info[h["id"]].get("type_override")
1027-
if type_override in list(type_mapping.values()):
1028-
h["type"] = type_override
1029-
else:
1030-
# new column
1031-
changed.append({'new': h["id"]})
1032-
1033-
new_headers_ids = [h["id"] for h in headers_dicts]
1034-
for ei in existing_info:
1035-
if ei not in new_headers_ids:
1036-
# deleted column
1037-
changed.append({'deleted': ei})
1038-
1039-
# send all plugins the column_changes
1040-
if changed:
1041-
for plugin in plugins.PluginImplementations(interfaces.IDataPusher):
1042-
plugin.datastore_column_changed(changed, old_headers=existing_info, new_headers=headers_dicts)
1019+
1020+
# Notify plugins the datastore will be updated
1021+
for plugin in plugins.PluginImplementations(interfaces.IDataPusher):
1022+
plugin.datastore_before_update(existing_info=existing_info, new_headers=headers_dicts)
10431023

10441024
logger.info(
10451025
"Determined headers and types: {headers}...".format(headers=headers_dicts)

Comments: 0 commit comments