Skip to content

Commit 6dd6b52

Browse files
authored
[FIX] duplicate analyses (#809)
* do not allow duplicate analyses * fix update logic * fix condition when both record and data have no id
1 parent 0308aa1 commit 6dd6b52

File tree

3 files changed

+103
-9
lines changed

3 files changed

+103
-9
lines changed

store/neurostore/resources/base.py

Lines changed: 21 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,10 @@ class BaseView(MethodView):
6969
_view_fields = {}
7070
# _default_exclude = None
7171

72+
@classmethod
def check_duplicate(cls, data, record):
    """Hook for duplicate detection; base implementation finds none.

    Subclasses (e.g. the analyses view) override this to return an
    existing record that ``data`` would duplicate. Returning ``False``
    means "no duplicate found" and lets ``update_or_create`` proceed.

    Args:
        data: deserialized request payload for the record being written.
        record: the model instance being created/updated (may be a bare,
            not-yet-persisted instance).

    Returns:
        A duplicate model instance, or ``False`` when there is none.
    """
    return False
75+
7276
def get_affected_ids(self, ids):
7377
"""
7478
Get all the ids that are affected by a change to a record..
@@ -223,16 +227,16 @@ def load_nested_records(cls, data, record=None):
223227
@classmethod
224228
def update_or_create(cls, data, id=None, user=None, record=None, flush=True):
225229
"""
226-
scenerios:
227-
1. cloning a study
228-
a. clone everything, a study is an object
229-
2. cloning a studyset
230-
a. studies are linked to a studyset, so create a new studyset with same links
231-
3. cloning an annotation
232-
a. annotations are linked to studysets, update when studyset updates
233-
4. creating an analysis
230+
Scenarios:
231+
1. Cloning a study
232+
a. Clone everything, a study is an object
233+
2. Cloning a studyset
234+
a. Studies are linked to a studyset, so create a new studyset with same links
235+
3. Cloning an annotation
236+
a. Annotations are linked to studysets, update when studyset updates
237+
4. Creating an analysis
234238
a. I should have to own all (relevant) parent objects
235-
5. creating an annotation
239+
5. Creating an annotation
236240
a. I should not have to own the studyset to create an annotation
237241
"""
238242

@@ -291,6 +295,14 @@ def update_or_create(cls, data, id=None, user=None, record=None, flush=True):
291295

292296
return record
293297

298+
data["user_id"] = current_user.external_id
299+
if hasattr(record, "id"):
300+
data["id"] = record.id
301+
# check to see if duplicate
302+
duplicate = cls.check_duplicate(data, record)
303+
if duplicate:
304+
return duplicate
305+
294306
# Update all non-nested attributes
295307
for k, v in data.items():
296308
if k in cls._parent and v is not None:

store/neurostore/resources/data.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -931,6 +931,57 @@ def join_tables(self, q, args):
931931
)
932932
return super().join_tables(q, args)
933933

934+
@classmethod
def check_duplicate(cls, data, record):
    """Return an existing duplicate analysis for ``data``, or ``False``.

    An analysis counts as a duplicate when another analysis on the same
    study has the same name, the same owner, and an equivalent set of
    coordinates. Updating a record in place (payload id matches the
    record's own id) is never treated as a duplicate.

    Args:
        data: deserialized analysis payload (may contain "id",
            "study_id", "name", "user_id", "points").
        record: the Analysis instance being created/updated.

    Returns:
        The matching Analysis instance when a duplicate exists,
        otherwise ``False``.
    """
    study_id = data.get("study_id")

    # Same record being updated: not a duplicate of itself.
    if hasattr(record, "id") and record.id and record.id == data.get("id"):
        return False

    # Prefer the already-loaded relationship; fall back to a lookup by id.
    if hasattr(record, "study") and record.study:
        study = record.study
    else:
        study = Study.query.filter_by(id=study_id).first()

    if not study:
        return False

    name = data.get("name")
    user_id = data.get("user_id")
    # "points" may be absent or None in the payload; treat that as an
    # empty list instead of crashing inside the coordinate comparison.
    coordinates = data.get("points") or []

    for analysis in study.analyses:
        if (
            analysis.name == name
            and analysis.user_id == user_id
            and cls._compare_coordinates(analysis.points, coordinates)
        ):
            return analysis

    return False
963+
964+
@staticmethod
def _compare_coordinates(existing_points, new_points):
    """Return True when ``new_points`` matches ``existing_points``.

    Payload points may carry explicit ``x``/``y``/``z`` values or may
    reference an existing point by ``id``; id references resolve to the
    existing point's coordinates. Comparison is set-based, so ordering
    (and repeated identical coordinates) is ignored.

    Args:
        existing_points: iterable of persisted Point objects
            (with ``id``, ``x``, ``y``, ``z`` attributes).
        new_points: iterable of payload dicts, or ``None`` when the
            payload omitted "points" (treated as empty).

    Returns:
        bool: True when both describe the same coordinate set.
    """
    # Map existing point ids to coordinates so id-only payload points
    # can be resolved.
    existing_points_dict = {
        point.id: (point.x, point.y, point.z) for point in existing_points
    }

    # Build comparable sets of (x, y, z) tuples.
    existing_points_set = {(point.x, point.y, point.z) for point in existing_points}
    new_points_set = set()

    # ``new_points`` may be None; guard so a payload without points
    # compares as an empty set instead of raising TypeError.
    for point in new_points or []:
        if "x" in point and "y" in point and "z" in point:
            new_points_set.add((point["x"], point["y"], point["z"]))
        elif "id" in point and point["id"] in existing_points_dict:
            new_points_set.add(existing_points_dict[point["id"]])
        else:
            # Point has neither coordinates nor a resolvable id: it can
            # never match an existing point.
            return False

    return existing_points_set == new_points_set
984+
934985

935986
@view_maker
936987
class ConditionsView(ObjectView, ListView):

store/neurostore/tests/api/test_analyses.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -151,3 +151,34 @@ def test_post_analysis_without_order(auth_client, ingest_neurosynth, session):
151151

152152
# Check if the 'order' field is not None
153153
assert resp.json()["order"] is not None
154+
155+
156+
def test_create_duplicate_analysis(auth_client, ingest_neurosynth, session):
    """Posting an identical analysis twice should yield the same record."""
    # Serialize an analysis that already exists in the database.
    existing = Analysis.query.first()
    payload = AnalysisSchema().dump(existing)

    # Give the authenticated user ownership of the study and every
    # analysis on it, so the duplicate check applies to this user.
    owner = User.query.filter_by(external_id=auth_client.username).first()
    existing.study.user = owner
    for sibling in existing.study.analyses:
        sibling.user = owner
        session.add(sibling)
    session.add(existing.study)
    session.commit()

    # Strip server-generated fields before re-submitting the payload.
    for field in ("user", "id", "created_at", "updated_at", "entities"):
        payload.pop(field, None)

    # First creation succeeds.
    first = auth_client.post("/api/analyses/", data=payload)
    assert first.status_code == 200

    # An identical second creation is deduplicated rather than rejected.
    second = auth_client.post("/api/analyses/", data=payload)
    assert second.status_code == 200

    # Both responses must point at the same analysis record.
    assert first.json()["id"] == second.json()["id"]

0 commit comments

Comments
 (0)