29 commits
f70c9cd
Added schemas Folder with __init__.py
JanNolten Nov 7, 2025
e007679
Added field_validator.py to group repeated validation
JanNolten Nov 7, 2025
ccac5cc
Added field_serializer.py to group repeated serialization
JanNolten Nov 7, 2025
11c2a35
added function_validator.py to validate elephant functions
JanNolten Nov 7, 2025
46ff38b
Added Pydantic Models for statistics
JanNolten Nov 7, 2025
5fb18dc
Added Pydantic Models for spike_train_correlation
JanNolten Nov 7, 2025
d5324bf
Added Pydantic Models for spike_train_synchrony
JanNolten Nov 7, 2025
c99b6c0
Original arguments are passed into the function
JanNolten Nov 7, 2025
cb533cc
Added pytest.ini to .gitignore
JanNolten Nov 7, 2025
71b7dc0
Added tests and option to skip validation
JanNolten Nov 7, 2025
7f4f5ef
Transferring Bug fixes
JanNolten Nov 7, 2025
b282cc3
Transferring Bug fixes
JanNolten Nov 7, 2025
9c3402a
Transferring Bug fixes
JanNolten Nov 7, 2025
ac00866
Implemented validation for statistics
JanNolten Nov 7, 2025
a95f9f8
Implemented validation for spike_train_correlation
JanNolten Nov 7, 2025
69745c2
Implemented validation for spike_train_synchrony
JanNolten Nov 7, 2025
460e9cd
Allowed some ValueErrors to also be TypeErrors
JanNolten Nov 7, 2025
bf9b837
Merge branch 'NeuralEnsemble:master' into feature/partial
JanNolten Nov 7, 2025
fac02e1
Removed ; at end of lines
JanNolten Nov 10, 2025
4fdd64a
Added Pydantic to requirements
JanNolten Nov 10, 2025
b5f0ff7
Merge branch 'NeuralEnsemble:master' into feature/partial
JanNolten Nov 11, 2025
7d933a4
Removed Self from typing, because it only works in python>=3.11.0
JanNolten Nov 11, 2025
932f1d4
Added ability to disable validation globally
JanNolten Nov 11, 2025
1f58b12
Allow t_start to be negative because it should be able to be used tha…
JanNolten Nov 11, 2025
6f2b7d3
Allowed all t_start and t_stop to be negative, because they could be …
JanNolten Nov 18, 2025
c46228d
Removed the option to skip validation with the extra kwargs not_valid…
JanNolten Nov 24, 2025
ae94ee6
Simplified tests to make them more understandable
JanNolten Nov 24, 2025
ca247c0
Made tests stricter by checking for the exact error type. Also fixed bugs
JanNolten Nov 24, 2025
91df787
Forgot to remove a print statement
JanNolten Nov 24, 2025
13 changes: 8 additions & 5 deletions elephant/schemas/field_validator.py
@@ -39,6 +39,8 @@ def get_length(obj) -> int:
return obj.size
elif isinstance(obj, (list,tuple)):
return len(obj)
elif isinstance(obj, neo.core.spiketrainlist.SpikeTrainList):
return len(obj)

Author (JanNolten) commented:
Added getting length of SpikeTrainList, to check if it is empty or has a min_length.
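As a hedged illustration of what this enables (get_length and the module path are taken from this diff; the SpikeTrainList construction below is an assumption about neo's API, not code from the PR):

    import neo
    import quantities as pq
    from neo.core.spiketrainlist import SpikeTrainList

    from elephant.schemas.field_validator import get_length

    # An empty SpikeTrainList now reports a length of 0, so a min_length
    # check can reject it instead of it falling through the isinstance chain.
    assert get_length(SpikeTrainList([])) == 0

    # A one-element SpikeTrainList reports 1 and passes a min_length=1 check.
    st = neo.SpikeTrain([0.1, 0.4] * pq.s, t_start=0 * pq.s, t_stop=1 * pq.s)
    assert get_length(SpikeTrainList([st])) == 1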



@@ -115,6 +117,7 @@ def validate_type_length(value, info, allowed_types: tuple, allow_none: bool, mi

def validate_array_content(value, info, allowed_types: tuple, allow_none: bool, min_length: int, allowed_content_types: tuple, min_length_content: int = 0):
validate_type_length(value, info, allowed_types, allow_none, min_length)
hasContentLength = False
Author (JanNolten) commented:
If hasContentLength is not set to True in the loop, then it would have never been initialized.

for i, item in enumerate(value):
if not isinstance(item, allowed_content_types):
raise TypeError(f"Element {i} in {info.field_name} must be {allowed_content_types}, not {type(item).__name__}")
@@ -144,7 +147,7 @@ def validate_spiketrains(value, info, allowed_types = (list,), allow_none = Fals

def validate_spiketrains_matrix(value, info, allowed_types = (elephant.trials.Trials, list[neo.core.spiketrainlist.SpikeTrainList], list[list[neo.core.SpikeTrain]]), allow_none = False, min_length = 1, check_rank_deficient = False):
if isinstance(value, list):
validate_spiketrains(value, info, allowed_content_types=(neo.core.spiketrainlist,list[neo.core.SpikeTrain],))
validate_spiketrains(value, info, allowed_content_types=(neo.core.spiketrainlist.SpikeTrainList,list[neo.core.SpikeTrain],))
else:
validate_type(value, info, (elephant.trials.Trials,), allow_none=False)
if check_rank_deficient:
@@ -169,7 +172,7 @@ def validate_time_intervals(value, info, allowed_types = (list, pq.Quantity, np.
raise ValueError(f"{info.field_name} is not allowed to be a matrix")
return value

def validate_array(value, info, allowed_types=(list, np.ndarray) , allow_none=False, min_length=1, allowed_content_types = None, min_length_content = 0):
def validate_array(value, info, allowed_types=(list, np.ndarray, tuple) , allow_none=False, min_length=1, allowed_content_types = None, min_length_content = 0):
if allowed_content_types is None:
validate_type_length(value, info, allowed_types, allow_none, min_length)
else:
@@ -202,10 +205,10 @@ def validate_key_in_tuple(value : str, info, t: tuple):

# ---- Model validation helpers ----

def model_validate_spiketrains_same_t_start_stop(spiketrain, t_start, t_stop, name: str = "spiketrains", warning: bool = False):
def model_validate_spiketrains_same_t_start_stop(spiketrains, t_start, t_stop, name: str = "spiketrains", warning: bool = False):
if(t_start is None or t_stop is None):
first = True
for i, item in enumerate(spiketrain):
for i, item in enumerate(spiketrains):
if first:
t_start = item.t_start
t_stop = item.t_stop
@@ -225,7 +228,7 @@ def model_validate_spiketrains_same_t_start_stop(spiketrain, t_start, t_stop, na
if t_start>t_stop:
raise ValueError(f"{name} has t_start > t_stop")

def model_validate_spiketrains_sam_t_start_stop(spiketrain_i, spiketrain_j):
def model_validate_two_spiketrains_same_t_start_stop(spiketrain_i, spiketrain_j):
if spiketrain_i.t_start != spiketrain_j.t_start:
raise ValueError("spiketrain_i and spiketrain_j need to have the same t_start")
if spiketrain_i.t_stop != spiketrain_j.t_stop:
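For completeness, a hedged usage sketch of the renamed two-spiketrain validator (the function name and module path come from this diff; the spike trains below are invented test data):

    import neo
    import quantities as pq

    from elephant.schemas.field_validator import (
        model_validate_two_spiketrains_same_t_start_stop,
    )

    st_i = neo.SpikeTrain([0.1, 0.5] * pq.s, t_start=0 * pq.s, t_stop=1 * pq.s)
    st_j = neo.SpikeTrain([0.2, 0.9] * pq.s, t_start=0 * pq.s, t_stop=2 * pq.s)

    # Expected to raise ValueError: the trains share t_start but not t_stop.
    model_validate_two_spiketrains_same_t_start_stop(st_i, st_j)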