@@ -269,7 +269,7 @@ def _extattributes(self) -> Attributes:
             with jsonfile.open('r') as json_fid:
                 attributes = json.load(json_fid)
             if not isinstance(attributes, dict):
-                LOGGER.warning(f"Skipping unexpectedly formatted meta-data in: {jsonfile}")
+                LOGGER.warning(f"Skipping unexpectedly formatted metadata in: {jsonfile}")
                 return Attributes({})
             self._cache.update(attributes)
 
@@ -370,7 +370,7 @@ def __init__(self, dataformat: str='', datatype: str='', data: dict=None, option
         self.dataformat = dataformat
         """The name of the dataformat"""
         self.datatype = datatype
-        """The name of the datatype"""
+        """The name of the data type"""
         self.options = options
         """The dictionary with the BIDScoin options"""
         self.plugins = plugins
@@ -391,7 +391,7 @@ def __init__(self, dataformat: str='', datatype: str='', data: dict=None, option
         self.meta = Meta(data['meta'])
         """The meta output dictionary (will be appended to the json sidecar file)"""
         self.events = data['events']
-        """The options to parse the stimulus presentation logfile (if any) to BIDS compliant events"""
+        """The options to parse the stimulus presentation log file (if any) to BIDS-compliant events"""
 
     def __getattr__(self, name: str):
 
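For orientation, the `data` dict that this constructor reads follows the run-item layout of the bidsmap YAML. A minimal sketch with hypothetical values (real run items carry many more attribute and entity keys):

```python
# Hypothetical run-item data as consumed by RunItem.__init__:
data = {
    'provenance': '/raw/sub-001/ses-01/007-t1_mprage/00001.dcm',  # hypothetical source file
    'attributes': {'SeriesDescription': 't1_mprage'},             # source attributes used for matching
    'bids':       {'suffix': 'T1w'},                              # BIDS entities for the output name
    'meta':       {},                                             # appended to the json sidecar file
    'events':     {},                                             # options to parse a stimulus log file, if any
}
```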
@@ -463,7 +463,7 @@ def check(self, checks: tuple[bool, bool, bool]=(False, False, False)) -> tuple[
         if all(checks) and not provenance:
             LOGGER.info(f"No provenance info found for {datatype}/*_{bids['suffix']}")
 
-        # Check if we have a suffix and datatype rules
+        # Check if we have a suffix and data type rules
         if 'suffix' not in bids:
             if checks[1]: LOGGER.warning(f'Invalid bidsmap: The {datatype} "suffix" key is missing ({datatype} -> {provenance})')
             return run_keysok, False, run_valsok    # The suffix is not BIDS-valid, we cannot check the keys and values
@@ -665,7 +665,7 @@ def increment_runindex(self, outfolder: Path, bidsname: str, scans_table: pd.Dat
         return bidsname + bidsext
 
     def eventsparser(self) -> EventsParser:
-        """Returns a plugin EventsParser instance to parse the stimulus presentation logfile (if any)"""
+        """Returns a plugin EventsParser instance to parse the stimulus presentation log file (if any)"""
 
         for name in self.plugins:
             if plugin := bcoin.import_plugin(name, (f"{self.dataformat}Events",)):
@@ -677,11 +677,11 @@ class DataType:
 
     def __init__(self, dataformat: str, datatype: str, data: list, options: Options, plugins: Plugins):
         """
-        Reads from a YAML datatype dictionary
+        Reads from a YAML data type dictionary
 
         :param dataformat: The name of the dataformat (= section in the bidsmap)
         :param datatype:   The name of the datatype (= section in a dataformat)
-        :param data:       The YAML datatype dictionary, i.e. a list of runitems
+        :param data:       The YAML data type dictionary, i.e. a list of runitems
         :param options:    The dictionary with the BIDScoin options
         :param plugins:    The plugin dictionaries with their options
         """
@@ -895,8 +895,8 @@ def __init__(self, yamlfile: Path, folder: Path=templatefolder, plugins: Iterabl
         searched before the default 'heuristics'. If yamlfile is empty, then first 'bidsmap.yaml' is searched for, then 'bidsmap_template'. So fullpath
         has precedence over folder and bidsmap.yaml has precedence over the bidsmap_template.
 
-        :param yamlfile: The full pathname or basename of the bidsmap yaml-file
-        :param folder:   Used when yamlfile=basename and not in the pwd: yamlfile is then assumed to be in the (bids/code/bidscoin)folder. A bidsignore file in folder will be added to the bidsmap bidsignore items
+        :param yamlfile: The full path or base name of the bidsmap yaml-file
+        :param folder:   Used when yamlfile=base name and not in the pwd: yamlfile is then assumed to be in the (bids/code/bidscoin) folder. A bidsignore file in folder will be added to the bidsmap bidsignore items
         :param plugins:  List of plugins to be used (with default options, overrules the plugin list in the study/template bidsmaps). Leave empty to use all plugins in the bidsmap
         :param checks:   Booleans to check if all (bidskeys, bids-suffixes, bids-values) in the run are present according to the BIDS schema specifications
         """
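A usage sketch of the precedence described above (hypothetical project paths; the import location is an assumption):

```python
from pathlib import Path
from bidscoin.bids import BidsMap   # import path is an assumption

# A bare base name is resolved against the folder argument (e.g. bids/code/bidscoin),
# whereas a full path to the yaml-file takes precedence over folder
bidsmap = BidsMap(Path('bidsmap.yaml'), folder=Path('/myproject/bids/code/bidscoin'))
```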
@@ -1351,7 +1351,7 @@ def get_matching_run(self, sourcefile: Union[str, Path], dataformat: str='', run
                     else:
                         rundata['bids'][bidskey] = datasource.dynamicvalue(bidsvalue, runtime=runtime)
 
-                # Try to fill the meta-data
+                # Try to fill the metadata
                 for metakey, metavalue in runitem.meta.items():
 
                     # Replace the dynamic meta values, except the IntendedFor value (e.g. <<task>>)
@@ -1381,7 +1381,7 @@ def get_matching_run(self, sourcefile: Union[str, Path], dataformat: str='', run
         # We don't have a match (all tests failed, so datatype should be the *last* one, e.g. unknowndatatype)
         LOGGER.bcdebug(f"Found no bidsmap match for: {sourcefile}")
         if datatype not in unknowndatatypes:
-            LOGGER.warning(f"Datatype was expected to be in {unknowndatatypes}, instead it is '{datatype}' -> {sourcefile}")
+            LOGGER.warning(f"Data type was expected to be in {unknowndatatypes}, instead it is '{datatype}' -> {sourcefile}")
 
         runitem = RunItem(dataformat, datatype, copy.deepcopy(rundata), self.options, self.plugins)
         runitem.strip_suffix()
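For context on the dynamic values being filled in above: BIDScoin evaluates single-bracket values when a run is matched, while double-bracket values (such as the `<<task>>` example in the comment) are deferred until bidscoiner runtime. A minimal sketch with hypothetical values:

```python
# Hypothetical bids values illustrating the dynamic-value syntax:
bids = {'task': '<SeriesDescription>',   # evaluated when the run is matched
        'run':  '<<1>>'}                 # left untouched until bidscoiner runtime
```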
@@ -1812,11 +1812,11 @@ def check_runindices(session: Path) -> bool:
 
 def limitmatches(fmap: str, matches: list[str], limits: str, niifiles: set[str], scans_table: pd.DataFrame):
     """
-    Helper function for addmetadata() to check if there are multiple fieldmap runs and get the lower- and upperbound from
+    Helper function for addmetadata() to check if there are multiple field map runs and get the lower and upper bounds from
     the AcquisitionTime to bound the grand list of matches to adjacent runs. The resulting matches are added to niifiles
 
-    :param fmap:        The fieldmap (relative to the session folder)
-    :param matches:     The images (relative to the session folder) associated with the fieldmap
+    :param fmap:        The field map (relative to the session folder)
+    :param matches:     The images (relative to the session folder) associated with the field map
     :param limits:      The bounding limits from the dynamic value: '[lowerlimit:upperlimit]'
     :param niifiles:    The set to which the bounded results are added
     :param scans_table: The scans table with the acquisition times
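To make the '[lowerlimit:upperlimit]' convention concrete, here is a simplified sketch of how such a limits string bounds the matches by their run number relative to the field map (hypothetical values; the real function additionally bounds by acquisition time, as the code below shows):

```python
# Hypothetical, simplified illustration of the '[lowerlimit:upperlimit]' bounding:
limits       = '[-1:1]'                 # e.g. taken from a dynamic value such as <<task:[-1:1]>>
lower, upper = limits.strip('[]').split(':')
lowerlimit   = int(lower) if lower.strip() else float('-inf')
upperlimit   = int(upper) if upper.strip() else float('inf')
offset       = 2                        # hypothetical run number of the field map itself
matches      = {1: 'run-1_bold', 2: 'run-2_bold', 3: 'run-3_bold', 4: 'run-4_bold'}
bounded      = [name for nr, name in matches.items() if lowerlimit <= nr - offset <= upperlimit]
# -> ['run-1_bold', 'run-2_bold', 'run-3_bold']
```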
@@ -1832,16 +1832,16 @@ def limitmatches(fmap: str, matches: list[str], limits: str, niifiles: set[str],
     lowerbound = fmaptime.replace(year=1900)    # Use an ultra-wide lower limit for the search
     upperbound = fmaptime.replace(year=2100)    # Idem for the upper limit
 
-    # There may be more fieldmaps, hence try to limit down the matches to the adjacent acquisitions
+    # There may be more field maps, hence try to narrow the matches down to the adjacent acquisitions
     try:
         fmaptime = dateutil.parser.parse(scans_table.loc[fmap, 'acq_time'])
         runindex = get_bidsvalue(fmap, 'run')
         prevfmap = get_bidsvalue(fmap, 'run', str(int(runindex) - 1))
         nextfmap = get_bidsvalue(fmap, 'run', str(int(runindex) + 1))
         if prevfmap in scans_table.index:
-            lowerbound = dateutil.parser.parse(scans_table.loc[prevfmap, 'acq_time'])    # Narrow the lower search limit down to the preceding fieldmap
+            lowerbound = dateutil.parser.parse(scans_table.loc[prevfmap, 'acq_time'])    # Narrow the lower search limit down to the preceding field map
         if nextfmap in scans_table.index:
-            upperbound = dateutil.parser.parse(scans_table.loc[nextfmap, 'acq_time'])    # Narrow the upper search limit down to the succeeding fieldmap
+            upperbound = dateutil.parser.parse(scans_table.loc[nextfmap, 'acq_time'])    # Narrow the upper search limit down to the succeeding field map
     except (TypeError, ValueError, KeyError, dateutil.parser.ParserError) as acqtimeerror:
         pass    # Raise this only if there are limits and matches, i.e. below
 
@@ -1860,23 +1860,23 @@ def limitmatches(fmap: str, matches: list[str], limits: str, niifiles: set[str],
                 if (lowerbound < acqtime[0] < upperbound) and (lowerlimit <= nr - offset <= upperlimit):
                     niifiles.add(acqtime[1])
         except Exception as matcherror:
-            LOGGER.error(f"Could not bound the fieldmaps using <*:{limits}> as it requires a *_scans.tsv file with acq_time values for: {fmap}\n{matcherror}")
+            LOGGER.error(f"Could not bound the field maps using <*:{limits}> as it requires a *_scans.tsv file with acq_time values for: {fmap}\n{matcherror}")
             niifiles.update(matches)
     else:
         niifiles.update(matches)
 
 
 def addmetadata(bidsses: Path):
     """
-    Adds the special fieldmap metadata (IntendedFor, TE, etc.)
+    Adds the special field map metadata (IntendedFor, TE, etc.)
 
     :param bidsses: The session folder with the BIDS session data
     """
 
     subid = bidsses.name if bidsses.name.startswith('sub-') else bidsses.parent.name
     sesid = bidsses.name if bidsses.name.startswith('ses-') else ''
 
-    # Add IntendedFor search results and TE1+TE2 meta-data to the fieldmap json-files. This has been postponed until all datatypes have been processed (i.e. so that all target images are indeed on disk)
+    # Add IntendedFor search results and TE1+TE2 metadata to the field map json-files. This has been postponed until all data types have been processed (i.e. so that all target images are indeed on disk)
     if (bidsses/'fmap').is_dir():
 
         scans_tsv = bidsses/f"{subid}{'_'+sesid if sesid else ''}_scans.tsv"
@@ -1887,7 +1887,7 @@ def addmetadata(bidsses: Path):
 
         for fmap in [fmap.relative_to(bidsses).as_posix() for fmap in sorted((bidsses/'fmap').glob('sub-*.nii*'))]:
 
-            # Load the existing meta-data
+            # Load the existing metadata
             jsondata = {}
             jsonfile = (bidsses/fmap).with_suffix('').with_suffix('.json')
             if jsonfile.is_file():
@@ -1915,11 +1915,11 @@ def addmetadata(bidsses: Path):
                         LOGGER.verbose(f"Adding IntendedFor to: {jsonfile}")
                         jsondata['IntendedFor'] = [f"bids::{(Path(subid)/sesid/niifile).as_posix()}" for niifile in niifiles]
                     else:
-                        LOGGER.warning(f"Empty 'IntendedFor' fieldmap value in {jsonfile}: the search for {intendedfor} gave no results")
+                        LOGGER.warning(f"Empty 'IntendedFor' field map value in {jsonfile}: the search for {intendedfor} gave no results")
                         jsondata['IntendedFor'] = None
 
                 elif not (intendedfor or jsondata.get('B0FieldSource') or jsondata.get('B0FieldIdentifier')):
-                    LOGGER.warning(f"Empty IntendedFor/B0FieldSource/B0FieldIdentifier fieldmap values in {jsonfile} (i.e. the fieldmap may not be used)")
+                    LOGGER.warning(f"Empty IntendedFor/B0FieldSource/B0FieldIdentifier field map values in {jsonfile} (i.e. the field map may not be used)")
 
                 # Work-around because the bids-validator (v1.8) cannot handle `null` values / unused IntendedFor fields
                 if not jsondata.get('IntendedFor'):
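After a successful search, the sidecar data carries BIDS-URI style IntendedFor entries, along the lines of this hypothetical example:

```python
# Hypothetical jsondata after the IntendedFor search (later dumped to the json sidecar):
jsondata = {
    'IntendedFor': ['bids::sub-001/ses-01/func/sub-001_ses-01_task-rest_bold.nii.gz'],
}
```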
@@ -1944,7 +1944,7 @@ def addmetadata(bidsses: Path):
                         matches.append(match.relative_to(bidsses).as_posix())
                     limitmatches(fmap, matches, limits, niifiles, scans_table)
 
-                    # In the b0fieldtags, replace the limits with fieldmap runindex
+                    # In the b0fieldtags, replace the limits with the field map run index
                     runindex = get_bidsvalue(fmap, 'run')
                     newb0fieldtag = b0fieldtag.replace(':'+limits, '_'+runindex if runindex else '')
                     for niifile in niifiles:
@@ -1988,7 +1988,7 @@ def addmetadata(bidsses: Path):
                     else:
                         LOGGER.verbose(f"Adding EchoTime1: {jsondata['EchoTime1']} and EchoTime2: {jsondata['EchoTime2']} to {jsonfile}")
 
-            # Save the collected meta-data to disk
+            # Save the collected metadata to disk
             if jsondata:
                 with jsonfile.open('w') as sidecar:
                     json.dump(jsondata, sidecar, indent=4)
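For a phase-difference field map, the saved sidecar then also carries the two echo times, which the BIDS specification requires for the phasediff suffix (hypothetical values):

```python
# Hypothetical phasediff sidecar entries as saved by json.dump above:
jsondata = {
    'EchoTime1': 0.00492,   # first (shorter) echo time, in seconds
    'EchoTime2': 0.00738,   # second echo time, in seconds
}
```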
@@ -2003,10 +2003,10 @@ def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, metae
     NB: In future versions this function could also support more source metadata formats, e.g. yaml, csv- or Excel-files
 
     :param datasource: The data source from which dynamic values are read
-    :param targetmeta: The filepath of the target data file with meta-data
+    :param targetmeta: The filepath of the target data file with metadata
     :param usermeta:   A user metadata dict, e.g. the meta table from a run-item
     :param metaext:    A list of file extensions of the source metadata files, e.g. as specified in bidsmap.plugins['plugin']['meta']
-    :param sourcemeta: The filepath of the source data file with associated/equally named meta-data files (name may include wildcards). Leave empty to use datasource.path
+    :param sourcemeta: The filepath of the source data file with associated/equally named metadata files (name may include wildcards). Leave empty to use datasource.path
     :return:           The combined target + source + user metadata
     """
 
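The 'target + source + user' order in the return description implies a precedence: later sources overrule earlier ones, so user metadata wins over source metadata, which in turn wins over the target's existing metadata. A minimal sketch of that merge idea (hypothetical dicts, not the actual implementation):

```python
# Hypothetical pooling where later dicts take precedence over earlier ones:
target_meta = {'EchoTime': 0.03, 'FlipAngle': 90}   # read from the target json sidecar
source_meta = {'FlipAngle': 75}                     # read from an associated source metadata file
user_meta   = {'TaskName': 'rest'}                  # the meta table from the run-item
metapool    = {**target_meta, **source_meta, **user_meta}
# -> {'EchoTime': 0.03, 'FlipAngle': 75, 'TaskName': 'rest'}
```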
@@ -2029,7 +2029,7 @@ def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, metae
         with sourcefile.open('r') as json_fid:
             metadata = json.load(json_fid)
         if not isinstance(metadata, dict):
-            LOGGER.error(f"Skipping unexpectedly formatted meta-data in: {sourcefile}")
+            LOGGER.error(f"Skipping unexpectedly formatted metadata in: {sourcefile}")
             continue
         for metakey, metaval in metadata.items():
             if metapool.get(metakey) and metapool.get(metakey) != metaval: