import argparse
import importlib
import importlib.metadata
from typing import Union
from platform import system

try:
    import metadata
except ImportError:
    from pypet2bids import metadata
4248
4349parent_dir = pathlib .Path (__file__ ).parent .resolve ()
4450project_dir = parent_dir .parent .parent
4551if "PET2BIDS" not in project_dir .parts :
4652 project_dir = parent_dir
4753
48- metadata_dir = os .path .join (project_dir , "metadata" )
49-
50- # check to see where the schema is at
51- pet_metadata_json = os .path .join (metadata_dir , "PET_metadata.json" )
52- permalink_pet_metadata_json = "https://github.com/openneuropet/PET2BIDS/blob/76d95cf65fa8a14f55a4405df3fdec705e2147cf/metadata/PET_metadata.json"
53- pet_reconstruction_metadata_json = os .path .join (
54- metadata_dir , "PET_reconstruction_methods.json"
55- )
5654
5755# load bids schema
58- bids_schema_path = os .path .join (metadata_dir , "schema.json" )
59- schema = json .load (open (bids_schema_path , "r" ))
60-
56+ schema = metadata .schema
57+ pet_metadata = metadata .PET_metadata
6158# putting these paths here as they are reused in dcm2niix4pet.py, update_json_pet_file.py, and ecat.py
6259module_folder = Path (__file__ ).parent .resolve ()
6360python_folder = module_folder .parent
6461pet2bids_folder = python_folder .parent
65- metadata_folder = os .path .join (pet2bids_folder , "metadata" )
6662
6763loggers = {}
6864
@@ -91,19 +87,6 @@ def logger(name):
9187 return logger
9288
9389
94- def load_pet_bids_requirements_json (
95- pet_bids_req_json : Union [str , pathlib .Path ] = pet_metadata_json
96- ) -> dict :
97- if type (pet_bids_req_json ) is str :
98- pet_bids_req_json = pathlib .Path (pet_bids_req_json )
99- if pet_bids_req_json .is_file ():
100- with open (pet_bids_req_json , "r" ) as infile :
101- reqs = json .load (infile )
102- return reqs
103- else :
104- raise FileNotFoundError (pet_bids_req_json )
105-
106-
10790def flatten_series (series ):
10891 """
10992 This function retrieves either a list or a single value from a pandas series object thus converting a complex
@@ -146,12 +129,13 @@ def collect_spreadsheets(folder_path: pathlib.Path):
146129
147130def single_spreadsheet_reader (
148131 path_to_spreadsheet : Union [str , pathlib .Path ],
149- pet2bids_metadata_json : Union [ str , pathlib . Path ] = pet_metadata_json ,
132+ pet2bids_metadata : dict = metadata . PET_metadata ,
150133 dicom_metadata = {},
151134 ** kwargs ,
152135) -> dict :
153136
154- metadata = {}
137+ spreadsheet_metadata = {}
138+ metadata_fields = pet2bids_metadata
155139
156140 if type (path_to_spreadsheet ) is str :
157141 path_to_spreadsheet = pathlib .Path (path_to_spreadsheet )
@@ -161,24 +145,6 @@ def single_spreadsheet_reader(
161145 else :
162146 raise FileNotFoundError (f"{ path_to_spreadsheet } does not exist." )
163147
164- if pet2bids_metadata_json :
165- if type (pet_metadata_json ) is str :
166- pet2bids_metadata_json = pathlib .Path (pet2bids_metadata_json )
167-
168- if pet2bids_metadata_json .is_file ():
169- with open (pet_metadata_json , "r" ) as infile :
170- metadata_fields = json .load (infile )
171- else :
172- raise FileNotFoundError (
173- f"Required metadata file not found at { pet_metadata_json } , check to see if this file exists;"
174- f"\n else pass path to file formatted to this { permalink_pet_metadata_json } via "
175- f"pet2bids_metadata_json argument in simplest_spreadsheet_reader call."
176- )
177- else :
178- raise FileNotFoundError (
179- f"pet2bids_metadata_json input required for function call, you provided { pet2bids_metadata_json } "
180- )
181-
182148 spreadsheet_dataframe = open_meta_data (path_to_spreadsheet )
183149
184150 log = logging .getLogger ("pypet2bids" )
@@ -188,7 +154,7 @@ def single_spreadsheet_reader(
188154 for field in metadata_fields [field_level ]:
189155 series = spreadsheet_dataframe .get (field , Series (dtype = numpy .float64 ))
190156 if not series .empty :
191- metadata [field ] = flatten_series (series )
157+ spreadsheet_metadata [field ] = flatten_series (series )
192158 elif (
193159 series .empty
194160 and field_level == "mandatory"
@@ -200,10 +166,10 @@ def single_spreadsheet_reader(
200166 )
201167
202168 # lastly apply any kwargs to the metadata
203- metadata .update (** kwargs )
169+ spreadsheet_metadata .update (** kwargs )
204170
205171 # more lastly, check to see if values are of the correct datatype (e.g. string, number, boolean)
206- for field , value in metadata .items ():
172+ for field , value in spreadsheet_metadata .items ():
207173 # check schema for field
208174 field_schema_properties = schema ["objects" ]["metadata" ].get (field , None )
209175 if field_schema_properties :
@@ -216,7 +182,7 @@ def single_spreadsheet_reader(
216182 try :
217183 check_bool = int (value ) / 1
218184 if check_bool == 0 or check_bool == 1 :
219- metadata [field ] = bool (value )
185+ spreadsheet_metadata [field ] = bool (value )
220186 else :
221187 log .warning (
222188 f"{ field } is not boolean, it's value is { value } "
@@ -228,7 +194,7 @@ def single_spreadsheet_reader(
228194 log .warning (f"{ field } is not string, it's value is { value } " )
229195 else :
230196 pass
231- return metadata
197+ return spreadsheet_metadata
232198
233199
234200def compress (file_like_object , output_path : str = None ):
@@ -316,27 +282,37 @@ def load_vars_from_config(
316282
317283def get_version ():
318284 """
319- Gets the version of this software from the toml file
320- :return: version number from pyproject.toml
285+ Gets the version of this software
286+ :return: version number
321287 """
322288 # this scripts directory path
323289 scripts_dir = pathlib .Path (os .path .dirname (__file__ ))
324290
325- try :
326- # if this is bundled as a package look next to this file for the pyproject.toml
327- toml_path = os .path .join (scripts_dir , "pyproject.toml" )
328- with open (toml_path , "r" ) as infile :
329- tomlfile = toml .load (infile )
330- except FileNotFoundError :
331- # when in development the toml file with the version is 2 directories above (e.g. where it should actually live)
332- toml_dir = scripts_dir .parent
333- toml_path = os .path .join (toml_dir , "pyproject.toml" )
334- with open (toml_path , "r" ) as infile :
335- tomlfile = toml .load (infile )
336-
337- attrs = tomlfile .get ("tool" , {})
338- poetry = attrs .get ("poetry" , {})
339- version = poetry .get ("version" , "" )
291+ # first try using importlib.metadata.version to determine version
292+
293+ version = importlib .metadata .version ("pypet2bids" )
294+
295+ if not version :
296+ tomlfile = {}
297+
298+ try :
299+ # if this is bundled as a package look next to this file for the pyproject.toml
300+ toml_path = os .path .join (scripts_dir , "pyproject.toml" )
301+ with open (toml_path , "r" ) as infile :
302+ tomlfile = toml .load (infile )
303+ except FileNotFoundError :
304+ # when in development the toml file with the version is 2 directories above (e.g. where it should actually live)
305+ try :
306+ toml_dir = scripts_dir .parent
307+ toml_path = os .path .join (toml_dir , "pyproject.toml" )
308+ with open (toml_path , "r" ) as infile :
309+ tomlfile = toml .load (infile )
310+ except FileNotFoundError :
311+ pass
312+
313+ attrs = tomlfile .get ("tool" , {})
314+ poetry = attrs .get ("poetry" , {})
315+ version = poetry .get ("version" , "" )
340316
341317 return version
342318
@@ -820,9 +796,7 @@ def get_recon_method(ReconstructionMethodString: str) -> dict:
820796 dimension = re .search (search_criteria , ReconMethodName )[0 ]
821797
822798 # doing some more manipulation of the recon method name to expand it from not so helpful acronyms
823- possible_names = load_pet_bids_requirements_json (pet_reconstruction_metadata_json )[
824- "reconstruction_names"
825- ]
799+ possible_names = metadata .PET_reconstruction_methods .get ("reconstruction_names" , [])
826800
827801 # we want to sort the possible names by longest first that we don't break up an acronym prematurely
828802 sorted_df = pandas .DataFrame (possible_names ).sort_values (
0 commit comments