import logging
from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING

import h5py
import pandas as pd
# NOTE(review): one import line was elided in this diff view between `pandas`
# and `turn_by_turn.structures`; the code below uses `tz.tzutc()`, so it is
# presumably `from dateutil import tz` — confirm against the full file.
from turn_by_turn.structures import TbtData, TransverseData
from turn_by_turn.utils import all_elements_equal

if TYPE_CHECKING:
    from turn_by_turn.constants import MetaDict
4953LOGGER = logging .getLogger (__name__ )
5054
5155DEFAULT_BUNCH_ID : int = 0 # bunch ID not saved in the DOROS file
@@ -132,6 +136,10 @@ def read_tbt(
132136 file_path = Path (file_path )
133137 LOGGER .debug (f"Reading DOROS { data_type } data at path: '{ file_path .absolute ()} '" )
134138 data_keys = DataKeys .get_data_keys (data_type )
139+ meta : MetaDict = {
140+ "file" : file_path ,
141+ "source_datatype" : data_type ,
142+ }
135143
136144 with h5py .File (file_path , "r" ) as hdf_file :
137145 # use "/" to keep track of bpm order, see https://github.com/h5py/h5py/issues/1471
@@ -141,7 +149,7 @@ def read_tbt(
141149 _check_data_lengths (hdf_file , data_keys , bpm_names )
142150
143151 time_stamps = [hdf_file [bpm ][ACQ_STAMP ][0 ] for bpm in bpm_names ]
144- date = datetime .fromtimestamp (min (time_stamps ) / 1e6 , tz = tz .tzutc ())
152+ meta [ " date" ] = datetime .fromtimestamp (min (time_stamps ) / 1e6 , tz = tz .tzutc ())
145153
146154 nturns = hdf_file [bpm_names [0 ]][data_keys .n_samples ][0 ] # equal lengths checked before
147155 matrices = [
@@ -150,7 +158,7 @@ def read_tbt(
150158 Y = _create_dataframe (hdf_file , data_keys , bpm_names , plane = "Y" ),
151159 )
152160 ]
153- return TbtData (matrices , date , [bunch_id ], nturns )
161+ return TbtData (matrices , bunch_ids = [bunch_id ], nturns = nturns , meta = meta )
154162
155163
156164def write_tbt (
@@ -172,13 +180,15 @@ def write_tbt(
172180 data_keys = DataKeys .get_data_keys (data_type )
173181 other_keys = DataKeys .get_other_data_keys (data_type )
174182
183+ timestamp = tbt_data .meta .get ("date" , datetime .now (tz = tz .tzutc ())).timestamp () * 1e6
184+
175185 data = tbt_data .matrices [0 ]
176186 with h5py .File (file_path , "w" , track_order = True ) as hdf_file :
177187 hdf_file .create_group (METADATA )
178188 for bpm in data .X .index :
179189 hdf_file .create_group (bpm )
180- hdf_file [bpm ].create_dataset (ACQ_STAMP , data = [tbt_data . date . timestamp () * 1e6 ])
181- hdf_file [bpm ].create_dataset (BST_TIMESTAMP , data = [tbt_data . date . timestamp () * 1e6 ])
190+ hdf_file [bpm ].create_dataset (ACQ_STAMP , data = [timestamp ])
191+ hdf_file [bpm ].create_dataset (BST_TIMESTAMP , data = [timestamp ])
182192
183193 hdf_file [bpm ].create_dataset (data_keys .n_samples , data = [tbt_data .nturns ])
184194 hdf_file [bpm ].create_dataset (data_keys .data ["X" ], data = data .X .loc [bpm , :].values )