22
33import datetime
44from pathlib import Path
5- from typing import List , Dict , Any
5+ from typing import List , Dict
66
77import h5py
88import numpy as np
2626 "BF16" : np .float16 ,
2727}
2828
# Maps the recorder's measurement data-type codes to ASAM "A_*" type
# names, stored as HDF5 attributes alongside the raw data.  BF16 has no
# dedicated ASAM type here and is recorded as A_FLOAT16.
MAP_TO_ASAM_HO = dict(
    U8="A_UINT8",
    I8="A_INT8",
    U16="A_UINT16",
    I16="A_INT16",
    U32="A_UINT32",
    I32="A_INT32",
    U64="A_UINT64",
    I64="A_INT64",
    F32="A_FLOAT32",
    F64="A_FLOAT64",
    F16="A_FLOAT16",
    BF16="A_FLOAT16",
)

2944
3045class BufferedDataset :
3146 def __init__ (self , dataset : h5py .Dataset ):
@@ -40,7 +55,7 @@ def add_sample(self, sample: int | float):
def flush(self):
    """Append all buffered samples to the HDF5 dataset and flush to disk.

    Grows the (1-D, resizable) dataset by the batch length, writes the
    batch into the newly created tail region, then clears the in-memory
    buffer.
    """
    # Guard against an empty buffer: with len(batch) == 0 the slice
    # ``[-len(batch):]`` degenerates to ``[0:]`` — the WHOLE dataset —
    # and assigning an empty batch to it raises a shape mismatch.
    if not self.buffer:
        return
    batch = np.array(self.buffer)
    self.dataset.resize((self.dataset.shape[0] + len(batch),))
    self.dataset[-len(batch):] = batch
    self.buffer.clear()
    self.dataset.flush()
4661
@@ -50,10 +65,10 @@ def __len__(self):
5065
5166class DatasetGroup :
5267 def __init__ (
53- self ,
54- ts0_ds : BufferedDataset ,
55- ts1_ds : BufferedDataset ,
56- datasets : List [BufferedDataset ],
68+ self ,
69+ ts0_ds : BufferedDataset ,
70+ ts1_ds : BufferedDataset ,
71+ datasets : List [BufferedDataset ],
5772 ):
5873 self .ts0_ds = ts0_ds
5974 self .ts1_ds = ts1_ds
@@ -73,13 +88,16 @@ def finalize(self):
7388
7489
def create_timestamp_column(hdf_file: h5py.File, group_name: str, num: int) -> h5py.Dataset:
    """Create an empty, extendable uint64 timestamp dataset for a DAQ group.

    Args:
        hdf_file: Open HDF5 file to create the dataset in.
        group_name: Name of the group the timestamp column belongs to.
        num: Timestamp column index, appended to the dataset name
            (``timestamp0`` / ``timestamp1``).

    Returns:
        The newly created ``/<group_name>/timestamp<num>`` dataset,
        initially of length 0.
    """
    result = hdf_file.create_dataset(
        f"/{group_name}/timestamp{num}",
        shape=(0,),
        maxshape=(None,),  # unlimited growth along the single axis
        dtype=np.uint64,
        chunks=True,  # let h5py choose a chunk size
    )
    result.attrs["asam_data_type"] = "A_UINT64"
    # Fix: previously written as ("1 nanosecond",) — the stray trailing
    # comma stored a 1-tuple, so the attribute round-tripped as a string
    # array instead of a scalar string like asam_data_type above.
    result.attrs["resolution"] = "1 nanosecond"
    return result
83101
84102
85103class Hdf5OnlinePolicy (DaqOnlinePolicy ):
@@ -106,20 +124,29 @@ def initialize(self):
106124 for num , daq_list in enumerate (self .daq_lists ):
107125 if daq_list .stim :
108126 continue
109- self .hdf .create_group (daq_list .name )
127+ grp = self .hdf .create_group (daq_list .name )
128+ grp .attrs ["event_num" ] = daq_list .event_num
129+ grp .attrs ["enable_timestamps" ] = daq_list .enable_timestamps
130+ grp .attrs ["prescaler" ] = daq_list .prescaler
131+ grp .attrs ["priority" ] = daq_list .priority
132+ grp .attrs ["direction" ] = "STIM" if daq_list .stim else "DAQ"
110133 ts0 = BufferedDataset (create_timestamp_column (self .hdf , daq_list .name , 0 ))
111134 ts1 = BufferedDataset (create_timestamp_column (self .hdf , daq_list .name , 1 ))
112135 meas_map = {m .name : m for m in self .daq_lists [num ].measurements }
113136 dsets = []
114137 for name , _ in daq_list .headers :
115138 meas = meas_map [name ]
116139 dataset = self .hdf .create_dataset (
117- f"/{ daq_list .name } /{ meas .name } " ,
140+ f"/{ daq_list .name } /{ meas .name } /raw " ,
118141 shape = (0 ,),
119142 maxshape = (None ,),
120143 dtype = MAP_TO_NP [meas .data_type ],
121144 chunks = (1024 ,),
122145 )
146+ sub_group = dataset .parent
147+ sub_group .attrs ["asam_data_type" ] = MAP_TO_ASAM_HO .get (meas .data_type , "n/a" )
148+ dataset .attrs ["ecu_address" ] = meas .address
149+ dataset .attrs ["ecu_address_extension" ] = meas .ext
123150 dsets .append (BufferedDataset (dataset ))
124151 self .datasets [num ] = DatasetGroup (ts0_ds = ts0 , ts1_ds = ts1 , datasets = dsets )
125152 self .hdf .flush ()
0 commit comments