Skip to content

Commit 839ab3e

Browse files
committed
hdf5_policy.py: Update meta-data
1 parent 724cad9 commit 839ab3e

File tree

4 files changed

+39
-12
lines changed

4 files changed

+39
-12
lines changed

.bumpversion.cfg

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[bumpversion]
2-
current_version = 0.25.4
2+
current_version = 0.25.5
33
commit = True
44
tag = False
55

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ dynamic = ["license", "readme", "authors", "requires-python", "description", "cl
3838
[tool.poetry]
3939
authors = ["Christoph Schueler <[email protected]>"]
4040
name = "pyxcp"
41-
version = "0.25.4"
41+
version = "0.25.5"
4242
readme = "docs/README.rst"
4343
description = "Universal Calibration Protocol for Python"
4444
keywords = ["automotive", "ecu", "xcp", "asam", "autosar"]

pyxcp/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,4 +17,4 @@
1717

1818
# if you update this manually, do not forget to update
1919
# .bumpversion.cfg and pyproject.toml.
20-
__version__ = "0.25.4"
20+
__version__ = "0.25.5"

pyxcp/transport/hdf5_policy.py

Lines changed: 36 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import datetime
44
from pathlib import Path
5-
from typing import List, Dict, Any
5+
from typing import List, Dict
66

77
import h5py
88
import numpy as np
@@ -26,6 +26,21 @@
2626
"BF16": np.float16,
2727
}
2828

29+
MAP_TO_ASAM_HO = {
30+
"U8": "A_UINT8",
31+
"I8": "A_INT8",
32+
"U16": "A_UINT16",
33+
"I16": "A_INT16",
34+
"U32": "A_UINT32",
35+
"I32": "A_INT32",
36+
"U64": "A_UINT64",
37+
"I64": "A_INT64",
38+
"F32": "A_FLOAT32",
39+
"F64": "A_FLOAT64",
40+
"F16": "A_FLOAT16",
41+
"BF16": "A_FLOAT16",
42+
}
43+
2944

3045
class BufferedDataset:
3146
def __init__(self, dataset: h5py.Dataset):
@@ -40,7 +55,7 @@ def add_sample(self, sample: int | float):
4055
def flush(self):
4156
batch = np.array(self.buffer)
4257
self.dataset.resize((self.dataset.shape[0] + len(batch),))
43-
self.dataset[-len(batch):] = batch
58+
self.dataset[-len(batch) :] = batch
4459
self.buffer.clear()
4560
self.dataset.flush()
4661

@@ -50,10 +65,10 @@ def __len__(self):
5065

5166
class DatasetGroup:
5267
def __init__(
53-
self,
54-
ts0_ds: BufferedDataset,
55-
ts1_ds: BufferedDataset,
56-
datasets: List[BufferedDataset],
68+
self,
69+
ts0_ds: BufferedDataset,
70+
ts1_ds: BufferedDataset,
71+
datasets: List[BufferedDataset],
5772
):
5873
self.ts0_ds = ts0_ds
5974
self.ts1_ds = ts1_ds
@@ -73,13 +88,16 @@ def finalize(self):
7388

7489

7590
def create_timestamp_column(hdf_file: h5py.File, group_name: str, num: int) -> h5py.Dataset:
76-
return hdf_file.create_dataset(
91+
result = hdf_file.create_dataset(
7792
f"/{group_name}/timestamp{num}",
7893
shape=(0,),
7994
maxshape=(None,),
8095
dtype=np.uint64,
8196
chunks=True,
8297
)
98+
result.attrs["asam_data_type"] = "A_UINT64"
99+
result.attrs["resolution"] = ("1 nanosecond",)
100+
return result
83101

84102

85103
class Hdf5OnlinePolicy(DaqOnlinePolicy):
@@ -106,20 +124,29 @@ def initialize(self):
106124
for num, daq_list in enumerate(self.daq_lists):
107125
if daq_list.stim:
108126
continue
109-
self.hdf.create_group(daq_list.name)
127+
grp = self.hdf.create_group(daq_list.name)
128+
grp.attrs["event_num"] = daq_list.event_num
129+
grp.attrs["enable_timestamps"] = daq_list.enable_timestamps
130+
grp.attrs["prescaler"] = daq_list.prescaler
131+
grp.attrs["priority"] = daq_list.priority
132+
grp.attrs["direction"] = "STIM" if daq_list.stim else "DAQ"
110133
ts0 = BufferedDataset(create_timestamp_column(self.hdf, daq_list.name, 0))
111134
ts1 = BufferedDataset(create_timestamp_column(self.hdf, daq_list.name, 1))
112135
meas_map = {m.name: m for m in self.daq_lists[num].measurements}
113136
dsets = []
114137
for name, _ in daq_list.headers:
115138
meas = meas_map[name]
116139
dataset = self.hdf.create_dataset(
117-
f"/{daq_list.name}/{meas.name}",
140+
f"/{daq_list.name}/{meas.name}/raw",
118141
shape=(0,),
119142
maxshape=(None,),
120143
dtype=MAP_TO_NP[meas.data_type],
121144
chunks=(1024,),
122145
)
146+
sub_group = dataset.parent
147+
sub_group.attrs["asam_data_type"] = MAP_TO_ASAM_HO.get(meas.data_type, "n/a")
148+
dataset.attrs["ecu_address"] = meas.address
149+
dataset.attrs["ecu_address_extension"] = meas.ext
123150
dsets.append(BufferedDataset(dataset))
124151
self.datasets[num] = DatasetGroup(ts0_ds=ts0, ts1_ds=ts1, datasets=dsets)
125152
self.hdf.flush()

0 commit comments

Comments (0)