|
21 | 21 |
|
22 | 22 | properties (Access = private, Constant) |
23 | 23 | SUPPORTED_DATATYPES = {... |
24 | | - 'float', 'double', 'uint8', 'int8', 'uint16', 'int16', 'uint32',... |
25 | | - 'int32', 'uint64', 'int64' |
| 24 | + 'float', 'double', 'uint8', 'int8', 'uint16', 'int16',... |
| 25 | + 'uint32', 'int32', 'uint64', 'int64' |
26 | 26 | }; |
27 | 27 | end |
28 | 28 |
|
|
33 | 33 | p = inputParser; |
34 | 34 | p.addParameter('filename', ''); |
35 | 35 | p.addParameter('path', ''); |
36 | | - p.addParameter('offset', 1); |
| 36 | + p.addParameter('offset', 0); |
37 | 37 | p.addParameter('axis', 1); |
38 | 38 | p.addParameter('chunkSize', []); |
39 | 39 | p.addParameter('dataType', 'uint8'); |
40 | 40 | p.addParameter('compressionLevel', -1); |
| 41 | + p.addParameter('data', []); |
41 | 42 | p.parse(varargin{:}); |
42 | 43 |
|
43 | 44 | obj.filename = p.Results.filename; |
|
47 | 48 | obj.chunkSize = p.Results.chunkSize; |
48 | 49 | obj.dataType = p.Results.dataType; |
49 | 50 | obj.compressionLevel = p.Results.compressionLevel; |
| 51 | + obj.data = cast(p.Results.data, obj.dataType); |
50 | 52 | end |
51 | 53 | end |
52 | 54 |
|
|
83 | 85 | end |
84 | 86 |
|
85 | 87 | function set.offset(obj, val) |
86 | | - assert(isscalar(val) && isnumeric(val) && val > 0,... |
| 88 | + assert(isscalar(val) && isnumeric(val) && val >= 0,... |
87 | 89 | 'NWB:Untyped:DataPipe:SetOffset:InvalidType',... |
88 | 90 | 'Offset should be a nonnegative scalar indicating axis offset.'); |
89 | 91 | val = ceil(val); |
@@ -204,22 +206,32 @@ function append(obj, data) |
204 | 206 |
|
205 | 207 | fid = H5F.open(obj.filename, 'H5F_ACC_RDWR', default_pid); |
206 | 208 | did = H5D.open(fid, obj.path, default_pid); |
| 209 | + sid = H5D.get_space(did); |
| 210 | + [~, h5_dims, ~] = H5S.get_simple_extent_dims(sid); |
| 211 | + H5S.close(sid); |
207 | 212 |
|
208 | 213 | rank = length(obj.maxSize); |
209 | | - stride_coords = ones(1, rank); |
210 | | - stride_coords(1:length(size(data))) = size(data); |
211 | | - new_extents = obj.maxSize; |
212 | | - new_extents(obj.axis) = obj.offset + stride_coords(obj.axis) - 1; |
| 214 | + stride_coords = size(data); |
| 215 | + if length(stride_coords) > rank && ~all(stride_coords(rank+1:end) == 1) |
| 216 | + warning('NWB:Types:Untyped:DataPipe:InvalidRank',... |
| 217 | + ['Expected rank %d but received data of size %s. '... |
| 218 | + 'Data may be lost on write.'],... |
| 219 | + rank, mat2str(stride_coords)); |
| 220 | + end |
| 221 | + stride_coords = stride_coords(1:rank); |
| 222 | + new_extents = fliplr(h5_dims); |
| 223 | + new_extents(obj.axis) = obj.offset; |
| 224 | + new_extents = new_extents + stride_coords; |
213 | 225 | h5_extents = fliplr(new_extents); |
214 | | - H5D.set_extent(did, h5_extents); |
215 | | - |
| 226 | + H5D.set_extent(did, h5_extents); |
| 227 | + |
216 | 228 | sid = H5D.get_space(did); |
217 | 229 | H5S.select_none(sid); |
218 | 230 |
|
219 | | - offset_coords = ones(1, rank); |
| 231 | + offset_coords = zeros(1, rank); |
220 | 232 | offset_coords(obj.axis) = obj.offset; |
221 | 233 |
|
222 | | - h5_start = fliplr(offset_coords) - 1; |
| 234 | + h5_start = fliplr(offset_coords); |
223 | 235 | h5_stride = []; |
224 | 236 | h5_count = fliplr(stride_coords); |
225 | 237 | h5_block = []; |
|
0 commit comments