Skip to content
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions +io/+space/getSize.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
function [datasetSize, datasetMaxSize] = getSize(spaceId)
% getSize - Retrieve the current and maximum sizes of a dataset's dataspace.
%
% Syntax:
%  [datasetSize, datasetMaxSize] = io.space.getSize(spaceId)
%
% Input Arguments:
%  spaceId {H5ML.id} - Identifier for the dataspace from which
%  the dimensions are retrieved.
%
% Output Arguments:
%  datasetSize - Current size of the dataset dimensions.
%  datasetMaxSize - Maximum size of the dataset dimensions.
%
% Note:
%  - Dimensions are reversed because the HDF5 API reports them in
%    C-style (row-major) order while MATLAB uses F-style (column-major).
%  - The H5S_UNLIMITED sentinel is mapped to Inf for unlimited dimensions.

    arguments
        spaceId {matnwb.common.compatibility.mustBeA(spaceId, "H5ML.id")}
    end

    [~, cOrderDims, cOrderMaxDims] = H5S.get_simple_extent_dims(spaceId);

    % Reverse C-style dimension order into MATLAB's F-style convention.
    datasetSize = flip(cOrderDims, 2);
    datasetMaxSize = flip(cOrderMaxDims, 2);

    % Unlimited dimensions come back as the H5S_UNLIMITED sentinel value;
    % report them as Inf instead.
    unlimitedSentinel = H5ML.get_constant_value('H5S_UNLIMITED');
    datasetMaxSize(datasetMaxSize == unlimitedSentinel) = Inf;
end
9 changes: 6 additions & 3 deletions +tests/+fixtures/ExtensionGenerationFixture.m
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,13 @@ function clearGenerated(fixture)
namespaceName = extractBefore(namespaceFilename, '.');

generatedTypesDirectory = fullfile(fixture.TypesOutputFolder, "+types", "+"+namespaceName);
rmdir(generatedTypesDirectory, 's');

if isfolder(generatedTypesDirectory)
rmdir(generatedTypesDirectory, 's');
end
cacheFile = fullfile(fixture.TypesOutputFolder, "namespaces", namespaceName+".mat");
delete(cacheFile)
if isfile(cacheFile)
delete(cacheFile)
end
end
end
end
48 changes: 40 additions & 8 deletions +tests/+unit/dataStubTest.m
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
classdef dataStubTest < tests.abstract.NwbTestCase

Check failure on line 1 in +tests/+unit/dataStubTest.m

View workflow job for this annotation

GitHub Actions / JUnit Test Report

dataStubTest.testNestedDataIndexing

Error occurred in tests.unit.dataStubTest/testNestedDataIndexing and it did not run to completion. ---------
Raw output
Error occurred in tests.unit.dataStubTest/testNestedDataIndexing and it did not run to completion.
    ---------
    Error ID:
    ---------
    'NWB:CheckDType:InvalidValue'
    --------------
    Error Details:
    --------------
    Error using types.util.checkDtype>checkDtypeForCompoundDataset/validateCompoundFields (line 195)
    Compound type must only contain fields (a, b, c, objref, regref)
    
    Error in types.util.checkDtype>checkDtypeForCompoundDataset (line 135)
        validateCompoundFields(expectedFields, value)
    
    Error in types.util.checkDtype (line 26)
            value = checkDtypeForCompoundDataset(name, typeDescriptor, value);
    
    Error in types.cs/CompoundRefData/validate_data (line 59)
    
    Error in types.cs/CompoundRefData/set.data (line 45)
    
    Error in types.cs/CompoundRefData (line 37)
    
    Error in tests.unit.dataStubTest/testNestedDataIndexing (line 196)
                compoundRef = types.cs.CompoundRefData('data', table(...

methods (TestClassSetup)
function generateTestSchemas(testCase)
    % Generate the rrs and cs test extensions for use in all tests
    % of this test suite, using fixture for proper cleanup.
    % NOTE(review): 'rrs' and 'cs' appear to be the namespace names of
    % the test schema extensions used by the tests below (e.g.
    % types.cs.CompoundRefData) — confirm against applyTestSchemaFixture.
    testCase.applyTestSchemaFixture('rrs');
    testCase.applyTestSchemaFixture('cs');
end
end

methods (TestMethodSetup)
function setupMethod(testCase)
% Use a fixture to create a temporary working directory
Expand Down Expand Up @@ -70,9 +79,6 @@

function testObjectCopy(testCase)

testCase.applyTestSchemaFixture('rrs');
testCase.applyTestSchemaFixture('cs');

nwb = NwbFile(...
'identifier', 'DATASTUB',...
'session_description', 'test datastub object copy',...
Expand All @@ -91,10 +97,12 @@
nwb.acquisition.set('rc', rc);
nwb.analysis.set('rcRef', rcRef);
nwbExport(nwb, 'original.nwb');

nwbOriginalIn = nwbRead('original.nwb', 'ignorecache');
tests.util.verifyContainerEqual(testCase, nwbOriginalIn, nwb);

nwbExport(nwbOriginalIn, 'copy.nwb');

nwbCopyIn = nwbRead('copy.nwb', 'ignorecache');
tests.util.verifyContainerEqual(testCase, nwbCopyIn, nwb);
end
Expand Down Expand Up @@ -138,11 +146,6 @@
end

function testResolveCompoundDataType(testCase)

% Generate the compound test schema using fixture
testCase.applyTestSchemaFixture('rrs');
testCase.applyTestSchemaFixture('cs');

% Set up file with compound dataset
nwb = tests.factory.NWBFile();

Expand Down Expand Up @@ -178,5 +181,34 @@
compoundRefInDirectRead.dims, ...
compoundRefIn.data.dims )
end

function testNestedDataIndexing(testCase)
    % Verify field-wise indexing into a compound dataset read back as a
    % DataStub, i.e. the data(1:2).fieldName access pattern.

    % Set up file with compound dataset
    nwb = tests.factory.NWBFile();

    ts = tests.factory.TimeSeriesWithTimestamps();
    nwb.acquisition.set('timeseries', ts);

    tsPath = '/acquisition/timeseries';
    tsDataPath = [tsPath '/data'];

    % CompoundRefData validation requires exactly the fields
    % (a, b, c, objref, regref); omitting 'c' fails with
    % NWB:CheckDType:InvalidValue. 'c' is assumed numeric like 'a'/'b'
    % — confirm against the cs schema definition.
    compoundRef = types.cs.CompoundRefData('data', table(...
        rand(2, 1),...
        rand(2, 1),...
        rand(2, 1),...
        [types.untyped.ObjectView(tsPath); types.untyped.ObjectView(tsPath)],...
        [types.untyped.RegionView(tsDataPath, 1:2); types.untyped.RegionView(tsDataPath, 2:3)],...
        'VariableNames', {'a', 'b', 'c', 'objref', 'regref'}));

    nwb.analysis.set('compoundRef', compoundRef);
    nwbExport(nwb, 'test.nwb');

    % Read in data
    nwbIn = nwbRead('test.nwb', 'ignorecache');
    compoundRefIn = nwbIn.analysis.get('compoundRef');

    % data(1:2).a exercises DataStub's custom subsref /
    % numArgumentsFromSubscript for compound field access.
    testCase.verifyClass(compoundRefIn.data(1:2).a, 'double');
    testCase.verifyLength(compoundRefIn.data(1:2).a, 2);
end
end
end
13 changes: 4 additions & 9 deletions +types/+untyped/+datapipe/BoundPipe.m
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
properties (SetAccess = private)
config = types.untyped.datapipe.Configuration.empty;
pipeProperties = {};
stub = types.untyped.DataStub.empty;
stub types.untyped.DataStub = types.untyped.DataStub.empty;
end

properties (SetAccess = private, Dependent)
Expand All @@ -24,14 +24,8 @@

obj.stub = types.untyped.DataStub(filename, path);

sid = obj.stub.get_space();
[~, h5_dims, h5_maxdims] = H5S.get_simple_extent_dims(sid);
H5S.close(sid);

current_size = fliplr(h5_dims);
max_size = fliplr(h5_maxdims);
h5_unlimited = H5ML.get_constant_value('H5S_UNLIMITED');
max_size(max_size == h5_unlimited) = Inf;
current_size = obj.stub.dims;
max_size = obj.stub.maxDims;

did = obj.getDataset();

Expand Down Expand Up @@ -224,6 +218,7 @@ function append(obj, data)
H5F.close(fid);

obj.config.offset = obj.config.offset + data_size(obj.config.axis);
obj.stub.updateSize()
end

function property = getPipeProperty(obj, type)
Expand Down
72 changes: 53 additions & 19 deletions +types/+untyped/@DataStub/DataStub.m
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,15 @@
ndims;
dataType;
end

properties (Dependent, SetAccess = private, GetAccess = ?types.untyped.datapipe.BoundPipe)
maxDims
end

properties (Access = private)
dims_ double
dataType_ {mustBeA(dataType_, ["char", "string", "struct"])} = string.empty % Can be char (simple type) or struct (compound type descriptor)
dataType_ {matnwb.common.compatibility.mustBeA(dataType_, ["char", "string", "struct"])} = string.empty % Can be char (simple type) or struct (compound type descriptor)
maxDims_ double
end

methods
Expand All @@ -37,25 +43,21 @@
obj.dataType_ = dataType; % Keep as struct for compound types
end
end

function sid = get_space(obj) % Todo: private method
fid = H5F.open(obj.filename);
did = H5D.open(fid, obj.path);
sid = H5D.get_space(did);
H5D.close(did);
H5F.close(fid);
end


function dims = get.dims(obj)
if isempty(obj.dims_)
sid = obj.get_space();
[~, h5_dims, ~] = H5S.get_simple_extent_dims(sid);
obj.dims_ = fliplr(h5_dims);
H5S.close(sid);
obj.updateSize()
end
dims = obj.dims_;
end


function maxDims = get.maxDims(obj)
    % Lazily populated: updateSize() fills maxDims_ (and dims_) from the
    % dataset's current dataspace on first access.
    if isempty(obj.maxDims_)
        obj.updateSize()
    end
    maxDims = obj.maxDims_;
end

function nd = get.ndims(obj)
    % Rank of the dataset, derived from the cached dimension vector.
    nd = numel(obj.dims);
end
Expand Down Expand Up @@ -185,10 +187,10 @@

refs = export(obj, fid, fullpath, refs);

function B = subsref(obj, S)
function varargout = subsref(obj, S)
CurrentSubRef = S(1);
if ~isscalar(obj) || strcmp(CurrentSubRef.type, '.')
B = builtin('subsref', obj, S);
[varargout{1:nargout}] = builtin('subsref', obj, S);
return;
end

Expand All @@ -200,9 +202,9 @@
selectionRank, rank);
data = obj.load_mat_style(CurrentSubRef.subs{:});
if isscalar(S)
B = data;
varargout = {data};
else
B = subsref(data, S(2:end));
[varargout{1:nargout}] = subsref(data, S(2:end));
end
end

Expand All @@ -225,4 +227,36 @@
tf = isstruct(dt);
end
end

methods % Custom indexing
function n = numArgumentsFromSubscript(obj, subs, indexingContext)
    % numArgumentsFromSubscript - Number of outputs for an indexing expression.
    %
    % Compound-data access of the form data(1:3).fieldName yields a single
    % value, so report one output for that pattern; every other pattern
    % defers to the builtin rule.
    isCompoundFieldAccess = ~isscalar(subs) && strcmp(subs(1).type, '()');
    if isCompoundFieldAccess
        n = 1;
    else
        n = builtin('numArgumentsFromSubscript', obj, subs, indexingContext);
    end
end
end

methods (Access = {?types.untyped.DataStub, ?types.untyped.datapipe.BoundPipe})
function updateSize(obj)
    % updateSize - Should be called to initialize values or when dataset
    % space is expanded
    %
    % Refreshes the cached dims_ / maxDims_ from the dataset's current
    % dataspace. io.space.getSize returns F-style (MATLAB) dimension
    % order with unlimited dimensions mapped to Inf.
    sid = get_space(obj);
    [obj.dims_, obj.maxDims_] = io.space.getSize(sid);
    H5S.close(sid);
end
end

methods (Access = private)
function sid = get_space(obj)
    % get_space - Open and return the dataspace id for this stub's dataset.
    %
    % Output Arguments:
    %  sid - Dataspace identifier; the caller is responsible for closing
    %  it with H5S.close.
    %
    % onCleanup guards ensure the file and dataset identifiers are
    % released even if an intermediate H5 call errors (the original code
    % leaked fid/did on such errors). The returned dataspace id remains
    % valid after the dataset and file ids are closed.
    fid = H5F.open(obj.filename);
    fileCleanup = onCleanup(@() H5F.close(fid)); %#ok<NASGU>
    did = H5D.open(fid, obj.path);
    datasetCleanup = onCleanup(@() H5D.close(did)); %#ok<NASGU>
    sid = H5D.get_space(did);
end
end
end
Loading