Skip to content

Commit 2cbb725

Browse files
authored
Add HDMF to PyNWB (#850)
* Replace FORM (pynwb.form) with HDMF (hdmf) * add HDMF to requirements.txt * get requirements from requirements.txt in setup.py
1 parent 85eccb6 commit 2cbb725

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

88 files changed

+123
-11502
lines changed

docs/code/creating-and-writing-nwbfile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ def main():
136136
# create-compressed-timeseries: start
137137
from pynwb.ecephys import ElectricalSeries
138138
from pynwb.behavior import SpatialSeries
139-
from pynwb.form.backends.hdf5 import H5DataIO
139+
from hdmf.backends.hdf5 import H5DataIO
140140

141141
ephys_ts = ElectricalSeries('test_compressed_ephys_data',
142142
'an hypothetical source',

docs/gallery/general/advanced_hdf5_io.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,11 @@
88
'''
99

1010
####################
11-
# Wrapping data arrays with :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
11+
# Wrapping data arrays with :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`
1212
# ---------------------------------------------------------------------------------
1313
#
1414
# In order to customize the I/O of datasets using the HDF I/O backend we simply need to wrap our datasets
15-
# using :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`. Using H5DataIO allows us to keep the Container
15+
# using :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`. Using H5DataIO allows us to keep the Container
1616
# classes independent of the I/O backend while still allowing us to customize HDF5-specific I/O features.
1717
#
1818
# Before we get started, lets create an NWBFile for testing so that we can add our data to it.
@@ -49,7 +49,7 @@
4949
# Now let's say we want to compress the recorded data values. We now simply need to wrap our data with H5DataIO.
5050
# Everything else remains the same
5151

52-
from pynwb.form.backends.hdf5.h5_utils import H5DataIO
52+
from hdmf.backends.hdf5.h5_utils import H5DataIO
5353
wrapped_data = H5DataIO(data=data, compression=True) # <----
5454
test_ts = TimeSeries(name='test_compressed_timeseries',
5555
data=wrapped_data, # <----
@@ -59,7 +59,7 @@
5959

6060
####################
6161
# This simple approach gives us access to a broad range of advanced I/O features, such as, chunking and
62-
# compression. For a complete list of all available settings see :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
62+
# compression. For a complete list of all available settings see :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`
6363

6464
####################
6565
# Chunking
@@ -81,7 +81,7 @@
8181

8282

8383
####################
84-
# To use chunking we again, simply need to wrap our dataset via :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`.
84+
# To use chunking we again, simply need to wrap our dataset via :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`.
8585
# Using chunking then also allows to also create resizable arrays simply by defining the ``maxshape`` of the array.
8686

8787
data = np.arange(10000).reshape((1000, 10))
@@ -122,7 +122,7 @@
122122
# read/write operations. I/O filters operate on a per-chunk basis in HDF5 and as such require the use of chunking.
123123
# Chunking will be automatically enabled by h5py when compression and other I/O filters are enabled.
124124
#
125-
# To use compression, we can wrap our dataset using :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO` and
125+
# To use compression, we can wrap our dataset using :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO` and
126126
# define the approbriate opions:
127127

128128
wrapped_data = H5DataIO(data=data,
@@ -139,7 +139,7 @@
139139
####################
140140
# .. hint::
141141
#
142-
# In addition to ``compression``, :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO` also allows us to
142+
# In addition to ``compression``, :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO` also allows us to
143143
# enable the ``shuffle`` and ``fletcher32`` HDF5 I/O filters.
144144

145145
####################
@@ -200,10 +200,10 @@
200200

201201

202202
####################
203-
# Wrapping ``h5py.Datasets`` with :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
203+
# Wrapping ``h5py.Datasets`` with :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`
204204
# ------------------------------------------------------------------------------------------------
205205
#
206-
# Just for completeness, :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO` also allows us to customize
206+
# Just for completeness, :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO` also allows us to customize
207207
# how ``h5py.Dataset`` objects should be handled on write by the PyNWBs HDF5 backend via the ``link_data``
208208
# parameter. If ``link_data`` is set to ``True`` then a ``SoftLink`` or ``ExternalLink`` will be created to
209209
# point to the HDF5 dataset On the other hand, if ``link_data`` is set to ``False`` then the dataset

docs/gallery/general/extensions.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
#
2929
# The following block of code demonstrates how to create a new namespace, and then add a new `neurodata_type`
3030
# to this namespace. Finally,
31-
# it calls :py:meth:`~pynwb.form.spec.write.NamespaceBuilder.export` to save the extensions to disk for downstream use.
31+
# it calls :py:meth:`~hdmf.spec.write.NamespaceBuilder.export` to save the extensions to disk for downstream use.
3232

3333
from pynwb.spec import NWBNamespaceBuilder, NWBGroupSpec, NWBAttributeSpec
3434

@@ -96,7 +96,7 @@
9696

9797
from pynwb import register_class, load_namespaces
9898
from pynwb.ecephys import ElectricalSeries
99-
from pynwb.form.utils import docval, call_docval_func, getargs, get_docval
99+
from hdmf.utils import docval, call_docval_func, getargs, get_docval
100100

101101
ns_path = "mylab.namespace.yaml"
102102
load_namespaces(ns_path)
@@ -117,9 +117,9 @@ def __init__(self, **kwargs):
117117
####################
118118
# .. note::
119119
#
120-
# See the API docs for more information about :py:func:`~pynwb.form.utils.docval`
121-
# :py:func:`~pynwb.form.utils.call_docval_func`, :py:func:`~pynwb.form.utils.getargs`
122-
# and :py:func:`~pynwb.form.utils.get_docval`
120+
# See the API docs for more information about :py:func:`~hdmf.utils.docval`
121+
# :py:func:`~hdmf.utils.call_docval_func`, :py:func:`~hdmf.utils.getargs`
122+
# and :py:func:`~hdmf.utils.get_docval`
123123
#
124124
# When extending :py:class:`~pynwb.core.NWBContainer` or :py:class:`~pynwb.core.NWBContainer`
125125
# subclasses, you should defining the class field ``__nwbfields__``. This will
@@ -151,7 +151,7 @@ def __init__(self, **kwargs):
151151
# -----------------------------------------------------
152152
#
153153
# Extensions can be cached to file so that your NWB file will carry the extensions needed to read the file with it.
154-
# This is done by setting *cache_spec* to *True* when calling :py:meth:`~pynwb.form.backends.hdf5.h5tools.HDF5IO.write`
154+
# This is done by setting *cache_spec* to *True* when calling :py:meth:`~hdmf.backends.hdf5.h5tools.HDF5IO.write`
155155
# on :py:class:`~pynwb.NWBHDF5IO` (See :ref:`basic_writing` for more on writing NWB files).
156156
#
157157
# To demonstrate this, first we will make some fake data using our extensions.

docs/gallery/general/iterative_write.py

Lines changed: 33 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -53,20 +53,20 @@
5353
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
5454
#
5555
# In PyNWB the process of iterating over large data arrays is implemented via the concept of
56-
# :py:class:`~pynwb.form.data_utils.DataChunk` and :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator`.
56+
# :py:class:`~hdmf.data_utils.DataChunk` and :py:class:`~hdmf.data_utils.AbstractDataChunkIterator`.
5757
#
58-
# * :py:class:`~pynwb.form.data_utils.DataChunk` is a simple data structure used to describe
58+
# * :py:class:`~hdmf.data_utils.DataChunk` is a simple data structure used to describe
5959
# a subset of a larger data array (i.e., a data chunk), consisting of:
6060
#
6161
# * ``DataChunk.data`` : the array with the data value(s) of the chunk and
6262
# * ``DataChunk.selection`` : the NumPy index tuple describing the location of the chunk in the whole array.
6363
#
64-
# * :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator` then defines a class for iterating over large
65-
# data arrays one-:py:class:`~pynwb.form.data_utils.DataChunk`-at-a-time.
64+
# * :py:class:`~hdmf.data_utils.AbstractDataChunkIterator` then defines a class for iterating over large
65+
# data arrays one-:py:class:`~hdmf.data_utils.DataChunk`-at-a-time.
6666
#
67-
# * :py:class:`~pynwb.form.data_utils.DataChunkIterator` is a specific implementation of an
68-
# :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator` that accepts any iterable and assumes
69-
# that we iterate over the first dimension of the data array. :py:class:`~pynwb.form.data_utils.DataChunkIterator`
67+
# * :py:class:`~hdmf.data_utils.DataChunkIterator` is a specific implementation of an
68+
# :py:class:`~hdmf.data_utils.AbstractDataChunkIterator` that accepts any iterable and assumes
69+
# that we iterate over the first dimension of the data array. :py:class:`~hdmf.data_utils.DataChunkIterator`
7070
# also supports buffered read, i.e., multiple values from the input iterator can be combined to a single chunk.
7171
# This is useful for buffered I/O operations, e.g., to improve performance by accumulating data in memory and
7272
# writing larger blocks at once.
@@ -77,17 +77,17 @@
7777
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
7878
#
7979
# On the front end, all a user needs to do is to create or wrap their data in a
80-
# :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator`. The I/O backend (e.g.,
81-
# :py:class:`~pynwb.form.backends.hdf5.h5tools.HDF5IO` or :py:class:`~pynwb.NWBHDF5IO`) then
80+
# :py:class:`~hdmf.data_utils.AbstractDataChunkIterator`. The I/O backend (e.g.,
81+
# :py:class:`~hdmf.backends.hdf5.h5tools.HDF5IO` or :py:class:`~pynwb.NWBHDF5IO`) then
8282
# implements the iterative processing of the data chunk iterators. PyNWB also provides with
83-
# :py:class:`~pynwb.form.data_utils.DataChunkIterator` a specific implementation of a data chunk iterator
83+
# :py:class:`~hdmf.data_utils.DataChunkIterator` a specific implementation of a data chunk iterator
8484
# which we can use to wrap common iterable types (e.g., generators, lists, or numpy arrays).
8585
# For more advanced use cases we then need to implement our own derived class of
86-
# :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator`.
86+
# :py:class:`~hdmf.data_utils.AbstractDataChunkIterator`.
8787
#
8888
# .. tip::
8989
#
90-
# Currently the HDF5 I/O backend of PyNWB (:py:class:`~pynwb.form.backends.hdf5.h5tools.HDF5IO`,
90+
# Currently the HDF5 I/O backend of PyNWB (:py:class:`~hdmf.backends.hdf5.h5tools.HDF5IO`,
9191
# :py:class:`~pynwb.NWBHDF5IO`) processes itertive data writes one-dataset-at-a-time. This means, that
9292
# while you may have an arbitrary number of iterative data writes, the write is performed in order.
9393
# In the future we may use a queing process to enable the simultaneous processing of multiple iterative writes at
@@ -172,7 +172,7 @@ def iter_sin(chunk_length=10, max_chunks=100):
172172
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
173173
#
174174

175-
from pynwb.form.data_utils import DataChunkIterator
175+
from hdmf.data_utils import DataChunkIterator
176176

177177
data = DataChunkIterator(data=iter_sin(10))
178178

@@ -201,22 +201,22 @@ def iter_sin(chunk_length=10, max_chunks=100):
201201
#
202202
# maxshape=(None, 10), recommended_data_shape=(1, 10), dtype=float64
203203
#
204-
# As we can see :py:class:`~pynwb.form.data_utils.DataChunkIterator` automatically recommends
204+
# As we can see :py:class:`~hdmf.data_utils.DataChunkIterator` automatically recommends
205205
# in its ``maxshape`` that the first dimensions of our array should be unlimited (``None``) and the second
206-
# dimension be ``10`` (i.e., the length of our chunk. Since :py:class:`~pynwb.form.data_utils.DataChunkIterator`
206+
# dimension be ``10`` (i.e., the length of our chunk. Since :py:class:`~hdmf.data_utils.DataChunkIterator`
207207
# has no way of knowing the minimum size of the array it automatically recommends the size of the first
208208
# chunk as the minimum size (i.e, ``(1, 10)``) and also infers the data type automatically from the first chunk.
209209
# To further customize this behavior we may also define the ``maxshape``, ``dtype``, and ``buffer_size`` when
210-
# we create the :py:class:`~pynwb.form.data_utils.DataChunkIterator`.
210+
# we create the :py:class:`~hdmf.data_utils.DataChunkIterator`.
211211
#
212212
# .. tip::
213213
#
214-
# We here used :py:class:`~pynwb.form.data_utils.DataChunkIterator` to conveniently wrap our data stream.
215-
# :py:class:`~pynwb.form.data_utils.DataChunkIterator` assumes that our generators yields in **consecutive order**
214+
# We here used :py:class:`~hdmf.data_utils.DataChunkIterator` to conveniently wrap our data stream.
215+
# :py:class:`~hdmf.data_utils.DataChunkIterator` assumes that our generators yields in **consecutive order**
216216
# **single** complete element along the **first dimension** of our a array (i.e., iterate over the first
217217
# axis and yield one-element-at-a-time). This behavior is useful in many practical cases. However, if
218218
# this strategy does not match our needs, then you can alternatively implement our own derived
219-
# :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator`. We show an example of this next.
219+
# :py:class:`~hdmf.data_utils.AbstractDataChunkIterator`. We show an example of this next.
220220
#
221221

222222

@@ -227,7 +227,7 @@ def iter_sin(chunk_length=10, max_chunks=100):
227227
# Step 1: Create a data chunk iterator for our sparse matrix
228228
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
229229

230-
from pynwb.form.data_utils import AbstractDataChunkIterator, DataChunk
230+
from hdmf.data_utils import AbstractDataChunkIterator, DataChunk
231231

232232

233233
class SparseMatrixIterator(AbstractDataChunkIterator):
@@ -306,8 +306,8 @@ def maxshape(self):
306306

307307
#####################
308308
# In order to also enable compression and other advanced HDF5 dataset I/O featurs we can then also
309-
# wrap our data via :py:class:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`.
310-
from pynwb.form.backends.hdf5.h5_utils import H5DataIO
309+
# wrap our data via :py:class:`~hdmf.backends.hdf5.h5_utils.H5DataIO`.
310+
from hdmf.backends.hdf5.h5_utils import H5DataIO
311311
matrix2 = SparseMatrixIterator(shape=(xsize, ysize),
312312
num_chunks=num_chunks,
313313
chunk_shape=chunk_shape)
@@ -318,7 +318,7 @@ def maxshape(self):
318318
######################
319319
# We can now also customize the chunking , fillvalue and other settings
320320
#
321-
from pynwb.form.backends.hdf5.h5_utils import H5DataIO
321+
from hdmf.backends.hdf5.h5_utils import H5DataIO
322322

323323
# Increase the chunk size and add compression
324324
matrix3 = SparseMatrixIterator(shape=(xsize, ysize),
@@ -427,7 +427,7 @@ def maxshape(self):
427427
#
428428
# **Advantages:**
429429
#
430-
# * We only need to hold one :py:class:`~pynwb.form.data_utils.DataChunk` in memory at any given time
430+
# * We only need to hold one :py:class:`~hdmf.data_utils.DataChunk` in memory at any given time
431431
# * Only the data chunks in the HDF5 file that contain non-default values are ever being allocated
432432
# * The overall size of our file is reduced significantly
433433
# * Reduced I/O load
@@ -437,7 +437,7 @@ def maxshape(self):
437437
#
438438
# With great power comes great responsibility **!** I/O and storage cost will depend among others on the chunk size,
439439
# compression options, and the write pattern, i.e., the number and structure of the
440-
# :py:class:`~pynwb.form.data_utils.DataChunk` objects written. For example, using ``(1,1)`` chunks and writing them
440+
# :py:class:`~hdmf.data_utils.DataChunk` objects written. For example, using ``(1,1)`` chunks and writing them
441441
# one value at a time would result in poor I/O performance in most practical cases, because of the large number of
442442
# chunks and large number of small I/O operations required.
443443
#
@@ -489,7 +489,7 @@ def maxshape(self):
489489
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
490490
#
491491
# Note, we here use a generator for simplicity but we could equally well also implement our own
492-
# :py:class:`~pynwb.form.data_utils.AbstractDataChunkIterator`.
492+
# :py:class:`~hdmf.data_utils.AbstractDataChunkIterator`.
493493

494494

495495
def iter_largearray(filename, shape, dtype='float64'):
@@ -510,7 +510,7 @@ def iter_largearray(filename, shape, dtype='float64'):
510510
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
511511
#
512512

513-
from pynwb.form.data_utils import DataChunkIterator
513+
from hdmf.data_utils import DataChunkIterator
514514

515515
data = DataChunkIterator(data=iter_largearray(filename='basic_sparse_iterwrite_testdata.npy',
516516
shape=datashape),
@@ -530,8 +530,8 @@ def iter_largearray(filename, shape, dtype='float64'):
530530
# .. tip::
531531
#
532532
# Again, if we want to explicitly control how our data will be chunked (compressed etc.)
533-
# in the HDF5 file then we need to wrap our :py:class:`~pynwb.form.data_utils.DataChunkIterator`
534-
# using :py:class:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
533+
# in the HDF5 file then we need to wrap our :py:class:`~hdmf.data_utils.DataChunkIterator`
534+
# using :py:class:`~hdmf.backends.hdf5.h5_utils.H5DataIO`
535535

536536
####################
537537
# Discussion
@@ -589,7 +589,7 @@ def iter_largearray(filename, shape, dtype='float64'):
589589
# Step 1: Create a data chunk iterator for our multifile array
590590
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
591591

592-
from pynwb.form.data_utils import AbstractDataChunkIterator, DataChunk # noqa
592+
from hdmf.data_utils import AbstractDataChunkIterator, DataChunk # noqa
593593

594594

595595
class MultiFileArrayIterator(AbstractDataChunkIterator):
@@ -666,16 +666,16 @@ def maxshape(self):
666666
#
667667
# Common mistakes that will result in errors on write:
668668
#
669-
# * The size of a :py:class:`~pynwb.form.data_utils.DataChunk` does not match the selection.
670-
# * The selection for the :py:class:`~pynwb.form.data_utils.DataChunk` is not supported by h5py
669+
# * The size of a :py:class:`~hdmf.data_utils.DataChunk` does not match the selection.
670+
# * The selection for the :py:class:`~hdmf.data_utils.DataChunk` is not supported by h5py
671671
# (e.g., unordered lists etc.)
672672
#
673673
# Other common mistakes:
674674
#
675675
# * Choosing inappropriate chunk sizes. This typically means bad performance with regard to I/O and/or storage cost.
676676
# * Using auto chunking without supplying a good recommended_data_shape. h5py auto chunking can only make a good
677677
# guess of what the chunking should be if it (at least roughly) knows what the shape of the array will be.
678-
# * Trying to wrap a data generator using the default :py:class:`~pynwb.form.data_utils.DataChunkIterator`
678+
# * Trying to wrap a data generator using the default :py:class:`~hdmf.data_utils.DataChunkIterator`
679679
# when the generator does not comply with the assumptions of the default implementation (i.e., yield
680680
# individual, complete elements along the first dimension of the array one-at-a-time). Depending on the generator,
681681
# this may or may not result in an error on write, but the array you are generating will probably end up

docs/gallery/general/linking_data.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -151,9 +151,9 @@
151151
# our TimeSeries, this means that :py:class:`~pynwb.NWBHDF5IO` will need to
152152
# determine on write how to treat the dataset. We can make this explicit and customize this
153153
# behavior on a per-dataset basis by wrapping our dataset using
154-
# :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
154+
# :py:meth:`~hdmf.backends.hdf5.h5_utils.H5DataIO`
155155

156-
from pynwb.form.backends.hdf5.h5_utils import H5DataIO
156+
from hdmf.backends.hdf5.h5_utils import H5DataIO
157157

158158
# Create another timeseries that links to the same data
159159
test_ts5 = TimeSeries(name='test_timeseries5',
@@ -188,7 +188,7 @@
188188
# ---------------------------
189189
#
190190
# Appending to files and linking is made possible by passing around the same
191-
# :py:class:`~pynwb.form.build.map.BuildManager`. You can get a manager to pass around
191+
# :py:class:`~hdmf.build.map.BuildManager`. You can get a manager to pass around
192192
# using the :py:meth:`~pynwb.get_manager` function.
193193
#
194194

@@ -245,5 +245,5 @@
245245
#
246246
# External links are convenient but to share data we may want to hand a single file with all the
247247
# data to our collaborator rather than having to collect all relevant files. To do this,
248-
# :py:class:`~pynwb.form.backends.hdf5.h5tools.HDF5IO` (and in turn :py:class:`~pynwb.NWBHDF5IO`)
249-
# provide the convenience function :py:func:`~pynwb.form.backends.hdf5.h5tools.HDF5IO.copy_file`
248+
# :py:class:`~hdmf.backends.hdf5.h5tools.HDF5IO` (and in turn :py:class:`~pynwb.NWBHDF5IO`)
249+
# provide the convenience function :py:func:`~hdmf.backends.hdf5.h5tools.HDF5IO.copy_file`

requirements-dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
certifi==2018.1.18
1+
certifi==2019.3.9
22
chardet==3.0.4
33
codecov==2.0.15
44
configparser==3.5.0

requirements.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
certifi==2018.1.18
1+
certifi==2019.3.9
22
chardet==3.0.4
33
h5py==2.9.0
44
idna==2.6
@@ -9,3 +9,4 @@ ruamel.yaml==0.15.85
99
six==1.11.0
1010
urllib3==1.23
1111
pandas==0.23.4
12+
hdmf==1.0.1

0 commit comments

Comments
 (0)