diff --git a/CHANGELOG.md b/CHANGELOG.md
index d18bf235a..fc9974a14 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,7 @@
 
 ### Bug fixes
 - Fixed issue where scalar datasets with a compound data type were being written as non-scalar datasets @stephprince [#1176](https://github.com/hdmf-dev/hdmf/pull/1176)
+- Fixed H5DataIO not exposing `maxshape` on non-dci dsets. @cboulay [#1149](https://github.com/hdmf-dev/hdmf/pull/1149)
 
 ## HDMF 3.14.3 (July 29, 2024)
 
diff --git a/src/hdmf/backends/hdf5/h5_utils.py b/src/hdmf/backends/hdf5/h5_utils.py
index 278735fbc..2d7187721 100644
--- a/src/hdmf/backends/hdf5/h5_utils.py
+++ b/src/hdmf/backends/hdf5/h5_utils.py
@@ -21,7 +21,7 @@
 from ...query import HDMFDataset, ReferenceResolver, ContainerResolver, BuilderResolver
 from ...region import RegionSlicer
 from ...spec import SpecWriter, SpecReader
-from ...utils import docval, getargs, popargs, get_docval
+from ...utils import docval, getargs, popargs, get_docval, get_data_shape
 
 
 class HDF5IODataChunkIteratorQueue(deque):
@@ -672,3 +672,14 @@ def valid(self):
         if isinstance(self.data, Dataset) and not self.data.id.valid:
             return False
         return super().valid
+
+    @property
+    def maxshape(self):
+        if 'maxshape' in self.io_settings:
+            return self.io_settings['maxshape']
+        elif hasattr(self.data, 'maxshape'):
+            return self.data.maxshape
+        elif hasattr(self, "shape"):
+            return self.shape
+        else:
+            return get_data_shape(self.data)
diff --git a/tests/unit/test_io_hdf5_h5tools.py b/tests/unit/test_io_hdf5_h5tools.py
index 1f0c2eb4c..131e4a6de 100644
--- a/tests/unit/test_io_hdf5_h5tools.py
+++ b/tests/unit/test_io_hdf5_h5tools.py
@@ -607,6 +607,12 @@ def test_pass_through_of_chunk_shape_generic_data_chunk_iterator(self):
     #############################################
     # H5DataIO general
     #############################################
+    def test_pass_through_of_maxshape_on_h5dataset(self):
+        k = 10
+        self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(k), attributes={}))
+        dset = H5DataIO(self.f['test_dataset'])
+        self.assertEqual(dset.maxshape, (k,))
+
     def test_warning_on_non_gzip_compression(self):
         # Make sure no warning is issued when using gzip
         with warnings.catch_warnings(record=True) as w:
@@ -3763,6 +3769,14 @@ def test_dataio_shape_then_data(self):
         with self.assertRaisesRegex(ValueError, "Setting data when dtype and shape are not None is not supported"):
             dataio.data = list()
 
+    def test_dataio_maxshape(self):
+        dataio = H5DataIO(data=np.arange(10), maxshape=(None,))
+        self.assertEqual(dataio.maxshape, (None,))
+
+    def test_dataio_maxshape_from_data(self):
+        dataio = H5DataIO(data=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+        self.assertEqual(dataio.maxshape, (10,))
+
 
 def test_hdf5io_can_read():
     assert not HDF5IO.can_read("not_a_file")
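
The new `maxshape` property falls back from an explicit `io_settings['maxshape']`, to the wrapped data's own `maxshape`, to the wrapper's `shape`, and finally to `get_data_shape(self.data)`. A minimal usage sketch of what callers can now expect; it is not part of the patch, simply mirrors the new unit tests, and assumes the usual `hdmf.backends.hdf5` import path:

import numpy as np
from hdmf.backends.hdf5 import H5DataIO

# An explicit maxshape passed to H5DataIO takes precedence (stored in io_settings).
dataio = H5DataIO(data=np.arange(10), maxshape=(None,))
assert dataio.maxshape == (None,)

# Without an explicit maxshape, the shape of the wrapped data is reported,
# even though the data is a plain list rather than a DataChunkIterator.
dataio = H5DataIO(data=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
assert dataio.maxshape == (10,)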