Skip to content

Commit

Permalink
review clean up
Browse files Browse the repository at this point in the history
  • Loading branch information
mavaylon1 committed May 6, 2024
1 parent a1a22ee commit 9b4ea75
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 13 deletions.
20 changes: 10 additions & 10 deletions src/hdmf/backends/hdf5/h5tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,8 @@ def copy_file(self, **kwargs):
'doc': 'A HERD object to populate with references.',
'default': None},
{'name': 'expandable', 'type': bool, 'default': True,
'doc': 'Bool to set whether datasets are expandable through chunking by default.'})
'doc': ('Bool to set whether datasets are expandable by setting the max shape for all dimensions',
'of a dataset to None and enabling auto-chunking by default.')})
def write(self, **kwargs):
"""Write the container to an HDF5 file."""
if self.__mode == 'r':
Expand Down Expand Up @@ -808,7 +809,8 @@ def close_linked_files(self):
{'name': 'export_source', 'type': str,
'doc': 'The source of the builders when exporting', 'default': None},
{'name': 'expandable', 'type': bool, 'default': True,
'doc': 'Bool to set whether datasets are expandable through chunking by default.'})
'doc': ('Bool to set whether datasets are expandable by setting the max shape for all dimensions',
'of a dataset to None and enabling auto-chunking by default.')})
def write_builder(self, **kwargs):
f_builder = popargs('builder', kwargs)
link_data, exhaust_dci, export_source = getargs('link_data',
Expand Down Expand Up @@ -986,7 +988,8 @@ def _filler():
{'name': 'export_source', 'type': str,
'doc': 'The source of the builders when exporting', 'default': None},
{'name': 'expandable', 'type': bool, 'default': True,
'doc': 'Bool to set whether datasets are expandable through chunking by default.'},
'doc': ('Bool to set whether datasets are expandable by setting the max shape for all dimensions',
'of a dataset to None and enabling auto-chunking by default.')},
returns='the Group that was created', rtype=Group)
def write_group(self, **kwargs):
parent, builder = popargs('parent', 'builder', kwargs)
Expand Down Expand Up @@ -1088,7 +1091,8 @@ def write_link(self, **kwargs):
{'name': 'export_source', 'type': str,
'doc': 'The source of the builders when exporting', 'default': None},
{'name': 'expandable', 'type': bool, 'default': True,
'doc': 'Bool to set whether datasets are expandable through chunking by default.'},
'doc': ('Bool to set whether datasets are expandable by setting the max shape for all dimensions',
'of a dataset to None and enabling auto-chunking by default.')},
returns='the Dataset that was created', rtype=Dataset)
def write_dataset(self, **kwargs): # noqa: C901
""" Write a dataset to HDF5
Expand Down Expand Up @@ -1464,14 +1468,10 @@ def __list_fill__(cls, parent, name, data, expandable, options=None):
else:
data_shape = get_data_shape(data)
if expandable:
# Don't override existing settings
if 'maxshape' not in io_settings:
io_settings['maxshape'] = tuple([None]*len(data_shape))
else:
# Don't override existing settings
pass
else:
msg = "Datasets written using user defined parameters. Default expandable via chunking: False"
warnings.warn(msg)

# Create the dataset
try:
dset = parent.create_dataset(name, shape=data_shape, dtype=dtype, **io_settings)
Expand Down
9 changes: 6 additions & 3 deletions tests/unit/test_io_hdf5_h5tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -771,15 +771,18 @@ def setUp(self):
self.manager = get_foo_buildmanager()
self.path = get_temp_filepath()

def tearDown(self):
    """Remove the temporary HDF5 file created during the test, if present."""
    # os.path.exists guard keeps this a no-op when the test never wrote the file.
    path = self.path
    if os.path.exists(path):
        os.remove(path)

def test_expand_false(self):
# Setup all the data we need
foo1 = Foo('foo1', [1, 2, 3, 4, 5], "I am foo1", 17, 3.14)
foobucket = FooBucket('bucket1', [foo1])
foofile = FooFile(buckets=[foobucket])

with self.assertWarns(Warning):
with HDF5IO(self.path, manager=self.manager, mode='w') as io:
io.write(foofile, expandable=False)
with HDF5IO(self.path, manager=self.manager, mode='w') as io:
io.write(foofile, expandable=False)

io = HDF5IO(self.path, manager=self.manager, mode='r')
read_foofile = io.read()
Expand Down

0 comments on commit 9b4ea75

Please sign in to comment.