[med-svn] [Git][med-team/hdmf][upstream] New upstream version 3.4.0
Nilesh Patra (@nilesh)
gitlab@salsa.debian.org
Fri Aug 26 16:39:43 BST 2022
Nilesh Patra pushed to branch upstream at Debian Med / hdmf
Commits:
43016591 by Nilesh Patra at 2022-08-26T20:57:40+05:30
New upstream version 3.4.0
- - - - -
26 changed files:
- PKG-INFO
- requirements-dev.txt
- requirements.txt
- setup.cfg
- setup.py
- src/hdmf.egg-info/PKG-INFO
- src/hdmf.egg-info/requires.txt
- src/hdmf/_version.py
- src/hdmf/backends/hdf5/h5_utils.py
- src/hdmf/backends/hdf5/h5tools.py
- src/hdmf/build/manager.py
- src/hdmf/build/objectmapper.py
- src/hdmf/common/__init__.py
- src/hdmf/common/alignedtable.py
- src/hdmf/container.py
- src/hdmf/data_utils.py
- src/hdmf/monitor.py
- src/hdmf/spec/namespace.py
- src/hdmf/spec/spec.py
- src/hdmf/utils.py
- src/hdmf/validate/validator.py
- tests/unit/build_tests/test_io_map_data.py
- tests/unit/test_container.py
- tests/unit/test_io_hdf5_h5tools.py
- tests/unit/utils_test/test_core_DataIO.py
- tests/unit/utils_test/test_docval.py
Changes:
=====================================
PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hdmf
-Version: 3.3.2
+Version: 3.4.0
Summary: A package for standardizing hierarchical object data
Home-page: https://github.com/hdmf-dev/hdmf
Author: Andrew Tritt
=====================================
requirements-dev.txt
=====================================
@@ -1,12 +1,12 @@
# pinned dependencies to reproduce an entire development environment to use HDMF, run HDMF tests, check code style,
# compute coverage, and create test environments
codecov==2.1.12
-coverage==6.3.2
-flake8==4.0.1
+coverage==6.4.2
+flake8==5.0.4
flake8-debugger==4.1.2
flake8-print==5.0.0
-importlib-metadata==4.2.0
+importlib-metadata==4.2.0 # there may be compatibility issues with newer versions
pytest==7.1.2
pytest-cov==3.0.0
python-dateutil==2.8.2
-tox==3.25.0
+tox==3.25.1
=====================================
requirements.txt
=====================================
@@ -1,8 +1,9 @@
# pinned dependencies to reproduce an entire development environment to use HDMF
-h5py==3.6.0
-jsonschema==4.5.1
+# note that python 3.7 end of life is 27 Jun 2023
+h5py==3.7.0
+jsonschema==4.9.1
numpy==1.21.5 # note that numpy 1.22 dropped python 3.7 support
pandas==1.3.5 # note that pandas 1.4 dropped python 3.7 support
ruamel.yaml==0.17.21
scipy==1.7.3 # note that scipy 1.8 dropped python 3.7 support
-setuptools==62.2.0
+setuptools==63.4.1
=====================================
setup.cfg
=====================================
@@ -19,9 +19,10 @@ exclude =
versioneer.py
src/hdmf/_version.py
src/hdmf/_due.py
+ docs/source/tutorials/
+ docs/_build/
per-file-ignores =
docs/gallery/*:E402,T001
- docs/source/tutorials/*:E402,T001
src/hdmf/__init__.py:F401
src/hdmf/backends/__init__.py:F401
src/hdmf/backends/hdf5/__init__.py:F401
=====================================
setup.py
=====================================
@@ -20,7 +20,7 @@ schema_dir = 'common/hdmf-common-schema/common'
reqs = [
'h5py>=2.10,<4',
'jsonschema>=2.6.0,<5',
- 'numpy>=1.16,<1.23',
+ 'numpy>=1.16,<1.24',
'pandas>=1.0.5,<2',
'ruamel.yaml>=0.16,<1',
'scipy>=1.1,<2',
=====================================
src/hdmf.egg-info/PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hdmf
-Version: 3.3.2
+Version: 3.4.0
Summary: A package for standardizing hierarchical object data
Home-page: https://github.com/hdmf-dev/hdmf
Author: Andrew Tritt
=====================================
src/hdmf.egg-info/requires.txt
=====================================
@@ -1,6 +1,6 @@
h5py<4,>=2.10
jsonschema<5,>=2.6.0
-numpy<1.23,>=1.16
+numpy<1.24,>=1.16
pandas<2,>=1.0.5
ruamel.yaml<1,>=0.16
scipy<2,>=1.1
=====================================
src/hdmf/_version.py
=====================================
@@ -8,11 +8,11 @@ import json
version_json = '''
{
- "date": "2022-06-27T16:04:19-0700",
+ "date": "2022-08-05T10:51:30-0700",
"dirty": false,
"error": null,
- "full-revisionid": "0e50852145bbf9a4676b6edc936317f882f9a47e",
- "version": "3.3.2"
+ "full-revisionid": "7be40229d778dea940134753de957f2457a11e1f",
+ "version": "3.4.0"
}
''' # END VERSION_JSON
=====================================
src/hdmf/backends/hdf5/h5_utils.py
=====================================
@@ -21,7 +21,7 @@ from ...data_utils import DataIO, AbstractDataChunkIterator
from ...query import HDMFDataset, ReferenceResolver, ContainerResolver, BuilderResolver
from ...region import RegionSlicer
from ...spec import SpecWriter, SpecReader
-from ...utils import docval, getargs, popargs, call_docval_func, get_docval
+from ...utils import docval, getargs, popargs, get_docval
class HDF5IODataChunkIteratorQueue(deque):
@@ -89,7 +89,7 @@ class H5Dataset(HDMFDataset):
{'name': 'io', 'type': 'HDF5IO', 'doc': 'the IO object that was used to read the underlying dataset'})
def __init__(self, **kwargs):
self.__io = popargs('io', kwargs)
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
@property
def io(self):
@@ -180,7 +180,7 @@ class AbstractH5TableDataset(DatasetOfReferences):
'doc': 'the IO object that was used to read the underlying dataset'})
def __init__(self, **kwargs):
types = popargs('types', kwargs)
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
self.__refgetters = dict()
for i, t in enumerate(types):
if t is RegionReference:
@@ -378,9 +378,9 @@ class H5SpecReader(SpecReader):
@docval({'name': 'group', 'type': Group, 'doc': 'the HDF5 group to read specs from'})
def __init__(self, **kwargs):
- self.__group = getargs('group', kwargs)
- super_kwargs = {'source': "%s:%s" % (os.path.abspath(self.__group.file.name), self.__group.name)}
- call_docval_func(super().__init__, super_kwargs)
+ self.__group = popargs('group', kwargs)
+ source = "%s:%s" % (os.path.abspath(self.__group.file.name), self.__group.name)
+ super().__init__(source=source)
self.__cache = None
def __read(self, path):
@@ -475,11 +475,20 @@ class H5DataIO(DataIO):
{'name': 'allow_plugin_filters',
'type': bool,
'doc': 'Enable passing dynamically loaded filters as compression parameter',
- 'default': False}
+ 'default': False},
+ {'name': 'shape',
+ 'type': tuple,
+ 'doc': 'the shape of the new dataset, used only if data is None',
+ 'default': None},
+ {'name': 'dtype',
+ 'type': (str, type, np.dtype),
+ 'doc': 'the data type of the new dataset, used only if data is None',
+ 'default': None}
)
def __init__(self, **kwargs):
# Get the list of I/O options that user has passed in
- ioarg_names = [name for name in kwargs.keys() if name not in ['data', 'link_data', 'allow_plugin_filters']]
+ ioarg_names = [name for name in kwargs.keys() if name not in ['data', 'link_data', 'allow_plugin_filters',
+ 'dtype', 'shape']]
# Remove the ioargs from kwargs
ioarg_values = [popargs(argname, kwargs) for argname in ioarg_names]
# Consume link_data parameter
@@ -491,9 +500,12 @@ class H5DataIO(DataIO):
self.__link_data = False
warnings.warn('link_data parameter in H5DataIO will be ignored')
# Call the super constructor and consume the data parameter
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
# Construct the dict with the io args, ignoring all options that were set to None
self.__iosettings = {k: v for k, v in zip(ioarg_names, ioarg_values) if v is not None}
+ if self.data is None:
+ self.__iosettings['dtype'] = self.dtype
+ self.__iosettings['shape'] = self.shape
# Set io_properties for DataChunkIterators
if isinstance(self.data, AbstractDataChunkIterator):
# Define the chunking options if the user has not set them explicitly.
@@ -526,6 +538,18 @@ class H5DataIO(DataIO):
for k in self.__iosettings.keys():
warnings.warn("%s in H5DataIO will be ignored with H5DataIO.data being an HDF5 dataset" % k)
+ self.__dataset = None
+
+ @property
+ def dataset(self):
+ return self.__dataset
+
+ @dataset.setter
+ def dataset(self, val):
+ if self.__dataset is not None:
+ raise ValueError("Cannot overwrite H5DataIO.dataset")
+ self.__dataset = val
+
def get_io_params(self):
"""
Returns a dict with the I/O parameters specified in this DataIO.
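The shape/dtype options added above let H5DataIO describe a dataset that has no data yet; the backend creates the HDF5 dataset at write time and hands it back through the new dataset property. A minimal usage sketch (assuming HDMF 3.4.0; variable names are illustrative):
    from hdmf.backends.hdf5 import H5DataIO
    # Reserve a 1-D integer dataset of length 5 without wrapping any data.
    dataio = H5DataIO(shape=(5,), dtype=int)
    # After io.write(...), the created h5py.Dataset is available as dataio.dataset;
    # assigning dataio.dataset a second time raises ValueError.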
=====================================
src/hdmf/backends/hdf5/h5tools.py
=====================================
@@ -3,7 +3,7 @@ import os.path
import warnings
from collections import deque
from functools import partial
-from pathlib import Path
+from pathlib import Path, PurePosixPath as pp
import numpy as np
import h5py
@@ -19,7 +19,7 @@ from ...build import (Builder, GroupBuilder, DatasetBuilder, LinkBuilder, BuildM
from ...container import Container
from ...data_utils import AbstractDataChunkIterator
from ...spec import RefSpec, DtypeSpec, NamespaceCatalog
-from ...utils import docval, getargs, popargs, call_docval_func, get_data_shape, fmt_docval_args, get_docval, StrDataset
+from ...utils import docval, getargs, popargs, get_data_shape, get_docval, StrDataset
from ..utils import NamespaceToBuilderHelper, WriteStatusTracker
ROOT_NAME = 'root'
@@ -36,16 +36,17 @@ class HDF5IO(HDMFIO):
__ns_spec_path = 'namespace' # path to the namespace dataset within a namespace group
- @docval({'name': 'path', 'type': (str, Path), 'doc': 'the path to the HDF5 file'},
- {'name': 'manager', 'type': (TypeMap, BuildManager),
- 'doc': 'the BuildManager or a TypeMap to construct a BuildManager to use for I/O', 'default': None},
+ @docval({'name': 'path', 'type': (str, Path), 'doc': 'the path to the HDF5 file', 'default': None},
{'name': 'mode', 'type': str,
'doc': ('the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-", "x"). '
'See `h5py.File <http://docs.h5py.org/en/latest/high/file.html#opening-creating-files>`_ for '
- 'more details.')},
+ 'more details.'),
+ 'default': 'r'},
+ {'name': 'manager', 'type': (TypeMap, BuildManager),
+ 'doc': 'the BuildManager or a TypeMap to construct a BuildManager to use for I/O', 'default': None},
{'name': 'comm', 'type': 'Intracomm',
'doc': 'the MPI communicator to use for parallel I/O', 'default': None},
- {'name': 'file', 'type': File, 'doc': 'a pre-existing h5py.File object', 'default': None},
+ {'name': 'file', 'type': [File, "S3File"], 'doc': 'a pre-existing h5py.File object', 'default': None},
{'name': 'driver', 'type': str, 'doc': 'driver for h5py to use when opening HDF5 file', 'default': None})
def __init__(self, **kwargs):
"""Open an HDF5 file for IO.
@@ -54,10 +55,13 @@ class HDF5IO(HDMFIO):
path, manager, mode, comm, file_obj, driver = popargs('path', 'manager', 'mode', 'comm', 'file', 'driver',
kwargs)
+ if path is None and file_obj is None:
+ raise ValueError("You must supply either a path or a file.")
+
if isinstance(path, Path):
path = str(path)
- if file_obj is not None and os.path.abspath(file_obj.filename) != os.path.abspath(path):
+ if file_obj is not None and path is not None and os.path.abspath(file_obj.filename) != os.path.abspath(path):
msg = 'You argued %s as this object\'s path, ' % path
msg += 'but supplied a file with filename: %s' % file_obj.filename
raise ValueError(msg)
@@ -351,7 +355,7 @@ class HDF5IO(HDMFIO):
% (self.source, self.__mode))
cache_spec = popargs('cache_spec', kwargs)
- call_docval_func(super().write, kwargs)
+ super().write(**kwargs)
if cache_spec:
self.__cache_spec()
@@ -409,7 +413,7 @@ class HDF5IO(HDMFIO):
write_args['export_source'] = src_io.source # pass export_source=src_io.source to write_builder
ckwargs = kwargs.copy()
ckwargs['write_args'] = write_args
- call_docval_func(super().export, ckwargs)
+ super().export(**ckwargs)
if cache_spec:
self.__cache_spec()
@@ -444,7 +448,7 @@ class HDF5IO(HDMFIO):
raise UnsupportedOperation("Cannot read from file %s in mode '%s'. Please use mode 'r', 'r+', or 'a'."
% (self.source, self.__mode))
try:
- return call_docval_func(super().read, kwargs)
+ return super().read(**kwargs)
except UnsupportedOperation as e:
if str(e) == 'Cannot build data. There are no values.': # pragma: no cover
raise UnsupportedOperation("Cannot read data from file %s in mode '%s'. There are no values."
@@ -1036,9 +1040,11 @@ class HDF5IO(HDMFIO):
return None
name = builder.name
data = builder.data
+ dataio = None
options = dict() # dict with additional
if isinstance(data, H5DataIO):
options['io_settings'] = data.io_settings
+ dataio = data
link_data = data.link_data
data = data.data
else:
@@ -1229,8 +1235,12 @@ class HDF5IO(HDMFIO):
return
# write a "regular" dataset
else:
+ # Create an empty dataset
+ if data is None:
+ dset = self.__setup_empty_dset__(parent, name, options['io_settings'])
+ dataio.dataset = dset
# Write a scalar dataset containing a single string
- if isinstance(data, (str, bytes)):
+ elif isinstance(data, (str, bytes)):
dset = self.__scalar_fill__(parent, name, data, options)
# Iterative write of a data chunk iterator
elif isinstance(data, AbstractDataChunkIterator):
@@ -1315,6 +1325,35 @@ class HDF5IO(HDMFIO):
raise Exception("Could not create dataset %s in %s" % (name, parent.name)) from exc
return dset
+ @classmethod
+ def __setup_empty_dset__(cls, parent, name, io_settings):
+ """
+ Setup a dataset for writing to one-chunk-at-a-time based on the given DataChunkIterator
+
+ :param parent: The parent object to which the dataset should be added
+ :type parent: h5py.Group, h5py.File
+ :param name: The name of the dataset
+ :type name: str
+ :param data: The data to be written.
+ :type data: DataChunkIterator
+ :param options: Dict with options for creating a dataset. available options are 'dtype' and 'io_settings'
+ :type options: dict
+
+ """
+ # Define the shape of the data if not provided by the user
+ if 'shape' not in io_settings:
+ raise ValueError(f"Cannot setup empty dataset {pp(parent.name, name)} without shape")
+ if 'dtype' not in io_settings:
+ raise ValueError(f"Cannot setup empty dataset {pp(parent.name, name)} without dtype")
+ if isinstance(io_settings['dtype'], str):
+ # map to real dtype if we were given a string
+ io_settings['dtype'] = cls.__dtypes.get(io_settings['dtype'])
+ try:
+ dset = parent.create_dataset(name, **io_settings)
+ except Exception as exc:
+ raise Exception("Could not create dataset %s in %s" % (name, parent.name)) from exc
+ return dset
+
@classmethod
def __chunked_iter_fill__(cls, parent, name, data, options=None):
"""
@@ -1471,5 +1510,4 @@ class HDF5IO(HDMFIO):
data = ...
data = H5DataIO(data)
"""
- cargs, ckwargs = fmt_docval_args(H5DataIO.__init__, kwargs)
- return H5DataIO(*cargs, **ckwargs)
+ return H5DataIO.__init__(**kwargs)
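With path and mode now defaulting to None and 'r', HDF5IO can be opened from a pre-existing h5py.File alone, and omitting both path and file raises the new ValueError. A short sketch (assuming HDMF 3.4.0; the file name is a placeholder):
    import h5py
    from hdmf.backends.hdf5 import HDF5IO
    from hdmf.common import get_manager
    f = h5py.File('example.h5', 'r')              # placeholder file
    io = HDF5IO(file=f, manager=get_manager())    # mode defaults to 'r'
    # HDF5IO() with neither path nor file raises:
    #   ValueError: You must supply either a path or a file.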
=====================================
src/hdmf/build/manager.py
=====================================
@@ -5,9 +5,9 @@ from copy import copy
from .builders import DatasetBuilder, GroupBuilder, LinkBuilder, Builder, BaseBuilder
from .classgenerator import ClassGenerator, CustomClassGenerator, MCIClassGenerator
from ..container import AbstractContainer, Container, Data
-from ..spec import DatasetSpec, GroupSpec, NamespaceCatalog, SpecReader
+from ..spec import DatasetSpec, GroupSpec, NamespaceCatalog
from ..spec.spec import BaseStorageSpec
-from ..utils import docval, getargs, call_docval_func, ExtenderMeta
+from ..utils import docval, getargs, ExtenderMeta, get_docval
class Proxy:
@@ -458,12 +458,7 @@ class TypeMap:
generator = getargs('generator', kwargs)
self.__class_generator.register_generator(generator)
- @docval({'name': 'namespace_path', 'type': str, 'doc': 'the path to the file containing the namespaces(s) to load'},
- {'name': 'resolve', 'type': bool,
- 'doc': 'whether or not to include objects from included/parent spec objects', 'default': True},
- {'name': 'reader',
- 'type': SpecReader,
- 'doc': 'the class to user for reading specifications', 'default': None},
+ @docval(*get_docval(NamespaceCatalog.load_namespaces),
returns="the namespaces loaded from the given file", rtype=dict)
def load_namespaces(self, **kwargs):
'''Load namespaces from a namespace file.
@@ -471,7 +466,7 @@ class TypeMap:
it will process the return value to keep track of what types were included in the loaded namespaces. Calling
load_namespaces here has the advantage of being able to keep track of type dependencies across namespaces.
'''
- deps = call_docval_func(self.__ns_catalog.load_namespaces, kwargs)
+ deps = self.__ns_catalog.load_namespaces(**kwargs)
for new_ns, ns_deps in deps.items():
for src_ns, types in ns_deps.items():
for dt in types:
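TypeMap.load_namespaces now reuses the docval of NamespaceCatalog.load_namespaces via get_docval instead of repeating it. The general pattern, shown with hypothetical classes:
    from hdmf.utils import docval, get_docval
    class Base:
        @docval({'name': 'path', 'type': str, 'doc': 'the file to load'})
        def load(self, **kwargs):
            return kwargs['path']
    class Wrapper:
        # reuse Base.load's argument specification instead of copying it
        @docval(*get_docval(Base.load))
        def load(self, **kwargs):
            return Base().load(**kwargs)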
=====================================
src/hdmf/build/objectmapper.py
=====================================
@@ -284,7 +284,10 @@ class ObjectMapper(metaclass=ExtenderMeta):
# return the list of DtypeSpecs
return value, spec_dtype
if isinstance(value, DataIO):
- return value, cls.convert_dtype(spec, value.data, spec_dtype)[1]
+ if value.data is None:
+ return value, value.dtype
+ else:
+ return value, cls.convert_dtype(spec, value.data, spec_dtype)[1]
if spec_dtype is None or spec_dtype == 'numeric' or type(value) in cls.__no_convert:
# infer type from value
if hasattr(value, 'dtype'): # covers numpy types, AbstractDataChunkIterator
@@ -973,9 +976,6 @@ class ObjectMapper(metaclass=ExtenderMeta):
self.logger.debug(" Skipping dataset - no attribute value")
continue
attr_value = self.__check_ref_resolver(attr_value)
- if isinstance(attr_value, DataIO) and attr_value.data is None:
- self.logger.debug(" Skipping dataset - attribute is dataio or has no data")
- continue
if isinstance(attr_value, LinkBuilder):
self.logger.debug(" Adding %s '%s' for spec name: %s, %s: %s, %s: %s"
% (attr_value.name, attr_value.__class__.__name__,
@@ -1254,8 +1254,12 @@ class ObjectMapper(metaclass=ExtenderMeta):
def __new_container__(self, cls, container_source, parent, object_id, **kwargs):
"""A wrapper function for ensuring a container gets everything set appropriately"""
- obj = cls.__new__(cls, container_source=container_source, parent=parent, object_id=object_id)
+ obj = cls.__new__(cls, container_source=container_source, parent=parent, object_id=object_id,
+ in_construct_mode=True)
+ # obj has been created and is in construction mode, indicating that the object is being constructed by
+ # the automatic construct process during read, rather than by the user
obj.__init__(**kwargs)
+ obj._in_construct_mode = False # reset to False to indicate that the construction of the object is complete
return obj
@docval({'name': 'container', 'type': AbstractContainer,
=====================================
src/hdmf/common/__init__.py
=====================================
@@ -9,7 +9,7 @@ EXP_NAMESPACE = 'hdmf-experimental'
from ..spec import NamespaceCatalog # noqa: E402
-from ..utils import docval, getargs, call_docval_func, get_docval, fmt_docval_args # noqa: E402
+from ..utils import docval, getargs, get_docval # noqa: E402
from ..backends.io import HDMFIO # noqa: E402
from ..backends.hdf5 import HDF5IO # noqa: E402
from ..validate import ValidatorMap # noqa: E402
@@ -148,17 +148,15 @@ def get_type_map(**kwargs):
return type_map
-@docval({'name': 'extensions', 'type': (str, TypeMap, list),
- 'doc': 'a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps',
- 'default': None},
- returns="the namespaces loaded from the given file", rtype=tuple,
+@docval(*get_docval(get_type_map),
+ returns="a build manager with namespaces loaded from the given file", rtype=BuildManager,
is_method=False)
def get_manager(**kwargs):
'''
Get a BuildManager to use for I/O using the given extensions. If no extensions are provided,
return a BuildManager that uses the core namespace
'''
- type_map = call_docval_func(get_type_map, kwargs)
+ type_map = get_type_map(**kwargs)
return BuildManager(type_map)
@@ -188,8 +186,7 @@ def get_hdf5io(**kwargs):
manager = getargs('manager', kwargs)
if manager is None:
kwargs['manager'] = get_manager()
- cargs, ckwargs = fmt_docval_args(HDF5IO.__init__, kwargs)
- return HDF5IO(*cargs, **ckwargs)
+ return HDF5IO.__init__(**kwargs)
# load the hdmf-common namespace
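get_manager now takes its arguments from get_type_map's docval and is documented to return a BuildManager; typical usage is unchanged:
    from hdmf.common import get_manager
    manager = get_manager()   # BuildManager over the core hdmf-common namespace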
=====================================
src/hdmf/common/alignedtable.py
=====================================
@@ -8,7 +8,7 @@ import pandas as pd
from . import register_class
from .table import DynamicTable
-from ..utils import docval, getargs, call_docval_func, popargs, get_docval, AllowPositional
+from ..utils import docval, getargs, popargs, get_docval, AllowPositional
@register_class('AlignedDynamicTable')
@@ -172,7 +172,7 @@ class AlignedDynamicTable(DynamicTable):
category_name = popargs('category', kwargs)
if category_name is None:
# Add the column to our main table
- call_docval_func(super().add_column, kwargs)
+ super().add_column(**kwargs)
else:
# Add the column to a sub-category table
try:
@@ -208,7 +208,7 @@ class AlignedDynamicTable(DynamicTable):
# Add the data to our main dynamic table
data['id'] = row_id
data['enforce_unique_id'] = enforce_unique_id
- call_docval_func(super().add_row, data)
+ super().add_row(**data)
# Add the data to all out dynamic table categories
for category, values in category_data.items():
=====================================
src/hdmf/container.py
=====================================
@@ -10,8 +10,7 @@ import numpy as np
import pandas as pd
from .data_utils import DataIO, append_data, extend_data
-from .utils import (docval, get_docval, getargs, ExtenderMeta, get_data_shape, fmt_docval_args,
- popargs, LabelledDict)
+from .utils import docval, get_docval, getargs, ExtenderMeta, get_data_shape, popargs, LabelledDict
def _set_exp(cls):
@@ -176,6 +175,13 @@ class AbstractContainer(metaclass=ExtenderMeta):
cls.__fieldsconf = tuple(all_fields_conf)
def __new__(cls, *args, **kwargs):
+ """
+ Static method of the object class called by Python to create the object first and then
+ __init__() is called to initialize the object's attributes.
+
+ NOTE: this method is called directly from ObjectMapper.__new_container__ during the process of
+ constructing the object from builders that are read from a file.
+ """
inst = super().__new__(cls)
if cls._experimental:
warn(_exp_warn_msg(cls))
@@ -184,6 +190,9 @@ class AbstractContainer(metaclass=ExtenderMeta):
inst.__children = list()
inst.__modified = True
inst.__object_id = kwargs.pop('object_id', str(uuid4()))
+ # this variable is being passed in from ObjectMapper.__new_container__ and is
+ # reset to False in that method after the object has been initialized by __init__
+ inst._in_construct_mode = kwargs.pop('in_construct_mode', False)
inst.parent = kwargs.pop('parent', None)
return inst
@@ -594,10 +603,6 @@ class DataRegion(Data):
pass
-def _not_parent(arg):
- return arg['name'] != 'parent'
-
-
class MultiContainerInterface(Container):
"""Class that dynamically defines methods to support a Container holding multiple Containers of the same type.
@@ -756,11 +761,10 @@ class MultiContainerInterface(Container):
def __make_create(cls, func_name, add_name, container_type):
doc = "Create %s and add it to this %s" % (cls.__add_article(container_type), cls.__name__)
- @docval(*filter(_not_parent, get_docval(container_type.__init__)), func_name=func_name, doc=doc,
+ @docval(*get_docval(container_type.__init__), func_name=func_name, doc=doc,
returns="the %s object that was created" % cls.__join(container_type), rtype=container_type)
def _func(self, **kwargs):
- cargs, ckwargs = fmt_docval_args(container_type.__init__, kwargs)
- ret = container_type(*cargs, **ckwargs)
+ ret = container_type(**kwargs)
getattr(self, add_name)(ret)
return ret
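The new in_construct_mode flag is normally set only by ObjectMapper.__new_container__ while objects are being built during read; the pattern it uses (also exercised by the new container tests below) looks roughly like this:
    from hdmf.container import Container
    obj = Container.__new__(Container, parent=None, object_id=None, in_construct_mode=True)
    obj.__init__('obj1')            # run the normal constructor
    obj._in_construct_mode = False  # reset once construction is complete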
=====================================
src/hdmf/data_utils.py
=====================================
@@ -907,10 +907,28 @@ class DataIO:
used to pass dataset-specific I/O parameters to the particular HDMFIO backend.
"""
- @docval({'name': 'data', 'type': 'array_data', 'doc': 'the data to be written', 'default': None})
+ @docval({'name': 'data',
+ 'type': 'array_data',
+ 'doc': 'the data to be written',
+ 'default': None},
+ {'name': 'dtype',
+ 'type': (type, np.dtype),
+ 'doc': 'the data type of the dataset. Not used if data is specified.',
+ 'default': None},
+ {'name': 'shape',
+ 'type': tuple,
+ 'doc': 'the shape of the dataset. Not used if data is specified.',
+ 'default': None})
def __init__(self, **kwargs):
- data = popargs('data', kwargs)
+ data, dtype, shape = popargs('data', 'dtype', 'shape', kwargs)
+ if data is not None:
+ if dtype is not None:
+ raise ValueError("Setting the dtype when data is not None is not supported")
+ if shape is not None:
+ raise ValueError("Setting the shape when data is not None is not supported")
self.__data = data
+ self.__dtype = dtype
+ self.__shape = shape
def get_io_params(self):
"""
@@ -928,8 +946,20 @@ class DataIO:
"""Set the wrapped data object"""
if self.__data is not None:
raise ValueError("cannot overwrite 'data' on DataIO")
+ if not (self.__dtype is None and self.__shape is None):
+ raise ValueError("Setting data when dtype and shape are not None is not supported")
self.__data = val
+ @property
+ def dtype(self):
+ """Get the wrapped data object"""
+ return self.__dtype or self.__getattr__("dtype")
+
+ @property
+ def shape(self):
+ """Get the wrapped data object"""
+ return self.__shape or self.__getattr__("shape")
+
def __copy__(self):
"""
Define a custom copy method for shallow copy..
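With the checks above, DataIO accepts either wrapped data or a dtype/shape pair, but not both. A brief sketch (assuming HDMF 3.4.0):
    import numpy as np
    from hdmf.data_utils import DataIO
    placeholder = DataIO(shape=(3,), dtype=int)   # no data yet; shape and dtype recorded
    wrapped = DataIO(data=np.arange(5))           # wrapping data works as before
    # DataIO(data=np.arange(5), dtype=int) raises:
    #   ValueError: Setting the dtype when data is not None is not supported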
=====================================
src/hdmf/monitor.py
=====================================
@@ -1,7 +1,7 @@
from abc import ABCMeta, abstractmethod
from .data_utils import AbstractDataChunkIterator, DataChunkIterator, DataChunk
-from .utils import docval, getargs, call_docval_func
+from .utils import docval, getargs
class NotYetExhausted(Exception):
@@ -60,7 +60,7 @@ class DataChunkProcessor(AbstractDataChunkIterator, metaclass=ABCMeta):
class NumSampleCounter(DataChunkProcessor):
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
self.__sample_count = 0
@docval({'name': 'data_chunk', 'type': DataChunk, 'doc': 'a chunk to process'})
=====================================
src/hdmf/spec/namespace.py
=====================================
@@ -9,7 +9,7 @@ from warnings import warn
from .catalog import SpecCatalog
from .spec import DatasetSpec, GroupSpec
-from ..utils import docval, getargs, popargs, get_docval, call_docval_func
+from ..utils import docval, getargs, popargs, get_docval
_namespace_args = [
{'name': 'doc', 'type': str, 'doc': 'a description about what this namespace represents'},
@@ -195,8 +195,7 @@ class YAMLSpecReader(SpecReader):
@docval({'name': 'indir', 'type': str, 'doc': 'the path spec files are relative to', 'default': '.'})
def __init__(self, **kwargs):
- super_kwargs = {'source': kwargs['indir']}
- call_docval_func(super().__init__, super_kwargs)
+ super().__init__(source=kwargs['indir'])
def read_namespace(self, namespace_path):
namespaces = None
=====================================
src/hdmf/spec/spec.py
=====================================
@@ -4,7 +4,7 @@ from collections import OrderedDict
from copy import deepcopy
from warnings import warn
-from ..utils import docval, getargs, popargs, get_docval, fmt_docval_args
+from ..utils import docval, getargs, popargs, get_docval
NAME_WILDCARD = None # this is no longer used, but kept for backward compatibility
ZERO_OR_ONE = '?'
@@ -515,8 +515,7 @@ class BaseStorageSpec(Spec):
@docval(*_attr_args)
def add_attribute(self, **kwargs):
''' Add an attribute to this specification '''
- pargs, pkwargs = fmt_docval_args(AttributeSpec.__init__, kwargs)
- spec = AttributeSpec(*pargs, **pkwargs)
+ spec = AttributeSpec(**kwargs)
self.set_attribute(spec)
return spec
=====================================
src/hdmf/utils.py
=====================================
@@ -410,6 +410,12 @@ def fmt_docval_args(func, kwargs):
Useful for methods that wrap other methods
'''
+ warnings.warn("fmt_docval_args will be deprecated in a future version of HDMF. Instead of using fmt_docval_args, "
+ "call the function directly with the kwargs. Please note that fmt_docval_args "
+ "removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
+ "includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
+ "is set), then you will need to pop the extra arguments out of kwargs before calling the function.",
+ PendingDeprecationWarning)
func_docval = getattr(func, docval_attr_name, None)
ret_args = list()
ret_kwargs = dict()
@@ -429,8 +435,48 @@ def fmt_docval_args(func, kwargs):
return ret_args, ret_kwargs
+# def _remove_extra_args(func, kwargs):
+# """Return a dict of only the keyword arguments that are accepted by the function's docval.
+#
+# If the docval specifies allow_extra=True, then the original kwargs are returned.
+# """
+# # NOTE: this has the same functionality as the to-be-deprecated fmt_docval_args except that
+# # kwargs are kept as kwargs instead of parsed into args and kwargs
+# func_docval = getattr(func, docval_attr_name, None)
+# if func_docval:
+# if func_docval['allow_extra']:
+# # if extra args are allowed, return all args
+# return kwargs
+# else:
+# # save only the arguments listed in the function's docval (skip any others present in kwargs)
+# ret_kwargs = dict()
+# for arg in func_docval[__docval_args_loc]:
+# val = kwargs.get(arg['name'], None)
+# if val is not None: # do not return arguments that are not present or have value None
+# ret_kwargs[arg['name']] = val
+# return ret_kwargs
+# else:
+# raise ValueError('No docval found on %s' % str(func))
+
+
def call_docval_func(func, kwargs):
- fargs, fkwargs = fmt_docval_args(func, kwargs)
+ """Call the function with only the keyword arguments that are accepted by the function's docval.
+
+ Extra keyword arguments are not passed to the function unless the function's docval has allow_extra=True.
+ """
+ warnings.warn("call_docval_func will be deprecated in a future version of HDMF. Instead of using call_docval_func, "
+ "call the function directly with the kwargs. Please note that call_docval_func "
+ "removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
+ "includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
+ "is set), then you will need to pop the extra arguments out of kwargs before calling the function.",
+ PendingDeprecationWarning)
+ with warnings.catch_warnings(record=True):
+ # catch and ignore only PendingDeprecationWarnings from fmt_docval_args so that two
+ # PendingDeprecationWarnings saying the same thing are not raised
+ warnings.simplefilter("ignore", UserWarning)
+ warnings.simplefilter("always", PendingDeprecationWarning)
+ fargs, fkwargs = fmt_docval_args(func, kwargs)
+
return func(*fargs, **fkwargs)
@@ -826,7 +872,7 @@ def get_data_shape(data, strict_no_data_load=False):
# NOTE: data.maxshape will fail on empty h5py.Dataset without shape or maxshape. this will be fixed in h5py 3.0
if hasattr(data, 'maxshape'):
return data.maxshape
- if hasattr(data, 'shape'):
+ if hasattr(data, 'shape') and data.shape is not None:
return data.shape
if isinstance(data, dict):
return None
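For code that still uses the soon-to-be-deprecated helpers, the migration this change asks for is to call the target function directly and pop any keys its docval does not accept. A hypothetical subclass sketch:
    from hdmf.container import Container
    from hdmf.utils import docval, popargs
    class NamedThing(Container):
        # hypothetical example; replaces call_docval_func(super().__init__, kwargs)
        @docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
                {'name': 'note', 'type': str, 'doc': 'not accepted by Container', 'default': None})
        def __init__(self, **kwargs):
            note = popargs('note', kwargs)   # strip args the parent docval does not allow
            super().__init__(**kwargs)       # direct call with the remaining kwargs
            self.note = note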
=====================================
src/hdmf/validate/validator.py
=====================================
@@ -13,7 +13,7 @@ from ..build.builders import BaseBuilder
from ..spec import Spec, AttributeSpec, GroupSpec, DatasetSpec, RefSpec, LinkSpec
from ..spec import SpecNamespace
from ..spec.spec import BaseStorageSpec, DtypeHelper
-from ..utils import docval, getargs, call_docval_func, pystr, get_data_shape
+from ..utils import docval, getargs, pystr, get_data_shape
from ..query import ReferenceResolver
@@ -291,7 +291,7 @@ class AttributeValidator(Validator):
@docval({'name': 'spec', 'type': AttributeSpec, 'doc': 'the specification to use to validate'},
{'name': 'validator_map', 'type': ValidatorMap, 'doc': 'the ValidatorMap to use during validation'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
@docval({'name': 'value', 'type': None, 'doc': 'the value to validate'},
returns='a list of Errors', rtype=list)
@@ -342,7 +342,7 @@ class BaseStorageValidator(Validator):
@docval({'name': 'spec', 'type': BaseStorageSpec, 'doc': 'the specification to use to validate'},
{'name': 'validator_map', 'type': ValidatorMap, 'doc': 'the ValidatorMap to use during validation'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
self.__attribute_validators = dict()
for attr in self.spec.attributes:
self.__attribute_validators[attr.name] = AttributeValidator(attr, self.vmap)
@@ -373,7 +373,7 @@ class DatasetValidator(BaseStorageValidator):
@docval({'name': 'spec', 'type': DatasetSpec, 'doc': 'the specification to use to validate'},
{'name': 'validator_map', 'type': ValidatorMap, 'doc': 'the ValidatorMap to use during validation'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
@docval({"name": "builder", "type": DatasetBuilder, "doc": "the builder to validate"},
returns='a list of Errors', rtype=list)
@@ -413,7 +413,7 @@ class GroupValidator(BaseStorageValidator):
@docval({'name': 'spec', 'type': GroupSpec, 'doc': 'the specification to use to validate'},
{'name': 'validator_map', 'type': ValidatorMap, 'doc': 'the ValidatorMap to use during validation'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
@docval({"name": "builder", "type": GroupBuilder, "doc": "the builder to validate"}, # noqa: C901
returns='a list of Errors', rtype=list)
=====================================
tests/unit/build_tests/test_io_map_data.py
=====================================
@@ -11,7 +11,7 @@ from hdmf.spec import (AttributeSpec, DatasetSpec, DtypeSpec, GroupSpec, SpecCat
RefSpec)
from hdmf.spec.spec import ZERO_OR_MANY
from hdmf.testing import TestCase
-from hdmf.utils import docval, getargs, call_docval_func
+from hdmf.utils import docval, getargs
from tests.unit.utils import Foo, CORE_NAMESPACE
@@ -149,7 +149,7 @@ class BazScalar(Data):
@docval({'name': 'name', 'type': str, 'doc': 'the name of this BazScalar'},
{'name': 'data', 'type': int, 'doc': 'some data'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
class TestDataMapScalar(TestCase):
@@ -203,7 +203,7 @@ class BazScalarCompound(Data):
@docval({'name': 'name', 'type': str, 'doc': 'the name of this BazScalar'},
{'name': 'data', 'type': 'array_data', 'doc': 'some data'})
def __init__(self, **kwargs):
- call_docval_func(super().__init__, kwargs)
+ super().__init__(**kwargs)
class TestDataMapScalarCompound(TestCase):
=====================================
tests/unit/test_container.py
=====================================
@@ -1,4 +1,6 @@
import numpy as np
+from uuid import uuid4, UUID
+
from hdmf.container import AbstractContainer, Container, Data
from hdmf.testing import TestCase
from hdmf.utils import docval
@@ -10,22 +12,47 @@ class Subcontainer(Container):
class TestContainer(TestCase):
- def test_constructor(self):
- """Test that constructor properly sets parent and both child and parent have an object_id
+ def test_new(self):
+ """Test that __new__ properly sets parent and other fields.
"""
parent_obj = Container('obj1')
- child_obj = Container.__new__(Container, parent=parent_obj)
+ child_object_id = str(uuid4())
+ child_obj = Container.__new__(Container, parent=parent_obj, object_id=child_object_id,
+ container_source="test_source")
self.assertIs(child_obj.parent, parent_obj)
self.assertIs(parent_obj.children[0], child_obj)
- self.assertIsNotNone(parent_obj.object_id)
- self.assertIsNotNone(child_obj.object_id)
+ self.assertEqual(child_obj.object_id, child_object_id)
+ self.assertFalse(child_obj._in_construct_mode)
+ self.assertTrue(child_obj.modified)
- def test_constructor_object_id_none(self):
- """Test that setting object_id to None in __new__ is OK and the object ID is set on get
+ def test_new_object_id_none(self):
+ """Test that passing object_id=None to __new__ is OK and results in a non-None object ID being assigned.
"""
parent_obj = Container('obj1')
child_obj = Container.__new__(Container, parent=parent_obj, object_id=None)
self.assertIsNotNone(child_obj.object_id)
+ UUID(child_obj.object_id, version=4) # raises ValueError if invalid
+
+ def test_new_construct_mode(self):
+ """Test that passing in_construct_mode to __new__ sets _in_construct_mode and _in_construct_mode can be reset.
+ """
+ parent_obj = Container('obj1')
+ child_obj = Container.__new__(Container, parent=parent_obj, object_id=None, in_construct_mode=True)
+ self.assertTrue(child_obj._in_construct_mode)
+ child_obj._in_construct_mode = False
+ self.assertFalse(child_obj._in_construct_mode)
+
+ def test_init(self):
+ """Test that __init__ properly sets object ID and other fields.
+ """
+ obj = Container('obj1')
+ self.assertIsNotNone(obj.object_id)
+ UUID(obj.object_id, version=4) # raises ValueError if invalid
+ self.assertFalse(obj._in_construct_mode)
+ self.assertTrue(obj.modified)
+ self.assertEqual(obj.children, tuple())
+ self.assertIsNone(obj.parent)
+ self.assertEqual(obj.name, 'obj1')
def test_set_parent(self):
"""Test that parent setter properly sets parent
=====================================
tests/unit/test_io_hdf5_h5tools.py
=====================================
@@ -838,6 +838,10 @@ class TestHDF5IO(TestCase):
with HDF5IO(pathlib_path, mode='w') as io:
self.assertEqual(io.source, self.path)
+ def test_path_or_file(self):
+ with self.assertRaisesWith(ValueError, "You must supply either a path or a file."):
+ HDF5IO()
+
class TestCacheSpec(TestCase):
@@ -1049,7 +1053,7 @@ class HDF5IOMultiFileTest(TestCase):
# Close all the files
for i in self.io:
i.close()
- del(i)
+ del i
self.io = None
self.f = None
# Make sure the files have been deleted
@@ -1278,7 +1282,7 @@ class HDF5IOInitFileExistsTest(TestCase):
def tearDown(self):
if self.io is not None:
self.io.close()
- del(self.io)
+ del self.io
if os.path.exists(self.path):
os.remove(self.path)
@@ -1312,7 +1316,7 @@ class HDF5IOReadNoDataTest(TestCase):
def tearDown(self):
if self.io is not None:
self.io.close()
- del(self.io)
+ del self.io
if os.path.exists(self.path):
os.remove(self.path)
@@ -1354,7 +1358,7 @@ class HDF5IOReadData(TestCase):
def tearDown(self):
if self.io is not None:
self.io.close()
- del(self.io)
+ del self.io
if os.path.exists(self.path):
os.remove(self.path)
@@ -1386,7 +1390,7 @@ class HDF5IOReadBuilderClosed(TestCase):
def tearDown(self):
if self.io is not None:
self.io.close()
- del(self.io)
+ del self.io
if os.path.exists(self.path):
os.remove(self.path)
@@ -1455,7 +1459,7 @@ class HDF5IOWriteFileExists(TestCase):
def tearDown(self):
if self.io is not None:
self.io.close()
- del(self.io)
+ del self.io
if os.path.exists(self.path):
os.remove(self.path)
@@ -3061,3 +3065,75 @@ class TestWriteHDF5withZarrInput(TestCase):
self.assertEqual(dset.compression_opts, 5)
self.assertEqual(dset.shuffle, True)
self.assertEqual(dset.fletcher32, True)
+
+
+class HDF5IOEmptyDataset(TestCase):
+ """ Test if file does not exist, write in mode (w, w-, x, a) is ok """
+
+ def setUp(self):
+ self.manager = get_foo_buildmanager()
+ self.path = get_temp_filepath()
+ self.path2 = get_temp_filepath()
+
+ def tearDown(self):
+ if os.path.exists(self.path):
+ os.remove(self.path)
+ if os.path.exists(self.path2):
+ os.remove(self.path2)
+
+ def test_write_empty_dataset(self):
+ dataio = H5DataIO(shape=(5,), dtype=int)
+ foo = Foo('foo1', dataio, "I am foo1", 17, 3.14)
+ bucket = FooBucket('bucket1', [foo])
+ foofile = FooFile(buckets=[bucket])
+
+ with HDF5IO(self.path, manager=self.manager, mode='w') as io:
+ io.write(foofile)
+
+ self.assertIs(foo.my_data, dataio)
+ self.assertIsNotNone(foo.my_data.dataset)
+ self.assertIsInstance(foo.my_data.dataset, h5py.Dataset)
+ np.testing.assert_array_equal(foo.my_data.dataset, np.zeros(5, dtype=int))
+
+ def test_overwrite_dataset(self):
+ dataio = H5DataIO(shape=(5,), dtype=int)
+ foo = Foo('foo1', dataio, "I am foo1", 17, 3.14)
+ bucket = FooBucket('bucket1', [foo])
+ foofile = FooFile(buckets=[bucket])
+
+ with HDF5IO(self.path, manager=self.manager, mode='w') as io:
+ io.write(foofile)
+
+ with self.assertRaisesRegex(ValueError, 'Cannot overwrite H5DataIO.dataset'):
+ with HDF5IO(self.path2, manager=self.manager, mode='w') as io:
+ io.write(foofile)
+
+
+class HDF5IOClassmethodTests(TestCase):
+
+ def setUp(self):
+ self.path = get_temp_filepath()
+ self.f = h5py.File(self.path, 'w')
+
+ def tearDown(self):
+ self.f.close()
+ if os.path.exists(self.path):
+ os.remove(self.path)
+
+ def test_setup_empty_dset(self):
+ dset = HDF5IO.__setup_empty_dset__(self.f, 'foo', {'shape': (3, 3), 'dtype': 'float'})
+ self.assertEqual(dset.name, '/foo')
+ self.assertTupleEqual(dset.shape, (3, 3))
+ self.assertIs(dset.dtype.type, np.float32)
+
+ def test_setup_empty_dset_req_args(self):
+ with self.assertRaisesRegex(ValueError, 'Cannot setup empty dataset /foo without dtype'):
+ HDF5IO.__setup_empty_dset__(self.f, 'foo', {'shape': (3, 3)})
+
+ with self.assertRaisesRegex(ValueError, 'Cannot setup empty dataset /foo without shape'):
+ HDF5IO.__setup_empty_dset__(self.f, 'foo', {'dtype': np.float32})
+
+ def test_setup_empty_dset_create_exception(self):
+ HDF5IO.__setup_empty_dset__(self.f, 'foo', {'shape': (3, 3), 'dtype': 'float'})
+ with self.assertRaisesRegex(Exception, "Could not create dataset foo in /"):
+ HDF5IO.__setup_empty_dset__(self.f, 'foo', {'shape': (3, 3), 'dtype': 'float'})
=====================================
tests/unit/utils_test/test_core_DataIO.py
=====================================
@@ -55,3 +55,16 @@ class DataIOTests(TestCase):
container = Data('wrapped_data', data)
with self.assertRaisesWith(ValueError, "cannot overwrite 'data' on DataIO"):
container.set_dataio(dataio)
+
+ def test_dataio_options(self):
+ """
+ Test that either data or dtype+shape are specified exclusively
+ """
+ with self.assertRaisesRegex(ValueError, "Setting the dtype when data is not None is not supported"):
+ DataIO(data=np.arange(5), dtype=int)
+ with self.assertRaisesRegex(ValueError, "Setting the shape when data is not None is not supported"):
+ DataIO(data=np.arange(5), shape=(3,))
+
+ dataio = DataIO(shape=(3,), dtype=int)
+ with self.assertRaisesRegex(ValueError, "Setting data when dtype and shape are not None is not supported"):
+ dataio.data = np.arange(5)
=====================================
tests/unit/utils_test/test_docval.py
=====================================
@@ -1,7 +1,7 @@
import numpy as np
from hdmf.testing import TestCase
from hdmf.utils import (docval, fmt_docval_args, get_docval, getargs, popargs, AllowPositional, get_docval_macro,
- docval_macro, popargs_to_dict)
+ docval_macro, popargs_to_dict, call_docval_func)
class MyTestClass(object):
@@ -137,14 +137,25 @@ class TestDocValidator(TestCase):
with self.assertRaises(ValueError):
method1(self, arg1=[[1, 1, 1]])
+ fmt_docval_warning_msg = (
+ "fmt_docval_args will be deprecated in a future version of HDMF. Instead of using fmt_docval_args, "
+ "call the function directly with the kwargs. Please note that fmt_docval_args "
+ "removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
+ "includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
+ "is set), then you will need to pop the extra arguments out of kwargs before calling the function."
+ )
+
def test_fmt_docval_args(self):
- """ Test that fmt_docval_args works """
+ """ Test that fmt_docval_args parses the args and strips extra args """
test_kwargs = {
'arg1': 'a string',
'arg2': 1,
'arg3': True,
+ 'hello': 'abc',
+ 'list': ['abc', 1, 2, 3]
}
- rec_args, rec_kwargs = fmt_docval_args(self.test_obj.basic_add2_kw, test_kwargs)
+ with self.assertWarnsWith(PendingDeprecationWarning, self.fmt_docval_warning_msg):
+ rec_args, rec_kwargs = fmt_docval_args(self.test_obj.basic_add2_kw, test_kwargs)
exp_args = ['a string', 1]
self.assertListEqual(rec_args, exp_args)
exp_kwargs = {'arg3': True}
@@ -156,7 +167,8 @@ class TestDocValidator(TestCase):
pass
with self.assertRaisesRegex(ValueError, r"no docval found on .*method1.*"):
- fmt_docval_args(method1, {})
+ with self.assertWarnsWith(PendingDeprecationWarning, self.fmt_docval_warning_msg):
+ fmt_docval_args(method1, {})
def test_fmt_docval_args_allow_extra(self):
""" Test that fmt_docval_args works """
@@ -167,12 +179,38 @@ class TestDocValidator(TestCase):
'hello': 'abc',
'list': ['abc', 1, 2, 3]
}
- rec_args, rec_kwargs = fmt_docval_args(self.test_obj.basic_add2_kw_allow_extra, test_kwargs)
+ with self.assertWarnsWith(PendingDeprecationWarning, self.fmt_docval_warning_msg):
+ rec_args, rec_kwargs = fmt_docval_args(self.test_obj.basic_add2_kw_allow_extra, test_kwargs)
exp_args = ['a string', 1]
self.assertListEqual(rec_args, exp_args)
exp_kwargs = {'arg3': True, 'hello': 'abc', 'list': ['abc', 1, 2, 3]}
self.assertDictEqual(rec_kwargs, exp_kwargs)
+ def test_call_docval_func(self):
+ """Test that call_docval_func strips extra args and calls the function."""
+ test_kwargs = {
+ 'arg1': 'a string',
+ 'arg2': 1,
+ 'arg3': True,
+ 'hello': 'abc',
+ 'list': ['abc', 1, 2, 3]
+ }
+ msg = (
+ "call_docval_func will be deprecated in a future version of HDMF. Instead of using call_docval_func, "
+ "call the function directly with the kwargs. Please note that call_docval_func "
+ "removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
+ "includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
+ "is set), then you will need to pop the extra arguments out of kwargs before calling the function."
+ )
+ with self.assertWarnsWith(PendingDeprecationWarning, msg):
+ ret_kwargs = call_docval_func(self.test_obj.basic_add2_kw, test_kwargs)
+ exp_kwargs = {
+ 'arg1': 'a string',
+ 'arg2': 1,
+ 'arg3': True
+ }
+ self.assertDictEqual(ret_kwargs, exp_kwargs)
+
def test_docval_add(self):
"""Test that docval works with a single positional
argument
View it on GitLab: https://salsa.debian.org/med-team/hdmf/-/commit/43016591955cb20a33d41610a5d68afe8e2d6c10