[med-svn] [Git][med-team/hdmf][master] 3 commits: New upstream version 2.5.5

Nilesh Patra (@nilesh) gitlab at salsa.debian.org
Tue May 18 18:39:13 BST 2021



Nilesh Patra pushed to branch master at Debian Med / hdmf


Commits:
b6324417 by Nilesh Patra at 2021-05-18T23:05:22+05:30
New upstream version 2.5.5
- - - - -
33db3923 by Nilesh Patra at 2021-05-18T23:05:25+05:30
Update upstream source from tag 'upstream/2.5.5'

Update to upstream version '2.5.5'
with Debian dir 23e0e2b6c0ea65e987eef85619ce4dd2d9f155d6
- - - - -
e814e04b by Nilesh Patra at 2021-05-18T23:05:57+05:30
Interim changelog entry

- - - - -


9 changed files:

- PKG-INFO
- debian/changelog
- src/hdmf.egg-info/PKG-INFO
- src/hdmf/_version.py
- src/hdmf/build/manager.py
- src/hdmf/spec/namespace.py
- src/hdmf/spec/spec.py
- tests/unit/spec_tests/test_load_namespace.py
- tests/unit/test_io_hdf5_h5tools.py


Changes:

=====================================
PKG-INFO
=====================================
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hdmf
-Version: 2.5.3
+Version: 2.5.5
 Summary: A package for standardizing hierarchical object data
 Home-page: https://github.com/hdmf-dev/hdmf
 Author: Andrew Tritt


=====================================
debian/changelog
=====================================
@@ -1,7 +1,7 @@
-hdmf (2.5.3-1) UNRELEASED; urgency=medium
+hdmf (2.5.5-1) UNRELEASED; urgency=medium
 
   * Team Upload.
-  * New upstream version 2.5.3
+  * New upstream version 2.5.5
   * Fix copyright
 
  -- Nilesh Patra <nilesh at debian.org>  Fri, 14 May 2021 22:40:21 +0530


=====================================
src/hdmf.egg-info/PKG-INFO
=====================================
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hdmf
-Version: 2.5.3
+Version: 2.5.5
 Summary: A package for standardizing hierarchical object data
 Home-page: https://github.com/hdmf-dev/hdmf
 Author: Andrew Tritt


=====================================
src/hdmf/_version.py
=====================================
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2021-05-12T13:43:56-0700",
+ "date": "2021-05-17T17:02:11-0700",
  "dirty": false,
  "error": null,
- "full-revisionid": "6be6692191403622fe77db1f2303e4d4030e004b",
- "version": "2.5.3"
+ "full-revisionid": "db511489a3f82968446b165ec3bd60b34722587c",
+ "version": "2.5.5"
 }
 '''  # END VERSION_JSON
 


=====================================
src/hdmf/build/manager.py
=====================================
@@ -5,7 +5,7 @@ from copy import copy
 from .builders import DatasetBuilder, GroupBuilder, LinkBuilder, Builder, BaseBuilder
 from .classgenerator import ClassGenerator, CustomClassGenerator, MCIClassGenerator
 from ..container import AbstractContainer, Container, Data
-from ..spec import DatasetSpec, GroupSpec, LinkSpec, NamespaceCatalog, SpecReader
+from ..spec import DatasetSpec, GroupSpec, NamespaceCatalog, SpecReader
 from ..spec.spec import BaseStorageSpec
 from ..utils import docval, getargs, call_docval_func, ExtenderMeta
 
@@ -534,11 +534,10 @@ class TypeMap:
             if isinstance(spec, (GroupSpec, DatasetSpec)):
                 if spec.data_type_inc is not None:
                     self.get_dt_container_cls(spec.data_type_inc, namespace)  # TODO handle recursive definitions
-                if spec.data_type_def is not None:
+                if spec.data_type_def is not None:  # nested type definition
                     self.get_dt_container_cls(spec.data_type_def, namespace)
-            elif isinstance(spec, LinkSpec):
-                if spec.target_type is not None:
-                    self.get_dt_container_cls(spec.target_type, namespace)
+            else:  # spec is a LinkSpec
+                self.get_dt_container_cls(spec.target_type, namespace)
             if isinstance(spec, GroupSpec):
                 for child_spec in (spec.groups + spec.datasets + spec.links):
                     __check_dependent_types_helper(child_spec, namespace)


=====================================
src/hdmf/spec/namespace.py
=====================================
@@ -455,15 +455,10 @@ class NamespaceCatalog:
                     raise ValueError("Could not load namespace '%s'" % s['namespace']) from e
                 if types_to_load is None:
                     types_to_load = inc_ns.get_registered_types()  # load all types in namespace
+                registered_types = set()
                 for ndt in types_to_load:
-                    spec = inc_ns.get_spec(ndt)
-                    spec_file = inc_ns.catalog.get_spec_source_file(ndt)
-                    if isinstance(spec, DatasetSpec):
-                        spec = self.dataset_spec_cls.build_spec(spec)
-                    else:
-                        spec = self.group_spec_cls.build_spec(spec)
-                    catalog.register_spec(spec, spec_file)
-                included_types[s['namespace']] = tuple(types_to_load)
+                    self.__register_type(ndt, inc_ns, catalog, registered_types)
+                included_types[s['namespace']] = tuple(sorted(registered_types))
             else:
                 raise ValueError("Spec '%s' schema must have either 'source' or 'namespace' key" % ns_name)
         # construct namespace
@@ -471,6 +466,40 @@ class NamespaceCatalog:
         self.__namespaces[ns_name] = ns
         return included_types
 
+    def __register_type(self, ndt, inc_ns, catalog, registered_types):
+        spec = inc_ns.get_spec(ndt)
+        spec_file = inc_ns.catalog.get_spec_source_file(ndt)
+        self.__register_dependent_types(spec, inc_ns, catalog, registered_types)
+        if isinstance(spec, DatasetSpec):
+            built_spec = self.dataset_spec_cls.build_spec(spec)
+        else:
+            built_spec = self.group_spec_cls.build_spec(spec)
+        registered_types.add(ndt)
+        catalog.register_spec(built_spec, spec_file)
+
+    def __register_dependent_types(self, spec, inc_ns, catalog, registered_types):
+        """Ensure that classes for all types used by this type are registered
+        """
+        # TODO test cross-namespace registration...
+        def __register_dependent_types_helper(spec, inc_ns, catalog, registered_types):
+            if isinstance(spec, (GroupSpec, DatasetSpec)):
+                if spec.data_type_inc is not None:
+                    # TODO handle recursive definitions
+                    self.__register_type(spec.data_type_inc, inc_ns, catalog, registered_types)
+                if spec.data_type_def is not None:  # nested type definition
+                    self.__register_type(spec.data_type_def, inc_ns, catalog, registered_types)
+            else:  # spec is a LinkSpec
+                self.__register_type(spec.target_type, inc_ns, catalog, registered_types)
+            if isinstance(spec, GroupSpec):
+                for child_spec in (spec.groups + spec.datasets + spec.links):
+                    __register_dependent_types_helper(child_spec, inc_ns, catalog, registered_types)
+
+        if spec.data_type_inc is not None:
+            self.__register_type(spec.data_type_inc, inc_ns, catalog, registered_types)
+        if isinstance(spec, GroupSpec):
+            for child_spec in (spec.groups + spec.datasets + spec.links):
+                __register_dependent_types_helper(child_spec, inc_ns, catalog, registered_types)
+
     @docval({'name': 'namespace_path', 'type': str, 'doc': 'the path to the file containing the namespaces(s) to load'},
             {'name': 'resolve',
              'type': bool,


=====================================
src/hdmf/spec/spec.py
=====================================
@@ -836,14 +836,30 @@ class LinkSpec(Spec):
 
 _group_args = [
     {'name': 'doc', 'type': str, 'doc': 'a description about what this specification represents'},
-    {'name': 'name', 'type': str, 'doc': 'the name of this group', 'default': None},
+    {
+        'name': 'name',
+        'type': str,
+        'doc': 'the name of the Group that is written to the file. If this argument is omitted, users will be '
+               'required to enter a ``name`` field when creating instances of this data type in the API. Another '
+               'option is to specify ``default_name``, in which case this name will be used as the name of the Group '
+               'if no other name is provided.',
+        'default': None,
+    },
     {'name': 'default_name', 'type': str, 'doc': 'The default name of this group', 'default': None},
     {'name': 'groups', 'type': list, 'doc': 'the subgroups in this group', 'default': list()},
     {'name': 'datasets', 'type': list, 'doc': 'the datasets in this group', 'default': list()},
     {'name': 'attributes', 'type': list, 'doc': 'the attributes on this group', 'default': list()},
     {'name': 'links', 'type': list, 'doc': 'the links in this group', 'default': list()},
     {'name': 'linkable', 'type': bool, 'doc': 'whether or not this group can be linked', 'default': True},
-    {'name': 'quantity', 'type': (str, int), 'doc': 'the required number of allowed instance', 'default': 1},
+    {
+        'name': 'quantity',
+        'type': (str, int),
+        'doc': "the allowable number of instances of this group in a certain location. See table of options "
+               "`here <https://schema-language.readthedocs.io/en/latest/description.html#quantity>`_. Note that if you "
+               "specify ``name``, ``quantity`` cannot be ``'*'``, ``'+'``, or an integer greater than 1, because you "
+               "cannot have more than one group of the same name in the same parent group.",
+        'default': 1,
+    },
     {'name': 'data_type_def', 'type': str, 'doc': 'the data type this specification represents', 'default': None},
     {'name': 'data_type_inc', 'type': (str, 'GroupSpec'),
      'doc': 'the data type this specification data_type_inc', 'default': None},


=====================================
tests/unit/spec_tests/test_load_namespace.py
=====================================
@@ -301,8 +301,8 @@ class TestCustomSpecClasses(TestCase):
         namespace_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test.namespace.yaml')
         namespace_deps = self.ns_catalog.load_namespaces(namespace_path)
 
-        # test that the dependencies are correct
-        expected = set(['Data', 'Container', 'DynamicTable'])
+        # test that the dependencies are correct, including dependencies of the dependencies
+        expected = set(['Data', 'Container', 'DynamicTable', 'ElementIdentifiers', 'VectorData'])
         self.assertSetEqual(set(namespace_deps['test']['hdmf-common']), expected)
 
         # test that the types are loaded
@@ -341,8 +341,9 @@ class TestCustomSpecClasses(TestCase):
         ext_namespace_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test-ext.namespace.yaml')
         ext_namespace_deps = self.ns_catalog.load_namespaces(ext_namespace_path)
 
-        # test that the dependencies are correct
-        expected_deps = set(['TestData', 'TestContainer', 'TestTable', 'Container', 'Data', 'DynamicTable'])
+        # test that the dependencies are correct, including dependencies of the dependencies
+        expected_deps = set(['TestData', 'TestContainer', 'TestTable', 'Container', 'Data', 'DynamicTable',
+                             'ElementIdentifiers', 'VectorData'])
         self.assertSetEqual(set(ext_namespace_deps['test-ext']['test']), expected_deps)
 
     def test_load_namespaces_bad_path(self):


=====================================
tests/unit/test_io_hdf5_h5tools.py
=====================================
@@ -1980,23 +1980,27 @@ class TestLoadNamespaces(TestCase):
 
     def test_load_namespaces_with_dependencies(self):
         """Test loading namespaces where one includes another."""
-        file_spec = GroupSpec(doc="A FooFile", data_type_def='FooFile')
+        class MyFoo(Container):
+            pass
+
+        myfoo_spec = GroupSpec(doc="A MyFoo", data_type_def='MyFoo', data_type_inc='Foo')
         spec_catalog = SpecCatalog()
         name = 'test_core2'
         namespace = SpecNamespace(
             doc='a test namespace',
             name=name,
-            schema=[{'source': 'test.yaml', 'namespace': 'test_core'}],  # depends on test_core
+            schema=[{'source': 'test2.yaml', 'namespace': 'test_core'}],  # depends on test_core
             version='0.1.0',
             catalog=spec_catalog
         )
-        spec_catalog.register_spec(file_spec, 'test.yaml')
+        spec_catalog.register_spec(myfoo_spec, 'test2.yaml')
         namespace_catalog = NamespaceCatalog()
         namespace_catalog.add_namespace(name, namespace)
         type_map = TypeMap(namespace_catalog)
-        type_map.register_container_type(name, 'FooFile', FooFile)
+        type_map.register_container_type(name, 'MyFoo', MyFoo)
+        type_map.merge(self.manager.type_map, ns_catalog=True)
         manager = BuildManager(type_map)
-        container = FooFile()
+        container = MyFoo(name='myfoo')
         with HDF5IO(self.path, manager=manager, mode='a') as io:  # append to file
             io.write(container)
 



View it on GitLab: https://salsa.debian.org/med-team/hdmf/-/compare/bc64292650f557fb25a969a3a5b2900298071394...e814e04be6201c776cff1de353f4b69b40d78e42

-- 
View it on GitLab: https://salsa.debian.org/med-team/hdmf/-/compare/bc64292650f557fb25a969a3a5b2900298071394...e814e04be6201c776cff1de353f4b69b40d78e42
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20210518/e8a31114/attachment-0001.htm>


More information about the debian-med-commit mailing list