[med-svn] [Git][med-team/hdmf][upstream] New upstream version 3.4.7
Lance Lin (@linqigang)
gitlab at salsa.debian.org
Wed Nov 23 14:08:59 GMT 2022
Lance Lin pushed to branch upstream at Debian Med / hdmf
Commits:
c3f9d2ad by Lance Lin at 2022-11-23T20:53:24+07:00
New upstream version 3.4.7
- - - - -
13 changed files:
- PKG-INFO
- requirements-dev.txt
- requirements.txt
- setup.cfg
- src/hdmf.egg-info/PKG-INFO
- src/hdmf/__init__.py
- src/hdmf/_version.py
- src/hdmf/build/classgenerator.py
- src/hdmf/data_utils.py
- tests/unit/build_tests/test_classgenerator.py
- tests/unit/utils_test/test_core_GenericDataChunkIterator.py
- tox.ini
- versioneer.py
Changes:
=====================================
PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hdmf
-Version: 3.4.6
+Version: 3.4.7
Summary: A package for standardizing hierarchical object data
Home-page: https://github.com/hdmf-dev/hdmf
Author: Andrew Tritt
=====================================
requirements-dev.txt
=====================================
@@ -5,7 +5,6 @@ coverage==6.4.2
flake8==5.0.4
flake8-debugger==4.1.2
flake8-print==5.0.0
-importlib-metadata==4.2.0 # there may be compatibility issues with newer versions
pytest==7.1.2
pytest-cov==3.0.0
python-dateutil==2.8.2
=====================================
requirements.txt
=====================================
@@ -2,8 +2,11 @@
# note that python 3.7 end of life is 27 Jun 2023
h5py==3.7.0
jsonschema==4.9.1
-numpy==1.21.5 # note that numpy 1.22 dropped python 3.7 support
-pandas==1.3.5 # note that pandas 1.4 dropped python 3.7 support
+numpy==1.23.3;python_version>='3.8'
+numpy==1.21.5;python_version<'3.8' # note that numpy 1.22 dropped python 3.7 support
+pandas==1.5.0;python_version>='3.8'
+pandas==1.3.5;python_version<'3.8' # note that pandas 1.4 dropped python 3.7 support
ruamel.yaml==0.17.21
-scipy==1.7.3 # note that scipy 1.8 dropped python 3.7 support
-setuptools==63.4.1
+scipy==1.9.3;python_version>='3.8'
+scipy==1.7.3;python_version<'3.8' # note that scipy 1.8 dropped python 3.7 support
+setuptools==65.4.1
=====================================
setup.cfg
=====================================
@@ -2,8 +2,7 @@
VCS = git
versionfile_source = src/hdmf/_version.py
versionfile_build = hdmf/_version.py
-style = pep440-pre
-tag_prefix = *.*.*
+tag_prefix = ''
[flake8]
max-line-length = 120
=====================================
src/hdmf.egg-info/PKG-INFO
=====================================
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hdmf
-Version: 3.4.6
+Version: 3.4.7
Summary: A package for standardizing hierarchical object data
Home-page: https://github.com/hdmf-dev/hdmf
Author: Andrew Tritt
=====================================
src/hdmf/__init__.py
=====================================
@@ -20,9 +20,8 @@ def get_region_slicer(**kwargs):
return None
-from ._version import get_versions # noqa: E402
-__version__ = get_versions()['version']
-del get_versions
+from . import _version # noqa: F401,E402
+__version__ = _version.get_versions()['version']
from ._due import due, BibTeX # noqa: E402
=====================================
src/hdmf/_version.py
=====================================
@@ -1,5 +1,5 @@
-# This file was generated by 'versioneer.py' (0.18) from
+# This file was generated by 'versioneer.py' (0.28) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
@@ -8,11 +8,11 @@ import json
version_json = '''
{
- "date": "2022-10-04T14:35:09-0700",
+ "date": "2022-11-09T21:53:41-0800",
"dirty": false,
"error": null,
- "full-revisionid": "abb5d3b115823ff9581575cf049d025540b194c4",
- "version": "3.4.6"
+ "full-revisionid": "ccc5c252db697a65439086ff925f729e5eae118d",
+ "version": "3.4.7"
}
''' # END VERSION_JSON
=====================================
src/hdmf/build/classgenerator.py
=====================================
@@ -391,17 +391,18 @@ class MCIClassGenerator(CustomClassGenerator):
@docval(*docval_args, allow_positional=AllowPositional.WARNING)
def __init__(self, **kwargs):
- # first call the next superclass init
- # previous_init(**kwargs)
-
- # store the values passed to init for each MCI attribute
+ # store the values passed to init for each MCI attribute so that they can be added
+ # after calling __init__
new_kwargs = list()
for field_clsconf in classdict['__clsconf__']:
attr_name = field_clsconf['attr']
+ # do not store the value if it is None or not present
+ if attr_name not in kwargs or kwargs[attr_name] is None:
+ continue
add_method_name = field_clsconf['add']
new_kwarg = dict(
attr_name=attr_name,
- value=popargs(attr_name, kwargs) if attr_name in kwargs else None,
+ value=popargs(attr_name, kwargs),
add_method_name=add_method_name
)
new_kwargs.append(new_kwarg)
=====================================
src/hdmf/data_utils.py
=====================================
@@ -1,4 +1,7 @@
import copy
+import math
+import functools # TODO: remove when Python 3.7 support is dropped
+import operator # TODO: remove when Python 3.7 support is dropped
from abc import ABCMeta, abstractmethod
from collections.abc import Iterable
from warnings import warn
@@ -153,7 +156,7 @@ class GenericDataChunkIterator(AbstractDataChunkIterator):
doc=(
"If chunk_shape is not specified, it will be inferred as the smallest chunk "
"below the chunk_mb threshold.",
- "Defaults to 1MB."
+ "Defaults to 1MB.",
),
default=None,
),
@@ -205,39 +208,50 @@ class GenericDataChunkIterator(AbstractDataChunkIterator):
chunk_shape is not None
), "Only one of 'chunk_mb' or 'chunk_shape' can be specified!"
- self._maxshape = self._get_maxshape()
self._dtype = self._get_dtype()
- if chunk_shape is None:
- self.chunk_shape = self._get_default_chunk_shape(chunk_mb=chunk_mb)
- else:
- self.chunk_shape = chunk_shape
- if buffer_shape is None:
- self.buffer_shape = self._get_default_buffer_shape(buffer_gb=buffer_gb)
- else:
- self.buffer_shape = buffer_shape
- buffer_gb = np.prod(self.buffer_shape) * np.dtype(self._dtype).itemsize / 1e9
+ self._maxshape = tuple(int(x) for x in self._get_maxshape())
+ chunk_shape = tuple(int(x) for x in chunk_shape) if chunk_shape else chunk_shape
+ self.chunk_shape = chunk_shape or self._get_default_chunk_shape(chunk_mb=chunk_mb)
+ buffer_shape = tuple(int(x) for x in buffer_shape) if buffer_shape else buffer_shape
+ self.buffer_shape = buffer_shape or self._get_default_buffer_shape(buffer_gb=buffer_gb)
- array_chunk_shape = np.array(self.chunk_shape)
- array_buffer_shape = np.array(self.buffer_shape)
- array_maxshape = np.array(self.maxshape)
- assert all(array_buffer_shape > 0), f"Some dimensions of buffer_shape ({self.buffer_shape}) are less than zero!"
+ # Shape assertions
assert all(
- array_chunk_shape <= array_maxshape
+ buffer_axis > 0 for buffer_axis in self.buffer_shape
+ ), f"Some dimensions of buffer_shape ({self.buffer_shape}) are less than zero!"
+ assert all(
+ chunk_axis <= maxshape_axis for chunk_axis, maxshape_axis in zip(self.chunk_shape, self.maxshape)
), f"Some dimensions of chunk_shape ({self.chunk_shape}) exceed the data dimensions ({self.maxshape})!"
assert all(
- array_buffer_shape <= array_maxshape
+ buffer_axis <= maxshape_axis for buffer_axis, maxshape_axis in zip(self.buffer_shape, self.maxshape)
), f"Some dimensions of buffer_shape ({self.buffer_shape}) exceed the data dimensions ({self.maxshape})!"
assert all(
- array_chunk_shape <= array_buffer_shape
+ (chunk_axis <= buffer_axis for chunk_axis, buffer_axis in zip(self.chunk_shape, self.buffer_shape))
), f"Some dimensions of chunk_shape ({self.chunk_shape}) exceed the buffer shape ({self.buffer_shape})!"
- assert all((array_buffer_shape % array_chunk_shape == 0)[array_buffer_shape != array_maxshape]), (
+ assert all(
+ buffer_axis % chunk_axis == 0
+ for chunk_axis, buffer_axis, maxshape_axis in zip(self.chunk_shape, self.buffer_shape, self.maxshape)
+ if buffer_axis != maxshape_axis
+ ), (
f"Some dimensions of chunk_shape ({self.chunk_shape}) do not "
f"evenly divide the buffer shape ({self.buffer_shape})!"
)
- self.num_buffers = np.prod(np.ceil(array_maxshape / array_buffer_shape))
+ self.num_buffers = functools.reduce( # TODO: replace with math.prod when Python 3.7 support is dropped
+ operator.mul,
+ [
+ math.ceil(maxshape_axis / buffer_axis)
+ for buffer_axis, maxshape_axis in zip(self.buffer_shape, self.maxshape)
+ ],
+ 1,
+ )
self.buffer_selection_generator = (
- tuple([slice(lower_bound, upper_bound) for lower_bound, upper_bound in zip(lower_bounds, upper_bounds)])
+ tuple(
+ [
+ slice(lower_bound, upper_bound)
+ for lower_bound, upper_bound in zip(lower_bounds, upper_bounds)
+ ]
+ )
for lower_bounds, upper_bounds in zip(
product(
*[
@@ -280,61 +294,68 @@ class GenericDataChunkIterator(AbstractDataChunkIterator):
default=None,
)
)
- def _get_default_chunk_shape(self, **kwargs):
+ def _get_default_chunk_shape(self, **kwargs) -> Tuple[int, ...]:
"""
Select chunk shape with size in MB less than the threshold of chunk_mb.
Keeps the dimensional ratios of the original data.
"""
- chunk_mb = getargs('chunk_mb', kwargs)
+ chunk_mb = getargs("chunk_mb", kwargs)
assert chunk_mb > 0, f"chunk_mb ({chunk_mb}) must be greater than zero!"
n_dims = len(self.maxshape)
itemsize = self.dtype.itemsize
chunk_bytes = chunk_mb * 1e6
- v = np.floor(np.array(self.maxshape) / np.min(self.maxshape))
- prod_v = np.prod(v)
+
+ min_maxshape = min(self.maxshape)
+ v = tuple(math.floor(maxshape_axis / min_maxshape) for maxshape_axis in self.maxshape)
+ prod_v = functools.reduce(operator.mul, v, 1) # TODO: replace with math.prod when Python 3.7 support is dropped
while prod_v * itemsize > chunk_bytes and prod_v != 1:
- v_ind = v != 1
- next_v = v[v_ind]
- v[v_ind] = np.floor(next_v / np.min(next_v))
- prod_v = np.prod(v)
- k = np.floor((chunk_bytes / (prod_v * itemsize)) ** (1 / n_dims))
- return tuple([min(int(x), self.maxshape[dim]) for dim, x in enumerate(k * v)])
+ non_unit_min_v = min(x for x in v if x != 1)
+ v = tuple(math.floor(x / non_unit_min_v) if x != 1 else x for x in v)
+ # TODO: replace with math.prod when Python 3.7 support is dropped
+ prod_v = functools.reduce(operator.mul, v, 1)
+ k = math.floor((chunk_bytes / (prod_v * itemsize)) ** (1 / n_dims))
+ return tuple([min(k * x, self.maxshape[dim]) for dim, x in enumerate(v)])
@docval(
dict(
- name="buffer_gb",
- type=(float, int),
- doc="Size of the data buffer in gigabytes. Recommended to be as much free RAM as safely available.",
- default=None,
+ name="buffer_gb",
+ type=(float, int),
+ doc="Size of the data buffer in gigabytes. Recommended to be as much free RAM as safely available.",
+ default=None,
)
)
- def _get_default_buffer_shape(self, **kwargs):
+ def _get_default_buffer_shape(self, **kwargs) -> Tuple[int, ...]:
"""
Select buffer shape with size in GB less than the threshold of buffer_gb.
Keeps the dimensional ratios of the original data.
Assumes the chunk_shape has already been set.
"""
- buffer_gb = getargs('buffer_gb', kwargs)
+ buffer_gb = getargs("buffer_gb", kwargs)
assert buffer_gb > 0, f"buffer_gb ({buffer_gb}) must be greater than zero!"
- assert all(np.array(self.chunk_shape) > 0), (
+ assert all(chunk_axis > 0 for chunk_axis in self.chunk_shape), (
f"Some dimensions of chunk_shape ({self.chunk_shape}) are less than zero!"
)
- k = np.floor(
- (buffer_gb * 1e9 / (np.prod(self.chunk_shape) * self.dtype.itemsize)) ** (1 / len(self.chunk_shape))
+ # TODO: replace with math.prod when Python 3.7 support is dropped
+ k = math.floor(
+ (
+ buffer_gb * 1e9 / (functools.reduce(operator.mul, self.chunk_shape, 1) * self.dtype.itemsize)
+ ) ** (1 / len(self.chunk_shape))
+ )
+ return tuple(
+ [
+ min(max(k * x, self.chunk_shape[j]), self.maxshape[j])
+ for j, x in enumerate(self.chunk_shape)
+ ]
)
- return tuple([
- min(max(int(x), self.chunk_shape[j]), self.maxshape[j])
- for j, x in enumerate(k * np.array(self.chunk_shape))
- ])
- def recommended_chunk_shape(self) -> tuple:
+ def recommended_chunk_shape(self) -> Tuple[int, ...]:
return self.chunk_shape
- def recommended_data_shape(self) -> tuple:
+ def recommended_data_shape(self) -> Tuple[int, ...]:
return self.maxshape
def __iter__(self):
@@ -378,16 +399,16 @@ class GenericDataChunkIterator(AbstractDataChunkIterator):
raise NotImplementedError("The data fetching method has not been built for this DataChunkIterator!")
@property
- def maxshape(self):
+ def maxshape(self) -> Tuple[int, ...]:
return self._maxshape
@abstractmethod
- def _get_maxshape(self) -> tuple:
+ def _get_maxshape(self) -> Tuple[int, ...]:
"""Retrieve the maximum bounds of the data shape using minimal I/O."""
raise NotImplementedError("The setter for the maxshape property has not been built for this DataChunkIterator!")
@property
- def dtype(self):
+ def dtype(self) -> np.dtype:
return self._dtype
@abstractmethod
=====================================
tests/unit/build_tests/test_classgenerator.py
=====================================
@@ -372,6 +372,25 @@ class TestDynamicContainer(TestCase):
assert multi.bars['my_bar'] == Bar(name='my_bar', data=list(range(10)), attr1='value1', attr2=10)
assert multi.attr3 == 5.
+ def test_multi_container_spec_optional(self):
+ multi_spec = GroupSpec(
+ doc='A test extension that contains a multi',
+ data_type_def='Multi',
+ groups=[
+ GroupSpec(data_type_inc=self.bar_spec, doc='test multi', quantity='*')
+ ],
+ attributes=[
+ AttributeSpec(name='attr3', doc='a float attribute', dtype='float')
+ ]
+ )
+ self.spec_catalog.register_spec(multi_spec, 'extension.yaml')
+ Multi = self.type_map.get_dt_container_cls('Multi', CORE_NAMESPACE)
+ multi = Multi(
+ name='my_multi',
+ attr3=5.
+ )
+ assert len(multi.bars) == 0
+
class TestGetClassSeparateNamespace(TestCase):
=====================================
tests/unit/utils_test/test_core_GenericDataChunkIterator.py
=====================================
@@ -1,8 +1,9 @@
+import unittest
import numpy as np
from pathlib import Path
from tempfile import mkdtemp
from shutil import rmtree
-import unittest
+from typing import Tuple, Iterable
import h5py
@@ -22,22 +23,37 @@ class GenericDataChunkIteratorTests(TestCase):
self.array = array
super().__init__(**kwargs)
- def _get_data(self, selection):
+ def _get_data(self, selection) -> np.ndarray:
return self.array[selection]
- def _get_maxshape(self):
+ def _get_maxshape(self) -> Tuple[int, ...]:
return self.array.shape
- def _get_dtype(self):
+ def _get_dtype(self) -> np.dtype:
return self.array.dtype
- def setUp(self):
- np.random.seed(seed=0)
- self.test_dir = Path(mkdtemp())
- self.test_array = np.random.randint(low=-(2 ** 15), high=2 ** 15 - 1, size=(2000, 384), dtype="int16")
+ class TestNumpyArrayDataChunkIteratorWithNumpyDtypeShape(GenericDataChunkIterator):
+ def __init__(self, array: np.ndarray, **kwargs):
+ self.array = array
+ super().__init__(**kwargs)
+
+ def _get_data(self, selection) -> np.ndarray:
+ return self.array[selection]
- def tearDown(self):
- rmtree(self.test_dir)
+ def _get_maxshape(self) -> Tuple[np.uint64, ...]: # Undesirable return type, but can be handled
+ return tuple(np.uint64(x) for x in self.array.shape)
+
+ def _get_dtype(self) -> np.dtype:
+ return self.array.dtype
+
+ @classmethod
+ def setUpClass(cls):
+ cls.test_dir = Path(mkdtemp())
+ cls.test_array = np.empty(shape=(2000, 384), dtype="int16")
+
+ @classmethod
+ def tearDownClass(cls):
+ rmtree(cls.test_dir)
def check_first_data_chunk_call(self, expected_selection, iterator_options):
test = self.TestNumpyArrayDataChunkIterator(array=self.test_array, **iterator_options)
@@ -58,6 +74,13 @@ class GenericDataChunkIteratorTests(TestCase):
np.testing.assert_array_equal(np.array(dset), self.test_array)
self.assertEqual(dset.chunks, iterator.chunk_shape)
+ def check_all_of_iterable_is_python_int(self, iterable: Iterable):
+ assert all(
+ tuple( # Easier to visualize failures in pytest with tuple vs. generator
+ isinstance(x, int) for x in iterable
+ )
+ )
+
def test_abstract_assertions(self):
class TestGenericDataChunkIterator(GenericDataChunkIterator):
pass
@@ -122,7 +145,7 @@ class GenericDataChunkIteratorTests(TestCase):
array=self.test_array, buffer_shape=buffer_shape, chunk_shape=chunk_shape
)
- def test_buffer_option_assertions(self):
+ def test_buffer_option_assertion_negative_buffer_gb(self):
buffer_gb = -1
with self.assertRaisesWith(
exc_type=AssertionError,
@@ -130,13 +153,7 @@ class GenericDataChunkIteratorTests(TestCase):
):
self.TestNumpyArrayDataChunkIterator(array=self.test_array, buffer_gb=buffer_gb)
- buffer_shape = (-1, 384)
- with self.assertRaisesWith(
- exc_type=AssertionError,
- exc_msg=f"Some dimensions of buffer_shape ({buffer_shape}) are less than zero!"
- ):
- self.TestNumpyArrayDataChunkIterator(array=self.test_array, buffer_shape=buffer_shape)
-
+ def test_buffer_option_assertion_exceed_maxshape(self):
buffer_shape = (2001, 384)
with self.assertRaisesWith(
exc_type=AssertionError,
@@ -147,7 +164,15 @@ class GenericDataChunkIteratorTests(TestCase):
):
self.TestNumpyArrayDataChunkIterator(array=self.test_array, buffer_shape=buffer_shape)
- def test_chunk_option_assertions(self):
+ def test_buffer_option_assertion_negative_shape(self):
+ buffer_shape = (-1, 384)
+ with self.assertRaisesWith(
+ exc_type=AssertionError,
+ exc_msg=f"Some dimensions of buffer_shape ({buffer_shape}) are less than zero!"
+ ):
+ self.TestNumpyArrayDataChunkIterator(array=self.test_array, buffer_shape=buffer_shape)
+
+ def test_chunk_option_assertion_negative_chunk_mb(self):
chunk_mb = -1
with self.assertRaisesWith(
exc_type=AssertionError,
@@ -155,6 +180,7 @@ class GenericDataChunkIteratorTests(TestCase):
):
self.TestNumpyArrayDataChunkIterator(array=self.test_array, chunk_mb=chunk_mb)
+ def test_chunk_option_assertion_negative_shape(self):
chunk_shape = (-1, 384)
with self.assertRaisesWith(
exc_type=AssertionError,
@@ -174,13 +200,69 @@ class GenericDataChunkIteratorTests(TestCase):
progress_bar_options=dict(total=5),
)
+ def test_maxshape_attribute_contains_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIterator(array=self.test_array).maxshape
+ )
+
+ def test_automated_buffer_shape_attribute_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIterator(array=self.test_array).buffer_shape
+ )
+
+ def test_automated_chunk_shape_attribute_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIterator(array=self.test_array).chunk_shape
+ )
+
+ def test_np_dtype_maxshape_attribute_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIteratorWithNumpyDtypeShape(array=self.test_array).maxshape
+ )
+
+ def test_manual_buffer_shape_attribute_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIterator(
+ array=self.test_array,
+ chunk_shape=(np.uint64(100), np.uint64(2)),
+ buffer_shape=(np.uint64(200), np.uint64(4)),
+ ).buffer_shape
+ )
+
+ def test_manual_chunk_shape_attribute_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ self.check_all_of_iterable_is_python_int(
+ iterable=self.TestNumpyArrayDataChunkIterator(
+ array=self.test_array,
+ chunk_shape=(np.uint64(100), np.uint64(2))
+ ).chunk_shape
+ )
+
+ def test_selection_slices_int_type(self):
+ """Motivated by issues described in https://github.com/hdmf-dev/hdmf/pull/780 & 781 regarding return types."""
+ iterator = self.TestNumpyArrayDataChunkIterator(array=self.test_array)
+ first_chunk = next(iterator)
+ stop_0 = first_chunk.selection[0].stop
+ start_0 = first_chunk.selection[0].start
+ stop_1 = first_chunk.selection[1].stop
+ start_1 = first_chunk.selection[1].start
+
+ self.check_all_of_iterable_is_python_int(iterable=(stop_0, start_0, stop_1, start_1))
+
def test_num_buffers(self):
buffer_shape = (950, 190)
chunk_shape = (50, 38)
+ expected_num_buffers = 9
+
test = self.TestNumpyArrayDataChunkIterator(
array=self.test_array, buffer_shape=buffer_shape, chunk_shape=chunk_shape
)
- self.assertEqual(first=test.num_buffers, second=9)
+ self.assertEqual(first=test.num_buffers, second=expected_num_buffers)
def test_numpy_array_chunk_iterator(self):
iterator_options = dict()
@@ -190,10 +272,10 @@ class GenericDataChunkIteratorTests(TestCase):
self.check_direct_hdf5_write(iterator_options=iterator_options)
def test_buffer_shape_option(self):
- test_buffer_shape = (1580, 316)
- iterator_options = dict(buffer_shape=test_buffer_shape)
+ expected_buffer_shape = (1580, 316)
+ iterator_options = dict(buffer_shape=expected_buffer_shape)
self.check_first_data_chunk_call(
- expected_selection=tuple([slice(0, buffer_shape_axis) for buffer_shape_axis in test_buffer_shape]),
+ expected_selection=tuple([slice(0, buffer_shape_axis) for buffer_shape_axis in expected_buffer_shape]),
iterator_options=iterator_options,
)
self.check_direct_hdf5_write(iterator_options=iterator_options)
@@ -234,20 +316,29 @@ class GenericDataChunkIteratorTests(TestCase):
self.assertEqual(iterator.chunk_shape, test_chunk_shape)
def test_chunk_mb_option(self):
- test_chunk_shape = (1115, 223)
+ expected_chunk_shape = (1115, 223)
iterator = self.TestNumpyArrayDataChunkIterator(array=self.test_array, chunk_mb=0.5)
- self.assertEqual(iterator.chunk_shape, test_chunk_shape)
+ self.assertEqual(iterator.chunk_shape, expected_chunk_shape)
- # chunk is larger than total data size; should collapse to maxshape
- test_chunk_shape = (2000, 384)
+ def test_chunk_mb_option_larger_than_total_size(self):
+ """Chunk is larger than total data size; should collapse to maxshape."""
+ expected_chunk_shape = (2000, 384)
iterator = self.TestNumpyArrayDataChunkIterator(array=self.test_array, chunk_mb=2)
- self.assertEqual(iterator.chunk_shape, test_chunk_shape)
+ self.assertEqual(iterator.chunk_shape, expected_chunk_shape)
- # test to evoke while condition of default shaping method
- test_chunk_shape = (1, 79, 79)
+ def test_chunk_mb_option_while_condition(self):
+ """Test to evoke while condition of default shaping method."""
+ expected_chunk_shape = (2, 79, 79)
+ special_array = np.random.randint(low=-(2 ** 15), high=2 ** 15 - 1, size=(2, 2000, 2000), dtype="int16")
+ iterator = self.TestNumpyArrayDataChunkIterator(array=special_array)
+ self.assertEqual(iterator.chunk_shape, expected_chunk_shape)
+
+ def test_chunk_mb_option_while_condition_unit_maxshape_axis(self):
+ """Test to evoke while condition of default shaping method."""
+ expected_chunk_shape = (1, 79, 79)
special_array = np.random.randint(low=-(2 ** 15), high=2 ** 15 - 1, size=(1, 2000, 2000), dtype="int16")
iterator = self.TestNumpyArrayDataChunkIterator(array=special_array)
- self.assertEqual(iterator.chunk_shape, test_chunk_shape)
+ self.assertEqual(iterator.chunk_shape, expected_chunk_shape)
@unittest.skipIf(not TQDM_INSTALLED, "optional tqdm module is not installed")
def test_progress_bar(self):
=====================================
tox.ini
=====================================
@@ -8,17 +8,20 @@ envlist = py37, py38, py39, py310
requires = pip >= 22.0
[testenv]
+download = True
usedevelop = True
-setenv = PYTHONDONTWRITEBYTECODE = 1
+setenv =
+ PYTHONDONTWRITEBYTECODE = 1
+ VIRTUALENV_PIP = 22.3.1
install_command =
- pip install -U {opts} {packages}
+ python -m pip install -U {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements.txt
-
commands =
- pip check # Check for conflicting packages
+ python -m pip check # Check for conflicting packages
+ python -m pip list
pytest -v
# Env to create coverage report locally
@@ -32,7 +35,7 @@ commands =
[testenv:py310-optional]
basepython = python3.10
install_command =
- pip install -e . {opts} {packages}
+ python -m pip install -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-opt.txt
@@ -42,7 +45,7 @@ commands = {[testenv]commands}
[testenv:py310-upgraded]
basepython = python3.10
install_command =
- pip install -U -e . {opts} {packages}
+ python -m pip install -U -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-opt.txt
@@ -52,7 +55,7 @@ commands = {[testenv]commands}
[testenv:py310-prerelease]
basepython = python3.10
install_command =
- pip install -U --pre -e . {opts} {packages}
+ python -m pip install -U --pre -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-opt.txt
@@ -98,7 +101,7 @@ commands = {[testenv:build]commands}
[testenv:build-py310-upgraded]
basepython = python3.10
install_command =
- pip install -U -e . {opts} {packages}
+ python -m pip install -U -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-opt.txt
@@ -107,7 +110,7 @@ commands = {[testenv:build]commands}
[testenv:build-py310-prerelease]
basepython = python3.10
install_command =
- pip install -U --pre -e . {opts} {packages}
+ python -m pip install -U --pre -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-opt.txt
@@ -128,7 +131,7 @@ commands = python -c "import hdmf; import hdmf.common"
# Envs that will execute gallery tests
[testenv:gallery]
install_command =
- pip install -U {opts} {packages}
+ python -m pip install -U {opts} {packages}
deps =
-rrequirements-dev.txt
@@ -162,7 +165,7 @@ commands = {[testenv:gallery]commands}
[testenv:gallery-py310-upgraded]
basepython = python3.10
install_command =
- pip install -U -e . {opts} {packages}
+ python -m pip install -U -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-doc.txt
@@ -173,7 +176,7 @@ commands = {[testenv:gallery]commands}
[testenv:gallery-py310-prerelease]
basepython = python3.10
install_command =
- pip install -U --pre -e . {opts} {packages}
+ python -m pip install -U --pre -e . {opts} {packages}
deps =
-rrequirements-dev.txt
-rrequirements-doc.txt
=====================================
versioneer.py
=====================================
@@ -1,5 +1,5 @@
-# flake8: noqa: C901
-# Version: 0.18
+
+# Version: 0.28
"""The Versioneer - like a rocketeer, but for versions.
@@ -7,18 +7,14 @@ The Versioneer
==============
* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
+* https://github.com/python-versioneer/python-versioneer
* Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
+* License: Public Domain (Unlicense)
+* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3
+* [![Latest Version][pypi-image]][pypi-url]
+* [![Build Status][travis-image]][travis-url]
+
+This is a tool for managing a recorded version number in setuptools-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
@@ -27,9 +23,38 @@ system, and maybe making new tarballs.
## Quick Install
-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
+Versioneer provides two installation modes. The "classic" vendored mode installs
+a copy of versioneer into your repository. The experimental build-time dependency mode
+is intended to allow you to skip this step and simplify the process of upgrading.
+
+### Vendored mode
+
+* `pip install versioneer` to somewhere in your $PATH
+ * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+ available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+ `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+ * Note that you will need to add `tomli; python_version < "3.11"` to your
+ build-time dependencies if you use `pyproject.toml`
+* run `versioneer install --vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
+
+### Build-time dependency mode
+
+* `pip install versioneer` to somewhere in your $PATH
+ * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+ available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+ `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`)
+ to the `requires` key of the `build-system` table in `pyproject.toml`:
+ ```toml
+ [build-system]
+ requires = ["setuptools", "versioneer[toml]"]
+ build-backend = "setuptools.build_meta"
+ ```
+* run `versioneer install --no-vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
## Version Identifiers
@@ -61,7 +86,7 @@ version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes.
+uncommitted changes).
The version identifier is used for multiple purposes:
@@ -166,7 +191,7 @@ which may help identify what went wrong).
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
+[issues page](https://github.com/python-versioneer/python-versioneer/issues).
### Subprojects
@@ -194,9 +219,9 @@ work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
@@ -224,31 +249,20 @@ regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when then
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
- indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
+* edit `setup.cfg` and `pyproject.toml`, if necessary,
+ to include any new configuration settings indicated by the release notes.
+ See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install --[no-]vendor` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
@@ -265,29 +279,54 @@ installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
+## Similar projects
+
+* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
+ dependency
+* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
+ versioneer
+* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
+ plugin
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
+Specifically, both are released under the "Unlicense", as described in
+https://unlicense.org/.
+
+[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
+[pypi-url]: https://pypi.python.org/pypi/versioneer/
+[travis-image]:
+https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
+[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
"""
+# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
+# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
+# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
+# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
+# pylint:disable=attribute-defined-outside-init,too-many-arguments
-from __future__ import print_function
-try:
- import configparser
-except ImportError:
- import ConfigParser as configparser
+import configparser
import errno
-import fnmatch # HDMF
import json
import os
import re
import subprocess
import sys
+from pathlib import Path
+from typing import Callable, Dict
+import functools
+
+have_tomllib = True
+if sys.version_info >= (3, 11):
+ import tomllib
+else:
+ try:
+ import tomli as tomllib
+ except ImportError:
+ have_tomllib = False
class VersioneerConfig:
@@ -322,12 +361,12 @@ def get_root():
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
- me = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(me)[0])
+ my_path = os.path.realpath(os.path.abspath(__file__))
+ me_dir = os.path.normcase(os.path.splitext(my_path)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
- if me_dir != vsr_dir:
+ if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
print("Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(me), versioneer_py))
+ % (os.path.dirname(my_path), versioneer_py))
except NameError:
pass
return root
@@ -335,30 +374,39 @@ def get_root():
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
- # This might raise EnvironmentError (if setup.cfg is missing), or
+ # This might raise OSError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
- setup_cfg = os.path.join(root, "setup.cfg")
- parser = configparser.SafeConfigParser()
- with open(setup_cfg, "r") as f:
- parser.readfp(f)
- VCS = parser.get("versioneer", "VCS") # mandatory
-
- def get(parser, name):
- if parser.has_option("versioneer", name):
- return parser.get("versioneer", name)
- return None
+ root = Path(root)
+ pyproject_toml = root / "pyproject.toml"
+ setup_cfg = root / "setup.cfg"
+ section = None
+ if pyproject_toml.exists() and have_tomllib:
+ try:
+ with open(pyproject_toml, 'rb') as fobj:
+ pp = tomllib.load(fobj)
+ section = pp['tool']['versioneer']
+ except (tomllib.TOMLDecodeError, KeyError):
+ pass
+ if not section:
+ parser = configparser.ConfigParser()
+ with open(setup_cfg) as cfg_file:
+ parser.read_file(cfg_file)
+ parser.get("versioneer", "VCS") # raise error if missing
+
+ section = parser["versioneer"]
+
cfg = VersioneerConfig()
- cfg.VCS = VCS
- cfg.style = get(parser, "style") or ""
- cfg.versionfile_source = get(parser, "versionfile_source")
- cfg.versionfile_build = get(parser, "versionfile_build")
- cfg.tag_prefix = get(parser, "tag_prefix")
- if cfg.tag_prefix in ("''", '""'):
+ cfg.VCS = section['VCS']
+ cfg.style = section.get("style", "")
+ cfg.versionfile_source = section.get("versionfile_source")
+ cfg.versionfile_build = section.get("versionfile_build")
+ cfg.tag_prefix = section.get("tag_prefix")
+ if cfg.tag_prefix in ("''", '""', None):
cfg.tag_prefix = ""
- cfg.parentdir_prefix = get(parser, "parentdir_prefix")
- cfg.verbose = get(parser, "verbose")
+ cfg.parentdir_prefix = section.get("parentdir_prefix")
+ cfg.verbose = section.get("verbose")
return cfg
@@ -367,17 +415,15 @@ class NotThisMethod(Exception):
# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
+ """Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
+ HANDLERS.setdefault(vcs, {})[method] = f
return f
return decorate
@@ -386,17 +432,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
+ except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -408,35 +462,35 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
-LONG_VERSION_PY['git'] = '''
+LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# This file is released into the public domain.
+# Generated by versioneer-0.28
+# https://github.com/python-versioneer/python-versioneer
"""Git implementation of _version.py."""
import errno
-import fnmatch # HDMF
import os
import re
import subprocess
import sys
+from typing import Callable, Dict
+import functools
def get_keywords():
@@ -474,12 +528,12 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
+ """Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
@@ -493,17 +547,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
+ except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -515,15 +577,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
@@ -535,15 +595,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
@@ -560,22 +619,21 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@@ -583,10 +641,14 @@ def git_get_keywords(versionfile_abs):
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -599,11 +661,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
@@ -612,33 +674,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
- # HDMF: Support tag_prefix specified as a glob pattern
- tag_is_glob_pattern = "*" in tag_prefix
- if tag_is_glob_pattern:
- if fnmatch.fnmatch(ref, tag_prefix):
- r = ref
- if verbose:
- print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- else:
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
+ if verbose:
+ print("picking %%s" %% r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
@@ -648,7 +703,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -659,30 +714,31 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=not verbose)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
- # HDMF: Support tag_prefix specified as a glob pattern
- tag_is_glob_pattern = "*" in tag_prefix
- match_argument = tag_prefix
- if not tag_is_glob_pattern:
- match_argument = tag_prefix + "*"
-
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s" % match_argument],
- cwd=root)
+ describe_out, rc = runner(GITS, [
+ "describe", "--tags", "--dirty", "--always", "--long",
+ "--match", f"{tag_prefix}[[:digit:]]*"
+ ], cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
@@ -692,6 +748,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -708,32 +797,21 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
- # HDMF: Support tag_prefix specified as a glob pattern
- if tag_is_glob_pattern:
- if not fnmatch.fnmatch(full_tag, tag_prefix):
- if verbose:
- fmt = "tag '%%s' doesn't match glob pattern '%%s'"
- print(fmt %% (full_tag, tag_prefix))
- pieces["error"] = ("tag '%%s' doesn't match glob pattern '%%s'"
- %% (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag
- else:
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%%s' doesn't start with prefix '%%s'"
- print(fmt %% (full_tag, tag_prefix))
- pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
- %% (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%%s' doesn't start with prefix '%%s'"
+ print(fmt %% (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
+ %% (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
@@ -744,13 +822,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+ pieces["distance"] = len(out.split()) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
@@ -788,19 +867,67 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver):
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%%d" %% pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%%d" %% (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%%d" %% pieces["distance"]
+ rendered = "0.post0.dev%%d" %% pieces["distance"]
return rendered
@@ -831,12 +958,41 @@ def render_pep440_post(pieces):
return rendered
+def render_pep440_post_branch(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -907,10 +1063,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -946,7 +1106,7 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for _ in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
@@ -981,22 +1141,21 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@@ -1004,10 +1163,14 @@ def git_get_keywords(versionfile_abs):
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -1020,11 +1183,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -1033,33 +1196,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
- # HDMF: Support tag_prefix specified as a glob pattern
- tag_is_glob_pattern = "*" in tag_prefix
- if tag_is_glob_pattern:
- if fnmatch.fnmatch(ref, tag_prefix):
- r = ref
- if verbose:
- print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- else:
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
+ if ref.startswith(tag_prefix):
+ r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
+ if verbose:
+ print("picking %s" % r)
+ return {"version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False, "error": None,
+ "date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
@@ -1069,7 +1225,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -1080,30 +1236,31 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=not verbose)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
- # HDMF: Support tag_prefix specified as a glob pattern
- tag_is_glob_pattern = "*" in tag_prefix
- match_argument = tag_prefix
- if not tag_is_glob_pattern:
- match_argument = tag_prefix + "*"
-
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s" % match_argument],
- cwd=root)
+ describe_out, rc = runner(GITS, [
+ "describe", "--tags", "--dirty", "--always", "--long",
+ "--match", f"{tag_prefix}[[:digit:]]*"
+ ], cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
@@ -1113,6 +1270,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -1129,32 +1319,21 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
- # HDMF: Support tag_prefix specified as a glob pattern
- if tag_is_glob_pattern:
- if not fnmatch.fnmatch(full_tag, tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't match glob pattern '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = ("tag '%s' doesn't match glob pattern '%s'"
- % (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag
- else:
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
- % (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
+ if not full_tag.startswith(tag_prefix):
+ if verbose:
+ fmt = "tag '%s' doesn't start with prefix '%s'"
+ print(fmt % (full_tag, tag_prefix))
+ pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+ % (full_tag, tag_prefix))
+ return pieces
+ pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
@@ -1165,19 +1344,20 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+ pieces["distance"] = len(out.split()) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
-def do_vcs_install(manifest_in, versionfile_source, ipy):
+def do_vcs_install(versionfile_source, ipy):
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
@@ -1186,31 +1366,31 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- files = [manifest_in, versionfile_source]
+ files = [versionfile_source]
if ipy:
files.append(ipy)
- try:
- me = __file__
- if me.endswith(".pyc") or me.endswith(".pyo"):
- me = os.path.splitext(me)[0] + ".py"
- versioneer_file = os.path.relpath(me)
- except NameError:
- versioneer_file = "versioneer.py"
- files.append(versioneer_file)
+ if "VERSIONEER_PEP518" not in globals():
+ try:
+ my_path = __file__
+ if my_path.endswith((".pyc", ".pyo")):
+ my_path = os.path.splitext(my_path)[0] + ".py"
+ versioneer_file = os.path.relpath(my_path)
+ except NameError:
+ versioneer_file = "versioneer.py"
+ files.append(versioneer_file)
present = False
try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
+ with open(".gitattributes", "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith(versionfile_source):
+ if "export-subst" in line.strip().split()[1:]:
+ present = True
+ break
+ except OSError:
pass
if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
+ with open(".gitattributes", "a+") as fobj:
+ fobj.write(f"{versionfile_source} export-subst\n")
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
@@ -1224,15 +1404,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
@@ -1241,7 +1420,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
+# This file was generated by 'versioneer.py' (0.28) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
@@ -1263,7 +1442,7 @@ def versions_from_file(filename):
try:
with open(filename) as f:
contents = f.read()
- except EnvironmentError:
+ except OSError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
@@ -1318,19 +1497,67 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver):
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%d" % (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
+ rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
@@ -1361,12 +1588,41 @@ def render_pep440_post(pieces):
return rendered
+def render_pep440_post_branch(pieces):
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -1437,10 +1693,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -1540,8 +1800,12 @@ def get_version():
return get_versions()["version"]
-def get_cmdclass():
- """Get the custom setuptools/distutils subclasses used by Versioneer."""
+def get_cmdclass(cmdclass=None):
+ """Get the custom setuptools subclasses used by Versioneer.
+
+ If the package uses a different cmdclass (e.g. one from numpy), it
+ should be provided as an argument.
+ """
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
@@ -1555,12 +1819,12 @@ def get_cmdclass():
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/warner/python-versioneer/issues/52
+ # Also see https://github.com/python-versioneer/python-versioneer/issues/52
- cmds = {}
+ cmds = {} if cmdclass is None else cmdclass.copy()
- # we add "version" to both distutils and setuptools
- from distutils.core import Command
+ # we add "version" to setuptools
+ from setuptools import Command
class cmd_version(Command):
description = "report generated version string"
@@ -1583,7 +1847,7 @@ def get_cmdclass():
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
- # we override "build_py" in both distutils and setuptools
+ # we override "build_py" in setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
@@ -1598,11 +1862,14 @@ def get_cmdclass():
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
+ # pip install -e . and setuptool/editable_wheel will invoke build_py
+ # but the build_py command is not expected to copy any files.
+
# we override different "build_py" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.build_py import build_py as _build_py
+ if 'build_py' in cmds:
+ _build_py = cmds['build_py']
else:
- from distutils.command.build_py import build_py as _build_py
+ from setuptools.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
@@ -1610,6 +1877,10 @@ def get_cmdclass():
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
+ if getattr(self, "editable_mode", False):
+ # During editable installs `.py` and data files are
+ # not copied to build_lib
+ return
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
@@ -1619,6 +1890,38 @@ def get_cmdclass():
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
+ if 'build_ext' in cmds:
+ _build_ext = cmds['build_ext']
+ else:
+ from setuptools.command.build_ext import build_ext as _build_ext
+
+ class cmd_build_ext(_build_ext):
+ def run(self):
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ _build_ext.run(self)
+ if self.inplace:
+ # build_ext --inplace will only build extensions in
+ # build/lib<..> dir with no _version.py to write to.
+ # As in place builds will already have a _version.py
+ # in the module dir, we do not need to write one.
+ return
+ # now locate _version.py in the new build/ directory and replace
+ # it with an updated value
+ if not cfg.versionfile_build:
+ return
+ target_versionfile = os.path.join(self.build_lib,
+ cfg.versionfile_build)
+ if not os.path.exists(target_versionfile):
+ print(f"Warning: {target_versionfile} does not exist, skipping "
+ "version update. This can happen if you are running build_ext "
+ "without first running build_py.")
+ return
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+ cmds["build_ext"] = cmd_build_ext
+
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
@@ -1653,9 +1956,9 @@ def get_cmdclass():
if 'py2exe' in sys.modules: # py2exe enabled?
try:
- from py2exe.distutils_buildexe import py2exe as _py2exe # py3
+ from py2exe.setuptools_buildexe import py2exe as _py2exe
except ImportError:
- from py2exe.build_exe import py2exe as _py2exe # py2
+ from py2exe.distutils_buildexe import py2exe as _py2exe
class cmd_py2exe(_py2exe):
def run(self):
@@ -1679,11 +1982,48 @@ def get_cmdclass():
})
cmds["py2exe"] = cmd_py2exe
+ # sdist farms its file list building out to egg_info
+ if 'egg_info' in cmds:
+ _egg_info = cmds['egg_info']
+ else:
+ from setuptools.command.egg_info import egg_info as _egg_info
+
+ class cmd_egg_info(_egg_info):
+ def find_sources(self):
+ # egg_info.find_sources builds the manifest list and writes it
+ # in one shot
+ super().find_sources()
+
+ # Modify the filelist and normalize it
+ root = get_root()
+ cfg = get_config_from_root(root)
+ self.filelist.append('versioneer.py')
+ if cfg.versionfile_source:
+ # There are rare cases where versionfile_source might not be
+ # included by default, so we must be explicit
+ self.filelist.append(cfg.versionfile_source)
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+
+ # The write method is hidden in the manifest_maker instance that
+ # generated the filelist and was thrown away
+ # We will instead replicate their final normalization (to unicode,
+ # and POSIX-style paths)
+ from setuptools import unicode_utils
+ normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/')
+ for f in self.filelist.files]
+
+ manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt')
+ with open(manifest_filename, 'w') as fobj:
+ fobj.write('\n'.join(normalized))
+
+ cmds['egg_info'] = cmd_egg_info
+
# we override different "sdist" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.sdist import sdist as _sdist
+ if 'sdist' in cmds:
+ _sdist = cmds['sdist']
else:
- from distutils.command.sdist import sdist as _sdist
+ from setuptools.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
@@ -1747,21 +2087,26 @@ SAMPLE_CONFIG = """
"""
-INIT_PY_SNIPPET = """
+OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
+INIT_PY_SNIPPET = """
+from . import {0}
+__version__ = {0}.get_versions()['version']
+"""
+
def do_setup():
- """Main VCS-independent setup function for installing Versioneer."""
+ """Do main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
- except (EnvironmentError, configparser.NoSectionError,
+ except (OSError, configparser.NoSectionError,
configparser.NoOptionError) as e:
- if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+ if isinstance(e, (OSError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1785,54 +2130,28 @@ def do_setup():
try:
with open(ipy, "r") as f:
old = f.read()
- except EnvironmentError:
+ except OSError:
old = ""
- if INIT_PY_SNIPPET not in old:
+ module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
+ snippet = INIT_PY_SNIPPET.format(module)
+ if OLD_SNIPPET in old:
+ print(" replacing boilerplate in %s" % ipy)
+ with open(ipy, "w") as f:
+ f.write(old.replace(OLD_SNIPPET, snippet))
+ elif snippet not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
- f.write(INIT_PY_SNIPPET)
+ f.write(snippet)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
ipy = None
- # Make sure both the top-level "versioneer.py" and versionfile_source
- # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
- # they'll be copied into source distributions. Pip won't be able to
- # install the package without this.
- manifest_in = os.path.join(root, "MANIFEST.in")
- simple_includes = set()
- try:
- with open(manifest_in, "r") as f:
- for line in f:
- if line.startswith("include "):
- for include in line.split()[1:]:
- simple_includes.add(include)
- except EnvironmentError:
- pass
- # That doesn't cover everything MANIFEST.in can do
- # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
- # it might give some false negatives. Appending redundant 'include'
- # lines is safe, though.
- if "versioneer.py" not in simple_includes:
- print(" appending 'versioneer.py' to MANIFEST.in")
- with open(manifest_in, "a") as f:
- f.write("include versioneer.py\n")
- else:
- print(" 'versioneer.py' already in MANIFEST.in")
- if cfg.versionfile_source not in simple_includes:
- print(" appending versionfile_source ('%s') to MANIFEST.in" %
- cfg.versionfile_source)
- with open(manifest_in, "a") as f:
- f.write("include %s\n" % cfg.versionfile_source)
- else:
- print(" versionfile_source already in MANIFEST.in")
-
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
+ do_vcs_install(cfg.versionfile_source, ipy)
return 0
@@ -1873,10 +2192,14 @@ def scan_setup_py():
return errors
+def setup_command():
+ """Set up Versioneer and exit with appropriate error code."""
+ errors = do_setup()
+ errors += scan_setup_py()
+ sys.exit(1 if errors else 0)
+
+
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
- errors = do_setup()
- errors += scan_setup_py()
- if errors:
- sys.exit(1)
+ setup_command()
View it on GitLab: https://salsa.debian.org/med-team/hdmf/-/commit/c3f9d2ad67e24e149e9223fe12b11010478fa436
--
View it on GitLab: https://salsa.debian.org/med-team/hdmf/-/commit/c3f9d2ad67e24e149e9223fe12b11010478fa436
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20221123/a4f7205e/attachment-0001.htm>
More information about the debian-med-commit
mailing list