[med-svn] [Git][med-team/python-hmmlearn][upstream] New upstream version 0.3.2
Michael R. Crusoe (@crusoe)
gitlab@salsa.debian.org
Sat Oct 5 09:19:46 BST 2024
Michael R. Crusoe pushed to branch upstream at Debian Med / python-hmmlearn
Commits:
2657d750 by Michael R. Crusoe at 2024-10-04T18:09:09+02:00
New upstream version 0.3.2
- - - - -
10 changed files:
- .github/workflows/build.yml
- .readthedocs.yaml
- CHANGES.rst
- doc/source/tutorial.rst
- lib/hmmlearn/base.py
- lib/hmmlearn/hmm.py
- lib/hmmlearn/tests/test_gaussian_hmm.py
- lib/hmmlearn/tests/test_variational_gaussian.py
- lib/hmmlearn/vhmm.py
- setup.py
Changes:
=====================================
.github/workflows/build.yml
=====================================
@@ -17,24 +17,24 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- cibw-build: ['cp36-*', 'cp37-*', 'cp38-*', 'cp39-*', 'cp310-*', 'cp311-*']
+ cibw-build: ['cp38-*', 'cp39-*', 'cp310-*', 'cp311-*', 'cp312-*']
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: docker/setup-qemu-action@v2
if: runner.os == 'Linux'
- - uses: pypa/cibuildwheel@v2.12.1
+ - uses: pypa/cibuildwheel@v2.16.5
env:
- CIBW_ENVIRONMENT: SETUPTOOLS_SCM_PRETEND_VERSION=0.3.0
+ CIBW_ENVIRONMENT: SETUPTOOLS_SCM_PRETEND_VERSION=0.3.2
CIBW_BUILD: ${{ matrix.cibw-build }}
CIBW_SKIP: '*-musllinux_*'
CIBW_TEST_SKIP: '*-macosx_universal2:arm64'
CIBW_ARCHS_LINUX: 'x86_64 aarch64'
CIBW_ARCHS_MACOS: 'x86_64 universal2'
CIBW_ARCHS_WINDOWS: 'AMD64'
- CIBW_TEST_REQUIRES: pytest
- CIBW_TEST_COMMAND: python -mpytest --pyargs hmmlearn.tests
+ CIBW_TEST_REQUIRES: pytest==7.4.4
+ CIBW_TEST_COMMAND: python -mpytest --pyargs hmmlearn.tests {project}/doc
- uses: actions/upload-artifact@v3
with:
name: wheels
=====================================
.readthedocs.yaml
=====================================
@@ -1,5 +1,9 @@
version: 2
+build:
+ os: "ubuntu-22.04"
+ tools:
+ python: "3"
python:
install:
- path: .
=====================================
CHANGES.rst
=====================================
@@ -3,6 +3,22 @@ hmmlearn Changelog
Here you can see the full list of changes between each hmmlearn release.
+Version 0.3.2
+-------------
+
+Released on March 1st, 2023.
+
+- Update the CI/CD pipelines that were troublesome
+
+Version 0.3.1
+-------------
+
+Released on March 1st, 2023.
+
+- Support Python 3.8-3.12
+- Improve the stability of the test suite and ensure the documentation examples are covered.
+- Documentation improvements throughout.
+
Version 0.3.0
-------------
=====================================
doc/source/tutorial.rst
=====================================
@@ -172,7 +172,7 @@ You can use the :attr:`~.BaseHMM.monitor_` attribute to diagnose convergence:
>>> remodel.monitor_
ConvergenceMonitor(
history=[...],
- iter=15,
+ iter=...,
n_iter=100,
tol=0.01,
verbose=False,
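The "iter=..." ellipsis above keeps the doctest stable, since the exact iteration count at convergence varies between runs. As a minimal sketch (not part of this commit; the model and data are invented), the monitor can be inspected like so:

import numpy as np
from hmmlearn import hmm

rng = np.random.RandomState(42)
X = rng.normal(size=(500, 1))  # toy 1-D observations

model = hmm.GaussianHMM(n_components=3, n_iter=100, tol=0.01)
model.fit(X)

# monitor_ records the per-iteration log likelihood; converged becomes True
# once the gain between iterations drops below tol (or n_iter is exhausted).
print(model.monitor_.converged)
print(list(model.monitor_.history))  # most recent log-likelihood values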
=====================================
lib/hmmlearn/base.py
=====================================
@@ -141,6 +141,12 @@ class _AbstractHMM(BaseEstimator):
Number of states in the model.
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
random_state: RandomState or an int seed, optional
A random number generator instance.
n_iter : int, optional
@@ -302,8 +308,14 @@ class _AbstractHMM(BaseEstimator):
lengths : array-like of integers, shape (n_sequences, ), optional
Lengths of the individual sequences in ``X``. The sum of
these should be ``n_samples``.
- algorithm : string
- Decoder algorithm. Must be one of "viterbi" or "map".
+ algorithm : {"viterbi", "map"}, optional
+ Decoder algorithm.
+
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
If not given, :attr:`decoder` is used.
Returns
@@ -546,9 +558,11 @@ class _AbstractHMM(BaseEstimator):
s = getattr(self, name).sum(axis=-1)
if not np.allclose(s, 1):
raise ValueError(
- f"{name} must sum to 1 (got {s:.4f})" if s.ndim == 0 else
- f"{name} rows must sum to 1 (got {s})" if s.ndim == 1 else
- "Expected 1D or 2D array")
+ f"{name} must sum to 1 (got {s:.4f})"
+ if s.ndim == 0
+ else f"{name} rows must sum to 1 (got row sums of {s})"
+ if s.ndim == 1
+ else "Expected 1D or 2D array")
def _check(self):
"""
@@ -809,6 +823,12 @@ class BaseHMM(_AbstractHMM):
of the transition probabilities :attr:`transmat_`.
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
random_state: RandomState or an int seed, optional
A random number generator instance.
n_iter : int, optional
@@ -929,9 +949,11 @@ class BaseHMM(_AbstractHMM):
s = getattr(self, name).sum(axis=-1)
if not np.allclose(s, 1):
raise ValueError(
- f"{name} must sum to 1 (got {s:.4f})" if s.ndim == 0 else
- f"{name} rows must sum to 1 (got {s})" if s.ndim == 1 else
- "Expected 1D or 2D array")
+ f"{name} must sum to 1 (got {s:.4f})"
+ if s.ndim == 0
+ else f"{name} rows must sum to 1 (got row sums of {s})"
+ if s.ndim == 1
+ else "Expected 1D or 2D array")
def _check(self):
"""
@@ -1002,6 +1024,7 @@ class BaseHMM(_AbstractHMM):
n_params = sum(self._get_n_fit_scalars_per_param().values())
return -2 * self.score(X, lengths=lengths) + n_params * np.log(len(X))
+
_BaseHMM = BaseHMM # Backcompat name, will be deprecated in the future.
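The "viterbi"/"map" distinction documented above can be demonstrated with a small sketch (not from this commit; all parameters are invented):

import numpy as np
from hmmlearn import hmm

rng = np.random.RandomState(0)
model = hmm.GaussianHMM(n_components=2)
model.startprob_ = np.array([0.6, 0.4])
model.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
model.means_ = np.array([[0.0], [3.0]])
model.covars_ = np.ones((2, 1))
X = np.concatenate([rng.normal(0, 1, (50, 1)), rng.normal(3, 1, (50, 1))])

# "viterbi": the single most likely sequence of states given all emissions.
logprob, viterbi_path = model.decode(X, algorithm="viterbi")
# "map": the individually most likely state at each step (forward-backward).
_, map_path = model.decode(X, algorithm="map")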
=====================================
lib/hmmlearn/hmm.py
=====================================
@@ -82,6 +82,12 @@ class CategoricalHMM(_emissions.BaseCategoricalHMM, BaseHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
@@ -239,6 +245,12 @@ class GaussianHMM(_emissions.BaseGaussianHMM, BaseHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
@@ -298,7 +310,7 @@ class GaussianHMM(_emissions.BaseGaussianHMM, BaseHMM):
if self._needs_init("m", "means_"):
kmeans = cluster.KMeans(n_clusters=self.n_components,
random_state=self.random_state,
- n_init=1) # sklearn <1.4 backcompat.
+ n_init=10) # sklearn <1.4 backcompat.
kmeans.fit(X)
self.means_ = kmeans.cluster_centers_
if self._needs_init("c", "covars_"):
@@ -469,6 +481,12 @@ class GMMHMM(_emissions.BaseGMMHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
@@ -844,6 +862,12 @@ class MultinomialHMM(_emissions.BaseMultinomialHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
@@ -967,6 +991,12 @@ class PoissonHMM(_emissions.BasePoissonHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
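The n_init change above pins the k-means behaviour across scikit-learn versions (the default moved to "auto" around scikit-learn 1.4) and, by keeping the best of ten restarts, makes the initial state means less sensitive to a single unlucky seeding. Roughly, and only as a sketch of the idea rather than the hmmlearn code itself, the means initialization amounts to:

import numpy as np
from sklearn import cluster

X = np.random.RandomState(1).normal(size=(200, 2))  # invented observations
# Run k-means ten times and keep the clustering with the lowest inertia.
kmeans = cluster.KMeans(n_clusters=3, random_state=1, n_init=10)
kmeans.fit(X)
initial_means = kmeans.cluster_centers_  # shape (n_components, n_features)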
=====================================
lib/hmmlearn/tests/test_gaussian_hmm.py
=====================================
@@ -119,12 +119,18 @@ class GaussianHMMTestMixin:
@pytest.mark.parametrize("implementation", ["scaling", "log"])
def test_fit_ignored_init_warns(self, implementation, caplog):
+ # This test will occasionally be flaky in learning the model.
+ # What is important here is that the expected log message is produced;
+ # we can test convergence properties elsewhere.
h = hmm.GaussianHMM(self.n_components, self.covariance_type,
implementation=implementation)
h.startprob_ = self.startprob
- h.fit(np.random.randn(100, self.n_components))
- assert len(caplog.records) == 1, caplog
- assert "will be overwritten" in caplog.records[0].getMessage()
+ h.fit(self.prng.randn(100, self.n_components))
+ found = False
+ for record in caplog.records:
+ if "will be overwritten" in record.getMessage():
+ found = True
+ assert found, "Did not find expected warning message"
@pytest.mark.parametrize("implementation", ["scaling", "log"])
def test_fit_too_little_data(self, implementation, caplog):
@@ -135,7 +141,7 @@ class GaussianHMMTestMixin:
h.transmat_ = self.transmat
h.means_ = 20 * self.means
h.covars_ = np.maximum(self.covars, 0.1)
- h._init(np.random.randn(5, self.n_components), 5)
+ h._init(self.prng.randn(5, self.n_components), 5)
assert len(caplog.records) == 1
assert "degenerate solution" in caplog.records[0].getMessage()
@@ -182,7 +188,13 @@ class GaussianHMMTestMixin:
@pytest.mark.parametrize("implementation", ["scaling", "log"])
def test_fit_with_priors(self, implementation, init_params='mc',
- params='stmc', n_iter=5):
+ params='stmc', n_iter=20):
+ # We have a few options to make this a robust test, such as:
+ # a. increase the amount of training data to ensure convergence
+ # b. only learn some of the parameters (simplify the problem)
+ # c. increase the number of iterations
+ #
+ # (c) seems not to affect the CI/CD time too much.
startprob_prior = 10 * self.startprob + 2.0
transmat_prior = 10 * self.transmat + 2.0
means_prior = self.means
@@ -225,15 +237,20 @@ class GaussianHMMTestMixin:
assert_log_likelihood_increasing(h_learn, X, lengths, n_iter)
# Make sure we've converged to the right parameters.
+ # In general, to account for state switching,
+ # compare sorted values.
# a) means
- assert_allclose(sorted(h.means_.tolist()),
- sorted(h_learn.means_.tolist()),
+ assert_allclose(sorted(h.means_.ravel().tolist()),
+ sorted(h_learn.means_.ravel().tolist()),
0.01)
# b) covars are hard to estimate precisely from a relatively small
# sample, thus the large threshold
+
+ # account for how the covariances are stored in compressed form
+ orig = np.broadcast_to(h._covars_, h_learn._covars_.shape)
assert_allclose(
- *np.broadcast_arrays(sorted(h._covars_.tolist()),
- sorted(h_learn._covars_.tolist())),
+ sorted(orig.ravel().tolist()),
+ sorted(h_learn._covars_.ravel().tolist()),
10)
@@ -295,7 +312,7 @@ class TestGaussianHMMWithDiagonalCovars(GaussianHMMTestMixin):
def test_covar_is_writeable(self, implementation):
h = hmm.GaussianHMM(n_components=1, covariance_type="diag",
init_params="c", implementation=implementation)
- X = np.random.normal(size=(1000, 5))
+ X = self.prng.normal(size=(1000, 5))
h._init(X, 1000)
# np.diag returns a read-only view of the array in NumPy 1.9.X.
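Two themes run through the test changes above: seeding through self.prng rather than the global np.random, and comparing sorted, flattened parameters because EM may recover the hidden states in any order; the test_variational_gaussian.py change below applies the same sorting idea. A standalone illustration of the permutation-invariant comparison (the arrays are invented):

import numpy as np
from numpy.testing import assert_allclose

true_means = np.array([[0.0, 1.0], [5.0, 6.0]])
learned_means = np.array([[5.02, 5.97], [0.01, 1.03]])  # states swapped

# Sorting the flattened values makes the check invariant to state order.
assert_allclose(sorted(true_means.ravel().tolist()),
                sorted(learned_means.ravel().tolist()),
                atol=0.1)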
=====================================
lib/hmmlearn/tests/test_variational_gaussian.py
=====================================
@@ -74,7 +74,7 @@ class _TestGaussian:
vi_uniform_startprob_and_transmat(model, lengths)
model.fit(sequences, lengths)
# Perform one check that we are converging to the right answer
- assert (model.means_posterior_[-1][0]
+ assert (sorted(model.means_posterior_.ravel())[3]
== pytest.approx(self.test_fit_mcgrory_titterington1d_mean)), \
model.means_posterior_
=====================================
lib/hmmlearn/vhmm.py
=====================================
@@ -89,6 +89,12 @@ class VariationalCategoricalHMM(BaseCategoricalHMM, VariationalBaseHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
@@ -382,6 +388,12 @@ class VariationalGaussianHMM(BaseGaussianHMM, VariationalBaseHMM):
algorithm : {"viterbi", "map"}, optional
Decoder algorithm.
+ - "viterbi": finds the most likely sequence of states, given all
+ emissions.
+ - "map" (also known as smoothing or forward-backward): finds the
+ sequence of the individual most-likely states, given all
+ emissions.
+
random_state: RandomState or an int seed, optional
A random number generator instance.
=====================================
setup.py
=====================================
@@ -52,7 +52,7 @@ setup(
package_dir={"": "lib"},
ext_modules=[Extension("", [])],
package_data={},
- python_requires=">=3.6",
+ python_requires=">=3.8",
setup_requires=[
"pybind11>=2.6",
"setuptools_scm>=3.3", # fallback_version.
View it on GitLab: https://salsa.debian.org/med-team/python-hmmlearn/-/commit/2657d750a9ed8176eb5325954757ac453944c0fa