[med-svn] [Git][med-team/python-cooler][master] 2 commits: New upstream version 0.10.3

Alexandre Detiste (@detiste-guest) gitlab@salsa.debian.org
Sun Jan 12 00:11:49 GMT 2025



Alexandre Detiste pushed to branch master at Debian Med / python-cooler


Commits:
4f0beee1 by Alexandre Detiste at 2025-01-12T01:10:00+01:00
New upstream version 0.10.3
- - - - -
770de35d by Alexandre Detiste at 2025-01-12T01:10:57+01:00
Update upstream source from tag 'upstream/0.10.3'

Update to upstream version '0.10.3'
with Debian dir fb94f18486a19b1ac0a1c73f13993b6f3a6396d4
- - - - -


30 changed files:

- .github/workflows/ci.yml
- .pre-commit-config.yaml
- CHANGES.md
- docs/make_cli_rst.py
- pyproject.toml
- src/cooler/__init__.py
- src/cooler/balance.py → src/cooler/_balance.py
- src/cooler/reduce.py → src/cooler/_reduce.py
- src/cooler/_typing.py
- src/cooler/cli/__init__.py
- src/cooler/cli/balance.py
- src/cooler/cli/coarsen.py
- src/cooler/cli/merge.py
- src/cooler/cli/zoomify.py
- src/cooler/core/__init__.py
- src/cooler/core/_rangequery.py
- src/cooler/core/_selectors.py
- src/cooler/core/_tableops.py
- src/cooler/create/__init__.py
- + src/cooler/create/_constants.py
- src/cooler/create/_create.py
- src/cooler/create/_ingest.py
- src/cooler/fileops.py
- src/cooler/parallel.py
- − src/cooler/tools.py
- src/cooler/util.py
- tests/test_balance.py
- tests/test_fileops.py
- tests/test_reduce.py
- tests/test_util.py


Changes:

=====================================
.github/workflows/ci.yml
=====================================
@@ -12,7 +12,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
         include:
           - os: windows-latest
             python-version: "3.12"


=====================================
.pre-commit-config.yaml
=====================================
@@ -1,7 +1,7 @@
 exclude: '^scripts'
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: check-ast
       - id: end-of-file-fixer
@@ -10,7 +10,7 @@ repos:
       - id: check-case-conflict
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.5
+    rev: v0.8.3
     hooks:
       - id: ruff
-        args: [--fix, --exit-non-zero-on-fix]
+        args: [--fix, --show-fixes, --exit-non-zero-on-fix]


=====================================
CHANGES.md
=====================================
@@ -1,13 +1,19 @@
 # Release notes #
 
-## [v0.10.2](https://github.com/open2c/cooler/compare/v0.10.0...v0.10.1)
+## [v0.10.3](https://github.com/open2c/cooler/compare/v0.10.2...v0.10.3)
 
-## Maintenance
+### Maintenance
+* Update to support numpy>=1.26 and numpy 2.x by @lrauschning in https://github.com/open2c/cooler/pull/446
+* Dropped support for numpy < 1.26 and Python 3.8
+
+## [v0.10.2](https://github.com/open2c/cooler/compare/v0.10.1...v0.10.2)
+
+### Maintenance
 * NumPy 2.0 was released. Pin `numpy < 2` until we achieve compatibility.
 
 ## [v0.10.1](https://github.com/open2c/cooler/compare/v0.10.0...v0.10.1)
 
-## Bug fixes
+### Bug fixes
 * fix: Pixel annotation with partial bin table in https://github.com/open2c/cooler/pull/426
 
 ## [v0.10.0](https://github.com/open2c/cooler/compare/v0.9.3...v0.10.0)


=====================================
docs/make_cli_rst.py
=====================================
@@ -52,7 +52,7 @@ def _get_help_record(opt):
     if opt.default is not None and opt.show_default:
         extra.append(
             "default: {}".format(
-                ", ".join("%s" % d for d in opt.default)
+                ", ".join(f"{d}" for d in opt.default)
                 if isinstance(opt.default, (list, tuple))
                 else opt.default
             )
@@ -60,7 +60,7 @@ def _get_help_record(opt):
     if opt.required:
         extra.append("required")
     if extra:
-        help = "{}[{}]".format(help and help + "  " or "", "; ".join(extra))
+        help = "{}[{}]".format((help and help + "  ") or "", "; ".join(extra))
 
     return ", ".join(rv), help
 
@@ -161,7 +161,7 @@ def _format_envvar(param):
 
 def _format_envvars(ctx):
     """Format all envvars for a `click.Command`."""
-    params = [x for x in ctx.command.params if getattr(x, "envvar")]
+    params = [x for x in ctx.command.params if x.envvar]
 
     for param in params:
         yield ".. _{command_name}-{param_name}-{envvar}:".format(


=====================================
pyproject.toml
=====================================
@@ -4,9 +4,9 @@ build-backend = "hatchling.build"
 
 [project]
 name = "cooler"
-version = "0.10.2"
+version = "0.10.3"
 description = "Sparse binary format for genomic interaction matrices."
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 license = {text = "BSD-3-Clause"}
 authors = [
   {name = "Nezar Abdennur", email = "nabdennur at gmail.com"},
@@ -29,7 +29,6 @@ classifiers = [
     "Operating System :: OS Independent",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
@@ -39,7 +38,7 @@ classifiers = [
 readme = "README.md"
 
 dependencies = [
-    "numpy>=1.9, <2",
+    "numpy>=1.26,<3",
     "scipy>=0.16",
     "pandas>1.5",
     "h5py>=2.5",
@@ -59,7 +58,7 @@ all = [
     "ipytree>=0.2.2",
     "ipywidgets>=8.0.0",
     "matplotlib",
-    "pypairix; platform_system != 'Windows'",
+#    "pypairix; platform_system != 'Windows'", # doesn't compile, see 4dn-dcic/pairix#79
     "psutil",
     "pysam; platform_system != 'Windows'",
 ]
@@ -96,15 +95,15 @@ cooler = "cooler.cli:cli"
 path = "src/cooler/_version.py"
 
 [tool.ruff]
-target-version = "py38"
+src = ["src"]
 exclude = [
     ".venv",
-    "__init__.py",
     "__main__.py",
 ]
 
 [tool.ruff.lint]
 extend-select = [
+    "B",  # bugbear
     # "C",  # complexity
     # "D",  # pydocstyle
     "E",  # style errors
@@ -122,7 +121,19 @@ known-first-party = ["cooler"]
 convention = "numpy"
 
 [tool.pytest.ini_options]
-addopts = "--cov cooler --cov-config pyproject.toml --cov-report term-missing --cov-report html --cov-report=xml"
+minversion = "7"
+log_cli_level = "INFO"
+xfail_strict = true
+addopts = [
+    "-ra",
+    "--strict-config",
+    "--strict-markers",
+    "--cov=cooler",
+    "--cov-config=pyproject.toml",
+    "--cov-report=term-missing",
+    "--cov-report=html",
+    "--cov-report=xml",
+]
 filterwarnings = ["ignore::PendingDeprecationWarning"]
 testpaths = ["tests"]
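
Per the CHANGES.md entries above, the dependency floor moves from `numpy>=1.9,<2` to `numpy>=1.26,<3`, and `requires-python` rises to 3.9 with CPython 3.8 past end of life. A quick runtime sanity check of the new window, assuming the `packaging` library is available:

    import numpy as np
    from packaging.version import Version

    # Mirrors the pyproject constraint "numpy>=1.26,<3"
    assert Version("1.26") <= Version(np.__version__) < Version("3"), np.__version__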
 


=====================================
src/cooler/__init__.py
=====================================
@@ -4,18 +4,36 @@ Cooler
 
 A cool place to store your Hi-C.
 
-:copyright: (c) 2016 Massachusetts Institute of Technology
 :author: Nezar Abdennur
-:license: BSD
+:license: BSD-3-Clause
 
 """
-from . import balance, create, fileops, parallel, tools
+from . import fileops, parallel
+from ._balance import balance_cooler
 from ._logging import get_verbosity_level, set_verbosity_level
+from ._reduce import coarsen_cooler, merge_coolers, zoomify_cooler
 from ._version import __format_version__, __version__
 from .api import Cooler, annotate
-from .balance import balance_cooler
 from .create import create_cooler, create_scool, rename_chroms
-from .reduce import coarsen_cooler, merge_coolers, zoomify_cooler
 from .util import binnify, fetch_chromsizes, read_chromsizes
 
-ice = balance  # alias
+__all__ = [
+    "Cooler",
+    "__format_version__",
+    "__version__",
+    "annotate",
+    "balance_cooler",
+    "binnify",
+    "coarsen_cooler",
+    "create_cooler",
+    "create_scool",
+    "fetch_chromsizes",
+    "fileops",
+    "get_verbosity_level",
+    "merge_coolers",
+    "parallel",
+    "read_chromsizes",
+    "rename_chroms",
+    "set_verbosity_level",
+    "zoomify_cooler",
+]
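
With the `ice = balance` alias gone and the `balance`/`reduce` modules renamed to private `_balance`/`_reduce`, the explicit `__all__` now pins the public surface (and what `from cooler import *` exposes). A minimal usage sketch, with a hypothetical file path:

    import cooler

    clr = cooler.Cooler("test.cool")          # hypothetical .cool file
    bias, stats = cooler.balance_cooler(clr)  # formerly cooler.ice / cooler.balance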


=====================================
src/cooler/balance.py → src/cooler/_balance.py
=====================================
@@ -111,7 +111,9 @@ def _balance_genomewide(
             break
     else:
         warnings.warn(
-            "Iteration limit reached without convergence.", ConvergenceWarning
+            "Iteration limit reached without convergence.",
+            ConvergenceWarning,
+            stacklevel=1,
         )
 
     scale = nzmarg.mean()
@@ -180,6 +182,7 @@ def _balance_cisonly(
             warnings.warn(
                 f"Iteration limit reached without convergence on {chroms[cid]}.",
                 ConvergenceWarning,
+                stacklevel=1,
             )
 
         scale = nzmarg.mean()
@@ -244,7 +247,9 @@ def _balance_transonly(
             break
     else:
         warnings.warn(
-            "Iteration limit reached without convergence.", ConvergenceWarning
+            "Iteration limit reached without convergence.",
+            ConvergenceWarning,
+            stacklevel=1,
         )
 
     scale = nzmarg.mean()
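
The recurring change in this release is threading `stacklevel` through `warnings.warn`, presumably to satisfy the bugbear rule (B028) newly enabled in the ruff config above: `stacklevel=1` spells out the default attribution, while `stacklevel=2` points the warning at the caller. A minimal illustration with made-up function names:

    import warnings

    def _check_convergence():
        # stacklevel=2 attributes the warning to the caller's line,
        # not to this internal helper.
        warnings.warn("Iteration limit reached without convergence.",
                      RuntimeWarning, stacklevel=2)

    def balance():              # hypothetical public entry point
        _check_convergence()    # the reported location is this call site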


=====================================
src/cooler/reduce.py → src/cooler/_reduce.py
=====================================
@@ -4,7 +4,8 @@ import math
 import warnings
 from bisect import bisect_right
 from collections import OrderedDict, defaultdict
-from typing import Any, Iterator, Literal
+from collections.abc import Iterator
+from typing import Any, Literal
 
 import h5py
 import multiprocess as mp
@@ -19,7 +20,7 @@ from .create import ContactBinner, create
 from .parallel import lock
 from .util import GenomeSegmentation, parse_cooler_uri
 
-__all__ = ["merge_coolers", "coarsen_cooler", "zoomify_cooler"]
+__all__ = ["coarsen_cooler", "merge_coolers", "zoomify_cooler"]
 
 
 logger = get_logger(__name__)
@@ -125,7 +126,8 @@ def merge_breakpoints(
     if n_over > 0:
         warnings.warn(
             f"{n_over} merge epochs will require buffering more than {bufsize} "
-            f"pixel records, with as many as {nrecords_per_epoch.max():g}."
+            f"pixel records, with as many as {nrecords_per_epoch.max():g}.",
+            stacklevel=2,
         )
 
     return bin1_partition, cum_nrecords
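
Importing `Iterator` from `collections.abc` instead of `typing` follows PEP 585: on Python 3.9+, the ABCs are subscriptable directly and the `typing` aliases are deprecated. Sketch:

    from collections.abc import Iterator

    def chunks(n: int, size: int) -> Iterator[range]:
        # subscriptable ABC; no typing.Iterator needed on 3.9+
        for start in range(0, n, size):
            yield range(start, min(start + size, n))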


=====================================
src/cooler/_typing.py
=====================================
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
-from typing import Callable, Dict, Iterable, Optional, Tuple, TypeVar, Union
+from collections.abc import Iterable
+from typing import Callable, Optional, TypeVar, Union
 
 import numpy as np
 import pandas as pd
@@ -8,8 +9,8 @@ import pandas as pd
 T = TypeVar('T')
 U = TypeVar('U')
 MapFunctor = Callable[[Callable[[T], U], Iterable[T]], Iterable[U]]
-GenomicRangeSpecifier = Union[str , Tuple[str, Optional[int], Optional[int]]]
-GenomicRangeTuple = Tuple[str, int, int]
-Tabular = Union[pd.DataFrame, Dict[str, np.ndarray]]
+GenomicRangeSpecifier = Union[str , tuple[str, Optional[int], Optional[int]]]
+GenomicRangeTuple = tuple[str, int, int]
+Tabular = Union[pd.DataFrame, dict[str, np.ndarray]]
 
-__all__ = ["MapFunctor", "GenomicRangeSpecifier", "GenomicRangeTuple", "Tabular"]
+__all__ = ["GenomicRangeSpecifier", "GenomicRangeTuple", "MapFunctor", "Tabular"]


=====================================
src/cooler/cli/__init__.py
=====================================
@@ -1,4 +1,4 @@
-import logging
+import atexit
 import sys
 
 import click
@@ -10,10 +10,100 @@ CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]}
 
 
 class UnsortedGroup(click.Group):
+    """A click Group that lists commands in the order they were added."""
+
     def list_commands(self, ctx):
         return list(self.commands)
 
 
+def setup_psutil_at_exit(logger):
+    try:
+        import psutil
+    except ImportError:
+        logger.warning("Install psutil to see process information.")
+        return
+
+    attrs_available = {
+        x
+        for x in dir(psutil.Process)
+        if not x.startswith("_")
+        and x
+        not in {
+            "send_signal",
+            "suspend",
+            "resume",
+            "terminate",
+            "kill",
+            "wait",
+            "is_running",
+            "as_dict",
+            "parent",
+            "parents",
+            "children",
+            "rlimit",
+            "memory_info_ex",
+            "oneshot",
+        }
+    }
+
+    attrs = [
+        "cmdline",
+        "connections",
+        "cpu_affinity",
+        "cpu_num",
+        "cpu_percent",
+        "cpu_times",
+        "create_time",
+        "cwd",
+        "environ",
+        "exe",
+        "gids",
+        "io_counters",
+        "ionice",
+        "memory_full_info",
+        "memory_info",
+        "memory_maps",
+        "memory_percent",
+        "name",
+        "nice",
+        "num_ctx_switches",
+        "num_fds",
+        "num_threads",
+        "open_files",
+        "pid",
+        "ppid",
+        "status",
+        "terminal",
+        # "threads",  # RuntimeError on MacOS Big Sur
+        "uids",
+        "username",
+    ]
+
+    attrs = [attr for attr in attrs if attr in attrs_available]
+
+    @atexit.register
+    def process_dump_at_exit():
+        try:
+            process = psutil.Process()
+            process_info = process.as_dict(attrs, ad_value="")
+            for attr in attrs:
+                logger.debug(f"PSINFO:'{attr}': {process_info[attr]}")
+        except psutil.NoSuchProcess:
+            logger.error("PSINFO: Error - Process no longer exists.")
+
+
+def setup_postmortem_debugger():
+    import pdb
+    import traceback
+
+    def _excepthook(exc_type, value, tb):
+        traceback.print_exception(exc_type, value, tb)
+        print()
+        pdb.pm()
+
+    sys.excepthook = _excepthook
+
+
 @click.version_option(__version__, "-V", "--version")
 @click.group(context_settings=CONTEXT_SETTINGS, cls=UnsortedGroup)
 @click.option("-v", "--verbose", help="Verbose logging.", count=True)
@@ -35,92 +125,15 @@ def cli(verbose, debug):
 
     if verbose >= 2:  # pragma: no cover
         # Dump process info at exit
-        try:
-            import atexit
-
-            import psutil
-
-            attrs_available = {
-                x for x in dir(psutil.Process)
-                if not x.startswith('_')
-                and x not in {
-                    'send_signal', 'suspend',
-                    'resume', 'terminate', 'kill', 'wait',
-                    'is_running', 'as_dict', 'parent', 'parents',
-                    'children', 'rlimit',
-                    'memory_info_ex', 'oneshot'
-                }
-            }
-
-            attrs = [
-                attr for attr in [
-                    "cmdline",
-                    'connections',
-                    "cpu_affinity",
-                    "cpu_num",
-                    "cpu_percent",
-                    "cpu_times",
-                    "create_time",
-                    "cwd",
-                    'environ',
-                    "exe",
-                    'gids',
-                    "io_counters",
-                    "ionice",
-                    "memory_full_info",
-                    'memory_info',
-                    'memory_maps',
-                    "memory_percent",
-                    "name",
-                    "nice",
-                    "num_ctx_switches",
-                    "num_fds",
-                    "num_threads",
-                    "open_files",
-                    "pid",
-                    "ppid",
-                    "status",
-                    "terminal",
-                    # "threads",  # RuntimeError on MacOS Big Sur
-                    "uids",
-                    "username",
-                ]
-                if attr in attrs_available
-            ]
-
-            @atexit.register
-            def process_dump_at_exit():
-                try:
-                    process = psutil.Process()
-                    process_info = process.as_dict(attrs, ad_value="")
-                    for attr in attrs:
-                        logger.debug(
-                            f"PSINFO:'{attr}': {process_info[attr]}"
-                        )
-                except psutil.NoSuchProcess:
-                    logger.error("PSINFO: Error - Process no longer exists.")
-
-        except ImportError:
-            logger.warning("Install psutil to see process information.")
+        setup_psutil_at_exit(logger)
 
     if debug:  # pragma: no cover
         # Set hook for postmortem debugging
-        import traceback
-
-        try:
-            import ipdb as pdb
-        except ImportError:
-            import pdb
-
-        def _excepthook(exc_type, value, tb):
-            traceback.print_exception(exc_type, value, tb)
-            print()
-            pdb.pm()
-
-        sys.excepthook = _excepthook
+        setup_postmortem_debugger()
 
 
-from . import (
+# Load and register cli subcommands
+from . import (  # noqa: E402,F401
     balance,
     cload,
     coarsen,
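
The net effect of this hunk: roughly ninety inline lines under `verbose >= 2` and `debug` collapse into two calls to the module-level helpers defined above, which also makes the hooks testable in isolation. A hedged sketch of exercising the postmortem hook directly (the failing function is illustrative):

    import cooler.cli

    cooler.cli.setup_postmortem_debugger()

    def boom():
        raise ValueError("inspect me")

    boom()  # traceback prints, then pdb.pm() opens at the raise site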


=====================================
src/cooler/cli/balance.py
=====================================
@@ -6,7 +6,7 @@ import numpy as np
 import pandas as pd
 from multiprocess import Pool
 
-from .. import ice
+from .._balance import balance_cooler
 from ..api import Cooler
 from ..util import bedslice, parse_cooler_uri
 from . import cli, get_logger
@@ -241,7 +241,7 @@ def balance(
         else:
             map_ = map
 
-        bias, stats = ice.iterative_correction(
+        bias, stats = balance_cooler(
             clr,
             chunksize=chunksize,
             cis_only=cis_only,


=====================================
src/cooler/cli/coarsen.py
=====================================
@@ -2,8 +2,8 @@ import os.path as op
 
 import click
 
+from .._reduce import coarsen_cooler
 from ..parallel import lock
-from ..reduce import coarsen_cooler
 from ..util import parse_cooler_uri
 from . import cli
 from ._util import parse_field_param


=====================================
src/cooler/cli/merge.py
=====================================
@@ -1,6 +1,6 @@
 import click
 
-from ..reduce import merge_coolers
+from .._reduce import merge_coolers
 from . import cli
 from ._util import parse_field_param
 


=====================================
src/cooler/cli/zoomify.py
=====================================
@@ -5,13 +5,13 @@ from math import ceil
 import click
 
 from .. import api
-from ..parallel import lock
-from ..reduce import (
+from .._reduce import (
     HIGLASS_TILE_DIM,
     legacy_zoomify,
     preferred_sequence,
     zoomify_cooler,
 )
+from ..parallel import lock
 from ..util import parse_cooler_uri
 from . import cli, get_logger
 from ._util import parse_field_param
@@ -194,10 +194,11 @@ def zoomify(
         # Parse and expand user-provided resolutions
         resolutions, rstring = [], resolutions
         for res in [s.strip().lower() for s in rstring.split(",")]:
-            if "n" in res or "b" in res and maxres < curres:
+            if ("n" in res or "b" in res) and maxres < curres:
                 warnings.warn(
                     "Map is already < 256 x 256. Provide resolutions "
-                    "explicitly if you want to coarsen more."
+                    "explicitly if you want to coarsen more.",
+                    stacklevel=1,
                 )
             if res == "n":
                 r = preferred_sequence(curres, maxres, "nice")
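
Unlike the cosmetic parentheses in `make_cli_rst.py`, this one is a real precedence fix: `and` binds tighter than `or`, so the old condition parsed as `"n" in res or ("b" in res and maxres < curres)` and warned for every "n" entry regardless of map size. A worked illustration:

    res, maxres, curres = "n", 1000, 500
    old = "n" in res or ("b" in res and maxres < curres)   # True: spurious warning
    new = ("n" in res or "b" in res) and maxres < curres   # False: correct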


=====================================
src/cooler/core/__init__.py
=====================================
@@ -14,9 +14,9 @@ __all__ = [
     "FillLowerRangeQuery2D",
     "RangeSelector1D",
     "RangeSelector2D",
-    "region_to_extent",
-    "region_to_offset",
     "delete",
     "get",
     "put",
+    "region_to_extent",
+    "region_to_offset",
 ]


=====================================
src/cooler/core/_rangequery.py
=====================================
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
-from typing import Any, Callable, Iterator
+from collections.abc import Iterator
+from typing import Any, Callable
 
 import h5py
 import numpy as np
@@ -97,7 +98,7 @@ def sparray_slice_from_dict(
         raise ImportError(
             "The 'sparse' package is required for pydata/sparse output. "
             "You can install it with 'pip install sparse'."
-        )
+        ) from None
 
     shape = (row_stop - row_start, col_stop - col_start)
     return COO(
@@ -355,7 +356,7 @@ class BaseRangeQuery2D:
         except ImportError:
             raise ImportError(
                 "The 'dask' package is required for `dask.delayed` output."
-            )
+            ) from None
 
         out = []
         for task in self.tasks:
@@ -383,7 +384,7 @@ class BaseRangeQuery2D:
             raise ImportError(
                 "The 'dask' package is required for dask DataFrame output. "
                 "Install dask[dataframe] or dask[complete] with pip."
-            )
+            ) from None
 
         meta = self.reader.get_frame_meta(self.field)
         tasks = self.tasks
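
The `from None` additions suppress implicit exception chaining (PEP 409), so users missing an optional dependency see a single actionable ImportError rather than two tracebacks joined by "During handling of the above exception, another exception occurred". The pattern in isolation:

    def require_sparse():
        try:
            from sparse import COO  # optional dependency
        except ImportError:
            raise ImportError(
                "The 'sparse' package is required for pydata/sparse output."
            ) from None  # hide the bare import failure
        return COO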


=====================================
src/cooler/core/_selectors.py
=====================================
@@ -1,11 +1,11 @@
 from __future__ import annotations
 
-from typing import Any, Callable, List, Optional, Tuple, Union, overload
+from typing import Any, Callable, Optional, Union, overload
 
 import pandas as pd
 
-ColumnsArg = Optional[Union[str, List[str]]]
-GenomicRangeArg = Optional[Union[str, Tuple[str, Optional[int], Optional[int]]]]
+ColumnsArg = Optional[Union[str, list[str]]]
+GenomicRangeArg = Optional[Union[str, tuple[str, Optional[int], Optional[int]]]]
 FieldArg = Optional[str]
 
 


=====================================
src/cooler/core/_tableops.py
=====================================
@@ -102,7 +102,7 @@ def get(
             data[field] = pd.Categorical.from_codes(
                 dset[lo:hi], sorted(dt, key=dt.__getitem__), ordered=True
             )
-        elif dset.dtype.type == np.string_:
+        elif dset.dtype.type == np.bytes_:
             data[field] = dset[lo:hi].astype("U")
         else:
             data[field] = dset[lo:hi]
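
`np.string_` was an alias of `np.bytes_` and is removed in NumPy 2.0, hence this one-token rename. The dtype check and the bytes-to-unicode conversion it guards, in isolation (array contents illustrative):

    import numpy as np

    dset = np.array([b"chr1", b"chr2"])   # fixed-width bytes, dtype kind 'S'
    assert dset.dtype.type == np.bytes_   # np.string_ no longer exists on 2.x
    decoded = dset.astype("U")            # -> unicode: ['chr1', 'chr2']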


=====================================
src/cooler/create/__init__.py
=====================================
@@ -1,22 +1,19 @@
-import numpy as np
-
-MAGIC = "HDF5::Cooler"
-MAGIC_SCOOL = "HDF5::SCOOL"
-MAGIC_MCOOL = "HDF5::MCOOL"
-
-URL = "https://github.com/open2c/cooler"
-CHROM_DTYPE = np.dtype("S")
-CHROMID_DTYPE = np.int32
-CHROMSIZE_DTYPE = np.int32
-COORD_DTYPE = np.int32
-BIN_DTYPE = np.int64
-COUNT_DTYPE = np.int32
-CHROMOFFSET_DTYPE = np.int64
-BIN1OFFSET_DTYPE = np.int64
-PIXEL_FIELDS = ("bin1_id", "bin2_id", "count")
-PIXEL_DTYPES = (("bin1_id", BIN_DTYPE), ("bin2_id", BIN_DTYPE), ("count", COUNT_DTYPE))
-
-
+from ._constants import (
+    BIN1OFFSET_DTYPE,
+    BIN_DTYPE,
+    CHROM_DTYPE,
+    CHROMID_DTYPE,
+    CHROMOFFSET_DTYPE,
+    CHROMSIZE_DTYPE,
+    COORD_DTYPE,
+    COUNT_DTYPE,
+    MAGIC,
+    MAGIC_MCOOL,
+    MAGIC_SCOOL,
+    PIXEL_DTYPES,
+    PIXEL_FIELDS,
+    URL,
+)
 from ._create import (
     append,
     create,
@@ -37,3 +34,36 @@ from ._ingest import (
     sanitize_records,
     validate_pixels,
 )
+
+__all__ = [
+    "BIN1OFFSET_DTYPE",
+    "BIN_DTYPE",
+    "CHROMID_DTYPE",
+    "CHROMOFFSET_DTYPE",
+    "CHROMSIZE_DTYPE",
+    "CHROM_DTYPE",
+    "COORD_DTYPE",
+    "COUNT_DTYPE",
+    "MAGIC",
+    "MAGIC_MCOOL",
+    "MAGIC_SCOOL",
+    "PIXEL_DTYPES",
+    "PIXEL_FIELDS",
+    "URL",
+    "ArrayLoader",
+    "BadInputError",
+    "ContactBinner",
+    "HDF5Aggregator",
+    "PairixAggregator",
+    "TabixAggregator",
+    "aggregate_records",
+    "append",
+    "create",
+    "create_cooler",
+    "create_from_unordered",
+    "create_scool",
+    "rename_chroms",
+    "sanitize_pixels",
+    "sanitize_records",
+    "validate_pixels",
+]


=====================================
src/cooler/create/_constants.py
=====================================
@@ -0,0 +1,17 @@
+import numpy as np
+
+MAGIC = "HDF5::Cooler"
+MAGIC_SCOOL = "HDF5::SCOOL"
+MAGIC_MCOOL = "HDF5::MCOOL"
+
+URL = "https://github.com/open2c/cooler"
+CHROM_DTYPE = np.dtype("S")
+CHROMID_DTYPE = np.int32
+CHROMSIZE_DTYPE = np.int32
+COORD_DTYPE = np.int32
+BIN_DTYPE = np.int64
+COUNT_DTYPE = np.int32
+CHROMOFFSET_DTYPE = np.int64
+BIN1OFFSET_DTYPE = np.int64
+PIXEL_FIELDS = ("bin1_id", "bin2_id", "count")
+PIXEL_DTYPES = (("bin1_id", BIN_DTYPE), ("bin2_id", BIN_DTYPE), ("count", COUNT_DTYPE))
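
Hoisting the constants into `_constants.py` presumably lets implementation modules share them without importing the whole `create` package, while the re-exports above keep `from cooler.create import MAGIC` working. The pattern in miniature, with hypothetical module names:

    # pkg/_constants.py
    MAGIC = "HDF5::Cooler"

    # pkg/__init__.py
    from ._constants import MAGIC  # re-export preserves the old import path
    __all__ = ["MAGIC"]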


=====================================
src/cooler/create/_create.py
=====================================
@@ -5,8 +5,9 @@ import os.path as op
 import posixpath
 import tempfile
 import warnings
+from collections.abc import Iterable
 from datetime import datetime
-from typing import Any, Iterable
+from typing import Any
 
 import h5py
 import numpy as np
@@ -429,7 +430,9 @@ def _get_dtypes_arg(
     if "dtype" in kwargs:
         if dtypes is None:
             dtypes = kwargs.pop("dtype")
-            warnings.warn("Use dtypes= instead of dtype=", FutureWarning)
+            warnings.warn(
+                "Use dtypes= instead of dtype=", FutureWarning, stacklevel=2
+            )
         else:
             raise ValueError(
                 'Received both "dtypes" and "dtype" arguments. '
@@ -593,7 +596,8 @@ def create(
     if not symmetric_upper and triucheck:
         warnings.warn(
             "Creating a non-symmetric matrix, but `triucheck` was set to "
-            "True. Changing to False."
+            "True. Changing to False.",
+            stacklevel=2,
         )
         triucheck = False
 
@@ -730,8 +734,8 @@ def create_from_unordered(
     in any particular order.
 
     """
+    from .._reduce import CoolerMerger
     from ..api import Cooler
-    from ..reduce import CoolerMerger
 
     # chromsizes = get_chromsizes(bins)
     bins = bins.copy()


=====================================
src/cooler/create/_ingest.py
=====================================
@@ -12,8 +12,9 @@ import itertools
 import warnings
 from bisect import bisect_left
 from collections import Counter, OrderedDict
+from collections.abc import Iterator
 from functools import partial
-from typing import Any, Callable, Iterator
+from typing import Any, Callable
 
 import h5py
 import numpy as np
@@ -112,8 +113,8 @@ def _sanitize_records(
         return chunk
 
     # Find positional anchor columns, convert to zero-based if needed
-    anchor1 = np.array(chunk[anchor_field + suffixes[0]])
-    anchor2 = np.array(chunk[anchor_field + suffixes[1]])
+    anchor1 = chunk[anchor_field + suffixes[0]].to_numpy(copy=True)
+    anchor2 = chunk[anchor_field + suffixes[1]].to_numpy(copy=True)
     if is_one_based:
         anchor1 -= 1
         anchor2 -= 1
@@ -691,13 +692,15 @@ class TabixAggregator(ContactBinner):
         for chrom in self.gs.contigs:
             if chrom not in self.file_contigs:
                 warnings.warn(
-                    "Did not find contig " + f" '{chrom}' in contact list file."
+                    "Did not find contig " + f" '{chrom}' in contact list file.",
+                    stacklevel=2,
                 )
 
         warnings.warn(
             "NOTE: When using the Tabix aggregator, make sure the order of "
             "chromosomes in the provided chromsizes agrees with the chromosome "
-            "ordering of read ends in the contact list file."
+            "ordering of read ends in the contact list file.",
+            stacklevel=2,
         )
 
     def aggregate(
@@ -871,7 +874,8 @@ class PairixAggregator(ContactBinner):
         for chrom in self.gs.contigs:
             if chrom not in self.file_contigs:
                 warnings.warn(
-                    "Did not find contig " + f" '{chrom}' in contact list file."
+                    "Did not find contig " + f" '{chrom}' in contact list file.",
+                    stacklevel=2,
                 )
 
     def aggregate(
@@ -991,7 +995,10 @@ class SparseBlockLoader(ContactBinner):  # pragma: no cover
             try:
                 block = self.mapping[chrom2, chrom1].T
             except KeyError:
-                warnings.warn(f"Block for {{{chrom1}, {chrom2}}} not found")
+                warnings.warn(
+                    f"Block for {{{chrom1}, {chrom2}}} not found",
+                    stacklevel=2,
+                )
                 raise
         return block
 
@@ -1097,7 +1104,10 @@ class ArrayBlockLoader(ContactBinner):  # pragma: no cover
             try:
                 block = self.mapping[chrom2, chrom1].T
             except KeyError:
-                warnings.warn(f"Block for {{{chrom1}, {chrom2}}} not found")
+                warnings.warn(
+                    f"Block for {{{chrom1}, {chrom2}}} not found",
+                    stacklevel=2,
+                )
                 raise
         return block
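
Switching from `np.array(chunk[...])` to `chunk[...].to_numpy(copy=True)` states the copy in pandas' own API: the anchor arrays are decremented in place just below for one-based input, so they must own their buffer rather than alias the caller's DataFrame. Sketch with illustrative column names:

    import pandas as pd

    chunk = pd.DataFrame({"pos1": [10, 20], "pos2": [15, 25]})
    anchor1 = chunk["pos1"].to_numpy(copy=True)  # guaranteed independent buffer
    anchor1 -= 1                                 # one-based -> zero-based
    assert chunk["pos1"].tolist() == [10, 20]    # source frame untouched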
 


=====================================
src/cooler/fileops.py
=====================================
@@ -24,7 +24,7 @@ from asciitree.traversal import Traversal
 from .create import MAGIC, MAGIC_SCOOL
 from .util import natsorted, parse_cooler_uri
 
-__all__ = ["is_cooler", "is_multires_file", "list_coolers", "cp", "mv", "ln"]
+__all__ = ["cp", "is_cooler", "is_multires_file", "list_coolers", "ln", "mv"]
 
 
 def json_dumps(o: object) -> str:
@@ -118,7 +118,10 @@ def _is_cooler(grp: h5py.Group) -> bool:
     if fmt == MAGIC:
         keys = ("chroms", "bins", "pixels", "indexes")
         if not all(name in grp.keys() for name in keys):
-            warnings.warn(f"Cooler path {grp.name} appears to be corrupt")
+            warnings.warn(
+                f"Cooler path {grp.name} appears to be corrupt",
+                stacklevel=2,
+            )
         return True
     return False
 
@@ -172,7 +175,10 @@ def is_scool_file(filepath: str) -> bool:
         if fmt == MAGIC_SCOOL:
             keys = ("chroms", "bins", "cells")
             if not all(name in f.keys() for name in keys):
-                warnings.warn("Scool file appears to be corrupt")
+                warnings.warn(
+                    "Scool file appears to be corrupt",
+                    stacklevel=2,
+                )
                 return False
             if "cells" in f.keys() and len(f["cells"].keys()) > 0:
                 for cells in f["cells"].keys():


=====================================
src/cooler/parallel.py
=====================================
@@ -5,8 +5,9 @@ coolers.
 """
 from __future__ import annotations
 
+from collections.abc import Iterable, Iterator, Sequence
 from functools import partial, reduce
-from typing import Any, Callable, Iterable, Iterator, Sequence
+from typing import Any, Callable
 
 from multiprocess import Lock
 
@@ -15,7 +16,7 @@ from .api import Cooler
 from .core import get
 from .util import partition
 
-__all__ = ["partition", "split", "lock"]
+__all__ = ["lock", "partition", "split"]
 
 """
 Two possible reasons for using a lock


=====================================
src/cooler/tools.py deleted
=====================================
@@ -1,23 +0,0 @@
-import sys
-from warnings import warn
-
-from . import parallel
-
-deprecated_names = ["partition", "split", "lock", "MultiplexDataPipe"]
-
-
-if sys.version_info[0] == 3 and sys.version_info[1] >= 7:
-
-    def __getattr__(name):
-        if name in deprecated_names:
-            warn(
-                "The `cooler.tools` module is deprecated in v0.9 and will be "
-                "removed in v0.10. Use `cooler.parallel` instead.",
-                category=FutureWarning,
-                stacklevel=2,
-            )
-            return getattr(parallel, name)
-        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
-
-else:
-    from .parallel import *  # noqa
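
The deleted shim used module-level `__getattr__` (PEP 562) to forward attribute access with a `FutureWarning`; the deprecation window it announced has now closed. The pattern itself, as a reusable sketch with hypothetical module names:

    # pkg/old_module.py -- forwards to pkg/new_module.py with a warning
    from warnings import warn

    from . import new_module

    _deprecated_names = {"partition", "split", "lock"}

    def __getattr__(name):
        if name in _deprecated_names:
            warn(f"`old_module.{name}` moved to `new_module`.",
                 FutureWarning, stacklevel=2)
            return getattr(new_module, name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")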


=====================================
src/cooler/util.py
=====================================
@@ -3,8 +3,9 @@ from __future__ import annotations
 import os
 import re
 from collections import OrderedDict, defaultdict
+from collections.abc import Generator, Iterable, Iterator
 from contextlib import contextmanager
-from typing import IO, Any, ContextManager, Iterable, Iterator
+from typing import IO, Any
 
 import h5py
 import numpy as np
@@ -519,7 +520,7 @@ def open_hdf5(
     mode: str = "r",
     *args,
     **kwargs
-) -> ContextManager[h5py.Group]:
+) -> Generator[h5py.Group, None, None]:
     """
     Context manager like ``h5py.File`` but accepts already open HDF5 file
     handles which do not get closed on teardown.
@@ -627,7 +628,7 @@ def infer_meta(x, index=None):  # pragma: no cover
         "m": np.timedelta64(1),
         "S": np.str_("foo"),
         "a": np.str_("foo"),
-        "U": np.unicode_("foo"),
+        "U": np.str_("foo"),
         "O": "foo",
     }
 


=====================================
tests/test_balance.py
=====================================
@@ -5,7 +5,7 @@ import numpy as np
 import pytest
 
 import cooler
-from cooler import balance
+from cooler import _balance
 
 testdir = os.path.dirname(os.path.realpath(__file__))
 
@@ -16,7 +16,7 @@ testdir = os.path.dirname(os.path.realpath(__file__))
 )
 def test_balancing_genomewide(fp, tol):
     clr = cooler.Cooler(fp)
-    weights, stats = balance.iterative_correction(
+    weights, stats = _balance.iterative_correction(
         clr, ignore_diags=1, min_nnz=10, tol=tol
     )
 
@@ -53,7 +53,7 @@ def test_balancing_cisonly(fp, tol):
     with h5py.File(fp, "r") as h5:
         clr = cooler.Cooler(h5)
         chrom_offsets = h5["indexes/chrom_offset"][:]
-        weights, stats = balance.iterative_correction(
+        weights, stats = _balance.iterative_correction(
             clr, ignore_diags=1, min_nnz=10, tol=tol, cis_only=True
         )
 
@@ -103,7 +103,7 @@ def test_balancing_transonly(fp, tol):
     with h5py.File(fp, "r") as h5:
         clr = cooler.Cooler(h5)
         chrom_offsets = h5["indexes/chrom_offset"][:]
-        weights, stats = balance.iterative_correction(
+        weights, stats = _balance.iterative_correction(
             clr, ignore_diags=1, min_nnz=10, tol=tol, trans_only=True
         )
 
@@ -138,11 +138,11 @@ def test_balancing_transonly(fp, tol):
 )
 def test_balancing_other_options(fp, tol):
     clr = cooler.Cooler(fp)
-    weights, stats = balance.iterative_correction(
+    weights, stats = _balance.iterative_correction(
         clr, ignore_diags=1, min_nnz=10, tol=tol, x0=np.random.rand(len(clr.bins()))
     )
 
-    weights, stats = balance.iterative_correction(
+    weights, stats = _balance.iterative_correction(
         clr,
         chunksize=3,
         ignore_diags=1,
@@ -150,7 +150,7 @@ def test_balancing_other_options(fp, tol):
         tol=tol,
     )
 
-    weights, stats = balance.iterative_correction(
+    weights, stats = _balance.iterative_correction(
         clr,
         ignore_diags=1,
         min_nnz=10,
@@ -159,6 +159,6 @@ def test_balancing_other_options(fp, tol):
         tol=tol,
     )
 
-    weights, stats = balance.iterative_correction(
+    weights, stats = _balance.iterative_correction(
         clr, ignore_diags=1, min_nnz=10, tol=tol, blacklist=[0, 4]
     )


=====================================
tests/test_fileops.py
=====================================
@@ -160,6 +160,4 @@ def test_list_scool_cells():
     ]
     cell_paths = fileops.list_scool_cells(src_file)
     assert len(cell_paths) == 5
-    for cell in paths:
-        if cell not in cell_paths:
-            assert False
+    assert all([cell in paths for cell in cell_paths])
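
The loop that asserted `False` on a miss collapses into one containment check. Note the direction also flips: the old loop verified every expected path appears among the listed cells, while the new line verifies every listed cell is an expected path; with the two collections expected to coincide, either passes. A set-based spelling would make the intent plainer:

    assert set(cell_paths) <= set(paths)  # every listed cell is an expected path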


=====================================
tests/test_reduce.py
=====================================
@@ -6,7 +6,7 @@ import pytest
 from _common import cooler_cmp, isolated_filesystem
 
 import cooler
-from cooler.reduce import coarsen_cooler, legacy_zoomify, merge_coolers, zoomify_cooler
+from cooler._reduce import coarsen_cooler, legacy_zoomify, merge_coolers, zoomify_cooler
 
 testdir = op.realpath(op.dirname(__file__))
 datadir = op.join(testdir, "data")


=====================================
tests/test_util.py
=====================================
@@ -166,8 +166,8 @@ def test_read_chromsizes():
     util.read_chromsizes(op.join(datadir, "toy.chrom.sizes"))
 
 
-def test_fetch_chromsizes():
-    util.fetch_chromsizes("hg19")
+# def test_fetch_chromsizes():
+#     util.fetch_chromsizes("hg19")
 
 
 def test_load_fasta():
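
`test_fetch_chromsizes` is commented out rather than deleted, presumably because it fetches chromosome sizes over the network at test time. A pytest-native alternative, should the test return, would be an explicit skip marker (the reason text is an assumption):

    import pytest

    @pytest.mark.skip(reason="requires network access")
    def test_fetch_chromsizes():
        util.fetch_chromsizes("hg19")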



View it on GitLab: https://salsa.debian.org/med-team/python-cooler/-/compare/e66de14aef6ecfe913e8d1c0ee6cf3af21d709d2...770de35d97597f41db7ab50b04cb6b31212a7060
