Bug#1050854: python-xarray: autopkgtest failures

Gianfranco Costamagna locutusofborg at debian.org
Thu Sep 7 02:45:36 BST 2023


Hello, due to the tzdata package split, we need an additional patch:

https://github.com/pydata/xarray/pull/8153
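
For context, the failure shows up on systems that have tzdata but not
tzdata-legacy installed: the legacy US/Eastern name is then simply unknown.
A minimal reproduction sketch, assuming Debian's python3-tz (which, as far
as I know, reads the system zoneinfo rather than a bundled copy):

import pytz

for name in ("US/Eastern", "America/New_York"):
    try:
        tz = pytz.timezone(name)
        print(f"{name}: available ({tz})")
    except pytz.exceptions.UnknownTimeZoneError:
        # US/Eastern is only a backward-compatibility link, now shipped by
        # tzdata-legacy; America/New_York lives in plain tzdata.
        print(f"{name}: unknown without tzdata-legacy")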

From cadeae171e882736e7e6b0dc79150627faf37f58 Mon Sep 17 00:00:00 2001
From: Gianfranco Costamagna <costamagnagianfranco at yahoo.it>
Date: Thu, 7 Sep 2023 03:35:49 +0200
Subject: [PATCH 1/2] tests: Update US/Eastern timezone to America/New_York

The reason is that the US/Eastern symlink moved from the tzdata package to
tzdata-legacy, causing an FTBFS. Since America/New_York is the canonical zone
name in tzdata (US/Eastern is only a backward-compatibility link), the switch
makes the package more robust against future failures.
---
  xarray/tests/test_dataset.py  |  2 +-
  xarray/tests/test_variable.py | 18 +++++++++---------
  2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py
index e119cfe9bc..19fd78daf0 100644
--- a/xarray/tests/test_dataset.py
+++ b/xarray/tests/test_dataset.py
@@ -4698,7 +4698,7 @@ def test_convert_dataframe_with_many_types_and_multiindex(self) -> None:
                  "e": [True, False, True],
                  "f": pd.Categorical(list("abc")),
                  "g": pd.date_range("20130101", periods=3),
-                "h": pd.date_range("20130101", periods=3, tz="US/Eastern"),
+                "h": pd.date_range("20130101", periods=3, tz="America/New_York"),
              }
          )
          df.index = pd.MultiIndex.from_product([["a"], range(3)], names=["one", "two"])
diff --git a/xarray/tests/test_variable.py b/xarray/tests/test_variable.py
index f30cdcf3f7..b09426c5d3 100644
--- a/xarray/tests/test_variable.py
+++ b/xarray/tests/test_variable.py
@@ -2604,7 +2604,7 @@ def test_datetime(self):
  
      @requires_pandas_version_two
      def test_tz_datetime(self) -> None:
-        tz = pytz.timezone("US/Eastern")
+        tz = pytz.timezone("America/New_York")
          times_ns = pd.date_range("2000", periods=1, tz=tz)
  
          times_s = times_ns.astype(pd.DatetimeTZDtype("s", tz))
@@ -2904,9 +2904,9 @@ def test_from_pint_wrapping_dask(self, Var):
          (pd.date_range("2000", periods=1), False),
          (datetime(2000, 1, 1), False),
          (np.array([datetime(2000, 1, 1)]), False),
-        (pd.date_range("2000", periods=1, tz=pytz.timezone("US/Eastern")), False),
+        (pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York")), False),
          (
-            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("US/Eastern"))),
+            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))),
              False,
          ),
      ],
@@ -2929,7 +2929,7 @@ def test_datetime_conversion_warning(values, warns_under_pandas_version_two) ->
          # the case that the variable is backed by a timezone-aware
          # DatetimeIndex, and thus is hidden within the PandasIndexingAdapter class.
          assert var._data.array.dtype == pd.DatetimeTZDtype(
-            "ns", pytz.timezone("US/Eastern")
+            "ns", pytz.timezone("America/New_York")
          )
  
  
@@ -2941,12 +2941,12 @@ def test_pandas_two_only_datetime_conversion_warnings() -> None:
          (pd.date_range("2000", periods=1), "datetime64[s]"),
          (pd.Series(pd.date_range("2000", periods=1)), "datetime64[s]"),
          (
-            pd.date_range("2000", periods=1, tz=pytz.timezone("US/Eastern")),
-            pd.DatetimeTZDtype("s", pytz.timezone("US/Eastern")),
+            pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York")),
+            pd.DatetimeTZDtype("s", pytz.timezone("America/New_York")),
          ),
          (
-            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("US/Eastern"))),
-            pd.DatetimeTZDtype("s", pytz.timezone("US/Eastern")),
+            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))),
+            pd.DatetimeTZDtype("s", pytz.timezone("America/New_York")),
          ),
      ]
      for data, dtype in cases:
@@ -2960,7 +2960,7 @@ def test_pandas_two_only_datetime_conversion_warnings() -> None:
          # the case that the variable is backed by a timezone-aware
          # DatetimeIndex, and thus is hidden within the PandasIndexingAdapter class.
          assert var._data.array.dtype == pd.DatetimeTZDtype(
-            "ns", pytz.timezone("US/Eastern")
+            "ns", pytz.timezone("America/New_York")
          )
  
  

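A quick way to sanity-check the rename on a given system is to list which zone
names the installed tzdata actually provides; a small sketch using the stdlib
zoneinfo module (Python >= 3.9), independent of xarray:

from zoneinfo import available_timezones

zones = available_timezones()
for name in ("US/Eastern", "America/New_York"):
    status = "present" if name in zones else "missing (shipped by tzdata-legacy only)"
    print(f"{name}: {status}")

Since US/Eastern is just a backward-compatibility link to America/New_York in
tzdata, the rename should not change what the tests actually exercise.
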
From 8b5a187d12294a40c21701fcea3ef1b78de65f11 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
  <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 7 Sep 2023 01:43:49 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
  xarray/tests/test_variable.py | 8 ++++++--
  1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/xarray/tests/test_variable.py b/xarray/tests/test_variable.py
index b09426c5d3..9ecc3ace3c 100644
--- a/xarray/tests/test_variable.py
+++ b/xarray/tests/test_variable.py
@@ -2906,7 +2906,9 @@ def test_from_pint_wrapping_dask(self, Var):
          (np.array([datetime(2000, 1, 1)]), False),
          (pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York")), False),
          (
-            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))),
+            pd.Series(
+                pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))
+            ),
              False,
          ),
      ],
@@ -2945,7 +2947,9 @@ def test_pandas_two_only_datetime_conversion_warnings() -> None:
              pd.DatetimeTZDtype("s", pytz.timezone("America/New_York")),
          ),
          (
-            pd.Series(pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))),
+            pd.Series(
+                pd.date_range("2000", periods=1, tz=pytz.timezone("America/New_York"))
+            ),
              pd.DatetimeTZDtype("s", pytz.timezone("America/New_York")),
          ),
      ]



On Wed, 30 Aug 2023 11:13:57 +0200 Bas Couwenberg <sebastic at xs4all.nl> wrote:
> Source: python-xarray
> Version: 2023.08.0-1
> Severity: serious
> Tags: patch
> Justification: autopkgtest failures
> 
> Dear Maintainer,
> 
> The autopkgtest for your package is failing:
> 
>  230s =================================== FAILURES ===================================
>  230s ____________ test_open_mfdataset_manyfiles[netcdf4-20-True-None-5] _____________
>  230s 
>  230s self = CachingFileManager(<class 'netCDF4._netCDF4.Dataset'>, '/tmp/tmp4hr6i68_/temp-1120.nc', mode='r', kwargs={'clobber': True, 'diskless': False, 'persist': False, 'format': 'NETCDF4'}, manager_id='abdbaa71-0b5f-4544-982d-afa923d39953')
>  230s needs_lock = True
>  230s 
>  230s     def _acquire_with_cache_info(self, needs_lock=True):
>  230s         """Acquire a file, returning the file and whether it was cached."""
>  230s         with self._optional_lock(needs_lock):
>  230s             try:
>  230s >               file = self._cache[self._key]
>  230s 
>  230s /usr/lib/python3/dist-packages/xarray/backends/file_manager.py:211: 
>  230s _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
>  230s 
>  230s self = <xarray.backends.lru_cache.LRUCache object at 0x7fe48b508b00>
>  230s key = [<class 'netCDF4._netCDF4.Dataset'>, ('/tmp/tmp4hr6i68_/temp-1120.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'abdbaa71-0b5f-4544-982d-afa923d39953']
>  230s 
>  230s     def __getitem__(self, key: K) -> V:
>  230s         # record recent use of the key by moving it to the front of the list
>  230s         with self._lock:
>  230s >           value = self._cache[key]
>  230s E           KeyError: [<class 'netCDF4._netCDF4.Dataset'>, ('/tmp/tmp4hr6i68_/temp-1120.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'abdbaa71-0b5f-4544-982d-afa923d39953']
>  230s 
>  230s /usr/lib/python3/dist-packages/xarray/backends/lru_cache.py:56: KeyError
>  230s 
>  230s During handling of the above exception, another exception occurred:
>  230s 
>  230s readengine = 'netcdf4', nfiles = 20, parallel = True, chunks = None
>  230s file_cache_maxsize = 5
>  230s 
>  230s     @requires_dask
>  230s     @pytest.mark.filterwarnings("ignore:use make_scale(name) instead")
>  230s     def test_open_mfdataset_manyfiles(
>  230s         readengine, nfiles, parallel, chunks, file_cache_maxsize
>  230s     ):
>  230s         # skip certain combinations
>  230s         skip_if_not_engine(readengine)
>  230s     
>  230s         if ON_WINDOWS:
>  230s             pytest.skip("Skipping on Windows")
>  230s     
>  230s         randdata = np.random.randn(nfiles)
>  230s         original = Dataset({"foo": ("x", randdata)})
>  230s         # test standard open_mfdataset approach with too many files
>  230s         with create_tmp_files(nfiles) as tmpfiles:
>  230s             writeengine = readengine if readengine != "pynio" else "netcdf4"
>  230s             # split into multiple sets of temp files
>  230s             for ii in original.x.values:
>  230s                 subds = original.isel(x=slice(ii, ii + 1))