[Git][debian-gis-team/rasterio][upstream] New upstream version 1.1.2

Bas Couwenberg gitlab at salsa.debian.org
Thu Dec 19 04:43:22 GMT 2019



Bas Couwenberg pushed to branch upstream at Debian GIS Project / rasterio


Commits:
ef125c10 by Bas Couwenberg at 2019-12-19T04:21:54Z
New upstream version 1.1.2
- - - - -


19 changed files:

- .travis.yml
- CHANGES.txt
- rasterio/__init__.py
- rasterio/_io.pyx
- rasterio/_warp.pyx
- rasterio/merge.py
- rasterio/rio/bounds.py
- rasterio/rio/calc.py
- rasterio/rio/clip.py
- rasterio/rio/convert.py
- rasterio/rio/helpers.py
- rasterio/rio/merge.py
- rasterio/sample.py
- tests/test_dataset_mask.py
- + tests/test_rio_bounds.py
- tests/test_rio_convert.py
- tests/test_rio_info.py
- tests/test_rio_merge.py
- tests/test_warpedvrt.py


Changes:

=====================================
.travis.yml
=====================================
@@ -1,7 +1,7 @@
-dist: trusty
-
 language: python
 
+dist: trusty
+
 env:
   global:
     - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
@@ -17,28 +17,16 @@ jobs:
       env: GDALVERSION="1.11.5" PROJVERSION="4.8.0"
     - python: "2.7"
       env: GDALVERSION="2.2.4" PROJVERSION="4.9.3"
+    - python: "2.7"
+      env: GDALVERSION="2.3.3" PROJVERSION="4.9.3"
     - python: "3.6"
       env: GDALVERSION="2.2.4" PROJVERSION="4.9.3"
     - python: "3.6"
       env: GDALVERSION="2.3.3" PROJVERSION="4.9.3"
-    - python: "2.7"
-      env: GDALVERSION="2.4.3" PROJVERSION="4.9.3"
     - python: "3.6"
       env: GDALVERSION="2.4.3" PROJVERSION="4.9.3"
-    - python: "3.7"
-      env: GDALVERSION="2.4.3" PROJVERSION="4.9.3"
-    - python: "3.8"
-      env: GDALVERSION="2.4.3" PROJVERSION="4.9.3"
     - python: "3.6"
       env: GDALVERSION="3.0.1" PROJVERSION="6.1.1"
-    - python: "3.7"
-      env: GDALVERSION="3.0.1" PROJVERSION="6.1.1"
-    - python: "3.8"
-      env: GDALVERSION="3.0.1" PROJVERSION="6.1.1"
-    - python: "3.8"
-      env: GDALVERSION="master" PROJVERSION="6.1.1"
- allow_failures:
-    - env: GDALVERSION="master" PROJVERSION="6.1.1"
 
 addons:
   apt:
@@ -49,6 +37,11 @@ addons:
     - libatlas-base-dev
     - gfortran
 
+before_script:
+  - "export DISPLAY=:99.0"
+  - "sh -e /etc/init.d/xvfb start"
+  - "sleep 3"
+
 before_install:
   - python -m pip install -U pip
   - python -m pip install wheel
@@ -69,11 +62,6 @@ install:
   - "rio --gdal-version"
   - "python -m pip list"
 
-before_script:
-  - "export DISPLAY=:99.0"
-  - "sh -e /etc/init.d/xvfb start"
-  - "sleep 3"
-
 script:
   - "if [[ $TRAVIS_PYTHON_VERSION == 3.5 && $GDALVERSION == 2.1.0 ]]; then python -m pytest --doctest-ignore-import-errors --doctest-glob='*.rst' docs/*.rst -k 'not index and not quickstart and not switch' ; fi"
   - python -m pytest -v -m "not wheel" -rxXs --cov rasterio --cov-report term-missing


=====================================
CHANGES.txt
=====================================
@@ -1,6 +1,32 @@
 Changes
 =======
 
+1.1.2 (2019-12-18)
+------------------
+
+Bug fixes:
+
+- Sampling of WarpedVRT datasets was broken in version 1.1.1 (#1833) and has
+  been fixed.
+- The missing out_dtype keyword argument has been added to
+  WarpedVRTReaderBase.read() (#1849).
+- The missing --format option has been added to rio-calc (#1846).
+- Reduce all the band masks when computing the dataset mask. Previously we had
+  missed the last band.
+- PR #1842 makes sure that rio-calc's cleanup doesn't fail due to unbound
+  variables.
+- The conflict between the --bbox/--feature/--collection and
+  --sequence/--collection options of rio-bounds (#1807) has been fixed by
+  removing "collection" from the possible JSON type options.
+- Increase default precision for the merge tool from 7 to 10 (#1837).
+- Allow rio-clip and rio-convert to overwrite output files (#1836).
+- Allow src_crs parameter to fully override the source dataset's CRS (#1808).
+
+Packaging notes:
+
+- The wheels on PyPI now include the base (version 1.8) PROJ datum grids and
+  are thus a few MB larger in size.
+
 1.1.1 (2019-11-13)
 ------------------
 


=====================================
rasterio/__init__.py
=====================================
@@ -42,7 +42,7 @@ import rasterio.path
 
 
 __all__ = ['band', 'open', 'pad', 'Env']
-__version__ = "1.1.1"
+__version__ = "1.1.2"
 __gdal_version__ = gdal_version()
 
 # Rasterio attaches NullHandler to the 'rasterio' logger and its


=====================================
rasterio/_io.pyx
=====================================
@@ -713,31 +713,73 @@ cdef class DatasetReaderBase(DatasetBase):
 
     def dataset_mask(self, out=None, out_shape=None, window=None,
                      boundless=False, resampling=Resampling.nearest):
-        """Calculate the dataset's 2D mask. Derived from the individual band masks
-        provided by read_masks().
+        """Get the dataset's 2D valid data mask.
 
         Parameters
         ----------
-        out, out_shape, window, boundless and resampling are passed directly to read_masks()
+        out : numpy ndarray, optional
+            As with Numpy ufuncs, this is an optional reference to an
+            output array with the same dimensions and shape into which
+            data will be placed.
+
+            *Note*: the method's return value may be a view on this
+            array. In other words, `out` is likely to be an
+            incomplete representation of the method's results.
+
+            Cannot be combined with `out_shape`.
+
+        out_shape : tuple, optional
+            A tuple describing the output array's shape.  Allows for decimated
+            reads without constructing an output Numpy array.
+
+            Cannot be combined with `out`.
+
+        window : a pair (tuple) of pairs of ints or Window, optional
+            The optional `window` argument is a 2 item tuple. The first
+            item is a tuple containing the indexes of the rows at which
+            the window starts and stops and the second is a tuple
+            containing the indexes of the columns at which the window
+            starts and stops. For example, ((0, 2), (0, 2)) defines
+            a 2x2 window at the upper left of the raster dataset.
+
+        boundless : bool, optional (default `False`)
+            If `True`, windows that extend beyond the dataset's extent
+            are permitted and partially or completely filled arrays will
+            be returned as appropriate.
+
+        resampling : Resampling
+            By default, pixel values are read raw or interpolated using
+            a nearest neighbor algorithm from the band cache. Other
+            resampling algorithms may be specified. Resampled pixels
+            are not cached.
 
         Returns
         -------
-        ndarray, shape=(self.height, self.width), dtype='uint8'
-        0 = nodata, 255 = valid data
-
-        The dataset mask is calculate based on the individual band masks according to
-        the following logic, in order of precedence:
-
-        1. If a .msk file, dataset-wide alpha or internal mask exists,
-           it will be used as the dataset mask.
-        2. If an 4-band RGBA with a shadow nodata value,
-           band 4 will be used as the dataset mask.
-        3. If a nodata value exists, use the binary OR (|) of the band masks
-        4. If no nodata value exists, return a mask filled with 255
-
-        Note that this differs from read_masks and GDAL RFC15
-        in that it applies per-dataset, not per-band
-        (see https://trac.osgeo.org/gdal/wiki/rfc15_nodatabitmask)
+        Numpy ndarray or a view on a Numpy ndarray
+            The dtype of this array is uint8. 0 = nodata, 255 = valid
+            data.
+
+        Notes
+        -----
+        Note: as with Numpy ufuncs, an object is returned even if you
+        use the optional `out` argument and the return value shall be
+        preferentially used by callers.
+
+        The dataset mask is calculated based on the individual band
+        masks according to the following logic, in order of precedence:
+
+        1. If a .msk file, dataset-wide alpha, or internal mask exists
+           it will be used for the dataset mask.
+        2. Else if the dataset is a 4-band RGBA with a shadow nodata value, band 4 will be
+           used as the dataset mask.
+        3. If a nodata value exists, use the binary OR (|) of the band
+           masks.
+        4. If no nodata value exists, return a mask filled with 255.
+
+        Note that this differs from read_masks and GDAL RFC15 in that it
+        applies per-dataset, not per-band (see
+        https://trac.osgeo.org/gdal/wiki/rfc15_nodatabitmask)
+
         """
         kwargs = {
             'out': out,
@@ -746,21 +788,23 @@ cdef class DatasetReaderBase(DatasetBase):
             'boundless': boundless,
             'resampling': resampling}
 
-        # GDAL found dataset-wide alpha band or mask
-        # All band masks are equal so we can return the first
         if MaskFlags.per_dataset in self.mask_flag_enums[0]:
             return self.read_masks(1, **kwargs)
 
-        # use Alpha mask if available and looks like RGB, even if nodata is shadowing
         elif self.count == 4 and self.colorinterp[0] == ColorInterp.red:
             return self.read_masks(4, **kwargs)
 
-        # Or use the binary OR intersection of all GDALGetMaskBands
-        else:
-            mask = self.read_masks(1, **kwargs)
-            for i in range(1, self.count):
-                mask = mask | self.read_masks(i, **kwargs)
-            return mask
+        elif out is not None:
+            kwargs.pop("out", None)
+            kwargs["out_shape"] = (self.count, out.shape[-2], out.shape[-1])
+            out = 255 * np.logical_or.reduce(self.read_masks(**kwargs))
+            return out
+
+        elif out_shape is not None:
+            kwargs["out_shape"] = (self.count, out_shape[-2], out_shape[-1])
+
+        return 255 * np.logical_or.reduce(self.read_masks(**kwargs))
+
 
     def sample(self, xy, indexes=None, masked=False):
         """Get the values of a dataset at certain positions


=====================================
rasterio/_warp.pyx
=====================================
@@ -888,13 +888,16 @@ cdef class WarpedVRTReaderBase(DatasetReaderBase):
         psWOptions.hSrcDS = hds
 
         try:
+
             if self.dst_width and self.dst_height and self.dst_transform:
                 # set up transform args (otherwise handled in
                 # GDALAutoCreateWarpedVRT)
                 try:
+
                     hTransformArg = exc_wrap_pointer(
                         GDALCreateGenImgProjTransformer3(
                             src_crs_wkt, src_gt, dst_crs_wkt, dst_gt))
+
                     if c_tolerance > 0.0:
                         hTransformArg = exc_wrap_pointer(
                             GDALCreateApproxTransformer(
@@ -909,6 +912,7 @@ cdef class WarpedVRTReaderBase(DatasetReaderBase):
 
                     log.debug("Created transformer and options.")
                     psWOptions.pTransformerArg = hTransformArg
+
                 except Exception:
                     GDALDestroyApproxTransformer(hTransformArg)
                     raise
@@ -917,16 +921,20 @@ cdef class WarpedVRTReaderBase(DatasetReaderBase):
                     hds_warped = GDALCreateWarpedVRT(
                         hds, c_width, c_height, dst_gt, psWOptions)
                     GDALSetProjection(hds_warped, dst_crs_wkt)
+
                 self._hds = exc_wrap_pointer(hds_warped)
+
             else:
                 with nogil:
                     hds_warped = GDALAutoCreateWarpedVRT(
-                        hds, NULL, dst_crs_wkt, c_resampling,
+                        hds, src_crs_wkt, dst_crs_wkt, c_resampling,
                         c_tolerance, psWOptions)
+
                 self._hds = exc_wrap_pointer(hds_warped)
 
         except CPLE_OpenFailedError as err:
             raise RasterioIOError(err.errmsg)
+
         finally:
             CPLFree(dst_crs_wkt)
             CSLDestroy(c_warp_extras)
@@ -972,12 +980,84 @@ cdef class WarpedVRTReaderBase(DatasetReaderBase):
 
     def read(self, indexes=None, out=None, window=None, masked=False,
             out_shape=None, boundless=False, resampling=Resampling.nearest,
-            fill_value=None):
-        """Read a dataset's raw pixels as an N-d array"""
+            fill_value=None, out_dtype=None):
+        """Read a dataset's raw pixels as an N-d array
+
+        This data is read from the dataset's band cache, which means
+        that repeated reads of the same windows may avoid I/O.
+
+        Parameters
+        ----------
+        indexes : list of ints or a single int, optional
+            If `indexes` is a list, the result is a 3D array, but is
+            a 2D array if it is a band index number.
+
+        out : numpy ndarray, optional
+            As with Numpy ufuncs, this is an optional reference to an
+            output array into which data will be placed. If the height
+            and width of `out` differ from that of the specified
+            window (see below), the raster image will be decimated or
+            replicated using the specified resampling method (also see
+            below).
+
+            *Note*: the method's return value may be a view on this
+            array. In other words, `out` is likely to be an
+            incomplete representation of the method's results.
+
+            This parameter cannot be combined with `out_shape`.
+
+        out_dtype : str or numpy dtype
+            The desired output data type. For example: 'uint8' or
+            rasterio.uint16.
+
+        out_shape : tuple, optional
+            A tuple describing the shape of a new output array. See
+            `out` (above) for notes on image decimation and
+            replication.
+
+            Cannot be combined with `out`.
+
+        window : a pair (tuple) of pairs of ints or Window, optional
+            The optional `window` argument is a 2 item tuple. The first
+            item is a tuple containing the indexes of the rows at which
+            the window starts and stops and the second is a tuple
+            containing the indexes of the columns at which the window
+            starts and stops. For example, ((0, 2), (0, 2)) defines
+            a 2x2 window at the upper left of the raster dataset.
+
+        masked : bool, optional
+            If `masked` is `True` the return value will be a masked
+            array. Otherwise (the default) the return value will be a
+            regular array. Masks will be exactly the inverse of the
+            GDAL RFC 15 conforming arrays returned by read_masks().
+
+        boundless : bool, optional (default `False`)
+            If `True`, windows that extend beyond the dataset's extent
+            are permitted and partially or completely filled arrays will
+            be returned as appropriate.
+
+        resampling : Resampling
+            By default, pixel values are read raw or interpolated using
+            a nearest neighbor algorithm from the band cache. Other
+            resampling algorithms may be specified. Resampled pixels
+            are not cached.
+
+        fill_value : scalar
+            Fill value applied in the `boundless=True` case only.
+
+        Returns
+        -------
+        Numpy ndarray or a view on a Numpy ndarray
+
+        Note: as with Numpy ufuncs, an object is returned even if you
+        use the optional `out` argument and the return value shall be
+        preferentially used by callers.
+
+        """
         if boundless:
             raise ValueError("WarpedVRT does not permit boundless reads")
         else:
-            return super(WarpedVRTReaderBase, self).read(indexes=indexes, out=out, window=window, masked=masked, out_shape=out_shape, resampling=resampling, fill_value=fill_value)
+            return super(WarpedVRTReaderBase, self).read(indexes=indexes, out=out, window=window, masked=masked, out_shape=out_shape, resampling=resampling, fill_value=fill_value, out_dtype=out_dtype)
 
     def read_masks(self, indexes=None, out=None, out_shape=None, window=None,
                    boundless=False, resampling=Resampling.nearest):
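
A minimal sketch of the new out_dtype keyword described above; the input
path is hypothetical and the VRT simply keeps the source CRS:

    import rasterio
    from rasterio.vrt import WarpedVRT

    with rasterio.open("example.tif") as src:
        with WarpedVRT(src, crs=src.crs) as vrt:
            # Pixels are returned as float32 regardless of the source dtype.
            data = vrt.read(out_dtype="float32")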


=====================================
rasterio/merge.py
=====================================
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
 MERGE_METHODS = ('first', 'last', 'min', 'max')
 
 
-def merge(datasets, bounds=None, res=None, nodata=None, precision=7, indexes=None,
+def merge(datasets, bounds=None, res=None, nodata=None, precision=10, indexes=None,
           method='first'):
     """Copy valid pixels from input files to an output file.
 


=====================================
rasterio/rio/bounds.py
=====================================
@@ -33,8 +33,7 @@ logger = logging.getLogger(__name__)
     help="Output in specified coordinates.")
 @options.sequence_opt
 @use_rs_opt
-@geojson_type_collection_opt(True)
-@geojson_type_feature_opt(False)
+@geojson_type_feature_opt(True)
 @geojson_type_bbox_opt(False)
 @click.pass_context
 def bounds(ctx, input, precision, indent, compact, projection, dst_crs,
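
A minimal sketch of the resulting rio-bounds behaviour, using click's test
runner as the new tests do; the input path is hypothetical:

    from click.testing import CliRunner
    from rasterio.rio.main import main_group

    runner = CliRunner()
    # With --sequence, each input yields a plain GeoJSON Feature rather
    # than a one-feature FeatureCollection.
    result = runner.invoke(main_group, ["bounds", "--sequence", "example.tif"])
    print(result.output)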


=====================================
rasterio/rio/calc.py
=====================================
@@ -7,6 +7,7 @@ from distutils.version import LooseVersion
 import math
 
 import click
+from cligj import format_opt
 import snuggs
 
 import rasterio
@@ -80,6 +81,7 @@ def _chunk_output(width, height, count, itemsize, mem_limit=1):
 @click.argument('command')
 @options.files_inout_arg
 @options.output_opt
+@format_opt
 @click.option('--name', multiple=True,
               help='Specify an input file with a unique short (alphas only) '
                    'name for use in commands like '
@@ -90,7 +92,7 @@ def _chunk_output(width, height, count, itemsize, mem_limit=1):
 @click.option("--mem-limit", type=int, default=64, help="Limit on memory used to perform calculations, in MB.")
 @options.creation_options
 @click.pass_context
-def calc(ctx, command, files, output, name, dtype, masked, overwrite, mem_limit, creation_options):
+def calc(ctx, command, files, output, driver, name, dtype, masked, overwrite, mem_limit, creation_options):
     """A raster data calculator
 
     Evaluates an expression using input datasets and writes the result
@@ -135,6 +137,9 @@ def calc(ctx, command, files, output, name, dtype, masked, overwrite, mem_limit,
     """
     import numpy as np
 
+    dst = None
+    sources = []
+
     try:
         with ctx.obj['env']:
             output, files = resolve_inout(files=files, output=output,
@@ -149,6 +154,9 @@ def calc(ctx, command, files, output, name, dtype, masked, overwrite, mem_limit,
             dtype = dtype or first.meta['dtype']
             kwargs['dtype'] = dtype
 
+            if driver:
+                kwargs['driver'] = driver
+
             # Extend snuggs.
             snuggs.func_map['read'] = _read_array
             snuggs.func_map['band'] = lambda d, i: _get_bands(inputs, sources, d, i)
@@ -156,8 +164,6 @@ def calc(ctx, command, files, output, name, dtype, masked, overwrite, mem_limit,
             snuggs.func_map['fillnodata'] = lambda *args: fillnodata(*args)
             snuggs.func_map['sieve'] = lambda *args: sieve(*args)
 
-            dst = None
-
             # The windows iterator is initialized with a single sample.
             # The actual work windows will be added in the second
             # iteration of the loop.
@@ -205,8 +211,8 @@ def calc(ctx, command, files, output, name, dtype, masked, overwrite, mem_limit,
 
     except snuggs.ExpressionError as err:
         click.echo("Expression Error:")
-        click.echo('  %s' % err.text)
-        click.echo(' ' + ' ' * err.offset + "^")
+        click.echo("  {}".format(err.text))
+        click.echo(" {}^".format(" " * err.offset))
         click.echo(err)
         raise click.Abort()
 


=====================================
rasterio/rio/clip.py
=====================================
@@ -48,10 +48,11 @@ projection_projected_opt = click.option(
 @format_opt
 @projection_geographic_opt
 @projection_projected_opt
+@options.overwrite_opt
 @options.creation_options
 @click.pass_context
 def clip(ctx, files, output, bounds, like, driver, projection,
-         creation_options):
+         overwrite, creation_options):
     """Clips a raster using projected or geographic bounds.
 
     \b
@@ -78,7 +79,7 @@ def clip(ctx, files, output, bounds, like, driver, projection,
 
     with ctx.obj['env']:
 
-        output, files = resolve_inout(files=files, output=output)
+        output, files = resolve_inout(files=files, output=output, overwrite=overwrite)
         input = files[0]
 
         with rasterio.open(input) as src:
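
A minimal sketch of the new --overwrite behaviour for rio-clip; the paths
and bounds are hypothetical:

    from click.testing import CliRunner
    from rasterio.rio.main import main_group

    runner = CliRunner()
    args = ["clip", "example.tif", "out.tif",
            "--bounds", "-78.9 23.5 -76.9 25.5"]
    first = runner.invoke(main_group, args)                      # creates out.tif
    second = runner.invoke(main_group, args)                     # fails: output exists
    third = runner.invoke(main_group, args + ["--overwrite"])    # succeeds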


=====================================
rasterio/rio/convert.py
=====================================
@@ -20,11 +20,12 @@ from rasterio.rio.helpers import resolve_inout
 @click.option('--scale-offset', type=float, default=None,
               help="Source to destination scaling offset.")
 @options.rgb_opt
+@options.overwrite_opt
 @options.creation_options
 @click.pass_context
 def convert(
         ctx, files, output, driver, dtype, scale_ratio, scale_offset,
-        photometric, creation_options):
+        photometric, overwrite, creation_options):
     """Copy and convert raster datasets to other data types and formats.
 
     Data values may be linearly scaled when copying by using the
@@ -50,7 +51,7 @@ def convert(
     """
     with ctx.obj['env']:
 
-        outputfile, files = resolve_inout(files=files, output=output)
+        outputfile, files = resolve_inout(files=files, output=output, overwrite=overwrite)
         inputfile = files[0]
 
         with rasterio.open(inputfile) as src:


=====================================
rasterio/rio/helpers.py
=====================================
@@ -38,25 +38,18 @@ def write_features(
             bbox = (min(xs), min(ys), max(xs), max(ys))
             if use_rs:
                 fobj.write(u'\u001e')
-            if geojson_type == 'feature':
-                fobj.write(json.dumps(feat, **dump_kwds))
-            elif geojson_type == 'bbox':
+            if geojson_type == 'bbox':
                 fobj.write(json.dumps(bbox, **dump_kwds))
             else:
-                fobj.write(
-                    json.dumps({
-                        'type': 'FeatureCollection',
-                        'bbox': bbox,
-                        'features': [feat]}, **dump_kwds))
+                fobj.write(json.dumps(feat, **dump_kwds))
             fobj.write('\n')
+
     # Aggregate all features into a single object expressed as
     # bbox or collection.
     else:
         features = list(collection())
         if geojson_type == 'bbox':
             fobj.write(json.dumps(collection.bbox, **dump_kwds))
-        elif geojson_type == 'feature':
-            fobj.write(json.dumps(features[0], **dump_kwds))
         else:
             fobj.write(json.dumps({
                 'bbox': collection.bbox,


=====================================
rasterio/rio/merge.py
=====================================
@@ -18,7 +18,7 @@ from rasterio.rio.helpers import resolve_inout
 @options.nodata_opt
 @options.bidx_mult_opt
 @options.overwrite_opt
-@click.option('--precision', type=int, default=7,
+@click.option('--precision', type=int, default=10,
               help="Number of decimal places of precision in alignment of "
                    "pixels")
 @options.creation_options


=====================================
rasterio/sample.py
=====================================
@@ -2,6 +2,7 @@
 
 import numpy
 
+from rasterio.enums import MaskFlags
 from rasterio.windows import Window
 
 
@@ -31,14 +32,24 @@ def sample_gen(dataset, xy, indexes=None, masked=False):
     index = dataset.index
     read = dataset.read
 
-    if isinstance(indexes, int):
+    if indexes is None:
+        indexes = dataset.indexes
+    elif isinstance(indexes, int):
         indexes = [indexes]
 
     for x, y in xy:
+
         row_off, col_off = index(x, y)
-#        if row_off < 0 or col_off < 0:
-#            yield numpy.ones((dataset.count,), dtype=dataset.dtypes[0]) * dataset.nodata
-#        else:
-        window = Window(col_off, row_off, 1, 1)
-        data = read(indexes, window=window, masked=masked, boundless=True)
-        yield data[:, 0, 0]
+
+        if row_off < 0 or col_off < 0 or row_off >= dataset.height or col_off >= dataset.width:
+            data = numpy.ones((len(indexes),), dtype=dataset.dtypes[0]) * (dataset.nodata or 0)
+            if masked:
+                mask = [False if MaskFlags.all_valid in dataset.mask_flag_enums[i - 1] else True for i in indexes]
+                yield numpy.ma.array(data, mask=mask)
+            else:
+                yield data
+
+        else:
+            window = Window(col_off, row_off, 1, 1)
+            data = read(indexes, window=window, masked=masked)
+            yield data[:, 0, 0]
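
A minimal sketch of the revised sampling behaviour; the path is
hypothetical and the point is assumed to lie outside the dataset:

    import rasterio

    with rasterio.open("example.tif") as src:
        # Out-of-bounds points now yield the nodata value, or 0 when the
        # dataset defines no nodata, instead of a boundless read.
        value = next(src.sample([(-20.0, -50.0)]))
        print(value)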


=====================================
tests/test_dataset_mask.py
=====================================
@@ -188,20 +188,23 @@ def test_rgba_msk(tiffs):
         # mask takes precendent over alpha
         assert np.array_equal(src.dataset_mask(), msk)
 
-def test_kwargs(tiffs):
-    with rasterio.open(str(tiffs.join('rgb_ndv.tif'))) as src:
-        # window and boundless are passed along
-        other = src.dataset_mask(window=((1, 4), (1, 4)), boundless=True)
-        assert np.array_equal(alp_shift_lr, other)
-
-        other = src.dataset_mask(out_shape=(1, 5, 5))
-        assert np.array_equal(resampmask, other)
-
-        out = np.zeros((1, 5, 5), dtype=np.uint8)
-        other = src.dataset_mask(out=out)
-        assert np.array_equal(resampmask, other)
 
-        # band indexes are not supported
+@pytest.mark.parametrize("kwds,expected", [(dict(window=((1, 4), (1, 4)), boundless=True), alp_shift_lr), (dict(out_shape=(1, 5, 5)), resampmask), (dict(out=np.zeros((1, 5, 5), dtype=np.uint8)), resampmask)])
+def test_kwargs(tiffs, kwds, expected):
+    with rasterio.open(str(tiffs.join('rgb_ndv.tif'))) as src:
+        result = src.dataset_mask(**kwds)
+        assert np.array_equal(expected, result)
+#
+#        other = src.dataset_mask(out_shape=(1, 5, 5))
+#        assert np.array_equal(resampmask, other)
+#
+#        out = np.zeros((1, 5, 5), dtype=np.uint8)
+#        other = src.dataset_mask(out=out)
+#        assert np.array_equal(resampmask, other)
+
+
+def test_indexes_not_supported(tiffs):
+    with rasterio.open(str(tiffs.join('rgb_ndv.tif'))) as src:
         with pytest.raises(TypeError):
             src.dataset_mask(indexes=1)
 


=====================================
tests/test_rio_bounds.py
=====================================
@@ -0,0 +1,38 @@
+import pytest
+
+import rasterio
+from rasterio.rio.main import main_group
+
+
+def test_bounds_sequence_single(runner, basic_image_file):
+    """
+    --sequence option should produce a feature collection for a single image.
+    """
+    result = runner.invoke(main_group, ["bounds", "--sequence", basic_image_file])
+
+    assert result.output.count('"FeatureCollection"') == 0
+    assert result.output.count('"Feature"') == 1
+
+
+def tests_bounds_sequence_multiple(runner, basic_image_file):
+    """
+    --sequence option should produce a feature collection for each image passed as argument.
+    """
+    result = runner.invoke(
+        main_group, ["bounds", "--sequence", basic_image_file, basic_image_file]
+    )
+
+    assert result.output.count('"FeatureCollection"') == 0
+    assert result.output.count('"Feature"') == 2
+
+
+def test_bounds_no_sequence_multiple(runner, basic_image_file):
+    """
+    --no-sequence option should produce a single feature collection
+    """
+    result = runner.invoke(
+        main_group, ["bounds", "--collection", basic_image_file, basic_image_file]
+    )
+
+    assert result.output.count('"FeatureCollection"') == 1
+    assert result.output.count('"Feature"') == 2


=====================================
tests/test_rio_convert.py
=====================================
@@ -84,6 +84,34 @@ def test_clip_like_disjunct(runner, tmpdir):
     assert '--like' in result.output
 
 
+def test_clip_overwrite_without_option(runner, tmpdir):
+    output = str(tmpdir.join('test.tif'))
+    result = runner.invoke(
+        main_group,
+        ['clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX)])
+    assert result.exit_code == 0
+
+    result = runner.invoke(
+        main_group,
+        ['clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX)])
+    assert result.exit_code == 1
+    assert '--overwrite' in result.output
+
+
+def test_clip_overwrite_with_option(runner, tmpdir):
+    output = str(tmpdir.join('test.tif'))
+    result = runner.invoke(
+        main_group,
+        ['clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX)])
+    assert result.exit_code == 0
+
+    result = runner.invoke(
+        main_group, [
+        'clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX),
+        '--overwrite'])
+    assert result.exit_code == 0
+
+
 # Tests: format and type conversion, --format and --dtype
 
 def test_format(tmpdir):
@@ -202,3 +230,31 @@ def test_rgb(tmpdir):
     assert result.exit_code == 0
     with rasterio.open(outputname) as src:
         assert src.colorinterp[0] == rasterio.enums.ColorInterp.red
+
+
+def test_convert_overwrite_without_option(runner, tmpdir):
+    outputname = str(tmpdir.join('test.tif'))
+    result = runner.invoke(
+        main_group,
+        ['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
+    assert result.exit_code == 0
+
+    result = runner.invoke(
+        main_group,
+        ['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
+    assert result.exit_code == 1
+    assert '--overwrite' in result.output
+
+
+def test_convert_overwrite_with_option(runner, tmpdir):
+    outputname = str(tmpdir.join('test.tif'))
+    result = runner.invoke(
+        main_group,
+        ['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
+    assert result.exit_code == 0
+
+    result = runner.invoke(
+        main_group, [
+        'convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG',
+        '--overwrite'])
+    assert result.exit_code == 0


=====================================
tests/test_rio_info.py
=====================================
@@ -266,7 +266,7 @@ def test_bounds_defaults():
         'tests/data/RGB.byte.tif'
     ])
     assert result.exit_code == 0
-    assert 'FeatureCollection' in result.output
+    assert 'Feature' in result.output
 
 
 def test_bounds_err():


=====================================
tests/test_rio_merge.py
=====================================
@@ -1,9 +1,9 @@
 """Unittests for $ rio merge"""
 
 
-import sys
 import os
-import logging
+import sys
+import textwrap
 
 import affine
 from click.testing import CliRunner
@@ -18,6 +18,7 @@ from rasterio.transform import Affine
 
 from .conftest import requires_gdal22
 
+
 # Fixture to create test datasets within temporary directory
 @fixture(scope='function')
 def test_data_dir_1(tmpdir):
@@ -455,7 +456,7 @@ def test_merge_rgb(tmpdir):
     assert result.exit_code == 0
 
     with rasterio.open(outputname) as src:
-        assert [src.checksum(i) for i in src.indexes] == [25420, 29131, 37860]
+        assert [src.checksum(i) for i in src.indexes] == [33219, 35315, 45188]
 
 
 def test_merge_tiny_intres(tiffs):
@@ -463,3 +464,52 @@ def test_merge_tiny_intres(tiffs):
     inputs.sort()
     datasets = [rasterio.open(x) for x in inputs]
     merge(datasets, res=2)
+
+
+@pytest.mark.parametrize("precision", [[], ["--precision", "9"]])
+def test_merge_precision(tmpdir, precision):
+    """See https://github.com/mapbox/rasterio/issues/1837"""
+    # TODO: move ascii grids to a fixture?
+
+    expected = """\
+        ncols        8
+        nrows        8
+        xllcorner    0.000000000000
+        yllcorner    0.000000000000
+        cellsize     1.000000000000
+         1 2 3 4 1 2 3 4
+         3 4 5 6 3 4 5 6
+         4 5 6 8 4 5 6 8
+         7 9 5 4 7 9 5 4
+         1 2 3 4 1 2 3 4
+         3 4 5 6 3 4 5 6
+         4 5 6 8 4 5 6 8
+         7 9 5 4 7 9 5 4
+         """
+
+    template = """\
+        ncols 4
+        nrows 4
+        xllcorner {:f}
+        yllcorner {:f}
+        cellsize 1.0
+        1 2 3 4
+        3 4 5 6
+        4 5 6 8
+        7 9 5 4
+        """
+
+    names = ["sw.asc", "se.asc", "nw.asc", "ne.asc"]
+    corners = [(0.0, 0.0), (4.0, 0.0), (0.0, 4.0), (4.0, 4.0)]
+
+    for name, (minx, miny) in zip(names, corners):
+        content = textwrap.dedent(template.format(minx, miny))
+        tmpdir.join(name).write(content)
+
+    inputs = [str(tmpdir.join(name)) for name in names]
+    outputname = str(tmpdir.join("merged.asc"))
+
+    runner = CliRunner()
+    result = runner.invoke(main_group, ["merge", "-f", "AAIGrid"] + precision + inputs + [outputname])
+    assert result.exit_code == 0
+    assert open(outputname).read() == textwrap.dedent(expected)


=====================================
tests/test_warpedvrt.py
=====================================
@@ -496,6 +496,35 @@ def test_warp_warp(dsrec, path_rgb_byte_tif):
             assert "1 N GTiff" in records[0]
 
 
+def test_out_dtype(red_green):
+    """Read as float"""
+    with rasterio.Env():
+        with rasterio.open(str(red_green.join("red.tif"))) as src, WarpedVRT(
+            src,
+            transform=affine.Affine.translation(-src.width / 4, src.height / 4) * src.transform,
+            width=2 * src.width,
+            height=2 * src.height
+        ) as vrt:
+            data = vrt.read(out_dtype="float32")
+            image = numpy.moveaxis(data, 0, -1)
+            assert image[31, 31, 0] == 0.0
+            assert image[32, 32, 0] == 204.0
+            assert image[32, 32, 1] == 17.0
+
+
+def test_sample(red_green):
+    """See https://github.com/mapbox/rasterio/issues/1833."""
+    with rasterio.Env():
+        with rasterio.open(str(red_green.join("red.tif"))) as src, WarpedVRT(
+            src,
+            transform=affine.Affine.translation(-src.width / 4, src.height / 4) * src.transform,
+            width=2 * src.width,
+            height=2 * src.height
+        ) as vrt:
+            sample = next(vrt.sample([(-20, -50)]))
+            assert not sample.any()
+
+
 @pytest.fixture
 def dsrec(capfd):
     """GDAL's open dataset records as a pytest fixture"""



View it on GitLab: https://salsa.debian.org/debian-gis-team/rasterio/commit/ef125c107addc4120c7463ebcc7839794e7baba0
