[Git][debian-gis-team/pyninjotiff][master] 6 commits: New upstream version 0.3.0
Antonio Valentino
gitlab at salsa.debian.org
Sat Feb 29 07:39:38 GMT 2020
Antonio Valentino pushed to branch master at Debian GIS Project / pyninjotiff
Commits:
d81a2468 by Antonio Valentino at 2020-02-28T15:43:15+00:00
New upstream version 0.3.0
- - - - -
1df0ccfc by Antonio Valentino at 2020-02-28T15:43:16+00:00
Update upstream source from tag 'upstream/0.3.0'
Update to upstream version '0.3.0'
with Debian dir 2bb1353ebdccd7d8c66aa44be8afcbfe1c7f509d
- - - - -
8aca00dd by Antonio Valentino at 2020-02-28T15:44:02+00:00
New upstream release
- - - - -
497062cc by Antonio Valentino at 2020-02-28T16:48:52+01:00
Add dependency on dask and xarray
- - - - -
1dd6e6b5 by Antonio Valentino at 2020-02-29T07:07:19+00:00
Rules-Requires-Root: no
- - - - -
e9016607 by Antonio Valentino at 2020-02-29T07:08:18+00:00
Set distribution to unstable
- - - - -
11 changed files:
- .bumpversion.cfg
- .gitignore
- + .pre-commit-config.yaml
- .travis.yml
- changelog.rst
- debian/changelog
- debian/control
- pyninjotiff/ninjotiff.py
- pyninjotiff/tests/test_ninjotiff.py
- pyninjotiff/version.py
- setup.py
Changes:
=====================================
.bumpversion.cfg
=====================================
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.2.0
+current_version = 0.3.0
commit = True
tag = True
=====================================
.gitignore
=====================================
@@ -44,6 +44,7 @@ nosetests.xml
coverage.xml
*,cover
.hypothesis/
+.mypy_cache/
# Translations
*.mo
@@ -61,7 +62,7 @@ instance/
.scrapy
# Sphinx documentation
-docs/_build/
+doc/source/_build/
# PyBuilder
target/
@@ -87,3 +88,7 @@ ENV/
# Rope project settings
.ropeproject
+
+# test images
+*tif
+*tiff
=====================================
.pre-commit-config.yaml
=====================================
@@ -0,0 +1,8 @@
+exclude: '^$'
+fail_fast: false
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v2.2.3
+ hooks:
+ - id: flake8
+ additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear]
=====================================
.travis.yml
=====================================
@@ -1,6 +1,5 @@
language: python
python:
- - "2.7"
- "3.6"
- "3.7"
=====================================
changelog.rst
=====================================
@@ -2,6 +2,33 @@ Changelog
=========
+v0.3.0 (2020-02-25)
+-------------------
+- update changelog. [Martin Raspaud]
+- Bump version: 0.2.0 → 0.3.0. [Martin Raspaud]
+- Merge pull request #21 from mraspaud/feature-dask-start. [Martin
+ Raspaud]
+
+ Adapt ninjotiff to xarray and dask
+- Require dask dataframe. [Martin Raspaud]
+- Specify dask[array] as dependency. [Martin Raspaud]
+- Make ninjotiff.py xarray-based and dask friendly. [Martin Raspaud]
+- Update .gitignore. [Martin Raspaud]
+- Add a precommit config. [Martin Raspaud]
+- Merge pull request #20 from mraspaud/feature-colormap-unittests.
+ [Martin Raspaud]
+
+ Add unittests for the colormapped albedo and IR images, and thumbnails
+- Remove python 2.7 from travis. [Martin Raspaud]
+- Refactor nav parameter computations. [Martin Raspaud]
+- Add test for tags and thumbnails. [Martin Raspaud]
+- Test colormap and values separately. [Martin Raspaud]
+- Fix wavelength and calibration for the tests. [Martin Raspaud]
+- Fix the time of the test images. [Martin Raspaud]
+- Add unittests for the colormapped albedo and IR images. [Martin
+ Raspaud]
+
+
v0.2.0 (2019-09-19)
-------------------
- update changelog. [Martin Raspaud]
@@ -105,4 +132,3 @@ v0.1.0 (2017-10-16)
- Initial commit. [Martin Raspaud]
-
=====================================
debian/changelog
=====================================
@@ -1,8 +1,12 @@
-pyninjotiff (0.2.0-2) UNRELEASED; urgency=medium
+pyninjotiff (0.3.0-1) unstable; urgency=medium
+ * New upstream release.
* Bump Standards-Version to 4.5.0, no changes.
+  * debian/control
+ - add dependency on dask and xarray
+ - explicitly specify Rules-Requires-Root: no
- -- Antonio Valentino <antonio.valentino at tiscali.it> Mon, 30 Sep 2019 19:46:45 +0200
+ -- Antonio Valentino <antonio.valentino at tiscali.it> Sat, 29 Feb 2020 07:07:22 +0000
pyninjotiff (0.2.0-1) unstable; urgency=medium
=====================================
debian/control
=====================================
@@ -4,6 +4,7 @@ Uploaders: Antonio Valentino <antonio.valentino at tiscali.it>
Section: python
Priority: optional
Testsuite: autopkgtest-pkg-python
+Rules-Requires-Root: no
Build-Depends: debhelper-compat (= 12),
dh-python,
python3-all,
@@ -24,11 +25,13 @@ Homepage: https://github.com/pytroll/pyninjotiff
Package: python3-pyninjotiff
Architecture: all
-Depends: python3-matplotlib,
+Depends: python3-dask,
+ python3-matplotlib,
python3-numpy,
python3-pyproj,
python3-pyresample,
python3-six,
+ python3-xarray,
${python3:Depends},
${misc:Depends}
Recommends: ${python3:Recommends}
=====================================
pyninjotiff/ninjotiff.py
=====================================
@@ -41,11 +41,14 @@ from copy import deepcopy
from datetime import datetime
import numpy as np
+from dask import delayed
+import dask.array as da
+import xarray as xr
from pyproj import Proj
from pyresample.utils import proj4_radius_parameters
-from pyninjotiff import tifffile
+from pyninjotiff import tifffile as local_tifffile
log = logging.getLogger(__name__)
@@ -282,7 +285,8 @@ def _get_projection_name(area_def):
return 'SPOL'
else:
return 'NPOL'
- return None
+ # FIXME: this feels like a hack
+ return area_def.proj_id.split('_')[-1]
def _get_pixel_size(projection_name, area_def):
@@ -325,7 +329,7 @@ def _get_satellite_altitude(filename):
def _finalize(img, dtype=np.uint8, value_range_measurement_unit=None,
data_is_scaled_01=True, fill_value=None):
- """Finalize a mpop GeoImage for Ninjo.
+ """Finalize a trollimage.Image for Ninjo.
Specially take care of physical scale and offset.
@@ -367,90 +371,70 @@ def _finalize(img, dtype=np.uint8, value_range_measurement_unit=None,
log.debug("Forcing fill value to %s", fill_value)
# Go back to the masked_array for compatibility
# with the following part of the code.
- if (np.issubdtype(img.data[0].dtype, np.integer)
- and '_FillValue' in img.data[0].attrs):
- nodata_value = img.data[0].attrs['_FillValue']
- data = img.data[0].values
- data = np.ma.array(data, mask=(data == nodata_value))
+ if (np.issubdtype(img.data.dtype, np.integer)
+ and '_FillValue' in img.data.attrs):
+ nodata_value = img.data.attrs['_FillValue']
+ if fill_value is None:
+ fill_value = nodata_value
+ data = img.data.squeeze()
else:
- data = img.data[0].to_masked_array()
+ data = img.data.squeeze()
fill_value = fill_value if fill_value is not None else np.iinfo(dtype).min
- log.debug("Before scaling: %.2f, %.2f, %.2f" %
- (data.min(), data.mean(), data.max()))
+ # log.debug("Before scaling: %.2f, %.2f, %.2f" %
+ # (data.min(), data.mean(), data.max()))
+ if value_range_measurement_unit and data_is_scaled_01:
+ # No additional scaling of the input data - assume that data is
+ # within [0.0, 1.0] and interpret 0.0 as
+ # value_range_measurement_unit[0] and 1.0 as
+ # value_range_measurement_unit[1]
- if np.ma.count_masked(data) == data.size:
- # All data is masked
- data = np.ones(data.shape, dtype=dtype) * fill_value
- scale = 1
- offset = 0
- else:
- if value_range_measurement_unit and data_is_scaled_01:
- # No additional scaling of the input data - assume that data is
- # within [0.0, 1.0] and interpret 0.0 as
- # value_range_measurement_unit[0] and 1.0 as
- # value_range_measurement_unit[1]
-
- # Make room for the transparent pixel value.
- data = data.clip(0, 1)
- data *= (np.iinfo(dtype).max - 1)
- data += 1
-
- scale = ((value_range_measurement_unit[1]
- - value_range_measurement_unit[0])
- / (np.iinfo(dtype).max - 1))
- # Handle the case where all data has the same value.
- scale = scale or 1
- offset = value_range_measurement_unit[0]
-
- mask = data.mask
- data = np.round(data.data).astype(dtype)
- offset -= scale
+ # Make room for the transparent pixel value.
+ data = data.clip(0, 1)
+ data *= (np.iinfo(dtype).max - 1)
+ data += 1
- if fill_value is None:
- fill_value = 0
+ scale = ((value_range_measurement_unit[1]
+ - value_range_measurement_unit[0])
+ / (np.iinfo(dtype).max - 1))
+ # Handle the case where all data has the same value.
+ scale = scale.where(scale != 0, 1)
+ offset = value_range_measurement_unit[0]
+
+ data = data.round().astype(dtype)
+ offset -= scale
+ if fill_value is None:
+ fill_value = 0
+
+ else:
+ if value_range_measurement_unit:
+ data.clip(value_range_measurement_unit[0],
+ value_range_measurement_unit[1], data)
+ chn_min = value_range_measurement_unit[0]
+ chn_max = value_range_measurement_unit[1]
+ log.debug("Scaling, using value range %.2f - %.2f" %
+ (value_range_measurement_unit[0], value_range_measurement_unit[1]))
else:
- if value_range_measurement_unit:
- data.clip(value_range_measurement_unit[0],
- value_range_measurement_unit[1], data)
- chn_min = value_range_measurement_unit[0]
- chn_max = value_range_measurement_unit[1]
- log.debug("Scaling, using value range %.2f - %.2f" %
- (value_range_measurement_unit[0], value_range_measurement_unit[1]))
- else:
- chn_max = data.max()
- chn_min = data.min()
- log.debug("Doing auto scaling")
-
- # Make room for transparent pixel.
- scale = ((chn_max - chn_min) /
- (np.iinfo(dtype).max - 1.0))
-
- # Handle the case where all data has the same value.
- scale = scale or 1
- offset = chn_min
-
- # Scale data to dtype, and adjust for transparent pixel forced
- # to be minimum.
- mask = data.mask
- data = 1 + ((data.data - offset) / scale).astype(dtype)
- offset -= scale
-
- data[mask] = fill_value
-
- if log.getEffectiveLevel() == logging.DEBUG:
- d__ = np.ma.array(data, mask=(data == fill_value))
- log.debug("After scaling: %.2f, %.2f, %.2f" % (d__.min(),
- d__.mean(),
- d__.max()))
- d__ = data * scale + offset
- d__ = np.ma.array(d__, mask=(data == fill_value))
- log.debug("Rescaling: %.2f, %.2f, %.2f" % (d__.min(),
- d__.mean(),
- d__.max()))
- del d__
+ chn_max = data.max()
+ chn_min = data.min()
+ log.debug("Doing auto scaling")
+
+ # Make room for transparent pixel.
+ scale = ((chn_max - chn_min) / (np.iinfo(dtype).max - 1.0))
+
+ # Handle the case where all data has the same value.
+ scale = scale.where(scale != 0, 1)
+ scale = scale.where(scale.notnull(), 1)
+ offset = chn_min.where(chn_min.notnull(), 0)
+
+ # Scale data to dtype, and adjust for transparent pixel forced
+ # to be minimum.
+ data = (1 + ((data - offset) / scale)).astype(dtype)
+ offset -= scale
+
+ data = data.where(data.notnull(), fill_value)
return data, scale, offset, fill_value
@@ -459,23 +443,11 @@ def _finalize(img, dtype=np.uint8, value_range_measurement_unit=None,
channels, fill_value = img._finalize(dtype)
else:
data, mode = img.finalize(fill_value=fill_value, dtype=dtype)
- # Go back to the masked_array for compatibility with
- # the rest of the code.
- channels = data.to_masked_array()
+ data = data.transpose('y', 'x', 'bands')
# Is this fill_value ok or what should it be?
- fill_value = (0, 0, 0, 0)
-
- if isinstance(img, np.ma.MaskedArray) and fill_value is None:
- mask = (np.ma.getmaskarray(channels[0]) &
- np.ma.getmaskarray(channels[1]) &
- np.ma.getmaskarray(channels[2]))
- channels.append((np.ma.logical_not(mask) *
- np.iinfo(channels[0].dtype).max).astype(channels[0].dtype))
- fill_value = (0, 0, 0, 0)
-
- data = np.dstack([channel.filled(fill_v)
- for channel, fill_v in zip(channels, fill_value)])
- return data, 1.0, 0.0, fill_value[0]
+ fill_value = fill_value or 0
+
+ return data, 1.0, 0.0, fill_value
elif img.mode == 'RGBA':
if not isinstance(img, np.ma.MaskedArray):
@@ -548,8 +520,8 @@ def save(img, filename, ninjo_product_name=None, writer_options=None, data_is_sc
fill_value = int(kwargs['fill_value'])
try:
- value_range_measurement_unit = (float(kwargs["ch_min_measurement_unit"]),
- float(kwargs["ch_max_measurement_unit"]))
+ value_range_measurement_unit = (xr.DataArray(kwargs["ch_min_measurement_unit"]).astype(float),
+ xr.DataArray(kwargs["ch_max_measurement_unit"]).astype(float))
except KeyError:
value_range_measurement_unit = None
@@ -585,7 +557,48 @@ def save(img, filename, ninjo_product_name=None, writer_options=None, data_is_sc
g += [0] * (256 - len(g))
b += [0] * (256 - len(b))
kwargs['cmap'] = r, g, b
- write(data, filename, area_def, ninjo_product_name, **kwargs)
+ return write(data, filename, area_def, ninjo_product_name, **kwargs)
+
+
+def ninjo_nav_parameters(options, area_def):
+ """Fill options with the navigation parameter in Ninjo format."""
+ # TODO: add altitude if available
+ proj = Proj(area_def.proj_dict)
+ upper_left = proj(
+ area_def.area_extent[0],
+ area_def.area_extent[3],
+ inverse=True)
+ lower_right = proj(
+ area_def.area_extent[2],
+ area_def.area_extent[1],
+ inverse=True)
+
+ # Ninjo's projection name.
+ options.setdefault('projection', _get_projection_name(area_def))
+
+ # Get pixel size
+ if 'pixel_xres' not in options or 'pixel_yres' not in options:
+ options['pixel_xres'], options['pixel_yres'] = \
+ _get_pixel_size(options['projection'], area_def)
+
+ options['meridian_west'] = upper_left[0]
+ options['meridian_east'] = lower_right[0]
+ if options['projection'].endswith("POL"):
+ if 'lat_ts' in area_def.proj_dict:
+ options['ref_lat1'] = area_def.proj_dict['lat_ts']
+ options['ref_lat2'] = 0
+ else:
+ if 'lat_0' in area_def.proj_dict:
+ options['ref_lat1'] = area_def.proj_dict['lat_0']
+ options['ref_lat2'] = 0
+ if 'lon_0' in area_def.proj_dict:
+ options['central_meridian'] = area_def.proj_dict['lon_0']
+
+ a, b = proj4_radius_parameters(area_def.proj_dict)
+ options['radius_a'] = a
+ options['radius_b'] = b
+ options['origin_lon'] = upper_left[0]
+ options['origin_lat'] = upper_left[1]
def write(image_data, output_fn, area_def, product_name=None, **kwargs):
@@ -610,33 +623,23 @@ def write(image_data, output_fn, area_def, product_name=None, **kwargs):
kwargs : dict
See _write
"""
- proj = Proj(area_def.proj_dict)
- upper_left = proj(
- area_def.area_extent[0],
- area_def.area_extent[3],
- inverse=True)
- lower_right = proj(
- area_def.area_extent[2],
- area_def.area_extent[1],
- inverse=True)
-
- if len(image_data.shape) == 3:
- if image_data.shape[2] == 4:
- shape = (area_def.y_size, area_def.x_size, 4)
+ if len(image_data.sizes) == 3:
+ if image_data.sizes['bands'] == 4:
+ # shape = (area_def.y_size, area_def.x_size, 4)
log.info("Will generate RGBA product")
- else:
- shape = (area_def.y_size, area_def.x_size, 3)
+ write_rgb = True
+ elif image_data.sizes['bands'] == 3:
+ # shape = (area_def.y_size, area_def.x_size, 3)
log.info("Will generate RGB product")
- write_rgb = True
+ write_rgb = True
+ else:
+ write_rgb = False
+ log.info("Will generate single band product")
+
else:
- shape = (area_def.y_size, area_def.x_size)
write_rgb = False
log.info("Will generate single band product")
- if image_data.shape != shape:
- raise ValueError("Raster shape %s does not correspond to expected shape %s" % (
- str(image_data.shape), str(shape)))
-
# Ninjo's physical units and value.
# If just a physical unit (e.g. 'C') is passed, it will then be
# translated into Ninjo's unit and value (e.q 'CELCIUS' and 'T').
@@ -645,22 +648,6 @@ def write(image_data, output_fn, area_def, product_name=None, **kwargs):
kwargs['physic_unit'], kwargs['physic_value'] = \
_get_physic_value(physic_unit)
- # Ninjo's projection name.
- kwargs['projection'] = kwargs.pop('projection', None) or \
- _get_projection_name(area_def) or \
- area_def.proj_id.split('_')[-1]
-
- # Get pixel size
- if 'pixel_xres' not in kwargs or 'pixel_yres' not in kwargs:
- kwargs['pixel_xres'], kwargs['pixel_yres'] = \
- _get_pixel_size(kwargs['projection'], area_def)
-
- # Get altitude.
- altitude = kwargs.pop('altitude', None) or \
- _get_satellite_altitude(output_fn)
- if altitude is not None:
- kwargs['altitude'] = altitude
-
if product_name:
# If ninjo_product_file in kwargs, load ninjo_product_file as config file
if 'ninjo_product_file' in kwargs:
@@ -670,29 +657,14 @@ def write(image_data, output_fn, area_def, product_name=None, **kwargs):
else:
options = {}
- options['meridian_west'] = upper_left[0]
- options['meridian_east'] = lower_right[0]
- if kwargs['projection'].endswith("POL"):
- if 'lat_ts' in area_def.proj_dict:
- options['ref_lat1'] = area_def.proj_dict['lat_ts']
- options['ref_lat2'] = 0
- else:
- if 'lat_0' in area_def.proj_dict:
- options['ref_lat1'] = area_def.proj_dict['lat_0']
- options['ref_lat2'] = 0
- if 'lon_0' in area_def.proj_dict:
- options['central_meridian'] = area_def.proj_dict['lon_0']
-
- a, b = proj4_radius_parameters(area_def.proj_dict)
- options['radius_a'] = a
- options['radius_b'] = b
- options['origin_lon'] = upper_left[0]
- options['origin_lat'] = upper_left[1]
- options['min_gray_val'] = image_data.min()
- options['max_gray_val'] = image_data.max()
options.update(kwargs) # Update/overwrite with passed arguments
+ if len(image_data.sizes) == 2:
+ options['min_gray_val'] = image_data.data.min().astype(int)
+ options['max_gray_val'] = image_data.data.max().astype(int)
+
+ ninjo_nav_parameters(options, area_def)
- _write(image_data, output_fn, write_rgb=write_rgb, **options)
+ return _write(image_data, output_fn, write_rgb=write_rgb, **options)
# -----------------------------------------------------------------------------
@@ -859,8 +831,8 @@ def _write(image_data, output_fn, write_rgb=False, **kwargs):
ref_lat1 = _eval_or_none("ref_lat1", float)
ref_lat2 = _eval_or_none("ref_lat2", float)
central_meridian = _eval_or_none("central_meridian", float)
- min_gray_val = int(kwargs.pop("min_gray_val", 0))
- max_gray_val = int(kwargs.pop("max_gray_val", 255))
+ min_gray_val = kwargs.pop("min_gray_val", 0)
+ max_gray_val = kwargs.pop("max_gray_val", 255)
altitude = _eval_or_none("altitude", float)
is_blac_corrected = int(bool(kwargs.pop("is_blac_corrected", 0)))
is_atmo_corrected = int(bool(kwargs.pop("is_atmo_corrected", 0)))
@@ -872,8 +844,8 @@ def _write(image_data, output_fn, write_rgb=False, **kwargs):
physic_value = str(kwargs.pop("physic_value", 'None'))
physic_unit = str(kwargs.pop("physic_unit", 'None'))
- gradient = float(kwargs.pop("gradient", 1.0))
- axis_intercept = float(kwargs.pop("axis_intercept", 0.0))
+ gradient = kwargs.pop("gradient", 1.0)
+ axis_intercept = kwargs.pop("axis_intercept", 0.0)
try:
transparent_pix = int(kwargs.get("transparent_pix", -1))
except Exception:
@@ -1075,27 +1047,34 @@ def _write(image_data, output_fn, write_rgb=False, **kwargs):
header_only_keys = ('byteorder', 'bigtiff', 'software', 'writeshape')
for key in header_only_keys:
if key in args:
- tifargs[key] = args[key]
- del args[key]
-
+ tifargs[key] = args.pop(key)
if 'writeshape' not in args:
args['writeshape'] = True
if 'bigtiff' not in tifargs and \
image_data.size * image_data.dtype.itemsize > 2000 * 2 ** 20:
tifargs['bigtiff'] = True
+ factor = 2
+ factors = []
+ while image_data.shape[0] // factor > tile_length and image_data.shape[1] // factor > tile_width:
+ factors.append(factor)
+ factor **= 2
+ if kwargs.get('compute', True):
+ return tiffwrite(output_fn, image_data, args, tifargs, factors)
+ else:
+ return delayed(tiffwrite)(output_fn, image_data, args, tifargs, factors)
+
- with tifffile.TiffWriter(output_fn, **tifargs) as tif:
+def tiffwrite(output_fn, image_data, args, tifargs, ovw_factors):
+ """Write to tiff."""
+ with local_tifffile.TiffWriter(output_fn, **tifargs) as tif:
+ image_data, args = da.compute(image_data, args)
tif.save(image_data, **args)
- for _, scale in enumerate((2, 4, 8, 16)):
- shape = (image_data.shape[0] / scale,
- image_data.shape[1] / scale)
- if shape[0] > tile_width and shape[1] > tile_length:
- args = _create_args(image_data[::scale, ::scale],
- pixel_xres * scale, pixel_yres * scale)
- for key in header_only_keys:
- if key in args:
- del args[key]
- tif.save(image_data[::scale, ::scale], **args)
+ for factor in ovw_factors:
+ ovw_args = args.copy()
+ ovw_args['extratags'] = dict()
+ ovw_args['tile_length'] //= factor
+ ovw_args['tile_width'] //= factor
+ tif.save(image_data[::factor, ::factor], **ovw_args)
log.info("Successfully created a NinJo tiff file: '%s'" % (output_fn,))
@@ -1116,7 +1095,7 @@ def read_tags(filename):
A list tags, one tag dictionary per page.
"""
pages = []
- with tifffile.TiffFile(filename) as tif:
+ with local_tifffile.TiffFile(filename) as tif:
for page in tif:
tags = {}
for tag in page.tags.values():
=====================================
pyninjotiff/tests/test_ninjotiff.py
=====================================
@@ -43,6 +43,7 @@ class FakeImage(object):
self.data = data
def finalize(self, fill_value=None, dtype=None):
+ """Finalize the image."""
if dtype is None:
dtype = np.uint8
if np.issubdtype(self.data.dtype, np.floating) and np.issubdtype(dtype, np.integer):
@@ -54,7 +55,10 @@ class FakeImage(object):
class FakeArea(object):
+ """Fake area class."""
+
def __init__(self, proj_dict, extent, y_size, x_size):
+ """Init the fake area."""
self.proj_dict = proj_dict
self.area_extent = extent
self.x_size, self.y_size = x_size, y_size
@@ -63,7 +67,10 @@ class FakeArea(object):
def test_write_bw():
- """Test saving a BW image."""
+ """Test saving a BW image.
+
+ Reflectances.
+ """
from pyninjotiff.ninjotiff import save
from pyninjotiff.tifffile import TiffFile
@@ -80,16 +87,16 @@ def test_write_bw():
('name', '1'),
('level', None),
('modifiers', ()),
- ('wavelength', (10.3, 10.8, 11.3)),
- ('calibration', 'brightness_temperature'),
+ ('wavelength', (0.5, 0.6, 0.7)),
+ ('calibration', 'reflectance'),
('start_time', TIME - datetime.timedelta(minutes=5)),
('end_time', TIME),
('area', area),
('ancillary_variables', []),
('enhancement_history', [{'offset': offset, 'scale': scale}])])
- kwargs = {'ch_min_measurement_unit': np.array([0]),
- 'ch_max_measurement_unit': np.array([120]),
+ kwargs = {'ch_min_measurement_unit': xr.DataArray(0),
+ 'ch_max_measurement_unit': xr.DataArray(120),
'compute': True, 'fill_value': None, 'sat_id': 6300014,
'chan_id': 100015, 'data_cat': 'PORN', 'data_source': 'SMHI',
'physic_unit': '%', 'nbits': 8}
@@ -105,9 +112,12 @@ def test_write_bw():
print(filename)
save(img, filename, data_is_scaled_01=True, **kwargs)
tif = TiffFile(filename)
- res = tif[0].asarray()
- assert(np.allclose(res[0, 0, ::256],
- np.array([256, 22016, 43520, 65280])))
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+ for i in range(3):
+ assert(np.all(np.array(colormap[i * 256:(i + 1) * 256]) == np.arange(256) * 256))
+ assert(np.all(res[0, ::256] == np.array([1, 86, 170, 255])))
def test_write_bw_inverted_ir():
@@ -153,9 +163,12 @@ def test_write_bw_inverted_ir():
print(filename)
save(img, filename, data_is_scaled_01=True, **kwargs)
tif = TiffFile(filename)
- res = tif[0].asarray()
- assert(np.allclose(res[0, 0, ::256],
- np.array([65024, 43264, 21760, 0])))
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+ for i in range(3):
+ assert(np.all(np.array(colormap[i * 256:(i + 1) * 256]) == np.arange(255, -1, -1) * 256))
+ assert(np.all(res[0, ::256] == np.array([1, 86, 170, 255])))
def test_write_bw_fill():
@@ -176,8 +189,8 @@ def test_write_bw_fill():
('name', '1'),
('level', None),
('modifiers', ()),
- ('wavelength', (10.3, 10.8, 11.3)),
- ('calibration', 'brightness_temperature'),
+ ('wavelength', (0.5, 0.6, 0.7)),
+ ('calibration', 'reflectance'),
('start_time', TIME - datetime.timedelta(minutes=25)),
('end_time', TIME - datetime.timedelta(minutes=20)),
('area', area),
@@ -205,9 +218,13 @@ def test_write_bw_fill():
print(filename)
save(img, filename, data_is_scaled_01=True, **kwargs)
tif = TiffFile(filename)
- res = tif[0].asarray()
- assert(np.allclose(res[0, 0, ::256],
- np.array([256, 22016, 43520, 65280])))
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+ for i in range(3):
+ assert(np.all(np.array(colormap[i * 256:(i + 1) * 256]) == np.arange(256) * 256))
+ assert(np.all(res[0, ::256] == np.array([1, 86, 170, 255])))
+ assert(np.all(res[256, :] == 0))
def test_write_bw_inverted_ir_fill():
@@ -257,9 +274,13 @@ def test_write_bw_inverted_ir_fill():
print(filename)
save(img, filename, data_is_scaled_01=True, **kwargs)
tif = TiffFile(filename)
- res = tif[0].asarray()
- assert(np.allclose(res[0, 0, ::256],
- np.array([65024, 43264, 21760, 0])))
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+ for i in range(3):
+ assert(np.all(np.array(colormap[i * 256:(i + 1) * 256]) == np.arange(255, -1, -1) * 256))
+ assert(np.all(res[0, ::256] == np.array([1, 86, 170, 255])))
+ assert(np.all(res[256, :] == 0))
def test_write_rgb():
@@ -400,6 +421,150 @@ def test_write_rgb_with_a():
np.testing.assert_allclose(res[:, :, 3] == 0, np.isnan(arr[0, :, :]))
+def test_write_rgb_tb():
+    """Test saving a non-transparent RGB with thumbnails."""
+ from pyninjotiff.ninjotiff import save
+ from pyninjotiff.tifffile import TiffFile
+
+ area = FakeArea({'ellps': 'WGS84', 'lat_0': 90.0, 'lat_ts': 60.0, 'lon_0': 0.0, 'proj': 'stere'},
+ (-1000000.0, -4500000.0, 2072000.0, -1428000.0),
+ 1024, 1024)
+
+ x_size, y_size = 1024, 1024
+ arr = np.zeros((3, y_size, x_size))
+ radius = min(x_size, y_size) / 2.0
+ centre = x_size / 2, y_size / 2
+
+ for x in range(x_size):
+ for y in range(y_size):
+ rx = x - centre[0]
+ ry = y - centre[1]
+ s = ((x - centre[0])**2.0 + (y - centre[1])**2.0)**0.5 / radius
+ if s <= 1.0:
+ h = ((np.arctan2(ry, rx) / np.pi) + 1.0) / 2.0
+ rgb = colorsys.hsv_to_rgb(h, s, 1.0)
+ arr[:, y, x] = np.array(rgb)
+
+ attrs = dict([('platform_name', 'NOAA-18'),
+ ('resolution', 1050),
+ ('polarization', None),
+ ('level', None),
+ ('sensor', 'avhrr-3'),
+ ('ancillary_variables', []),
+ ('area', area),
+ ('start_time', TIME - datetime.timedelta(minutes=45)),
+ ('end_time', TIME - datetime.timedelta(minutes=40)),
+ ('wavelength', None),
+ ('optional_datasets', []),
+ ('standard_name', 'overview'),
+ ('name', 'overview'),
+ ('prerequisites', [0.6, 0.8, 10.8]),
+ ('optional_prerequisites', []),
+ ('calibration', None),
+ ('modifiers', None),
+ ('mode', 'RGB'),
+ ('enhancement_history', [{'scale': np.array([1, 1, -1]), 'offset': np.array([0, 0, 1])},
+ {'scale': np.array([0.0266347, 0.03559078, 0.01329783]),
+ 'offset': np.array([-0.02524969, -0.01996642, 3.8918446])},
+ {'gamma': 1.6}])])
+
+ kwargs = {'compute': True, 'fill_value': None, 'sat_id': 6300014,
+ 'chan_id': 6500015, 'data_cat': 'PPRN', 'data_source': 'SMHI', 'nbits': 8,
+ 'tile_length': 256, 'tile_width': 256}
+ data = da.from_array(arr.clip(0, 1), chunks=1024)
+ data = xr.DataArray(data, coords={'bands': ['R', 'G', 'B']}, dims=[
+ 'bands', 'y', 'x'], attrs=attrs)
+
+ from trollimage.xrimage import XRImage
+ img = XRImage(data)
+
+ with tempfile.NamedTemporaryFile(delete=DELETE_FILES) as tmpfile:
+ filename = tmpfile.name
+ if not DELETE_FILES:
+ print(filename)
+ save(img, filename, data_is_scaled_01=False, **kwargs)
+ tif = TiffFile(filename)
+ res = tif[0].asarray()
+ assert(tif.pages[0].tags['tile_length'].value == 256)
+ assert(tif.pages[1].tags['tile_length'].value == 128)
+ assert(tif.pages[0].tags['tile_width'].value == 256)
+ assert(tif.pages[1].tags['tile_width'].value == 128)
+ assert(len(tif.pages) == 2)
+ assert(tif.pages[0].shape == (1024, 1024, 4))
+ assert(tif.pages[1].shape == (512, 512, 4))
+ for idx in range(3):
+ np.testing.assert_allclose(res[:, :, idx], np.round(
+ arr[idx, :, :] * 255).astype(np.uint8))
+
+ tags = {'new_subfile_type': 0,
+ 'image_width': 1024,
+ 'image_length': 1024,
+ 'bits_per_sample': (8, 8, 8, 8),
+ 'compression': 32946,
+ 'photometric': 2,
+ 'orientation': 1,
+ 'samples_per_pixel': 4,
+ 'planar_configuration': 1,
+ 'software': b'tifffile/pytroll',
+ 'datetime': b'2020:01:17 14:17:23',
+ 'tile_width': 256,
+ 'tile_length': 256,
+ 'tile_offsets': (951, 24414, 77352, 126135, 141546, 206260, 272951, 318709, 349650, 413166, 475735,
+ 519168, 547960, 570326, 615924, 666705),
+ 'tile_byte_counts': (23463, 52938, 48783, 15411, 64714, 66691, 45758, 30941, 63516, 62569, 43433, 28792,
+ 22366, 45598, 50781, 13371),
+ 'extra_samples': 2,
+ 'sample_format': (1, 1, 1, 1),
+ 'model_pixel_scale': (0.026949458523585643, 0.027040118922685666, 0.0),
+ 'model_tie_point': (0.0, 0.0, 0.0, -35.00279008179894, 73.3850622630575, 0.0),
+ '40000': b'NINJO',
+ '40001': 6300014,
+ '40002': 1579264321,
+ '40003': 1579267043,
+ '40004': 6500015,
+ '40005': 2,
+ '40006': b'/tmp/tmpb4kn93qt',
+ '40007': b'PPRN',
+ '40008': b'',
+ '40009': 24,
+ '40010': b'SMHI',
+ '40011': 1,
+ '40012': 1024,
+ '40013': 1,
+ '40014': 1024,
+ '40015': b'NPOL',
+ '40016': -35.00278854370117,
+ '40017': 24.72344398498535,
+ '40018': 6378137.0,
+ '40019': 6356752.5,
+ '40021': 60.0,
+ '40022': 0.0,
+ '40023': 0.0,
+ '40024': b'None',
+ '40025': b'None',
+ '40026': 0,
+ '40027': 255,
+ '40028': 1.0,
+ '40029': 0.0,
+ '40040': 0,
+ '40041': 0,
+ '40042': 1,
+ '40043': 0,
+ '50000': 0,
+ 'fill_order': 1,
+ 'rows_per_strip': 4294967295,
+ 'resolution_unit': 2,
+ 'predictor': 1,
+ 'ycbcr_subsampling': 1,
+ 'ycbcr_positioning': 1}
+ read_tags = tif.pages[0].tags
+ assert(read_tags.keys() == tags.keys())
+ for key, val in tags.items():
+ if key in ['datetime', '40002', '40003', '40006']:
+ continue
+ assert(val == read_tags[key].value)
+
+
@pytest.mark.skip(reason="this is not implemented yet.")
def test_write_rgb_classified():
"""Test saving a transparent RGB."""
@@ -416,8 +581,8 @@ def test_write_rgb_classified():
attrs = dict([('platform_name', 'NOAA-18'),
('resolution', 1050),
('polarization', None),
- ('start_time', TIME - datetime.timedelta(minutes=55)),
- ('end_time', TIME - datetime.timedelta(minutes=50)),
+ ('start_time', TIME - datetime.timedelta(minutes=65)),
+ ('end_time', TIME - datetime.timedelta(minutes=60)),
('level', None),
('sensor', 'avhrr-3'),
('ancillary_variables', []),
@@ -454,3 +619,181 @@ def test_write_rgb_classified():
np.testing.assert_allclose(res[:, :, idx], np.round(
np.nan_to_num(arr[idx, :, :]) * 255).astype(np.uint8))
np.testing.assert_allclose(res[:, :, 3] == 0, np.isnan(arr[0, :, :]))
+
+
+def test_write_bw_colormap():
+ """Test saving a BW image with a colormap.
+
+ Albedo with a colormap.
+
+ Reflectances are 0, 29.76, 60, 90.24, 120.
+ """
+ from pyninjotiff.ninjotiff import save
+ from pyninjotiff.tifffile import TiffFile
+
+ area = FakeArea({'ellps': 'WGS84', 'lat_0': 90.0, 'lat_ts': 60.0, 'lon_0': 0.0, 'proj': 'stere'},
+ (-1000000.0, -4500000.0, 2072000.0, -1428000.0),
+ 1024, 1024)
+ scale = 1.0 / 120
+ offset = 0.0
+ attrs = dict([('resolution', 1050),
+ ('polarization', None),
+ ('platform_name', 'NOAA-18'),
+ ('sensor', 'avhrr-3'),
+ ('units', '%'),
+ ('name', '1'),
+ ('level', None),
+ ('modifiers', ()),
+ ('wavelength', (0.5, 0.6, 0.7)),
+ ('calibration', 'reflectance'),
+ ('start_time', TIME - datetime.timedelta(minutes=75)),
+ ('end_time', TIME - datetime.timedelta(minutes=70)),
+ ('area', area),
+ ('ancillary_variables', []),
+ ('enhancement_history', [{'offset': offset, 'scale': scale}])])
+
+ cm_vis = [0, 4095, 5887, 7167, 8191, 9215, 9983, 10751, 11519, 12287, 12799,
+ 13567, 14079, 14847, 15359, 15871, 16383, 16895, 17407, 17919, 18175,
+ 18687, 19199, 19711, 19967, 20479, 20735, 21247, 21503, 22015, 22271,
+ 22783, 23039, 23551, 23807, 24063, 24575, 24831, 25087, 25599, 25855,
+ 26111, 26367, 26879, 27135, 27391, 27647, 27903, 28415, 28671, 28927,
+ 29183, 29439, 29695, 29951, 30207, 30463, 30975, 31231, 31487, 31743,
+ 31999, 32255, 32511, 32767, 33023, 33279, 33535, 33791, 34047, 34303,
+ 34559, 34559, 34815, 35071, 35327, 35583, 35839, 36095, 36351, 36607,
+ 36863, 37119, 37119, 37375, 37631, 37887, 38143, 38399, 38655, 38655,
+ 38911, 39167, 39423, 39679, 39935, 39935, 40191, 40447, 40703, 40959,
+ 40959, 41215, 41471, 41727, 41983, 41983, 42239, 42495, 42751, 42751,
+ 43007, 43263, 43519, 43519, 43775, 44031, 44287, 44287, 44543, 44799,
+ 45055, 45055, 45311, 45567, 45823, 45823, 46079, 46335, 46335, 46591,
+ 46847, 46847, 47103, 47359, 47615, 47615, 47871, 48127, 48127, 48383,
+ 48639, 48639, 48895, 49151, 49151, 49407, 49663, 49663, 49919, 50175,
+ 50175, 50431, 50687, 50687, 50943, 50943, 51199, 51455, 51455, 51711,
+ 51967, 51967, 52223, 52223, 52479, 52735, 52735, 52991, 53247, 53247,
+ 53503, 53503, 53759, 54015, 54015, 54271, 54271, 54527, 54783, 54783,
+ 55039, 55039, 55295, 55551, 55551, 55807, 55807, 56063, 56319, 56319,
+ 56575, 56575, 56831, 56831, 57087, 57343, 57343, 57599, 57599, 57855,
+ 57855, 58111, 58367, 58367, 58623, 58623, 58879, 58879, 59135, 59135,
+ 59391, 59647, 59647, 59903, 59903, 60159, 60159, 60415, 60415, 60671,
+ 60671, 60927, 60927, 61183, 61439, 61439, 61695, 61695, 61951, 61951,
+ 62207, 62207, 62463, 62463, 62719, 62719, 62975, 62975, 63231, 63231,
+ 63487, 63487, 63743, 63743, 63999, 63999, 64255, 64255, 64511, 64511,
+ 64767, 64767, 65023, 65023, 65279]
+
+ kwargs = {'ch_min_measurement_unit': np.array([0]),
+ 'ch_max_measurement_unit': np.array([120]),
+ 'compute': True, 'fill_value': None, 'sat_id': 6300014,
+ 'chan_id': 100015, 'data_cat': 'PORN', 'data_source': 'SMHI',
+ 'physic_unit': '%', 'nbits': 8, 'cmap': [cm_vis] * 3}
+
+ data = da.tile(da.repeat(da.arange(5, chunks=1024) / 4.0, 205)[:-1],
+ 1024).reshape((1, 1024, 1024))[:, :1024]
+ data = xr.DataArray(data, coords={'bands': ['L']}, dims=[
+ 'bands', 'y', 'x'], attrs=attrs)
+ img = FakeImage(data)
+ with tempfile.NamedTemporaryFile(delete=DELETE_FILES) as tmpfile:
+ filename = tmpfile.name
+ if not DELETE_FILES:
+ print(filename)
+ save(img, filename, data_is_scaled_01=True, **kwargs)
+ tif = TiffFile(filename)
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+
+ assert(len(colormap) == 768)
+ assert(np.allclose(colormap[:256], cm_vis))
+ assert(np.allclose(colormap[256:512], cm_vis))
+ assert(np.allclose(colormap[512:], cm_vis))
+ assert(np.allclose(res[0, ::205], np.array([1, 64, 128, 192, 255])))
+
+
+def test_write_ir_colormap():
+    """Test saving an IR image with a colormap.
+
+ IR with a colormap.
+
+ Temperatures are -70, -40.24, -10, 20.24, 50.
+ """
+ from pyninjotiff.ninjotiff import save
+ from pyninjotiff.tifffile import TiffFile
+
+ area = FakeArea({'ellps': 'WGS84', 'lat_0': 90.0, 'lat_ts': 60.0, 'lon_0': 0.0, 'proj': 'stere'},
+ (-1000000.0, -4500000.0, 2072000.0, -1428000.0),
+ 1024, 1024)
+ scale = 1.0 / 120
+ offset = 70.0 / 120
+ attrs = dict([('resolution', 1050),
+ ('polarization', None),
+ ('platform_name', 'NOAA-18'),
+ ('sensor', 'avhrr-3'),
+ ('units', 'K'),
+ ('name', '4'),
+ ('level', None),
+ ('modifiers', ()),
+ ('wavelength', (10.3, 10.8, 11.3)),
+ ('calibration', 'brightness_temperature'),
+ ('start_time', TIME - datetime.timedelta(minutes=85)),
+ ('end_time', TIME - datetime.timedelta(minutes=80)),
+ ('area', area),
+ ('ancillary_variables', []),
+ ('enhancement_history', [{'offset': offset, 'scale': scale}])])
+
+ ir_map = [255, 1535, 2559, 3327, 4095, 4863, 5375, 5887, 6399,
+ 6911, 7423, 7935, 8447, 8959, 9471, 9983, 10239, 10751,
+ 11263, 11519, 12031, 12287, 12799, 13055, 13567, 13823,
+ 14335, 14591, 14847, 15359, 15615, 16127, 16383, 16639,
+ 17151, 17407, 17663, 17919, 18431, 18687, 18943, 19199,
+ 19711, 19967, 20223, 20479, 20735, 21247, 21503, 21759,
+ 22015, 22271, 22527, 22783, 23295, 23551, 23807, 24063,
+ 24319, 24575, 24831, 25087, 25343, 25599, 25855, 26367,
+ 26623, 26879, 27135, 27391, 27647, 27903, 28159, 28415,
+ 28671, 28927, 29183, 29439, 29695, 29951, 30207, 30463,
+ 30719, 30975, 31231, 31487, 31743, 31999, 31999, 32255,
+ 32511, 32767, 33023, 33279, 33535, 33791, 34047, 34303,
+ 34559, 34815, 35071, 35327, 35327, 35583, 35839, 36095,
+ 36351, 36607, 36863, 37119, 37375, 37375, 37631, 37887,
+ 38143, 38399, 38655, 38911, 39167, 39167, 39423, 39679,
+ 39935, 40191, 40447, 40703, 40703, 40959, 41215, 41471,
+ 41727, 41983, 41983, 42239, 42495, 42751, 43007, 43263,
+ 43263, 43519, 43775, 44031, 44287, 44287, 44543, 44799,
+ 45055, 45311, 45311, 45567, 45823, 46079, 46335, 46335,
+ 46591, 46847, 47103, 47359, 47359, 47615, 47871, 48127,
+ 48127, 48383, 48639, 48895, 49151, 49151, 49407, 49663,
+ 49919, 49919, 50175, 50431, 50687, 50687, 50943, 51199,
+ 51455, 51455, 51711, 51967, 52223, 52223, 52479, 52735,
+ 52991, 52991, 53247, 53503, 53759, 53759, 54015, 54271,
+ 54527, 54527, 54783, 55039, 55039, 55295, 55551, 55807,
+ 55807, 56063, 56319, 56319, 56575, 56831, 57087, 57087,
+ 57343, 57599, 57599, 57855, 58111, 58367, 58367, 58623,
+ 58879, 58879, 59135, 59391, 59391, 59647, 59903, 60159,
+ 60159, 60415, 60671, 60671, 60927, 61183, 61183, 61439,
+ 61695, 61695, 61951, 62207, 62463, 62463, 62719, 62975,
+ 62975, 63231, 63487, 63487, 63743, 63999, 63999, 64255,
+ 64511, 64511, 64767, 65023, 65023, 65279]
+
+ kwargs = {'ch_min_measurement_unit': np.array([-70]),
+ 'ch_max_measurement_unit': np.array([50]),
+ 'compute': True, 'fill_value': None, 'sat_id': 6300014,
+ 'chan_id': 900015, 'data_cat': 'PORN', 'data_source': 'SMHI',
+ 'physic_unit': 'C', 'nbits': 8, 'cmap': [ir_map] * 3}
+
+ data = da.tile(da.repeat(da.arange(5, chunks=1024) / 4.0, 205)[:-1],
+ 1024).reshape((1, 1024, 1024))[:, :1024]
+ data = xr.DataArray(data, coords={'bands': ['L']}, dims=[
+ 'bands', 'y', 'x'], attrs=attrs)
+ img = FakeImage(data)
+ with tempfile.NamedTemporaryFile(delete=DELETE_FILES) as tmpfile:
+ filename = tmpfile.name
+ if not DELETE_FILES:
+ print(filename)
+ save(img, filename, data_is_scaled_01=True, **kwargs)
+ tif = TiffFile(filename)
+ page = tif[0]
+ res = page.asarray(colormapped=False).squeeze()
+ colormap = page.tags['color_map'].value
+
+ assert(len(colormap) == 768)
+ assert(np.allclose(colormap[:256], ir_map))
+ assert(np.allclose(colormap[256:512], ir_map))
+ assert(np.allclose(colormap[512:], ir_map))
+ assert(np.allclose(res[0, ::205], np.array([1, 64, 128, 192, 255])))
=====================================
pyninjotiff/version.py
=====================================
@@ -22,4 +22,4 @@
"""Version file."""
-__version__ = "v0.2.0"
+__version__ = "v0.3.0"
=====================================
setup.py
=====================================
@@ -20,8 +20,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""Setup for pyninjotiff.
-"""
+"""Setup for pyninjotiff."""
import imp
@@ -44,6 +43,6 @@ setup(name="pyninjotiff",
url="https://github.com/pytroll/pyninjotiff",
packages=['pyninjotiff'],
zip_safe=False,
- install_requires=['numpy >=1.6', 'six', 'pyproj', 'pyresample'],
+ install_requires=['numpy >=1.6', 'six', 'pyproj', 'pyresample', 'dask[dataframe]', 'xarray'],
# test_suite='pyninjotiff.tests.suite',
)
View it on GitLab: https://salsa.debian.org/debian-gis-team/pyninjotiff/-/compare/8361471afc69771172bda37b32c42eea29250854...e9016607c0bbd990bd7b77ee38e1bc15e707db2c
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/pyninjotiff/-/compare/8361471afc69771172bda37b32c42eea29250854...e9016607c0bbd990bd7b77ee38e1bc15e707db2c
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20200229/44b475bc/attachment-0001.html>
More information about the Pkg-grass-devel
mailing list