[mipp] 01/02: Imported Upstream version 0.9.1
Antonio Valentino
a_valentino-guest at moszumanska.debian.org
Sun Jun 29 15:37:43 UTC 2014
This is an automated email from the git hooks/post-receive script.
a_valentino-guest pushed a commit to branch master
in repository mipp.
commit 173051726818179c12037b46ca6b5468fbe73bd8
Author: Antonio Valentino <antonio.valentino at tiscali.it>
Date: Sat Jun 28 19:30:48 2014 +0000
Imported Upstream version 0.9.1
---
.gitignore | 12 +
LICENSE.txt | 165 ++++
MANIFEST.in | 3 +
README | 4 +
doc/Makefile | 89 +++
doc/source/_static/MSG2_20100217_1330_overview.png | Bin 0 -> 908063 bytes
doc/source/conf.py | 194 +++++
doc/source/index.rst | 22 +
doc/source/introduction.rst | 197 +++++
mipp/__init__.py | 66 ++
mipp/cfg.py | 119 +++
mipp/geotiff.py | 80 ++
mipp/log.py | 60 ++
mipp/mda.py | 84 ++
mipp/xrit/GOMS.py | 213 +++++
mipp/xrit/MSG.py | 870 +++++++++++++++++++++
mipp/xrit/MTP.py | 335 ++++++++
mipp/xrit/SGS.py | 124 +++
mipp/xrit/__init__.py | 12 +
mipp/xrit/_xrit.py | 383 +++++++++
mipp/xrit/bin_reader.py | 54 ++
mipp/xrit/convert.py | 72 ++
mipp/xrit/loader.py | 452 +++++++++++
mipp/xrit/mda.py | 13 +
mipp/xrit/sat.py | 247 ++++++
mipp/xsar/CSK.py | 141 ++++
mipp/xsar/RS2.py | 100 +++
mipp/xsar/TSX.py | 169 ++++
mipp/xsar/__init__.py | 3 +
mipp/xsar/mda.py | 14 +
mipp/xsar/sat.py | 173 ++++
scr/decompress_xrit | 58 ++
scr/decompress_xrit.cron | 7 +
scr/fsd_driver | 27 +
scr/gts_driver | 13 +
scr/list_xrit_headers | 24 +
scr/process_fsd | 173 ++++
scr/process_gts | 39 +
setup.py | 47 ++
testit | 7 +
tests/buildpath_to_syspath.py | 19 +
tests/data/.gitattributes | 2 +
tests/data/20110825_104705_TSX1_SAR_SC_HH.mda | 20 +
tests/data/GOES11_10_7_135W_20100201_0600.mda | 20 +
...SG2________-HRV______-000012___-201010111400-__ | Bin 0 -> 3235638 bytes
...SG2________-HRV______-000013___-201010111400-__ | Bin 0 -> 3235638 bytes
...SG2________-HRV______-000018___-201011091200-__ | Bin 0 -> 3235638 bytes
...SG2________-IR_108___-000004___-201010111400-__ | Bin 0 -> 2159158 bytes
...SG2________-IR_108___-000005___-201010111400-__ | Bin 0 -> 2159158 bytes
...SG2________-_________-EPI______-201010111400-__ | Bin 0 -> 380415 bytes
...SG2________-_________-EPI______-201011091200-__ | Bin 0 -> 380415 bytes
...SG2________-_________-PRO______-201010111400-__ | Bin 0 -> 425551 bytes
...SG2________-_________-PRO______-201011091200-__ | Bin 0 -> 425551 bytes
...OES11______-10_7_135W-000003___-201002010600-__ | Bin 0 -> 1639569 bytes
...OES11______-10_7_135W-000004___-201002010600-__ | Bin 0 -> 1639569 bytes
...OES11______-10_7_135W-PRO______-201002010600-__ | Bin 0 -> 16225 bytes
...TSAT1R_____-10_8_140E-000003___-200912210900-__ | Bin 0 -> 1286667 bytes
...TSAT1R_____-10_8_140E-000004___-200912210900-__ | Bin 0 -> 1286667 bytes
...TSAT1R_____-10_8_140E-PRO______-200912210900-__ | Bin 0 -> 145 bytes
...ET7________-00_7_057E-000005___-200912211200-__ | Bin 0 -> 2516163 bytes
...ET7________-00_7_057E-000006___-200912211200-__ | Bin 0 -> 2516163 bytes
...ET7________-00_7_057E-PRO______-200912211200-__ | Bin 0 -> 194434 bytes
tests/data/MET7_00_7_057E_20091221_1200.mda | 20 +
tests/data/MSG2_HRV_20101011_1400.mda | 19 +
tests/data/MSG2_HRV_20101109_1200.mda | 19 +
tests/data/MSG2_IR_108_20101011_1400.mda | 19 +
tests/data/MTSAT1R_10_8_140E_20091221_0900.mda | 20 +
...05_20110825T104727_NSG_023264_8133_test.TSX.tar | Bin 0 -> 2170880 bytes
tests/data/goes11.cfg | 49 ++
tests/data/met7.cfg | 44 ++
tests/data/msg2.cfg | 95 +++
tests/data/msg2.cfg.out | 28 +
tests/data/mtsat1r.cfg | 49 ++
tests/data/tx01.cfg | 19 +
tests/test_misc.py | 39 +
tests/test_xrit.py | 204 +++++
tests/test_xsar.py | 70 ++
77 files changed, 5620 insertions(+)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7b8dc37
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+*~
+*.[oa]
+*.so
+*.pyc
+build
+dist
+debian
+etc
+tmp
+tests/jojo.py
+setup.cfg
+mipp.egg-info
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..65c5ca8
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..2dc99e8
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,3 @@
+include LICENSE.txt
+include MANIFEST.in
+include mipp/xrit/convert/wrap_convert.h
diff --git a/README b/README
new file mode 100644
index 0000000..7816076
--- /dev/null
+++ b/README
@@ -0,0 +1,4 @@
+This is a Meteorological Ingest-Processing Package (mipp).
+
+Its main task is to convert satellite level-1.5 data into a
+format understood by mpop (http://github.com/mraspaud/mpop).
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..3fe8728
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,89 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/mipp.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/mipp.qhc"
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+ "run these through (pdf)latex."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/source/_static/MSG2_20100217_1330_overview.png b/doc/source/_static/MSG2_20100217_1330_overview.png
new file mode 100644
index 0000000..6406c09
Binary files /dev/null and b/doc/source/_static/MSG2_20100217_1330_overview.png differ
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000..ac29119
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,194 @@
+# -*- coding: utf-8 -*-
+#
+# mipp documentation build configuration file, created by
+# sphinx-quickstart on Fri Feb 26 16:06:36 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'mipp'
+copyright = u'2010, Lars Orum Rasmussen'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.2'
+# The full version, including alpha/beta/rc tags.
+release = '0.2'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'sphinxdoc'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'mippdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+latex_paper_size = 'a4'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'mipp.tex', u'MIPP and SMHI/DMI Common Processing Environment',
+ u'Lars Orum Rasmussen', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000..db219a6
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,22 @@
+.. mipp documentation master file, created by
+ sphinx-quickstart on Fri Feb 26 16:06:36 2010.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+MIPP
+====
+
+This is a presentation of:
+
+.. toctree::
+ :maxdepth: 1
+
+ introduction
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/doc/source/introduction.rst b/doc/source/introduction.rst
new file mode 100644
index 0000000..afa1b96
--- /dev/null
+++ b/doc/source/introduction.rst
@@ -0,0 +1,197 @@
+============================
+ python-mipp an introduction
+============================
+
+``mipp`` is a Meteorological Ingest-Processing Package (http://github.com/loerum/mipp).
+
+  It's a Python library and its main task is to convert satellite level-1.5 data into a
+ format understood by ``mpop`` (http://github.com/mraspaud/mpop).
+
+ A more sophisticated interface to satellite data objects is supported by ``mpop``.
+
+Initially, it will handle **MET7**, **GOES11**, **GOES12** and **MTSAT1R**,
+"eumetcasted" FSD data::
+
+ L-000-MTP___-MET7________-00_7_057E-PRO______-201002261600-__
+ L-000-MTP___-MET7________-00_7_057E-000001___-201002261600-C_
+ L-000-MTP___-MET7________-00_7_057E-000002___-201002261600-C_
+ L-000-MTP___-MET7________-00_7_057E-000003___-201002261600-C_
+ ...
+ ...
+ L-000-MSG2__-GOES11______-00_7_135W-PRO______-201002261600-__
+ L-000-MSG2__-GOES11______-00_7_135W-000001___-201002261600-C_
+ L-000-MSG2__-GOES11______-00_7_135W-000002___-201002261600-C_
+ L-000-MSG2__-GOES11______-00_7_135W-000003___-201002261600-C_
+ ...
+ ...
+
+
+``mipp`` will:
+ * decompress XRIT files (if Eumetsat's ``xRITDecompress`` is available).
+ * decode/strip-off (according to [CGMS]_, [MTP]_, [SGS]_) XRIT headers and collect meta-data.
+  * concatenate image data into a numpy-array.
+
+ * if needed, convert 10 bit data to 16 bit
+ * if a region is defined (by a slice or center, size), only read what is specified.
+
+.. note::
+
+ * MET7: not calibrated.
+   * GOES, MTSAT: calibration constants to Kelvin or Radiance (not Reflectance).
+
+
+
+Code Layout
+-----------
+
+.. describe:: xrit.py
+
+    It knows about the generic HRIT/XRIT format
+
+ * ``headers = read_headers(file_handle)``
+
+.. describe:: MTP.py
+
+ It knows about the specific format OpenMTP for MET7
+
+ * ``mda = read_metadata(prologue, image_file)``
+
+.. describe:: SGS.py
+
+ It knows about the specific format Support Ground Segments for GOES and MTSAT
+
+ * ``mda = read_metadata(prologue, image_files)``
+
+.. describe:: sat.py
+
+    It knows about satellites based on configuration files.
+ It returns a slice-able object (see below).
+
+ * ``image = load('met7', time_stamp, channel, mask=False, calibrated=True)``
+ * ``image = load_files(prologue, image_files, **kwarg)``
+
+.. describe:: slicer.py
+
+ It knows how to slice satellite images (return from ``load(...)``).
+ It returns meta-data and a numpy array.
+
+ * ``mda, image_data = image[1300:1800,220:520]``
+ * ``mda, image_data = image(center, size)``
+
+**Utilities**
+
+.. describe:: cfg.py
+
+ It knows how to read configuration files, describing satellites (see below).
+
+.. describe:: convert.py
+
+    10 to 16 bit converter (uses a C extension)
+
+.. describe:: bin_reader.py
+
+ It reads binary data (network byte order)
+
+ * ``read_uint1(buf)``
+ * ``read_uint2(buf)``
+ * ``read_float4(buf)``
+ * ...
+
+.. describe:: mda.py
+
+ A simple (anonymous) metadata reader and writer
+
+.. describe:: geosnav.py
+
+ It will convert from/to pixel coordinates to/from geographical longitude, latitude coordinates.
+
+Example definition of a satellite
+---------------------------------
+.. code-block:: ini
+
+ # An item like:
+ # name = value
+ # is read in python like:
+ # try:
+ # name = eval(value)
+ # except:
+ # name = str(value)
+ #
+
+ [satellite]
+ satname = 'meteosat'
+ number = '07'
+ instruments = ('mviri',)
+ projection = 'geos(57.0)'
+
+ [mviri-level2]
+ format = 'mipp'
+
+ [mviri-level1]
+ format = 'xrit/MTP'
+ dir = '/data/eumetcast/in'
+ filename = 'L-000-MTP___-MET7________-%(channel)s_057E-%(segment)s-%Y%m%d%H%M-__'
+
+ [mviri-1]
+ name = '00_7'
+ frequency = (0.5, 0.7, 0.9)
+ resolution = 2248.49
+ size = (5000, 5000)
+
+ [mviri-2]
+ name = '06_4'
+ frequency = (5.7, 6.4, 7.1)
+ resolution = 4496.98
+ size = (2500, 2500)
+
+ [mviri-3]
+ name = '11_5'
+ frequency = (10.5, 11.5, 12.5)
+ resolution = 4496.98
+ size = (2500, 2500)
+
+
+Usage
+-----
+.. code-block:: python
+
+ import xrit
+
+ image = xrit.sat.load('meteosat07', datetime(2010, 2, 1, 10, 0), '00_7', mask=True)
+ mda, image_data = image(center=(50., 10.), size=(600, 500))
+ print mda
+ fname = './' + mda.product_name + '.dat'
+ print >>sys.stderr, 'Writing', fname
+ fp = open(fname, "wb")
+ image_data.tofile(fp)
+ fp.close()
+
+A script, process_fsd
+---------------------
+.. code-block:: text
+
+ process_fsd --check-satellite <prologue-file>
+ check if we handle this satellite
+
+ process_fsd --check [-l] <prologue-file>
+ check if number of image segments are as planned
+ -l, list corresponding image segment files
+
+ process_fsd --decompress [-o<output-dir>] <file> ... <file>
+ decompress files to output-dir (default is working directory)
+ -l, list decompressed files
+
+ process_fsd --metadata <prologue-file> <image-segment> ... <image-segment>
+ print meta-data
+
+ process_fsd [-o<output-dir>] <prologue-file> <image-segment> ... <image-segment>
+        it will do a binary dump of image-data and an ascii dump of meta-data
+
+
+==============================
+
+ .. [CGMS] LRIT/HRIT Global Specification; CGMS 03; Issue 2.6; 12 August 1999
+ "MSG Ground Segment LRIT/HRIT Mission Specific Implementation"
+ EUM/MSG/SPE/057; Issue 6; 21 June 2006
+ .. [MTP] "The Meteosat Archive; Format Guide No. 1; Basic Imagery: OpenMTP Format"; EUM FG 1; Rev 2.1; April 2000
+ .. [SGS] "MSG Ground Segment LRIT/HRIT Mission Specific Implementation"; EUM/MSG/SPE/057; Issue 6; 21 June 2006
diff --git a/mipp/__init__.py b/mipp/__init__.py
new file mode 100644
index 0000000..c0fa216
--- /dev/null
+++ b/mipp/__init__.py
@@ -0,0 +1,66 @@
+#
+#
+import sys
+from datetime import datetime
+
+if sys.version_info < (2, 5):
+ import time
+ def strptime(string, fmt=None):
+ """This function is available in the datetime module only
+ from Python >= 2.5.
+ """
+ return datetime(*time.strptime(string, fmt)[:6])
+else:
+ strptime = datetime.strptime
+
+#-----------------------------------------------------------------------------
+#
+# All exception for the mipp module
+#
+#-----------------------------------------------------------------------------
+class MippError(Exception):
+ pass
+
+#-----------------------------------------------------------------------------
+#
+# Decoding error
+#
+#-----------------------------------------------------------------------------
+class DecodeError(MippError):
+ pass
+class UnknownSatellite(MippError):
+ pass
+#-----------------------------------------------------------------------------
+#
+# Image readings error
+#
+#-----------------------------------------------------------------------------
+class ReaderError(MippError):
+ pass
+
+class NoFiles(ReaderError):
+ pass
+
+#-----------------------------------------------------------------------------
+#
+# Config file reader error
+#
+#-----------------------------------------------------------------------------
+class ConfigReaderError(MippError):
+ pass
+
+#-----------------------------------------------------------------------------
+#
+# Navigations error
+#
+#-----------------------------------------------------------------------------
+class NavigationError(MippError):
+ pass
+
+#-----------------------------------------------------------------------------
+#
+# Calibrations error
+#
+#-----------------------------------------------------------------------------
+class CalibrationError(MippError):
+ pass
diff --git a/mipp/cfg.py b/mipp/cfg.py
new file mode 100644
index 0000000..dd87b57
--- /dev/null
+++ b/mipp/cfg.py
@@ -0,0 +1,119 @@
+#
+#
+#
+import os
+import re
+from ConfigParser import ConfigParser
+
+import mipp
+
+__all__ = ['read_config']
+
def read_config(satname, instrument=''):
    """Return a _ConfigReader for *satname*'s configuration file.

    The file is looked up under $PPP_CONFIG_DIR (see _ConfigReader);
    *instrument* may be omitted when the satellite has only one.
    """
    return _ConfigReader(satname, instrument)
+
class _ConfigReader(object):
    """Reader for a satellite configuration file.

    The file is looked up as $PPP_CONFIG_DIR/<satname>.cfg and parsed
    with ConfigParser.  Section names (except 'satellite') are
    implicitly prefixed with the instrument name, and channel sections
    are named '<instrument>-<number>'.
    """

    def __init__(self, satname, instrument=''):
        try:
            home = os.environ['PPP_CONFIG_DIR']
        except KeyError:
            raise mipp.ConfigReaderError(
                "PPP_CONFIG_DIR environment variable is not set")

        self.config_file = home + '/' + satname + '.cfg'
        if not os.path.isfile(self.config_file):
            raise mipp.ConfigReaderError(
                "unknown satellite: '%s' (no such file: '%s')"%
                (satname, self.config_file))
        self._config = ConfigParser()
        self._config.read(self.config_file)

        # The 'satellite' section lists the available instruments; an
        # explicit instrument is only required when there are several.
        instruments = self.get('satellite')['instruments']
        if not instrument:
            if len(instruments) == 1:
                instrument = instruments[0]
            else:
                raise mipp.ConfigReaderError("please specify instrument")
        else:
            if instrument not in instruments:
                raise mipp.ConfigReaderError("unknown instrument: '%s'"%
                                             instrument)
        self.instrument = instrument

        self._channels = self._channels2dict(instrument)

    def __call__(self, section):
        # Shorthand: cfg('level1') == cfg.get('level1')
        return self.get(section)

    def get(self, section):
        """Return *section* as a dict of _eval'ed option values.

        The instrument name is prepended when missing (except for
        'satellite'), so get(1) reads e.g. section 'seviri-1'.
        """
        options = {}
        section = str(section) # allow get(1)
        if section != 'satellite' and not section.startswith(self.instrument):
            section = self.instrument + '-' + section
        for key, val in self._config.items(section, raw=True):
            options[key] = _eval(val)
        return options

    def get_channel(self, name):
        """Return the _Channel named *name* (ConfigReaderError if unknown)."""
        try:
            return self._channels[name]
        except KeyError:
            raise mipp.ConfigReaderError("unknown channel: '%s'"%name)

    @property
    def channels(self):
        # dict: channel name -> _Channel
        return self._channels

    @property
    def channel_names(self):
        # Sorted list of channel names.
        return sorted(self._channels.keys())

    def _channels2dict(self, instrument):
        # Collect all '<instrument>-<digits>' sections as _Channel objects,
        # keyed by their 'name' option.
        rec = re.compile('^%s-\d+$'%instrument)
        channels = {}
        for sec in self._config.sections():
            if rec.findall(sec):
                chn = _Channel(self._config.items(sec, raw=True), raw=True)
                channels[chn.name] = chn
        return channels
+
class _Channel:
    """Plain attribute container for one channel section.

    Attributes are taken verbatim from *kvs* (an iterable of
    key/value pairs); with raw=True each value is first passed
    through _eval.
    """

    def __init__(self, kvs, raw=False):
        self.name = None
        for attr, value in kvs:
            setattr(self, attr, _eval(value) if raw else value)

    def __str__(self):
        parts = []
        for attr in sorted(self.__dict__.keys()):
            if attr[0] == '_':
                continue
            value = getattr(self, attr)
            if attr == 'resolution':
                value = "%.2f" % value
            elif attr == 'frequency':
                # Expects a (low, center, high) 3-tuple.
                value = "(%.2f, %.2f, %.2f)" % value
            parts.append(attr + ': ' + str(value))
        return ', '.join(parts)
+
def _eval(val):
    """Best-effort conversion of a config string to a Python value.

    Strings that parse as Python expressions are evaluated (e.g.
    "1.5" -> float, "(1, 2)" -> tuple); anything that fails to
    evaluate is returned unchanged as a string.

    NOTE(review): eval on config-file content executes arbitrary code;
    acceptable only because config files are trusted local input.
    """
    try:
        return eval(val)
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; Exception covers every eval failure mode.
        return str(val)
+
if __name__ == '__main__':
    # Ad-hoc self test: read the config file given on the command line
    # and dump its main sections and all channel definitions.
    import sys
    dname, fname = os.path.split(sys.argv[1])
    os.environ['PPP_CONFIG_DIR'] = dname
    cfg = read_config(os.path.splitext(fname)[0])
    for _name in ('satellite', 'level1', 'level2'):
        _sec = cfg(_name)
        print _name
        for _key in sorted(_sec.keys()):
            print ' ', _key + ':', _sec[_key]
    for _name in cfg.channel_names:
        print cfg.get_channel(_name)
diff --git a/mipp/geotiff.py b/mipp/geotiff.py
new file mode 100644
index 0000000..0732455
--- /dev/null
+++ b/mipp/geotiff.py
@@ -0,0 +1,80 @@
+from osgeo import gdal, osr
+
+import logging
+logger = logging.getLogger('mipp')
+
def tiff2areadef(projection, geotransform, shape):
    """Build a pyresample area definition from geotiff georeferencing.

    *projection* is a WKT string, *geotransform* the six-element GDAL
    affine transform and *shape* the (rows, cols) raster size.
    """
    # Rewamp projection
    import pyresample

    # WKT -> proj4 string -> {key: value} dict.  Flags without a value
    # (e.g. 'no_defs') fail the unpacking and are dropped.
    srs = osr.SpatialReference()
    srs.ImportFromWkt(projection)
    proj4 = srs.ExportToProj4()
    proj4_dict = {}
    for i in proj4.replace('+', '').split():
        try:
            key, val = [v.strip() for v in i.split('=')]
        except ValueError:
            continue
        proj4_dict[key] = val

    # (x_min, y_min, x_max, y_max) in projection coordinates.
    # NOTE(review): assumes a north-up image (geotransform[5] < 0,
    # no rotation terms) -- confirm for rotated rasters.
    area_extent = [geotransform[0],
                   geotransform[3] + geotransform[5]*shape[0],
                   geotransform[0] + geotransform[1]*shape[1],
                   geotransform[3]]
    aid = proj4_dict['proj']
    if aid.lower() == 'utm':
        aid += proj4_dict['zone']
    # give it some kind of ID
    aname = aid + '_' + str(int(sum(geotransform)/1000.))

    return pyresample.utils.get_area_def(aname, aname, aid,
                                         proj4_dict,
                                         shape[1], shape[0],
                                         area_extent)
+
def read_geotiff(filename):
    """Open *filename* with GDAL and return (params, data).

    *params* is a dict with the dataset's 'geotransform', 'projection'
    and 'metadata'; *data* is band 1 as a numpy array.
    """
    dataset = gdal.Open(filename)

    #
    # Dataset information
    #
    geotransform = dataset.GetGeoTransform()
    projection = dataset.GetProjection()
    metadata = dataset.GetMetadata()

    driver = dataset.GetDriver()
    logger.debug('description: %s', dataset.GetDescription())
    logger.debug('driver: %s / %s', driver.ShortName, driver.LongName)
    logger.debug('size: %d x %d x %d', dataset.RasterXSize,
                 dataset.RasterYSize, dataset.RasterCount)
    logger.debug('geo transform: %s', str(geotransform))
    logger.debug('origin: %.3f, %.3f', geotransform[0], geotransform[3])
    logger.debug('pixel size: %.3f, %.3f', geotransform[1], geotransform[5])
    logger.debug('projection: %s', projection)
    logger.debug('metadata: %s', metadata)

    #
    # Fetching raster data
    #
    band = dataset.GetRasterBand(1)
    logger.info('Band(1) type: %s, size %d x %d',
                gdal.GetDataTypeName(band.DataType),
                dataset.RasterXSize, dataset.RasterYSize)
    shape = (dataset.RasterYSize, dataset.RasterXSize)
    if band.GetOverviewCount() > 0:
        logger.debug('overview count: %d', band.GetOverviewCount())
    if band.GetRasterColorTable() is not None:
        logger.debug('colortable size: %d',
                     band.GetRasterColorTable().GetCount())

    data = band.ReadAsArray(0, 0, shape[1], shape[0])
    logger.info('fetched array: %s %s %s [%d -> %.2f -> %d]',
                type(data), str(data.shape), data.dtype,
                data.min(), data.mean(), data.max())

    params = {'geotransform': geotransform,
              'projection': projection,
              'metadata': metadata}

    return params, data
diff --git a/mipp/log.py b/mipp/log.py
new file mode 100644
index 0000000..334f31d
--- /dev/null
+++ b/mipp/log.py
@@ -0,0 +1,60 @@
+import os
+import logging as log
+
class NullHandler(log.Handler):
    """Empty handler.

    Discards every record; attached to loggers so that library code
    can log without any handler configured by the application.
    """
    def emit(self, record):
        """Record a message.
        """
        pass
+
def debug_on():
    """Turn debugging logging on.

    Shorthand for logging_on(log.DEBUG).
    """
    logging_on(log.DEBUG)
+
# Console log line layout and one-shot guard for handler installation.
_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
_is_logging_on = False
def logging_on(level=None):
    """Turn logging on.

    Attach (once) a console handler to the root logger, then set
    *level* on the root logger and all of its handlers.  When *level*
    is not given, DEBUG is used if the DEBUG environment variable is
    set non-empty, INFO otherwise.
    """
    global _is_logging_on

    if level is None:  # identity test -- '== None' was unidiomatic
        if os.environ.get("DEBUG", ''):
            level = log.DEBUG
        else:
            level = log.INFO

    if not _is_logging_on:
        console = log.StreamHandler()
        console.setFormatter(log.Formatter(_format, '%Y-%m-%d %H:%M:%S'))
        console.setLevel(level)
        log.getLogger('').addHandler(console)
        _is_logging_on = True

    logger = log.getLogger('')
    logger.setLevel(level)
    for handler in logger.handlers:
        handler.setLevel(level)
+
def logging_off():
    """Turn logging off.

    Close and remove every handler on the root logger, leaving a
    single NullHandler so later logging calls stay silent.
    """
    global _is_logging_on
    logger = log.getLogger('')
    # Iterate over a copy: removeHandler() mutates logger.handlers,
    # and removing from the live list while iterating skips entries.
    for handler in logger.handlers[:]:
        handler.close()
        logger.removeHandler(handler)
    logger.handlers = [NullHandler()]
    _is_logging_on = False
+
def get_logger(name):
    """Return the logger *name*, guaranteed to have a handler.

    A NullHandler is attached to handler-less loggers so library code
    can log without the application configuring logging first.
    """
    lgr = log.getLogger(name)
    if not lgr.handlers:
        lgr.addHandler(NullHandler())
    return lgr
diff --git a/mipp/mda.py b/mipp/mda.py
new file mode 100644
index 0000000..c1c718f
--- /dev/null
+++ b/mipp/mda.py
@@ -0,0 +1,84 @@
+#
+# $Id$
+#
+from datetime import datetime
+import numpy
+
def mslice(mda):
    """Return a new Metadata holding the public, non-callable
    attributes of *mda* (those not listed in mda.ignore_attributes).
    """
    sliced = Metadata()
    skipped = mda.ignore_attributes
    for name, value in mda.__dict__.items():
        if name.startswith('_') or callable(value) or name in skipped:
            continue
        setattr(sliced, name, value)
    return sliced
+
class Metadata(object):
    """Key/value metadata with a 'key: value' per-line text format.

    Values are eval'ed on read when possible; keys in *dont_eval* are
    always kept as strings, and keys in *ignore_attributes* are
    excluded from the text representation.
    """
    token = ':'                 # key/value separator in the text format
    ignore_attributes = ()
    dont_eval = ('satnumber',)  # values kept verbatim as strings


    def read(self, file_name):
        """Read until empty line, 'EOH' or 'EOF'.

        Lines after a '#' are treated as comments.  Returns self so
        calls can be chained: Metadata().read(name).
        """
        # 'with' replaces the manual try/finally close.
        with open(file_name) as fpi:
            for line in fpi:
                line = line.strip()
                if not line or line == 'EOH':
                    # end of meta-data
                    break
                line = line.split('#')[0].strip()
                if not line:
                    # just a comment
                    continue
                key, val = [s.strip() for s in line.split(self.token, 1)]
                if key not in self.dont_eval:
                    try:
                        # NOTE(review): eval on file content -- only
                        # safe for trusted metadata files.
                        val = eval(val)
                    except Exception:
                        # not a Python expression: keep the raw string
                        pass
                if key:
                    setattr(self, key, val)
        return self

    def save(self, file_name):
        """Write the textual representation to *file_name*."""
        # 'with' guarantees the file is closed even if write() raises.
        with open(file_name, 'w') as fpo:
            fpo.write(str(self) + '\n')

    def __str__(self):
        # One 'key: value' line per public, non-ignored attribute,
        # sorted by key; no trailing newline.
        keys = sorted(self.__dict__.keys())
        strn = ''
        for key in keys:
            val = getattr(self, key)
            if (not key.startswith('_') and
                not callable(val) and
                key not in self.ignore_attributes):
                val = _nice2cmp(val)
                strn += key + self.token + ' ' + str(val) + '\n'
        return strn[:-1]
+
def _nice2cmp(val):
    # ... and nice to print: ndarrays become lists, datetimes and
    # floats become strings, and ndarray values inside a dict are
    # converted to lists as well.  Everything else passes through.
    if isinstance(val, numpy.ndarray):
        return val.tolist()
    if isinstance(val, (datetime, float)):
        return str(val)
    if isinstance(val, dict):
        converted = {}
        for inner_key, inner_val in val.items():
            if isinstance(inner_val, numpy.ndarray):
                inner_val = inner_val.tolist()
            converted[inner_key] = inner_val
        return converted
    return val
+
if __name__ == '__main__':
    # Command-line helper: dump the metadata file given as argument.
    import sys
    print Metadata().read(sys.argv[1])
diff --git a/mipp/xrit/GOMS.py b/mipp/xrit/GOMS.py
new file mode 100644
index 0000000..ab5d026
--- /dev/null
+++ b/mipp/xrit/GOMS.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012
+
+# Author(s):
+
+# Martin Raspaud <martin.raspaud at smhi.se>
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Read Electro L N1 HRIT files.
+"""
+# comments on the document:
+# - data is in little endian
+# - geometric processing's TagChGroup should not be there.
+# - can't read the satellite name
+# - explanation on types ?
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+from mipp.xrit import bin_reader as rbin
+from mipp.xrit import Metadata
+from StringIO import StringIO
+from mipp.xrit import _xrit
+import numpy as np
+
+
class _Calibrator(object):
    """Look-up-table calibration for one Electro L N1 channel.

    The calibration table row for *channel_name* is taken from the
    prologue header's "ImageCalibration" block.
    """
    def __init__(self, hdr, channel_name):
        self.hdr = hdr
        # Channels whose name starts with '00_' are treated as visible
        # (calibrated values reported in percent, others in Kelvin).
        self.vis = channel_name.startswith("00_")
        channels = ["00_6",
                    "00_7",
                    "00_9",
                    "03_8",
                    "06_4",
                    "08_0",
                    "08_7",
                    "09_7",
                    "10_7",
                    "11_9"]
        # Row index of this channel in the calibration LUT.
        chnb = channels.index(channel_name)
        self.calibration_table = hdr["ImageCalibration"][chnb, :]

    def __call__(self, image, calibrate=1):
        """Calibrate the counts array *image*.

        calibrate == 0 returns raw counts; calibrate == 1 returns
        (values, unit) via LUT lookup, masking values <= 0.
        NOTE(review): calibrate == 0 returns a bare array (no
        (data, unit) tuple, unlike the MSG calibrator), and any
        *calibrate* value other than 0 or 1 falls through and returns
        None -- confirm against the callers.
        """
        if calibrate == 0:
            return image
        if calibrate == 1:
            res = np.ma.masked_less_equal(self.calibration_table[image], 0)
            if self.vis:
                return (res, "%")
            else:
                return (res, "K")
+
+
def read_proheader(fp):
    """Read selected records from the Electro L N1 prologue.

    Returns a dict with 'SatelliteStatus' (one record),
    'ImageAcqusition' (10 records) and 'ImageCalibration'
    (10 x 1024 look-up table).  All fields are little endian
    (see the module-level comment).
    """
    hdr = {}

    # Satellite status record layout.
    satstatus = [("TagType", "<u4"),
                 ("TagLength", "<u4"),
                 ("SatelliteID", "<u8"),
                 ("SatelliteName", "S256"),
                 ("NominalLongitude", "<f8"),
                 ("SatelliteCondition", "<u4"),
                 ("TimeOffset", "<f8")]

    satstatus = np.dtype(satstatus)
    hdr["SatelliteStatus"] = np.fromstring(fp.read(satstatus.itemsize),
                                           dtype=satstatus,
                                           count=1)[0]

    # Image acquisition: 10 records.
    # NOTE(review): the key spelling 'ImageAcqusition' (sic) is kept --
    # callers may rely on it.
    imaq = [("TagType", "<u4"),
            ("TagLength", "<u4"),
            ("Status", "<u4"),
            ("StartDelay", "<i4"),
            ("Cel", "<f8")]

    imaq = np.dtype(imaq)
    hdr["ImageAcqusition"] = np.fromstring(fp.read(imaq.itemsize*10),
                                           dtype=imaq,
                                           count=10)

    # Calibration look-up table: 10 channels x 1024 counts; values are
    # divided by 1000 (presumably stored in milli-units -- confirm).
    imcal = np.dtype("(10, 1024)<i4")

    hdr["ImageCalibration"] = np.fromstring(fp.read(imcal.itemsize),
                                            dtype=imcal,
                                            count=1)[0] / 1000.0

    return hdr
+
def read_epiheader(fp):
    """Read the Electro L N1 epilogue.

    Returns a dict with 'RadiometricProcessing' and
    'GeometricProcessing', each 10 records (one per channel).  The
    nested dtypes mirror the on-disk record layout (little endian);
    field order and sizes must not change.
    """

    hdr = {}

    rproc = [("TagType", "<u4"),
             ("TagLength", "<u4"),
             ("RPSummary",
              [("Impulse", "<u4"),
               ("IsStrNoiseCorrection", "<u4"),
               ("IsOptic", "<u4"),
               ("IsBrightnessAligment", "<u4")]),
             ("OpticCorrection",
              [("Degree", "<i4"),
               ("A", "<f8", (16, ))]),
             ("RPQuality",
              [("EffDinRange", "<f8"),
               ("EathDarkening", "<f8"),
               ("Zone", "<f8"),
               ("Impulse", "<f8"),
               ("Group", "<f8"),
               ("DefectCount", "<u4"),
               ("DefectProcent", "<f8"),
               ("S_Noise_DT_Preflight", "<f8"),
               ("S_Noise_DT_Bort", "<f8"),
               ("S_Noise_DT_Video", "<f8"),
               ("S_Noise_DT_1_5", "<f8"),
               ("CalibrStability", "<f8"),
               ("TemnSKO", "<f8", (2, )),
               ("StructSKO", "<f8", (2, )),
               ("Struct_1_5", "<f8"),
               # NOTE(review): the field name "Zone_1_ 5" contains a
               # space -- looks like a typo, but renaming it would
               # change the dtype field name seen by consumers.
               ("Zone_1_ 5", "<f8"),
               ("RadDif", "<f8")])]

    rproc = np.dtype(rproc)
    hdr["RadiometricProcessing"] = np.fromstring(fp.read(rproc.itemsize*10),
                                                 dtype=rproc,
                                                 count=10)
    gproc = [("TagType", "<u4"),
             ("TagLength", "<u4"),
             ("TGeomNormInfo",
              [("IsExist", "<u4"),
               ("IsNorm", "<u4"),
               ("SubLon", "<f8"),
               ("TypeProjection", "<u4"),
               ("PixInfo", "<f8", (4, ))]),
             ("SatInfo",
              [("TISO",
                [("T0", "<f8"),
                 ("dT", "<f8"),
                 ("ASb", "<f8"),
                 ("Evsk", "<f8", (3, 3, 4)),
                 ("ARx", "<f8", (4, )),
                 ("ARy", "<f8", (4, )),
                 ("ARz", "<f8", (4, )),
                 ("AVx", "<f8", (4, )),
                 ("AVy", "<f8", (4, )),
                 ("AVz", "<f8", (4, ))]),
               ("Type", "<i4")]),
             ("TimeProcessing", "<f8"),
             ("ApriorAccuracy", "<f8"),
             ("RelativeAccuracy", "<f8", (2, ))]

    gproc = np.dtype(gproc)
    hdr["GeometricProcessing"] = np.fromstring(fp.read(gproc.itemsize*10),
                                               dtype=gproc,
                                               count=10)



    return hdr
+
def read_metadata(prologue, image_files, epilogue):
    """ Selected items from the Electro L N1 prolog file.

    Builds a Metadata object from the prologue/epilogue headers and
    the first image segment: sub-satellite longitude, image size,
    channel name, calibration callable and the segment's line/column
    offsets.
    """

    hdr = {}

    fp = StringIO(prologue.data)
    phdr = read_proheader(fp)
    fp = StringIO(epilogue.data)
    ehdr = read_epiheader(fp)

    hdr.update(phdr)
    hdr.update(ehdr)

    im = _xrit.read_imagedata(image_files[0])

    md = Metadata()

    md.sublon = np.rad2deg(hdr["SatelliteStatus"]["NominalLongitude"])

    # NOTE(review): width and height both use nc (columns); presumably
    # the full-disc image is square -- confirm.
    md.image_size = (im.structure.nc, im.structure.nc)

    md.channel = im.product_name[:4]
    md.satname = im.platform.lower()
    md.line_offset = 0
    md.data_type = im.structure.nb
    md.no_data_value = 0
    md.first_pixel = "north west"
    md.calibrate = _Calibrator(hdr, md.channel)

    # Lines per segment from the segment header.  (The original also
    # assigned a hard-coded 464 at the top of the function; it was
    # unconditionally overwritten here and has been removed.)
    segment_size = im.structure.nl
    md.loff = im.navigation.loff + segment_size * (im.segment.seg_no - 1)
    md.coff = im.navigation.coff
    return md
diff --git a/mipp/xrit/MSG.py b/mipp/xrit/MSG.py
new file mode 100644
index 0000000..195086f
--- /dev/null
+++ b/mipp/xrit/MSG.py
@@ -0,0 +1,870 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2010, 2011, 2012, 2013
+
+# Author(s):
+
+# Martin Raspaud <martin.raspaud at smhi.se>
+# Lars Ø. Rasmusen <ras at dmi.dk>
+# Esben S. Nielsen <esn at dmi.dk>
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""This module will read MSG level1.5 files, format documented in:
+'MSG Level 1.5 Image Data Format Description', EUM/MSG/ICD/105, v5A, 22 August 2007
+"""
+#raise NotImplementedError
+import logging
+logger = logging.getLogger('mipp')
+
+import sys
+import numpy as np
+from StringIO import StringIO
+
+from mipp import CalibrationError
+from mipp.xrit import _xrit
+from mipp.xrit import Metadata
+from mipp.xrit import bin_reader as rbin
+
+__all__ = ['read_metadata']
+
eval_np = eval      # replaced by numexpr.evaluate when available
log = np.log        # used inside the calibration expression strings
no_data_value = 0

# Use numexpr (when installed) for fast evaluation of the numpy
# expression strings in _Calibrator.
# BUG FIX: the original test 'version_info[0] >= 2 and
# version_info[1] >= 5' misclassifies e.g. Python 3.0-3.4 as older
# than 2.5; tuple comparison handles major/minor correctly.
if sys.version_info >= (2, 5):
    try:
        #Use numexpr if available
        import numexpr
        eval_np = numexpr.evaluate
        logger.info('Using numexpr for fast numpy evaluation')
    except ImportError:
        logger.warning('Module numexpr not found. Performance will be slower.')
else:
    logger.warning('Older version of python. Module numexpr not used. '
                   'Performance will be slower.')
+
+
#Reflectance factor for visible bands
# NOTE(review): these module-level *_F values appear superseded by the
# per-satellite 'F' entries in CALIB below -- confirm before removing.
HRV_F = 25.15
VIS006_F = 20.76
VIS008_F = 23.30
IR_016_F = 19.73

## Calibration coefficients from
##'A Planned Change to the MSG Level 1.5 Image Product Radiance Definition'
## ,
## "Conversion from radiances to reflectances for SEVIRI warm channels"
## EUM/MET/TEN/12/0332
## , and
## "The Conversion from Effective Radiances to Equivalent Brightness
## Temperatures"
## EUM/MET/TEN/11/0569

# CALIB maps satellite id -> channel name -> coefficients:
#   'F'            solar term for the reflectance channels,
#   'VC'           central wavenumber of the IR channels,
#   'ALPHA'/'BETA' the brightness-temperature correction coefficients.
CALIB = {}


# Meteosat 8

CALIB[321] = {'HRV': {'F': 78.7599 / np.pi},
              'VIS006': {'F': 65.2296 / np.pi},
              'VIS008': {'F': 73.0127 / np.pi},
              'IR_016': {'F': 62.3715 / np.pi},
              'IR_039': {'VC': 2567.33,
                         'ALPHA': 0.9956,
                         'BETA': 3.41},
              'WV_062': {'VC': 1598.103,
                         'ALPHA': 0.9962,
                         'BETA': 2.218},
              'WV_073': {'VC': 1362.081,
                         'ALPHA': 0.9991,
                         'BETA': 0.478},
              'IR_087': {'VC': 1149.069,
                         'ALPHA': 0.9996,
                         'BETA': 0.179},
              'IR_097': {'VC': 1034.343,
                         'ALPHA': 0.9999,
                         'BETA': 0.06},
              'IR_108': {'VC': 930.647,
                         'ALPHA': 0.9983,
                         'BETA': 0.625},
              'IR_120': {'VC': 839.66,
                         'ALPHA': 0.9988,
                         'BETA': 0.397},
              'IR_134': {'VC': 752.387,
                         'ALPHA': 0.9981,
                         'BETA': 0.578}}

# Meteosat 9

CALIB[322] = {'HRV': {'F': 79.0113 / np.pi},
              'VIS006': {'F': 65.2065 / np.pi},
              'VIS008': {'F': 73.1869 / np.pi},
              'IR_016': {'F': 61.9923 / np.pi},
              'IR_039': {'VC': 2568.832,
                         'ALPHA': 0.9954,
                         'BETA': 3.438},
              'WV_062': {'VC': 1600.548,
                         'ALPHA': 0.9963,
                         'BETA': 2.185},
              'WV_073': {'VC': 1360.330,
                         'ALPHA': 0.9991,
                         'BETA': 0.47},
              'IR_087': {'VC': 1148.620,
                         'ALPHA': 0.9996,
                         'BETA': 0.179},
              'IR_097': {'VC': 1035.289,
                         'ALPHA': 0.9999,
                         'BETA': 0.056},
              'IR_108': {'VC': 931.7,
                         'ALPHA': 0.9983,
                         'BETA': 0.64},
              'IR_120': {'VC': 836.445,
                         'ALPHA': 0.9988,
                         'BETA': 0.408},
              'IR_134': {'VC': 751.792,
                         'ALPHA': 0.9981,
                         'BETA': 0.561}}

# Meteosat 10

CALIB[323] = {'HRV': {'F': 78.9416 / np.pi},
              'VIS006': {'F': 65.5148 / np.pi},
              'VIS008': {'F': 73.1807 / np.pi},
              'IR_016': {'F': 62.0208 / np.pi},
              'IR_039': {'VC': 2547.771,
                         'ALPHA': 0.9915,
                         'BETA': 2.9002},
              'WV_062': {'VC': 1595.621,
                         'ALPHA': 0.9960,
                         'BETA': 2.0337},
              'WV_073': {'VC': 1360.337,
                         'ALPHA': 0.9991,
                         'BETA': 0.4340},
              'IR_087': {'VC': 1148.130,
                         'ALPHA': 0.9996,
                         'BETA': 0.1714},
              'IR_097': {'VC': 1034.715,
                         'ALPHA': 0.9999,
                         'BETA': 0.0527},
              'IR_108': {'VC': 929.842,
                         'ALPHA': 0.9983,
                         'BETA': 0.6084},
              'IR_120': {'VC': 838.659,
                         'ALPHA': 0.9988,
                         'BETA': 0.3882},
              'IR_134': {'VC': 750.653,
                         'ALPHA': 0.9982,
                         'BETA': 0.5390}}

# Meteosat 11

CALIB[324] = {'HRV': {'F': 79.0035/ np.pi},
              'VIS006': {'F': 65.2656 / np.pi},
              'VIS008': {'F': 73.1692 / np.pi},
              'IR_016': {'F': 61.9416 / np.pi},
              'IR_039': {'VC': 2555.280,
                         'ALPHA': 0.9916,
                         'BETA': 2.9438},
              'WV_062': {'VC': 1596.080,
                         'ALPHA': 0.9959,
                         'BETA': 2.0780},
              'WV_073': {'VC': 1361.748,
                         'ALPHA': 0.9990,
                         'BETA': 0.4929},
              'IR_087': {'VC': 1147.433,
                         'ALPHA': 0.9996,
                         'BETA': 0.1731},
              'IR_097': {'VC': 1034.851,
                         'ALPHA': 0.9998,
                         'BETA': 0.0597},
              'IR_108': {'VC': 931.122,
                         'ALPHA': 0.9983,
                         'BETA': 0.6256},
              'IR_120': {'VC': 839.113,
                         'ALPHA': 0.9988,
                         'BETA': 0.4002},
              'IR_134': {'VC': 748.585,
                         'ALPHA': 0.9981,
                         'BETA': 0.5635}}

#Polynomial coefficients for spectral-effective BT fits
BTFIT_A_IR_039 = 0.0
BTFIT_A_WV_062 = 0.00001805700
BTFIT_A_WV_073 = 0.00000231818
BTFIT_A_IR_087 = -0.00002332000
BTFIT_A_IR_097 = -0.00002055330
BTFIT_A_IR_108 = -0.00007392770
BTFIT_A_IR_120 = -0.00007009840
BTFIT_A_IR_134 = -0.00007293450

BTFIT_B_IR_039 = 1.011751900
BTFIT_B_WV_062 = 1.000255533
BTFIT_B_WV_073 = 1.000668281
BTFIT_B_IR_087 = 1.011803400
BTFIT_B_IR_097 = 1.009370670
BTFIT_B_IR_108 = 1.032889800
BTFIT_B_IR_120 = 1.031314600
BTFIT_B_IR_134 = 1.030424800

BTFIT_C_IR_039 = -3.550400
BTFIT_C_WV_062 = -1.790930
BTFIT_C_WV_073 = -0.456166
BTFIT_C_IR_087 = -1.507390
BTFIT_C_IR_097 = -1.030600
BTFIT_C_IR_108 = -3.296740
BTFIT_C_IR_120 = -3.181090
BTFIT_C_IR_134 = -2.645950

# Constants used in the radiance -> brightness-temperature expressions
# in _Calibrator below.
C1 = 1.19104273e-16
C2 = 0.0143877523
+
class _Calibrator(object):
    """Calibrate SEVIRI counts to radiances, reflectances or BT.

    Per-scene calibration (slope/offset, processing type) comes from
    the level 1.5 prologue header *hdr*; per-satellite constants come
    from the module-level CALIB table.
    """
    def __init__(self, hdr, channel_name):
        self.hdr = hdr
        self.channel_name = channel_name

    def __call__(self, image, calibrate=1):
        """Computes the radiances and reflectances/bt of a given channel. The
        *calibrate* argument should be set to 0 for no calibration, 1 for
        default reflectances/bt calibration, and 2 for returning radiances. The
        default value is 1.
        """
        hdr = self.hdr

        channel_name = self.channel_name

        if calibrate == 0:
            return (image,
                    "counts")

        # SEVIRI channel numbering (1-based) as used in the prologue.
        channels = {"VIS006": 1,
                    "VIS008": 2,
                    "IR_016": 3,
                    "IR_039": 4,
                    "WV_062": 5,
                    "WV_073": 6,
                    "IR_087": 7,
                    "IR_097": 8,
                    "IR_108": 9,
                    "IR_120": 10,
                    "IR_134": 11,
                    "HRV": 12}

        cal_type = (hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"])
        chn_nb = channels[channel_name] - 1

        mask = (image == no_data_value)

        # counts -> radiance, linear scaling from the prologue.
        cslope = hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Slope']
        coffset = hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Offset']

        radiances = eval_np('image * cslope + coffset')
        radiances[radiances < 0] = 0

        if calibrate == 2:
            return (np.ma.MaskedArray(radiances, mask=mask),
                    "mW m-2 sr-1 (cm-1)-1")


        sat = hdr["SatelliteDefinition"]["SatelliteId"]
        if sat not in CALIB:
            raise CalibrationError("No calibration coefficients available for "
                                   + "this satellite (" + str(sat) + ")")

        if channel_name in ["HRV", "VIS006", "VIS008", "IR_016"]:
            # Solar channels: radiance -> reflectance in percent.
            solar_irradiance = CALIB[sat][channel_name]["F"]
            reflectance = eval_np('(radiances / solar_irradiance) * 100.')
            return (np.ma.MaskedArray(reflectance, mask=mask),
                    "%")

        wavenumber = CALIB[sat][channel_name]["VC"]
        if cal_type[chn_nb] == 2:
            #computation based on effective radiance
            alpha = CALIB[sat][channel_name]["ALPHA"]
            beta = CALIB[sat][channel_name]["BETA"]

            cal_data = eval_np(('((C2 * 100. * wavenumber / '
                                'log(C1 * 1.0e6 * wavenumber ** 3 / '
                                '(1.0e-5 * radiances) + 1)) - beta) / alpha'))

        elif cal_type[chn_nb] == 1:
            #computation based on spectral radiance
            # BUG FIX: the expression string ended in '+ 1))' -- an
            # unbalanced ')' that made eval/numexpr raise SyntaxError
            # whenever this branch was taken.
            cal_data = eval_np(('C2 * 100. * wavenumber / '
                                'log(C1 * 1.0e6 * wavenumber ** 3 / '
                                '(1.0e-5 * radiances) + 1)'))

            # Spectral -> effective BT via the per-channel polynomial
            # fit; look the constants up by name instead of eval().
            coef_a = globals()["BTFIT_A_" + channel_name]
            coef_b = globals()["BTFIT_B_" + channel_name]
            coef_c = globals()["BTFIT_C_" + channel_name]

            cal_data = eval_np(('cal_data ** 2 * coef_a + '
                                'cal_data * coef_b + coef_c'))

        else:
            raise RuntimeError("Something is seriously wrong in the metadata.")

        # Mask invalid results (zero radiance yields inf/nan in log()).
        mask = mask | np.isnan(cal_data) | np.isinf(cal_data)
        cal_data = np.ma.MaskedArray(cal_data, mask=mask)
        return (cal_data,
                "K")
+
+def read_proheader(fp):
+ """Read the msg header.
+ """
+ hdr = dict()
+
+ # Satellite definition
+
+ satdef = {}
+ satdef["SatelliteId"] = rbin.read_uint2(fp.read(2))
+ satdef["NominalLongitude"] = rbin.read_float4(fp.read(4))
+ satdef["SatelliteStatus"] = ord(fp.read(1))
+
+ hdr["SatelliteDefinition"] = satdef
+ del satdef
+
+ # Satellite operations
+
+ satop = {}
+ satop["LastManoeuvreFlag"] = ord(fp.read(1)) > 0
+ satop["LastManoeuvreStartTime"] = rbin.read_cds_time(fp.read(6))
+ satop["LastManoeuvreEndTime"] = rbin.read_cds_time(fp.read(6))
+ satop["LastManoeuvreType"] = ord(fp.read(1))
+ satop["NextManoeuvreFlag"] = ord(fp.read(1)) > 0
+ satop["NextManoeuvreStartTime"] = rbin.read_cds_time(fp.read(6))
+ satop["NextManoeuvreEndTime"] = rbin.read_cds_time(fp.read(6))
+ satop["NextManoeuvreType"] = ord(fp.read(1))
+
+ hdr["SatelliteOperations"] = satop
+ del satop
+
+ # Orbit
+
+ orbit = {}
+ orbit["PeriodStartTime"] = rbin.read_cds_time(fp.read(6))
+ orbit["PeriodEndTime"] = rbin.read_cds_time(fp.read(6))
+ orbitcoef = np.dtype(">u2, >u4, >u2, >u4,"
+ " (8,)>f8, (8,)>f8, (8,)>f8,"
+ " (8,)>f8, (8,)>f8, (8,)>f8")
+ orbit["OrbitPolynomial"] = np.fromstring(fp.read(39600),
+ dtype=orbitcoef,
+ count=100)
+
+ hdr["Orbit"] = orbit
+ del orbit
+
+ # Attitude
+
+ attitude = {}
+ attitude["PeriodStartTime"] = rbin.read_cds_time(fp.read(6))
+ attitude["PeriodEndTime"] = rbin.read_cds_time(fp.read(6))
+ attitude["PrincipleAxisOffsetAngle"] = rbin.read_float8(fp.read(8))
+ attitudecoef = np.dtype(">u2, >u4, >u2, >u4, (8,)>f8, (8,)>f8, (8,)>f8")
+ attitude["AttitudePolynomial"] = np.fromstring(fp.read(20400),
+ dtype=attitudecoef,
+ count=100)
+
+ hdr["Attitude"] = attitude
+ del attitude
+
+ # SpinRateatRCStart
+
+ hdr["SpinRateatRCStart"] = rbin.read_float8(fp.read(8))
+
+ # UTCCorrelation
+
+ utccor = {}
+
+ utccor["PeriodStartTime"] = rbin.read_cds_time(fp.read(6))
+ utccor["PeriodEndTime"] = rbin.read_cds_time(fp.read(6))
+ utccor["OnBoardTimeStart"] = rbin.read_cuc_time(fp.read(7), 4, 3)
+ utccor["VarOnBoardTimeStart"] = rbin.read_float8(fp.read(8))
+ utccor["A1"] = rbin.read_float8(fp.read(8))
+ utccor["VarA1"] = rbin.read_float8(fp.read(8))
+ utccor["A2"] = rbin.read_float8(fp.read(8))
+ utccor["VarA2"] = rbin.read_float8(fp.read(8))
+
+ hdr["UTCCorrelation"] = utccor
+ del utccor
+
+ # PlannedAcquisitionTime
+
+ pat = {}
+ pat["TrueRepeatCycleStart"] = rbin.read_cds_expanded_time(fp.read(10))
+ pat["PlannedForwardScanEnd"] = rbin.read_cds_expanded_time(fp.read(10))
+ pat["PlannedRepeatCycleEnd"] = rbin.read_cds_expanded_time(fp.read(10))
+
+ hdr["PlannedAcquisitionTime"] = pat
+
+ # RadiometerStatus
+
+ radiostatus = {}
+ radiostatus["ChannelStatus"] = np.fromstring(fp.read(12), dtype=np.uint8)
+ radiostatus["DetectorStatus"] = np.fromstring(fp.read(42), dtype=np.uint8)
+
+ hdr["RadiometerStatus"] = radiostatus
+
+ # RadiometerSettings
+
+ radiosettings = {}
+ radiosettings["MDUSamplingDelays"] = np.fromstring(fp.read(42 * 2), dtype=">u2")
+ radiosettings["HRVFrameOffsets"] = {}
+ radiosettings["HRVFrameOffsets"]["MDUNomHRVDelay1"] = rbin.read_uint2(fp.read(2))
+ radiosettings["HRVFrameOffsets"]["MDUNomHRVDelay2"] = rbin.read_uint2(fp.read(2))
+ radiosettings["HRVFrameOffsets"]["Spare"] = rbin.read_uint2(fp.read(2))
+ radiosettings["HRVFrameOffsets"]["MDUNomHRVBreakline"] = rbin.read_uint2(fp.read(2))
+ radiosettings["DHSSSynchSelection"] = ord(fp.read(1))
+ radiosettings["MDUOutGain"] = np.fromstring(fp.read(42 * 2), dtype=">u2")
+ radiosettings["MDUCourseGain"] = np.fromstring(fp.read(42), dtype=np.uint8)
+ radiosettings["MDUFineGain"] = np.fromstring(fp.read(42 * 2), dtype=">u2")
+ radiosettings["MDUNumericalOffset"] = np.fromstring(fp.read(42 * 2), dtype=">u2")
+ radiosettings["PUGain"] = np.fromstring(fp.read(42 * 2), dtype=">u2")
+ radiosettings["PUOffset"] = np.fromstring(fp.read(27 * 2), dtype=">u2")
+ radiosettings["PUBias"] = np.fromstring(fp.read(15 * 2), dtype=">u2")
+ radiosettings["OperationParameters"] = {}
+ radiosettings["OperationParameters"]["L0_LineCounter"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["K1_RetraceLines"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["K2_PauseDeciseconds"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["K3_RetraceLines"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["K4_PauseDeciseconds"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["K5_RetraceLines"] = rbin.read_uint2(fp.read(2))
+ radiosettings["OperationParameters"]["X_DeepSpaceWindowPosition"] = ord(fp.read(1))
+ radiosettings["RefocusingLines"] = rbin.read_uint2(fp.read(2))
+ radiosettings["RefocusingDirection"] = ord(fp.read(1))
+ radiosettings["RefocusingPosition"] = rbin.read_uint2(fp.read(2))
+ radiosettings["ScanRefPosFlag"] = ord(fp.read(1)) > 0
+ radiosettings["ScanRefPosNumber"] = rbin.read_uint2(fp.read(2))
+ radiosettings["ScanRefPosVal"] = rbin.read_float4(fp.read(4))
+ radiosettings["ScanFirstLine"] = rbin.read_uint2(fp.read(2))
+ radiosettings["ScanLastLine"] = rbin.read_uint2(fp.read(2))
+ radiosettings["RetraceStartLine"] = rbin.read_uint2(fp.read(2))
+
+ hdr["RadiometerSettings"] = radiosettings
+
+ # RadiometerOperations
+
+ radiooper = {}
+
+ radiooper["LastGainChangeFlag"] = ord(fp.read(1)) > 0
+ radiooper["LastGainChangeTime"] = rbin.read_cds_time(fp.read(6))
+ radiooper["Decontamination"] = {}
+ radiooper["Decontamination"]["DecontaminationNow"] = ord(fp.read(1)) > 0
+ radiooper["Decontamination"]["DecontaminationStart"] = rbin.read_cds_time(fp.read(6))
+ radiooper["Decontamination"]["DecontaminationEnd"] = rbin.read_cds_time(fp.read(6))
+
+
+ radiooper["BBCalScheduled"] = ord(fp.read(1)) > 0
+ radiooper["BBCalibrationType"] = ord(fp.read(1))
+ radiooper["BBFirstLine"] = rbin.read_uint2(fp.read(2))
+ radiooper["BBLastLine"] = rbin.read_uint2(fp.read(2))
+ radiooper["ColdFocalPlaneOpTemp"] = rbin.read_uint2(fp.read(2))
+ radiooper["WarmFocalPlaneOpTemp"] = rbin.read_uint2(fp.read(2))
+
+
+ hdr["RadiometerOperations"] = radiooper
+
+ ## CelestialEvents
+ # CelestialBodiesPosition
+
+ celbodies = {}
+ celbodies["PeriodTimeStart"] = rbin.read_cds_time(fp.read(6))
+ celbodies["PeriodTimeEnd"] = rbin.read_cds_time(fp.read(6))
+ celbodies["RelatedOrbitFileTime"] = fp.read(15)
+ celbodies["RelatedAttitudeFileTime"] = fp.read(15)
+ earthmoonsuncoef = np.dtype(">u2, >u4, >u2, >u4, (8,)>f8, (8,)>f8")
+ celbodies["EarthEphemeris"] = np.fromstring(fp.read(14000),
+ dtype=earthmoonsuncoef,
+ count=100)
+ celbodies["MoonEphemeris"] = np.fromstring(fp.read(14000),
+ dtype=earthmoonsuncoef,
+ count=100)
+ celbodies["SunEphemeris"] = np.fromstring(fp.read(14000),
+ dtype=earthmoonsuncoef,
+ count=100)
+ starcoef = np.dtype(">u2, >u2, >u4, >u2, >u4, (8,)>f8, (8,)>f8")
+ starcoefs = np.dtype([('starcoefs', starcoef, (20,))])
+
+ celbodies["StarEphemeris"] = np.fromstring(fp.read(284000),
+ dtype=starcoefs,
+ count=100)
+
+ hdr["CelestialBodiesPosition"] = celbodies
+
+ # RelationToImage
+
+ reltoim = {}
+ reltoim["TypeofEclipse"] = ord(fp.read(1))
+ reltoim["EclipseStartTime"] = rbin.read_cds_time(fp.read(6))
+ reltoim["EclipseEndTime"] = rbin.read_cds_time(fp.read(6))
+ reltoim["VisibleBodiesInImage"] = ord(fp.read(1))
+ reltoim["BodiesClosetoFOV"] = ord(fp.read(1))
+ reltoim["ImpactOnImageQuality"] = ord(fp.read(1))
+
+ hdr["RelationToImage"] = reltoim
+
+ ## ImageDescriptionRecord
+
+ grid_origin = ["north west", "south west", "south east", "north east"]
+
+ # ProjectionDescription
+
+ projdes = {}
+ projdes["TypeOfProjection"] = ord(fp.read(1))
+ projdes["LongitudeOfSSP"] = rbin.read_float4(fp.read(4))
+
+ hdr["ProjectionDescription"] = projdes
+
+ # ReferenceGridVIS_IR
+
+ refvisir = {}
+ refvisir["NumberOfLines"] = rbin.read_int4(fp.read(4))
+ refvisir["NumberOfColumns"] = rbin.read_int4(fp.read(4))
+ refvisir["LineDirGridStep"] = rbin.read_float4(fp.read(4))
+ refvisir["ColumnDirGridStep"] = rbin.read_float4(fp.read(4))
+ refvisir["GridOrigin"] = grid_origin[ord(fp.read(1))]
+
+ hdr["ReferenceGridVIS_IR"] = refvisir
+
+ # ReferenceGridHRV
+
+ refhrv = {}
+ refhrv["NumberOfLines"] = rbin.read_int4(fp.read(4))
+ refhrv["NumberOfColumns"] = rbin.read_int4(fp.read(4))
+ refhrv["LineDirGridStep"] = rbin.read_float4(fp.read(4))
+ refhrv["ColumnDirGridStep"] = rbin.read_float4(fp.read(4))
+ refhrv["GridOrigin"] = grid_origin[ord(fp.read(1))]
+
+ hdr["ReferenceGridHRV"] = refhrv
+
+ # PlannedCoverageVIS_IR
+
+ covvisir = {}
+ covvisir["SouthernLinePlanned"] = rbin.read_int4(fp.read(4))
+ covvisir["NorthernLinePlanned"] = rbin.read_int4(fp.read(4))
+ covvisir["EasternColumnPlanned"] = rbin.read_int4(fp.read(4))
+ covvisir["WesternColumnPlanned"] = rbin.read_int4(fp.read(4))
+
+ hdr["PlannedCoverageVIS_IR"] = covvisir
+
+ # PlannedCoverageHRV
+
+ covhrv = {}
+
+ covhrv["LowerSouthLinePlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["LowerNorthLinePlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["LowerEastColumnPlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["LowerWestColumnPlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["UpperSouthLinePlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["UpperNorthLinePlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["UpperEastColumnPlanned"] = rbin.read_int4(fp.read(4))
+ covhrv["UpperWestColumnPlanned"] = rbin.read_int4(fp.read(4))
+
+ hdr["PlannedCoverageHRV"] = covhrv
+
+ # Level 1_5 ImageProduction
+
+ image_proc_direction = ["North-South", "South-North"]
+ pixel_gen_direction = ["East-West", "West-East"]
+
+ l15prod = {}
+ l15prod["ImageProcDirection"] = image_proc_direction[ord(fp.read(1))]
+ l15prod["PixelGenDirection"] = pixel_gen_direction[ord(fp.read(1))]
+
+ # 0: No processing, 1: Spectral radiance, 2: Effective radiance
+ l15prod["PlannedChanProcessing"] = np.fromstring(fp.read(12),
+ dtype=np.uint8)
+
+ hdr["Level 1_5 ImageProduction"] = l15prod
+
+
+ ## RadiometricProcessing
+
+ # RPSummary
+
+ rpsummary = {}
+ rpsummary["RadianceLinearization"] = np.fromstring(fp.read(12), dtype=np.bool)
+
+ rpsummary["DetectorEqualization"] = np.fromstring(fp.read(12), dtype=np.bool)
+ rpsummary["OnboardCalibrationResult"] = np.fromstring(fp.read(12), dtype=np.bool)
+ rpsummary["MPEFCalFeedback"] = np.fromstring(fp.read(12), dtype=np.bool)
+ rpsummary["MTFAdaptation"] = np.fromstring(fp.read(12), dtype=np.bool)
+ rpsummary["StraylightCorrectionFlag"] = np.fromstring(fp.read(12), dtype=np.bool)
+
+ hdr["RPSummary"] = rpsummary
+
+ # Level1_5ImageCalibration
+
+ caltype = np.dtype([('Cal_Slope', '>f8'), ('Cal_Offset', '>f8')])
+
+ hdr["Level1_5ImageCalibration"] = np.fromstring(fp.read(192), dtype=caltype)
+
+
+ # BlackBodyDataUsed
+
+ bbdu = {}
+
+ bbdu["BBObservationUTC"] = rbin.read_cds_expanded_time(fp.read(10))
+ bbdu["BBRelatedData"] = {}
+ bbdu["BBRelatedData"]["OnBoardBBTime"] = rbin.read_cuc_time(fp.read(7), 4, 3)
+ bbdu["BBRelatedData"]["MDUOutGain"] = np.fromstring(fp.read(42 * 2),
+ dtype=">u2")
+ bbdu["BBRelatedData"]["MDUCoarseGain"] = np.fromstring(fp.read(42),
+ dtype=np.uint8)
+ bbdu["BBRelatedData"]["MDUFineGain"] = np.fromstring(fp.read(42 * 2),
+ dtype=">u2")
+ bbdu["BBRelatedData"]["MDUNumericalOffset"] = np.fromstring(fp.read(42 * 2),
+ dtype=">u2")
+ bbdu["BBRelatedData"]["PUGain"] = np.fromstring(fp.read(42 * 2),
+ dtype=">u2")
+ bbdu["BBRelatedData"]["PUOffset"] = np.fromstring(fp.read(27 * 2),
+ dtype=">u2")
+ bbdu["BBRelatedData"]["PUBias"] = np.fromstring(fp.read(15 * 2),
+ dtype=">u2")
+ # 12 bits bitstrings... convert to uint16
+ data = np.fromstring(fp.read(int(42 * 1.5)),
+ dtype=np.uint8)
+ data = data.astype(np.uint16)
+ data[::3] = data[::3]*256 + data[1::3] // 16
+ data[1::3] = (data[1::3] & 0x0f)*16 + data[2::3]
+ result = np.ravel(data.reshape(-1,3)[:,:2])
+ bbdu["BBRelatedData"]["DCRValues"] = result
+ bbdu["BBRelatedData"]["X_DeepSpaceWindowPosition"] = ord(fp.read(1))
+ bbdu["BBRelatedData"]["ColdFPTemperature"] = {}
+ bbdu["BBRelatedData"]["ColdFPTemperature"]["FCUNominalColdFocalPlaneTemp"] = rbin.read_uint2(fp.read(2)) / 100.
+ bbdu["BBRelatedData"]["ColdFPTemperature"]["FCURedundantColdFocalPlaneTemp"] = rbin.read_uint2(fp.read(2)) / 100.
+ bbdu["BBRelatedData"]["WarmFPTemperature"] = {}
+ bbdu["BBRelatedData"]["WarmFPTemperature"]["FCUNominalWarmFocalPlaneVHROTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["WarmFPTemperature"]["FCURedundantWarmFocalPlaneVHROTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["ScanMirrorTemperature"] = {}
+ bbdu["BBRelatedData"]["ScanMirrorTemperature"]["FCUNominalScanMirrorSensor1Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["ScanMirrorTemperature"]["FCURedundantScanMirrorSensor1Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["ScanMirrorTemperature"]["FCUNominalScanMirrorSensor2Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["ScanMirrorTemperature"]["FCURedundantScanMirrorSensor2Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["M1M2M3Temperature"] = {}
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCUNominalM1MirrorSensor1Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCURedundantM1MirrorSensor1Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCUNominalM1MirrorSensor2Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCURedundantM1MirrorSensor2Temp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCUNominalM23AssemblySensor1Temp"] = ord(fp.read(1)) / 4. + 265
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCURedundantM23AssemblySensor1Temp"] = ord(fp.read(1)) / 4. + 265
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCUNominalM23AssemblySensor2Temp"] = ord(fp.read(1)) / 4. + 265
+ bbdu["BBRelatedData"]["M1M2M3Temperature"]["FCURedundantM23AssemblySensor2Temp"] = ord(fp.read(1)) / 4. + 265
+ bbdu["BBRelatedData"]["BaffleTemperature"] = {}
+ bbdu["BBRelatedData"]["BaffleTemperature"]["FCUNominalM1BaffleTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["BaffleTemperature"]["FCURedundantM1BaffleTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["BlackBodyTemperature"] = {}
+ bbdu["BBRelatedData"]["BlackBodyTemperature"]["FCUNominalBlackBodySensorTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["BlackBodyTemperature"]["FCURedundantBlackBodySensorTemp"] = rbin.read_uint2(fp.read(2)) / 100. + 250
+ bbdu["BBRelatedData"]["FCUMode"] = {}
+ bbdu["BBRelatedData"]["FCUMode"]["FCUNominalSMMStatus"] = rbin.read_uint2(fp.read(2))
+ bbdu["BBRelatedData"]["FCUMode"]["FCURedundantSMMStatus"] = rbin.read_uint2(fp.read(2))
+ extracted_data_type = np.dtype([('NumberOfPixelsUsed', '>u4'),
+ ('MeanCount', '>f4'),
+ ('RMS', '>f4'),
+ ('MaxCount', '>u2'),
+ ('MinCount', '>u2'),
+ ('BB_Processing_Slope', '>f8'),
+ ('BB_Processing_Offset', '>f8')])
+
+ bbdu["BBRelatedData"]["ExtractedBBData"] = np.fromstring(fp.read(32 * 12),
+ dtype=extracted_data_type)
+ impf_cal_type = np.dtype([("ImageQualityFlag", "u1"),
+ ("ReferenceDataFlag", "u1"),
+ ("AbsCalMethod", "u1"),
+ ("Pad1", "u1"),
+ ("AbsCalWeightVic", ">f4"),
+ ("AbsCalWeightXsat", ">f4"),
+ ("AbsCalCoeff", ">f4"),
+ ("AbsCalError", ">f4"),
+ ("CalMonBias", ">f4"),
+ ("CalMonRms", ">f4"),
+ ("OffsetCount", ">f4")])
+
+
+ bbdu["MPEFCalFeedback"] = np.fromstring(fp.read(32 * 12),
+ dtype=impf_cal_type)
+
+ bbdu["RadTransform"] = np.fromstring(fp.read(42 * 64 * 4),
+ dtype=">f4").reshape((42,64))
+ bbdu["RadProcMTFAdaptation"] = {}
+
+ bbdu["RadProcMTFAdaptation"]["VIS_IRMTFCorrectionE_W"] = np.fromstring(fp.read(33 * 16 * 4),
+ dtype=">f4").reshape((33, 16))
+ bbdu["RadProcMTFAdaptation"]["VIS_IRMTFCorrectionN_S"] = np.fromstring(fp.read(33 * 16 * 4),
+ dtype=">f4").reshape((33, 16))
+ bbdu["RadProcMTFAdaptation"]["HRVMTFCorrectionE_W"] = np.fromstring(fp.read(9 * 16 * 4),
+ dtype=">f4").reshape((9, 16))
+ bbdu["RadProcMTFAdaptation"]["HRVMTFCorrectionN_S"] = np.fromstring(fp.read(9 * 16 * 4),
+ dtype=">f4").reshape((9, 16))
+ bbdu["RadProcMTFAdaptation"]["StraylightCorrection"] = np.fromstring(fp.read(12 * 8 * 8 * 4),
+ dtype=">f4").reshape((12, 8, 8))
+
+ hdr["BlackBodyDataUsed"] = bbdu
+
+ # GeometricProcessing
+
+ geoproc = {}
+ geoproc["OptAxisDistances"] = {}
+ geoproc["OptAxisDistances"]["E-WFocalPlane"] = np.fromstring(fp.read(42 * 4),
+ dtype=">f4")
+ geoproc["OptAxisDistances"]["N-SFocalPlane"] = np.fromstring(fp.read(42 * 4),
+ dtype=">f4")
+
+ geoproc["EarthModel"] = {}
+ geoproc["EarthModel"]["TypeOfEarthModel"] = ord(fp.read(1))
+ geoproc["EarthModel"]["EquatorialRadius"] = rbin.read_float8(fp.read(8))
+ geoproc["EarthModel"]["NorthPolarRadius"] = rbin.read_float8(fp.read(8))
+ geoproc["EarthModel"]["SouthPolarRadius"] = rbin.read_float8(fp.read(8))
+ geoproc["AtmosphericModel"] = np.fromstring(fp.read(12 * 360 * 4),
+ dtype=">f4").reshape((12, 360))
+ geoproc["ResamplingFunctions"] = np.fromstring(fp.read(12),
+ dtype=np.uint8)
+
+ hdr["GeometricProcessing"] = geoproc
+
+ return hdr
+
def read_epiheader(fp):
    """Read the MSG level 1.5 epilogue (trailer) record from file-like *fp*.

    The fields are decoded strictly in their on-disk order, so the
    sequence of ``fp.read()`` calls below must not be rearranged.
    Returns a dict of decoded trailer fields.
    """
    ftr = dict()
    ftr["15TRAILERVersion"] = ord(fp.read(1))
    # NOTE(review): "SateliteID" [sic] — the misspelled key is kept because
    # callers may already look it up by this name.
    ftr["SateliteID"] = rbin.read_uint2(fp.read(2))
    # Single-byte flags: any non-zero value means "true".
    ftr["NominalImageScanning"] = ord(fp.read(1)) > 0
    ftr["ReducedScan"] = ord(fp.read(1)) > 0
    ftr["ForwardScanStart"] = rbin.read_cds_time(fp.read(6))
    ftr["ForwardScanEnd"] = rbin.read_cds_time(fp.read(6))
    ftr["NominalBehaviour"] = ord(fp.read(1)) > 0
    ftr["RadScanIrregularity"] = ord(fp.read(1)) > 0
    ftr["RadStoppage"] = ord(fp.read(1)) > 0
    ftr["RepeatCycleNotCompleted"] = ord(fp.read(1)) > 0
    ftr["GainChangeTookPlace"] = ord(fp.read(1)) > 0
    ftr["DecontaminationTookPlace"] = ord(fp.read(1)) > 0
    ftr["NoBBCalibrationAchieved"] = ord(fp.read(1)) > 0
    ftr["IncorrectTemperature"] = ord(fp.read(1)) > 0
    ftr["InvalidBBData"] = ord(fp.read(1)) > 0
    ftr["InvalidAuxOrHKTMData"] = ord(fp.read(1)) > 0
    ftr["RefocusingMechanismActuated"] = ord(fp.read(1)) > 0
    ftr["MirrorBackToReferencePos"] = ord(fp.read(1)) > 0
    # Per-channel line statistics: 12 big-endian uint32, one per channel.
    ftr["PlannedNumberOfL10Lines"] = np.fromstring(fp.read(12 * 4),
                                                  dtype=">u4")
    ftr["NumberOfMissingL10Lines"] = np.fromstring(fp.read(12 * 4),
                                                   dtype=">u4")
    ftr["NumberOfCorruptedL10Lines"] = np.fromstring(fp.read(12 * 4),
                                                     dtype=">u4")
    ftr["NumberOfReplacedL10Lines"] = np.fromstring(fp.read(12 * 4),
                                                    dtype=">u4")
    # One 6-field validity record per channel (12 channels).
    validitytype = np.dtype([('NominalImage', '>u1'),
                             ('NonNominalBecauseIncomplete', '>u1'),
                             ('NonNominalRadiometricQuality', '>u1'),
                             ('NonNominalGeometricQuality', '>u1'),
                             ('NonNominalTimeliness', '>u1'),
                             ('IncompleteL15', '>u1')])
    ftr["L15ImageValidity"] = np.fromstring(fp.read(12 * 6),
                                            dtype=validitytype)

    # Actual image coverage: VIS/IR window first, then the two HRV windows
    # (lower and upper), all as signed line/column numbers.
    ftr["SouthernLineActual"] = rbin.read_int4(fp.read(4))
    ftr["NorthernLineActual"] = rbin.read_int4(fp.read(4))
    ftr["EasternColumnActual"] = rbin.read_int4(fp.read(4))
    ftr["WesternColumnActual"] = rbin.read_int4(fp.read(4))
    ftr["LowerSouthLineActual"] = rbin.read_int4(fp.read(4))
    ftr["LowerNorthLineActual"] = rbin.read_int4(fp.read(4))
    ftr["LowerEastColumnActual"] = rbin.read_int4(fp.read(4))
    ftr["LowerWestColumnActual"] = rbin.read_int4(fp.read(4))
    ftr["UpperSouthLineActual"] = rbin.read_int4(fp.read(4))
    ftr["UpperNorthLineActual"] = rbin.read_int4(fp.read(4))
    ftr["UpperEastColumnActual"] = rbin.read_int4(fp.read(4))
    ftr["UpperWestColumnActual"] = rbin.read_int4(fp.read(4))

    return ftr
+
def read_metadata(prologue, image_files, epilogue):
    """Build a Metadata object for an MSG scene.

    Combines selected items from the prologue and epilogue (trailer)
    segments with header fields from the first image segment in
    *image_files*.  Returns a populated ``Metadata`` instance whose
    ``calibrate`` attribute is a channel-specific ``_Calibrator``.
    """
    segment_size = 464 # number of lines in a segment

    fp = StringIO(prologue.data)
    hdr = read_proheader(fp)

    fp = StringIO(epilogue.data)
    ftr = read_epiheader(fp)

    im = _xrit.read_imagedata(image_files[0])

    md = Metadata()
    md.calibrate = _Calibrator(hdr, im.product_name)

    md.sublon = hdr["ProjectionDescription"]["LongitudeOfSSP"]
    md.product_name = im.product_id
    md.channel = im.product_name
    # HRV has its own (higher resolution) reference grid.
    if md.channel == "HRV":
        md.image_size = np.array((hdr["ReferenceGridHRV"]["NumberOfLines"],
                                  hdr["ReferenceGridHRV"]["NumberOfColumns"]))
    else:
        md.image_size = np.array((hdr["ReferenceGridVIS_IR"]["NumberOfLines"],
                                  hdr["ReferenceGridVIS_IR"]["NumberOfColumns"]))

    md.satname = im.platform.lower()
    md.product_type = 'full disc'
    md.region_name = 'full disc'
    if md.channel == "HRV":
        # GridOrigin is e.g. "south east"; split into N/S and E/W words to
        # select the matching trailer keys below.
        md.first_pixel = hdr["ReferenceGridHRV"]["GridOrigin"]
        ns_, ew_ = md.first_pixel.split()
        # HRV coverage comes as two windows (lower and upper).
        md.boundaries = np.array([[
            ftr["LowerSouthLineActual"],
            ftr["LowerNorthLineActual"],
            ftr["LowerEastColumnActual"],
            ftr["LowerWestColumnActual"]],
            [ftr["UpperSouthLineActual"],
             ftr["UpperNorthLineActual"],
             ftr["UpperEastColumnActual"],
             ftr["UpperWestColumnActual"]]])

        # Offset of this segment within the full image.
        im_loff = im.navigation.loff + segment_size * (im.segment.seg_no - 1)
        # e.g. ew_="east" -> ftr["LowerEastColumnActual"].
        md.coff = (ftr["Lower"+ew_.capitalize()+"ColumnActual"]
                   + im.navigation.coff - 1)
        md.loff = (ftr["Lower"+ns_.capitalize()+"LineActual"]
                   + im_loff - 1)

    else:
        md.first_pixel = hdr["ReferenceGridVIS_IR"]["GridOrigin"]
        ns_, ew_ = md.first_pixel.split()
        # VIS/IR coverage is a single window.
        md.boundaries = np.array([[
            ftr["SouthernLineActual"],
            ftr["NorthernLineActual"],
            ftr["EasternColumnActual"],
            ftr["WesternColumnActual"]]])

        im_loff = im.navigation.loff + segment_size * (im.segment.seg_no - 1)
        # e.g. ew_="east" -> ftr["EasternColumnActual"].
        md.coff = (ftr[ew_.capitalize()+"ernColumnActual"]
                   + im.navigation.coff - 1)
        md.loff = (ftr[ns_.capitalize()+"ernLineActual"]
                   + im_loff - 1)

    md.data_type = im.structure.nb
    md.no_data_value = no_data_value
    md.line_offset = 0
    md.time_stamp = im.time_stamp
    md.production_time = im.production_time
    # Data is delivered as raw counts; calibration happens via md.calibrate.
    md.calibration_unit = 'counts'

    return md
+
if __name__ == '__main__':
    # Ad-hoc test driver: argv[1] is the prologue file, argv[-1] the
    # epilogue, everything in between the image segment files.
    p = _xrit.read_prologue(sys.argv[1])
    e = _xrit.read_epilogue(sys.argv[-1])
    print read_metadata(p, sys.argv[2:-1], e)
diff --git a/mipp/xrit/MTP.py b/mipp/xrit/MTP.py
new file mode 100644
index 0000000..0060f7c
--- /dev/null
+++ b/mipp/xrit/MTP.py
@@ -0,0 +1,335 @@
+#
+# $Id$
+#
+
"""This module reads satellite data files in OpenMTP format (e.g. the Meteosat-7 prologue file). Format described in:
+'The Meteosat Archive; Format Guide No. 1; Basic Imagery: OpenMTP Format'; EUM FG 1; Rev 2.1; April 2000
+"""
+
+import sys
+from datetime import timedelta
+from StringIO import StringIO
+import numpy as np
+
+from mipp import CalibrationError
+from mipp import strptime
+from mipp.xrit import _xrit
+from mipp.xrit import Metadata
+from mipp.xrit import bin_reader as rbin
+
+__all__ = ['read_metadata']
+
# Fixed sizes (bytes) of the OpenMTP prologue sections.
ASCII_HEADER_LEN = 1345
BINARY_HEADER_LEN = 144515
BINARY_HEADER_LEN_VISCOMP = 192999

def _read_ascii_header(fp):
    """Parse the fixed-size ASCII section of an OpenMTP prologue.

    Each header line holds a 14-character, space-padded keyword, one
    separator column, and the value; both sides are stripped.  Returns a
    keyword -> value dict of strings.
    """
    # Copy the header into its own buffer first: mixing read() calls with
    # line iteration on the same file object is not allowed.
    buf = StringIO(fp.read(ASCII_HEADER_LEN))
    return dict((line[:14].strip(), line[15:].strip()) for line in buf)
+
def _read_binary_header(fp, product_type):
    """Parse the binary section of an OpenMTP prologue from *fp*.

    *product_type* selects the record layout: 'PVISBAN' products carry a
    second channel id block.  Fields are decoded strictly in on-disk
    order (EUM FG 1, Rev 2.1), so the read sequence must not be
    rearranged.  Returns a dict of decoded header fields.
    """
    hdr = dict()
    hdr['fname'] = fp.read(8)
    hdr['year'] = rbin.read_int4(fp.read(4))
    hdr['jday'] = rbin.read_int4(fp.read(4))
    hdr['slot'] = rbin.read_int4(fp.read(4))
    hdr['dtype'] = rbin.read_int4(fp.read(4))
    hdr['date'] = rbin.read_int4(fp.read(4))
    hdr['time'] = rbin.read_int4(fp.read(4))
    hdr['pltfrm'] = fp.read(2)
    fp.read(2) # spares
    hdr['proc'] = rbin.read_int4(fp.read(4))
    hdr['chan'] = rbin.read_int4(fp.read(4))
    # Calibration coefficient: an ASCII number scaled by 1e5, or all NULs
    # when no calibration is available (stored as None).
    calco_str = fp.read(5)
    if calco_str == '\0\0\0\0\0':
        hdr['calco'] = None
    else:
        hdr['calco'] = float(calco_str) / 100000.0
    # Space count: ASCII number scaled by 10; all NULs means 0.0.
    space_str = fp.read(3)
    if space_str == '\0\0\0':
        hdr['space'] = 0.0
    else:
        hdr['space'] = float(space_str) / 10.0
    hdr['caltim'] = fp.read(5)
    fp.read(3) # spares
    hdr['rec2siz'] = rbin.read_int4(fp.read(4))
    hdr['lrecsiz'] = rbin.read_int4(fp.read(4))
    hdr['loffset'] = rbin.read_int4(fp.read(4))
    hdr['rtmet'] = fp.read(15)
    hdr['dmmod'] = rbin.read_int4(fp.read(4))
    hdr['rsmod'] = rbin.read_int4(fp.read(4))
    # Sub-satellite point longitude.
    hdr['ssp'] = rbin.read_float4(fp.read(4))
    # Skip unused fields.
    fp.read(12)
    fp.read(4)
    fp.read(8)
    hdr['line1'] = rbin.read_int4(fp.read(4))
    hdr['pixel1'] = rbin.read_int4(fp.read(4))
    hdr['nlines'] = rbin.read_int4(fp.read(4))
    hdr['npixels'] = rbin.read_int4(fp.read(4))
    fp.read(16)
    #hdr['mlt1'] = fp.read(2500)
    #hdr['mlt2'] = fp.read(2500)
    hdr['imgqua'] = rbin.read_int4(fp.read(4))
    fp.read(16)
    fp.read(2636) # only present for unrectified images
    hdr['ndgrp'] = rbin.read_int4(fp.read(4))
    hdr['dmstrt'] = rbin.read_int4(fp.read(4))
    hdr['dmend'] = rbin.read_int4(fp.read(4))
    hdr['dmstep'] = rbin.read_int4(fp.read(4))
    # Skip the 105x105 deformation matrix (8 bytes per element).
    fp.read(8*105*105)
    hdr['ncor'] = rbin.read_int4(fp.read(4))
    hdr['chid1'] = rbin.read_int4(fp.read(4))
    fp.read(16*3030)
    # Band-interleaved VIS products carry a second channel id block.
    if product_type == 'PVISBAN':
        hdr['chid2'] = rbin.read_int4(fp.read(4))
        fp.read(16*3030)
    return hdr
+
# Lookup tables mapping brightness temperature [K] to radiance, keyed by
# OpenMTP channel id (4=IR1, 5=IR2, 6=WV1, 7=WV2).  Each value is an (N, 2)
# array of [temperature, radiance] rows; _Calibrator interpolates column 1
# (radiance) back to column 0 (temperature).
temp2rad = {}
+
+# channel IR1
+temp2rad[4] = np.array([[170.0, 0.667], [171.0, 0.697], [172.0, 0.727], [173.0,
+0.758], [174.0, 0.789], [175.0, 0.822], [176.0, 0.856], [177.0, 0.891], [178.0,
+0.927], [179.0, 0.964], [180.0, 1.002], [181.0, 1.040], [182.0, 1.080], [183.0,
+1.122], [184.0, 1.164], [185.0, 1.207], [186.0, 1.251], [187.0, 1.297], [188.0,
+1.344], [189.0, 1.392], [190.0, 1.441], [191.0, 1.491], [192.0, 1.542], [193.0,
+1.595], [194.0, 1.649], [195.0, 1.704], [196.0, 1.761], [197.0, 1.818], [198.0,
+1.877], [199.0, 1.938], [200.0, 1.999], [201.0, 2.062], [202.0, 2.127], [203.0,
+2.192], [204.0, 2.259], [205.0, 2.328], [206.0, 2.397], [207.0, 2.468], [208.0,
+2.541], [209.0, 2.615], [210.0, 2.690], [211.0, 2.767], [212.0, 2.846], [213.0,
+2.925], [214.0, 3.007], [215.0, 3.089], [216.0, 3.174], [217.0, 3.259], [218.0,
+3.347], [219.0, 3.435], [220.0, 3.526], [221.0, 3.617], [222.0, 3.711], [223.0,
+3.806], [224.0, 3.902], [225.0, 4.000], [226.0, 4.100], [227.0, 4.201], [228.0,
+4.304], [229.0, 4.408], [230.0, 4.514], [231.0, 4.622], [232.0, 4.731], [233.0,
+4.842], [234.0, 4.955], [235.0, 5.069], [236.0, 5.185], [237.0, 5.302], [238.0,
+5.422], [239.0, 5.542], [240.0, 5.665], [241.0, 5.789], [242.0, 5.915], [243.0,
+6.043], [244.0, 6.172], [245.0, 6.303], [246.0, 6.436], [247.0, 6.570], [248.0,
+6.706], [249.0, 6.844], [250.0, 6.983], [251.0, 7.125], [252.0, 7.268], [253.0,
+7.412], [254.0, 7.559], [255.0, 7.707], [256.0, 7.857], [257.0, 8.009], [258.0,
+8.162], [259.0, 8.317], [260.0, 8.474], [261.0, 8.633], [262.0, 8.793], [263.0,
+8.955], [264.0, 9.119], [265.0, 9.285], [266.0, 9.453], [267.0, 9.622], [268.0,
+9.793], [269.0, 9.966], [270.0, 10.141], [271.0, 10.317], [272.0, 10.495],
+[273.0, 10.675], [274.0, 10.857], [275.0, 11.040], [276.0, 11.225], [277.0,
+11.412], [278.0, 11.601], [279.0, 11.792], [280.0, 11.984], [281.0, 12.178],
+[282.0, 12.374], [283.0, 12.572], [284.0, 12.772], [285.0, 12.973], [286.0,
+13.176], [287.0, 13.381], [288.0, 13.587], [289.0, 13.796], [290.0, 14.006],
+[291.0, 14.218], [292.0, 14.432], [293.0, 14.647], [294.0, 14.864], [295.0,
+15.083], [296.0, 15.304], [297.0, 15.527], [298.0, 15.751], [299.0, 15.977],
+[300.0, 16.205], [301.0, 16.435], [302.0, 16.666], [303.0, 16.899], [304.0,
+17.134], [305.0, 17.371], [306.0, 17.609], [307.0, 17.849], [308.0, 18.091],
+[309.0, 18.335]])
+
+# channel IR2
+
+temp2rad[5] = np.array([ [170.0, 0.664], [171.0, 0.693], [172.0, 0.723],
+[173.0, 0.753], [174.0, 0.785], [175.0, 0.818], [176.0, 0.851], [177.0, 0.886],
+[178.0, 0.922], [179.0, 0.958], [180.0, 0.996], [181.0, 1.035], [182.0, 1.075],
+[183.0, 1.115], [184.0, 1.157], [185.0, 1.200], [186.0, 1.245], [187.0, 1.290],
+[188.0, 1.336], [189.0, 1.384], [190.0, 1.433], [191.0, 1.483], [192.0, 1.534],
+[193.0, 1.586], [194.0, 1.640], [195.0, 1.695], [196.0, 1.751], [197.0, 1.808],
+[198.0, 1.867], [199.0, 1.927], [200.0, 1.988], [201.0, 2.051], [202.0, 2.115],
+[203.0, 2.180], [204.0, 2.247], [205.0, 2.315], [206.0, 2.384], [207.0, 2.455],
+[208.0, 2.527], [209.0, 2.601], [210.0, 2.676], [211.0, 2.752], [212.0, 2.830],
+[213.0, 2.909], [214.0, 2.990], [215.0, 3.072], [216.0, 3.156], [217.0, 3.241],
+[218.0, 3.328], [219.0, 3.416], [220.0, 3.506], [221.0, 3.598], [222.0, 3.690],
+[223.0, 3.785], [224.0, 3.881], [225.0, 3.978], [226.0, 4.077], [227.0, 4.178],
+[228.0, 4.280], [229.0, 4.384], [230.0, 4.490], [231.0, 4.597], [232.0, 4.705],
+[233.0, 4.816], [234.0, 4.928], [235.0, 5.041], [236.0, 5.156], [237.0, 5.273],
+[238.0, 5.392], [239.0, 5.512], [240.0, 5.634], [241.0, 5.757], [242.0, 5.882],
+[243.0, 6.009], [244.0, 6.138], [245.0, 6.268], [246.0, 6.400], [247.0, 6.534],
+[248.0, 6.669], [249.0, 6.806], [250.0, 6.945], [251.0, 7.085], [252.0, 7.227],
+[253.0, 7.371], [254.0, 7.517], [255.0, 7.664], [256.0, 7.813], [257.0, 7.964],
+[258.0, 8.117], [259.0, 8.271], [260.0, 8.427], [261.0, 8.585], [262.0, 8.745],
+[263.0, 8.906], [264.0, 9.069], [265.0, 9.234], [266.0, 9.400], [267.0, 9.569],
+[268.0, 9.739], [269.0, 9.911], [270.0, 10.084], [271.0, 10.260], [272.0,
+10.437], [273.0, 10.616], [274.0, 10.796], [275.0, 10.979], [276.0, 11.163],
+[277.0, 11.349], [278.0, 11.537], [279.0, 11.726], [280.0, 11.918], [281.0,
+12.111], [282.0, 12.306], [283.0, 12.502], [284.0, 12.701], [285.0, 12.901],
+[286.0, 13.103], [287.0, 13.306], [288.0, 13.512], [289.0, 13.719], [290.0,
+13.928], [291.0, 14.139], [292.0, 14.351], [293.0, 14.566], [294.0, 14.782],
+[295.0, 14.999], [296.0, 15.219], [297.0, 15.440], [298.0, 15.663], [299.0,
+15.888], [300.0, 16.115], [301.0, 16.343], [302.0, 16.573], [303.0, 16.805],
+[304.0, 17.039], [305.0, 17.274], [306.0, 17.511], [307.0, 17.750], [308.0,
+17.991], [309.0, 18.233], [310.0, 18.477], [311.0, 18.723], [312.0, 18.970],
+[313.0, 19.219], [314.0, 19.470], [315.0, 19.723], [316.0, 19.977], [317.0,
+20.233], [318.0, 20.491], [319.0, 20.751], [320.0, 21.012], [321.0, 21.275],
+[322.0, 21.539], [323.0, 21.806], [324.0, 22.074], [325.0, 22.343], [326.0,
+22.615], [327.0, 22.888], [328.0, 23.163], [329.0, 23.439], [330.0, 23.717],
+[331.0, 23.997], [332.0, 24.279], [333.0, 24.562], [334.0, 24.846], [335.0,
+25.133], [336.0, 25.421], [337.0, 25.711], [338.0, 26.002], [339.0, 26.295],
+[340.0, 26.590], [341.0, 26.886], [342.0, 27.184], [343.0, 27.484], [344.0,
+27.785], [345.0, 28.088], [346.0, 28.392], [347.0, 28.698], [348.0, 29.006],
+[349.0, 29.315], [350.0, 29.626], [351.0, 29.938], [352.0, 30.252], [353.0,
+30.568], [354.0, 30.885], [355.0, 31.204], [356.0, 31.524], [357.0, 31.846],
+[358.0, 32.170], [359.0, 32.495], [360.0, 32.822], [361.0, 33.150], [362.0,
+33.480], [363.0, 33.811], [364.0, 34.144], [365.0, 34.478], [366.0, 34.814],
+[367.0, 35.151], [368.0, 35.490], [369.0, 35.831]])
+
+
+# channel WV1
+
+temp2rad[6] = np.array([ [170.0, 0.021], [171.0, 0.023], [172.0, 0.024],
+[173.0, 0.026], [174.0, 0.028], [175.0, 0.030], [176.0, 0.033], [177.0, 0.035],
+[178.0, 0.038], [179.0, 0.040], [180.0, 0.043], [181.0, 0.046], [182.0, 0.049],
+[183.0, 0.053], [184.0, 0.056], [185.0, 0.060], [186.0, 0.064], [187.0, 0.068],
+[188.0, 0.073], [189.0, 0.077], [190.0, 0.082], [191.0, 0.087], [192.0, 0.093],
+[193.0, 0.099], [194.0, 0.105], [195.0, 0.111], [196.0, 0.118], [197.0, 0.125],
+[198.0, 0.132], [199.0, 0.139], [200.0, 0.147], [201.0, 0.156], [202.0, 0.164],
+[203.0, 0.174], [204.0, 0.183], [205.0, 0.193], [206.0, 0.204], [207.0, 0.214],
+[208.0, 0.226], [209.0, 0.238], [210.0, 0.250], [211.0, 0.263], [212.0, 0.276],
+[213.0, 0.290], [214.0, 0.305], [215.0, 0.320], [216.0, 0.335], [217.0, 0.352],
+[218.0, 0.369], [219.0, 0.386], [220.0, 0.405], [221.0, 0.423], [222.0, 0.443],
+[223.0, 0.464], [224.0, 0.485], [225.0, 0.507], [226.0, 0.529], [227.0, 0.553],
+[228.0, 0.577], [229.0, 0.602], [230.0, 0.628], [231.0, 0.655], [232.0, 0.683],
+[233.0, 0.712], [234.0, 0.741], [235.0, 0.772], [236.0, 0.804], [237.0, 0.837],
+[238.0, 0.870], [239.0, 0.905], [240.0, 0.941], [241.0, 0.978], [242.0, 1.016],
+[243.0, 1.056], [244.0, 1.096], [245.0, 1.138], [246.0, 1.181], [247.0, 1.225],
+[248.0, 1.271], [249.0, 1.317], [250.0, 1.366], [251.0, 1.415], [252.0, 1.466],
+[253.0, 1.518], [254.0, 1.572], [255.0, 1.627], [256.0, 1.684], [257.0, 1.742],
+[258.0, 1.802], [259.0, 1.864], [260.0, 1.927], [261.0, 1.991], [262.0, 2.058],
+[263.0, 2.126], [264.0, 2.195], [265.0, 2.267], [266.0, 2.340], [267.0, 2.415],
+[268.0, 2.492], [269.0, 2.570], [270.0, 2.651], [271.0, 2.733], [272.0, 2.818],
+[273.0, 2.904], [274.0, 2.993], [275.0, 3.083], [276.0, 3.175], [277.0, 3.270],
+[278.0, 3.367], [279.0, 3.465], [280.0, 3.566], [281.0, 3.670], [282.0, 3.775],
+[283.0, 3.883], [284.0, 3.993], [285.0, 4.105], [286.0, 4.220], [287.0, 4.337],
+[288.0, 4.456], [289.0, 4.578], [290.0, 4.703], [291.0, 4.829], [292.0, 4.959],
+[293.0, 5.091], [294.0, 5.226], [295.0, 5.363], [296.0, 5.503], [297.0, 5.645],
+[298.0, 5.790], [299.0, 5.938], [300.0, 6.089], [301.0, 6.243], [302.0, 6.399],
+[303.0, 6.559], [304.0, 6.721], [305.0, 6.886], [306.0, 7.054], [307.0, 7.225],
+[308.0, 7.399], [309.0, 7.576], [310.0, 7.756], [311.0, 7.940], [312.0, 8.126],
+[313.0, 8.316], [314.0, 8.509], [315.0, 8.705], [316.0, 8.904], [317.0, 9.107],
+[318.0, 9.312], [319.0, 9.522], [320.0, 9.734], [321.0, 9.950], [322.0,
+10.170], [323.0, 10.393], [324.0, 10.619], [325.0, 10.849], [326.0, 11.082],
+[327.0, 11.319], [328.0, 11.560], [329.0, 11.804], [330.0, 12.052], [331.0,
+12.304], [332.0, 12.559], [333.0, 12.818], [334.0, 13.081], [335.0, 13.347],
+[336.0, 13.618], [337.0, 13.892], [338.0, 14.170], [339.0, 14.452], [340.0,
+14.738], [341.0, 15.028], [342.0, 15.322], [343.0, 15.620], [344.0, 15.922],
+[345.0, 16.228], [346.0, 16.539], [347.0, 16.853], [348.0, 17.171], [349.0,
+17.494], [350.0, 17.821], [351.0, 18.152], [352.0, 18.487], [353.0, 18.827],
+[354.0, 19.171], [355.0, 19.519], [356.0, 19.871], [357.0, 20.228], [358.0,
+20.590], [359.0, 20.955], [360.0, 21.326], [361.0, 21.700], [362.0, 22.080],
+[363.0, 22.463], [364.0, 22.851], [365.0, 23.244], [366.0, 23.642], [367.0,
+24.044], [368.0, 24.450], [369.0, 24.862]])
+
+# channel WV2
+
+temp2rad[7] = np.array([ [170.0, 0.020], [171.0, 0.022], [172.0, 0.024],
+[173.0, 0.026], [174.0, 0.028], [175.0, 0.030], [176.0, 0.032], [177.0, 0.034],
+[178.0, 0.037], [179.0, 0.039], [180.0, 0.042], [181.0, 0.045], [182.0, 0.048],
+[183.0, 0.051], [184.0, 0.055], [185.0, 0.059], [186.0, 0.063], [187.0, 0.067],
+[188.0, 0.071], [189.0, 0.076], [190.0, 0.080], [191.0, 0.085], [192.0, 0.091],
+[193.0, 0.096], [194.0, 0.102], [195.0, 0.108], [196.0, 0.115], [197.0, 0.122],
+[198.0, 0.129], [199.0, 0.136], [200.0, 0.144], [201.0, 0.152], [202.0, 0.161],
+[203.0, 0.170], [204.0, 0.179], [205.0, 0.189], [206.0, 0.199], [207.0, 0.210],
+[208.0, 0.221], [209.0, 0.232], [210.0, 0.244], [211.0, 0.257], [212.0, 0.270],
+[213.0, 0.284], [214.0, 0.298], [215.0, 0.313], [216.0, 0.328], [217.0, 0.344],
+[218.0, 0.361], [219.0, 0.378], [220.0, 0.396], [221.0, 0.414], [222.0, 0.434],
+[223.0, 0.454], [224.0, 0.474], [225.0, 0.496], [226.0, 0.518], [227.0, 0.541],
+[228.0, 0.565], [229.0, 0.590], [230.0, 0.615], [231.0, 0.642], [232.0, 0.669],
+[233.0, 0.697], [234.0, 0.726], [235.0, 0.756], [236.0, 0.788], [237.0, 0.820],
+[238.0, 0.853], [239.0, 0.887], [240.0, 0.922], [241.0, 0.959], [242.0, 0.996],
+[243.0, 1.035], [244.0, 1.074], [245.0, 1.115], [246.0, 1.158], [247.0, 1.201],
+[248.0, 1.246], [249.0, 1.292], [250.0, 1.339], [251.0, 1.388], [252.0, 1.438],
+[253.0, 1.489], [254.0, 1.542], [255.0, 1.596], [256.0, 1.652], [257.0, 1.709],
+[258.0, 1.768], [259.0, 1.828], [260.0, 1.890], [261.0, 1.953], [262.0, 2.019],
+[263.0, 2.085], [264.0, 2.154], [265.0, 2.224], [266.0, 2.296], [267.0, 2.370],
+[268.0, 2.445], [269.0, 2.522], [270.0, 2.602], [271.0, 2.683], [272.0, 2.766],
+[273.0, 2.851], [274.0, 2.937], [275.0, 3.026], [276.0, 3.117], [277.0, 3.210],
+[278.0, 3.305], [279.0, 3.402], [280.0, 3.502], [281.0, 3.603], [282.0, 3.707],
+[283.0, 3.813], [284.0, 3.921], [285.0, 4.031], [286.0, 4.144], [287.0, 4.259],
+[288.0, 4.377], [289.0, 4.497], [290.0, 4.619], [291.0, 4.744], [292.0, 4.871],
+[293.0, 5.001], [294.0, 5.133], [295.0, 5.268], [296.0, 5.406], [297.0, 5.546],
+[298.0, 5.689], [299.0, 5.835], [300.0, 5.983], [301.0, 6.134], [302.0, 6.288],
+[303.0, 6.445], [304.0, 6.605], [305.0, 6.767], [306.0, 6.933], [307.0, 7.101],
+[308.0, 7.272], [309.0, 7.447], [310.0, 7.624], [311.0, 7.805], [312.0, 7.988],
+[313.0, 8.175], [314.0, 8.365], [315.0, 8.558], [316.0, 8.754], [317.0, 8.953],
+[318.0, 9.156], [319.0, 9.362], [320.0, 9.571], [321.0, 9.784], [322.0,
+10.000], [323.0, 10.219], [324.0, 10.442], [325.0, 10.669], [326.0, 10.899],
+[327.0, 11.132], [328.0, 11.369], [329.0, 11.610], [330.0, 11.854], [331.0,
+12.101], [332.0, 12.353], [333.0, 12.608], [334.0, 12.867], [335.0, 13.130],
+[336.0, 13.396], [337.0, 13.666], [338.0, 13.940], [339.0, 14.218], [340.0,
+14.500], [341.0, 14.786], [342.0, 15.075], [343.0, 15.369], [344.0, 15.666],
+[345.0, 15.968], [346.0, 16.274], [347.0, 16.583], [348.0, 16.897], [349.0,
+17.215], [350.0, 17.537], [351.0, 17.863], [352.0, 18.194], [353.0, 18.528],
+[354.0, 18.867], [355.0, 19.211], [356.0, 19.558], [357.0, 19.910], [358.0,
+20.266], [359.0, 20.626], [360.0, 20.991], [361.0, 21.361], [362.0, 21.734],
+[363.0, 22.113], [364.0, 22.495], [365.0, 22.883], [366.0, 23.274], [367.0,
+23.671], [368.0, 24.072], [369.0, 24.477]])
+
class _Calibrator(object):
    """Calibrate Meteosat-7 counts to radiances or brightness temperatures.

    Uses the space count and calibration coefficient from the binary
    prologue header, and the ``temp2rad`` lookup tables for the
    radiance-to-temperature step.  See
    http://www.eumetsat.int/Home/Main/DataProducts/Calibration/MFGCalibration/index.htm?l=en
    """

    def __init__(self, hdr):
        self.hdr = hdr

    def __call__(self, image, calibrate=1):
        """Calibrate *image*.

        ``calibrate=0`` returns raw counts unchanged, ``2`` returns
        radiances, anything else returns brightness temperatures [K].
        Returns a ``(data, unit)`` tuple.
        """
        if calibrate == 0:
            # Raw counts requested; nothing to do.
            return (image,
                    "counts")

        space = self.hdr["space"]
        calco = self.hdr["calco"]
        if space is None or calco is None:
            # Missing coefficients: don't know how to calibrate.
            raise CalibrationError("Not implemented")

        radiances = (image - space) * calco
        if calibrate == 2:
            return (radiances,
                    "W m-2 sr-1")

        # Brightness temperature: piecewise-linear interpolation of the
        # channel's (temperature, radiance) lookup table.
        lut = temp2rad[self.hdr["chan"]]
        flat_bt = np.interp(radiances.ravel(),
                            lut[:, 1],   # known radiances
                            lut[:, 0])   # known brightness temperatures
        return (flat_bt.reshape(radiances.shape),
                "K")
+
def read_metadata(prologue, image_files):
    """Build a Metadata object from the Meteosat-7 (OpenMTP) prologue.

    Combines the ASCII and binary prologue headers with fields from the
    first image segment in *image_files*.  Returns a populated
    ``Metadata`` instance whose ``calibrate`` attribute is a
    ``_Calibrator`` built from the binary header.
    """
    im = _xrit.read_imagedata(image_files[0])
    fp = StringIO(prologue.data)
    asc_hdr = _read_ascii_header(fp)
    # The binary header follows the ASCII one in the same buffer.
    bin_hdr = _read_binary_header(fp, asc_hdr['ProductType'])
    md = Metadata()
    md.calibrate = _Calibrator(bin_hdr)
    md.product_name = prologue.product_id
    pf = asc_hdr['Platform']
    if pf == 'M7':
        pf = 'MET7'
    md.satname = pf.lower()
    md.channel = prologue.product_name[:4]
    md.product_type = asc_hdr['ProductType']
    md.region_name = 'full disc'
    md.sublon = bin_hdr['ssp']
    md.first_pixel = asc_hdr['FirstPixelOri']
    # dtype is in bytes per pixel; data_type is in bits.
    md.data_type = bin_hdr['dtype']*8
    md.no_data_value = 0
    # NOTE(review): stored as (pixels, lines) here, while MSG.read_metadata
    # stores (lines, columns) — looks inconsistent; confirm against callers.
    md.image_size = (int(asc_hdr['NumberOfPixels']), int(asc_hdr['NumberOfLines']))
    md.line_offset = int(asc_hdr['LineOffset'])
    # handle 24 hour clock
    d, t = strptime(asc_hdr['Date'], "%y%m%d"), int(asc_hdr['Time'])
    md.time_stamp = d + timedelta(hours=t//100, minutes=t%100)
    md.production_time = strptime(asc_hdr['ProdDate'] + asc_hdr['ProdTime'], "%y%m%d%H:%M:%S")
    md.calibration_unit = 'counts'

    # Calibration table placeholder (no table for OpenMTP).
    md.calibration_table = dict((('name', ''),
                                 ('unit', ''),
                                 ('table', None)))

    # Offset of this segment within the full image.
    segment_size = im.structure.nl
    md.loff = im.navigation.loff + segment_size * (im.segment.seg_no - 1)
    md.coff = im.navigation.coff

    return md
+
if __name__ == '__main__':
    # Ad-hoc test driver: argv[1] is the prologue file, the rest the
    # image segment files.
    p = _xrit.read_prologue(sys.argv[1])
    print read_metadata(p, sys.argv[2:])
diff --git a/mipp/xrit/SGS.py b/mipp/xrit/SGS.py
new file mode 100644
index 0000000..3c5fa16
--- /dev/null
+++ b/mipp/xrit/SGS.py
@@ -0,0 +1,124 @@
+#
+# $Id$
+#
+
+"""This module will read satellit data files in SGS (Support Ground Segments) format (eg. GOES, MTSAT).
+Format described in:
+'MSG Ground Segment LRIT/HRIT Mission Specific Implementation'; EUM/MSG/SPE/057; Issue 6; 21 June 2006
+"""
+
+import sys
+import numpy
+
+from mipp.xrit import _xrit
+from mipp.xrit import Metadata
+from mipp.xrit import bin_reader as rbin
+
+no_data_value = 0
+
+__all__ = ['read_metadata']
+
def _read_sgs_common_header(fp):
    """Read the SGS common header record from *fp* into a dict.

    Field order and sizes follow EUM/MSG/SPE/057.
    """
    hdr = dict()
    hdr['CommonHeaderVersion'] = rbin.read_uint1(fp.read(1))
    fp.read(3)  # spare/padding bytes, skipped
    hdr['NominalSGSProductTime'] = rbin.read_cds_time(fp.read(6))
    hdr['SGSProductQuality'] = rbin.read_uint1(fp.read(1))
    hdr['SGSProductCompleteness'] = rbin.read_uint1(fp.read(1))
    hdr['SGSProductTimeliness'] = rbin.read_uint1(fp.read(1))
    hdr['SGSProcessingInstanceId'] = rbin.read_uint1(fp.read(1))
    hdr['BaseAlgorithmVersion'] = fp.read(16).strip()
    hdr['ProductAlgorithmVersion'] = fp.read(16).strip()
    return hdr
+
def _read_sgs_product_header(fp):
    """Read the SGS image product header record from *fp* into a dict
    (EUM/MSG/SPE/057); the variable-length data part is not read."""
    hdr = dict()
    hdr['ImageProductHeaderVersion'] = rbin.read_uint1(fp.read(1))
    fp.read(3)  # spare/padding bytes, skipped
    hdr['ImageProductHeaderLength'] = rbin.read_uint4(fp.read(4))
    hdr['ImageProductVersion'] = rbin.read_uint1(fp.read(1))
    #hdr['ImageProductHeaderData'] = fp.read()
    return hdr
+
class _Calibrator(object):
    """Callable that converts raw counts to calibrated values using the
    lookup table embedded in the image data-function header *hdr*.

    Integer keys of *hdr* are (count, value) pairs; '_UNIT' gives the
    physical unit of the calibrated values.
    """

    def __init__(self, hdr):
        self.hdr = hdr
        # Collect the integer-keyed entries into an (N, 2) count/value
        # table, sorted by count.
        dd = []
        for k in sorted(hdr.keys()):
            if isinstance(k, int):
                v = hdr[k]
                dd.append([float(k), v])
        self.calibration_table = numpy.array(dd, dtype=numpy.float32)

    def __call__(self, image, calibrate=1):
        """Calibrate *image* (an array of counts); returns (image, unit).

        Raises mipp.DecodeError for an unsupported table shape.
        """
        cal = self.calibration_table

        if type(cal) != numpy.ndarray:
            cal = numpy.array(cal)

        if cal.shape == (256, 2):
            # Full 8-bit lookup table: index directly by count.
            cal = cal[:,1] # nasty !!!
            cal[int(no_data_value)] = no_data_value
            image = cal[image] # this does not work on masked arrays !!!
        elif cal.shape == (2, 2):
            # Two-point table: linear scaling between the two entries.
            scale = (cal[1][1] - cal[0][1])/(cal[1][0] - cal[0][0])
            offset = cal[0][1] - cal[0][0]*scale
            image = numpy.select([image == no_data_value*scale], [no_data_value], default=offset + image*scale)
        else:
            # BUGFIX: 'mipp' was referenced without being imported in this
            # module, so this raise crashed with NameError instead of the
            # intended DecodeError; import it locally.
            import mipp
            raise mipp.DecodeError("Could not recognize the shape %s of the calibration table"%str(cal.shape))

        return (image,
                self.hdr['_UNIT'])
+
def read_metadata(prologue, image_files):
    """ Selected items from the GOES image data files (not much information in prologue).
    """
    # All interesting metadata live in the first segment's headers.
    im = _xrit.read_imagedata(image_files[0])
    hdr = im.data_function.data_definition
    md = Metadata()
    md.calibrate = _Calibrator(hdr)
    md.satname = im.platform.lower()
    md.product_type = 'full disc'
    md.region_name = 'full disc'
    md.product_name = prologue.product_id
    md.channel = prologue.product_name[:4]
    # Sub-satellite longitude encoded in the product name; presumably
    # e.g. '..._135_0W' -> 135.0, negated for west -- TODO confirm.
    ssp = float(im.product_name[5:-1].replace('_','.'))
    if im.product_name[-1].lower() == 'w':
        ssp *= -1
    md.sublon = ssp
    md.first_pixel = 'north west'
    md.data_type = im.structure.nb  # bits per pixel
    # Full image height spans all planned segments.
    nseg = im.segment.planned_end_seg_no - im.segment.planned_start_seg_no + 1
    md.image_size = (im.structure.nc, im.structure.nl*nseg) # !!!
    md.line_offset = 0
    md.time_stamp = im.time_stamp
    md.production_time = im.production_time
    md.calibration_unit = 'counts'

    # Calibration table
    dd = []
    for k in sorted(hdr.keys()):
        if isinstance(k, int):
            v = hdr[k]
            dd.append([float(k), v])

    md.calibration_table = dict((('name', im.data_function.data_definition['_NAME']),
                                 ('unit', im.data_function.data_definition['_UNIT']),
                                 ('table', numpy.array(dd, dtype=numpy.float32))))

    md.no_data_value = no_data_value

    # Absolute line offset of this segment within the full image.
    segment_size = im.structure.nl
    md.loff = im.navigation.loff + segment_size * (im.segment.seg_no - 1)
    md.coff = im.navigation.coff

    return md
+
def read_prologue_headers(fp):
    """Read the SGS common and product headers from *fp* and return
    them merged into a single dict."""
    headers = _read_sgs_common_header(fp)
    headers.update(_read_sgs_product_header(fp))
    return headers
+
if __name__ == '__main__':
    # Ad-hoc CLI check: print metadata decoded from a prologue file and
    # any number of image segment files.
    print read_metadata(_xrit.read_prologue(sys.argv[1]), sys.argv[2:])
diff --git a/mipp/xrit/__init__.py b/mipp/xrit/__init__.py
new file mode 100644
index 0000000..a312dd7
--- /dev/null
+++ b/mipp/xrit/__init__.py
@@ -0,0 +1,12 @@
+#
+from mipp.xrit import sat
+from mipp.xrit.mda import Metadata
+
+# low level XRIT data readers.
+from mipp.xrit._xrit import (read_prologue,
+ read_epilogue,
+ read_imagedata,
+ read_gts_message,
+ read_mpef_clm,
+ decompress,
+ list)
diff --git a/mipp/xrit/_xrit.py b/mipp/xrit/_xrit.py
new file mode 100644
index 0000000..2271da2
--- /dev/null
+++ b/mipp/xrit/_xrit.py
@@ -0,0 +1,383 @@
+#
+# $Id$
+#
+
+"""This module will read LRIT/HRIT headers. Format described in:
+"LRIT/HRIT Global Specification"; CGMS 03; Issue 2.6; 12 August 1999
+"MSG Ground Segment LRIT/HRIT Mission Specific Implementation"; EUM/MSG/SPE/057; Issue 6; 21 June 2006
+"""
+
+import sys
+import os
+from StringIO import StringIO
+
+import mipp
+from mipp.xrit import bin_reader as rbin
+
+__all__ = ['read_prologue',
+ 'read_epilogue',
+ 'read_imagedata',
+ 'read_gts_message',
+ 'read_mpef_clm',
+ 'decompress',
+ 'list']
+
def decompress(infile, outdir='.'):
    """Decompress an XRIT data file and return the path to the decompressed file.

    Expects to find Eumetsat's xRITDecompress tool through the
    environment variable XRIT_DECOMPRESS_PATH; the output file is
    created in *outdir*.

    Raises mipp.DecodeError if the tool is not configured, fails, or
    reports no output file.
    """
    from subprocess import Popen, PIPE
    cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None)
    if not cmd:
        # BUGFIX: message had an unbalanced '('.
        raise mipp.DecodeError("XRIT_DECOMPRESS_PATH is not defined (path to xRITDecompress)")

    # Run the tool with *outdir* as its working directory instead of the
    # former os.chdir() dance, which left the process chdir'ed into
    # *outdir* if Popen raised.
    p = Popen([cmd, infile], stdout=PIPE, cwd=outdir)
    stdout = p.communicate()[0]
    status = p.returncode

    # The tool reports 'Decompressed file: <name>' on stdout.
    outfile = ''
    for line in stdout.splitlines():
        try:
            k, v = [x.strip() for x in line.split(':', 1)]
        except ValueError:
            break
        if k == 'Decompressed file':
            outfile = v
            break

    if status != 0:
        raise mipp.DecodeError("xrit_decompress '%s', failed, status=%d"%(infile, status))
    if not outfile:
        raise mipp.DecodeError("xrit_decompress '%s', failed, no output file is generated"%infile)
    return outdir + '/' + outfile
+
+#-----------------------------------------------------------------------------
+#
+# XRIT header records
+#
+#-----------------------------------------------------------------------------
class PrimaryHeader(object):
    """XRIT primary header (type 0): file type, total header length and
    data field length."""
    hdr_type = 0
    hdr_name = 'primary_header'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.file_type = rbin.read_uint1(fp.read(1))
        self.total_hdr_len = rbin.read_uint4(fp.read(4))
        self.data_field_len = rbin.read_uint8(fp.read(8))

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, file_type:%d, total_hdr_len:%d, data_field_len:%d"%\
               (self.hdr_type, self.rec_len, self.file_type, self.total_hdr_len, self.data_field_len)
+
class ImageStructure(object):
    """XRIT image structure header (type 1): bits per pixel (nb),
    columns (nc), lines (nl) and compression flag."""
    hdr_type = 1
    hdr_name = 'structure'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.nb = rbin.read_uint1(fp.read(1))
        self.nc = rbin.read_uint2(fp.read(2))
        self.nl = rbin.read_uint2(fp.read(2))
        self.compress_flag = rbin.read_uint1(fp.read(1))

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, nb:%d, nc:%d, nl:%d, compress_flag:%d"%\
               (self.hdr_type, self.rec_len, self.nb, self.nc, self.nl, self.compress_flag)
+
class ImageNavigation(object):
    """XRIT image navigation header (type 2): projection name, column
    and line scaling factors (cfac/lfac) and offsets (coff/loff)."""
    hdr_type = 2
    hdr_name = 'navigation'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.proj_name = fp.read(32).strip()
        self.cfac = rbin.read_int4(fp.read(4))
        self.lfac = rbin.read_int4(fp.read(4))
        self.coff = rbin.read_int4(fp.read(4))
        self.loff = rbin.read_int4(fp.read(4))
        # Sub-satellite point longitude is embedded in parentheses in the
        # projection name (e.g. 'GEOS(0.0)'); None if not present.
        i1 = self.proj_name.find('(')
        i2 = self.proj_name.find(')')
        if i1 != -1 and i2 != -1:
            self.ssp = float(self.proj_name[i1+1:i2])
        else:
            self.ssp = None

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, proj_name:'%s', cfac:%d, lfac:%d. coff:%d, loff:%d"%\
               (self.hdr_type, self.rec_len, self.proj_name, self.cfac, self.lfac, self.coff, self.loff)
+
class ImageDataFunction(object):
    """XRIT image data function header (type 3): the calibration /
    data-definition block, parsed into a dict."""
    hdr_type = 3
    hdr_name = 'data_function'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        # rec_len counts the whole record incl. the 3 type/length bytes.
        self.data_definition = _decode_data_definition(fp.read(self.rec_len-3))

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, data_definition:'%s'"%\
               (self.hdr_type, self.rec_len, self.data_definition)
+
class AnnotationHeader(object):
    """XRIT annotation header (type 4): the annotation text split into
    its '-'-separated, '_'-padded fields, plus derived identifiers."""
    hdr_type = 4
    hdr_name = 'annotation'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.text = fp.read(self.rec_len-3).strip()
        # Fields are separated by '-' and padded with '_'.
        a = [x.strip('_') for x in self.text.split('-')]
        self.xrit_channel_id = a[0]
        self.dissemination_id = int(a[1])
        self.dissemination_sc = a[2]
        self.platform = a[3]
        self.product_name = a[4]
        self.segment_name = a[5]
        self.time_stamp = mipp.strptime(a[6], "%Y%m%d%H%M")
        self.flags = a[7]
        # Derived ids: platform_product[_segment]_YYYYMMDD_HHMM.
        self.segment_id = a[3] + '_' + a[4] + '_' + a[5] + '_' + self.time_stamp.strftime("%Y%m%d_%H%M")
        self.product_id = a[3] + '_' + a[4] + '_' + self.time_stamp.strftime("%Y%m%d_%H%M")

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, text:%s"%\
               (self.hdr_type, self.rec_len, self.text)
+
class TimeStampRecord(object):
    """XRIT time stamp header (type 5): a CDS time (mapped to the
    segment's production_time by Segment)."""
    hdr_type = 5
    hdr_name = 'time_stamp'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.cds_p_field = rbin.read_uint1(fp.read(1))
        self.time_stamp = rbin.read_cds_time(fp.read(6))

    def __str__(self):
        return "hdr_type:%d, rec_len:%d, time_stamp:%s"%\
               (self.hdr_type, self.rec_len, str(self.time_stamp))
+
class SegmentIdentification(object):
    """XRIT segment identification header (type 128): channel id and the
    segment's number within the planned segment range."""
    hdr_type = 128
    hdr_name = 'segment'
    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        self.gp_sc_id = rbin.read_uint2(fp.read(2))
        self.spectral_channel_id = rbin.read_uint1(fp.read(1))
        self.seg_no = rbin.read_uint2(fp.read(2))
        self.planned_start_seg_no = rbin.read_uint2(fp.read(2))
        self.planned_end_seg_no = rbin.read_uint2(fp.read(2))
        self.data_field_repr = rbin.read_uint1(fp.read(1))

    def __str__(self):
        return "hdr_type:%d, rec_len:%d gp_sc_id:%d, spectral_channel_id:%d, seg_no:%d, planned_start_seg_no:%d, planned_end_seg_no:%d, data_field_repr:%d"%\
               (self.hdr_type, self.rec_len, self.gp_sc_id, self.spectral_channel_id,\
                self.seg_no, self.planned_start_seg_no, self.planned_end_seg_no, self.data_field_repr)
+
class ImageSegmentLineQuality(object):
    """XRIT image segment line quality header (type 129): one
    (line_no, time_stamp, lv, lr, lg) tuple per image line."""
    hdr_type = 129
    hdr_name = 'image_quality'

    def __init__(self, fp):
        self.rec_len = rbin.read_uint2(fp.read(2))
        a = []
        nb = 3  # 3 bytes (type + record length) already consumed
        while nb < (self.rec_len):
            ln = rbin.read_int4(fp.read(4))
            stamp = rbin.read_cds_time(fp.read(6))
            # lv/lr/lg: presumably line validity / radiometric / geometric
            # quality flags -- confirm against CGMS 03.
            lv = rbin.read_uint1(fp.read(1))
            lr = rbin.read_uint1(fp.read(1))
            lg = rbin.read_uint1(fp.read(1))
            a.append((ln, stamp, lv, lr, lg))
            #print ln, lv, lr, lg, stamp
            nb += 13  # bytes per line-quality entry
        self.line_quality = a

    def __str__(self):
        return "hdr_type:%d, rec_len:%d"%\
               (self.hdr_type, self.rec_len)
+
class UnknownHeader(object):
    """Fallback wrapper for header record types without a decoder; keeps
    the raw record bytes in .data."""
    hdr_name = 'unknown'
    def __init__(self, hdr_type, fp):
        self.hdr_type = hdr_type
        self.rec_len = rbin.read_uint2(fp.read(2))
        # rec_len counts the whole record incl. the 3 type/length bytes.
        self.data = fp.read(self.rec_len-3)
    def __str__(self):
        return "hdr_type:%d, rec_len:%d"%\
               (self.hdr_type, self.rec_len)
+
+def _decode_data_definition(buf):
+ dd = dict()
+ lines = [x.strip() for x in buf.strip().split('\r')]
+ for a in lines:
+ k, v = [x.strip() for x in a.split(':=')]
+ if k[0] == '$':
+ dd[k] = int(v)
+ elif k[0] == '_':
+ dd[k] = v
+ elif k.isdigit():
+ dd[int(k)] = float(v)
+ else:
+ raise mipp.DecodeError("could not decode data definition: '%s'"%a)
+ return dd
+
# Map from XRIT header record type to its decoder class.
header_map = {0: PrimaryHeader,
              1: ImageStructure,
              2: ImageNavigation,
              3: ImageDataFunction,
              4: AnnotationHeader,
              5: TimeStampRecord,
              128: SegmentIdentification,
              129: ImageSegmentLineQuality}
# All known record types, as a sorted tuple.
header_types = tuple(sorted(header_map.keys()))
+
def read_header(fp):
    """Generator yielding decoded XRIT header records from *fp*.

    The first record must be the primary header (type 0); records are
    then read until total_hdr_len bytes of header have been consumed.
    Unknown record types are wrapped in UnknownHeader.
    """
    hdr_type = rbin.read_uint1(fp.read(1))
    if hdr_type != 0:
        raise mipp.DecodeError("first header has to be a Primary Header, this one is of type %d"%hdr_type)
    phdr = PrimaryHeader(fp)
    yield phdr
    current_size = phdr.rec_len
    while current_size < phdr.total_hdr_len:
        hdr_type = rbin.read_uint1(fp.read(1))
        cls = header_map.get(hdr_type, None)
        if cls:
            hdr = cls(fp)
        else:
            hdr = UnknownHeader(hdr_type, fp)
        yield hdr
        current_size += hdr.rec_len
+
def read_headers(fp):
    """Read and return all header records of *fp* as a list.

    NOTE: the builtin list() is shadowed by this module's list()
    function, hence the comprehension instead of a list() call.
    """
    return [header for header in read_header(fp)]
+
+#-----------------------------------------------------------------------------
+#
+# File level
+#
+#-----------------------------------------------------------------------------
+class Segment(object):
+ def __init__(self, file_name):
+ self.file_name = file_name
+ fp = open(file_name)
+ for h in read_header(fp):
+ if h.hdr_type == 0:
+ self.file_type = h.file_type
+ elif h.hdr_type == 4:
+ self.platform = h.platform
+ self.product_name = h.product_name
+ self.segment_name = h.segment_name
+ self.time_stamp = h.time_stamp
+ self.product_id = h.product_id
+ self.segment_id = h.segment_id
+ elif h.hdr_type == 5:
+ self.production_time = h.time_stamp
+ elif h.hdr_type in header_types:
+ setattr(self, h.hdr_name, h)
+ fp.close()
+ try:
+ self.is_compressed = bool(self.structure.compress_flag)
+ except AttributeError:
+ self.is_compressed = False
+ # lazy reading of data
+ self._blob = None
+
+ @property
+ def data(self):
+ if not self._blob:
+ fp = open(self.file_name)
+ read_headers(fp)
+ self._blob = fp.read()
+ fp.close
+ return self._blob
+
+ def pprint(self):
+ keys = self.__dict__.keys()
+ keys.sort()
+ for k in keys:
+ if not k.startswith('_'):
+ print k + ':', self.__dict__[k]
+
+ def __str__(self):
+ return self.segment_id
+
class ImageSegment(Segment):
    """A Segment of file type 0, with line-wise access to the pixel data."""

    def __init__(self, file_name):
        Segment.__init__(self, file_name)
        # Bytes per image line: columns * bits-per-pixel / 8.
        # '//' keeps this an int under Python 3 as well (identical on 2).
        self.bytes_per_line = (self.structure.nc*self.structure.nb)//8
        self.fp = None

    def readline(self, nlines=1):
        """Return the next *nlines* image lines as a raw byte string.

        The file is opened and the headers skipped on first call.
        Raises mipp.DecodeError at end of file.
        """
        if not self.fp:
            self.fp = open(self.file_name)
            read_headers(self.fp)
        data = self.fp.read(self.bytes_per_line*nlines)
        if not data:
            # BUGFIX: was raise mipp.DecodeError("could not read", n, "bytes"),
            # which produced an exception carrying a 3-tuple of args
            # instead of a formatted message.
            raise mipp.DecodeError("could not read %d bytes"
                                   % (self.bytes_per_line*nlines))
        return data

    def close(self):
        """Close the underlying file, if open."""
        if self.fp:
            self.fp.close()
            self.fp = None
+
def read_prologue(file_name):
    """Open *file_name* and return it as a prologue Segment (file type 128)."""
    segment = Segment(file_name)
    if segment.file_type != 128:
        raise mipp.DecodeError("this is no 'prologue' file: '%s'"%file_name)
    return segment
+
def read_epilogue(file_name):
    """Open *file_name* and return it as an epilogue Segment (file type 129)."""
    segment = Segment(file_name)
    if segment.file_type != 129:
        raise mipp.DecodeError("this is no 'epilogue' file: '%s'"%file_name)
    return segment
+
def read_imagedata(file_name):
    """Open *file_name* and return it as an ImageSegment (file type 0)."""
    segment = Segment(file_name)
    if segment.file_type != 0:
        raise mipp.DecodeError("this is no 'image data' file: '%s'"%file_name)
    return ImageSegment(file_name)
+
+
def read_gts_message(file_name):
    """Open *file_name* and return it as a GTS message Segment (file type 1)."""
    segment = Segment(file_name)
    if segment.file_type != 1:
        raise mipp.DecodeError("this is no 'GTS Message' file: '%s'"%file_name)
    return segment
+
def read_mpef_clm(file_name):
    """Open *file_name* and return it as an MPEF cloud mask Segment (file type 144)."""
    segment = Segment(file_name)
    if segment.file_type != 144:
        raise mipp.DecodeError("this is no 'MPEF cloud mask' file: '%s'"%file_name)
    return segment
+
def list(file_name, dump_data=False):
    """Print all header records of *file_name*; optionally dump the data
    field to a file named after the annotation header's segment_id.

    NOTE: shadows the builtin list() within this module; the name is
    part of the public API (re-exported from mipp.xrit), so it is kept.
    """
    fname = 'xrit.dat'  # fallback output name if no annotation header
    fp = open(file_name)
    for hdr in read_header(fp):
        print hdr
        if hdr.hdr_name == 'annotation':
            fname = hdr.segment_id
    data = fp.read()
    fp.close()
    if dump_data:
        print 'Writing', fname
        fp = open(fname, 'wb')
        fp.write(data)
        fp.close()
+
+#-----------------------------------------------------------------------------
if __name__ == '__main__':
    # CLI: _xrit.py [-d] <xrit-file>; -d also dumps the data field.
    # BUGFIX: with exactly one argument the old code fell through both
    # branches and crashed with NameError on 'dump_data'/'filename'.
    args = sys.argv[1:]
    dump_data = False
    if args and args[0] == '-d':
        dump_data = True
        args = args[1:]
    if not args:
        raise SystemExit("usage: _xrit.py [-d] <xrit-file>")
    list(args[0], dump_data)
diff --git a/mipp/xrit/bin_reader.py b/mipp/xrit/bin_reader.py
new file mode 100644
index 0000000..c5ee2c0
--- /dev/null
+++ b/mipp/xrit/bin_reader.py
@@ -0,0 +1,54 @@
+#
+# $Id$
+#
+# Unpack binary data, all in network (big-endian) byte order.
+#
+import struct
+from datetime import datetime, timedelta
+
def read_uint1(buf):
    """Unpack one byte as an unsigned 8-bit integer."""
    (value,) = struct.unpack("!B", buf)
    return value

def read_uint2(buf):
    """Unpack two bytes as a big-endian unsigned 16-bit integer."""
    (value,) = struct.unpack("!H", buf)
    return value

def read_uint4(buf):
    """Unpack four bytes as a big-endian unsigned 32-bit integer."""
    (value,) = struct.unpack("!I", buf)
    return value
+
def read_uint8(buf):
    """Unpack eight bytes as a big-endian unsigned 64-bit integer.

    Uses struct's native 'Q' format instead of combining two 32-bit
    words by hand; the old code also relied on the Python 2-only '2L'
    long literal. The result is identical.
    """
    return struct.unpack("!Q", buf)[0]
+
def read_int2(buf):
    """Unpack two bytes as a big-endian signed 16-bit integer."""
    (value,) = struct.unpack("!h", buf)
    return value

def read_int4(buf):
    """Unpack four bytes as a big-endian signed 32-bit integer."""
    (value,) = struct.unpack("!i", buf)
    return value

def read_float4(buf):
    """Unpack four bytes as a big-endian 32-bit float."""
    (value,) = struct.unpack("!f", buf)
    return value

def read_float8(buf):
    """Unpack eight bytes as a big-endian 64-bit float."""
    (value,) = struct.unpack("!d", buf)
    return value
+
+
def read_cds_time(buf):
    """Decode a 6-byte CDS time: an unsigned 16-bit day count followed
    by an unsigned 32-bit millisecond-of-day, epoch 1958-01-01."""
    days, msecs = struct.unpack("!HI", buf[:6])
    return datetime(1958, 1, 1) + timedelta(days=days, milliseconds=msecs)
+
def read_cds_expanded_time(buf):
    """Decode a 10-byte expanded CDS time: day count, millisecond of
    day, then microsecond and nanosecond fields, epoch 1958-01-01."""
    days, msecs, usecs, nsecs = struct.unpack("!HIHH", buf[:10])
    return (datetime(1958, 1, 1)
            + timedelta(days=days, milliseconds=msecs,
                        microseconds=(usecs + nsecs/1000.)))
+
def read_cuc_time(buf, coarce, fine):
    """Decode a CUC time: *coarce* big-endian bytes of whole seconds and
    *fine* bytes of binary fraction of a second, epoch 1958-01-01.

    Indexes through bytearray() so it accepts both Python 2 str and
    Python 3 bytes buffers (ord() on a bytes element fails on Python 3);
    the arithmetic is identical to the original byte-weighted sums.
    """
    octets = bytearray(buf[:coarce + fine])
    ctime = 0
    for byte in octets[:coarce]:
        ctime = (ctime << 8) | byte
    ftime = 0.0
    for i, byte in enumerate(octets[coarce:]):
        ftime += byte * 2.0 ** (-8 * (i + 1))
    return datetime(1958, 1, 1) + timedelta(seconds=ctime + ftime)
diff --git a/mipp/xrit/convert.py b/mipp/xrit/convert.py
new file mode 100644
index 0000000..f5a3414
--- /dev/null
+++ b/mipp/xrit/convert.py
@@ -0,0 +1,72 @@
+from StringIO import StringIO
+import numpy as np
+
def dec10216(in_buffer):
    """Unpack 10-bit-packed pixels into right-adjusted 16-bit words
    (public wrapper around _dec10216)."""
    return _dec10216(in_buffer)
+
def hrpt_dec10216(in_buffer):
    """Decode packed 10-bit raw HRPT (level 0) data for AAPP.

    The input is packed 10-bit words with a record length of
    (11090 - 2) words (the last two words are ignored). AAPP expects
    10 bits right-adjusted in 16-bit words and a complete record length
    of 11090 words, so every full record is padded with two zero words.

    BUGFIX/perf: accumulate record chunks in a list and join once,
    instead of the former quadratic 'data += blob' string concatenation.
    """
    HRPT_RECLEN = 11090
    decoded = dec10216(in_buffer)
    blob_size = (HRPT_RECLEN - 2)*2  # bytes per (truncated) record
    parts = []
    for start in range(0, len(decoded), blob_size):
        blob = decoded[start:start + blob_size]
        if len(blob) == blob_size:
            # Pad a complete record with two zero 16-bit words.
            blob += '\0\0\0\0'
        parts.append(blob)
    return ''.join(parts)
+
+def _dec10216(inbuf):
+ inbuf = np.fromstring(inbuf, dtype=np.uint8)
+ arr10 = inbuf.astype(np.uint16)
+ arr16 = np.zeros((len(arr10) / 5 * 4,), dtype=np.uint16)
+ arr10_len = (len(arr16) * 5) / 4
+ arr10 = arr10[:arr10_len] # adjust size
+ """
+ /*
+ * pack 4 10-bit words in 5 bytes into 4 16-bit words
+ *
+ * 0 1 2 3 4 5
+ * 01234567890123456789012345678901234567890
+ * 0 1 2 3 4
+ */
+ ip = &in_buffer[i];
+ op = &out_buffer[j];
+ op[0] = ip[0]*4 + ip[1]/64;
+ op[1] = (ip[1] & 0x3F)*16 + ip[2]/16;
+ op[2] = (ip[2] & 0x0F)*64 + ip[3]/4;
+ op[3] = (ip[3] & 0x03)*256 +ip[4];
+ """
+ arr16.flat[::4] = np.left_shift(arr10[::5], 2) + \
+ np.right_shift((arr10[1::5]), 6)
+ arr16.flat[1::4] = np.left_shift((arr10[1::5] & 63), 4) + \
+ np.right_shift((arr10[2::5]), 4)
+ arr16.flat[2::4] = np.left_shift(arr10[2::5] & 15, 6) + \
+ np.right_shift((arr10[3::5]), 2)
+ arr16.flat[3::4] = np.left_shift(arr10[3::5] & 3, 8) + \
+ arr10[4::5]
+ return arr16.tostring()
+
if __name__ == '__main__':
    # CLI filter: read packed 10-bit data on stdin, write 16-bit words
    # to stdout; pass 'hrpt' as first argument for HRPT record padding.
    BLOB_SIZE = 10240  # has to be a multiple of 5
    import sys
    # BUGFIX: the old try/except IndexError left 'decoder' undefined
    # (NameError) when an argument other than 'hrpt' was given.
    decoder = dec10216
    if len(sys.argv) > 1 and sys.argv[1] == 'hrpt':
        decoder = hrpt_dec10216
    blob = sys.stdin.read(BLOB_SIZE)
    while blob:
        sys.stdout.write(decoder(blob))
        blob = sys.stdin.read(BLOB_SIZE)
+
diff --git a/mipp/xrit/loader.py b/mipp/xrit/loader.py
new file mode 100644
index 0000000..cb9e74d
--- /dev/null
+++ b/mipp/xrit/loader.py
@@ -0,0 +1,452 @@
+#
+# $id$
+#
+# Inspired by NWCLIB
+#
+import numpy
+import types
+import copy
+
+import logging
+logger = logging.getLogger('mipp')
+
+import mipp
+from mipp.xrit import _xrit, convert
+
+__all__ = ['ImageLoader']
+
+def _null_converter(blob):
+ return blob
+
class ImageLoader(object):
    """Slicable reader that assembles a (possibly masked and/or
    calibrated) image from a set of XRIT segment files described by the
    metadata object *mda*."""

    def __init__(self, mda, image_files, mask=False, calibrate=False):
        self.mda = mda
        self.image_files = image_files
        self.do_mask = mask
        self.do_calibrate = calibrate
        # full disc and square
        # NOTE(review): both slices use image_size[0]; this assumes a
        # square image -- confirm for non-square products.
        self._allrows = slice(0, self.mda.image_size[0]) # !!!
        self._allcolumns = slice(0, self.mda.image_size[0])

    def raw_slicing(self, item):
        """Raw slicing, no rotation of image.
        """
        # All data reading should end up here.

        # Don't mess with callers metadata.
        mda = copy.copy(self.mda)
        rows, columns = self._handle_item(item)

        ns_, ew_ = mda.first_pixel.split()

        if not hasattr(mda, "boundaries"):
            image = self._read(rows, columns, mda)

        else:
            #
            # Here we handle the case of partly defined channels.
            # (for example MSG's HRV channel)
            #
            image = None

            offset = 0
            offset_position = 0

            for region in (mda.boundaries - 1):
                offset += region[0] - offset_position
                offset_position = region[1] + 1

                rlines = slice(region[0], region[1] + 1)
                rcols = slice(region[2], region[3] + 1)

                # check is we are outside the region
                if (rows.start > rlines.stop or
                    rows.stop < rlines.start or
                    columns.start > rcols.stop or
                    columns.stop < rcols.start):
                    continue

                # Window to read, in region-local coordinates.
                lines = slice(max(rows.start, rlines.start) - offset,
                              min(rows.stop, rlines.stop) - offset)
                cols = slice(max(columns.start, rcols.start) - rcols.start,
                             min(columns.stop, rcols.stop) - rcols.start)
                rdata = self._read(lines, cols, mda)
                # Destination window within the output image.
                lines = slice(max(rows.start, rlines.start) - rows.start,
                              min(rows.stop, rlines.stop) - rows.start)
                cols = slice(max(columns.start, rcols.start) - columns.start,
                             min(columns.stop, rcols.stop) - columns.start)
                if image is None:
                    # Allocate the output lazily, once the dtype is known.
                    image = (numpy.zeros((rows.stop - rows.start,
                                          columns.stop - columns.start),
                                         dtype=rdata.dtype)
                             + mda.no_data_value)
                    if self.do_mask:
                        image = numpy.ma.masked_all_like(image)

                # Mirror the destination window for rotated images.
                if ns_ == "south":
                    lines = slice(image.shape[0] - lines.stop,
                                  image.shape[0] - lines.start)
                if ew_ == "east":
                    cols = slice(image.shape[1] - cols.stop,
                                 image.shape[1] - cols.start)
                if self.do_mask:
                    image.mask[lines, cols] = rdata.mask
                image[lines, cols] = rdata

        if not hasattr(image, 'shape'):
            logger.warning("Produced no image")
            return None, None

        #
        # Update meta-data
        #
        mda.area_extent = numpy.array(self._slice2extent(rows, columns, rotated=True), dtype=numpy.float64)

        if (rows != self._allrows) or (columns != self._allcolumns):
            mda.region_name = 'sliced'

        mda.data_type = 8*image.itemsize
        mda.image_size = numpy.array([image.shape[1], image.shape[0]])

        return mipp.mda.mslice(mda), image

    def __getitem__(self, item):
        """Default slicing, handles rotated images.
        """
        rows, columns = self._handle_item(item)
        ns_, ew_ = self.mda.first_pixel.split()
        # Translate the caller's indexes into the image's native
        # orientation before raw slicing.
        if ns_ == 'south':
            rows = slice(self.mda.image_size[1] - rows.stop,
                         self.mda.image_size[1] - rows.start)
        if ew_ == 'east':
            columns = slice(self.mda.image_size[0] - columns.stop,
                            self.mda.image_size[0] - columns.start)
        return self.raw_slicing((rows, columns))

    def __call__(self, area_extent=None):
        """Slice according to (ll_x, ll_y, ur_x, ur_y) or read full disc.
        """
        if area_extent == None:
            # full disc
            return self[:]

        # slice
        area_extent = tuple(area_extent)
        if len(area_extent) != 4:
            raise TypeError, "optional argument must be an area_extent"

        ns_, ew_ = self.mda.first_pixel.split()

        # Offsets relative to the caller's (north-west) orientation.
        if ns_ == "south":
            loff = self.mda.image_size[0] - self.mda.loff - 1
        else:
            loff = self.mda.loff - 1

        if ew_ == "east":
            coff = self.mda.image_size[1] - self.mda.coff - 1
        else:
            coff = self.mda.coff - 1


        row_size = self.mda.pixel_size[0]
        col_size = self.mda.pixel_size[1]

        logger.debug('area_extent: %.2f, %.2f, %.2f, %.2f'%tuple(area_extent))
        logger.debug('area_extent: resolution %.2f, %.2f'%(row_size, col_size))
        logger.debug('area_extent: loff, coff %d, %d'%(loff, coff))
        logger.debug('area_extent: expected size %d, %d'%\
                     (int(numpy.round((area_extent[2] - area_extent[0])/col_size)),\
                      int(numpy.round((area_extent[3] - area_extent[1])/row_size))))

        # Convert projection coordinates to pixel indexes (y grows
        # upward in projection space, hence the negated row sizes).
        col_start = int(numpy.round(area_extent[0] / col_size + coff + 0.5))
        row_stop = int(numpy.round(area_extent[1] / -row_size + loff - 0.5))
        col_stop = int(numpy.round(area_extent[2] / col_size + coff - 0.5))
        row_start = int(numpy.round(area_extent[3] / -row_size + loff + 0.5))

        row_stop += 1
        col_stop += 1

        logger.debug('area_extent: computed size %d, %d'%(col_stop - col_start, row_stop - row_start))

        return self[row_start:row_stop, col_start:col_stop]

    def _handle_item(self, item):
        """Transform item into slice(s).
        """
        if isinstance(item, slice):
            # specify rows and all columns
            rows, columns = item, self._allcolumns
        elif isinstance(item, int):
            # specify one row and all columns
            rows, columns = slice(item, item + 1), self._allcolumns
        elif isinstance(item, tuple):
            if len(item) == 2:
                # both row and column are specified
                rows, columns = item
                if isinstance(rows, int):
                    rows = slice(item[0], item[0] + 1)
                if isinstance(columns, int):
                    columns = slice(item[1], item[1] + 1)
            else:
                raise IndexError, "can only handle two indexes, not %d"%len(item)
        elif item == None:
            # full disc
            rows, columns = self._allrows, self._allcolumns
        else:
            raise IndexError, "don't understand the indexes"

        # take care of [:]
        if rows.start == None:
            rows = self._allrows
        if columns.start == None:
            columns = self._allcolumns

        if (rows.step != 1 and rows.step != None) or \
           (columns.step != 1 and columns.step != None):
            raise IndexError, "Currently we don't support steps different from one"

        return rows, columns

    def _slice2extent(self, rows, columns, rotated=True):
        """ Calculate area extent.
        If rotated=True then rows and columns are reflecting the actual rows and columns.
        """
        ns_, ew_ = self.mda.first_pixel.split()

        loff = self.mda.loff
        coff = self.mda.coff
        if ns_ == "south":
            loff = self.mda.image_size[0] - loff - 1
            if rotated:
                rows = slice(self.mda.image_size[1] - rows.stop,
                             self.mda.image_size[1] - rows.start)
        else:
            loff -= 1
        if ew_ == "east":
            coff = self.mda.image_size[1] - coff - 1
            if rotated:
                columns = slice(self.mda.image_size[0] - columns.stop,
                                self.mda.image_size[0] - columns.start)
        else:
            coff -= 1

        logger.debug('slice2extent: size %d, %d'% \
                     (columns.stop - columns.start, rows.stop - rows.start))
        # Work on inclusive stop indexes from here on.
        rows = slice(rows.start, rows.stop - 1)
        columns = slice(columns.start, columns.stop - 1)

        row_size = self.mda.pixel_size[0]
        col_size = self.mda.pixel_size[1]

        # Pixel centers -> outer pixel edges (the +/- 0.5 factors).
        ll_x = (columns.start - coff - 0.5)*col_size
        ll_y = -(rows.stop - loff + 0.5)*row_size
        ur_x = (columns.stop - coff + 0.5)*col_size
        ur_y = -(rows.start - loff - 0.5)*row_size

        logger.debug('slice2extent: computed extent %.2f, %.2f, %.2f, %.2f'% \
                     (ll_x, ll_y, ur_x, ur_y))
        logger.debug('slice2extent: computed size %d, %d'% \
                     (int(numpy.round((ur_x - ll_x)/col_size)), \
                      int(numpy.round((ur_y - ll_y)/row_size))))

        return [ll_x, ll_y, ur_x, ur_y]

    def _read(self, rows, columns, mda):
        """Read the requested row/column window from the segment files
        and return it as a 2D array, masked and/or calibrated as
        configured on this loader."""
        shape = (rows.stop - rows.start, columns.stop - columns.start)
        if (columns.start < 0 or
            columns.stop > mda.image_size[0] or
            rows.start < 0 or
            rows.stop > mda.image_size[1]):
            raise IndexError, "index out of range"

        image_files = self.image_files

        #
        # Order segments
        #
        segments = {}
        for f in image_files:
            s = _xrit.read_imagedata(f)
            segments[s.segment.seg_no] = f
        start_seg_no = s.segment.planned_start_seg_no
        end_seg_no = s.segment.planned_end_seg_no
        ncols = s.structure.nc
        segment_nlines = s.structure.nl

        #
        # Data type
        #
        converter = _null_converter
        if mda.data_type == 8:
            data_type = numpy.uint8
            data_type_len = 8
        elif mda.data_type == 10:
            # 10-bit packed data is expanded to 16-bit words.
            converter = convert.dec10216
            data_type = numpy.uint16
            data_type_len = 16
        elif mda.data_type == 16:
            data_type = numpy.uint16
            data_type_len = 16
        else:
            raise mipp.ReaderError, "unknown data type: %d bit per pixel"\
                %mda.data_type

        #
        # Calculate initial and final line and column.
        # The interface 'load(..., center, size)' will produce
        # correct values relative to the image orientation.
        # line_init, line_end : 1-based
        #
        line_init = rows.start + 1
        line_end = line_init + rows.stop - rows.start - 1
        col_count = shape[1]
        col_offset = (columns.start)*data_type_len//8

        #
        # Calculate initial and final segments
        # depending on the image orientation.
        # seg_init, seg_end : 1-based.
        #
        seg_init = ((line_init-1)//segment_nlines) + 1
        seg_end = ((line_end-1)//segment_nlines) + 1

        #
        # Calculate initial line in image, line increment
        # offset for columns and factor for columns,
        # and factor for columns, depending on the image
        # orientation
        #
        if mda.first_pixel == 'north west':
            first_line = 0
            increment_line = 1
            factor_col = 1
        elif mda.first_pixel == 'north east':
            first_line = 0
            increment_line = 1
            factor_col = -1
        elif mda.first_pixel == 'south west':
            first_line = shape[0] - 1
            increment_line = -1
            factor_col = 1
        elif mda.first_pixel == 'south east':
            first_line = shape[0] - 1
            increment_line = -1
            factor_col = -1
        else:
            raise mipp.ReaderError, "unknown geographical orientation of " + \
                "first pixel: '%s'"%mda.first_pixel

        #
        # Generate final image with no data
        #
        image = numpy.zeros(shape, dtype=data_type) + mda.no_data_value

        #
        # Begin the segment processing.
        #
        seg_no = seg_init
        line_in_image = first_line
        while seg_no <= seg_end:
            line_in_segment = 1

            #
            # Calculate initial line in actual segment.
            #
            if seg_no == seg_init:
                init_line_in_segment = (line_init
                                        - (segment_nlines*(seg_init - 1)))
            else:
                init_line_in_segment = 1

            #
            # Calculate final line in actual segment.
            #
            if seg_no == seg_end:
                end_line_in_segment = line_end - (segment_nlines*(seg_end - 1))
            else:
                end_line_in_segment = segment_nlines

            #
            # Open segment file.
            #
            seg_file = segments.get(seg_no, None)
            if not seg_file:
                #
                # No data for this segment.
                #
                logger.warning("Segment number %d not found"%seg_no)

                # all image lines are already set to no-data count.
                line_in_segment = init_line_in_segment
                while line_in_segment <= end_line_in_segment:
                    line_in_segment += 1
                    line_in_image += increment_line
            else:
                #
                # Data for this segment.
                #
                logger.info("Read %s"%seg_file)
                seg = _xrit.read_imagedata(seg_file)

                #
                # Skip lines not processed.
                #
                while line_in_segment < init_line_in_segment:
                    line = seg.readline()
                    line_in_segment += 1

                #
                # Reading and processing segment lines.
                #
                while line_in_segment <= end_line_in_segment:
                    line = seg.readline()[mda.line_offset:]
                    line = converter(line)

                    line = (numpy.frombuffer(line,
                                             dtype=data_type,
                                             count=col_count,
                                             offset=col_offset)[::factor_col])

                    #
                    # Insert image data.
                    #
                    image[line_in_image] = line

                    line_in_segment += 1
                    line_in_image += increment_line

                seg.close()

            seg_no += 1

        #
        # Compute mask before calibration
        #

        mask = (image == mda.no_data_value)

        #
        # Calibrate ?
        #
        mda.is_calibrated = False
        if self.do_calibrate:
            # do this before masking.
            calibrate = self.do_calibrate
            if isinstance(calibrate, bool):
                # allow boolean True/False for 1/0
                calibrate = int(calibrate)
            image, mda.calibration_unit = mda.calibrate(image, calibrate=calibrate)
            mda.is_calibrated = True

        #
        # With or without mask ?
        #
        if self.do_mask and not isinstance(image, numpy.ma.core.MaskedArray):
            image = numpy.ma.array(image, mask=mask, copy=False)
        elif ((not self.do_mask) and
              isinstance(image, numpy.ma.core.MaskedArray)):
            image = image.filled(mda.no_data_value)

        return image
+
diff --git a/mipp/xrit/mda.py b/mipp/xrit/mda.py
new file mode 100644
index 0000000..a5c3f75
--- /dev/null
+++ b/mipp/xrit/mda.py
@@ -0,0 +1,13 @@
+#
+# $Id$
+#
+import mipp.mda
+
class Metadata(mipp.mda.Metadata):
    """XRIT flavour of the generic mipp metadata container."""
    # NOTE(review): presumably the key/value separator used by the base
    # class when (de)serializing -- confirm in mipp.mda.
    token = ':'
    # NOTE(review): presumably attributes skipped by the base class when
    # saving/printing -- confirm in mipp.mda.
    ignore_attributes = ('line_offset', 'first_pixel',
                         'coff', 'loff',
                         'image_data', 'boundaries')
if __name__ == '__main__':
    # Ad-hoc CLI check: read a metadata file and print it.
    import sys
    print Metadata().read(sys.argv[1])
diff --git a/mipp/xrit/sat.py b/mipp/xrit/sat.py
new file mode 100644
index 0000000..d88bb4f
--- /dev/null
+++ b/mipp/xrit/sat.py
@@ -0,0 +1,247 @@
+#
+# $Id$
+#
+import numpy
+import glob
+import imp
+import types
+import re
+
+import logging
+logger = logging.getLogger('mipp')
+
+import mipp
+import mipp.cfg
+from mipp.xrit import _xrit
+from mipp.xrit.loader import ImageLoader
+
+__all__ = ['load_meteosat07',
+ 'load_meteosat09',
+ 'load_goes11',
+ 'load_goes12',
+ 'load_goes13',
+ 'load_mtsat1r',
+ 'load_mtsat2',
+ 'load_electrol',
+ 'load',
+ 'load_files']
+
# If True, a mismatch between the configured sub-satellite longitude and
# the one found in the data raises ReaderError; if False the value read
# from the data wins and a warning is logged (see
# SatelliteLoader._read_metadata).
CHECK_CONFIG_SUBLON = False
+
class SatelliteLoader(object):
    """Load XRIT data for one (geostationary) satellite.

    Currently this one only works for geos satellites.

    load()/load_files() return an ImageLoader proxy, where access to the
    data is like:
        image[:], image[] or image() will return full disk
        image[2:56, 1020:1070]
        image.area_extent(area_extent)
    """

    def __init__(self, config_reader):
        """Set up the loader from a satellite configuration reader.

        Raises mipp.ReaderError for non-GEOS projections or an unknown
        level-1 format.
        """
        #
        # Read configuration file based on satellite name
        #
        sat = config_reader('satellite')
        projname = sat['projection'].lower()
        if not projname.startswith('geos'):
            raise mipp.ReaderError(
                "currently we only support projections of type: 'GEOS'")

        #
        # Load format decoder based on level1 format
        #
        # ('fmt' instead of 'format': don't shadow the builtin.)
        fmt = config_reader('level1')['format']
        try:
            args = imp.find_module(fmt)
        except ImportError:
            raise mipp.ReaderError("unknown level-1 format: '%s'" % fmt)
        try:
            mdl = imp.load_module(fmt, *args)
        finally:
            # imp.find_module returns an open file object for plain modules.
            if args[0]:
                args[0].close()

        self._metadata_reader = mdl.read_metadata

        #
        # Attributing
        #
        self.__dict__.update(sat)

        self._config_reader = config_reader
        self.satname = self.satname + self.number
        self.satnumber = self.number
        delattr(self, 'number')

        # Backwards compatible: derive proj4 parameters from a projection
        # name like 'geos(0.0)' when none are configured.
        if not hasattr(self, 'proj4_params'):
            try:
                sublon = float(projname.split('(')[1].split(')')[0])
            except (IndexError, ValueError):
                raise mipp.ReaderError(
                    "Could not determine sub satellite point from "
                    "projection name '%s'" % projname)
            self.proj4_params = ("proj=geos lon_0=%.2f lat_0=0.00 "
                                 "a=6378169.00 b=6356583.80 h=35785831.00"
                                 % sublon)

    def load(self, time_stamp, channel, **kwarg):
        """Locate prologue, image segments and (optional) epilogue files
        for *time_stamp*/*channel*, then delegate to load_files().

        Raises mipp.ReaderError for an unknown channel and mipp.NoFiles
        when mandatory files are missing.
        """
        if channel not in self._config_reader.channel_names:
            raise mipp.ReaderError("unknown channel name '%s'" % channel)
        opt = self._config_reader('level1')
        val = {"channel": channel + '*'}

        # Prologue.
        val["segment"] = "PRO".ljust(9, '_')
        filename_pro = opt.get('filename_pro', opt['filename'])
        prologue = glob.glob(
            opt['dir'] + '/' + (time_stamp.strftime(filename_pro) % val))
        if not prologue:
            raise mipp.NoFiles("missing prologue file: '%s'"
                               % (time_stamp.strftime(filename_pro) % val))
        prologue = prologue[0]

        # Regular channel segments.
        val["segment"] = "0????????"
        image_files = glob.glob(
            opt['dir'] + '/' + time_stamp.strftime(opt['filename']) % val)
        if not image_files:
            raise mipp.NoFiles("no data files: '%s'"
                               % (time_stamp.strftime(opt['filename']) % val))
        image_files.sort()

        logger.info("Read %s" % prologue)
        prologue = _xrit.read_prologue(prologue)

        # Epilogue (optional).
        val["segment"] = "EPI".ljust(9, '_')
        # BUGFIX: look up the 'filename_epi' option (the original read
        # 'filename_pro' here, a copy-paste error).
        filename_epi = opt.get('filename_epi', opt['filename'])
        epilogue = glob.glob(
            opt['dir'] + '/' + (time_stamp.strftime(filename_epi) % val))
        if not epilogue:
            logger.info("No epilogue file to read.")
            return self.load_files(prologue, image_files, **kwarg)
        epilogue = epilogue[0]
        logger.info("Read %s" % epilogue)
        epilogue = _xrit.read_epilogue(epilogue)
        return self.load_files(prologue, image_files,
                               epilogue=epilogue, **kwarg)

    def load_files(self, prologue, image_files, only_metadata=False, **kwargs):
        """Load metadata (and, unless only_metadata, an image proxy)."""
        image_files.sort()
        if only_metadata:
            return self._read_metadata(prologue, image_files, **kwargs)
        return self._read(prologue, image_files, **kwargs)

    def _read_metadata(self, prologue, image_files, epilogue=None):
        """Read metadata and reconcile it with the configuration."""
        if epilogue:
            mda = self._metadata_reader(prologue, image_files,
                                        epilogue=epilogue)
        else:
            mda = self._metadata_reader(prologue, image_files)

        # Compare at two-decimals precision; the string compare avoids
        # float rounding noise.
        if "%.2f" % mda.sublon != "%.2f" % self.sublon:
            if CHECK_CONFIG_SUBLON:
                raise mipp.ReaderError(
                    "Sub satellite point in config file (%.2f) don't "
                    "match data (%.2f)" % (self.sublon, mda.sublon))
            logger.warning("Modifying sub satellite point from %.2f to %.2f"
                           % (self.sublon, mda.sublon))
            self.sublon = mda.sublon

        chn = self._config_reader.get_channel(mda.channel)
        if mda.image_size[0] != chn.size[0]:
            raise mipp.ReaderError("unknown image width for %s, %s: %d"
                                   % (self.satname, mda.channel,
                                      mda.image_size[0]))

        mda.pixel_size = numpy.array([chn.resolution, chn.resolution],
                                     dtype=numpy.float64)
        # Copy public, non-callable satellite attributes onto the metadata.
        for key, value in self.__dict__.items():
            if key[0] != '_' and type(value) != types.FunctionType:
                setattr(mda, key, value)

        # NOTE(review): the result is unused; presumably this is a sanity
        # read of the first segment -- confirm before removing.
        img = _xrit.read_imagedata(image_files[0])

        return mda

    def _read(self, prologue, image_files, epilogue=None, **kwargs):
        """Read metadata and return an ImageLoader proxy for the data."""
        if epilogue:
            mda = self._read_metadata(prologue, image_files,
                                      epilogue=epilogue)
        else:
            mda = self._read_metadata(prologue, image_files)
        # Total size: (width + line_offset) * height * bits-per-pixel / 8.
        len_img = (((mda.image_size[0] + mda.line_offset) *
                    mda.image_size[1]) * mda.data_type) // 8
        logger.info("Data size: %dx%d pixels, %d bytes, %d bits per pixel",
                    mda.image_size[0], mda.image_size[1], len_img,
                    mda.data_type)

        #
        # Return a proxy slicer
        #
        return ImageLoader(mda, image_files, **kwargs)

    #
    # Manipulate proj4's lon_0 parameter
    #
    # Raw string: '\S' is a regex escape, not a Python one.
    _sublon_re = re.compile(r'(lon_0)=(\S+)')

    def _get_sublon(self):
        m = self._sublon_re.search(self.proj4_params)
        if m:
            return float(m.group(2))
        # Py3-compatible raise (was: raise TypeError, msg).
        raise TypeError("'SatelliteLoader' object (attribute proj4_params) "
                        "has no 'sublon' attribute")

    def _set_sublon(self, slon):
        slon = "lon_0=%.2f" % float(slon)
        p = self.proj4_params
        m = self._sublon_re.search(p)
        if m:
            self.proj4_params = p.replace(m.group(0), slon)
        else:
            self.proj4_params += " %s" % slon

    sublon = property(_get_sublon, _set_sublon)
+
+#-----------------------------------------------------------------------------
+#
+# Interface
+#
+#-----------------------------------------------------------------------------
def load_files(prologue, image_files, epilogue=None, **kwarg):
    """Load from explicit prologue/segment (and optional epilogue) files.

    *prologue* and *epilogue* may be given either as file names or as
    already-parsed header objects; the satellite name is taken from the
    prologue's platform field.
    """
    # isinstance() instead of type(x) == type('string').
    if isinstance(prologue, str):
        logger.info("Read %s" % prologue)
        prologue = _xrit.read_prologue(prologue)
    if epilogue and isinstance(epilogue, str):
        logger.info("Read %s" % epilogue)
        epilogue = _xrit.read_epilogue(epilogue)
    satname = prologue.platform.lower()
    return SatelliteLoader(mipp.cfg.read_config(satname)).load_files(
        prologue, image_files, epilogue=epilogue, **kwarg)
+
def load(satname, time_stamp, channel, **kwarg):
    """Load *channel* for *satname* at *time_stamp* (configured dirs)."""
    loader = SatelliteLoader(mipp.cfg.read_config(satname))
    return loader.load(time_stamp, channel, **kwarg)
+
def load_meteosat07(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for Meteosat-7."""
    return load('meteosat07', time_stamp, channel, **kwargs)

def load_meteosat09(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for Meteosat-9."""
    return load('meteosat09', time_stamp, channel, **kwargs)

def load_goes11(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for GOES-11."""
    return load('goes11', time_stamp, channel, **kwargs)

def load_goes12(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for GOES-12."""
    return load('goes12', time_stamp, channel, **kwargs)

def load_goes13(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for GOES-13."""
    return load('goes13', time_stamp, channel, **kwargs)

def load_mtsat1r(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for MTSAT-1R."""
    return load('mtsat1r', time_stamp, channel, **kwargs)

def load_mtsat2(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for MTSAT-2."""
    return load('mtsat2', time_stamp, channel, **kwargs)

def load_electrol(time_stamp, channel, **kwargs):
    """Convenience wrapper: load() for Electro-L."""
    return load('electrol', time_stamp, channel, **kwargs)
+
+#-----------------------------------------------------------------------------
diff --git a/mipp/xsar/CSK.py b/mipp/xsar/CSK.py
new file mode 100644
index 0000000..18b4eb1
--- /dev/null
+++ b/mipp/xsar/CSK.py
@@ -0,0 +1,141 @@
+#
+# WORK IN PROGRESS for CosmoSkyMed
+#
+import os
+import copy
+import numpy as np
+from datetime import datetime, timedelta
+from lxml import etree
+
+import logging
+logger = logging.getLogger('mipp')
+
+import mipp
+from mipp import geotiff
+from mipp.xsar import Metadata
+
+__all__ = ['read_metadata', 'read_image']
+
+class _Calibrator(object):
+ def __init__(self, mda):
+ self.error = "Unknown calibration"
+
+ def __call__(self, image, calibrate=1):
+ if calibrate == 0:
+ return (image,
+ 'counts')
+ raise mipp.CalibrationError, self.error
+
def read_metadata(xmlbuffer):
    """Parse a CosmoSkyMed XML metadata buffer into a Metadata object.

    Raises mipp.ReaderError when the satellite ID does not identify a
    CosmoSkyMed product.
    """
    mda = Metadata()

    # Special decoders
    def dec_timeformat(strn):
        # "YYYY-mm-dd HH:MM:SS.ffffff" with a variable fraction part.
        strn = strn.split('.')
        return (datetime.strptime(strn[0], "%Y-%m-%d %H:%M:%S") +
                timedelta(seconds=float('.' + strn[1])))

    def dec_orbit_number(strn):
        return int(strn[:5])

    attributes = (
        ('_ROOT_/Attribute', {
            'Satellite ID': ('satellite_name', str),
            'Product Filename': ('image_filename', str),
            'Product Type': ('product_type', str),
            'Acquisition Station ID': ('facility_id', str),
            'Scene Sensing Start UTC': ('time_start', dec_timeformat),
            'Scene Sensing Stop UTC': ('time_stop', dec_timeformat),
            'Orbit Number': ('orbit_number', dec_orbit_number),
            'Sample Format': ('product_format', str),
            'Image Scale': ('image_scale', str),
            'Image Layers': ('layers', int),
            'Bits per Sample': ('bits_per_sample', int),
            'Samples per Pixel': ('samples_per_pixel', int),
            }),

        ('MBI/Attribute', {
            'Column Spacing': ('sample_spacing', float),
            'Line Spacing': ('line_spacing', float)
            }),

        ('S01/Attribute', {
            'Polarisation': ('polarisation', str),
            }),
        )

    tree = etree.fromstring(xmlbuffer)

    #
    # Get attributes
    #
    for path, attr in attributes:
        # BUGFIX for Python 3: dict views have no remove(), so make a
        # real list of the still-wanted names.
        names = list(attr.keys())
        for node in tree.xpath(path):
            name = node.attrib['Name']
            if name in names:
                names.remove(name)
                val = node.text
                setattr(mda, attr[name][0], attr[name][1](val))

    satid = 'CSK'
    if not mda.satellite_name.upper().startswith(satid):
        # BUGFIX: message typo ("does now start" -> "does not start").
        raise mipp.ReaderError(
            "This does not look like a CosmoSkymed product, " +
            "satellite ID does not start with '%s'" % satid)

    # The image itself is a GeoTIFF next to the metadata file.
    mda.image_filename = os.path.splitext(mda.image_filename)[0] + '.MBI.tif'

    mda.no_data_value = 0
    mda.calibrated = 'NOTCALIBRATED'

    return mda
+
def read_image(mda, filename=None, mask=True, calibrate=1):
    """Read the GeoTIFF image described by *mda*; return (metadata, data).

    Optionally calibrates (see _Calibrator) and masks no-data pixels.
    """
    mda = copy.copy(mda)
    mda.calibrate = _Calibrator(mda)
    mda.calibration_unit = 'counts'
    mda.is_calibrated = False
    del mda.calibrated
    mda.product_name = '_'.join((mda.time_start.strftime("%Y%m%d_%H%M%S"),
                                 mda.satellite_name,
                                 mda.product_type,
                                 mda.polarisation))
    logger.info('Product name: %s' % mda.product_name)

    if not filename:
        filename = mda.image_filename

    params, data = geotiff.read_geotiff(filename)
    area_def = geotiff.tiff2areadef(params['projection'],
                                    params['geotransform'],
                                    data.shape)

    mda.proj4_params = area_def.proj4_string.replace('+', '')
    mda.area_extent = area_def.area_extent
    mda.tiff_params = params

    if calibrate:
        data, mda.calibration_unit = mda.calibrate(data, calibrate)
        mda.is_calibrated = True
        logger.info('calibrated: %s %s [%.2f -> %.2f -> %.2f] %s'
                    % (str(data.shape), data.dtype, data.min(),
                       data.mean(), data.max(), mda.calibration_unit))

    if mask:
        bad = (data == mda.no_data_value)
        data = np.ma.array(data, mask=bad, copy=False)

    return mda, data
+
if __name__ == '__main__':
    # Ad-hoc test harness: parse the XML file given on the command line
    # and read the corresponding (uncalibrated) image.
    import sys
    with open(sys.argv[1]) as fp:
        _mda = read_metadata(fp.read())
    _mda.image_filename = os.path.dirname(sys.argv[1]) + '/' + \
                          _mda.image_filename
    # print() call form works on both Python 2 and Python 3.
    print(_mda)
    _mda, _data = read_image(_mda, calibrate=False)
    print("%s %s %s" % (_data.min(), _data.mean(), _data.max()))
diff --git a/mipp/xsar/RS2.py b/mipp/xsar/RS2.py
new file mode 100644
index 0000000..c2037f3
--- /dev/null
+++ b/mipp/xsar/RS2.py
@@ -0,0 +1,100 @@
+#
+# WORK IN PROGRESS for Radarsat-2
+#
+import numpy
+from datetime import datetime
+from lxml import etree
+
+from mipp.xsar import Metadata
+
class TiePoints(object):
    """Matching image (line, pixel) and geodetic (lat, lon) tie points."""

    def __init__(self, image, geodedic):
        # Parallel (N, 2) coordinate arrays; row i of *image* maps to
        # row i of *geodedic*.
        self.image = image
        self.geodedic = geodedic
+
def read_metadata(xml_file):
    """Parse a Radarsat-2 product XML file into a Metadata object.

    Plain attributes are read via XPath; the geolocation grid, when
    present, is collected into metadata.tiepoints (a TiePoints object).
    """
    metadata = Metadata()

    # XML Namespace
    ns_rs2 = {'xsi': 'http://www.rsi.ca/rs2/prod/xml/schemas'}

    # Special decoders
    def dec_isoformat(rts):
        return datetime.strptime(rts, "%Y-%m-%dT%H:%M:%S.%fZ")

    def dec_orbit_number(rts):
        return int(rts[:5])

    attributes = {
        'product_id': ('xsi:productId', str),
        'satellite_name': ('xsi:sourceAttributes/xsi:satellite', str),
        'sensor_name': ('xsi:sourceAttributes/xsi:sensor', str),
        'beam_mode': ('xsi:sourceAttributes/xsi:beamModeMnemonic', str),
        'facility_id': ('xsi:sourceAttributes/xsi:inputDatasetFacilityId', str),
        'start_time': ('xsi:sourceAttributes/xsi:rawDataStartTime', dec_isoformat),
        'orbit_number': ('xsi:sourceAttributes/xsi:orbitAndAttitude/xsi:orbitInformation/xsi:orbitDataFile', dec_orbit_number),
        'product_format': ('xsi:imageAttributes/xsi:productFormat', str),
        'bits_per_sample': ('xsi:imageAttributes/xsi:rasterAttributes/xsi:bitsPerSample', int),
        'samples': ('xsi:imageAttributes/xsi:rasterAttributes/xsi:numberOfSamplesPerLine', int),
        'lines': ('xsi:imageAttributes/xsi:rasterAttributes/xsi:numberOfLines', int),
        'sample_spacing': ('xsi:imageAttributes/xsi:rasterAttributes/xsi:sampledPixelSpacing', float),
        'line_spacing': ('xsi:imageAttributes/xsi:rasterAttributes/xsi:sampledLineSpacing', float),
        'data_files': ('xsi:imageAttributes/xsi:fullResolutionImageData', str),
        'center_lat': ('centre_lat', str),
        'center_lon': ('centre_lon', str),
        'tie_point_lines': ('tie_point_lines', str),
        'tie_point_samples': ('tie_point_samples', str),
        'tie_point_line_jump': ('tie_point_line_jump', str),
        'tie_point_sample_jump': ('tie_point_sample_jump', str)}

    tree = etree.parse(xml_file)

    # Get some attributes.  Entries whose "path" carries no 'xsi' prefix
    # are placeholders without an XPath and are skipped.
    for key, (att, dec) in attributes.items():
        if not att.startswith('xsi'):
            continue
        rec = tree.xpath(att, namespaces=ns_rs2)
        if not rec:
            # Robustness: a missing element no longer raises IndexError;
            # the attribute is simply not set.
            continue
        if len(rec) > 1:
            val = tuple(dec(i.text) for i in rec)
        else:
            val = dec(rec[0].text)
        setattr(metadata, key, val)

    #
    # Get tiepoints
    #
    tiepoints_xpath = 'xsi:imageAttributes/xsi:geographicInformation/xsi:geolocationGrid/xsi:imageTiePoint'
    tiepoints_tree = tree.xpath(tiepoints_xpath, namespaces=ns_rs2)
    tiepoints_count = len(tiepoints_tree)

    pix_coordinates = numpy.zeros((tiepoints_count, 2))
    geo_coordinates = numpy.zeros((tiepoints_count, 2))
    counter = 0
    for elm in tiepoints_tree:
        if elm.tag.endswith('imageTiePoint'):
            pixel, line, lat, lon = None, None, None, None
            for i in elm.iter():
                if i.getparent().tag.endswith('imageCoordinate'):
                    if i.tag.endswith('pixel'):
                        pixel = float(i.text)
                    elif i.tag.endswith('line'):
                        line = float(i.text)
                elif i.getparent().tag.endswith('geodeticCoordinate'):
                    if i.tag.endswith('latitude'):
                        lat = float(i.text)
                    elif i.tag.endswith('longitude'):
                        lon = float(i.text)
            # Only complete tie points are recorded.
            if None not in (pixel, line, lat, lon):
                pix_coordinates[counter] = [line, pixel]
                geo_coordinates[counter] = [lat, lon]
                counter += 1

    if counter > 0:
        setattr(metadata, 'tiepoints',
                TiePoints(pix_coordinates, geo_coordinates))

    return metadata
+
if __name__ == '__main__':
    # Ad-hoc test harness: dump metadata and tie points of the product
    # XML given on the command line.
    import sys
    mda = read_metadata(sys.argv[1])
    # print() call form works on both Python 2 and Python 3.
    print(mda)
    print(mda.tiepoints.image)
    print(mda.tiepoints.geodedic)
diff --git a/mipp/xsar/TSX.py b/mipp/xsar/TSX.py
new file mode 100644
index 0000000..d1bd500
--- /dev/null
+++ b/mipp/xsar/TSX.py
@@ -0,0 +1,169 @@
+#
+#
+#
+import copy
+import numpy as np
+from datetime import datetime
+from lxml import etree
+from osgeo import osr
+
+import mipp
+from mipp import geotiff
+from mipp.xsar import Metadata
+
+import logging
+logger = logging.getLogger('mipp')
+
+no_data_value = 0
+
def _tiff2areadef(projection, geotransform, shape):
    """Build a pyresample area definition from GeoTIFF georeferencing.

    *projection* is a WKT string, *geotransform* the six GDAL
    coefficients, *shape* the (lines, samples) image shape.
    """
    import pyresample

    # WKT projection -> proj4 parameter dict.
    srs = osr.SpatialReference()
    srs.ImportFromWkt(projection)
    proj4_dict = {}
    for token in srs.ExportToProj4().replace('+', '').split():
        parts = token.split('=')
        if len(parts) != 2:
            # Flags without a value are ignored.
            continue
        proj4_dict[parts[0].strip()] = parts[1].strip()

    # Corner coordinates from the geotransform.
    area_extent = [geotransform[0],
                   geotransform[3] + geotransform[5]*shape[0],
                   geotransform[0] + geotransform[1]*shape[1],
                   geotransform[3]]

    area_id = proj4_dict['proj']
    if area_id.lower() == 'utm':
        area_id += proj4_dict['zone']
    # Give it some kind of ID.
    area_name = area_id + '_' + str(int(sum(geotransform)/1000.))

    return pyresample.utils.get_area_def(area_name, area_name, area_id,
                                         proj4_dict,
                                         shape[1], shape[0],
                                         area_extent)
+
+class _Calibrator(object):
+ def __init__(self, mda):
+ self.factor = mda.calibration_factor
+ self.unit = mda.calibration_unit.lower()
+ self.error = None
+ if mda.calibrated != 'CALIBRATED':
+ self.error = "Data is not calibrated"
+ if mda.beamid != mda.calibration_beamid:
+ self.error = \
+ "BeamID for image data and calibration factor don't match"
+ if mda.calibration_unit.lower() != 'radar-brightness':
+ self.error = "Unknown calibration unit '%s'" % (
+ self.calibration_unit.lower())
+
+ def __call__(self, image, calibrate=1):
+ if calibrate == 0:
+ return (image,
+ 'counts')
+ if self.error:
+ raise mipp.CalibrationError, self.error
+ return (image*image*self.factor,
+ self.unit)
+
def read_metadata(xmlbuffer):
    """Parse a TSX XML metadata buffer into a Metadata object.

    Raises mipp.ReaderError if the buffer does not describe a TSX SAR
    level 1B product.
    """

    # Special decoders
    def dec_isoformat(rts):
        return datetime.strptime(rts, "%Y-%m-%dT%H:%M:%S.%fZ")

    def dec_orbit_number(rts):
        return int(rts[:5])

    def dec_satellite_name(rts):
        return rts.replace('-', '')

    def dec_calibration_unit(rts):
        # Normalize, e.g. 'RADAR BRIGHTNESS' -> 'radar-brightness'.
        # (Removed an unused '_trans' translation table that was dead
        # code in the original.)
        return rts.replace(' ', '-').lower()

    attributes = {
        'product_level': ('generalHeader/itemName', str),
        'satellite_name': ('productInfo/missionInfo/mission', dec_satellite_name),
        'orbit_number': ('productInfo/missionInfo/absOrbit', dec_orbit_number),
        'sensor_type': ('productInfo/acquisitionInfo/sensor', str),
        'beam_mode': ('productInfo/acquisitionInfo/imagingMode', str),
        'polarisation': ('productInfo/acquisitionInfo/polarisationList/polLayer', str),
        'beamid': ('productInfo/acquisitionInfo/elevationBeamConfiguration', str),
        'calibrated': ('productInfo/productVariantInfo/radiometricCorrection', str),
        'calibration_factor': ('calibration/calibrationConstant/calFactor', float),
        'calibration_beamid': ('calibration/calibrationConstant/beamID', str),
        'calibration_unit': ('productInfo/imageDataInfo/pixelValueID', dec_calibration_unit),
        'image_data_path': ('productComponents/imageData/file/location/path', str),
        'image_data_filename': ('productComponents/imageData/file/location/filename', str),
        'time_start': ('productInfo/sceneInfo/start/timeUTC', dec_isoformat),
        'center_coor_lat': ('productInfo/sceneInfo/sceneCenterCoord/lat', float),
        'center_coor_lon': ('productInfo/sceneInfo/sceneCenterCoord/lon', float)
        }

    check_attributes = {'product_level': 'level 1b product',
                        'satellite_name': 'tsx',
                        'sensor_type': 'sar'}

    tree = etree.fromstring(xmlbuffer)

    # Check satellite, sensor and product level
    for key, val in check_attributes.items():
        try:
            path = attributes[key][0]
            attr = tree.xpath(path)[0].text.lower()
            if not attr.startswith(val):
                raise mipp.ReaderError("This does not look like a TSX SAR " +
                                       "Level 1B Product, %s is '%s' expected '%s'" %
                                       (key, attr, val))
        except IndexError:
            raise mipp.ReaderError("This does not look like a TSX SAR " +
                                   "Level 1B Product, could not find attribute '%s' (%s)" %
                                   (key, path))

    mda = Metadata()
    for key, val in attributes.items():
        setattr(mda, key, val[1](tree.xpath(val[0])[0].text))
    # Join path and filename, then drop the two helper attributes.
    mda.image_filename = (mda.image_data_path + '/' + mda.image_data_filename)
    delattr(mda, 'image_data_path')
    delattr(mda, 'image_data_filename')
    return mda
+
def read_image(mda, filename=None, mask=True, calibrate=1):
    """Read the TSX GeoTIFF described by *mda*; return (metadata, data).

    Optionally calibrates (see _Calibrator) and masks no-data pixels.
    """
    mda = copy.copy(mda)
    mda.calibrate = _Calibrator(mda)
    mda.calibration_unit = 'counts'
    mda.is_calibrated = False
    del mda.calibrated
    mda.product_name = '_'.join((mda.time_start.strftime("%Y%m%d_%H%M%S"),
                                 mda.satellite_name,
                                 mda.sensor_type,
                                 mda.beam_mode,
                                 mda.polarisation))
    logger.info('Product name: %s' % mda.product_name)

    if not filename:
        filename = mda.image_filename

    params, data = geotiff.read_geotiff(filename)
    area_def = geotiff.tiff2areadef(params['projection'],
                                    params['geotransform'],
                                    data.shape)

    mda.proj4_params = area_def.proj4_string.replace('+', '')
    mda.area_extent = area_def.area_extent
    mda.tiff_params = params

    mda.no_data_value = no_data_value

    if calibrate:
        data, mda.calibration_unit = mda.calibrate(data, calibrate)
        mda.is_calibrated = True
        logger.info('calibrated: %s %s [%.2f -> %.2f -> %.2f] %s'
                    % (str(data.shape), data.dtype, data.min(),
                       data.mean(), data.max(), mda.calibration_unit))

    if mask:
        bad = (data == no_data_value)
        data = np.ma.array(data, mask=bad, copy=False)

    return mda, data
diff --git a/mipp/xsar/__init__.py b/mipp/xsar/__init__.py
new file mode 100644
index 0000000..04c5607
--- /dev/null
+++ b/mipp/xsar/__init__.py
@@ -0,0 +1,3 @@
+#
+from mipp.xsar import sat
+from mipp.xsar.mda import Metadata
diff --git a/mipp/xsar/mda.py b/mipp/xsar/mda.py
new file mode 100644
index 0000000..ce7ccee
--- /dev/null
+++ b/mipp/xsar/mda.py
@@ -0,0 +1,14 @@
+#
+# $Id$
+#
+import mipp.mda
+
class Metadata(mipp.mda.Metadata):
    """xSAR flavoured metadata.

    Uses ':' as the key/value separator.  The attributes listed in
    ignore_attributes are internal objects; presumably they are skipped
    when the metadata is serialized (see mipp.mda.Metadata).
    """
    token = ':'
    ignore_attributes = ('data', 'calibrate', 'tiepoints')
+
if __name__ == '__main__':
    # Ad-hoc test harness: dump the metadata of the file given on the
    # command line.
    import sys
    # print() call form works on both Python 2 and Python 3.
    print(Metadata().read(sys.argv[1]))
diff --git a/mipp/xsar/sat.py b/mipp/xsar/sat.py
new file mode 100644
index 0000000..8704a65
--- /dev/null
+++ b/mipp/xsar/sat.py
@@ -0,0 +1,173 @@
+#
+# $Id$
+#
+import os
+import tarfile
+import glob
+import fnmatch
+import imp
+
+import logging
+logger = logging.getLogger('mipp')
+
+import mipp
+import mipp.cfg
+
+__all__ = ['load']
+
+def _find_in_tarfile(tfile, fname):
+ tar = tarfile.open(tfile)
+ try:
+ for name in tar.getnames():
+ if name.endswith(fname):
+ return name
+ finally:
+ tar.close()
+ raise mipp.NoFiles("found no archive file '%s'"%fname)
+
class SatelliteLoader(object):
    """Load xSAR data from tar archives described by a satellite config.

    The level-1 format module (resolved at run time from the 'format'
    option, e.g. TSX or CSK) provides read_metadata() and read_image().
    """

    def __init__(self, config_reader):
        #
        # Read configuration file based on satellite name
        #
        sat = config_reader('satellite')

        #
        # Load format decoder based on level1 format
        #
        lv1_format = config_reader('level1')['format']
        logger.info("Loading %s" % lv1_format)
        try:
            _args = imp.find_module(lv1_format)
        except ImportError:
            raise mipp.ReaderError("unknown level-1 format: '%s'" % lv1_format)
        try:
            mdl = imp.load_module(lv1_format, *_args)
        finally:
            # imp.find_module returns an open file object for plain modules.
            if _args[0]:
                _args[0].close()

        self._metadata_reader = mdl.read_metadata
        self._image_reader = mdl.read_image
        self._tar_file = None

        #
        # Attributing
        #
        self.__dict__.update(sat)

        self._config_reader = config_reader
        self.satname = self.satname + self.number
        self.satnumber = self.number
        del self.number

    def load(self, time_stamp, channel, **kwargs):
        """Locate the archive for *time_stamp* and load *channel* from it."""
        return self.load_file(self._find_tarfile(time_stamp),
                              channel, **kwargs)

    def load_file(self, filename, channel,
                  only_metadata=False, mask=True, calibrate=1):
        """Load *channel* from tar archive *filename*.

        Returns the metadata only, or (metadata-slice, image).
        """
        if channel not in self._config_reader.channel_names:
            raise mipp.ReaderError("unknown channel name '%s'" % channel)
        self._tar_file = filename
        mda = self._load_metadata(channel)
        if only_metadata:
            return mda
        mda, img = self._load_image(mda, mask=mask, calibrate=calibrate)
        return mipp.mda.mslice(mda), img

    def _load_metadata(self, channel):
        """Extract the single XML metadata member and parse it."""
        del channel
        opt = self._config_reader('level1')
        mda_file = opt['filename_metadata']
        tar = tarfile.open(self._tar_file)
        names = []
        try:
            for name in tar.getnames():
                if fnmatch.fnmatch(os.path.basename(name), mda_file):
                    names.append(name)
            if len(names) == 0:
                raise mipp.NoFiles("found no metadata file: '%s'" % mda_file)
            elif len(names) > 1:
                raise mipp.NoFiles("found multiple metadata files: '%s'" %
                                   str(names))
            logger.info("Extracting '%s'" % names[0])
            xmldata = tar.extractfile(names[0]).read()
        finally:
            tar.close()
        return self._metadata_reader(xmldata)

    def _load_image(self, mda, mask=True, calibrate=1):
        """Extract the image member into a temp dir and read it."""
        import tempfile
        import shutil
        image_file = _find_in_tarfile(self._tar_file, mda.image_filename)
        tar = tarfile.open(self._tar_file)
        tmpdir = tempfile.mkdtemp()
        logger.info("Extracting '%s' into '%s'" % (image_file, tmpdir))
        try:
            tar.extract(image_file, tmpdir)
            # NOTE(review): the member is extracted under its archive
            # path (image_file) but opened as mda.image_filename; this
            # only matches when both are equal -- confirm for archives
            # with extra leading directories.
            image_file = tmpdir + '/' + mda.image_filename
            mda, image = self._image_reader(mda, image_file,
                                            mask=mask, calibrate=calibrate)
        finally:
            tar.close()
            shutil.rmtree(tmpdir)
        return mda, image

    def _find_tarfile(self, time_stamp):
        """Glob for exactly one archive matching *time_stamp*."""
        opt = self._config_reader('level1')
        if not os.path.isdir(opt['dir']):
            # Py3-compatible raise (was: raise IOError, msg).
            raise IOError("No such directory: %s" % opt['dir'])
        tar_file = glob.glob(opt['dir'] + '/' +
                             time_stamp.strftime(opt['filename_archive']))
        if not tar_file:
            raise mipp.NoFiles("found no archive file: '%s'" %
                               (time_stamp.strftime(opt['filename_archive'])))
        elif len(tar_file) > 1:
            raise mipp.NoFiles("found multiple archive files: '%s'" %
                               str(tar_file))
        return tar_file[0]
+
+#-----------------------------------------------------------------------------
+#
+# Interface
+#
+#-----------------------------------------------------------------------------
def load(satname, time_stamp, channel, **kwarg):
    """Load *channel* for *satname* at *time_stamp* from the archive dir."""
    loader = SatelliteLoader(mipp.cfg.read_config(satname))
    return loader.load(time_stamp, channel, **kwarg)
+
def load_file(filename, channel, **kwarg):
    """Load *channel* from an explicit archive file."""
    # Satellite name should be read from metadata (and not filename) !!!
    satname = os.path.basename(filename).split('_')[0].lower()
    loader = SatelliteLoader(mipp.cfg.read_config(satname))
    return loader.load_file(filename, channel, **kwarg)
+
+#-----------------------------------------------------------------------------
+
if __name__ == '__main__':
    # Ad-hoc test harness: load (and optionally only inspect metadata
    # of, with -m) a 'sarx' channel from a tar archive.
    import sys
    import getopt
    import loggers.simple
    logger = loggers.simple.get('mipp')

    only_mda = False
    opts, args = getopt.getopt(sys.argv[1:], "m")
    for k, v in opts:
        if k == '-m':
            only_mda = True
    try:
        _file = args[0]
    except IndexError:
        # sys.stderr.write works on both Python 2 and Python 3
        # (was the Py2-only `print >> sys.stderr, ...`).
        sys.stderr.write("usage: sat.py [-m] <tar-file>\n")
        sys.exit(1)
    _mda = load_file(_file, 'sarx', only_metadata=True)
    if only_mda:
        pass
    elif _mda.calibrated != 'CALIBRATED':
        logger.warning("Data is not calibrated")
        _mda, _image = load_file(_file, 'sarx', calibrate=0)
    else:
        _mda, _image = load_file(_file, 'sarx')
    print('\n%s' % _mda)
diff --git a/scr/decompress_xrit b/scr/decompress_xrit
new file mode 100755
index 0000000..9392d3f
--- /dev/null
+++ b/scr/decompress_xrit
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+#
+import os
+import sys
+import getopt
+import glob
+from datetime import datetime
+
+from mipp import xrit
+
+#-----------------------------------------------------------------------------
def usage():
    """Print the command synopsis to stderr and exit with status 2."""
    # sys.stderr.write works on both Python 2 and Python 3
    # (was the Py2-only `print >> sys.stderr, ...`).
    sys.stderr.write("""\
    decompress_xrit [-w<work-dir>] [<in-dir>] [<out-dir>]
    If directory not defined, environment variables are used
""")
    sys.exit(2)
+
+#-----------------------------------------------------------------------------
+#
+# Handle options
+#
+#-----------------------------------------------------------------------------
# Default directories come from the environment; positional arguments
# (handled below) override <in-dir> and <out-dir>.
in_dir = os.environ.get('XRIT_IN_DIR', '')
out_dir = os.environ.get('XRIT_OUT_DIR', '')
wrk_dir = os.environ.get('XRIT_WRK_DIR', '')
opts, args = getopt.getopt(sys.argv[1:], 'w:')
for k, v in opts:
    if k == '-w':
        # -w<dir>: separate work directory for decompression.
        wrk_dir = v
try:
    in_dir = args[0]
    out_dir = args[1]
except IndexError:
    # Fewer than two positional arguments: keep the environment defaults.
    pass

if not in_dir or not out_dir:
    usage()
if not wrk_dir:
    # No work dir configured: decompress directly into the output dir.
    wrk_dir = out_dir
+
+#-----------------------------------------------------------------------------
+#
+# Get all XRIT files, decompress and move to output directory
+#
+#-----------------------------------------------------------------------------
def cmp_ctime(f1, f2):
    """Comparator ordering files by ctime, then by base name.

    Returns -1/0/1.  Implemented without the builtin cmp(), which is
    Python-2-only; this form works on both Python 2 and 3 (e.g. via
    functools.cmp_to_key).
    """
    a = (os.path.getctime(f1), os.path.basename(f1))
    b = (os.path.getctime(f2), os.path.basename(f2))
    # (a > b) - (a < b) is the standard cmp() replacement.
    return (a > b) - (a < b)
+
# Process all XRIT files (compressed names start with H or L), oldest
# first: decompress into the work dir, move the result to the output
# dir, remove the input file.
file_names = glob.glob(in_dir + '/[HL]*')
# list.sort(cmp) is Python-2-only; cmp_to_key works on 2.7 and 3.x.
from functools import cmp_to_key
file_names.sort(key=cmp_to_key(cmp_ctime))
for file_name in file_names:
    print('doing: %s %s'
          % (datetime.fromtimestamp(os.path.getctime(file_name)), file_name))
    fn = xrit.decompress(file_name, wrk_dir)
    os.rename(fn, out_dir + '/' + os.path.basename(fn))
    os.remove(file_name)
diff --git a/scr/decompress_xrit.cron b/scr/decompress_xrit.cron
new file mode 100755
index 0000000..1da5249
--- /dev/null
+++ b/scr/decompress_xrit.cron
@@ -0,0 +1,7 @@
#!/bin/sh
#
# Cron wrapper for decompress_xrit: source the environment set-up
# scripts, then run the decompressor from this script's directory.
# Expansions are quoted so paths containing spaces don't word-split.
base=$(dirname "$(realpath "$0")")

. /opt/etc/setup.sh
. "$base"/setup.sh
"$base"/decompress_xrit
diff --git a/scr/fsd_driver b/scr/fsd_driver
new file mode 100755
index 0000000..cf23af1
--- /dev/null
+++ b/scr/fsd_driver
@@ -0,0 +1,27 @@
#!/bin/sh
#
# Driver for process_fsd: find prologue files newer than the last
# processed one, verify their image segments and process complete
# series.  Single-path expansions are quoted against word-splitting.
base=$(dirname "$(realpath "$0")")

. "${base}"/etc/setup.sh
last_processed=$XRIT_IN_DIR/fsd_processor.last

for pro_file in $(find "$XRIT_IN_DIR" -newer "$last_processed" -name '*-PRO______-*'); do
    # check if we handle this satellite
    process_fsd --check-satellite "$pro_file" || continue

    # check segments
    process_fsd --check "$pro_file" || continue
    echo "$(basename "$pro_file")" > "$last_processed"

    # get image files
    img_files=$(process_fsd --check -l "$pro_file")
    test $? -eq 0 && test -n "$img_files" || continue

    # process it
    echo "$(date '+%Y-%m-%d %H:%M:%S') doing: $pro_file"
    echo "process_fsd -h -o$XRIT_OUT_DIR"
    echo "$pro_file"
    echo "$img_files"
    # $img_files is intentionally unquoted: it holds a list of files.
    process_fsd -h -o"$XRIT_OUT_DIR" "$pro_file" $img_files
    rm -f $img_files
done
diff --git a/scr/gts_driver b/scr/gts_driver
new file mode 100755
index 0000000..def3168
--- /dev/null
+++ b/scr/gts_driver
@@ -0,0 +1,13 @@
#!/bin/sh
#
# Driver for process_gts: find GTS (MPEF AMV) files newer than the last
# processed one and feed them to process_gts.  Expansions are quoted
# against word-splitting.
base=$(dirname "$(dirname "$(realpath "$0")")")

. "${base}"/etc/setup.sh
last_processed=$XRIT_IN_DIR/gts_processor.last

for gts_file in $(find "$XRIT_IN_DIR" -newer "$last_processed" -name 'L-000-MSG?__-MPEF________-AMV______-*'); do
    echo "$gts_file" > "$last_processed"
    echo "$(date '+%Y-%m-%d %H:%M:%S') doing: $gts_file"
    echo "process_gts -o$XRIT_OUT_DIR $gts_file"
    process_gts -o"$XRIT_OUT_DIR" "$gts_file"
done
diff --git a/scr/list_xrit_headers b/scr/list_xrit_headers
new file mode 100755
index 0000000..61b94ac
--- /dev/null
+++ b/scr/list_xrit_headers
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+#
+import sys
+import getopt
+
+from mipp import xrit
+
def usage():
    """Print the command synopsis to stderr and exit with status 2."""
    # sys.stderr.write works on both Python 2 and Python 3
    # (was the Py2-only `print >> sys.stderr, ...`).
    sys.stderr.write("usage: list_xrit_headers [-d] <file-name>\n")
    sys.stderr.write("    -d, ... and dump data\n")
    sys.exit(2)
+
# Parse the command line: -d additionally dumps the data sections.
dumpit = False
opts, args = getopt.getopt(sys.argv[1:], 'd')
for k, v in opts:
    if k == '-d':
        dumpit = True

try:
    file_name = args[0]
except IndexError:
    # A file name is mandatory.
    usage()

# List (and optionally dump) all XRIT headers of the file.
xrit.list(file_name, dumpit)
diff --git a/scr/process_fsd b/scr/process_fsd
new file mode 100755
index 0000000..0eb1c4b
--- /dev/null
+++ b/scr/process_fsd
@@ -0,0 +1,173 @@
+#!/usr/bin/python
+#
+import sys
+import os
+import getopt
+import glob
+from datetime import datetime, timedelta
+import numpy
+
+from mipp import xrit, log
+
+log.logging_on()
+logger = log.get_logger('fsd')
+
+max_age = {'MET7': timedelta(seconds=25*60),
+ 'GOES11' : timedelta(seconds=5*60),
+ 'GOES12' : timedelta(seconds=5*60),
+ 'GOES13' : timedelta(seconds=5*60),
+ 'MTSAT1R' : timedelta(seconds=5*60)}
+
+#-----------------------------------------------------------------------------
+
+def usage():
+ print >>sys.stderr, """\
+ process_fsd --check-satellite <prologue-file>
+ check if we handle this satellite
+
+ process_fsd --check [-l] <prologue-file>
+ check if number of image segments are as planned
+ -l, list corresponding image segment files
+
+ process_fsd --decompress [-o<output-dir>] <file> ... <file>
+ decompress files to output-dir (default is working directory)
+ -l, list decompressed files
+
+ process_fsd --metadata <prologue-file> <image-segment> ... <image-segment>
+ print meta-data
+
+ process_fsd [-h] [-o<output-dir>] <prologue-file> <image-segment> ... <image-segment>
+ -h, save image data to a HDF5 file (default is binary dump of image-data and ascii dump of meta-data)\
+ """
+ sys.exit(2)
+
+#-----------------------------------------------------------------------------
+
+def check_platform(prologue):
+ return max_age.has_key(prologue.platform)
+
+def check_segments(prologue, listit=False):
+ dname, fname = os.path.split(prologue.file_name)
+ fname = fname.replace('-PRO______-', '-0????????-')
+ fname = fname[:-2] + '??'
+ image_files = glob.glob(dname + '/' + fname)
+ if not image_files:
+ return False
+ image_files.sort()
+ if listit:
+ for f in image_files:
+ print f
+    ctime = datetime.fromtimestamp(os.path.getctime(prologue.file_name))
+ now = datetime.now()
+ im = xrit.read_imagedata(image_files[-1])
+ if im.segment.planned_end_seg_no == im.segment.seg_no:
+ return True
+ elif now - ctime > max_age[im.platform]:
+ return True
+ return False
+
+def decompress(image_files, outdir='.', listit=False):
+ for f in sorted(image_files):
+ outfile = xrit.decompress(f, outdir)
+ if listit:
+ print outfile
+
+def process(prologue, image_files, outdir='.', saveashdf5=False):
+ im = xrit.read_imagedata(image_files[-1])
+ logger.info('Processing: %s, %d image data files', prologue.product_id, len(image_files))
+ mda, img = xrit.sat.load_files(prologue, image_files, mask=False, calibrate=False)()
+ logger.info("Image data min, max: %.2f, %.2f %s", img.min(), img.max(), mda.calibration_unit)
+ fname = outdir + '/' + prologue.product_id
+ if saveashdf5:
+ import hdfdmi
+ fname += '.H5'
+ logger.info("Writing: '%s'", fname)
+ hdfdmi.save(mda, img, fname)
+ else:
+ fname_mda = fname + '.mda'
+ fp = open(fname_mda, 'w')
+ logger.info("Writing: '%s'", fname_mda)
+ fp.write(str(mda) + '\n')
+ fp.close()
+ fname += '.dat'
+ logger.info("Writing: '%s'", fname)
+ if type(img) == numpy.ma.MaskedArray:
+ img = img.filled(mda.no_data_value)
+ img.tofile(fname)
+
+ return True
+
+#-----------------------------------------------------------------------------
+
+long_options = ['check', 'check-satellite', 'decompress', 'metadata']
+nlopt = 0
+outdir = '.'
+check = False
+check_satellite = False
+decomp = False
+metadata = False
+listit = False
+saveashdf5 = False
+opts, args = getopt.getopt(sys.argv[1:], 'o:lh', long_options)
+for k, v in opts:
+ if k == '--decompress':
+ decomp = True
+ nlopt += 1
+ elif k == '--check':
+ check = True
+ nlopt += 1
+ elif k == '--check-satellite':
+ check_satellite = True
+ nlopt += 1
+ elif k == '--metadata':
+ nlopt += 1
+ metadata = True
+ elif k == '-o':
+ outdir = v
+ elif k == '-l':
+ listit = True
+ elif k == '-h':
+ saveashdf5 = True
+
+if nlopt > 1:
+ logger.error("Please specify only one of these: %s", ', '.join(['--' + s for s in long_options]))
+ sys.exit(2)
+
+pro_file = ''
+image_files = []
+try:
+ if check or check_satellite:
+ pro_file = args[0]
+ elif decomp:
+ image_files = args
+ else:
+ pro_file = args[0]
+ image_files = args[1:]
+except IndexError:
+ usage()
+
+if pro_file:
+ prologue = xrit.read_prologue(pro_file)
+
+#-----------------------------------------------------------------------------
+
+return_code = 0
+
+if check:
+ if not check_segments(prologue, listit):
+ return_code = 1
+
+elif check_satellite:
+ if not check_platform(prologue):
+ return_code = 1
+
+elif decomp:
+ decompress(image_files, outdir, listit)
+
+elif metadata:
+ print xrit.sat.load_files(prologue, image_files, only_metadata=True)
+
+else:
+ process(prologue, image_files, outdir, saveashdf5)
+
+sys.exit(return_code)
diff --git a/scr/process_gts b/scr/process_gts
new file mode 100755
index 0000000..810ed4d
--- /dev/null
+++ b/scr/process_gts
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+#
+import sys
+import os
+import getopt
+import logging
+
+from mipp import xrit, log
+
+log.logging_on()
+logger = log.get_logger('gts')
+
+#-----------------------------------------------------------------------------
+
+def usage():
+    print >>sys.stderr, "usage: process_gts [-o<output-dir>] <file-name>"
+ sys.exit(2)
+
+#-----------------------------------------------------------------------------
+
+outdir = '.'
+opts, args = getopt.getopt(sys.argv[1:], 'o:')
+for k, v in opts:
+ if k == '-o':
+ outdir = v
+
+try:
+ file_name = args[0]
+except IndexError:
+ usage()
+
+#-----------------------------------------------------------------------------
+
+gts = xrit.read_gts_message(file_name)
+gts_file = outdir + '/' + gts.product_id
+logger.info("Writing '%s'", gts_file)
+fp = open(gts_file, "wb")
+fp.write(gts.data)
+fp.close()
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..d3390b9
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2009-2012.
+
+# DMI,
+# Lyngbyvej 100
+# DK-2100 Copenhagen
+# Denmark
+
+# Author(s):
+
+# Lars Orum Rasmussen <loerum at gmail.com>
+
+# This file is part of mipp.
+
+# mipp is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# mipp is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with mipp. If not, see <http://www.gnu.org/licenses/>.
+
+"""Setup file for mipp.
+"""
+import os
+from setuptools import setup, Extension
+
+setup(name = 'mipp',
+ description='Meteorological ingest processing package',
+ author='Lars Orum Rasmussen',
+      author_email='loerum at gmail.com',
+ classifiers=["Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Science/Research",
+ "License :: OSI Approved :: GNU General Public License v3 " +
+ "or later (GPLv3+)",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Topic :: Scientific/Engineering"],
+ url="https://github.com/loerum/mipp",
+ version = '0.9.1',
+ packages = ['mipp', 'mipp.xrit', 'mipp.xsar'],
+ zip_safe = False,
+ )
diff --git a/testit b/testit
new file mode 100755
index 0000000..7a32e07
--- /dev/null
+++ b/testit
@@ -0,0 +1,7 @@
+#!/bin/sh
+# option '-s' is 'no capture'
+#
+cd $(dirname $0)
+python setup.py build
+echo "nosetests "$1" -w tests"
+nosetests "$1" -w tests
diff --git a/tests/buildpath_to_syspath.py b/tests/buildpath_to_syspath.py
new file mode 100644
index 0000000..ae4f234
--- /dev/null
+++ b/tests/buildpath_to_syspath.py
@@ -0,0 +1,19 @@
+import sys
+import os
+from distutils.util import get_platform
+
+proj_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
+build_base = 'build'
+plat = get_platform()
+
+python_version = '.'.join([str(i) for i in sys.version_info[:2]])
+build_purelib = os.path.join(build_base, 'lib' + '-' + python_version) # is it at all used ?
+build_platlib = os.path.join(build_base, 'lib.' + plat + '-' + python_version)
+
+build_path = os.path.join(proj_path, build_platlib)
+if not os.path.isdir(build_path):
+ assert False, "No such build path '%s'"%build_path
+sys.path.insert(0, build_path)
+
+if __name__ == '__main__':
+ print build_platlib
diff --git a/tests/data/.gitattributes b/tests/data/.gitattributes
new file mode 100644
index 0000000..4300199
--- /dev/null
+++ b/tests/data/.gitattributes
@@ -0,0 +1,2 @@
+H-* -crlf -diff -merge
+L-* -crlf -diff -merge
diff --git a/tests/data/20110825_104705_TSX1_SAR_SC_HH.mda b/tests/data/20110825_104705_TSX1_SAR_SC_HH.mda
new file mode 100644
index 0000000..f695370
--- /dev/null
+++ b/tests/data/20110825_104705_TSX1_SAR_SC_HH.mda
@@ -0,0 +1,20 @@
+area_extent: (492493.75, 8257780.25, 496717.75, 8262004.25)
+beam_mode: SC
+beamid: scan_009
+calibration_beamid: scan_009
+calibration_factor: 1.12882900334e-05
+calibration_unit: radar-brightness
+center_coor_lat: 74.4390827236
+center_coor_lon: -57.3073330118
+image_filename: IMAGEDATA/IMAGE_HH_SRA_scan_009.tif
+is_calibrated: True
+no_data_value: 0
+orbit_number: 23264
+polarisation: HH
+product_level: Level 1B Product
+product_name: 20110825_104705_TSX1_SAR_SC_HH
+proj4_params: units=m ellps=WGS84 datum=WGS84 proj=utm zone=21
+satellite_name: TSX1
+sensor_type: SAR
+tiff_params: {'projection': 'PROJCS["WGS 84 / UTM zone 21N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.2572235630016,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORI [...]
+time_start: 2011-08-25 10:47:05.298000
diff --git a/tests/data/GOES11_10_7_135W_20100201_0600.mda b/tests/data/GOES11_10_7_135W_20100201_0600.mda
new file mode 100644
index 0000000..3ba6378
--- /dev/null
+++ b/tests/data/GOES11_10_7_135W_20100201_0600.mda
@@ -0,0 +1,20 @@
+area_extent: [-398654.27220000001, -402660.84779999999, 402660.84779999999, 398654.27220000001]
+calibration_table: {'table': [[0.0, 170.0], [1023.0, 340.0]], 'name': 'calibrated infrared', 'unit': 'degree Kelvin'}
+calibration_unit: degree Kelvin
+channel: 10_7
+data_type: 32
+image_size: [200, 200]
+instruments: ('mviri',)
+is_calibrated: True
+no_data_value: 0
+pixel_size: [4006.5756000000001, 4006.5756000000001]
+product_name: GOES11_10_7_135W_20100201_0600
+product_type: full disc
+production_time: 2010-02-01 06:47:17.432000
+proj4_params: proj=geos lon_0=-135.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+projection: geos(-135.0)
+region_name: sliced
+satname: goes11
+satnumber: 11
+sublon: -135.0
+time_stamp: 2010-02-01 06:00:00
diff --git a/tests/data/H-000-MSG2__-MSG2________-HRV______-000012___-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-HRV______-000012___-201010111400-__
new file mode 100644
index 0000000..57f4e4c
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-HRV______-000012___-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-HRV______-000013___-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-HRV______-000013___-201010111400-__
new file mode 100644
index 0000000..82b2bb7
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-HRV______-000013___-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-HRV______-000018___-201011091200-__ b/tests/data/H-000-MSG2__-MSG2________-HRV______-000018___-201011091200-__
new file mode 100644
index 0000000..4c477a1
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-HRV______-000018___-201011091200-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-IR_108___-000004___-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-IR_108___-000004___-201010111400-__
new file mode 100644
index 0000000..38278e7
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-IR_108___-000004___-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-IR_108___-000005___-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-IR_108___-000005___-201010111400-__
new file mode 100644
index 0000000..1745523
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-IR_108___-000005___-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201010111400-__
new file mode 100644
index 0000000..2906ff7
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201011091200-__ b/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201011091200-__
new file mode 100644
index 0000000..e636f8f
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-_________-EPI______-201011091200-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201010111400-__ b/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201010111400-__
new file mode 100644
index 0000000..d816ed4
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201010111400-__ differ
diff --git a/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201011091200-__ b/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201011091200-__
new file mode 100644
index 0000000..6b0af45
Binary files /dev/null and b/tests/data/H-000-MSG2__-MSG2________-_________-PRO______-201011091200-__ differ
diff --git a/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000003___-201002010600-__ b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000003___-201002010600-__
new file mode 100644
index 0000000..39fad5f
Binary files /dev/null and b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000003___-201002010600-__ differ
diff --git a/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000004___-201002010600-__ b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000004___-201002010600-__
new file mode 100644
index 0000000..3f7a9e7
Binary files /dev/null and b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-000004___-201002010600-__ differ
diff --git a/tests/data/L-000-MSG2__-GOES11______-10_7_135W-PRO______-201002010600-__ b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-PRO______-201002010600-__
new file mode 100644
index 0000000..732a67d
Binary files /dev/null and b/tests/data/L-000-MSG2__-GOES11______-10_7_135W-PRO______-201002010600-__ differ
diff --git a/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000003___-200912210900-__ b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000003___-200912210900-__
new file mode 100644
index 0000000..c78e83b
Binary files /dev/null and b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000003___-200912210900-__ differ
diff --git a/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000004___-200912210900-__ b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000004___-200912210900-__
new file mode 100644
index 0000000..78ec519
Binary files /dev/null and b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-000004___-200912210900-__ differ
diff --git a/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-PRO______-200912210900-__ b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-PRO______-200912210900-__
new file mode 100644
index 0000000..2152474
Binary files /dev/null and b/tests/data/L-000-MSG2__-MTSAT1R_____-10_8_140E-PRO______-200912210900-__ differ
diff --git a/tests/data/L-000-MTP___-MET7________-00_7_057E-000005___-200912211200-__ b/tests/data/L-000-MTP___-MET7________-00_7_057E-000005___-200912211200-__
new file mode 100644
index 0000000..a18f2ea
Binary files /dev/null and b/tests/data/L-000-MTP___-MET7________-00_7_057E-000005___-200912211200-__ differ
diff --git a/tests/data/L-000-MTP___-MET7________-00_7_057E-000006___-200912211200-__ b/tests/data/L-000-MTP___-MET7________-00_7_057E-000006___-200912211200-__
new file mode 100644
index 0000000..e70ba65
Binary files /dev/null and b/tests/data/L-000-MTP___-MET7________-00_7_057E-000006___-200912211200-__ differ
diff --git a/tests/data/L-000-MTP___-MET7________-00_7_057E-PRO______-200912211200-__ b/tests/data/L-000-MTP___-MET7________-00_7_057E-PRO______-200912211200-__
new file mode 100644
index 0000000..8656460
Binary files /dev/null and b/tests/data/L-000-MTP___-MET7________-00_7_057E-PRO______-200912211200-__ differ
diff --git a/tests/data/MET7_00_7_057E_20091221_1200.mda b/tests/data/MET7_00_7_057E_20091221_1200.mda
new file mode 100644
index 0000000..2a1c3b0
--- /dev/null
+++ b/tests/data/MET7_00_7_057E_20091221_1200.mda
@@ -0,0 +1,20 @@
+area_extent: [-1123120.7549999999, -450822.24499999994, 1125369.2449999999, 898271.75499999989]
+calibration_table: {'table': None, 'name': '', 'unit': ''}
+calibration_unit: counts
+channel: 00_7
+data_type: 8
+image_size: [1000, 600]
+instruments: ('mviri',)
+is_calibrated: False
+no_data_value: 0
+pixel_size: [2248.4899999999998, 2248.4899999999998]
+product_name: MET7_00_7_057E_20091221_1200
+product_type: PVISBAN
+production_time: 2009-12-21 11:36:00
+proj4_params: proj=geos lon_0=57.00 lat_0=0.00 a=6378140.00 b=6356755.00 h=35785831.00
+projection: geos(57.0)
+region_name: sliced
+satname: meteosat07
+satnumber: 07
+sublon: 57.0
+time_stamp: 2009-12-21 12:00:00
diff --git a/tests/data/MSG2_HRV_20101011_1400.mda b/tests/data/MSG2_HRV_20101011_1400.mda
new file mode 100644
index 0000000..2a0fc33
--- /dev/null
+++ b/tests/data/MSG2_HRV_20101011_1400.mda
@@ -0,0 +1,19 @@
+area_extent: [-501567.37595780345, -198526.6682504965, 498566.97291119647, 401553.9410709035]
+calibration_unit: %
+channel: HRV
+data_type: 64
+image_size: [1000, 600]
+instruments: ('seviri',)
+is_calibrated: True
+no_data_value: 0
+pixel_size: [1000.134348869, 1000.134348869]
+product_name: MSG2_HRV_20101011_1400
+product_type: full disc
+production_time: 2010-10-11 14:15:10.623000
+proj4_params: proj=geos lon_0=0.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+projection: geos(0.0)
+region_name: sliced
+satname: meteosat09
+satnumber: 09
+sublon: 0.0
+time_stamp: 2010-10-11 14:00:00
diff --git a/tests/data/MSG2_HRV_20101109_1200.mda b/tests/data/MSG2_HRV_20101109_1200.mda
new file mode 100644
index 0000000..ad6ff18
--- /dev/null
+++ b/tests/data/MSG2_HRV_20101109_1200.mda
@@ -0,0 +1,19 @@
+area_extent: [-4822147.7630718835, 2333813.5030858112, 4177061.1080513783, 2783873.9600768615]
+calibration_unit: %
+channel: HRV
+data_type: 64
+image_size: [8998, 450]
+instruments: ('seviri',)
+is_calibrated: True
+no_data_value: 0
+pixel_size: [1000.134348869, 1000.134348869]
+product_name: MSG2_HRV_20101109_1200
+product_type: full disc
+production_time: 2010-11-09 12:15:10.269000
+proj4_params: proj=geos lon_0=0.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+projection: geos(0.0)
+region_name: sliced
+satname: meteosat09
+satnumber: 09
+sublon: 0.0
+time_stamp: 2010-11-09 12:00:00
diff --git a/tests/data/MSG2_IR_108_20101011_1400.mda b/tests/data/MSG2_IR_108_20101011_1400.mda
new file mode 100644
index 0000000..a116b0e
--- /dev/null
+++ b/tests/data/MSG2_IR_108_20101011_1400.mda
@@ -0,0 +1,19 @@
+area_extent: [-298540.11499879154, -301540.51816460851, 2401822.7342365086, 598580.43158049148]
+calibration_unit: K
+channel: IR_108
+data_type: 64
+image_size: [900, 300]
+instruments: ('seviri',)
+is_calibrated: True
+no_data_value: 0
+pixel_size: [3000.4031658170002, 3000.4031658170002]
+product_name: MSG2_IR_108_20101011_1400
+product_type: full disc
+production_time: 2010-10-11 14:15:10.623000
+proj4_params: proj=geos lon_0=0.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+projection: geos(0.0)
+region_name: sliced
+satname: meteosat09
+satnumber: 09
+sublon: 0.0
+time_stamp: 2010-10-11 14:00:00
diff --git a/tests/data/MTSAT1R_10_8_140E_20091221_0900.mda b/tests/data/MTSAT1R_10_8_140E_20091221_0900.mda
new file mode 100644
index 0000000..2a67152
--- /dev/null
+++ b/tests/data/MTSAT1R_10_8_140E_20091221_0900.mda
@@ -0,0 +1,20 @@
+area_extent: [-398000.0, -402000.0, 402000.0, 398000.0]
+calibration_table: {'table': [[0.0, 330.0], [1.0, 329.68600463867188], [2.0, 329.37200927734375], [3.0, 329.0570068359375], [4.0, 328.74099731445312], [5.0, 328.42498779296875], [6.0, 328.10800170898438], [7.0, 327.79000854492188], [8.0, 327.47198486328125], [9.0, 327.15301513671875], [10.0, 326.8330078125], [11.0, 326.51300048828125], [12.0, 326.19198608398438], [13.0, 325.8699951171875], [14.0, 325.5469970703125], [15.0, 325.2239990234375], [16.0, 324.89999389648438], [17.0, 324.575012 [...]
+calibration_unit: degree Kelvin
+channel: 10_8
+data_type: 32
+image_size: [200, 200]
+instruments: ('mviri',)
+is_calibrated: True
+no_data_value: 0
+pixel_size: [4000.0, 4000.0]
+product_name: MTSAT1R_10_8_140E_20091221_0900
+product_type: full disc
+production_time: 2009-12-21 09:15:53.244000
+proj4_params: proj=geos lon_0=140.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+projection: geos(140.0)
+region_name: sliced
+satname: mtsat1r
+satnumber: 1r
+sublon: 140.0
+time_stamp: 2009-12-21 09:00:00
diff --git a/tests/data/TX01_SAR_SC_GEC_20110825T104705_20110825T104727_NSG_023264_8133_test.TSX.tar b/tests/data/TX01_SAR_SC_GEC_20110825T104705_20110825T104727_NSG_023264_8133_test.TSX.tar
new file mode 100644
index 0000000..286d54a
Binary files /dev/null and b/tests/data/TX01_SAR_SC_GEC_20110825T104705_20110825T104727_NSG_023264_8133_test.TSX.tar differ
diff --git a/tests/data/goes11.cfg b/tests/data/goes11.cfg
new file mode 100644
index 0000000..21646e7
--- /dev/null
+++ b/tests/data/goes11.cfg
@@ -0,0 +1,49 @@
+#
+# Level 1.5 configuration file for GOES11
+#
+# An item like:
+# name = value
+# is read in python like:
+# try:
+# name = eval(value)
+# except NameError:
+# name = str(value)
+#
+
+[satellite]
+satname = 'goes'
+number = '11'
+instruments = ('mviri',)
+projection = 'geos(-135.0)'
+
+[mviri-level2]
+format = 'mipp'
+
+[mviri-level1]
+format = 'mipp/xrit/SGS'
+dir = '/data/xrit/out'
+filename = 'L-000-MSG?__-GOES11______-%(channel)s_135W-%(segment)s-%Y%m%d%H%M-__'
+
+[mviri-1]
+name = '00_7'
+frequency = (0.5, 0.7, 0.9)
+resolution = 4006.5756
+size = (2816, 2816)
+
+[mviri-2]
+name = '03_9'
+frequency = (2.9, 3.9, 4.9)
+resolution = 4006.5756
+size = (2816, 2816)
+
+[mviri-3]
+name = '06_8'
+frequency = (6.1, 6.8, 7.5)
+resolution = 8013.1510
+size = (1408, 1408)
+
+[mviri-4]
+name = '10_7'
+frequency = (9.7, 10.7, 11.7)
+resolution = 4006.5756
+size = (2816, 2816)
diff --git a/tests/data/met7.cfg b/tests/data/met7.cfg
new file mode 100644
index 0000000..b56d571
--- /dev/null
+++ b/tests/data/met7.cfg
@@ -0,0 +1,44 @@
+#
+# Level 1.5 configuration file for Meteosat-7
+#
+# An item like:
+# name = value
+# is read in python like:
+# try:
+# name = eval(value)
+# except NameError:
+# name = str(value)
+#
+
+[satellite]
+satname = 'meteosat'
+number = '07'
+instruments = ('mviri',)
+projection = 'geos(57.0)'
+proj4_params = 'proj=geos lon_0=57.00 lat_0=0.00 a=6378140.00 b=6356755.00 h=35785831.00'
+
+[mviri-level2]
+format = 'mipp'
+
+[mviri-level1]
+format = 'mipp/xrit/MTP'
+dir = '/data/xrit/out'
+filename = 'L-000-MTP___-MET7________-%(channel)s_057E-%(segment)s-%Y%m%d%H%M-__'
+
+[mviri-1]
+name = '00_7'
+frequency = (0.5, 0.7, 0.9)
+resolution = 2248.49
+size = (5000, 5000)
+
+[mviri-2]
+name = '06_4'
+frequency = (5.7, 6.4, 7.1)
+resolution = 4496.98
+size = (2500, 2500)
+
+[mviri-3]
+name = '11_5'
+frequency = (10.5, 11.5, 12.5)
+resolution = 4496.98
+size = (2500, 2500)
diff --git a/tests/data/msg2.cfg b/tests/data/msg2.cfg
new file mode 100644
index 0000000..0cb0bef
--- /dev/null
+++ b/tests/data/msg2.cfg
@@ -0,0 +1,95 @@
+[satellite]
+satname = 'meteosat'
+number = '09'
+instruments = ('seviri',)
+projection = 'geos(0.0)'
+proj4_params = 'proj=geos lon_0=0.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00'
+
+[seviri-level1]
+format='mipp/xrit/MSG'
+dir = '/data/xrit/out'
+filename_pro = 'H-000-MSG2__-MSG2________-_________-%(segment)s-%Y%m%d%H%M-__'
+filename = 'H-000-MSG2__-MSG2________-%(channel)s-%(segment)s-%Y%m%d%H%M-__'
+
+[seviri-level2]
+format='mipp'
+
+[seviri-1]
+name = 'VIS006'
+frequency = (0.56, 0.635, 0.71)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-2]
+name = 'VIS008'
+frequency = (0.74, 0.81, 0.88)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-3]
+name = 'IR_016'
+frequency = (1.5, 1.64, 1.78)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-4]
+name = 'IR_039'
+frequency = (3.48, 3.92, 4.36)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-5]
+name = 'WV_062'
+frequency = (5.35, 6.25, 7.15)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-6]
+name = 'WV_073'
+frequency = (6.85, 7.35, 7.85)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-7]
+name = 'IR_087'
+frequency = (8.3, 8.7, 9.1)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-8]
+name = 'IR_097'
+frequency = (9.38, 9.66, 9.94)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-9]
+name = 'IR_108'
+frequency = (9.8, 10.8, 11.8)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-10]
+name = 'IR_120'
+frequency = (11.0, 12.0, 13.0)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-11]
+name = 'IR_134'
+frequency = (12.4, 13.4, 14.4)
+resolution = 3000.403165817
+size = (3712, 3712)
+
+[seviri-12]
+name = 'HRV'
+frequency = (0.5, 0.7, 0.9)
+resolution = 1000.134348869
+size = (11136, 11136)
+
+[offline]
+msg_dir=/data/software/safnwc_v2009_nwclib
+msg_inc=%(msg_dir)s/include
+msg_lib=%(msg_dir)s/bin
+msg_bin=%(msg_dir)s/bin
+ctype_dir=%(msg_dir)s/export/PGE02
+ctth_dir=%(msg_dir)s/export/PGE03
\ No newline at end of file
diff --git a/tests/data/msg2.cfg.out b/tests/data/msg2.cfg.out
new file mode 100644
index 0000000..35515e4
--- /dev/null
+++ b/tests/data/msg2.cfg.out
@@ -0,0 +1,28 @@
+satellite
+ instruments: ('seviri',)
+ number: 09
+ proj4_params: proj=geos lon_0=0.00 lat_0=0.00 a=6378169.00 b=6356583.80 h=35785831.00
+ projection: geos(0.0)
+ satname: meteosat
+level1
+ dir: /data/xrit/out
+ filename: H-000-MSG2__-MSG2________-%(channel)s-%(segment)s-%Y%m%d%H%M-__
+ filename_pro: H-000-MSG2__-MSG2________-_________-%(segment)s-%Y%m%d%H%M-__
+ format: mipp/xrit/MSG
+level2
+ format: mipp
+frequency: (0.56, 0.64, 0.71), name: VIS006, resolution: 3000.40, size: (3712, 3712)
+frequency: (0.74, 0.81, 0.88), name: VIS008, resolution: 3000.40, size: (3712, 3712)
+frequency: (1.50, 1.64, 1.78), name: IR_016, resolution: 3000.40, size: (3712, 3712)
+frequency: (0.50, 0.70, 0.90), name: HRV, resolution: 1000.13, size: (11136, 11136)
+frequency: (1.50, 1.64, 1.78), name: IR_016, resolution: 3000.40, size: (3712, 3712)
+frequency: (3.48, 3.92, 4.36), name: IR_039, resolution: 3000.40, size: (3712, 3712)
+frequency: (8.30, 8.70, 9.10), name: IR_087, resolution: 3000.40, size: (3712, 3712)
+frequency: (9.38, 9.66, 9.94), name: IR_097, resolution: 3000.40, size: (3712, 3712)
+frequency: (9.80, 10.80, 11.80), name: IR_108, resolution: 3000.40, size: (3712, 3712)
+frequency: (11.00, 12.00, 13.00), name: IR_120, resolution: 3000.40, size: (3712, 3712)
+frequency: (12.40, 13.40, 14.40), name: IR_134, resolution: 3000.40, size: (3712, 3712)
+frequency: (0.56, 0.64, 0.71), name: VIS006, resolution: 3000.40, size: (3712, 3712)
+frequency: (0.74, 0.81, 0.88), name: VIS008, resolution: 3000.40, size: (3712, 3712)
+frequency: (5.35, 6.25, 7.15), name: WV_062, resolution: 3000.40, size: (3712, 3712)
+frequency: (6.85, 7.35, 7.85), name: WV_073, resolution: 3000.40, size: (3712, 3712)
diff --git a/tests/data/mtsat1r.cfg b/tests/data/mtsat1r.cfg
new file mode 100644
index 0000000..b23a29e
--- /dev/null
+++ b/tests/data/mtsat1r.cfg
@@ -0,0 +1,49 @@
+#
+# Level 1.5 configuration file for MTSAT1R
+#
+# An item like:
+# name = value
+# is read in python like:
+# try:
+# name = eval(value)
+# except:
+# name = str(value)
+#
+
+[satellite]
+satname = 'mtsat'
+number = '1r'
+instruments = ('mviri',)
+projection = 'geos(140.0)'
+
+[mviri-level2]
+format = 'mipp'
+
+[mviri-level1]
+format = 'mipp/xrit/SGS'
+dir = '/data/xrit/out'
+filename = 'L-000-MSG?__-MTSAT1R_____-%(channel)s_140E-%(segment)s-%Y%m%d%H%M-__'
+
+[mviri-1]
+name = '00_7'
+frequency = (0.5, 0.7, 0.9)
+resolution = 4000.0
+size = (2752, 2752)
+
+[mviri-2]
+name = '03_8'
+frequency = (2.8, 3.8, 4.8)
+resolution = 4000.0
+size = (2752, 2752)
+
+[mviri-3]
+name = '06_8'
+frequency = (6.1, 6.8, 7.5)
+resolution = 4000.0
+size = (2752, 2752)
+
+[mviri-4]
+name = '10_8'
+frequency = (9.8, 10.8, 11.8)
+resolution = 4000.0
+size = (2752, 2752)
diff --git a/tests/data/tx01.cfg b/tests/data/tx01.cfg
new file mode 100644
index 0000000..ab3f68e
--- /dev/null
+++ b/tests/data/tx01.cfg
@@ -0,0 +1,19 @@
+[satellite]
+satname = 'tsx'
+number = '1'
+instruments = ('sarx',)
+
+[sarx-level1]
+format=mipp/xsar/TSX
+dir = '/data/xsar/out'
+filename_archive = 'TX01_SAR*_%Y%m%dT%H%M%S_*.tar'
+filename_metadata = 'TSX1_SAR_*.xml'
+
+[sarx-level2]
+format='mipp_xsar'
+
+[sarx-1]
+name = 'sarx'
+frequency = (9.50, 9.65, 9.80) # GHz
+resolution = 8.25
+size = ()
diff --git a/tests/test_misc.py b/tests/test_misc.py
new file mode 100644
index 0000000..80ae325
--- /dev/null
+++ b/tests/test_misc.py
@@ -0,0 +1,39 @@
+import sys
+import os
+import unittest
+import cStringIO
+
+import buildpath_to_syspath
+print sys.path
+import mipp.cfg
+
+datadir = (os.path.dirname(__file__) or '.') + '/data'
+
+class Test(unittest.TestCase):
+
+ def test_config_parser(self):
+ cfgfile = 'msg2'
+ os.environ['PPP_CONFIG_DIR'] = datadir
+ c = mipp.cfg.read_config(cfgfile)
+ fp = cStringIO.StringIO()
+ for name in ('satellite', 'level1', 'level2'):
+ h = c(name)
+ print >>fp, name
+ for k in sorted(h.keys()):
+ print >>fp, ' ', k + ':', h[k]
+ print >>fp, mipp.cfg._Channel(c(1).items())
+ print >>fp, mipp.cfg._Channel(c(2).items())
+ print >>fp, mipp.cfg._Channel(c(3).items())
+ for name in c.channel_names:
+ print >>fp, c.get_channel(name)
+ text1 = fp.getvalue().strip()
+ fp.close()
+ fp = open(datadir + '/' + cfgfile + '.cfg.out')
+ text2 = fp.read().strip()
+ fp.close()
+ self.assertTrue(text1 == text2, msg='Reading %s.cfg failed'%cfgfile)
+
+if __name__ == '__main__':
+ unittest.main()
+
+
diff --git a/tests/test_xrit.py b/tests/test_xrit.py
new file mode 100644
index 0000000..b4f052a
--- /dev/null
+++ b/tests/test_xrit.py
@@ -0,0 +1,204 @@
+import os
+import sys
+from datetime import datetime
+import numpy
+import unittest
+
+# Presumably adds the in-tree build dir to sys.path -- TODO confirm
+# against tests/buildpath_to_syspath.py.
+import buildpath_to_syspath
+import mipp
+from mipp import xrit
+from mipp.mda import _nice2cmp
+
+# Directory holding the bundled XRIT sample segments and .mda references.
+datadir = (os.path.dirname(__file__) or '.') + '/data'
+# When True, each test writes the freshly computed metadata next to cwd
+# (used to regenerate the reference .mda files by hand).
+save_mda = False
+# PNG debug dumps in make_image() are only produced when DEBUG is set.
+debug = os.environ.has_key('DEBUG')
+
+
+try:
+    # give the possibility to test other config files
+    os.environ['PPP_CONFIG_DIR'] = os.environ['LOCAL_PPP_CONFIG_DIR']
+except KeyError:
+    # Default to the bundled test configuration directory.
+    os.environ['PPP_CONFIG_DIR'] = datadir
+if not os.path.isdir(os.environ['PPP_CONFIG_DIR']):
+    raise mipp.ConfigReaderError, "No config dir: '%s'"%os.environ['PPP_CONFIG_DIR']
+
+# Each fixture list is [prologue, image segment(s)...]; the MSG/HRV lists
+# additionally end with an epilogue file.  The *_sum constants are the
+# expected pixel sums of the sliced test regions (regression checksums).
+goes_files = [datadir + '/L-000-MSG2__-GOES11______-10_7_135W-PRO______-201002010600-__',
+              datadir + '/L-000-MSG2__-GOES11______-10_7_135W-000003___-201002010600-__',
+              datadir + '/L-000-MSG2__-GOES11______-10_7_135W-000004___-201002010600-__']
+goes_sum = 11629531.0
+
+mtsat_files = [datadir + '/L-000-MSG2__-MTSAT1R_____-10_8_140E-PRO______-200912210900-__',
+               datadir + '/L-000-MSG2__-MTSAT1R_____-10_8_140E-000003___-200912210900-__',
+               datadir + '/L-000-MSG2__-MTSAT1R_____-10_8_140E-000004___-200912210900-__']
+mtsat_sum = 11148074.0
+
+met7_files = [datadir + '/L-000-MTP___-MET7________-00_7_057E-PRO______-200912211200-__',
+              datadir + '/L-000-MTP___-MET7________-00_7_057E-000005___-200912211200-__',
+              datadir + '/L-000-MTP___-MET7________-00_7_057E-000006___-200912211200-__']
+met7_sum = 11662791
+
+msg_files = [datadir + '/H-000-MSG2__-MSG2________-_________-PRO______-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-IR_108___-000004___-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-IR_108___-000005___-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-_________-EPI______-201010111400-__']
+msg_sum = 75116847.263172984
+
+hrv_files = [datadir + '/H-000-MSG2__-MSG2________-_________-PRO______-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-HRV______-000012___-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-HRV______-000013___-201010111400-__',
+             datadir + '/H-000-MSG2__-MSG2________-_________-EPI______-201010111400-__']
+hrv_sum = 11328340.753558
+
+hrv2_files = [datadir + '/H-000-MSG2__-MSG2________-_________-PRO______-201011091200-__',
+              datadir + '/H-000-MSG2__-MSG2________-HRV______-000018___-201011091200-__',
+              datadir + '/H-000-MSG2__-MSG2________-_________-EPI______-201011091200-__']
+hrv2_sum = 44049589.065626
+
+def make_image(mda, img, outdir='.'):
+    """Write *img* as an 8-bit PNG named after mda.product_name.
+
+    Debug aid only: a no-op unless the DEBUG environment variable was set
+    at import time (see the module-level `debug` flag).  Requires PIL.
+    """
+    if not debug:
+        return
+    import Image as pil
+    fname = outdir + '/' + mda.product_name + '.png'
+    # Linearly stretch the data to the full 0..255 range.
+    img = ((img - img.min()) * 255.0 /
+           (img.max() - img.min()))
+    # Fill masked pixels with the sensor's no-data value before casting.
+    if type(img) == numpy.ma.MaskedArray:
+        img = img.filled(mda.no_data_value)
+    img = pil.fromarray(numpy.array(img, numpy.uint8))
+    img.save(fname)
+
+def compare_mda(m1, m2):
+    """Return True if the two metadata objects are equivalent.
+
+    Float sequences (area_extent, pixel_size) are compared element-wise
+    at three decimals to absorb rounding noise; all other attributes are
+    compared via mipp.mda._nice2cmp (normalisation helper -- exact
+    semantics defined in mipp/mda.py).
+    """
+    def compare_arrays(e1, e2):
+        # Element-wise comparison at 3-decimal precision.
+        for x1, x2 in zip(e1, e2):
+            if "%.3f"%x1 != "%.3f"%x2:
+                return False
+        return True
+
+    # Both objects must carry exactly the same attribute names.
+    k1 = sorted(m1.__dict__.keys())
+    k2 = sorted(m2.__dict__.keys())
+    if not k1 == k2:
+        return False
+    for k in k1:
+        if k in ('area_extent', 'pixel_size'):
+            if not compare_arrays(getattr(m1, k), getattr(m2, k)):
+                return False
+        elif not _nice2cmp(getattr(m1, k)) == _nice2cmp(getattr(m2, k)):
+            return False
+
+    return True
+
+class Test(unittest.TestCase):
+    """Regression tests for XRIT loading.
+
+    Each test loads bundled sample segments, slices a fixed region, and
+    checks the metadata against a recorded .mda reference, the slice
+    shape, and a pixel cross sum (3-decimal tolerance).
+    NOTE: failUnlessAlmostEqual is the pre-2.7 alias of assertAlmostEqual.
+    """
+
+    def test_goes(self):
+        # GOES-11 relayed in MSG format; calibrated load, 200x200 slice.
+        loader = xrit.sat.load_files(goes_files[0], goes_files[1:], calibrate=True)
+        mda, img = loader[1308:1508,1308:1508]
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        # Reference metadata captured from a known-good run.
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        # data_type follows the produced array, not the recording.
+        mdac.data_type = 8*img.itemsize
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='GOES metadata differ')
+        self.assertTrue(img.shape == (200, 200), msg='GOES image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, goes_sum, 3,
+                                   msg='GOES image reading/slicing failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, goes_sum))
+
+    def test_mtsat(self):
+        # MTSAT-1R relayed in MSG format; calibrated load, 200x200 slice.
+        loader = xrit.sat.load_files(mtsat_files[0], mtsat_files[1:], calibrate=True)
+        mda, img = loader[1276:1476,1276:1476]
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        mdac.data_type = 8*img.itemsize
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='MTSAT metadata differ')
+        self.assertTrue(img.shape == (200, 200), msg='MTSAT image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, mtsat_sum, 3,
+                                   msg='MTSAT image reading/slicing failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, mtsat_sum))
+
+    def test_met7(self):
+        # Meteosat-7: uncalibrated, using the raw_slicing entry point,
+        # so the recorded data_type is kept as-is (no override here).
+        loader = xrit.sat.load_files(met7_files[0], met7_files[1:], calibrate=False)
+        mda, img = loader.raw_slicing((slice(2300,2900), slice(2000,3000)))
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='MET7 metadata differ')
+        self.assertTrue(img.shape == (600, 1000), msg='MET7 image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, met7_sum, 3,
+                                   msg='MET7 image reading/slicing failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, met7_sum))
+
+    def test_msg(self):
+        # MSG IR 10.8: last file in the list is the epilogue segment.
+        loader = xrit.sat.load_files(msg_files[0], msg_files[1:-1], epilogue=msg_files[-1],
+                                     calibrate=True)
+        mda, img = loader[1656:1956,1756:2656]
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        mdac.data_type = 8*img.itemsize
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='MSG metadata differ')
+        self.assertTrue(img.shape == (300, 900), msg='MSG image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, msg_sum, 3,
+                                   msg='MSG image reading/slicing reflectances failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, msg_sum))
+
+        # Reloading by area_extent must reproduce the same slice.
+        mda, img = loader(mda.area_extent)
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        cross_sum = img.sum()
+        self.assertTrue(compare_mda(mda, mdac), msg='MSG metadata differ, when using area_extent')
+        self.failUnlessAlmostEqual(cross_sum, msg_sum, 3,
+                                   msg='MSG image reading/slicing failed, when using area_extent, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, msg_sum))
+
+    def test_msg2(self):
+        # Same MSG data, calibrate=2 -- per the assert message this
+        # selects radiances rather than reflectances.
+        loader = xrit.sat.load_files(msg_files[0], msg_files[1:-1], epilogue=msg_files[-1],
+                                     calibrate=2)
+        mda, img = loader[1656:1956,1756:2656]
+        cross_sum = img.sum()
+        expected = 22148991.0194
+        self.failUnlessAlmostEqual(cross_sum, expected, 3,
+                                   msg='MSG image reading/slicing radiances failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, expected))
+
+
+    def test_hrv(self):
+        # MSG HRV channel (high resolution), two adjacent segments.
+        loader = xrit.sat.load_files(hrv_files[0], hrv_files[1:-1], epilogue=hrv_files[-1], calibrate=True)
+        mda, img = loader[5168:5768,5068:6068]
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        mdac.data_type = 8*img.itemsize
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='MSG-HRV metadata differ')
+        self.assertTrue(img.shape == (600, 1000), msg='MSG-HRV image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, hrv_sum, 3,
+                                   msg='MSG-HRV image reading/slicing failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, hrv_sum))
+
+    def test_hrv2(self):
+        # HRV from a different time slot, single segment, a very wide
+        # slice (nearly the full line width).
+        loader = xrit.sat.load_files(hrv2_files[0], hrv2_files[1:-1], epilogue=hrv2_files[-1], calibrate=True)
+        mda, img = loader[2786:3236,748:9746]
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        mdac = xrit.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        mdac.data_type = 8*img.itemsize
+        cross_sum = img.sum()
+        make_image(mda, img)
+
+        self.assertTrue(compare_mda(mda, mdac), msg='MSG-HRV metadata differ')
+        self.assertTrue(img.shape == (450, 8998), msg='MSG-HRV image reading/slicing failed, wrong shape')
+        self.failUnlessAlmostEqual(cross_sum, hrv2_sum, 3,
+                                   msg='MSG-HRV image reading/slicing failed, wrong cross_sum (%.3f != %.3f)'%(
+                cross_sum, hrv2_sum))
+
+if __name__ == '__main__':
+    # NOTE(review): save_mda is already False at module level; this
+    # reassignment is the toggle point for regenerating .mda references.
+    save_mda = False
+    unittest.main()
diff --git a/tests/test_xsar.py b/tests/test_xsar.py
new file mode 100644
index 0000000..b77ff3c
--- /dev/null
+++ b/tests/test_xsar.py
@@ -0,0 +1,70 @@
+import os
+import sys
+from datetime import datetime
+import numpy
+import unittest
+
+# Presumably adds the in-tree build dir to sys.path -- TODO confirm
+# against tests/buildpath_to_syspath.py.
+import buildpath_to_syspath
+import mipp
+from mipp import xsar
+from mipp.mda import _nice2cmp, mslice
+
+# Directory holding the bundled SAR sample data and .mda references.
+datadir = (os.path.dirname(__file__) or '.') + '/data'
+# When True, tests write freshly computed metadata (reference regeneration).
+save_mda = False
+
+try:
+    # give the possibility to test other config files
+    os.environ['PPP_CONFIG_DIR'] = os.environ['LOCAL_PPP_CONFIG_DIR']
+except KeyError:
+    os.environ['PPP_CONFIG_DIR'] = datadir
+if not os.path.isdir(os.environ['PPP_CONFIG_DIR']):
+    raise mipp.ConfigReaderError, "No config dir: '%s'"%os.environ['PPP_CONFIG_DIR']
+
+# Bundled TerraSAR-X sample tarball and its expected image cross sum.
+# NOTE(review): the variable is spelled 'txs1_file' but the checksum is
+# 'tsx1_sum' -- inconsistent naming, likely a typo for 'tsx1_file'.
+txs1_file = datadir + '/TX01_SAR_SC_GEC_20110825T104705_20110825T104727_NSG_023264_8133_test.TSX.tar'
+tsx1_sum = 3895.1342095
+
+def make_image(mda, img, outdir='.'):
+    """Write *img* as an 8-bit PNG named after mda.product_name.
+
+    Debug aid only: a no-op unless the DEBUG environment variable is set.
+    Requires PIL.  (Same helper as in test_xrit.py, except the DEBUG
+    check happens here at call time rather than at import time.)
+    """
+    if not os.environ.has_key('DEBUG'):
+        return
+    import Image as pil
+    fname = outdir + '/' + mda.product_name + '.png'
+    # Linearly stretch the data to the full 0..255 range.
+    img = ((img - img.min()) * 255.0 /
+           (img.max() - img.min()))
+    # Fill masked pixels with the sensor's no-data value before casting.
+    if type(img) == numpy.ma.MaskedArray:
+        img = img.filled(mda.no_data_value)
+    img = pil.fromarray(numpy.array(img, numpy.uint8))
+    img.save(fname)
+
+def compare_mda(m1, m2):
+    """Return True if the two metadata objects are equivalent.
+
+    Both are first normalised through mipp.mda.mslice (semantics defined
+    in mipp/mda.py); tiff_params is dropped before comparing since it is
+    not present on both sides.  Attributes are compared via _nice2cmp.
+    """
+    m1 = mslice(m1)
+    m2 = mslice(m2)
+    for m in (m1, m2):
+        try:
+            del m.tiff_params
+        except AttributeError:
+            pass
+    # Both objects must carry exactly the same attribute names.
+    k1 = sorted(m1.__dict__.keys())
+    k2 = sorted(m2.__dict__.keys())
+    if not k1 == k2:
+        return False
+    for k in k1:
+        if not _nice2cmp(getattr(m1, k)) == _nice2cmp(getattr(m2, k)):
+            return False
+    return True
+
+class Test(unittest.TestCase):
+
+    def test_tsx(self):
+        """Load the bundled TerraSAR-X tarball (channel 'sarx',
+        calibrated) and regression-check metadata, shape and cross sum.
+        """
+        mda, img = xsar.sat.load_file(txs1_file, 'sarx', calibrate=True)
+        if save_mda:
+            mda.save(mda.product_name + '.mda')
+        # Reference metadata captured from a known-good run.
+        mdac = xsar.Metadata().read(datadir + '/' + mda.product_name + '.mda')
+        cross_sum = img.sum()
+        make_image(mda, img)
+        self.assertTrue(compare_mda(mda, mdac), msg='TSX metadata differ')
+        self.assertTrue(img.shape == (512, 512), msg='TSX image reading failed, wrong shape')
+        # failUnlessAlmostEqual is the pre-2.7 alias of assertAlmostEqual.
+        self.failUnlessAlmostEqual(cross_sum, tsx1_sum, 3, msg='TSX image reading failed')
+
+if __name__ == '__main__':
+    # NOTE(review): save_mda is already False at module level; this
+    # reassignment is the toggle point for regenerating .mda references.
+    save_mda = False
+    unittest.main()
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/mipp.git
More information about the Pkg-grass-devel
mailing list