[doris] 02/14: New upstream version 5.0.3~beta+dfsg

Antonio Valentino a_valentino-guest at moszumanska.debian.org
Mon Aug 7 17:57:34 UTC 2017


This is an automated email from the git hooks/post-receive script.

a_valentino-guest pushed a commit to branch master
in repository doris.

commit 647e1958283a658fcd887ee9c2b6e77ec6b69fbc
Author: Antonio Valentino <antonio.valentino at tiscali.it>
Date:   Sun Aug 6 18:08:52 2017 +0000

    New upstream version 5.0.3~beta+dfsg
---
 ENVISAT_TOOLS/.viminfo                             |   29 -
 INSTALL                                            |   86 -
 LICENSE.txt                                        |  674 ++++++
 MANIFEST.in                                        |   11 +
 README.txt                                         |  118 +
 __init__.py                                        |    0
 bin/construct_dem.sh                               |  592 -----
 bin/doris.adddummystep.sh                          |   61 +
 bin/plotcpm                                        |    2 +-
 bin/rs2_dump_data.py                               |    9 +-
 bin/rs2_dump_header2doris.py                       |  346 ++-
 bin/run                                            |    2 +-
 bin/tsx_dump_header2doris.py                       |   25 +-
 bin/tsx_dump_header2doris_noxpath.py               |   15 +-
 {src => doris_core}/COPYING                        |    0
 src/Makefile.debug-full => doris_core/Makefile     |   80 +-
 {src => doris_core}/Makefile.bert                  |    0
 {src => doris_core}/Makefile.cygwin                |    0
 {src => doris_core}/Makefile.debug                 |    0
 {src => doris_core}/Makefile.debug-full            |    0
 .../Makefile.doris_v3.17_MacOSX10.4_gcc4           |    0
 {src => doris_core}/Makefile.sun                   |    0
 {src => doris_core}/README                         |    0
 {src => doris_core}/TODO                           |    0
 {src => doris_core}/bk_baseline.hh                 |    0
 {src => doris_core}/bk_messages.hh                 |    0
 {src => doris_core}/configure                      |    0
 {src => doris_core}/constants.hh                   |    6 +-
 {src => doris_core}/coregistration.cc              | 1927 ++++++++++++----
 {src => doris_core}/coregistration.hh              |   43 +-
 {src => doris_core}/estorbit.cc                    |    0
 {src => doris_core}/estorbit.hh                    |    0
 {src => doris_core}/exceptions.cc                  |    0
 {src => doris_core}/exceptions.hh                  |    0
 {src => doris_core}/filtering.cc                   |    0
 {src => doris_core}/filtering.hh                   |    0
 {src => doris_core}/geocode.cc                     |    4 +-
 {src => doris_core}/geocode.hh                     |    0
 {src => doris_core}/ioroutines.cc                  |  257 ++-
 {src => doris_core}/ioroutines.hh                  |    4 +
 {src => doris_core}/matrix_test.cc                 |    0
 {src => doris_core}/matrixbk.cc                    |    0
 {src => doris_core}/matrixbk.hh                    |    7 +
 {src => doris_core}/matrixspecs.cc                 |    0
 {src => doris_core}/newsincev2.4                   |    0
 {src => doris_core}/orbitbk.cc                     |    0
 {src => doris_core}/orbitbk.hh                     |    0
 {src => doris_core}/processor.cc                   |  138 +-
 {src => doris_core}/productinfo.cc                 |    0
 {src => doris_core}/productinfo.hh                 |    0
 {src => doris_core}/products.cc                    |   48 +-
 {src => doris_core}/products.hh                    |    5 +-
 {src => doris_core}/readdata.cc                    |  528 ++---
 {src => doris_core}/readdata.hh                    |    0
 {src => doris_core}/readinput.cc                   |  199 +-
 {src => doris_core}/readinput.hh                   |   70 +-
 {src => doris_core}/referencephase.cc              |   39 +-
 {src => doris_core}/referencephase.hh              |    0
 {src => doris_core}/slcimage.cc                    |  225 +-
 {src => doris_core}/slcimage.hh                    |   26 +-
 {src => doris_core}/tmp_strptime.cc                |    0
 {src => doris_core}/unwrap.cc                      |    0
 {src => doris_core}/unwrap.hh                      |    0
 {src => doris_core}/utilities.cc                   |   13 +-
 {src => doris_core}/utilities.hh                   |    2 +
 doris_stack/functions/ESD_functions.py             |  285 +++
 doris_stack/functions/ESD_ps_ds.py                 |  493 ++++
 doris_stack/functions/__init__.py                  |    0
 doris_stack/functions/baselines.py                 |   99 +
 doris_stack/functions/burst_metadata.py            |  153 ++
 doris_stack/functions/cleanup_stack.py             |  125 +
 doris_stack/functions/compassbearing.py            |   26 +
 doris_stack/functions/concatenate_decatenate.py    |  215 ++
 doris_stack/functions/correct_ESD.py               |   51 +
 doris_stack/functions/create_image.py              |   13 +
 doris_stack/functions/do_deramp_SLC.py             |   81 +
 doris_stack/functions/do_reramp_SLC.py             |   99 +
 doris_stack/functions/get_ramp.py                  |  282 +++
 doris_stack/functions/get_winpos.py                |  223 ++
 doris_stack/functions/job.started                  |    0
 doris_stack/functions/load_shape_unzip.py          |  220 ++
 doris_stack/functions/orbit_coordinates.py         |  226 ++
 doris_stack/functions/precise_read.py              |  155 ++
 doris_stack/functions/read_write_data.py           |   62 +
 doris_stack/functions/resdata.py                   |  404 ++++
 .../functions/sentinel_dump_data_function.py       |   78 +
 doris_stack/functions/stack_cleanup.py             |  184 ++
 doris_stack/functions/swath_metadata.py            |  155 ++
 doris_stack/functions/xml_query.py                 |  162 ++
 doris_stack/main_code/__init__.py                  |    0
 doris_stack/main_code/burst.py                     |   95 +
 doris_stack/main_code/doris_config.py              |   27 +
 doris_stack/main_code/doris_main.py                |   28 +
 doris_stack/main_code/doris_parameters.py          |  114 +
 doris_stack/main_code/doris_parameters_path.py     |   12 +
 doris_stack/main_code/doris_profile.py             |   17 +
 doris_stack/main_code/doris_sentinel_1.py          |  179 ++
 doris_stack/main_code/dorisparameters.py           |  120 +
 doris_stack/main_code/grs_config.py                |   27 +
 doris_stack/main_code/grs_profile.py               |   17 +
 doris_stack/main_code/image.py                     |  124 +
 doris_stack/main_code/jobHandlerScript             |   43 +
 doris_stack/main_code/jobs.py                      |  119 +
 doris_stack/main_code/resdata.py                   |  404 ++++
 doris_stack/main_code/single_master_stack.py       | 2411 ++++++++++++++++++++
 doris_stack/main_code/stack.py                     |  735 ++++++
 doris_stack/main_code/swath.py                     |  119 +
 editor_support/Makefile                            |   58 -
 editor_support/README                              |   39 -
 editor_support/vim/syntax/doris.vim                |   70 -
 editor_support/vimrc_example                       |   99 -
 {ENVISAT_TOOLS => envisat_tools}/Makefile          |    2 +-
 {ENVISAT_TOOLS => envisat_tools}/README            |    0
 {ENVISAT_TOOLS => envisat_tools}/envisat_dump_HH.c |    0
 {ENVISAT_TOOLS => envisat_tools}/envisat_dump_VV.c |   44 +-
 .../envisat_dump_data.c                            |    0
 .../envisat_dump_header.c                          |    0
 .../envisat_dump_header2doris.csh                  |    0
 install/INSTALL.txt                                |  116 +
 install/__init__.py                                |    0
 install/doris_config_template.xml                  |   10 +
 install/init_cfg.py                                |   73 +
 prepare_stack/__init__.py                          |    0
 prepare_stack/create_datastack_bash.py             |   77 +
 prepare_stack/create_dem.py                        |  779 +++++++
 prepare_stack/create_doris_input_xml.py            |  163 ++
 prepare_stack/create_inputfiles.py                 |  103 +
 prepare_stack/doris_input_template.xml             |   43 +
 prepare_stack/download_sentinel_data_orbits.py     |  472 ++++
 prepare_stack/inputfile_template.xml               |  154 ++
 prepare_stack/prepare_datastack.py                 |   61 +
 prepare_stack/prepare_datastack_main.py            |   20 +
 {SARtools => sar_tools}/Makefile                   |    6 +-
 {SARtools => sar_tools}/README                     |    0
 {SARtools => sar_tools}/bkconvert.cc               |    0
 {SARtools => sar_tools}/cpxconj.cc                 |    0
 {SARtools => sar_tools}/cpxdiv.cc                  |    0
 {SARtools => sar_tools}/cpxfiddle.cc               |    0
 {SARtools => sar_tools}/cpxmult.cc                 |    0
 {SARtools => sar_tools}/dateconv.cc                |    0
 {SARtools => sar_tools}/flapjack.cc                |    0
 SARtools/floatmult.cc => sar_tools/floatmask.cc    |   49 +-
 {SARtools => sar_tools}/floatmult.cc               |    0
 {SARtools => sar_tools}/rasterheader.cc            |    0
 {SARtools => sar_tools}/readrasterheader.cc        |    0
 {SARtools => sar_tools}/wrap.cc                    |    0
 setup.py                                           |   13 +
 147 files changed, 14182 insertions(+), 2242 deletions(-)

diff --git a/ENVISAT_TOOLS/.viminfo b/ENVISAT_TOOLS/.viminfo
deleted file mode 100755
index cc089af..0000000
--- a/ENVISAT_TOOLS/.viminfo
+++ /dev/null
@@ -1,29 +0,0 @@
-# This viminfo file was generated by Vim 6.1.
-# You may edit it if you're careful!
-
-
-# hlsearch on (H) or off (h):
-~h
-# Command Line History (newest to oldest):
-:q
-
-# Search String History (newest to oldest):
-
-# Expression History (newest to oldest):
-
-# Input Line History (newest to oldest):
-
-# Input Line History (newest to oldest):
-
-# Registers:
-
-# File marks:
-'0  1  0  ~/DEVELOP/DORIS/doris/ENVISAT_TOOLS/envisat_dump_HH.c
-
-# Jumplist (newest first):
--'  1  0  ~/DEVELOP/DORIS/doris/ENVISAT_TOOLS/envisat_dump_HH.c
-
-# History of marks within files (newest to oldest):
-
-> ~/DEVELOP/DORIS/doris/ENVISAT_TOOLS/envisat_dump_HH.c
-	"	1	0
diff --git a/INSTALL b/INSTALL
deleted file mode 100755
index a4357d7..0000000
--- a/INSTALL
+++ /dev/null
@@ -1,86 +0,0 @@
-
-This file "INSTALL" describes the installation of the "Doris" Delft 
-radar interferometric software package.  Doris has been installed on
-virtually all operating systems world-wide (mainly UNIX like,
-for MS Windows(c) your best option is using Cygwin.)
-
-If the following does not work for you, please first search the FAQ via
-  http://enterprise.lr.tudelft.nl/doris
-
-Installation should take approximately 10 minutes of your time.
-
-
-There are 3 components in this distribution (doris v4.01).
-  1. src and bin subdirectory:   main software (C++ source code, python and shell scripts);
-  2. ENVISAT_TOOLS subdir:       standalone reader for ENVISAT (C source code);
-  3. SARtools subdir:            standalone utilities (C++ source code).
-
-
-
-To install Doris, basically do the following:
---------------------------------------------
-1.  Read the manual.
-
-
---------------------------------------------
-- OPTIONAL: installation of FFTW library ---
-- tested for Cygwin, SUN Solaris, Linux ----
---------------------------------------------
-11. download fftw-3.2.1 from http://www.fftw.org : wget -c http://www.fftw.org/fftw-3.2.1.tar.gz
-12. gunzip and tar xvf it in a subdirectory of the doris root installation.
-13. cd fftw-3.2.1
-14. ./configure --prefix=`pwd` --enable-float
-15. make 
-16. make install
-
-
---------------------------------------------
-- COMPILATION OF DORIS SOURCE CODE ---------
---------------------------------------------
-21. cd doris/src
-22. Read the README file
-23. ./configure             (creates "Makefile")  # requires tcsh shell to run, to install type "sudo apt-get install tcsh" at shell prompt on Ubuntu platform.
-                            ( +answer the questions about libraries, etc.)
-24. make                    (compiles the software)
-25. make install            (installs doris and bin scripts)
-
-
---------------------------------------------
-- COMPILATION OF DORIS UTILITIES -----------
---------------------------------------------
-31. cd doris/SARtools
-32. make
-33. Review/edit the Makefile if this does not work
-    (for example if you do not want to use GNU gcc/g++ as compiler)
-34. make install            (installs in /usr/local/bin unless you edit the Makefile)
-
-
-41. cd doris/ENVISAT_TOOLS                         # on 64-bit system requires libc-dev-i386 library ex: "sudo apt-get install libc-dev-i386"
-42. make
-43. Review/edit the Makefile if this does not work
-    (for example if you do not want to use gcc as compiler)
-44. make install
-
-
---------------------------------------------
-- INSTALLATION OF USEFUL EXTERNAL SOFTWARE -
---------------------------------------------
-51. The doris software depends for full functionality on:
-52.   getorb:  precise orbits for ERS/ENVISAT      # requires fortran(ff7, g77 or gfortran)   ex: "sudo apt-get install gfortran" and edit Makefile and update FC = gfortran
-               (used by Doris step M_PORBITS and S_PORBITS).
-53.   snaphu:  standalone executable used for unwrapping 
-               (used by Doris step UNWRAP).
-54.   GMT:     plotting of coregistration offset vectors 
-               (used by Doris via the scripts in bin dir). 
-
-    These programs should also be installed on your system.  Refer
-    to the download area of the doris website 
-      http://enterprise.lr.tudelft.nl/doris/ 
-    where to obtain these programs.
-
-Enjoy,
-
-TUDELFT, MGP Radar Group 2009
-doris_users at tudelft.nl
-
-
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..9cecc1d
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    {one line to give the program's name and a brief idea of what it does.}
+    Copyright (C) {year}  {name of author}
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    {project}  Copyright (C) {year}  {fullname}
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..93d9f5d
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,11 @@
+include *.txt
+include install/*.xml
+include install/*.txt
+include prepare_stack/*.xml
+recursive-include docs *.txt
+include sar_tools/*
+include envisat_tools/*
+include doris_core/*
+include bin/*
+include envisat_tools/epr_api-2.2/*
+include envisat_tools/epr_api-2.2/src/*
\ No newline at end of file
diff --git a/README.txt b/README.txt
new file mode 100644
index 0000000..e43d890
--- /dev/null
+++ b/README.txt
@@ -0,0 +1,118 @@
+=============
+Doris v5 Beta
+=============
+
+Doris 5, the new Doris version, is developed to process stacks of Sentinel-1 images, and it retains all the familiar
+functionality of Doris 4.
+
+This is a beta version, so you may still experience some problems. Please report them to us. But even better,
+try to fix them! We are very happy to discuss with you how you can contribute to this project!
+
+The new Doris version consists of two parts:
+-       The doris_core directory containing the Doris core code, which is similar to the original Doris code and is
+        written in C++. This code is mainly used to create individual interferograms through a series of processing steps.
+-       The doris_stack directory containing scripts written in Python. These scripts automate the processing of a
+        single-master stack of Sentinel-1 images. The scripts manage the processing of the bursts of a Sentinel-1 image,
+        contain algorithms specific to processing Sentinel-1 images and support parallelisation of the processing of the
+        bursts. The functionality of these scripts can be further extended to support more sensors and modes.
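+
+For orientation, the Python layer drives the core roughly like this: it writes Doris input files for the individual
+processing steps and calls the doris executable on them. A minimal sketch of such a manual call, assuming the compiled
+doris executable is on your PATH (the step and card names below are illustrative):
+
+# write a tiny Doris input file with a single processing step
+cat > coarseorb.dorisin <<EOF
+SCREEN   info
+PROCESS  coarseorb
+STOP
+EOF
+doris coarseorb.dorisin   # run the requested step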
+
+In addition, you will find a stack preparation script that automatically downloads the bursts you need for your area of
+interest (which you define with a shapefile), automatically downloads the SRTM DEM associated with this area, and sets up
+your processing structure.
+
+
+Installation
+============
+
+See the INSTALL file in the install directory. This file describes the installation of the compiled (C/C++) components,
+the Python libraries and some utility software.
+
+
+Creating Sentinel-1 datastacks
+==============================
+
+
+Create a folder structure
+-----------------------------
+
+After installing the software you can create your first doris datastack. To do so you have to prepare the following:
+- Create folders to store radar data and orbit files. At a later stage these files can be downloaded automatically,
+    but you can also download them manually yourself.
+- Create a folder where you can store intermediate DEM results. The data will be downloaded automatically, so you only have
+    to create the folder itself. Note that these automatic downloads are based on SRTM data and are therefore limited to
+    latitudes between 60 degrees south and 60 degrees north.
+- Create a .shp file with your area of interest. You can use different software packages, but ArcGIS and QGIS (free) are
+    the most convenient for this purpose. Alternatively, you can download one from a website that offers free shapefiles for
+    administrative boundaries (for example: http://www.diva-gis.org/Data).
+- Finally, create the folder where you want to process your datastack. Be aware that to process your data you will need
+    at least 100 GB of free space on your disk. A sketch of these preparation steps follows below.
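+
+A minimal sketch of these preparation steps, with all paths hypothetical:
+
+# folders for radar data, orbit files, intermediate DEM results and the datastack
+mkdir -p ~/radar_data ~/orbits ~/dem ~/datastack
+# the area-of-interest shapefile can live anywhere, e.g. ~/shapes/aoi.shp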
+
+
+Register for Sentinel and SRTM downloads
+----------------------------------------
+
+Additionally, you will need accounts for downloading Sentinel-1 and SRTM data. You can use the following links to
+create them. (How to link these accounts to the software is described in the INSTALL file.)
+- To register for Sentinel-1 data download use: https://scihub.copernicus.eu/dhus/#/self-registration
+- To register for SRTM download use: https://urs.earthdata.nasa.gov/users/new/
+
+
+Run the stack preparation script
+----------------------------------------
+
+Move to the prepare_stack directory:
+cd prepare_stack
+Run the Python script:
+python prepare_datastack.py
+
+This code will ask you to define the different folders you created before. The script will also ask you whether you want
+to run your code in parallel. Generally, this is recommended as it speeds up processing. Note that either the
+number of cores or the available RAM can be limiting (one process will use about 4 GB of RAM). Because it is not possible to mix
+different orbits in one datastack, it will also ask you which orbit you want to use and whether it is ascending or
+descending. Please check this beforehand on the ESA website (https://scihub.copernicus.eu).
+Finally, the code will ask you for the start date, end date and master date:
+- start date    > What is the first image (in time) you want to process?
+- end date      > What is the last image (in time) you want to process? (Tip: this date can be in the far future if you
+                    just want to download all images up to now)
+- master date   > This image will be used as the master of your stack. Other images will be resampled
+                    to the geometry of this master image.
+After finishing this script, the new datastack is created automatically, together with a DEM of the area. This can take
+a while if download speeds are low or your area is large.
+
+
+Editing the datastack settings (generally not needed)
+------------------------------------------------------
+
+In this folder you will find the newly created DEM, your .shp file, the configuration files (inputfiles) and the stack.
+Furthermore, there is the doris_input.xml file, where all configuration settings for your datastack are stored.
+This file is created in the folder where you will process your datastack. So, if you want to change the configuration
+afterwards, you can make your changes there.
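+
+Such a change can also be scripted. This is an illustration only, since the element name used here is hypothetical
+(check your own doris_input.xml for the actual tags):
+
+# hypothetical: lower the number of parallel cores in the datastack configuration
+sed -i 's|<cores>8</cores>|<cores>4</cores>|' doris_input.xml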
+
+
+Processing
+=========================================
+
+In the main folder of your datastack you will find three bash files. The third one, doris_stack.sh, is described
+below; the first two are:
+create_dem.sh           > Creates a DEM for your area. This is already done if you used the automatic DEM generation.
+download_sentinel.sh    > Downloads the Sentinel images for the specified track over your area of
+                            interest. Only dates between your start and end date are considered. This script will also
+                            download the needed precise or restituted orbit files.
+You can call these scripts using: bash <script_name>
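+
+For example, a first run could look like this (a sketch, assuming you start in the main folder of your datastack):
+
+bash create_dem.sh          # skip if the DEM was already generated automatically
+bash download_sentinel.sh   # fetch the images and orbit files for your track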
+
+After downloading your DEM, radar data and orbit files, you can start your processing with the following command:
+bash doris_stack.sh
+
+or, if your server uses qsub (for parallel processing):
+
+qsub doris_stack.sh
+
+If you want to extend your datastack later on, you can run the scripts again for the same datastack. They will check which
+files are new and process only those. This software is therefore well suited for continuous monitoring.
+Be sure that you do not change your master image in between, as this will break your processing.
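+
+For such continuous monitoring you could re-run the scripts periodically, for instance from cron. A hypothetical
+crontab entry (the path is a placeholder):
+
+# update and process the datastack every night at 02:00
+0 2 * * * cd /path/to/datastack && bash download_sentinel.sh && bash doris_stack.sh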
+
+
+
+Enjoy,
+
+TUDELFT RADAR GROUP 2017
+doris_users at tudelft.nl
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bin/construct_dem.sh b/bin/construct_dem.sh
deleted file mode 100755
index ae6ec48..0000000
--- a/bin/construct_dem.sh
+++ /dev/null
@@ -1,592 +0,0 @@
-#!/bin/bash
-##
-## construct_dem.sh
-## v1.2, 18-12-2009
-## v1.1, 17-12-2008
-## v1.0, 22-08-2006
-## 
-## Made by Freek van Leijen and Zbigniew Perski
-## Delft Institute of Earth Observation and Space Systems
-## Delft University of Technology
-## 
-## Started on  Wed May 17 10:00:00 2006 Freek van Leijen
-## Previous update Mon May 22 17:36:09 2006 Petar Marinkovic
-## Last update Wed June 28 22:28:19 Mahmut Arikan
-## added httpSupport (v1.2) on Fri Dec 18 16:39:39 Batuhan Osmanoglu
-## 
-##
-## DESCRIPTION: Downloads, merges and fills voids of SRTM
-## data based on coordinates of the area of interest.
-## Only basic Linux/Unix commands, wget and GMT are used.
-##
-## NOTE: Scripts will have a problem with Antarctica and
-## when your area crosses the -180/180 meridian.
-##
-## INPUT: [$1] : project_name
-##        [$2] : W
-##        [$3] : E
-##        [$4] : S
-##        [$5] : N
-##        [$6] : srtm_code [SRTM1/3] (optional)
-##        [$7] : link_1 (optional, default is http://dds.cr.usgs.gov/srtm) 
-##        [$8] : link_2 (optional, default is http://dds.cr.usgs.gov/srtm)
-##        [$9] : ftp_user   (optional)
-##       [$10] : ftp_pass   (optional)
-##
-## EXAMPLE: ./construct_dem.sh netherlands 3.3 7.3 50.7 53.7 SRTM3
-##
-## CHANGE.LOG:
-##
-## MA (some modifications in bash part, keeping some more output files, doris missing prm added)
-## FvL fixed bug in input check, explicitly stated ftp_user and ftp_pass
-## FvL changed ftp server for SRTM30, update for Doris v4.01, 2008
-## MA define variable for awk calls
-## Batu added http support, changed folder name to /version2_1/ from version2
-##
-## TODO Skip downloading tiles that never existed; maybe keep a file list of excluded tiles over oceans
-##
-Revision="v1.2"
-
-AWK=`AWK=$(which nawk 2> /dev/null); [ "$AWK" == "" ] && echo $(which awk) || echo $AWK` # MA awk variable: 1. look for nawk (on old systems) else use awk (gnu awk)
-
-# -- FUNCTIONS declarations [modularity] -----
-
-# ftpcall or wget
-downloadFile(){
-serverType=`echo ${3}| awk 'BEGIN{ FS=":"};{print $1}'`
-case "${serverType}" in 
-  ftp )
-  ftp_call $1 $2 $3 $4 $5  
-  ;;
-  http )
-  #generate index
-  i=`echo ${3}| awk 'BEGIN{ FS="/"};{print $3}'`
-  if [ ! -e ${i} ] 
-  then 
-    generateIndexFile ${3} ${i}
-  fi
-  tileUrl=`grep "$1/${2}" ${i}| head -n1`
-  [ ! -z ${tileUrl} ] && wget ${tileUrl}
-  ;;
-esac
-}
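-
-# Example call with illustrative values (remote dir, tile, server, user, password):
-#   downloadFile srtm/version2_1/SRTM3/Eurasia N52E004.hgt.zip http://dds.cr.usgs.gov/srtm/ anonymous anonymous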
-
-# generates the file list of http site
-generateIndexFile(){
-servername=$1
-filename=$2
-#generate temporary folder
-str0=$$ #get PID
-POS=2  # Starting from position 2 in the string.
-LEN=8  # Extract eight characters.
-str1=$( echo "$str0" | md5sum | md5sum )
-# Doubly scramble:     ^^^^^^   ^^^^^^
-tempFolder="${str1:$POS:$LEN}"
-#projectFolder=$PWD
-mkdir ${tempFolder}
-wget -q -k -nd -r -P"./${tempFolder}" -A*.html* "${servername}" #q:quiet, k:convert links, nd:no directories, r:recursive, P:output directory prefix, A:accept pattern
-cat ./${tempFolder}/index.html.* | egrep -o "http:.*version2.*SRTM1.*hgt*" | cut -f1 --delimiter=\" >   ${filename}
-cat ./${tempFolder}/index.html.* | egrep -o "http:.*version2.*SRTM3.*hgt*" | cut -f1 --delimiter=\" >>  ${filename}
-cat ./${tempFolder}/index.html.* | egrep -o "http:.*version2.*SRTM30.*dem*" | cut -f1 --delimiter=\" >> ${filename}
-#cat index.html.* | egrep -o "http:.*version1.*SRTM3.*dem*" | cut -f1 --delimiter=\" >> ${projectFolder}/${filename}
-rm -rf ./${tempFolder}
-[ $? -ne 0 ] && exit 1
-}
-
-# downloads DEM patch from ftp site
-ftp_call()
-{
-remotedir=$1
-remotefile=$2
-ftp_link=$3
-ftp_user=$4
-ftp_pass=$5
-ftp -inv $ftp_link << EOF >> ftp_log 
-user $ftp_user $ftp_pass
-binary
-cd $remotedir
-pwd
-get $remotefile
-bye
-EOF
-}
-
-#MA more functions
-#printing parameters N E S W
-pPRM(){ echo -e "Parameters:\n\t\t\tNorth:$north_border\n\tWest:$west\t\t\t\tEast:$east_border
-\n\t\t\tSouth:$south  "; }
-
-#MA need by below case statement
-ckSRTM(){ 
-	   srtm=`echo $srtm | tr a-z A-Z` # change to upper case
-	   [[ $srtm != "SRTM1" && $srtm != "SRTM3" ]] && \
-	   echo -e "\nNot a valid SRTM version $srtm !!! See usage.\n" && exit 127;
-	   #echo -e "\nUsing SRTM Version \033[01;33m$srtm \033[00m";
-	   echo -e "\nUsing SRTM Version $srtm";
-}
-
-pFTP(){
-	   echo -e "\nSRTM   url: $ftp_link_1"
-	   echo -e "SRTM30 url: $ftp_link_2"
-}
-
-
-# arguments substitution and checks
-
-# Create/check project directory
-project=$1
-[ ! -d ${project} ] && mkdir $project
-
-# FLOOR FIRST(!) integers and then check
-west=`echo ${2%%.*}  | ${AWK} '{ if ($1 < 0) $1 = $1 - 1; print $1 }'`
-east=`echo ${3%%.*}  | ${AWK} '{ if ($1 < 0) $1 = $1 - 1; print $1 }'`
-south=`echo ${4%%.*} | ${AWK} '{ if ($1 < 0) $1 = $1 - 1; print $1 }'`
-north=`echo ${5%%.*} | ${AWK} '{ if ($1 < 0) $1 = $1 - 1; print $1 }'`
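-# (illustrative) e.g. 3.3 -> 3 and -3.3 -> -4, i.e. floor toward minus infinity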
-
-#FvL
-east_border=$[$east+1] 
-north_border=$[$north+1]
-
-# MA checking parameters
-# input arguments : check
-# check on arguments $7 and $8, if specified parsed, otherwise default ftp 
-# links used
-
-case $# in
-	5) pPRM;  
-	   srtm="SRTM3"; # default
-	   echo -e "\nUsing default SRTM Version $srtm";
-	   #ftp_link_1="e0srp01u.ecs.nasa.gov"; ftp_link_2="e0srp01u.ecs.nasa.gov";
-	   ftp_link_1="http://dds.cr.usgs.gov/srtm/";ftp_link_2="http://dds.cr.usgs.gov/srtm/" 
-	   ftp_user="anonymous"; ftp_pass="anonymous";
-	   pFTP;
-	   #exit 127 #debug
-	   ;;
-	6) pPRM; srtm=$6
-	   ckSRTM;
-	   #ftp_link_1="e0srp01u.ecs.nasa.gov"; ftp_link_2="e0srp01u.ecs.nasa.gov"; 
-	   ftp_link_1="http://dds.cr.usgs.gov/srtm/";ftp_link_2="http://dds.cr.usgs.gov/srtm/" 
-	   ftp_user="anonymous"; ftp_pass="anonymous";
-	   pFTP;
-	   ;;
-	7) pPRM; srtm=$6
-           ckSRTM;
-	   #ftp_link_1=$7; ftp_link_2="e0srp01u.ecs.nasa.gov";
-	   ftp_link_1=$7; ftp_link_2="http://dds.cr.usgs.gov/srtm/"
-	   ftp_user="anonymous"; ftp_pass="anonymous";
-	   pFTP ;;
-	8) pPRM; srtm=$6
-	   ckSRTM; 
-	   ftp_link_1=$7; ftp_link_2=$8;
-	   pFTP ;;
-	9) pPRM; srtm=$6
-	   ckSRTM; 
-	   ftp_link_1=$7; ftp_link_2=$8;
-           ftp_user=$9; ftp_pass=;
-	   pFTP ;;
-	10) pPRM; srtm=$6
-	   ckSRTM; 
-	   ftp_link_1=$7; ftp_link_2=$8;
-           ftp_user=$9; ftp_pass=${10}; # careful with ${1} vs ${10}
-	   pFTP ;;
-	*) echo -e "Doris software,  Revision: $Revision,  Author: TUDelft  ";
-	   echo -e "\n[Usage]    : `basename $0` project W E S N SRTM[1|3] <ftp1> <ftp2>" ;
-	   echo -e "\n[Example]  : `basename $0` netherlands 3.3 7.3 50.7 53.7 SRTM3 "
-	   echo -e "\n[ftp sites]: SRTM: e0srp01u.ecs.nasa.gov & SRTM30: topex.ucsd.edu (defaults)" 
-           echo -e "\n[awk ver.] : ${AWK} - $(${AWK} -W version | head -n 1) \n"
-             which xyz2grd &> /dev/null || \
-	     echo -e "[remark]   : this script requires that GMT package is installed on your system.\n";  # MA
-	   echo -e "\nPlease check parameters: $* \n"; 
-           exit;;
-esac
-
- 
-#MA check if west > east, if so exit. BTW  no check for NORTHING in the code
-[ ${west%%.*} -gt ${east%%.*} ] && echo -e "\n E:$east can't be less than W:$west! \n" && exit 127
-#MA check if south > north, if so exit.
-[ ${south%%.*} -gt ${north%%.*} ] && echo -e "\n N:$north can't be less than S:$south! \n" && exit 127
-
-#MA keep some of intermediate files, see end of this script:
-echo -e "\nDo you want to keep the following intermediate files:  "
-echo -e "${project}/srtm_${project}.grd \n${project}/srtm30_${project}_merged.grd \
-\n${project}/final_${project}.grd \t (Y/N) or Ctrl-C to exit? \c"
-read keep
-
-#MA SRTM remote dir definitions
-case $srtm in
-		SRTM1) dir_list="Region_01 Region_02 Region_03 Region_04 Region_05 Region_06 Region_07"
-  			 gmt_format=1c ;;
-		SRTM3) dir_list="Africa Australia Eurasia Islands North_America South_America"
-			 gmt_format=3c ;;
-		*) echo "ERROR: You did not specify the srtm version (correctly)"; exit 127 ;;
-esac
-echo -e " Ftp dirlist:\n \b$dir_list"
-
-
-# define output files
-outfile1=${project}/srtm_${project}.grd
-outfile2=${project}/srtm30_${project}_merged.grd
-outfile3=${project}/srtm30_${project}.grd
-outfile4=${project}/final_${project}.grd
-outfile5=${project}/final_${project}.dem
- 
-# download srtm and merge the tiles
-echo ""
-echo "--------------------------------------------------"
-echo "Downloading srtm and merging the tiles ..."
-echo "--------------------------------------------------"
-echo ""
-countb=1
-for ((long=$west; long <= $east; long++))
-do
-  counta=1
-  for ((lat=$south; lat <= $north; lat++))
-  do
-    long1=$long
-    lat1=$lat
-
-    if [ $long1 -lt 0 ] && [ $lat1 -lt 0 ]
-    then 
-      let "long1 = (( 0 - $long1 ))"
-      let "lat1 = (( 0 - $lat1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=S${lat1}W${long1}.hgt
-
-    elif [ $long1 -lt 0 ] && [ $lat1 -ge 0 ]
-    then 
-      let "long1 = (( 0 - $long1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=N${lat1}W${long1}.hgt
-
-    elif [ $long1 -ge 0 ] && [ $lat1 -lt 0 ]
-    then 
-      let "lat1 = (( 0 - $lat1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=S${lat1}E${long1}.hgt
-
-    elif [ $long1 -ge 0 ] && [ $lat1 -ge 0 ]
-    then
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=N${lat1}E${long1}.hgt
-    fi
-
-    echo "Downloading" $file "..."
-    if [ ! -e "$file" ]
-    then
-      # download the tile
-      for dir in $dir_list
-        do
-        remotedir=srtm/version2_1/${srtm}/${dir}
-        echo "Checking" ${remotedir} "..."
-        downloadFile $remotedir ${file}.zip $ftp_link_1 $ftp_user $ftp_pass
-
-        if [ -e "${file}.zip" ]
-        then
-          echo "File found"
-          unzip -o ${file}.zip
-          rm ${file}.zip
-          break
-        fi
-      done
-    else
-      echo "File already existed."
-    fi
-    
-    # merge the tiles in latitude direction using GMT
-    xyz2grd $file -G${project}/tile.grd -I$gmt_format -R$long/$[$long+1]/$lat/$[$lat+1] -ZTLhw
-    if [ "$counta" = "1" ]; then
-	mv ${project}/tile.grd ${project}/srtm_$long.grd
-    else
-	grdpaste ${project}/tile.grd ${project}/srtm_$long.grd -G${project}/srtm_$long.grd
-    fi
-    counta=$[$counta+1]
-
-  done
-  
-  # merge the tiles in longitude direction using GMT
-  if [ "$countb" = "1" ]; then
-      mv ${project}/srtm_$long.grd $outfile1
-  else
-      grdpaste ${project}/srtm_$long.grd $outfile1 -G$outfile1
-  fi
-  countb=$[$countb+1]
-
-done
-
-
-# determine srtm30 tile(s) (to fill voids)
-# based on the 4 corners of the area
-# 
-# one extra degree is added (or subtracted) to avoid problems
-# on the borders of srtm30 (caused by the different sampling
-# rate and grid start of srtm1/3 and srtm30)
-cnr_long[0]=$[$west-1]
-cnr_long[1]=$[$west-1]
-cnr_long[2]=$[$east+2]
-cnr_long[3]=$[$east+2]
-cnr_lat[0]=$[$south-1]
-cnr_lat[1]=$[$north+2]
-cnr_lat[2]=$[$south-1]
-cnr_lat[3]=$[$north+2]
-
-for ((v=0; v<=3; v++))
-do
-  for ((Xlat=-60; Xlat<=90; Xlat=Xlat+50))
-  do
-    let "temp = $Xlat-${cnr_lat[${v}]}"
-    if [ $temp -ge 0 ] && [ $temp -lt 50 ]
-    then
-      Xlat1[${v}]=$Xlat
-      if [ ${Xlat1[${v}]} != -60 ]
-      then
-        for ((Xlong=-180; Xlong<=140; Xlong=Xlong+40))
-        do
-          let "temp = $Xlong-${cnr_long[${v}]}"
-          if [ $temp -ge -40 ] && [ $temp -lt 0 ]
-          then
-            Xlong1[${v}]=$Xlong
-          fi
-        done
-      else
-        for ((Xlong=-180; Xlong<=120; Xlong=Xlong+60))
-        do
-          let "temp = $Xlong-${cnr_long[${v}]}"
-          if [ $temp -ge -40 ] && [ $temp -lt 0 ]
-          then
-            Xlong1[${v}]=$Xlong
-          fi
-        done
-      fi
-    fi
-  done
-done
-
-# determine the unique tile(s)
-Xlat=`echo ${Xlat1[*]} | ${AWK} '{for (v = 1; v<=4; v++) print $v}' | sort | uniq | sort -n` 
-Xlong=`echo ${Xlong1[*]} | ${AWK} '{for (v = 1; v<=4; v++) print $v}' | sort | uniq | sort -n` 
-
-# download srtm30 (and merge the tiles)
-echo ""
-echo "--------------------------------------------------"
-echo "Downloading srtm30 and merging the tiles ..."
-echo "--------------------------------------------------"
-echo ""
-countb=1
-for long in $Xlong
-do
-  counta=1
-  for lat in $Xlat
-  do
-    long1=$long
-    lat1=$lat
-
-    if [ $long1 -lt 0 ] && [ $lat1 -lt 0 ]
-    then 
-      let "long1 = (( 0 - $long1 ))"
-      let "lat1 = (( 0 - $lat1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=W${long1}S${lat1}.DEM
-      file2=w${long1}s${lat1}
-
-    elif [ $long1 -lt 0 ] && [ $lat1 -ge 0 ]
-    then 
-      let "long1 = (( 0 - $long1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=W${long1}N${lat1}.DEM
-      file2=w${long1}n${lat1}
-
-    elif [ $long1 -ge 0 ] && [ $lat1 -lt 0 ]
-    then 
-      let "lat1 = (( 0 - $lat1 ))"
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=E${long1}S${lat1}.DEM
-      file2=e${long1}s${lat1}
-
-    elif [ $long1 -ge 0 ] && [ $lat1 -ge 0 ]
-    then
-      long1=`echo $long1 | ${AWK} '{printf ("%03d",$1)}'`
-      lat1=`echo $lat1 | ${AWK} '{printf ("%02d",$1)}'`
-      file=E${long1}N${lat1}.DEM
-      file2=e${long1}n${lat1}
-    fi
-    
-    echo "Downloading" $file "..."
-    if [ ! -e "$file" ]
-    then
-      # download the tile
-      remotedir=srtm/version2_1/SRTM30/$file2
-      downloadFile $remotedir ${file2}.dem.zip $ftp_link_2 $ftp_user $ftp_pass
-
-      if [ -e "${file2}.dem.zip" ]
-      then
-        echo "File found"
-        unzip -o ${file2}.dem.zip
-        rm ${file2}.dem.zip
-      fi
-    else
-      echo "File already existed."
-    fi
-  
-    # merge the tiles in latitude direction using GMT
-    west1=$(echo "$long+0.004166667" | bc)
-    east1=$(echo "$long+40-0.004166667" | bc)
-    south1=$(echo "$lat-50+0.00416667" | bc)
-    north1=$(echo "$lat-0.004166667" | bc)
-    xyz2grd $file -G${project}/tile.grd -I30c -R$west1/$east1/$south1/$north1 -ZTLhw
-    if [ "$counta" = "1" ]; then
-	mv ${project}/tile.grd ${project}/srtm30_$long.grd
-        south_first=$south1
-    else
-        south2=$(echo "$lat-50-0.004166667"| bc)
-        grdmath -R$west1/$east1/$south2/$south1 -I30c 0 0 NAN = ${project}/dummy.grd
-        grdpaste ${project}/dummy.grd ${project}/tile.grd -G${project}/tile.grd
-	grdpaste ${project}/tile.grd ${project}/srtm30_$long.grd -G${project}/srtm30_$long.grd
-    fi
-    counta=$[$counta+1]
-
-  done
-  
-  # merge the tiles in longitude direction using GMT
-  if [ "$countb" = "1" ]; then
-      mv ${project}/srtm30_$long.grd $outfile2
-  else
-      west2=$(echo "$long-0.004166666" | bc)
-      grdmath -R$west2/$west1/$south_first/$north1 -I30c 0 0 NAN = ${project}/dummy.grd
-      grdpaste ${project}/dummy.grd ${project}/srtm30_$long.grd -G${project}/srtm30_$long.grd
-      grdpaste ${project}/srtm30_$long.grd $outfile2 -G$outfile2
-  fi
-  countb=$[$countb+1]
-
-done
-
-echo ""
-echo "--------------------------------------------------"
-echo "Filling voids ..."
-echo "--------------------------------------------------"
-echo ""
-
-# resample to the same resolution as the SRTM data (-I$gmt_format) and trim to the same size
-grdsample $outfile2 -G$outfile3 -I$gmt_format -R${west}/$[${east}+1]/${south}/$[${north}+1]
-
-# define NaN in SRTM (NAN=-32768):
-grdmath  $outfile1 -32768 NAN = ${project}/nan.grd
-
-#void fill with GMT AND operator (A AND B: NaN if both A and B are NaN, B if only A is NaN, else A)
-
-grdmath ${project}/nan.grd $outfile3 AND = $outfile4
-
-
-# write result to binary file (which Doris can read)
-grd2xyz $outfile4 -Zf > $outfile5
-
-echo ""
-echo "--------------------------------------------------"
-echo "Creating output ..."
-echo "--------------------------------------------------"
-echo ""
-
-# write Doris input lines to file (input.doris_comprefdem)
-input_doris=${project}/input.doris_${project}
-xmin=`grdinfo ${outfile4} | grep x_min | sed 's/.*x_min: //g' | sed 's/x_max.*//g'`
-ymax=`grdinfo ${outfile4} | grep y_max | sed 's/.*y_max: //g' | sed 's/y_inc.*//g'`
-xinc=`grdinfo ${outfile4} | grep x_inc | sed 's/.*x_inc: //g' | sed 's/name.*//g'| sed 's/units.*//g'`
-yinc=`grdinfo ${outfile4} | grep y_inc | sed 's/.*y_inc: //g' | sed 's/name.*//g'| sed 's/units.*//g'`
-Nx=`grdinfo ${outfile4} | grep nx | sed 's/.*nx: //g'`
-Ny=`grdinfo ${outfile4} | grep ny | sed 's/.*ny: //g'`
-dempath=`pwd`
-
-echo -e "# The processing cards generated by $(basename $0) script." > $input_doris
-echo -e "# Using parameters: $@" >> $input_doris
-echo -e '# Copy the section(s) that is/are necessary to your processing setup.\n' >> $input_doris
-echo "c         ___             ___" >> $input_doris
-echo "comment   ___SIM AMPLITUDE___" >> $input_doris
-echo "c                            " >> $input_doris
-echo "SAM_IN_DEM     $dempath/$outfile5" >> $input_doris
-echo -e "SAM_IN_FORMAT   r4 \t\t\t // default is short integer"  >> $input_doris
-echo "SAM_IN_SIZE    $Ny $Nx" >> $input_doris
-echo "SAM_IN_DELTA   $yinc $xinc" >> $input_doris
-echo "SAM_IN_UL      $ymax $xmin"  >> $input_doris
-echo "SAM_IN_NODATA  -32768" >> $input_doris
-echo -e "SAM_OUT_FILE   master.sam \t // master simulated amplitude" >> $input_doris
-echo -e "# SAM_OUT_DEM_LP   master_demhei_lp.raw \t // radarcoded dem to master extend" >> $input_doris
-echo -e "# SAM_OUT_THETA_LP  master_theta_lp.raw \t // radarcoded dem to master extend" >> $input_doris
-echo " " >> $input_doris
-echo " " >> $input_doris
-echo "c         ___          ___" >> $input_doris
-echo "comment   ___DEM ASSIST___" >> $input_doris
-echo "c                            " >> $input_doris
-echo "DAC_IN_DEM     $dempath/$outfile5" >> $input_doris
-echo -e "DAC_IN_FORMAT   r4 \t\t\t // default is short integer"  >> $input_doris
-echo "DAC_IN_SIZE    $Ny $Nx" >> $input_doris
-echo "DAC_IN_DELTA   $yinc $xinc" >> $input_doris
-echo "DAC_IN_UL      $ymax $xmin"  >> $input_doris
-echo "DAC_IN_NODATA  -32768" >> $input_doris
-echo " " >> $input_doris
-echo " " >> $input_doris
-echo "c         ___             ___" >> $input_doris
-echo "comment   ___REFERENCE DEM___" >> $input_doris
-echo "c                            " >> $input_doris
-echo "## CRD_METHOD   DEMINTRPMETHOD" >> $input_doris
-echo "CRD_IN_DEM     $dempath/$outfile5" >> $input_doris
-echo -e "CRD_IN_FORMAT   r4 \t\t\t // default is short integer"  >> $input_doris
-echo "CRD_IN_SIZE    $Ny $Nx" >> $input_doris
-echo "CRD_IN_DELTA   $yinc $xinc" >> $input_doris
-echo "CRD_IN_UL      $ymax $xmin"  >> $input_doris
-echo "CRD_IN_NODATA  -32768" >> $input_doris
-echo -e "CRD_OUT_FILE   master_slave.crd \t // reference dem phase" >> $input_doris
-
-# ---------------------
-# visualization of SRTM
-# ---------------------
-
-
-#global settings:
-gmtdefaults -D >.gmtdefaults
-gmtset DOTS_PR_INCH 1200 
-gmtset PAPER_MEDIA A2+
-gmtset PAGE_ORIENTATION portrait
-
-# parameters
-area=-R${west}/$[${east}+1]/${south}/$[${north}+1]
-let "central_long = (( ($west+$[${east}+1])/2 ))"
-projection=-Jt${central_long}/1:3000000
-out=${project}/srtm_${project}.ps
-
-
-grdgradient $outfile4 -Ne0.6 -A45/315 -G${project}/gradient.grd
-
-echo "-10000 150 10000 150" > ${project}/gray.cpt
-#makecpt -T-50/2000/1 -Cz_per.cpt > pl_gtopo30.cpt
-
-grdimage $outfile4 -I${project}/gradient.grd $area $projection -C${project}/gray.cpt -K > $out
-
-pscoast $area $projection -O -Df  -W -I1/1p/0/0/255 -I2/0.5p/0/0/255 -I3/0.25p/0/0/255 -I4/0.25p/0/0/255 -Ic/0.25p/0/0/255 -Ba1g0/0.5g0 -N1/0.35tap >> $out
-
-#MA Clean up
-#keep srtm_tr.grd srtm30_tr_merged.grd final_tr.grd 
-case $keep in
-	y|Y) # Keep intermediate files
-	     rm -rf $project/srtm_[0-9][0-9].grd  $project/nan.grd \
-	     $project/dummy.grd $project/*.cpt $project/gradient.grd \
-	     $project/srtm30_tr.grd $project/tile.grd
-	     echo "--------------------------------------------------"
-	     echo "Kept intermediate files!"
-	     echo "--------------------------------------------------";;
-	  *) # Full clean up, just .dem .ps input.doris_comprefdem
-	     rm -rf ${project}/*.grd; rm -rf ${project}/*.cpt
-	     echo -e "Outputs: \n\t$outfile5 \n\t$input_doris \n\t$out" ;;
-esac
-
-
-echo ""
-echo "--------------------------------------------------"
-echo "Done!"
-echo "--------------------------------------------------"
-echo ""
diff --git a/bin/doris.adddummystep.sh b/bin/doris.adddummystep.sh
new file mode 100755
index 0000000..4c334c4
--- /dev/null
+++ b/bin/doris.adddummystep.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+##
+##
+## AUTHOR:   Freek van Leijen, based on original doris.rmstep.sh script by Mahmut Arikan
+## EMAIL:    F.J.vanLeijen at tudelft.nl
+## VERSION:  v1.0
+## DATE:     20141024
+##
+## TUDelft, Radar Group  - 2014
+##
+##
+## adds a dummy process in a Doris .res file
+##
+
+#sed -in -e '/coarse_correl/,/End_process_control/s/1/0/gp' -e '/Start_coarse_correl/,$d' 08956_09958.res
+#$d indicates until last line.
+
+case $# in
+        2) #    process_name                                              res files
+           lineno=$(awk '/'$1':/{print NR-1}' $2);  # get me the line number of the process line - 1
+           [[  "$lineno" == "" ]] && echo -e "No such entry: $1 in $2 .\n"  && exit 127
+
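+           # mark the step as done: on the process_control line starting with
+           # the given process name, flip its 0 flag to 1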
+           if [ "`uname`" == "Darwin" ]; then     # MAC_OSX
+             sed -i .bak2 -e '/^'$1'/s/0/1/g' $2
+           else                             # other platforms
+             sed -i -e '/^'$1'/s/0/1/g' $2
+           fi
+
+           echo " " >> $2
+           echo "*******************************************************************" >> $2
+           echo "*_Start_"$1":" >> $2
+           echo "*******************************************************************" >> $2
+           echo "Dummy" >> $2
+           echo "*******************************************************************" >> $2
+           echo "* End_"$1":_NORMAL" >> $2
+           echo "*******************************************************************" >> $2
+            
+           ;;
+        *) echo -e "\nUSAGE: ${0##*/} <doris_process_name> <.res> \n";
+           echo -e "   EX: ${0##*/} coarse_correl   master_slave.res \n ";
+           echo -e "       ${0##*/}      resample   slave.res \n ";
+           echo -e "Doris process names by order:
+ 1.  precise_orbits      14. resample
+ 2.  crop                15. interfero
+ 3.  sim_amplitude       16. comp_refphase
+ 4.  master_timing       17. subtr_refphase
+ 5.  filt_azi            18. comp_refdem
+ 6.  filt_range          19. subtr_refdem
+ 7.  oversample          20. coherence
+ 8.  coarse_orbits       21. filtphase
+ 9.  coarse_correl       22. unwrap
+10.  fine_coreg          23. slant2h
+11.  timing_error        24. geocoding
+12.  dem_assist          25. dinsar
+13.  comp_coregpm        26. <extra>     
+                          
+                                               " ;
+echo -e "\n Thank you for using Doris!\n  TU Delft - DEOS Radar Group 2011 \n";
+exit 0;;
+esac
+
diff --git a/bin/plotcpm b/bin/plotcpm
index 6aed593..33d5b5d 100755
--- a/bin/plotcpm
+++ b/bin/plotcpm
@@ -255,7 +255,7 @@ if ( $DOBG ) then
   @ SIZECI2  = $WIDTH * $HEIGHT * 4
   #@ SIZECR4  = $WIDTH * $HEIGHT * 8  # [LEN] tcsh produces integer overflow with large files
   set SIZECR4  = ` awk 'BEGIN{print '$WIDTH' * '$HEIGHT' * 8}' `
-  set FSIZE  = `ls -l $SLCFILE | $AWK '{print $5}'`
+  set FSIZE  = `ls -l $SLCFILE | $AWK '{print $6}'`
   if ( $FSIZE == $SIZECI2 ) then
     echo " *** File format ${SLCFILE}: complex short."
     set SLCFORMAT = ci2
diff --git a/bin/rs2_dump_data.py b/bin/rs2_dump_data.py
index f614344..e8203b2 100755
--- a/bin/rs2_dump_data.py
+++ b/bin/rs2_dump_data.py
@@ -6,10 +6,12 @@
 #
 # Author: TUDelft - 2010
 # Maintainer: Mahmut Arikan
-# License: GPL
+#
+# Developed based on tsx_dump_data.py code.
 #
 #-----------------------------------------------------------------#
 import os,sys,time
+from os import path
 
 import xml.etree.ElementTree as etree  # parameters required for cropping and flipping
 
@@ -76,10 +78,7 @@ else:
 
 
 # Extract some important parameters for coordinate manipulation
-#
-#tree         = etree.parse('product.xml')  # HARDCODED filename 
-tmpfield     = inputFileName.split('/')
-productfile  = '/'.join(tmpfield[:-1]) + '/' + 'product.xml'
+productfile  = path.join(path.dirname(inputFileName), 'product.xml')
 tree         = etree.parse(productfile) # 
 NS           = 'http://www.rsi.ca/rs2/prod/xml/schemas'
 PASS         = tree.find('.//{%s}passDirection'     % NS).text     # Ascending or Descending
diff --git a/bin/rs2_dump_header2doris.py b/bin/rs2_dump_header2doris.py
index ef52383..914a3a3 100755
--- a/bin/rs2_dump_header2doris.py
+++ b/bin/rs2_dump_header2doris.py
@@ -6,7 +6,6 @@
 #
 # Author: TUDelft - 2010
 # Maintainer: Mahmut Arikan
-# License: GPL
 #
 # Developed based on tsx_dump_header2doris.py.
 #
@@ -15,10 +14,12 @@
 
 # NO XPATH version
 # 2012.Nov  Fix for ascending mode time parameters Samie Esfahany and Mahmut
+# 2013.Sep  Fix for UF and MF acquisition modes and cleanup, Piers van der Torren
 #-----------------------------------------------------------------#
 
 from lxml import etree
-import string, time, sys
+import sys
+from datetime import datetime
 
 #try:
 #    from lxml import etree
@@ -26,7 +27,7 @@ import string, time, sys
 #    import xml.etree.ElementTree as etree
 
 
-codeRevision=1.1   # this code revision number
+codeRevision=1.2   # this code revision number
 
 def usage():
     print 'INFO    : @(#)Doris InSAR software, $Revision: %s $, $Author: TUDelft $' % codeRevision
@@ -46,157 +47,140 @@ except:
     usage()
     sys.exit(1)
 
-inTree = etree.parse(inputFileName)
 
+# Helper functions
+def nsmap_none(path, ns='None:'):
+    """ Add a namespace to each tag in the given path which doesn't have one.
+    """
+    def add_ns_if_none(tag):
+        if tag in ('', '.', '*') or ':' in tag:
+            return tag
+        else:
+            return ''.join((ns, tag))
+    return '/'.join(add_ns_if_none(tag) for tag in path.split('/'))
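+
+# Example (illustrative): with the ns defined below,
+# nsmap_none('imageAttributes//numberOfLines', ns) prefixes each bare tag:
+# '{http://www.rsi.ca/rs2/prod/xml/schemas}imageAttributes//{http://www.rsi.ca/rs2/prod/xml/schemas}numberOfLines'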
 
+def hms2sec(hmsString):
+    """ input hmsString syntax: XX:XX:XX.xxxxxx
+    """
+    return int(hmsString[0:2])*3600 + \
+        int(hmsString[3:5])*60 + \
+        float(hmsString[6:])
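+
+# Example (illustrative): hms2sec('12:34:56.789') returns 45296.789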
 
 # constants
-SOL             = 299792458.0;    # speed of light
+SOL = 299792458.0    # speed of light
+dateformat = '%Y-%m-%dT%H:%M:%S.%fZ'
 
-# get default namespace
-uri=inTree.getroot().nsmap[None]  # xmlns="http://www.rsi.ca/rs2/prod/xml/schemas" --> http://www.rsi.ca/rs2/prod/xml/schemas
-NS='{'+uri+'}'                    # 
+# default namespace
+nsmap = {None:"http://www.rsi.ca/rs2/prod/xml/schemas"}
+ns = '{' + nsmap[None] + '}'
 
-# inTree.find('{http://www.rsi.ca/rs2/prod/xml/schemas}sourceAttributes/{http://www.rsi.ca/rs2/prod/xml/schemas}inputDatasetId').text
-# print etree.tounicode(inTree) # debug
+
+inTree = etree.parse(inputFileName)
 
 # query syntax for every field
-queryList = {\
+queryList = {
              # mission info
-             'mission'             : './/%ssourceAttributes/%ssatellite' % (NS,NS),\
-             # imageData file
-             # fullResolutionImageData pole="HH" # inTree.findall('.//imageAttributes/fullResolutionImageData')[0].text, and more [1] .. [3]
-             'imageData'           : './/%simageAttributes/%sfullResolutionImageData' % (NS,NS),\
-             #'imageLines'          : './/imageAttributes/rasterAttributes/numberOfLines',\
-             'imageLines'          : './/%simageAttributes//%snumberOfLines' % (NS,NS),\
-             'imagePixels'         : './/%simageAttributes//%snumberOfSamplesPerLine' % (NS,NS),\
+             'mission'             : 'sourceAttributes/satellite',
+             # imageFile file
+             # fullResolutionImageData pole="HH" # inTree.findall('imageAttributes/fullResolutionImageData')[0].text, and more [1] .. [3]
+             'imageFile'           : 'imageAttributes/fullResolutionImageData',
+             #'imageLines'          : 'imageAttributes/rasterAttributes/numberOfLines',
+             'imageLines'          : 'imageAttributes//numberOfLines',
+             'imagePixels'         : 'imageAttributes//numberOfSamplesPerLine',
+             'imageLineSpacing'    : 'imageAttributes//sampledLineSpacing',
+             'imagePixelSpacing'   : 'imageAttributes//sampledPixelSpacing',
              # volume info
-             #'volFile' : './/productComponents/annotation/file/location/filename',\ # HARDCODED!!! for radarsat-2
+             #'volFile' : 'productComponents/annotation/file/location/filename', # HARDCODED!!! for radarsat-2
              # following smt like Level 1B Product, check manual
-             'volID'               : './/%sproductId' % (NS),\
-             'volRef'              : './/%sdocumentIdentifier' % (NS),\
+             'volID'               : 'productId',
+             'volRef'              : 'documentIdentifier',
              # product info
-             #productSpec'          : './/generalHeader/referenceDocument',\  # TSX
-             'productSpec'         : './/%sdocumentIdentifier' % (NS),\
-             'productVolDate'      : './/%simageGenerationParameters//%sprocessingTime' % (NS,NS),\
-             'productSoftVer'      : './/%simageGenerationParameters//%ssoftwareVersion' % (NS,NS),\
-             'productDate'         : './/%ssourceAttributes/%srawDataStartTime' % (NS,NS),\
-             'productFacility'     : './/%simageGenerationParameters//%sprocessingFacility' % (NS,NS),\
+             #productSpec'          : 'generalHeader/referenceDocument',  # TSX
+             'productSpec'         : 'documentIdentifier',
+             'productVolDate'      : 'imageGenerationParameters//processingTime',
+             'productSoftVer'      : 'imageGenerationParameters//softwareVersion',
+             'productDate'         : 'sourceAttributes/rawDataStartTime',
+             'productFacility'     : 'imageGenerationParameters//processingFacility',
              # scene info
-             #'scenePol'           : './/sourceAttributes/radarParameters/acquisitionType',\    # Fine Quad Polarization
-             'scenePol'            : './/%ssourceAttributes//%spolarizations' % (NS,NS),\
-             'sceneBeam'           : './/%ssourceAttributes//%sbeams' % (NS,NS),\
-             'sceneBeamMode'       : './/%ssourceAttributes/%sbeamModeMnemonic' % (NS,NS),\
-             'sceneCenLat'         : './/%simageAttributes/%sgeographicInformation/%srationalFunctions/%slatitudeOffset'  % (NS,NS,NS,NS),\
-             'sceneCenLon'         : './/%simageAttributes/%sgeographicInformation/%srationalFunctions/%slongitudeOffset' % (NS,NS,NS,NS),\
-             'sceneRecords'        : './/%ssarProcessingInformation/%snumberOfLinesProcessed' % (NS,NS),\
-             'antennaLookDir'      : './/%ssourceAttributes//%santennaPointing' % (NS,NS),\
-             'missinglines'        : './/%ssourceAttributes//%snumberOfMissingLines' % (NS,NS),\
+             #'scenePol'           : 'sourceAttributes/radarParameters/acquisitionType',    # Fine Quad Polarization
+             'scenePol'            : 'sourceAttributes//polarizations',
+             'sceneBeam'           : 'sourceAttributes//beams',
+             'sceneBeamMode'       : 'sourceAttributes/beamModeMnemonic',
+             'list_sceneLat'       : 'imageAttributes/geographicInformation/geolocationGrid/imageTiePoint/geodeticCoordinate/latitude',
+             'list_sceneLon'       : 'imageAttributes/geographicInformation/geolocationGrid/imageTiePoint/geodeticCoordinate/longitude',
+             'sceneRecords'        : 'imageGenerationParameters/sarProcessingInformation/numberOfLinesProcessed',
+             'antennaLookDir'      : 'sourceAttributes//antennaPointing',
+             'missinglines'        : 'sourceAttributes//numberOfMissingLines',
              # orbit info
-             'orbitABS'            : './/%sorbitAndAttitude//%sorbitDataFile'           % (NS,NS),\
-             'orbitDir'            : './/%ssourceAttributes//%spassDirection'           % (NS,NS),\
-             'orbitTime'           : './/%ssourceAttributes//%sstateVector/%stimeStamp' % (NS,NS,NS),\
-             'orbitX'              : './/%ssourceAttributes//%sstateVector/%sxPosition' % (NS,NS,NS),\
-             'orbitY'              : './/%ssourceAttributes//%sstateVector/%syPosition' % (NS,NS,NS),\
-             'orbitZ'              : './/%ssourceAttributes//%sstateVector/%szPosition' % (NS,NS,NS),\
-             'orbitXV'             : './/%ssourceAttributes//%sstateVector/%sxVelocity' % (NS,NS,NS),\
-             'orbitYV'             : './/%ssourceAttributes//%sstateVector/%syVelocity' % (NS,NS,NS),\
-             'orbitZV'             : './/%ssourceAttributes//%sstateVector/%szVelocity' % (NS,NS,NS),\
+             'orbitABS'            : 'sourceAttributes/orbitAndAttitude//orbitDataFile',
+             'orbitDir'            : 'sourceAttributes//passDirection',
+             'list_orbitTime'      : 'sourceAttributes//stateVector/timeStamp',
+             'list_orbitX'         : 'sourceAttributes//stateVector/xPosition',
+             'list_orbitY'         : 'sourceAttributes//stateVector/yPosition',
+             'list_orbitZ'         : 'sourceAttributes//stateVector/zPosition',
+             'list_orbitXV'        : 'sourceAttributes//stateVector/xVelocity',
+             'list_orbitYV'        : 'sourceAttributes//stateVector/yVelocity',
+             'list_orbitZV'        : 'sourceAttributes//stateVector/zVelocity',
              # range
-             'rangeRSR'            : './/%ssourceAttributes//%sadcSamplingRate'                           % (NS,NS),\
-             'rangeBW'             : './/%simageGenerationParameters//%srangeLookBandwidth'               % (NS,NS),\
-             'rangeWind'           : './/%simageGenerationParameters//%srangeWindow/%swindowName'         % (NS,NS,NS),\
-             'rangeTimePix'        : './/%simageGenerationParameters//%sslantRangeTimeToFirstRangeSample' % (NS,NS),\
+             'list_rangeRSR'       : 'sourceAttributes//adcSamplingRate', # for UF mode there are two subpulses which have to be added together
+             'rangeBW'             : 'imageGenerationParameters//rangeLookBandwidth',
+             'rangeWind'           : 'imageGenerationParameters//rangeWindow/windowName',
+             'rangeWindCoeff'      : 'imageGenerationParameters//rangeWindow/windowCoefficient',
+             'rangeTimePix'        : 'imageGenerationParameters//slantRangeTimeToFirstRangeSample',
              # azimuth
-             'azimuthPRF'          : './/%ssourceAttributes//%spulseRepetitionFrequency'          % (NS,NS),\
-             'azimuthBW'           : './/%simageGenerationParameters//%sazimuthLookBandwidth'     % (NS,NS),\
-             'azimuthWind'         : './/%simageGenerationParameters//%swindowName'               % (NS,NS),\
-             'azimuthTimeStart'    : './/%simageGenerationParameters//%szeroDopplerTimeFirstLine' % (NS,NS),\
-             'azimuthTimeStop'    : './/%simageGenerationParameters//%szeroDopplerTimeLastLine' % (NS,NS),\
+             'azimuthPRF'          : 'sourceAttributes//pulseRepetitionFrequency', # for some modes (MF, UF) this value is changed in processing, calculate from other values
+             'azimuthBW'           : 'imageGenerationParameters//azimuthLookBandwidth',
+             'azimuthWind'         : 'imageGenerationParameters//azimuthWindow/windowName',
+             'azimuthWindCoeff'    : 'imageGenerationParameters//azimuthWindow/windowCoefficient',
+             'azimuthTimeFirstLine': 'imageGenerationParameters//zeroDopplerTimeFirstLine',
+             'azimuthTimeLastLine' : 'imageGenerationParameters//zeroDopplerTimeLastLine',
              # doppler
-             'dopplerTime'         : './/%simageGenerationParameters//%stimeOfDopplerCentroidEstimate' % (NS,NS),\
-             'dopplerCoeff'        : './/%simageGenerationParameters//%sdopplerCentroidCoefficients'   % (NS,NS),\
+             'dopplerTime'         : 'imageGenerationParameters//timeOfDopplerCentroidEstimate',
+             'dopplerCoeff'        : 'imageGenerationParameters//dopplerCentroidCoefficients',
              # for wavelength computation
-             'radarfreq'           : './/%ssourceAttributes//%sradarCenterFrequency'                   % (NS,NS),
-             #  wavelength_computed = (0.000000001*SOL/atof(c8freq));// seems more reliable, BK 03/04
+             'radarfreq'           : 'sourceAttributes//radarCenterFrequency',
+             #  wavelength_computed = (0.000000001*SOL/atof(c8freq)) seems more reliable, BK 03/04
              }
 
 
-
-# temp variables and parameters
-container     = {}
-# containerTemp = {}        
-events        = ('end',)
-
-# functions : not sure if needed
-def fast_iter_string(context):
-    for event,elem in context:
-        return elem.text
-
-# works with lists
-def fast_iter_list(context,tag=''):
-    for event,elem in context:
-        return elem.iterchildren(tag=tag).next().text
-
-def hms2sec(hmsString,convertFlag='int'):
-    # input hmsString syntax: XX:XX:XX.xxxxxx
-    secString = int(hmsString[0:2])*3600 + \
-        int(hmsString[3:5])*60 + \
-        float(hmsString[6:])
-    if convertFlag == 'int' :
-        return int(secString)
-    elif convertFlag == 'float' :
-        return float(secString)
+# get variables and parameters from xml
+container = {}
+for key, value in queryList.iteritems():
+    if key.startswith('list_'):
+        container[key] = [tag.text for tag in inTree.findall(nsmap_none(value, ns))]
     else:
-        return int(secString)
-
-for key in queryList.keys():
-
-    try:
-        vars()[key];                     # check if non-existed
-    except KeyError or NameError:
-        vars()[key] = [];                # if non-exist, initialize
-
-    for nodes in inTree.findall(queryList[key]):
-#
-        if key == 'dopplerCoeff':
-            vars()[key].append(nodes.text)
-            vars()[key] = vars()[key][0].split() # ['-1.668835506800000e+02 1.055662889201444e+05 -2.651685365500000e+07']
-            container['dopplerCoeff0']= [ vars()[key][0] ]
-            container['dopplerCoeff1']= [ vars()[key][1] ]
-            container['dopplerCoeff2']= [ vars()[key][2] ]
-
-#            if nodes.attrib.values()[0] == '0':
-#                keyTemp = 'dopplerCoeff0' # reset key
-#                try:
-#                    vars()[keyTemp];
-#                except KeyError or NameError:
-#                    vars()[keyTemp] = [];
-#                vars()[keyTemp].append(nodes.text)
-#
-#            elif nodes.attrib.values()[0] == '1':
-#                keyTemp = 'dopplerCoeff1' # reset key
-#                try:
-#                    vars()[keyTemp];
-#                except KeyError or NameError:
-#                    vars()[keyTemp] = [];
-#                vars()[keyTemp].append(nodes.text)
-#
-#            elif nodes.attrib.values()[0] == '2':
-#                keyTemp = 'dopplerCoeff2' # reset key
-#                try:
-#                    vars()[keyTemp];
-#                except KeyError or NameError:
-#                    vars()[keyTemp] = [];
-#                vars()[keyTemp].append(nodes.text)
-#
-#            container[keyTemp] = vars()[keyTemp]
-#
-        else:
-            vars()[key].append(nodes.text)
-            
-    container[key] = vars()[key]
+        container[key] = inTree.findtext(nsmap_none(value, ns))
+        if container[key] is None:
+            raise Exception('Path {0} not found in XML'.format(value))
+
+container['dopplerCoeff'] = container['dopplerCoeff'].split()
+
+def mean(l):
+    return sum(l)/len(l)
+container['sceneCenLat'] = mean([float(val) for val in container['list_sceneLat']])
+container['sceneCenLon'] = mean([float(val) for val in container['list_sceneLon']])
+
+# sum subpulses for UF like modes
+RSR1 = sum(float(val) for val in container['list_rangeRSR'])
+# alternative: calculate RSR from given pixel spacing, former way doesn't work correctly for reduced resolution XF images. Difference is a factor 1.0000001
+container['rangeRSR'] = SOL/float(container['imagePixelSpacing'])/2
+# for backwards compatibility use first method when values are very close to each other
+if 0.9999 < RSR1/container['rangeRSR'] < 1.0001:
+    container['rangeRSR'] = RSR1
+
+# Calculate PRF
+azimuthTimeFirstLine = datetime.strptime(container['azimuthTimeFirstLine'], dateformat)
+azimuthTimeLastLine = datetime.strptime(container['azimuthTimeLastLine'], dateformat)
+obs_time = (azimuthTimeLastLine - azimuthTimeFirstLine).total_seconds()
+# set start time to the first observed line (for ascending the image is flipped)
+if obs_time > 0:
+    azimuthTimeStart = azimuthTimeFirstLine
+else:
+    azimuthTimeStart = azimuthTimeLastLine
+    obs_time = -obs_time
 
+if container['sceneBeam'] != 'S3': # Hacky fix for S3 merged images
+    container['azimuthPRF'] = (float(container['imageLines']) - 1)/obs_time
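+# Illustrative check: 10000 image lines spanning 5.0 s of observation time give
+# an effective PRF of (10000 - 1)/5.0 = 1999.8 Hz.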
 
 # ---------------------------------------------------------------------------------------------------------
 
@@ -205,80 +189,72 @@ for key in queryList.keys():
 
 dummyVar = 'DUMMY'
 
-if container['orbitDir'][0] == 'Ascending':
- azimuthTimeDoris =  container['azimuthTimeStop'][0]
-else:
- azimuthTimeDoris =  container['azimuthTimeStart'][0]
-
-
-
-print('\nrs2_dump_header2doris.py v%s, doris software, 2009\n' % codeRevision)
+print('\nrs2_dump_header2doris.py v%s, doris software, 2013\n' % codeRevision)
 print('*******************************************************************')
 print('*_Start_readfiles:')
 print('*******************************************************************')
-print('Volume file: 					%s' % 'product.xml') # container['volFile'][0]) # HARDCODED!!! for Radarsat-2
-print('Volume_ID: 					%s' % container['volID'][0])
-print('Volume_identifier: 				%s' % container['volRef'][0])
+print('Volume file: 					%s' % 'product.xml') # container['volFile']) # HARDCODED!!! for Radarsat-2
+print('Volume_ID: 					%s' % container['volID'])
+print('Volume_identifier: 				%s' % container['volRef'])
 print('Volume_set_identifier: 				%s' % dummyVar)
-print('(Check)Number of records in ref. file: 		%s' % container['sceneRecords'][0])
-print('SAR_PROCESSOR:                                  %s %s' % (str.split(container['productSpec'][0])[0][:2],container['productSoftVer'][0]))
-print('SWATH:                                          %s' % container['sceneBeam'][0])
-print('PASS:                                           %s' % container['orbitDir'][0])
-print('IMAGING_MODE:                                   %s %s' % (container['sceneBeamMode'][0],container['scenePol'][0]))
-print('RADAR_FREQUENCY (Hz):                           %f' % float(container['radarfreq'][0]))
+print('(Check)Number of records in ref. file: 		%s' % container['sceneRecords'])
+print('SAR_PROCESSOR:                                  %s %s' % (str.split(container['productSpec'])[0][:2],container['productSoftVer']))
+print('SWATH:                                          %s' % container['sceneBeam'])
+print('PASS:                                           %s' % container['orbitDir'])
+print('IMAGING_MODE:                                   %s %s' % (container['sceneBeamMode'],container['scenePol']))
+print('RADAR_FREQUENCY (Hz):                           %s' % container['radarfreq'])
 print('')
-print('Product type specifier: 	                %s' % container['mission'][0])
-print('Logical volume generating facility: 		%s' % container['productFacility'][0])
-print('Logical volume creation date: 			%s' % container['productVolDate'][0])
-print('Location and date/time of product creation: 	%s' % container['productDate'][0])
-#print('Scene identification: 				Orbit: %s %s Mode: %s' % (container['orbitABS'][0].split('_')[0],container['orbitDir'][0],container['sceneBeamMode'][0]))
-print('Scene identification: 				Orbit: %s  %s' % (container['orbitABS'][0].split('_')[0], azimuthTimeDoris))
-print('Scene location: 		                lat: %.4f lon: %.4f' % (float(container['sceneCenLat'][0]),float(container['sceneCenLon'][0])))
+print('Product type specifier: 	                %s' % container['mission'])
+print('Logical volume generating facility: 		%s' % container['productFacility'])
+print('Logical volume creation date: 			%s' % container['productVolDate'])
+print('Location and date/time of product creation: 	%s' % container['productDate'])
+#print('Scene identification: 				Orbit: %s %s Mode: %s' % (container['orbitABS'].split('_')[0],container['orbitDir'],container['sceneBeamMode']))
+print('Scene identification: 				Orbit: %s  %s' % (container['orbitABS'].split('_')[0], azimuthTimeStart.strftime(dateformat)))
+print('Scene location: 		                lat: %.4f lon: %.4f' % (float(container['sceneCenLat']),float(container['sceneCenLon'])))
 print('')
-print('Leader file:                                 	%s' % 'product.xml') # container['volFile'][0]) # HARDCODED!!! for Radarsat-2
-print('Sensor platform mission identifer:         	%s' % container['mission'][0])
-print('Scene_centre_latitude:                     	%s' % container['sceneCenLat'][0])   # CORRECTT this by taking average or so!!!!!!!!!!
-print('Scene_centre_longitude:                    	%s' % container['sceneCenLon'][0])
+print('Leader file:                                 	%s' % 'product.xml') # container['volFile']) # HARDCODED!!! for Radarsat-2
+print('Sensor platform mission identifer:         	%s' % container['mission'])
+print('Scene_centre_latitude:                     	%s' % container['sceneCenLat'])
+print('Scene_centre_longitude:                    	%s' % container['sceneCenLon'])
 print('Scene_centre_heading:                            %s' % 'Null') # needs to be computed from geoinfo
-print('Radar_wavelength (m):                      	%s' % str(SOL/float(radarfreq[0])))
-print('First_pixel_azimuth_time (UTC):			%s %s' % (time.strftime("%d-%b-%Y",time.strptime(azimuthTimeDoris.split('T')[0],"%Y-%m-%d")),azimuthTimeDoris.split('T')[1][:-1]))
-print('Pulse_Repetition_Frequency (computed, Hz): 	%s' % container['azimuthPRF'][0])
-print('Total_azimuth_band_width (Hz):             	%s' % container['azimuthBW'][0])
-print('Weighting_azimuth:                         	%s' % str.upper(container['azimuthWind'][0]))
-print('Xtrack_f_DC_constant (Hz, early edge):     	%s' % container['dopplerCoeff0'][0])
-print('Xtrack_f_DC_linear (Hz/s, early edge):     	%s' % container['dopplerCoeff1'][0])
-print('Xtrack_f_DC_quadratic (Hz/s/s, early edge): 	%s' % container['dopplerCoeff2'][0])
-print('Range_time_to_first_pixel (2way) (ms):     	%0.15f' % (float(container['rangeTimePix'][0])*1000))
-print('Range_sampling_rate (computed, MHz):       	%0.6f' % (float(container['rangeRSR'][0])/1000000))
-print('Total_range_band_width (MHz):               	%s' % (float(container['rangeBW'][0])/1000000))
-print('Weighting_range:                            	%s' % str.upper(container['rangeWind'][0]))
+print('Radar_wavelength (m):                      	%s' % str(SOL/float(container['radarfreq'])))
+print('First_pixel_azimuth_time (UTC):			%s' % azimuthTimeStart.strftime('%d-%b-%Y %H:%M:%S.%f'))
+print('Pulse_Repetition_Frequency (computed, Hz): 	%s' % container['azimuthPRF'])
+print('Total_azimuth_band_width (Hz):             	%s' % float(container['azimuthBW']))
+print('Weighting_azimuth:                         	%s %f' % (str.upper(container['azimuthWind']), float(container['azimuthWindCoeff'])))
+print('Xtrack_f_DC_constant (Hz, early edge):     	%s' % container['dopplerCoeff'][0])
+print('Xtrack_f_DC_linear (Hz/s, early edge):     	%s' % container['dopplerCoeff'][1])
+print('Xtrack_f_DC_quadratic (Hz/s/s, early edge): 	%s' % container['dopplerCoeff'][2])
+print('Range_time_to_first_pixel (2way) (ms):     	%0.15f' % (float(container['rangeTimePix'])*1000))
+print('Range_sampling_rate (computed, MHz):       	%0.6f' % (float(container['rangeRSR'])/1000000))
+print('Total_range_band_width (MHz):               	%s' % (float(container['rangeBW'])/1000000))
+print('Weighting_range:                            	%s %f' % (str.upper(container['rangeWind']), float(container['rangeWindCoeff'])))
 print('')
 print('*******************************************************************')
-print('Datafile: 					%s' % container['imageData'][0])
+print('Datafile: 					%s' % container['imageFile'])
 print('Dataformat: 				%s' % 'GeoTIFF')  # hardcoded!!!
-print('Number_of_lines_original: 			%s' % container['imageLines'][0])
-print('Number_of_pixels_original: 	                %s' % container['imagePixels'][0])
+print('Number_of_lines_original: 			%s' % container['imageLines'])
+print('Number_of_pixels_original: 	                %s' % container['imagePixels'])
 print('*******************************************************************')
 print('* End_readfiles:_NORMAL')
 print('*******************************************************************')
 print('')
 print('')
 print('*******************************************************************')
-print('*_Start_leader_datapoints:  %s ' % container['orbitABS'][0].split('_')[1])
+print('*_Start_leader_datapoints:  %s ' % container['orbitABS'].split('_')[1])
 print('*******************************************************************')
 print(' t(s)		X(m)		Y(m)		Z(m)      X_V(m/s)      Y_V(m/s)      Z_V(m/s)')
-print('NUMBER_OF_DATAPOINTS: 			%s' % len(container['orbitTime']))
-print('')
+print('NUMBER_OF_DATAPOINTS: %s' % len(container['list_orbitTime']))
 
 # MA : positions and velocities
-for i in range(len(container['orbitTime'])):
-    print(' %.5f %s %s %s %s %s %s' % (hms2sec(container['orbitTime'][i].split('T')[1].strip('Z'),'float'),\
-                                  container['orbitX'][i],\
-                                  container['orbitY'][i],\
-                                  container['orbitZ'][i],\
-                                  container['orbitXV'][i],\
-                                  container['orbitYV'][i],\
-                                  container['orbitZV'][i]))
+for i in range(len(container['list_orbitTime'])):
+    print(' %.6f %s %s %s %s %s %s' % (hms2sec(container['list_orbitTime'][i].split('T')[1].strip('Z')),
+                                  container['list_orbitX'][i],
+                                  container['list_orbitY'][i],
+                                  container['list_orbitZ'][i],
+                                  container['list_orbitXV'][i],
+                                  container['list_orbitYV'][i],
+                                  container['list_orbitZV'][i]))
 
 print('')
 print('*******************************************************************')
diff --git a/bin/run b/bin/run
index 6c6fe8a..05244dd 100755
--- a/bin/run
+++ b/bin/run
@@ -361,7 +361,7 @@ BATCH           ON                      // do process non-interactive
 LISTINPUT       ON                      // do copy this file to log
 HEIGHT          0.0                     // average WGS84 terrain height
 TIEPOINT        0.0 0.0 0.0             // lat/lon/hei of a tiepoint
-ORB_INTERP      POLYFIT 4               // use 5th degree polynomial
+ORB_INTERP      POLYFIT 4               // 5th degree polynomial
 c ORB_INTERP      SPLINE                // natural cubic spline
 c ORB_INTERP      POLYFIT               // orbit interpolation method
 c ELLIPSOID     WGS84                   // WGS84 (default), GRS80, BESSEL or define a,b  (major radius, minor radius)
diff --git a/bin/tsx_dump_header2doris.py b/bin/tsx_dump_header2doris.py
index d9f1a2b..bb06314 100755
--- a/bin/tsx_dump_header2doris.py
+++ b/bin/tsx_dump_header2doris.py
@@ -1,7 +1,4 @@
-#!/usr/bin/env python
-
-#[BO]When user has multiple python installations or EPD it is better to use the
-#    python defined in the environment. 20110429
+#!/usr/bin/python
 
 # live code for parsing of XML TSX file into python data structures
 # and from there into DORIS res file structure
@@ -9,25 +6,8 @@
 # this is rather fair implementation and should be used as a structure
 # for the future implementation of XML/GeoTIFF data readers
 
-#[BO] EPD (Entought) running on Centos fails at from lxml import etree.
-#     The exception tries another import. The cElementTree has c bindings.
-#     I believe it is faster than the regular ElementTree. 
-
-#[BO] Removed the heading field since it fails every once in a while for
-#     reading certain TSX/TDX data sets. 20120409 See also:
-#     http://www.osmanoglu.org/sar/88-tsxdumpheader2dorispy-fails-with-keyerror
-
+from lxml import etree
 import string, time, sys
-try:
-    import xml.etree.cElementTree as etree
-except:
-    try:
-        from lxml import etree
-    except:
-        #import xml.etree.ElementTree as etree
-        print 'Failed to load lxml.etree or xml.etree.cElementTree'
-        sys.exit(1)
-          
 #import xml.etree.ElementTree as ElementTree
 #import types
 
@@ -183,7 +163,6 @@ print('Leader file:                                 	%s' % container['volFile'][
 print('Sensor platform mission identifer:         	%s' % container['mission'][0])
 print('Scene_centre_latitude:                     	%s' % container['sceneCenLat'][0])
 print('Scene_centre_longitude:                    	%s' % container['sceneCenLon'][0])
-#print('Scene_center_heading: 	                %2.0f' % (float(container['heading'][0])))
 print('Radar_wavelength (m):                      	0.031') #HARDCODED!!!
 print('First_pixel_azimuth_time (UTC):			%s %s' % (time.strftime("%d-%b-%Y",time.strptime(container['azimuthTimeStart'][0].split('T')[0],"%Y-%m-%d")),container['azimuthTimeStart'][0].split('T')[1][:-1]))
 print('Pulse_Repetition_Frequency (computed, Hz): 	%s' % container['azimuthPRF'][0])
diff --git a/bin/tsx_dump_header2doris_noxpath.py b/bin/tsx_dump_header2doris_noxpath.py
index 6e889c6..4c5e641 100755
--- a/bin/tsx_dump_header2doris_noxpath.py
+++ b/bin/tsx_dump_header2doris_noxpath.py
@@ -1,7 +1,4 @@
-#!/usr/bin/env python
-
-#[BO]When user has multiple python installations or EPD it is better to use the
-#    python defined in the environment. 20110429
+#!/usr/bin/python
 
 # live code for parsing of XML TSX file into python data structures
 # and from there into DORIS res file structure
@@ -9,10 +6,6 @@
 # this is rather fair implementation and should be used as a structure
 # for the future implementation of XML/GeoTIFF data readers
 
-#[BO] EPD (Entought) running on Centos fails at from lxml import etree.
-#     The exception tries another import. The cElementTree has c bindings.
-#     I believe it is faster than the regular ElementTree. 
-
 from lxml import etree
 import string, time, sys
 #import xml.etree.ElementTree as ElementTree
@@ -210,10 +203,7 @@ print('Total_azimuth_band_width (Hz):             	%s' % container['azimuthBW'][
 print('Weighting_azimuth:                         	%s' % str.upper(container['azimuthWind'][0]))
 print('Xtrack_f_DC_constant (Hz, early edge):     	%s' % container['dopplerCoeff0'][0])
 print('Xtrack_f_DC_linear (Hz/s, early edge):     	%s' % container['dopplerCoeff1'][0])
-try:
-  print('Xtrack_f_DC_quadratic (Hz/s/s, early edge): 	%s' % container['dopplerCoeff2'][0])
-except:
-  print('Xtrack_f_DC_quadratic (Hz/s/s, early edge):    %s' % [0][0])
+print('Xtrack_f_DC_quadratic (Hz/s/s, early edge): 	%s' % container['dopplerCoeff2'][0])
 print('Range_time_to_first_pixel (2way) (ms):     	%0.15f' % (float(container['rangeTimePix'][0])*1000))
 print('Range_sampling_rate (computed, MHz):       	%0.6f' % (float(container['rangeRSR'][0])/1000000))
 print('Total_range_band_width (MHz):               	%s' % (float(container['rangeBW'][0])/1000000))
@@ -221,7 +211,6 @@ print('Weighting_range:                            	%s' % str.upper(container['r
 print('')
 print('*******************************************************************')
 print('Datafile: 					%s' % container['imageData'][0])
-print('Dataformat:        %s' % 'TSX_COSAR')  # hardcoded!!!
 print('Number_of_lines_original: 			%s' % container['imageLines'][0])
 print('Number_of_pixels_original: 	                %s' % container['imagePixels'][0])
 print('*******************************************************************')
diff --git a/src/COPYING b/doris_core/COPYING
similarity index 100%
rename from src/COPYING
rename to doris_core/COPYING
diff --git a/src/Makefile.debug-full b/doris_core/Makefile
old mode 100755
new mode 100644
similarity index 73%
copy from src/Makefile.debug-full
copy to doris_core/Makefile
index 83937e9..fa1307c
--- a/src/Makefile.debug-full
+++ b/doris_core/Makefile
@@ -1,9 +1,11 @@
 #########################################################################
 # Makefile for the Doris software.					#
 # Created by: configure						#
-# 04-Dec-1998  Delft University of Technology, Geodesy			#
-# Bert Kampes (b.m.kampes at lr.tudelft.nl)				#
-# http://enterprise.geo.tudelft.nl/doris/				#
+# 04-Dec-1998                                                           #
+#                                                                       #
+# Delft University of Technology                                        #
+# Delft Institute of Earth Observation and Space Systems                #
+# http://doris.tudelft.nl               				#
 # See also the user manual, annex installation.				#
 #									#
 ### Usage ### 								#
@@ -28,6 +30,10 @@
 #  HP  aCC version A.01.07 on HP-UX B.10.20 A 9000/785			#
 #  GNU g++ version ?       on Linux X86					#
 #  SGI ?								#
+#  Sun Studio 9,10,11      on Solaris Sparc and X86/AMD64		#
+#  GNU g++ versions v3.4, v4.0, v4.1, v4.2, v4.3 on Linux X86 and AMD64 #
+#  Sun Studio v11          on Linux AMD64				#
+#  Intel Compilers v9, v10, v11 on Linux AMD64				#
 #########################################################################
 ###################################################################
 ###################################################################
@@ -37,7 +43,7 @@
 SHELL   = /bin/sh
 
 ### Specify compiler/installation directory ###
-INSTALLDIR = /usr/local/bin
+INSTALLDIR = /data/src/Doris_s1_git/bin
 CC	   = g++
 SCRIPTSDIR = ../bin
 
@@ -46,13 +52,14 @@ SCRIPTSDIR = ../bin
 DEF1    = -D__DEBUGMAT1	# 		-1- range checking in matrix class
 DEF2    = -D__DEBUGMAT2	# 		-2- info matrix class (debug only)
 DEF3    = -D__DEBUG #			-3- debug for other files
-DEF4    = -Wno-deprecated -trigraphs #  -4- do not display warnings
-DEF5    = -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE # support for large files
+DEF4    = -Wno-deprecated            #  -4- do not display warnings due to deprecated entries
+DEF5    = -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE #  -5- support for large files
+#DEF6    = -DNO_FASTTRIG              #  -6-  extra functional switches
 #DEF9     = -D__NO_IOS_BINARY__ # -9- comment out if you ios::binary (most will)
 #
 ### Do not change following 2, simply comment DEF1-8 above
 DEFSDEBUG 	= $(DEF1) $(DEF2) $(DEF3)
-DEFS		= $(DEF4) $(DEF5)  -D__USE_FFTW_LIBRARY__ -D__X86PROCESSOR__   -I/usr/include 
+DEFS		= $(DEF4) $(DEF5) $(DEF6)   -D__USE_FFTW_LIBRARY__ -D__X86PROCESSOR__   -I/usr/include
 
 ### NOTE ###
 # If you get warnings like the following (I got them with g++, not with aCC)
@@ -65,6 +72,8 @@ DEFS		= $(DEF4) $(DEF5)  -D__USE_FFTW_LIBRARY__ -D__X86PROCESSOR__   -I/usr/incl
 ### END NOTE ###
 
 
+# LOCAL dir
+LOCAL_INSTALLDIR = ../bin
 
 ### CFLAGS ###
 # Compiler options flag ###
@@ -77,20 +86,20 @@ CFLAGSOPT 	= -O $(DEFS)
 ### CCNOTE ###
 ### Change here ###
 ### set CFLAGS depending on if you compile debug version or working version ###
-CFLAGS 	= $(CFLAGSDEBUG)
-#CFLAGS 	= $(CFLAGSOPT)
+#CFLAGS 	= $(CFLAGSDEBUG)
+CFLAGS 	= $(CFLAGSOPT)
 ### END CCNOTE ###
 
 ### Library locations flag ###
 ### -lcl : used for veclib 
 ### -lm  : used for fftw
-LFLAGS  =   -L/usr/lib -lfftw3f -lm
+LFLAGS  =   -L/lib64 -lfftw3f -lm
 
 
 #####################################################
 ### No need to change anything below here... ####
 #####################################################
-EXECUTABLE = 	doris394.debug
+EXECUTABLE = 	doris
 
 SWSRCS	=	processor.cc \
 		utilities.cc \
@@ -104,23 +113,34 @@ SWSRCS	=	processor.cc \
 		geocode.cc \
 		unwrap.cc \
 		matrixspecs.cc \
-		exceptions.cc
+		exceptions.cc \
+		estorbit.cc
 SWOBJS	=	$(SWSRCS:.cc=.o)
 
 ### scripts of doris.tar in SCRIPTSDIR, used at make install
 SCRIPTS	=	helpdoris \
-		baseline.doris \
+		baseline.doris.sh \
+		baseline.doris.csh \
+                construct_dem.sh \
 		coregpm.doris \
+		doris* \
 		heightamb \
 		phasefilt.doris \
-		plotcpm \
-		plotoffsets \
+		plotcpm* \
+		plotoffsets* \
 		run \
 		viewanddel \
 		cpx2ps \
 		lonlathei2ascii \
 		ascii2ascii_UTM \
-		ascii2ps
+		ascii2ps* \
+                tsx* \
+                rs2* \
+                csk* \
+                gammaReadfiles.csh \
+                hhmmss2sec.py \
+                sec2hhmmss.py
+
 
 
 
@@ -172,8 +192,22 @@ install:	$(EXECUTABLE)
 		@echo "* ...Installation finished... *"
 		@echo "*******************************"
 		@echo " "
+		@echo "* Check that $(INSTALLDIR) is in your path search: echo \$$PATH ."
+		@echo " "
 
 
+installcb:	$(EXECUTABLE)
+		@echo "* Installing $(EXECUTABLE) in: $(LOCAL_INSTALLDIR)"
+		@cp -f $(EXECUTABLE) $(LOCAL_INSTALLDIR)/.
+		$(MAKE) cleaner
+		@echo " "
+		@echo "*******************************"
+		@echo "* ...Installation finished... *"
+		@echo "*******************************"
+		@echo " "
+		@echo "* Check that $(LOCAL_INSTALLDIR) is in your path search: echo \$$PATH ."
+		@echo " "
+
 
 #####################################################
 ### Testers ###
@@ -183,17 +217,21 @@ testdoris:	$(EXECUTABLE)
 		@echo "* Executing command: $(EXECUTABLE) -v"
 		$(EXECUTABLE) -v
 		@echo " "
-testorbit:	ioroutines.o matrixspecs.o readinput.o utilities.o
-		$(CC) $(CFLAGS) orbitbk.cc -D__TESTMAIN__ \
-		ioroutines.o matrixspecs.o readinput.o utilities.o \
+
+### Orbit test program for debugging ###
+test-orbit:	ioroutines.o matrixspecs.o utilities.o exceptions.cc slcimage.cc orbitbk.cc matrixbk.cc bk_messages.hh
+		$(CC) $(CFLAGS) -D__TESTMAIN__ \
+		ioroutines.o matrixspecs.o utilities.o exceptions.cc slcimage.cc orbitbk.cc \
+		$(LFLAGS) \
 		-o $@
 ### Matrix test program for debugging ###
 ### fast_sin defined in utilities.cc, which requires ioroutines, which, etc.
-matrixbk_test:  matrixbk_test.o matrixspecs.o utilities.o
-		$(CC) $(CFLAGS) matrixbk_test.o matrixspecs.o \
+test-matrix:  matrix_test.cc matrixspecs.o utilities.o ioroutines.o matrixbk.cc
+		$(CC) $(CFLAGS) matrix_test.cc matrixspecs.o \
 		utilities.o ioroutines.o exceptions.cc orbitbk.cc \
 		$(LFLAGS) \
 		-o $@
+		@echo " "
 
 
 #####################################################
diff --git a/src/Makefile.bert b/doris_core/Makefile.bert
similarity index 100%
rename from src/Makefile.bert
rename to doris_core/Makefile.bert
diff --git a/src/Makefile.cygwin b/doris_core/Makefile.cygwin
similarity index 100%
rename from src/Makefile.cygwin
rename to doris_core/Makefile.cygwin
diff --git a/src/Makefile.debug b/doris_core/Makefile.debug
similarity index 100%
rename from src/Makefile.debug
rename to doris_core/Makefile.debug
diff --git a/src/Makefile.debug-full b/doris_core/Makefile.debug-full
similarity index 100%
rename from src/Makefile.debug-full
rename to doris_core/Makefile.debug-full
diff --git a/src/Makefile.doris_v3.17_MacOSX10.4_gcc4 b/doris_core/Makefile.doris_v3.17_MacOSX10.4_gcc4
similarity index 100%
rename from src/Makefile.doris_v3.17_MacOSX10.4_gcc4
rename to doris_core/Makefile.doris_v3.17_MacOSX10.4_gcc4
diff --git a/src/Makefile.sun b/doris_core/Makefile.sun
similarity index 100%
rename from src/Makefile.sun
rename to doris_core/Makefile.sun
diff --git a/src/README b/doris_core/README
similarity index 100%
rename from src/README
rename to doris_core/README
diff --git a/src/TODO b/doris_core/TODO
similarity index 100%
rename from src/TODO
rename to doris_core/TODO
diff --git a/src/bk_baseline.hh b/doris_core/bk_baseline.hh
similarity index 100%
rename from src/bk_baseline.hh
rename to doris_core/bk_baseline.hh
diff --git a/src/bk_messages.hh b/doris_core/bk_messages.hh
similarity index 100%
rename from src/bk_messages.hh
rename to doris_core/bk_messages.hh
diff --git a/src/configure b/doris_core/configure
similarity index 100%
rename from src/configure
rename to doris_core/configure
diff --git a/src/constants.hh b/doris_core/constants.hh
similarity index 97%
rename from src/constants.hh
rename to doris_core/constants.hh
index 5bc64f4..007ac38 100755
--- a/src/constants.hh
+++ b/doris_core/constants.hh
@@ -155,7 +155,10 @@ extern bk_messages ERROR;
 //#define SWVERSION "version  4.04-beta4 (10-03-2011)" "\n\t\t     build \t" __TIMESTAMP__ // [MA] master printout crop numlines+numpixels
 //#define SWVERSION "version  4.05-beta1 (28-03-2011)" "\n\t\t     build \t" __TIMESTAMP__ // [HB] ESTORBIT module
 //#define SWVERSION "version  4.06-beta1 (23-10-2011)" "\n\t\t     build \t" __TIMESTAMP__ // [BA] Modified Goldstein Filter
-#define SWVERSION "version  4.06-beta2 (28-12-2011)" "\n\t\t     build \t" __TIMESTAMP__ // [MA] path length fix for demassist at productfill and the rest.
+//#define SWVERSION "version  4.06-beta2 (28-12-2011)" "\n\t\t     build \t" __TIMESTAMP__ // [MA] path length fix for demassist at productfill and the rest.
+//#define SWVERSION "version  4.06-beta3 (31-10-2013)" "\n\t\t     build \t" __TIMESTAMP__ // [FvL] path length fix in readcoeff function (ioroutines.cc).
+//#define SWVERSION "version  4.0.7 (23-07-2014)" "\n\t\t     build \t" __TIMESTAMP__ // [FvL] removed unwanted automatic removal of pre-calculated reference phase in INTERFERO step
+#define SWVERSION "version  4.0.8 (04-09-2014)" "\n\t\t     build \t" __TIMESTAMP__ // [FvL] new version based on svn trunk with still untested spotlight developments.
 
 // ====== Typedefs for portability ======
 typedef short int           int16;    // 16 bits --> 2 bytes.  It has a range of -32768 to 32767. [ from -2^15 to (2^15 - 1) ]  [MA]
@@ -248,6 +251,7 @@ const int16     SLC_ASAR    = 2;                // ENVISAT
 const int16     SLC_ASAR_AP_HH = 21;            // ENVISAT AP HH
 const int16     SLC_ASAR_AP_VV = 22;            // ENVISAT AP VV
 
+const int16     SLC_S1A     = 30;               // Sentinel-1 A (TOPS mode)
 const int16     SLC_RSAT    = 3;                // RadarSAT (and Atlantis processor??)
 const int16     SLC_JERS    = 4;                // JERS (ceos?)
 // for ALOS: [PM]
diff --git a/src/coregistration.cc b/doris_core/coregistration.cc
old mode 100755
new mode 100644
similarity index 80%
rename from src/coregistration.cc
rename to doris_core/coregistration.cc
index 059d661..c316cf6
--- a/src/coregistration.cc
+++ b/doris_core/coregistration.cc
@@ -77,7 +77,7 @@
 void coarseporbit(
         const input_ell &ell,
         const slcimage  &master,
-        const slcimage  &slave, 
+        const slcimage  &slave,
         orbit           &masterorbit,  // cannot be const for spline
         orbit           &slaveorbit,   // cannot be const for spline
         const BASELINE  &baseline)
@@ -104,7 +104,7 @@ void coarseporbit(
 
   // ______Compute line,pixel for slave of this xyz______
   real8 lin,pix;
-  const int32 xyz2lpiter = 
+  const int32 xyz2lpiter =
     xyz2lp(lin,pix,slave,slaveorbit,P,MAXITER,CRITERTIM);
 
   // ______ Some extra parameters (not used, just info) ______ // BK 19-Oct-2000
@@ -279,14 +279,14 @@ void coarsecorrel(
 // ______Only odd Masksize possible_____
   bool forceoddl = false;
   bool forceoddp = false;
-  if (!isodd(MasksizeL)) 
+  if (!isodd(MasksizeL))
     {
     forceoddl = true; 
     MasksizeL+=1;                       // force oddness
     }
   if (!isodd(MasksizeP))
     {
-    forceoddp = true; 
+    forceoddp = true;
     MasksizeP+=1;                       // force oddness
     }
 
@@ -298,16 +298,24 @@ void coarsecorrel(
   const int32 spN = sinfo.currentwindow.pixhi  - initoffsetP;
 
   // ______Corners of useful overlap master,slave in master system______
-  const uint BORDER = 20;// slightly smaller 
-  const uint l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
-  const uint lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
-  const uint p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
-  const uint pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
+  //const uint BORDER = 20;// slightly smaller
+  //const uint l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
+  //const uint lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
+  //const uint p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
+  //const uint pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
+  // [FvL]
+  const uint BORDER = 20;// slightly smaller
+  const int l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
+  const int lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
+  const int p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
+  const int pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
   const window overlap(l0,lN,p0,pN);
 
   // ______Distribute Nwin points over window______
   // ______Centers(i,0): line, (i,1): pixel, (i,2) flagfromdisk______
-  matrix<uint> Centers;
+  //matrix<uint> Centers;
+  // [FvL] for correct folding of points outside overlap window
+  matrix<int> Centers;
   if (pointsrandom)                             // no filename specified
     {
     Centers = distributepoints(real4(Nwin),overlap);
@@ -323,9 +331,13 @@ void coarsecorrel(
     for (uint i=0; i<Nwin; ++i)
       {
       ifpos >> ll >> pp;
-      Centers(i,0) = uint(ll);                  // correct for lower left corner
-      Centers(i,1) = uint(pp);                  // correct for lower left corner
-      Centers(i,2) = uint(1);                   // flag from file
+      //Centers(i,0) = uint(ll);                  // correct for lower left corner
+      //Centers(i,1) = uint(pp);                  // correct for lower left corner
+      //Centers(i,2) = uint(1);                   // flag from file
+      // [FvL] for correct folding of points outside overlap window
+      Centers(i,0) = int(ll);                  // correct for lower left corner
+      Centers(i,1) = int(pp);                  // correct for lower left corner
+      Centers(i,2) = int(1);                   // flag from file
       ifpos.getline(dummyline,ONE27,'\n');              // goto next line.
       }
     ifpos.close();
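
The uint-to-int switch in this hunk is not cosmetic: window centers read from a file can lie outside the computed overlap and are later folded back by mirror expressions such as pN + pN - Centers(i,1); with unsigned arithmetic an intermediate negative value would wrap around to a huge positive number instead of folding. A minimal sketch of the mirroring logic (variable names follow the C++ above):

    def fold_into_overlap(p, p0, pN):
        # mirror a point back inside [p0, pN]; with a C++ uint a negative
        # intermediate would wrap around instead of folding correctly
        if p < p0:
            p = p0 + (p0 - p)
        elif p > pN:
            p = pN - (p - pN)  # equivalent to pN + pN - p, cf. the C++ code
        return p

    print(fold_into_overlap(5, 20, 100))    # 35: mirrored off the lower edge
    print(fold_into_overlap(130, 20, 100))  # 70: mirrored off the upper edge
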
@@ -439,11 +451,11 @@ void coarsecorrel(
     // ______Read windows from files, compute magnitude______
     Mcmpl  = minfo.readdata(master);
     Scmpl  = sinfo.readdata(slavemask);
-    Master = magnitude(Mcmpl); 
-    Mask   = magnitude(Scmpl); 
+    Master = magnitude(Mcmpl);
+    Mask   = magnitude(Scmpl);
 
     // ______Compute correlation matrix and find maximum______
-    Correl = correlate(Master,Mask); 
+    Correl = correlate(Master,Mask);
     uint L, P;
 //    MA: if maximum correlation is 0, which is due to NaNs, assign -999
 //    so in getoffset they are disregarded as in magfft. See getoffset.
@@ -484,13 +496,13 @@ void coarsecorrel(
                  <<  MasksizeL + 2*AccL << ", " << MasksizeP + 2*AccP
                  << "\nNumber \tposl \tposp \toffsetl offsetp \tcorrelation\n";
   for (uint k=0; k<Nwin; k++)
-    { 
+    {
     // MA remove NaN valued coh windows from  Nwin, to be used in resfile
     if (  Result(k,0) == -999  ) NwinNANrm = NwinNANrm - 1;
-    scratchlogfile << k << "\t" << Centers(k,0) 
-                        << "\t" << Centers(k,1) 
-                        << "\t" << Result(k,1) 
-                        << "\t" << Result(k,2) 
+    scratchlogfile << k << "\t" << Centers(k,0)
+                        << "\t" << Centers(k,1)
+                        << "\t" << Result(k,1)
+                        << "\t" << Result(k,2)
                         << "\t" << Result(k,0) << endl;
      }
   scratchlogfile << "Estimated total offset (l,p): \t"
@@ -509,14 +521,14 @@ void coarsecorrel(
                  << "\nCoarse_correlation_translation_pixels: \t"
                  <<  offsetPixels                               // 1 digit after point?
                  << "\nNumber of correlation windows: \t\t" //MA informational
-                 <<  NwinNANrm 
+                 <<  NwinNANrm
                  << " of " << Nwin ;
   scratchresfile << "\n\n#     center(l,p)   coherence   offsetL   offsetP\n";
     for (uint k=0; k<Nwin; k++)
-     { 
+     {
       //MA remove/skip -999 values before writing resfile. For magspace.
-      // All the values are kept in  doris.log 
-      if  ( Result(k,0) == -999 )  continue;  
+      // All the values are kept in  doris.log
+      if  ( Result(k,0) == -999 )  continue;
       scratchresfile << k  << " \t" << Centers(k,0) << " \t" << Centers(k,1) << " \t"
            << Result(k,0)  << " \t" << Result(k,1)  << " \t" << Result(k,2)  << "\n";
      }
@@ -591,7 +603,7 @@ void coarsecorrelfft(
     pointsrandom = false;                       // only use these points
 
   // ______Only pow2 Masksize possible_____
-  if (!ispower2(MasksizeL)) 
+  if (!ispower2(MasksizeL))
     {
     PRINT_ERROR("coarse correl fft: MasksizeL should be 2^n")
     throw(input_error);
@@ -610,16 +622,23 @@ void coarsecorrelfft(
   const int32 spN = sinfo.currentwindow.pixhi  - initoffsetP;
 
   // ______Corners of useful overlap master,slave in master system______
-  const uint BORDER = 20;// slightly smaller 
-  const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
-  const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
-  const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
-  const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
+  //const uint BORDER = 20;// slightly smaller
+  //const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  //const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  //const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  //const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
+  // [FvL] for correct folding of points outside overlap window
+  const uint BORDER = 20;// slightly smaller
+  const int l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  const int lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  const int p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  const int pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
   const window overlap(l0,lN,p0,pN);
 
   // ______Distribute Nwin points over window______
   // ______Minlminp(i,0): line, (i,1): pixel, (i,2) flagfromdisk______
-  matrix<uint> Minlminp;
+  //matrix<uint> Minlminp;
+  matrix<int> Minlminp; //[FvL]
   if (pointsrandom)                             // no filename specified
     {
     Minlminp = distributepoints(real4(Nwin),overlap);
@@ -634,9 +653,13 @@ void coarsecorrelfft(
     for (uint i=0; i<Nwin; ++i)
       {
       ifpos >> ll >> pp;
-      Minlminp(i,0) = uint(ll-0.5*MasksizeL);   // correct for lower left corner
-      Minlminp(i,1) = uint(pp-0.5*MasksizeP);   // correct for lower left corner
-      Minlminp(i,2) = uint(1);                  // flag from file
+      //Minlminp(i,0) = uint(ll-0.5*MasksizeL);   // correct for lower left corner
+      //Minlminp(i,1) = uint(pp-0.5*MasksizeP);   // correct for lower left corner
+      //Minlminp(i,2) = uint(1);                  // flag from file
+      // [FvL]
+      Minlminp(i,0) = int(ll-0.5*MasksizeL);   // correct for lower left corner
+      Minlminp(i,1) = int(pp-0.5*MasksizeP);   // correct for lower left corner
+      Minlminp(i,2) = int(1);                  // flag from file
       ifpos.getline(dummyline,ONE27,'\n');      // goto next line.
       }
     ifpos.close();
@@ -710,7 +733,7 @@ void coarsecorrelfft(
   // ______Compute coherence of these points______
   matrix<complr4> Master;
   matrix<complr4> Mask;
-  matrix<real4>   Result(Nwin,3);               // R(i,0):delta l; 
+  matrix<real4>   Result(Nwin,3);               // R(i,0):delta l;
                                                 //  R(i,1):delta p; R(i,2):correl
   // ______ Progress messages ______
   int32 percent    = 0;
@@ -734,7 +757,7 @@ void coarsecorrelfft(
     window master(minMwinL, minMwinL+MasksizeL-1,
                   minMwinP, minMwinP+MasksizeP-1);// size=masksize
     // ______Same points in slave system (disk)______
-    window mask(minMwinL+initoffsetL, 
+    window mask(minMwinL+initoffsetL,
                 minMwinL+initoffsetL+MasksizeL-1,
                 minMwinP+initoffsetP,
                 minMwinP+initoffsetP+MasksizeP-1);
@@ -746,7 +769,7 @@ void coarsecorrelfft(
     // ______ Coherence/max correlation ______
     real4 offsetL, offsetP;
     //const real4 coheren = corrfft(absMaster,absMask,offsetL,offsetP);
-    //const real4 coheren = coherencefft(Master, Mask, 
+    //const real4 coheren = coherencefft(Master, Mask,
     //  1, MasksizeL/2, MasksizeP/2, //do not ovs, search full matrix for max
     //  offsetL,offsetP);// returned
     const real4 coheren = crosscorrelate(Master, Mask,
@@ -775,8 +798,194 @@ void coarsecorrelfft(
   // ______ Get good general estimate for offsetL, offsetP ______
   int32 offsetLines  = -999;
   int32 offsetPixels = -999;
-  getoffset(Result,offsetLines,offsetPixels);
+  
+  //MCC
+  //MCC: instead of a single offset, estimate a simple linear polynomial
+  int32 initNobs = Result.lines();  // number of observations
+  int32 Nobs =0;
+  // first select the values with coherence higher than thCoh
+  matrix<int32> indHigh(initNobs,1);
+  real8 thCoh = 0.2;
+  
+  //get number of Obs above a threshold
+  for (uint k=0; k<initNobs; k++)
+    {
+      //INFO<<real8(Result(k,0));
+     // INFO.print();
+      if (real8(Result(k,0)) >thCoh)
+      {
+         // INFO<<"Good";
+         // INFO<<real8(Result(k,0));
+
+     // INFO.print();
 
+          indHigh(k,0)=k;
+          Nobs++;
+      }
+  }
+  //right-hand side (a.k.a. xhat) for lines and pixels
+  matrix<real8>rhsL(2,1);
+  matrix<real8>rhsP(2,1);
+
+  //the means are used to calculate the value of the linear poly at the mean line and pixel
+  uint32 meanP = 0; //mean PX number
+  uint32 meanL = 0;//mean line number
+  
+  //if there are no values above the coherence threshold then use the initial offsets, e.g., based on orbits
+if (Nobs<1)
+{
+   
+    offsetLines  = initoffsetL;
+    offsetPixels = initoffsetP;
+    rhsL(0,0)    = 0;
+    rhsL(1,0)    = offsetLines;
+    rhsP(0,0)    = 0;
+    rhsP(1,0)    = offsetPixels;
+  
+}
+  //if there are not enough obs to estimate the polynomial then use the traditional "weighted mean" method
+else if (Nobs<10)
+{
+   getoffset(Result,offsetLines,offsetPixels);
+    rhsL(0,0)    = 0;
+    rhsL(1,0)    = offsetLines;
+    rhsP(0,0)    = 0;
+    rhsP(1,0)    = offsetPixels;
+  
+}
+  // do estimations using BLUE
+else
+{
+    //To exit the while loop which is used to remove outliers
+   bool flagExit = false;
+    
+  matrix<real8> yL(Nobs,1);                   // observation
+  matrix<real8> yP(Nobs,1);                   // observation
+  matrix<real8> AL(Nobs,2);                 // designmatrix
+  matrix<real8> AP(Nobs,2);                 // designmatrix
+  matrix<real8> Qy_1(Nobs,1);            // diagonal covariance matrix defined as vector to save memory
+  matrix<uint32> indeces(Nobs,1);
+  
+    //While loop runs until the maximum residuals are very small or too few obs remain
+ while (Nobs>9 && flagExit != true)
+ {
+  INFO << "Nobs " << Nobs;
+  INFO.print();
+  //below we remove the worst obs, so the matrices need to be resized
+  yL.resize(Nobs,1);
+  yP.resize(Nobs,1);
+  AL.resize(Nobs,2);
+  AP.resize(Nobs,2);
+  Qy_1.resize(Nobs,1);
+  indeces.resize(Nobs,1);
+  uint32 newK =0;
+
+  // select values with good coherence
+  for (uint k=0; k<initNobs; k++)
+    {
+     if (real8(Result(k,0) ) >thCoh)
+     {
+      Qy_1(newK,0)= real8(Result(k,0) ) ;
+      yL(newK,0) = real8(Result(k,1) ) ;
+      yP(newK,0) = real8(Result(k,2)  );
+
+      AL(newK,0) = real8(Minlminp(k,0) );
+      AL(newK,1) = 1  ;
+
+      AP(newK,0) = real8(Minlminp(k,1)  ) ;
+      AP(newK,1) = 1  ;
+      meanP = meanP + uint32(Minlminp(k,1)  ) ;
+      meanL = meanL + uint32(Minlminp(k,0)  ) ;
+      indeces(newK,0) = k;
+      newK++;
+     }
+    }
+  
+  INFO << "Nof new Obs : " << newK;
+  INFO.print();
+  
+  Qy_1 = Qy_1 / mean(Qy_1);// normalize weights (for tests!)
+  meanP = meanP/newK;
+  meanL = meanL/newK;
+  //
+  //matrix<real8>rhsL(2,1);
+  //getLS(yL,AL,Qy_1,xhat_rhsL);
+  
+  //LS Qx_hat for lines and pixels
+  matrix<real8> Qx_hat_L    = matTxmat(AL,diagxmat(Qy_1,AL));
+  matrix<real8> Qx_hat_P    = matTxmat(AP,diagxmat(Qy_1,AP));
+ 
+   //xhat for lines and pixels; still needs to be multiplied by the inverse of Qx_hat, see below
+   rhsL = matTxmat(AL,diagxmat(Qy_1,yL));
+   rhsP = matTxmat(AP,diagxmat(Qy_1,yP));
+    
+    // ______Compute solution______
+    choles(Qx_hat_L);             // Cholesky factorisation normalmatrix
+    choles(Qx_hat_P);             // Cholesky factorisation normalmatrix
+
+    // final solution
+    solvechol(Qx_hat_L,rhsL);     // Solution unknowns in rhs
+    solvechol(Qx_hat_P,rhsP);     // Solution unknowns in rhs
+
+    // estimation of residuals and removal of outliers
+    matrix<real8> yL_hat        = AL * rhsL;
+    matrix<real8> yP_hat        = AP * rhsP;
+    matrix<real8> eL_hat      = yL - yL_hat;
+    matrix<real8> eP_hat      = yP - yP_hat;
+    real4 max_eL =0;
+    real4 max_eP =0;
+    uint32 indMaxL =0;
+    uint32 indMaxP =0;
+  
+// look for the obs whose residual norm is maximum, for both lines and pixels
+    for (uint32 k=0; k<Nobs;k++)
+    {      
+        if (sqrt(eL_hat(k,0)*eL_hat(k,0)) >max_eL)
+        {
+        max_eL  = sqrt(eL_hat(k,0)*eL_hat(k,0));
+        indMaxL = k;
+    
+        }
+        if ( sqrt(eP_hat(k,0)*eP_hat(k,0))>max_eP)
+        {
+        max_eP=sqrt(eP_hat(k,0)*eP_hat(k,0));
+        indMaxP =k;
+   
+        } 
+    }
+    
+     INFO<< "max_eL : " <<  max_eL;
+     INFO<< ", max_eP : " <<  max_eP;
+     INFO.print();
+     
+     //if residuals are small then exit
+    if (max_eL <3.0 && max_eP<3.0)
+    {
+        INFO<<"exiting estimation loop";
+        INFO.print();
+        flagExit = true;
+        break;
+    }
+    else
+    {
+       INFO<<"removing obs " <<indeces(indMaxL,0) <<", and obs "<< indeces(indMaxP,0) << ", offset L: " 
+           << Result(indeces(indMaxL,0),1)<<", offset P: " << Result(indeces(indMaxP,0),2);
+       INFO.print();
+       //remove outliers by setting their coherence to zero
+       Result(indeces(indMaxL,0),0)=0;
+       Result(indeces(indMaxP,0),0)=0;
+       //update the number of observations
+       if (indMaxP!=indMaxL)
+        Nobs -=2;
+       else
+        Nobs--;    
+    }
+    
+
+   }//end while (Nobs>9)
+    offsetLines  = int32(meanL*rhsL(0,0)) + int32(rhsL(1,0));  // evaluate poly at mean line
+    offsetPixels = int32(meanP*rhsP(0,0)) + int32(rhsP(1,0));  // evaluate poly at mean pixel
+}//end else (Nobs>=10)
+    //MCC
   // ______ Write to files ______
   ofstream scratchlogfile("scratchlogcoarse2", ios::out | ios::trunc);
   bk_assert(scratchlogfile,"coarsecorrelfft: scratchlogcoarse2",__FILE__,__LINE__);
@@ -789,13 +998,13 @@ void coarsecorrelfft(
                  <<  MasksizeL << ", " << MasksizeP
                  << "\n\nNumber \tposL \tposP \toffsetL offsetP\tcorrelation\n";
   for (uint k=0; k<Nwin; k++)
-    { 
+    {
     // MA remove NaN valued coh windows from  Nwin, to be used in resfile
     if (  Result(k,0) == -999  ) NwinNANrm = NwinNANrm - 1;
-    scratchlogfile << k << "\t" << Minlminp(k,0) 
-                        << "\t" << Minlminp(k,1) 
-                        << "\t" << Result(k,1) 
-                        << "\t" << Result(k,2) 
+    scratchlogfile << k << "\t" << Minlminp(k,0)
+                        << "\t" << Minlminp(k,1)
+                        << "\t" << Result(k,1)
+                        << "\t" << Result(k,2)
                         << "\t" << Result(k,0) << endl;
      }
   scratchlogfile << "Estimated total offset (l,p): \t"
@@ -814,14 +1023,22 @@ void coarsecorrelfft(
                  <<  offsetLines                        // 1 digit after point?
                  << "\nCoarse_correlation_translation_pixels: \t"
                  <<  offsetPixels                      // 1 digit after point?
+                 << "\nSlope_CoarseCorr_lines: \t\t" //MCC
+                 <<  rhsL(0,0)
+                 << "\nInitial_Offset_CoarseCorr_lines: \t"
+                 <<   rhsL(1,0)                             // MCC
+                 << "\nSlope_CoarseCorr_pixels: \t\t" //MCC
+                 <<  rhsP(0,0) 
+                 <<  "\nInitial_Offset_CoarseCorr_pixels: \t"
+                 <<   rhsP(1,0)                             // MCC
                  << "\nNumber of correlation windows: \t\t" //MA informational
-                 <<  NwinNANrm 
+                 <<  NwinNANrm
                  << " of " << Nwin ;
   scratchresfile << "\n\n#     center(l,p)   coherence   offsetL   offsetP\n";
     for (uint k=0; k<Nwin; k++)
-     { 
+     {
       //MA remove/skip NaN -999 values before writing resfile. For magfft.
-      // All the values are kept in  doris.log 
+      // All the values are kept in  doris.log
       if (  Result(k,0) == -999 ) continue;
       scratchresfile << k  << " \t" << Minlminp(k,0) << " \t" << Minlminp(k,1) << " \t"
            << Result(k,0)  << " \t" << Result(k,1)  << " \t" << Result(k,2)  << "\n";
@@ -877,7 +1094,7 @@ void coarsecorrelfft(
  *    Mahmut Arikan, 12-Nov-2008                                *
  ****************************************************************/
 void mtiming_correl(
-        const input_mtiming   &mtiminginput, 
+        const input_mtiming   &mtiminginput,
         const slcimage           &minfo,
         const productinfo        &sinfo)     // simamp
   {
@@ -905,14 +1122,14 @@ void mtiming_correl(
 // ______Only odd Masksize possible_____
   bool forceoddl = false;
   bool forceoddp = false;
-  if (!isodd(MasksizeL)) 
+  if (!isodd(MasksizeL))
     {
-    forceoddl = true; 
+    forceoddl = true;
     MasksizeL+=1;                       // force oddness
     }
   if (!isodd(MasksizeP))
     {
-    forceoddp = true; 
+    forceoddp = true;
     MasksizeP+=1;                       // force oddness
     }
 
@@ -926,21 +1143,28 @@ void mtiming_correl(
   DEBUG.print();
 
   // ______Corners of useful overlap master,slave in master system______
-  const uint BORDER = 20;// slightly smaller 
-  const uint l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
-  const uint lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
-  const uint p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
-  const uint pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
+  //const uint BORDER = 20;// slightly smaller
+  //const uint l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
+  //const uint lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
+  //const uint p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
+  //const uint pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
+  // [FvL]
+  const uint BORDER = 20;// slightly smaller
+  const int l0   = uint(max(int32(minfo.currentwindow.linelo),sl0) + 0.5*MasksizeL + AccL + BORDER);
+  const int lN   = uint(min(int32(minfo.currentwindow.linehi),slN) - 0.5*MasksizeL - AccL - BORDER);
+  const int p0   = uint(max(int32(minfo.currentwindow.pixlo),sp0)  + 0.5*MasksizeP + AccP + BORDER);
+  const int pN   = uint(min(int32(minfo.currentwindow.pixhi),spN)  - 0.5*MasksizeP - AccP - BORDER);
+
 /*
   // ______Check masksize against height and width of the crop______
   if( int32(MasksizeL) > int32(lN-l0) || int32(MasksizeP) > int32(pN-p0) )
     {
      ERROR << "MTE: Impossible to continue! Masksize larger than the overlapping crop width or height. Please check.";
-     ERROR.print();     
+     ERROR.print();
      ERROR << "MTE: MasksizeL [" << MasksizeL << "] > crop height [" << int32(lN-l0) << "] ?";
-     ERROR.print();     
+     ERROR.print();
      ERROR << "MTE: MasksizeP [" << MasksizeP << "] >  crop width [" << int32(pN-p0) << "] ?";
-     ERROR.print();     
+     ERROR.print();
     throw(input_error) ;
     }
 */
@@ -955,7 +1179,8 @@ void mtiming_correl(
 
   // ______Distribute Nwin points over window______
   // ______Centers(i,0): line, (i,1): pixel, (i,2) flagfromdisk______
-  matrix<uint> Centers;
+  //matrix<uint> Centers; [FvL]
+  matrix<int> Centers;
   if (pointsrandom)                             // no filename specified
     {
     Centers = distributepoints(real4(Nwin),overlap);
@@ -970,10 +1195,15 @@ void mtiming_correl(
     uint ll,pp;
     for (uint i=0; i<Nwin; ++i)
       {
+       
       ifpos >> ll >> pp;
-      Centers(i,0) = uint(ll);                  // correct for lower left corner
-      Centers(i,1) = uint(pp);                  // correct for lower left corner
-      Centers(i,2) = uint(1);                   // flag from file
+      //Centers(i,0) = uint(ll);                  // correct for lower left corner
+      //Centers(i,1) = uint(pp);                  // correct for lower left corner
+      //Centers(i,2) = uint(1);                   // flag from file
+      // [FvL]
+      Centers(i,0) = int(ll);                  // correct for lower left corner
+      Centers(i,1) = int(pp);                  // correct for lower left corner
+      Centers(i,2) = int(1);                   // flag from file
       ifpos.getline(dummyline,ONE27,'\n');              // goto next line.
       }
     ifpos.close();
@@ -1028,7 +1258,7 @@ void mtiming_correl(
         {
         troubleoverlap=true;
         WARNING << STEP << "point from file: "
-             << i+1 << " " << Centers(i,0) << " " 
+             << i+1 << " " << Centers(i,0) << " "
              << Centers(i,1)
              << " outside overlap master, slave. New position: ";
         Centers(i,1) = pN + pN-Centers(i,1);
@@ -1046,7 +1276,7 @@ void mtiming_correl(
 
   // ______Compute correlation of these points______
   matrix<complr4> Mcmpl;        // Master complex image
-  matrix<real4> Sampl;          // Simulated amplitude 
+  matrix<real4> Sampl;          // Simulated amplitude
   matrix<real4> mMag;           // amplitude master
   matrix<real4> Correl;         // matrix with correlations
   matrix<real4> Result(Nwin,3); // R(i,0)=correlation; (i,1)=delta l; (i,2)=delta p;
@@ -1078,7 +1308,7 @@ void mtiming_correl(
     mwin.pixlo  = cenMwinP - (MasksizeP-1)/2 - AccP;  // MP is forced odd
     mwin.pixhi  = mwin.pixlo + MasksizeP +2*AccP - 1;
 
-  // Products actually only hold data within the window. 
+  // Products actually only hold data within the window.
   // Therefore we need to convert back to file's(x,y) before reading data.
   // Batu 2007 08 01
   // uint cenSwinL    = cenMwinL + initoffsetL - sinfo.win.linelo +1 ;          // adjust initoffset
@@ -1091,7 +1321,7 @@ void mtiming_correl(
     swin.linehi = swin.linelo + MasksizeL - 1;
     swin.pixlo  = cenSwinP - (MasksizeP-1)/2;      // MP is forced odd
     swin.pixhi  = swin.pixlo + MasksizeP - 1;
-//    DEBUG << "   cenSwinL " << cenSwinL << " cenSwinP " << cenSwinL; 
+//    DEBUG << "   cenSwinL " << cenSwinL << " cenSwinP " << cenSwinL;
 //    DEBUG.print();
 //    DEBUG << "sl0 " << swin.linelo << " slN " << swin.linehi  << " sp0 " << swin.pixlo << " spN " << swin.pixhi;
 //    DEBUG.print();
@@ -1100,12 +1330,12 @@ void mtiming_correl(
     // Sampl  = sinfo.readdatar4(master); // readfile(Sampl,master,numberoflatpixels?,winfromfile?,zerooffset)
     Mcmpl  = minfo.readdata(swin);      // small patch
     Sampl  = sinfo.readdatar4(mwin);       // big   patch
-    mMag   = magnitude(Mcmpl); 
-    matrix<real4> &sMask = mMag ;           // amplitude small patch from master that shifts over 
+    mMag   = magnitude(Mcmpl);
+    matrix<real4> &sMask = mMag ;           // amplitude small patch from master that shifts over
     matrix<real4> &mMask = Sampl ;          // amplitude big   patch from simamp
 
     // ______Compute correlation matrix and find maximum______
-    //#Correl = correlate(Master,Mask); 
+    //#Correl = correlate(Master,Mask);
     Correl = correlate(mMask,sMask);   // correlate(simamp,masteramp)
     uint L, P;
 //    MA: if maximum correlation is 0, which is due to NaNs, assign -999
@@ -1146,7 +1376,7 @@ void mtiming_correl(
   // ______ Compute Time ______
   // minus sign is due to the offsets being reference to DEM (offset = master-dem)
   offsets2timing(minfo, -offsetLines, -offsetPixels, masterAztime, masterRatime); // using overall offsets to
-                                                                                  // determine master timing error 
+                                                                                  // determine master timing error
 
   INFO << "Estimated master azimuth timing error [sec]: " << masterAztime << " sec.";
   INFO.print();
@@ -1173,13 +1403,13 @@ void mtiming_correl(
                  <<  MasksizeL + 2*AccL << ", " << MasksizeP + 2*AccP
                  << "\nNumber \tposl \tposp \toffsetl offsetp \tcorrelation\n";
   for (uint k=0; k<Nwin; k++)
-    { 
+    {
     // MA remove NaN valued coh windows from  Nwin, to be used in resfile
     if (  Result(k,0) == -999  ) NwinNANrm = NwinNANrm - 1;
-    scratchlogfile << k << "\t" << Centers(k,0) 
-                        << "\t" << Centers(k,1) 
-                        << "\t" << Result(k,1) 
-                        << "\t" << Result(k,2) 
+    scratchlogfile << k << "\t" << Centers(k,0)
+                        << "\t" << Centers(k,1)
+                        << "\t" << Result(k,1)
+                        << "\t" << Result(k,2)
                         << "\t" << Result(k,0) << endl;
      }
   scratchlogfile << "Estimated total offset (l,p): \t"
@@ -1196,7 +1426,7 @@ void mtiming_correl(
                  << "\nCorrelation method \t\t\t: \t" << "magspace "               // mtiminginput.method == 22
                  << "(" << MasksizeL + 2*AccL << "," << MasksizeP + 2*AccP << ")"
                  << "\nNumber of correlation windows used \t: \t"                     //MA informational
-                 <<  NwinNANrm << " of " << Nwin 
+                 <<  NwinNANrm << " of " << Nwin
                  << "\nEstimated translation master w.r.t. synthetic amplitude (master-dem):"
                  << "\n  Positive offsetL: master image is to the bottom"
                  << "\n  Positive offsetP: master image is to the right"
@@ -1255,7 +1485,7 @@ void mtiming_correl(
  *  - coarse offsets between dem and the master                 *
  *                                                              *
  *    Bert Kampes, 12-Dec-1998 (coarsecorrelfft)                *
- *    Mahmut Arikan, 04-Dec-2008 
+ *    Mahmut Arikan, 04-Dec-2008
  ****************************************************************/
 void mtiming_correlfft(
     const input_mtiming    &mtiminginput,
@@ -1285,7 +1515,7 @@ void mtiming_correlfft(
     pointsrandom = false;                       // only use these points
 
   // ______Only pow2 Masksize possible_____
-  if (!ispower2(MasksizeL)) 
+  if (!ispower2(MasksizeL))
     {
     PRINT_ERROR("mtiming correl fft: MasksizeL should be 2^n")
     throw(input_error);
@@ -1306,11 +1536,17 @@ void mtiming_correlfft(
   DEBUG.print();
 
   // ______Corners of useful overlap master,slave in master system______
-  const uint BORDER = 20;// slightly smaller 
-  const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
-  const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
-  const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
-  const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
+  //const uint BORDER = 20;// slightly smaller
+  //const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  //const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  //const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  //const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
+  // [FvL]
+  const uint BORDER = 20;// slightly smaller
+  const int l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  const int lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  const int p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  const int pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
   const window overlap(l0,lN,p0,pN);
 
   DEBUG << "overlap l0: " << l0 << " lN " << lN << " p0 " << p0 << " pN " << pN;
@@ -1318,7 +1554,8 @@ void mtiming_correlfft(
 
   // ______Distribute Nwin points over window______
   // ______Minlminp(i,0): line, (i,1): pixel, (i,2) flagfromdisk______
-  matrix<uint> Minlminp;
+  //matrix<uint> Minlminp; // [FvL]
+  matrix<int> Minlminp;
   if (pointsrandom)                             // no filename specified
     {
     Minlminp = distributepoints(real4(Nwin),overlap);
@@ -1333,9 +1570,13 @@ void mtiming_correlfft(
     for (uint i=0; i<Nwin; ++i)
       {
       ifpos >> ll >> pp;
-      Minlminp(i,0) = uint(ll-0.5*MasksizeL);   // correct for lower left corner
-      Minlminp(i,1) = uint(pp-0.5*MasksizeP);   // correct for lower left corner
-      Minlminp(i,2) = uint(1);                  // flag from file
+      //Minlminp(i,0) = uint(ll-0.5*MasksizeL);   // correct for lower left corner
+      //Minlminp(i,1) = uint(pp-0.5*MasksizeP);   // correct for lower left corner
+      //Minlminp(i,2) = uint(1);                  // flag from file
+      // [FvL]
+      Minlminp(i,0) = int(ll-0.5*MasksizeL);   // correct for lower left corner
+      Minlminp(i,1) = int(pp-0.5*MasksizeP);   // correct for lower left corner
+      Minlminp(i,2) = int(1);                  // flag from file
       ifpos.getline(dummyline,ONE27,'\n');      // goto next line.
       }
     ifpos.close();
@@ -1411,9 +1652,9 @@ void mtiming_correlfft(
 
   // ______Compute coherence of these points______
   matrix<complr4> Mcmpl;          // Master complex image
-  matrix<real4>   Sampl;          // Simulated amplitude 
+  matrix<real4>   Sampl;          // Simulated amplitude
   matrix<complr4> Scmpl;           // real4 simamp --> creal4 simamp
-  matrix<real4>   Result(Nwin,3); //  R(i,0):delta l; 
+  matrix<real4>   Result(Nwin,3); //  R(i,0):delta l;
                                   //  R(i,1):delta p; R(i,2):correl
 
   // ______ Progress messages ______
@@ -1446,7 +1687,7 @@ void mtiming_correlfft(
     Sampl  = sinfo.readdatar4(mwin);         // simamp (DEM) read patch
     Scmpl   = mat2cr4(Sampl);
     Sampl.resize(1,1);                       // dealloc...
-    matrix<complr4> &sMask = Mcmpl ;         // complex patch from the master that shifts over 
+    matrix<complr4> &sMask = Mcmpl ;         // complex patch from the master that shifts over
     matrix<complr4> &mMask = Scmpl ;         // complex patch from the simamp
                                              // patch sizes are equal but
                                              // shifted patch can have initial
@@ -1455,7 +1696,7 @@ void mtiming_correlfft(
     // ______ Coherence/max correlation ______
     real4 offsetL, offsetP;
     //const real4 coheren = corrfft(absMaster,absMask,offsetL,offsetP);
-    //const real4 coheren = coherencefft(Master, Mask, 
+    //const real4 coheren = coherencefft(Master, Mask,
     //  1, MasksizeL/2, MasksizeP/2, //do not ovs, search full matrix for max
     //  offsetL,offsetP);// returned
     const real4 coheren = crosscorrelate(mMask, sMask,
@@ -1463,7 +1704,7 @@ void mtiming_correlfft(
                                         offsetL,offsetP); // returned
     DEBUG << "Offset between chips (l,p)    = " << offsetL << ", " << offsetP;
     DEBUG.print();
-    if ( coheren > 1 ) continue; // MA ignore correlation > 1. 
+    if ( coheren > 1 ) continue; // MA ignore correlation > 1.
 
     // ______ Store result of this patch ______
     Result(i,0) = coheren;
@@ -1497,11 +1738,11 @@ void mtiming_correlfft(
   // ______ Initialize Variables ______
   real8 masterAztime  = -999;
   real8 masterRatime  = -999;
-  
+
   // ______ Compute Time ______
   // minus sign is due to the offsets being reference to DEM (offset = master-dem)
   offsets2timing(minfo, -offsetLines, -offsetPixels, masterAztime, masterRatime); // using overall offsets to
-                                                                                  // determine master timing error 
+                                                                                  // determine master timing error
 
   INFO << "Estimated master azimuth timing error [sec]: " << masterAztime << " sec.";
   INFO.print();
@@ -1524,13 +1765,13 @@ void mtiming_correlfft(
                  <<  MasksizeL << ", " << MasksizeP
                  << "\n\nNumber \tposL \tposP \toffsetL offsetP\tcorrelation\n";
   for (uint k=0; k<Nwin; k++)
-    { 
+    {
     // MA remove NaN valued coh windows from  Nwin, to be used in resfile
     if (  Result(k,0) == -999  ) NwinNANrm = NwinNANrm - 1;
-    scratchlogfile << k << "\t" << Minlminp(k,0) 
-                        << "\t" << Minlminp(k,1) 
-                        << "\t" << Result(k,1) 
-                        << "\t" << Result(k,2) 
+    scratchlogfile << k << "\t" << Minlminp(k,0)
+                        << "\t" << Minlminp(k,1)
+                        << "\t" << Result(k,1)
+                        << "\t" << Result(k,2)
                         << "\t" << Result(k,0) << endl;
      }
   scratchlogfile << "Estimated total offset (l,p): \t"
@@ -1547,7 +1788,7 @@ void mtiming_correlfft(
                  << "\nCorrelation method \t\t\t: \t" << "magfft "               // mtiminginput.method == 21
                  << "(" << MasksizeL << "," << MasksizeP << ")"
                  << "\nNumber of correlation windows used \t: \t"                     //MA informational
-                 <<  NwinNANrm << " of " << Nwin 
+                 <<  NwinNANrm << " of " << Nwin
                  << "\nEstimated translation master w.r.t. synthetic amplitude (master-dem):"
                  << "\n  Positive offsetL: master image is to the bottom"
                  << "\n  Positive offsetP: master image is to the right"
@@ -1561,9 +1802,9 @@ void mtiming_correlfft(
                  << masterRatime  << " sec.";                                    // in seconds
 //  scratchresfile << "\n\n#     center(l,p)   coherence   offsetL   offsetP\n";
 //    for (uint k=0; k<Nwin; k++)
-//     { 
+//     {
 //      //MA remove/skip NaN: -999 values before writing resfile. For magfft.
-//      // All the values are kept in  doris.log 
+//      // All the values are kept in  doris.log
 //      if (  Result(k,0) == -999 ) continue;
 //      scratchresfile << k  << " \t" << Minlminp(k,0) << " \t" << Minlminp(k,1) << " \t"
 //           << Result(k,0)  << " \t" << Result(k,1)  << " \t" << Result(k,2)  << "\n";
@@ -1625,7 +1866,7 @@ real4 corrfft(
          const matrix<real4> &Master,                   // magnitude image
          const matrix<real4> &Mask,                     // magnitude image
          real4 &offsetL,                                // updated
-         real4 &offsetP)                                // updated 
+         real4 &offsetP)                                // updated
   {
   TRACE_FUNCTION("corrfft (BK 18-Oct-1999)");
   // ______ Internal variables ______
@@ -1745,7 +1986,9 @@ real4 corrfft(
  *                                                              *
  *    Bert Kampes, 21-Jan-1999                                  *
  ****************************************************************/
-matrix<uint> distributepoints(
+//matrix<uint> distributepoints(
+// [FvL] for correct folding of points outside overlap window when inserted by file
+matrix<int> distributepoints(
         real4 nW,
         const window &win)
   {
@@ -1754,16 +1997,17 @@ matrix<uint> distributepoints(
   real4 pixels = win.pixhi  - win.pixlo  + 1;
 
   uint numw = uint(nW);
-  matrix<uint> Result(numw,uint(3));
+  //matrix<uint> Result(numw,uint(3)); // [FvL]
+  matrix<int> Result(numw,uint(3));
   // ______ Distribution for dl=dp ______
   real4 wp = sqrt(nW/(lines/pixels));   // wl: #windows in line direction
   real4 wl = nW / wp;                   // wp: #windows in pixel direction
   if (wl < wp)                          // switch wl,wp : later back
-    wl = wp; 
+    wl = wp;
   int32 wlint  = int32(rint(wl));// round largest
   real4 deltal = (lines-1) / (real4(wlint-1));
   int32 totp   = int32(pixels*wlint);
-  real4 deltap = (real4(totp-1)) / (real4(nW-1)); 
+  real4 deltap = (real4(totp-1)) / (real4(nW-1));
   real4 p      = -deltap;
   real4 l      = 0.;
   uint lcnt    = 0;
@@ -1777,8 +2021,10 @@ matrix<uint> distributepoints(
       lcnt++;
       }
     l = lcnt * deltal;
-    Result(i,0) = uint(rint(l));
-    Result(i,1) = uint(rint(p));
+    //Result(i,0) = uint(rint(l));
+    //Result(i,1) = uint(rint(p)); // [FvL]
+    Result(i,0) = int(rint(l));
+    Result(i,1) = int(rint(p));
     }
 
   // ______ Correct distribution to window ______
@@ -1844,23 +2090,26 @@ void getoffset(
   for (uint i=0; i<nW; i++)
   { //MA fix to ignore -999 values from statistics
     if  ( sortResult(i,0) == -999 )  {
-    nWNANrm = nWNANrm - 1; continue; } 
+    nWNANrm = nWNANrm - 1; continue; }
      mean_coh+=sortResult(i,0);
   }
   //mean_coh /= real4(nW);
   mean_coh /= real4(nWNANrm);
-  for (uint i=0; i<nW; i++) 
+  for (uint i=0; i<nW; i++)
   { //MA fix to ignore -999 values from statistics
    if  ( sortResult(i,0) == -999 ) continue;
     var_coh +=sqr(sortResult(i,0)-mean_coh);
   }
   //var_coh /= real4(nW-1);
   var_coh /= real4(nWNANrm-1);
+  
   INFO << "Mean coherence at estimated positions: " << mean_coh;
   INFO.print();
   const real4 std_coh = sqrt(var_coh);
   INFO << "Standard deviation coherence:          " << std_coh;
   INFO.print();
+  if (mean_coh<0.1)
+    mean_coh=0.1;
   const real4 thresh_coh = mean_coh;
   INFO << "Using as threshold:                    " << thresh_coh;
   INFO.print();
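
The floor added above matters when overall coherence is poor: the mean of the remaining windows is used directly as the acceptance threshold, and without the floor a near-zero mean would accept almost every window. A short sketch of the threshold computation, assuming -999 marks the NaN windows as in the code:

    import numpy as np

    def coherence_threshold(coh):
        valid = coh[coh != -999]              # skip NaN sentinel values [MA]
        return max(float(valid.mean()), 0.1)  # floor at 0.1, as added above
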
@@ -1930,12 +2179,12 @@ void getoffset(
     valueL = int32(Result(i,0)+0.5);
     valueP = int32(Result(i,1)+0.5);
     correl = Result(i,2);
-    if (correl > highestcorrel) 
+    if (correl > highestcorrel)
       highestcorrel = correl;
     cnt = 0;
     for (j=0; j<nW; j++)
       {
-      if (abs(Result(j,0) - valueL) < 2  &&  
+      if (abs(Result(j,0) - valueL) < 2  &&
           abs(Result(j,1) - valueP) < 2)
         cnt++;
       }
@@ -2005,14 +2254,14 @@ void getmodeoffset(
   //DEBUG.print("unsorted input matrix:");
   //Result.showdata();
 /*
-  Result(0,0)=0.1   ; Result(0,1)=3 ; Result(0,2)=2  ;   
-  Result(1,0)=0.2   ; Result(1,1)=1 ; Result(1,2)=4  ;   
-  Result(2,0)=0.3   ; Result(2,1)=4 ; Result(2,2)=1  ;   
-  Result(3,0)=0.2   ; Result(3,1)=1 ; Result(3,2)=3  ;   
-  Result(4,0)=0.2   ; Result(4,1)=3 ; Result(4,2)=1  ;   
-  Result(5,0)=0.3   ; Result(5,1)=1 ; Result(5,2)=-1  ;   
-  Result(6,0)=0.2   ; Result(6,1)=4 ; Result(6,2)=0  ;   
-  Result(7,0)=0.1   ; Result(7,1)=1 ; Result(7,2)=-2  ;   
+  Result(0,0)=0.1   ; Result(0,1)=3 ; Result(0,2)=2  ;
+  Result(1,0)=0.2   ; Result(1,1)=1 ; Result(1,2)=4  ;
+  Result(2,0)=0.3   ; Result(2,1)=4 ; Result(2,2)=1  ;
+  Result(3,0)=0.2   ; Result(3,1)=1 ; Result(3,2)=3  ;
+  Result(4,0)=0.2   ; Result(4,1)=3 ; Result(4,2)=1  ;
+  Result(5,0)=0.3   ; Result(5,1)=1 ; Result(5,2)=-1  ;
+  Result(6,0)=0.2   ; Result(6,1)=4 ; Result(6,2)=0  ;
+  Result(7,0)=0.1   ; Result(7,1)=1 ; Result(7,2)=-2  ;
 
 Result.showdata();
 cerr << endl;
@@ -2028,11 +2277,11 @@ cerr << endl;
 
   // --- Set offset to highest coherence estimate ---
   offsetLines   = int32(rint(sortResult(0,1))); // rounds negative too, was -999
-  offsetPixels  = int32(rint(sortResult(0,2))); // rounds negative too 
-                                                // [ why set to highes coherence mean 
+  offsetPixels  = int32(rint(sortResult(0,2))); // rounds negative too
+                                                // [ why set to highes coherence mean
                                                 // loop index could start from i==0.]
 
-  // ______ Remove window offests with -999 (NaN) coherence values _____ 
+  // ______ Remove window offests with -999 (NaN) coherence values _____
   // added by [MA]
   const uint nW = sortResult.lines(); // Number of windows
   uint nWNANrm  = nW;                 // Number of windows without NAN values
@@ -2042,19 +2291,19 @@ cerr << endl;
   real4 var_coh  = 0.0;
   real4 mean_coh = 0.0;
   for (uint i=0; i<nW; i++)             // [MA] fix to ignore -999 values from statistics
-    { 
-     if ( sortResult(i,0) == -999 )     // if NaN 
+    {
+     if ( sortResult(i,0) == -999 )     // if NaN
        {
         nWNANrm -= 1;          // determine number of windows without NaN
-        continue; 
-       } 
-     mean_coh+=sortResult(i,0);         
+        continue;
+       }
+     mean_coh+=sortResult(i,0);
     }
   //mean_coh /= real4(nW);
-  mean_coh /= real4(nWNANrm);           // mean coherence 
+  mean_coh /= real4(nWNANrm);           // mean coherence
 
   for (uint i=0; i<nW; i++)             // [MA fix to ignore -999 values from statistics
-    { 
+    {
      if  ( sortResult(i,0) == -999 ) continue;
      var_coh +=sqr(sortResult(i,0)-mean_coh);
     }
@@ -2067,7 +2316,7 @@ cerr << endl;
   INFO << "Standard deviation coherence:          " << std_coh;
   INFO.print();
 
-  // ______ Statistics about threshold ______ 
+  // ______ Statistics about threshold ______
   const real4 thresh_coh = mean_coh;
   INFO << "Using as threshold:                    " << thresh_coh;
   INFO.print();
@@ -2083,7 +2332,7 @@ cerr << endl;
   for (register uint i=1; i<nW; i++)
     {
     if (sortResult(i,0)>=thresh_coh)
-      { 
+      {
       cnt++;
       mean_coh     += sortResult(i,0);
       offsetLines  += int32(rint(sortResult(i,1)));// round
@@ -2140,19 +2389,19 @@ cerr << endl;
   INFO.print("------------------------------------------------------");
 
   mysort231(sortResult);                        // re-sort on 2nd, 3rd than 1st column
-  // sortResult.showdata();       
+  // sortResult.showdata();
   int32 mode_val = 0, mode_idx = -1;            // mode count, mode index
-  int32 evenmode_val = 0, nEven= 0;             // check for equal values of mode 
-  int32 L=NaN, P=NaN, offset_freq=0;            // Line, Pixel, frequency 
+  int32 evenmode_val = 0, nEven= 0;             // check for equal values of mode
+  int32 L=NaN, P=NaN, offset_freq=0;            // Line, Pixel, frequency
   real4 offset_mcoh=0.0;                        // avg. coherence for each set of offsets
   for (register uint i=0; i<nW; i++)            // Major reason of this main loop is individual stdout request.
     {
     if (sortResult(i,0)>=thresh_coh)
-      { 
+      {
       // _____ frequency of offsets _____  [MA]
-      if ( L != int32(rint(sortResult(i,1))) ||    // skip initializing of  
+      if ( L != int32(rint(sortResult(i,1))) ||    // skip initializing of
            P != int32(rint(sortResult(i,2)))    )  // the same offset multiple times
-        {                       
+        {
           L=int32(rint(sortResult(i,1)));          // get initial values
           P=int32(rint(sortResult(i,2)));
         }
@@ -2161,13 +2410,13 @@ cerr << endl;
           continue ;   // L, P equal to previous values then skip counting
                        // since matrix is sorted on L,P
          }
-      offset_freq=0;   // reset   
+      offset_freq=0;   // reset
       offset_mcoh=0;
       for (register uint j=0; j<nW; j++)           // scan data for occurences of an offset
         {                                          // for all offsets
          if ( L == int32(rint(sortResult(j,1))) && P == int32(rint(sortResult(j,2))) )
            {
-             offset_freq++; 
+             offset_freq++;
              offset_mcoh += sortResult(j,0);      // for decision on even mode values
            }                                      // at different L,P pair.
         } // end scan data
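+      // i.e., each window above the threshold casts a vote for its integer
+      // (L,P) offset pair; offset_freq counts the votes for the current pair
+      // and offset_mcoh accumulates their coherence, which is used further
+      // down to warn when two different pairs tie for the mode.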
@@ -2195,21 +2444,21 @@ cerr << endl;
 
       scratchlogfile << '\n' << offset_mcoh << "\t " << L << "\t   " << P << "\t\t" << offset_freq << "\t " << mode_idx; // pass to .log
       } // above threshold
-    }   // end mode 
+    }   // end mode
 
     scratchlogfile << "\n\n*******************************************************************";
     scratchlogfile.close();  // close scratchlogmtiminghtr
 
    // _____ Even occurrence check _____
-    if (mode_val == evenmode_val) // there are even values of mode. 
+    if (mode_val == evenmode_val) // there are even values of mode.
       {
         WARNING << "There are " << nEven << " offset pairs which has equal mode values are equal.";
         WARNING.print();
-        WARNING << "Check offset results and logs, and increase the number and/or the size of the correlation windows.";       
+        WARNING << "Check offset results and logs, and increase the number and/or the size of the correlation windows.";
         WARNING.print();
       }
 
- 
+
   offsetLines  = int32(rint(sortResult(mode_idx,1)));  // update mode offsets
   offsetPixels = int32(rint(sortResult(mode_idx,2)));
   PROGRESS.print("getmodeoffset: End of mode analysis ");
@@ -2257,16 +2506,35 @@ cerr << endl;
 void finecoreg(
         const input_fine &fineinput,
         const slcimage   &minfo,
-        const slcimage   &sinfo)
-  {
+        const slcimage   &sinfo,
+        const input_ell &ell,
+        orbit           &masterorbit,  // cannot be const for spline
+        orbit           &slaveorbit,   // cannot be const for spline
+        const BASELINE  &baseline)
+//input_ellips, master, slave, masterorbit, slaveorbit, baseline);
+  {
+    if (fineinput.shiftazi == 0)
+      {
+      INFO << "I assume you have already deramped or centered the data spectrum...";
+      INFO.print();
+      }
+    else if (fineinput.shiftazi == 2)
+      {
+      INFO << "\nPROCESS: Deramp Master and Slave spectrum in FINE COREGISTRATION...";
+      INFO.print();
+      // deramp(minfo, fineinput, masterorbit);
+      }
+
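+    // shiftazi convention, as far as this patch shows:
+    //   0: spectra already deramped/centered by the user;
+    //   1: center the azimuth spectrum with the Doppler-centroid polynomial
+    //      (handled per chip in the correlation cases further down);
+    //   2: deramp master/slave (the deramp call above is still disabled).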
   TRACE_FUNCTION("finecoreg (BK 29-Oct-99)")
   char dummyline[ONE27];
   //const uint Mfilelines   = minfo.currentwindow.lines();
   //const uint Sfilelines   = sinfo.currentwindow.lines();
   const uint Nwin         = fineinput.Nwin;                 // n windows, from file or random
   uint NwinNANrm          = fineinput.Nwin;                 // [MA] number of windows w/o NaN
-  const int32 initoffsetL = fineinput.initoffsetL;          // initial offset
-  const int32 initoffsetP = fineinput.initoffsetP;          // initial offset
+  int32 initoffsetL       = fineinput.initoffsetL;          // initial offset
+  int32 initoffsetP       = fineinput.initoffsetP;          // initial offset
   uint MasksizeL          = fineinput.MasksizeL;            // size of correlation window
   uint MasksizeP          = fineinput.MasksizeP;            // size of correlation window
   uint AccL               = fineinput.AccL;                 // size of small chip
@@ -2293,16 +2561,22 @@ void finecoreg(
   const int32 spN = sinfo.currentwindow.pixhi  - initoffsetP;
 
   // ______Corners of useful overlap master,slave in master system______
+  //const uint BORDER = 20;// make slightly smaller
+  //const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  //const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  //const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  //const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
   const uint BORDER = 20;// make slightly smaller
-  const uint l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
-  const uint lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
-  const uint p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
-  const uint pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
+  const int l0   = max(int32(minfo.currentwindow.linelo),sl0) + BORDER;
+  const int lN   = min(int32(minfo.currentwindow.linehi),slN) - MasksizeL - BORDER;
+  const int p0   = max(int32(minfo.currentwindow.pixlo),sp0)  + BORDER;
+  const int pN   = min(int32(minfo.currentwindow.pixhi),spN)  - MasksizeP - BORDER;
   const window overlap(l0,lN,p0,pN);
 
   // ______ Distribute Nwin points over window, or read from file ______
   // ______ Minlminp(i,0): line, (i,1): pixel, (i,2) flagfromdisk ______
-  matrix<uint> Minlminp;
+  //matrix<uint> Minlminp; // [FvL]
+  matrix<int> Minlminp;
   if (pointsrandom)                             // no filename specified
     {
     Minlminp = distributepoints(real4(Nwin),overlap);
@@ -2317,9 +2591,13 @@ void finecoreg(
     for (uint i=0; i<Nwin; ++i)
       {
       ifpos >> ll >> pp;
-      Minlminp(i,0) = uint(ll - 0.5*MasksizeL); // correct for lower left corner
-      Minlminp(i,1) = uint(pp - 0.5*MasksizeP); // correct for lower left corner
-      Minlminp(i,2) = uint(1);                  // flag from file
+      //Minlminp(i,0) = uint(ll - 0.5*MasksizeL); // correct for lower left corner
+      //Minlminp(i,1) = uint(pp - 0.5*MasksizeP); // correct for lower left corner
+      //Minlminp(i,2) = uint(1);                  // flag from file
+      // [FvL]
+      Minlminp(i,0) = int(ll - 0.5*MasksizeL); // correct for lower left corner
+      Minlminp(i,1) = int(pp - 0.5*MasksizeP); // correct for lower left corner
+      Minlminp(i,2) = int(1);                  // flag from file
       ifpos.getline(dummyline,ONE27,'\n');      // goto next line.
       }
     ifpos.close();
@@ -2334,7 +2612,7 @@ void finecoreg(
     // ______ no check for uniqueness of points ______
     bool troubleoverlap = false;
     for (uint i=0; i<Nwin; ++i)
-      {
+      { // windows
       if (Minlminp(i,0) < l0)
         {
         troubleoverlap=true;
@@ -2388,20 +2666,29 @@ void finecoreg(
       }
     }
 
+ 
   // ______Compute coherence of these points______
   matrix<complr4> Master;
   matrix<complr4> Mask;
-  matrix<real4>   Result(Nwin,3);       // R(i,0):delta l; 
+  matrix<real4>   Result(Nwin,3);       // R(i,0):delta l;
                                         // R(i,1):delta p; R(i,2):correl
-
+  
   // ______ Progress message ______
   int32 tenpercent = int32(rint(Nwin/10.0));
   if (tenpercent==0) tenpercent = 1000;
   int32 percent = 0;
 
-  // ====== Compute for all locations ======
+  int32 fivepercent = int32(rint(Nwin/5.0));
+  if (fivepercent==0) fivepercent = 1000;
+
+  //if (fineinput.method == fc_coherence)          // input file ()
+  //    radarcodedem(fineinput, input_ellips, input_i_comprefdem,
+  //                 master, slave, interferogram, masterorbit, slaveorbit);
+
+  // ====== Compute for all locations ======
   for (uint i=0;i<Nwin;i++)
-    {
+    { // all locations
     // ______ Give progress message ______
     if (i%tenpercent==0)
       {
@@ -2413,13 +2700,28 @@ void finecoreg(
     // ______Minlminp (lower left corners) of window in master system______
     const uint minMwinL = Minlminp(i,0);
     const uint minMwinP = Minlminp(i,1);
+
+    //INFO << "Pos: " << minMwinL << ", " << minMwinP
+    //     << "\n , Before initoffsetL: " << initoffsetL << " initoffsetP: " << initoffsetP;
+    //INFO.print();
+
+    initoffsetL = lrint(sinfo.slopeL*minMwinL + sinfo.realoffsetL);  // initial offset in lines  [MCC]
+    initoffsetP = lrint(sinfo.slopeP*minMwinP + sinfo.realoffsetP);  // initial offset in pixels [MCC]
+
+    //INFO << "\n After initoffsetL: " << initoffsetL << ", initoffsetP: " << initoffsetP;
+    //INFO.print();
+
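+    // A sketch of the model assumed here (based on the fields used): the
+    // coarse step fits a linear trend
+    //   offsetL ~ slopeL*line + realoffsetL,  offsetP ~ slopeP*pixel + realoffsetP,
+    // so each window gets its own initial offset instead of one global pair
+    // (which is why initoffsetL/P are no longer const above).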
     DEBUG.print(" ");
     DEBUG << "Window: " << i << " [" << minMwinL << ", " << minMwinP << "]";
     DEBUG.print();
     window master(minMwinL, minMwinL+MasksizeL-1,
                   minMwinP, minMwinP+MasksizeP-1);// size=masksize
     // ______Same points in slave system (disk)______
-    window mask(minMwinL+initoffsetL, 
+    window mask(minMwinL+initoffsetL,
                 minMwinL+initoffsetL+MasksizeL-1,
                 minMwinP+initoffsetP,
                 minMwinP+initoffsetP+MasksizeP-1);// size=masksize
@@ -2427,6 +2729,9 @@ void finecoreg(
     Master = minfo.readdata(master);
     Mask   = sinfo.readdata(mask);
 
+
     // ______Coherence______
     // ______update offsetL/P______
     real4 offsetL, offsetP;
@@ -2448,16 +2753,16 @@ void finecoreg(
         if ( AccL > MasksizeL/2 )              // [MA] fix for Acc being half of Masksize at max
           {
            AccL = MasksizeL/2 ;
-           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;  
+           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;
            WARNING.print();
           }
         else if ( AccP > MasksizeP/2 )
           {
            AccP = MasksizeP/2 ;
-           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;  
+           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;
            WARNING.print();
           }
-         
+
         coheren = crosscorrelate(Master, Mask, OVS, AccL, AccP,
                                  offsetL, offsetP);// returned
         break;
@@ -2470,39 +2775,271 @@ void finecoreg(
         if ( AccL > MasksizeL/2 )              // [MA] fix for Acc being half of Masksize at max
           {
            AccL = MasksizeL/2 ;
-           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;  
+           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;
            WARNING.print();
           }
         else if ( AccP > MasksizeP/2 )
           {
            AccP = MasksizeP/2 ;
-           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;  
+           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;
            WARNING.print();
           }
 
         // ______ Oversample complex chips by factor two ______
         // ______ neg.shift input shifts to -> 0
-        DEBUG.print("Centering azimuth spectrum patches around 0");
+
+        if (fineinput.shiftazi == 1) // using the DC polynomial only
+        {
+        DEBUG.print("Centering azimuth spectrum patches around 0 using the DC polynomial");
         const real4 m_pixlo = real4(master.pixlo);// neg.shift -> 0
         const real4 s_pixlo = real4(mask.pixlo);// neg.shift -> 0
+        
         shiftazispectrum(Master,minfo,-m_pixlo);// shift from fDC to zero
         shiftazispectrum(Mask,  sinfo,-s_pixlo);// shift from fDC to zero
+        }
         DEBUG.print("Oversampling patches with factor two using zero padding");
         const matrix<complr4> m_ovs_chip = oversample(Master,2,2);
         const matrix<complr4> s_ovs_chip = oversample(Mask,  2,2);
         // ______ Peak in cross-corr of magnitude of ovs data ______
         DEBUG.print("Cross-correlating magnitude of ovs patches");
         DEBUG.print("(no need to shift spectrum back)");// (else account for ovs..)
-        //coheren = coherencefft(m_ovs_chip, s_ovs_chip, 
-        //                       OVS/2, 2*AccL, 2*AccP, 
+        //coheren = coherencefft(m_ovs_chip, s_ovs_chip,
+        //                       OVS/2, 2*AccL, 2*AccP,
         //                       offsetL,offsetP);
-        coheren = crosscorrelate(m_ovs_chip, s_ovs_chip, 
-                                 OVS/2, 2*AccL, 2*AccP, 
+        coheren = crosscorrelate(m_ovs_chip, s_ovs_chip,
+                                 OVS/2, 2*AccL, 2*AccP,
                                  offsetL,offsetP);
         offsetL /= 2.0;// orig data oversampled by factor 2
         offsetP /= 2.0;// orig data oversampled by factor 2
         break;
         }
+      // ====== This should work for ERS/N1; different PRFs ======
+      case fc_intensity: // slow (better): oversample complex data first
+        {
+        INFO << "intensity method " << endl;
+        INFO.print();
+        if ( AccL > MasksizeL/2 )              // [MA] fix for Acc being half of Masksize at max
+          {
+           AccL = MasksizeL/2 ;
+           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;
+           WARNING.print();
+          }
+        else if ( AccP > MasksizeP/2 )
+          {
+           AccP = MasksizeP/2 ;
+           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;
+           WARNING.print();
+          }
+
+        // ______ Oversample complex chips by factor two ______
+        // ______ neg.shift input shifts to -> 0
+        // bool doCenterSpec = true;
+        // do not remove (center) the spectrum if the radar is Sentinel-1:
+        // if (minfo.sensor == SLC_S1A)
+        //     doCenterSpec = false;
+
+        if (fineinput.shiftazi == 1)
+        {
+        DEBUG.print("Centering azimuth spectrum patches around 0 using the DC polynomial");
+        const real4 m_pixlo = real4(master.pixlo);// neg.shift -> 0
+        const real4 s_pixlo = real4(mask.pixlo);// neg.shift -> 0
+        
+        shiftazispectrum(Master,minfo,-m_pixlo);// shift from fDC to zero
+        shiftazispectrum(Mask,  sinfo,-s_pixlo);// shift from fDC to zero
+        }
+        DEBUG.print("Oversampling patches with factor two using zero padding");
+        const matrix<complr4> m_ovs_chip = oversample(Master,2,2);
+        const matrix<complr4> s_ovs_chip = oversample(Mask,  2,2);
+        // ______ Peak in cross-corr of magnitude of ovs data ______
+        DEBUG.print("Cross-correlating magnitude of ovs patches");
+        DEBUG.print("(no need to shift spectrum back)");// (else account for ovs..)
+        //coheren = coherencefft(m_ovs_chip, s_ovs_chip,
+        //                       OVS/2, 2*AccL, 2*AccP,
+        //                       offsetL,offsetP);
+        coheren = intensity(m_ovs_chip, s_ovs_chip,
+                                 OVS/2, 2*AccL, 2*AccP,
+                                 offsetL,offsetP);
+        offsetL /= 2.0;// orig data oversampled by factor 2
+        offsetP /= 2.0;// orig data oversampled by factor 2
+        break;
+        }
+           // ====== New method (MCC Sept 2014) ======
+      case fc_coherence: //
+        {
+
+        if ( AccL > MasksizeL/2 )              // [MA] fix for Acc being half of Masksize at max
+          {
+           AccL = MasksizeL/2 ;
+           WARNING << "FINE: AccL for magfft can be half of the window size at max, changing to "  << AccL ;
+           WARNING.print();
+          }
+        else if ( AccP > MasksizeP/2 )
+          {
+           AccP = MasksizeP/2 ;
+           WARNING << "FINE: AccP for magfft can be half of the window size at max, changing to "  << AccP ;
+           WARNING.print();
+          }
+
+        matrix<real4> refPhaseDEM(mask.lines(),mask.pixels()); // only for CCC, but it must be defined here
+         
+        // slcimage   deminfo = minfo;
+          
+        if (specified(fineinput.forefdem)) // if spec. then read the needed window
+        {
+            window zerooffset(0,0,0,0);
+            window demWin = master;
+
+            demWin.linelo -= minfo.currentwindow.linelo + 1;
+            demWin.linehi -= minfo.currentwindow.linelo + 1;
+            demWin.pixlo  -= minfo.currentwindow.pixlo  + 1;
+            demWin.pixhi  -= minfo.currentwindow.pixlo  + 1;
+
+         //   INFO << "reading DEM phases from: " << fineinput.forefdem  << "\n";
+         //   INFO << "        nof lines : " <<minfo.currentwindow.lines()<<endl;
+         //   INFO << " demWin.linelo " << demWin.linelo << " info.currentwindow.linelo  " << minfo.currentwindow.linelo<<endl;
+         //   INFO << " demWin.linehi  " << demWin.linehi << " info.currentwindow.linehi  " << minfo.currentwindow.linehi<<endl;
+         //   INFO << " demWin.pixlo  " << demWin.pixlo << " info.currentwindow.pixlo  " << minfo.currentwindow.pixlo<<endl;
+         //   INFO << " demWin.pixhi  " << demWin.pixhi << " info.currentwindow.pixhi  " << minfo.currentwindow.pixhi<<endl;
+         //   INFO.print();
+            
+           // refPhaseDEM = deminfo.readdata(master);
+            readfile(refPhaseDEM,fineinput.forefdem,minfo.currentwindow.lines(),demWin,zerooffset);
+        }
+
+        // ______ Oversample complex chips by factor two ______
+        // ______ neg.shift input shifts to -> 0
+
+        if (fineinput.shiftazi == 1)
+        {
+        DEBUG.print("Centering azimuth spectrum patches around 0 using the DC polynomial");
+        const real4 m_pixlo = real4(master.pixlo);// neg.shift -> 0
+        const real4 s_pixlo = real4(mask.pixlo);// neg.shift -> 0
+        shiftazispectrum(Master,minfo,-m_pixlo);// shift from fDC to zero
+        shiftazispectrum(Mask,  sinfo,-s_pixlo);// shift from fDC to zero
+        }
+    
+        DEBUG.print("Oversampling patches with factor two using zero padding");
+
+        uint ovsFc = 2; // oversampling factor
+        const matrix<complr4> m_ovs_chip = oversample(Master,ovsFc,ovsFc);
+        // MCC: s_ovs_chip is the oversampled slave. It will be modified so
+        // that its spectrum is centered at the same frequency as the master's;
+        // otherwise the coherence is badly underestimated.
+        matrix<complr4> s_ovs_chip = oversample(Mask,  ovsFc,ovsFc);
+        // matrix sizes
+        uint L = m_ovs_chip.lines();
+        uint P = m_ovs_chip.pixels();
+
+
+        // reference phase
+        matrix<real8> REFPHASE(Master.lines(),Master.pixels());
+        matrix<real8> allPixels(Master.lines(),Master.pixels());
+        matrix<real8> allLines(Master.lines(),Master.pixels());
+        
+        const int16 MAXITER   = 10;        // maximum number of iterations
+        const real8 CRITERPOS = 1e-6;      // 1 micrometer
+        const real8 CRITERTIM = 1e-10;     // seconds (~10^-6 m)
+        const real8 m_minpi4cdivlam = (-4*PI*SOL)/minfo.wavelength;
+        const real8 s_minpi4cdivlam = (-4*PI*SOL)/sinfo.wavelength;
+
+        real8 pixel = 0;           // master coord. system
+        real8 line  = 0;
+        // ______ Compute ref. phase for this buffer ______
+    for (register int32 ll=0; ll<Master.lines(); ++ll)
+      {
+      for (register int32 pp=0; pp<Master.pixels(); ++pp)
+        {
+        pixel = pp + master.pixlo;
+        line  = ll + master.linelo;
+        allPixels(ll,pp)  = real8(pixel);
+        allLines(ll,pp)   = real8(line);
+        // ______ Compute range time for this pixel ______
+        //const real8 m_trange = pix2tr(pixel,master.t_range1,master.rsr2x);
+        const real8 m_trange = minfo.pix2tr(pixel);
+        const real8 m_tazi   = minfo.line2ta(line); // added by FvL
+
+        // ______ Compute xyz of this point P from position in image ______
+        cn P;                                       // point, returned by lp2xyz
+        lp2xyz(line,pixel,ell,minfo,masterorbit,
+               P,MAXITER,CRITERPOS);
+   
+        // ______ Compute xyz for slave satellite from P ______
+        real8 s_tazi;                               // returned, not used
+        real8 s_trange;                             // returned
+        xyz2t(s_tazi,s_trange,sinfo,
+              slaveorbit,
+              P,MAXITER,CRITERTIM);
+
+        if (specified(fineinput.forefdem))
+        {
+          // refPhaseDEM is the reference phase including the DEM
+          REFPHASE(ll,pp) = m_minpi4cdivlam*m_trange -
+                            s_minpi4cdivlam*s_trange + real8(refPhaseDEM(ll,pp));
+        }
+        else
+        {
+        REFPHASE(ll,pp) = m_minpi4cdivlam*m_trange -
+                          s_minpi4cdivlam*s_trange;
+        }
+
+        // add the ref. phase to the slave (equivalent to subtracting it from the master)
+        Mask(ll,pp) *= complr4(fast_cos(REFPHASE(ll,pp)),fast_sin(REFPHASE(ll,pp)));
+      }
+    }
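+    // In short (a sketch of what the loop above does): per pixel the
+    // reference phase, roughly -4*pi/lambda*(R_master - R_slave) plus the
+    // DEM phase when fineinput.forefdem is given, is computed from both
+    // orbits and multiplied into the slave chip, so the coherence below is
+    // estimated on a flattened, optionally topography-corrected, patch.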
+
+    //    std::ostringstream partName;
+    //    partName << "REFPHASE" << i << ".bin";
+    //    std::string strfilenameRamp = partName.str();
+    //    char * filenameRamp = new char [strfilenameRamp.size()+1];
+    //    //char filenameRamp[strfilenameRamp.size()+1];
+    //    strcpy(filenameRamp,strfilenameRamp.c_str());
+    //    ofstream ofilefftIfg;
+    //    openfstream(ofilefftIfg,filenameRamp,true);
+    //    bk_assert(ofilefftIfg,filenameRamp,__FILE__,__LINE__);
+    //    ofilefftIfg << REFPHASE;
+    //    ofilefftIfg.close();
+
+        // fast_dotmultconjphase(Mask,REFPHASE);
+        s_ovs_chip = oversample(Mask, ovsFc, ovsFc);
+
+        // s_ovs_chip is now centered at the same frequency as the master;
+        // made const because the original code also used a const
+        const matrix<complr4> detr_s_ovs_chip = s_ovs_chip;
+        coheren = coherencefft(m_ovs_chip, detr_s_ovs_chip,
+                                 OVS/2, 2*AccL, 2*AccP,
+                                        offsetL,offsetP);
+
+        offsetL /= real8(ovsFc); // orig data oversampled by factor ovsFc
+        offsetP /= real8(ovsFc); // orig data oversampled by factor ovsFc
+
+        break;
+        }
+
       case fc_magspace:
         coheren = coherencespace(fineinput, Master, Mask, offsetL, offsetP);
         break;
@@ -2519,6 +3056,7 @@ void finecoreg(
       INFO.print();
     } // for nwin
 
+  
 
   // ______ Position approx. with respect to center of window ______
   // ______ correct position array for center instead of lower left ______
@@ -2568,7 +3106,7 @@ void finecoreg(
   ofstream scratchresfile("scratchresfine", ios::out | ios::trunc);
   bk_assert(scratchresfile,"finecoreg: scratchresfine",__FILE__,__LINE__);
 
-  scratchresfile 
+  scratchresfile
     << "\n\n*******************************************************************"
     << "\n*_Start_" << processcontrol[pr_i_fine]
     << "\n*******************************************************************"
@@ -2594,8 +3132,8 @@ void finecoreg(
   resfile=fopen("scratchresfine","a");
   for (uint i=0; i<Nwin; i++)
    { //MA remove/skip NaN values before writing resfile.
-   if  ( isnan(Result(i,2)) )  continue;  
-    fprintf(resfile,"%4.0f %5.0f %5.0f %# 9.2f %# 9.2f %# 6.2f\n",
+   if  ( isnan(Result(i,2)) )  continue;
+    fprintf(resfile,"%4.0f %5.0f %5.0f %# 11.5f %# 11.5f %# 10.5f\n",
             real4(i), real4(Minlminp(i,0)), real4(Minlminp(i,1)),
             Result(i,0), Result(i,1), Result(i,2));
   }
@@ -2613,7 +3151,6 @@ void finecoreg(
   } // END finecoreg
 
 
-
 /****************************************************************
  * coherencefft                                                 *
  *                                                              *
@@ -2623,33 +3160,30 @@ void finecoreg(
  *                                                              *
  * input:                                                       *
  *  - Master                                                    *
- *  - Mask (size Master)                                        *
- * output:                                                      *
- *  - coherence value [-1 1]                                    *
+ *  - Mask (slave, same size as Master)                         *
+ *  - ovsfactor: oversampling factor                            *
+ *  - AccL: accuracy (search window) in azimuth                 *
+ *  - AccP: accuracy (search window) in range                   *
+ * output:                                                      *
+ *  - coherence value [0 1]                                     *
  *  - updated offsetL, P                                        *
  *    positive offsetL: Mask is shifted up                      *
  *    positive offsetP: Mask is shifted left                    *
  *                                                              *
- *    Bert Kampes, 03-Feb-1999                                  *
- * bugfix? streamlined, based on magnitude forced               *
- *    Bert Kampes, 16-Nov-1999                                  *
- * 1) should find max at pixel level, then oversample sub-pixel *
- * but it seems to be implemented strangely                     *
- * 2) oversampling should be performed on complex images with   *
- * factor 2, so to avoid aliasing of spectrum (shift azi).      *
- *    Bert Kampes, 12-Aug-2005                                  *
+ *    Miguel Caro Cuenca, Sept 2014,                            *
+ *    using code from Bert Kampes                               *
  ****************************************************************/
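+// A sketch of the pipeline implemented below (step numbers as in the code):
+//   (1) zero-pad Master/Mask to 2Lx2P and get the complex cross products
+//       for all shifts at once via ifft2d(conj(fft2d(Master2)).*fft2d(Mask2));
+//   (2,3) normalize by prodSum and scan the [-AccL,AccL)x[-AccP,AccP)
+//       shift window for the peak of the coherence magnitude;
+//   (4) if ovsfactor>1, oversample a chip around the peak to sub-pixel.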
 real4 coherencefft(
-        //const input_fine &fineinput,
         const matrix<complr4> &Master,  // data
         const matrix<complr4> &Mask,    // data
-        const uint ovsfactor,           // ovs factor (1 for not)
-        const uint AccL,                // search window
-        const uint AccP,                // search window
-        real4 &offsetL,                 // returned
-        real4 &offsetP)                 // returned
+        const int32 ovsfactor,          // ovs factor (1 for not) (not uint)
+        const int32 AccL,               // search window (not uint)
+        const int32 AccP,               // search window (not uint)
+        real4 &offsetL,                 // returned: offset at corr peak
+        real4 &offsetP)                 // returned: offset at corr peak
   {
-  TRACE_FUNCTION("coherencefft (BK 16-Nov-1999)")
+  TRACE_FUNCTION("coherencefft (MCC Sept-2014)")
   // ______ Internal variables ______
   const int32 L     = Master.lines();
   const int32 P     = Master.pixels();
@@ -2659,11 +3193,247 @@ real4 coherencefft(
   const int32 halfP = P/2;
 
   // ______ Check input ______
+  if (Master.lines() != Mask.lines() || Master.pixels() != Mask.pixels())
+    {
+    PRINT_ERROR("Mask, Master not same size.")
+    throw(input_error);
+    }
+  if (!(ispower2(L) || ispower2(P)))
+    {
+    PRINT_ERROR("Mask, Master size not power of 2.")
+    throw(input_error);
+    }
   if (!ispower2(ovsfactor))
     {
-    PRINT_ERROR("coherencefft factor not power of 2")
+    PRINT_ERROR("coherencefft factor not power of 2.")
     throw(input_error);
     }
+
+
+  DEBUG.print("Calculating sum of the pixel power for COHerent cross-correlation");
+
+  // sum of pixel power of Master and Mask
+  real4 sumPowMaster = 0.0;
+  real4 sumPowMask   = 0.0;
+
+  // calculate the sum of squared norms to normalize the coherence
+  //register int32 l,p;
+  for (register int32 l=0; l<=L-1; ++l)         // all lines
+    {
+    for (register int32 p=0; p<=P-1; ++p)       // all pixels
+      {
+
+        sumPowMaster += (sqr(Master(l,p).real()) + sqr(Master(l,p).imag()));
+        sumPowMask   += (sqr(Mask(l,p).real())   + sqr(Mask(l,p).imag()));
+
+      }
+    }
+
+  // normalization constant, see eq. 4.3.2 in Hanssen (2001)
+  real4 prodSum = sqrt(sumPowMaster*sumPowMask);
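+  // Roughly (eq. 4.3.2, Hanssen 2001): for patches m and s the complex
+  // coherence is gamma = sum(m .* conj(s)) / sqrt(sum|m|^2 * sum|s|^2);
+  // prodSum is that denominator, computed once over the whole patches
+  // (not per shift), so |gamma| below stays within [0 1].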
+
+
+  // ====== (1) Compute cross-products of Master/Mask ======
+  // ______ Pad with N zeros to prevent periodical convolution ______
+  matrix<complr4> Master2(twoL,twoP);           // initial 0
+  matrix<complr4> Mask2(twoL,twoP);             // initial 0
+  window windef(0,0,0,0);                       // defaults to total matrix
+  window win1(0, L-1, 0, P-1);
+  window win2(halfL, halfL+L-1, halfP, halfP+P-1);
+  Master2.setdata(win1,Master,windef);      // copy of master mcc
+  Mask2.setdata(win2,Mask,windef);          // copy of slave  mcc
+  
+
+  // ______ Crossproducts in spectral/space domain ______
+  // ______ Use Mask2 to store cross products temporarily ______
+  
+  // fft(Master2,2);                             // forward transform over rows
+  // fft(Master2,1);
+ 
+  fft2d(Master2);
+ 
+  //MCC DEBUG
+#ifdef REALLYDEBUG
+ INFO << "nof lines ifftMask2 : "<< Master2.lines() ;
+ INFO.print();
+  ofstream ofileccoh;
+  openfstream(ofileccoh, "fftMaster.bin", true);
+  bk_assert(ofileccoh, "fftMaster.bin", __FILE__, __LINE__);
+  ofileccoh << Master2;
+  ofileccoh.close();
+
+  //MCC DEBUG
+#endif
+
+   //fft(Mask2,2);                             // forward transform over rows
+   //fft(Mask2,1);
+ 
+  fft2d(Mask2);
+ 
+  Master2.conj();
+ 
+  Mask2 *= Master2;      // corr = conj(M).*S
+  
+  //ifft(Mask2,2);
+ // ifft(Mask2,1);
+  ifft2d(Mask2);         // Mask2: complex cross prod. in space
+
+  
+  
+  //MCC DEBUG
+#ifdef REALLYDEBUG
+ INFO << "nof lines ifftMask2 : "<< Mask2.lines() ;
+ INFO.print();
+  ofstream ofileccoh2;                    // new name: ofileccoh is already defined above
+  openfstream(ofileccoh2, "iffMask2r.bin", true);
+  bk_assert(ofileccoh2, "iffMask2r.bin", __FILE__, __LINE__);
+  ofileccoh2 << Mask2;
+  ofileccoh2.close();
+
+  //MCC DEBUG
+#endif
+  // ====== (2) normalization ======
+  // ______ unlike crosscorrelate, the norms are not computed ______
+  // ______ per shift here; the single prodSum above is used ______
+  // ______ Mask2(0,0):Mask2(N,N) holds shifts = -N/2:N/2 ______
+  Master2.clean();                              // reset to zeros (not used further)
+
+  // ====== (3) find maximum correlation at pixel level ======
+  matrix<complr4> Coherence(L+1,P+1);//coherence for each shift mcc
+  real4 maxcorr  = -999.0;
+  int32 maxcorrL = 0;// local index in Coherence of maxcorr
+  int32 maxcorrP = 0;// local index in Coherence of maxcorr
+
+  // magnitude of the complex coherence at the current shift
+  real4 currentMagCoh = 0.0;
+  for (register int32 l=halfL-AccL; l<halfL+AccL; ++l)         // all shifts
+    {
+    for (register int32 p=halfP-AccP; p<halfP+AccP; ++p)       // all shifts
+      {
+      Coherence(l,p) = (Mask2(l,p)) / prodSum;
+      //Coherence(l,p) = (Mask2(l,p)) ;
+      currentMagCoh  = sqrt(sqr(real(Coherence(l,p))) + sqr(imag(Coherence(l,p))));
+
+      //if ( Covar(l,p) > 1 ) { Covar(l,p) = -999.0 ; }  // MA quick fix for values bigger than 1
+      if ( currentMagCoh > maxcorr)
+        {
+
+        maxcorr  = currentMagCoh;
+        maxcorrL = l;// local index in Magnitude of Coh of maxcorr
+        maxcorrP = p;// local index in Magnitude Coh of maxcorr
+        if (maxcorr > 1 ) continue; // [MA] stop checking this chip further for maxcorr
+        }
+      }
+    }
+
+  // from here on, the rest is the same as in crosscorrelate
+  offsetL = -halfL + maxcorrL; // update by reference
+  offsetP = -halfP + maxcorrP; // update by reference
+  DEBUG << "Pixel level offset:     "
+        << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
+  DEBUG.print();
+
+  // ====== (4) oversample to find peak sub-pixel ======
+  // ====== Estimate shift by oversampling estimated correlation ======
+  if (ovsfactor>1)
+    {
+
+    // --- (4a) get little chip around max. corr, if possible ---
+    // --- make sure that we can copy the data ---
+    if (maxcorrL<AccL)
+      {
+      DEBUG << "Careful, decrease AccL or increase winsizeL";
+      DEBUG.print();
+      maxcorrL = AccL;
+      }
+    if (maxcorrP<AccP)
+      {
+      DEBUG << "Careful, decrease AccP or increase winsizeP";
+      DEBUG.print();
+      maxcorrP = AccP;
+      }
+    if (maxcorrL>(L-AccL))
+      {
+      DEBUG << "Careful, decrease AccL or increase winsizeL";
+      DEBUG.print();
+      maxcorrL = L-AccL;
+      }
+    if (maxcorrP>(P-AccP))
+      {
+      DEBUG << "Careful, decrease AccP or increase winsizeP";
+      DEBUG.print();
+      maxcorrP = P-AccP;
+      }
+    // --- Now get the chip around max corr ---
+
+    //Using the magnitude of the coherence
+    window win3(maxcorrL-AccL,maxcorrL+AccL-1, maxcorrP-AccP,maxcorrP+AccP-1);
+    const matrix<real4> chip(win3,magnitude(Coherence));// construct as part
+
+
+
+    // --- (4b) oversample chip to obtain sub-pixel max ---
+    uint offL;
+    uint offP;
+    maxcorr =  max(oversample(chip, ovsfactor, ovsfactor), offL,offP);
+    offsetL = -halfL + maxcorrL - AccL + real4(offL) / real4(ovsfactor);
+    offsetP = -halfP + maxcorrP - AccP + real4(offP) / real4(ovsfactor);
+
+    DEBUG << "Sub-pixel level offset: "
+          << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
+    DEBUG.print();
+    }
+  return maxcorr;
+  } // END coherencefft
+
+
+
+
+
+/****************************************************************
+ * crosscorrelate                                               *
+ *                                                              *
+ * cross correlation of zero-meaned magnitude of two patches    *
+ *  uses ffts, some tricks for speed-up.                        *
+ *  optionally improves peak position to sub-pixel.             *
+ * This is an improvement upon coherencefft: faster, local peak *
+ * Better to put this in matrixspecs                            *
+ *                                                              *
+ * input:                                                       *
+ *  - Master                                                    *
+ *  - Mask (same size as Master)                                *
+ * output:                                                      *
+ *  - peak correlation value [-1 1]                             *
+ *  - updated offsetL, P                                        *
+ *    positive offsetL: Mask is shifted up                      *
+ *    positive offsetP: Mask is shifted left                    *
+ *                                                              *
+ * Bert Kampes, 12-Aug-2005                                     *
+ ****************************************************************/
+real4 crosscorrelate(
+        const matrix<complr4> &Master,  // data
+        const matrix<complr4> &Mask,    // data
+        const int32 ovsfactor,          // ovs factor (1 for not) (not uint)
+        const int32 AccL,               // search window (not uint)
+        const int32 AccP,               // search window (not uint)
+        real4 &offsetL,                 // returned: offset at corr peak
+        real4 &offsetP)                 // returned: offset at corr peak
+  {
+  TRACE_FUNCTION("crosscorrelate (BK 12-Aug-2005)")
+  // ______ Internal variables ______
+  const int32 L     = Master.lines();
+  const int32 P     = Master.pixels();
+  const int32 twoL  = 2*L;
+  const int32 twoP  = 2*P;
+  const int32 halfL = L/2;
+  const int32 halfP = P/2;
+  const int32 minIfgAmp = -99999;       // minimum amplitude of an ifg to be considered as a window for coarse coreg
+  // ______ Check input ______
   if (Master.lines() != Mask.lines() || Master.pixels() != Mask.pixels())
     {
     PRINT_ERROR("Mask, Master not same size.")
@@ -2674,89 +3444,164 @@ real4 coherencefft(
     PRINT_ERROR("Mask, Master size not power of 2.")
     throw(input_error);
     }
-
-  // ______ Zero mean magnitude images ______
-  DEBUG.print("Using de-meaned magnitude patches for incoherent cross-correlation");
-  matrix<real4> magMaster = magnitude(Master);
-  matrix<real4> magMask   = magnitude(Mask);
-  magMaster              -= mean(magMaster);
-  magMask                -= mean(magMask);
-
-  // ====== FFT's of master/mask ======
-  // ______ Pad with N zeros to prevent periodical convolution ______
-  matrix<complr4> Master2(twoL,twoP);           // initial 0
-  matrix<complr4> Mask2(twoL,twoP);             // initial 0
-  window windef(0,0,0,0);                       // defaults to total matrix
-  window win1(0, L-1, 0, P-1);  
-  window win2(halfL, halfL+L-1, halfP, halfP+P-1);
-  Master2.setdata(win1,mat2cr4(magMaster),windef);      // zero-mean magnitude
-  Mask2.setdata(win2,mat2cr4(magMask),windef);          // zero-mean magnitude
-
-  // ______ Crossproducts in spectral/space domain ______
-  // ______ Use Mask2 to store cross products temporarly ______
+  if (!ispower2(ovsfactor))
+    {
+    PRINT_ERROR("coherencefft factor not power of 2")
+    throw(input_error);
+    }
+
+  // ______ Zero mean magnitude images ______
+  DEBUG.print("Using de-meaned magnitude patches for incoherent cross-correlation");
+  matrix<real4> magMaster = magnitude(Master);
+  matrix<real4> magMask   = magnitude(Mask);
+  magMaster              -= mean(magMaster);
+  magMask                -= mean(magMask);
+
+  // ====== (1) Compute cross-products of Master/Mask ======
+  // ______ Pad with N zeros to prevent periodical convolution ______
+  matrix<complr4> Master2(twoL,twoP);           // initial 0
+  matrix<complr4> Mask2(twoL,twoP);             // initial 0
+  window windef(0,0,0,0);                       // defaults to total matrix
+  window win1(0, L-1, 0, P-1);
+  window win2(halfL, halfL+L-1, halfP, halfP+P-1);
+  Master2.setdata(win1,mat2cr4(magMaster),windef);      // zero-mean magnitude
+  Mask2.setdata(win2,mat2cr4(magMask),windef);          // zero-mean magnitude
+  // ______ Crossproducts in spectral/space domain ______
+  // ______ Use Mask2 to store cross products temporarily ______
+  fft2d(Master2);
+  fft2d(Mask2);
+  Master2.conj();
+  Mask2 *= Master2;      // corr = conj(M).*S
+  ifft2d(Mask2);         // real(Mask2): cross prod. in space
+
+  // ====== (2) compute norms for all shifts ======
+  // ______ use tricks to do this efficient ______
+  // ______ real(Mask2) contains cross-products ______
+  // ______ Mask2(0,0):Mask2(N,N) for shifts = -N/2:N/2 ______
+  // ______ rest of this matrix should not be used ______
+  // ______ Use Master2 to store intensity here in re,im ______
+  Master2.clean();                              // reset to zeros
+  register int32 l,p;
+  // --- flipud(fliplr(master^2) in real ---
+  // --- mask^2 in imag part; this saves a fft ---
+  // --- automatically the real/imag parts contain the norms ---
+  for (l=L; l<twoL; ++l)
+    for (p=P; p<twoP; ++p)
+      Master2(l,p) = complr4(
+        sqr(magMaster(twoL-1-l,twoP-1-p)),
+        sqr(magMask(l-L,p-P)));
+  // --- use a static block for fast computation ---
+  static matrix<complr4> BLOCK;// initial 0
+  if (int32(BLOCK.lines())!=twoL || int32(BLOCK.pixels())!=twoP)
+    {
+    DEBUG << "crosscorrelate:changing static block to size ["
+          << twoL << ", " << twoP << "]";
+    DEBUG.print();
+    BLOCK.resize(twoL,twoP);
+    for (l=halfL; l<halfL+L; ++l)
+      for (p=halfP; p<halfP+P; ++p)
+        BLOCK(l,p) = complr4(1.0);
+    fft2d(BLOCK);
+    BLOCK.conj();// static variable: keep this for re-use
+    }
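+  // The trick above (a sketch): cross-correlating the squared magnitudes
+  // with a box of ones via FFT yields, for every shift at once, the local
+  // power sums needed to normalize Covar; BLOCK's spectrum is cached in a
+  // static so repeated calls with the same patch size reuse it.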
+  // ______ Compute the cross-products, i.e., the norms for each shift ______
+  // ______ Master2(0,0):Master2(N,N) for shifts = -N/2:N/2 ______
   fft2d(Master2);
-  fft2d(Mask2);
-  Master2.conj();
-  Mask2 *= Master2;                             // corr = conj(M).*S
-  ifft2d(Mask2);                                // cross prod. in space
+  Master2 *= BLOCK;
+  ifft2d(Master2);// real(Master2): powers of Master; imag(Master2): Mask
 
-  // ______ keep cross-products for shifts [-AccL,+AccL) ______
-  window wintmp(halfL-AccL, halfL+AccL-1, halfP-AccP, halfP+AccP-1);
-  matrix<complr4> TMP(wintmp,Mask2);
-  matrix<real4> Covar = real(TMP);              // imag==0
 
-  // ====== Compute norms, zero padded matrices ======
-  const complr4 ONE(1.0);
-  matrix<complr4> blok(L,P);
-  blok.setdata(ONE);                            // only real part
+  // ====== (3) find maximum correlation at pixel level ======
+  matrix<real4> Covar(L+1,P+1);// correlation for each shift
+  real4 maxcorr  = -999.0;
+  real4 maxCorrAmp = 0;
+  int32 maxcorrL = 0;// local index in Covar of maxcorr
+  int32 maxcorrP = 0;// local index in Covar of maxcorr
+  for (register int32 l=halfL-AccL; l<halfL+AccL; ++l)         // all shifts
+    {
+    for (register int32 p=halfP-AccP; p<halfP+AccP; ++p)       // all shifts
+      {
+        maxCorrAmp =  sqrt(real(Master2(l,p))*imag(Master2(l,p)));
+        Covar(l,p) = real(Mask2(l,p)) /maxCorrAmp;
+                   //sqrt(real(Master2(l,p))*imag(Master2(l,p)));
 
-  Master2.clean();                              // reset to zeros
-  Mask2.clean();                                // reset to zeros
-  Master2.setdata(win1,mat2cr4(sqr(magMaster)),windef);// use Master2 for intensity
-  Mask2.setdata(win2,blok,windef);              // use Mask2 for padded Block
-
-  fft2d(Master2);                               // (intensity of master)
-  fft2d(Mask2);                                 // (block)
-  Mask2.conj();                                 // conj(block)
-  Master2 *= Mask2;
-  Master2.conj();                               // Master2 == conj(Master)*block
-  ifft2d(Master2);
-  // ______ Master2 now contains norms of master image in space domain ______
-  // ______ Resize to shifts [-AccL,+AccL) ______
-  TMP.setdata(Master2,wintmp);                  // fill TMP
-  matrix<real4> pmaster = real(TMP);            // norms in pmaster
-
-  // ====== Now compute norms for slave image ======
-  Master2.clean();                              // reset to zeros
-  window win5(L,twoL-1,P,twoP-1);
-  Master2.setdata(win5,mat2cr4(sqr(magMask)),windef);
-  fft2d(Master2);                               // (intensity of slave)
-  Master2 *= Mask2;                             // Master2 == conj(block)*Slave
-  ifft2d(Master2);
-  // ______ Master2 now contains norms of slave image in space domain ______
-  // ______ Resize to shifts [-AccL,+AccL) ______
-  TMP.setdata(Master2,wintmp);                  // fill TMP
-  const matrix<real4> pmask = real(TMP);        // norms in pmask
-  pmaster *= pmask;
-  Covar   /= sqrt(pmaster);
 
+      //if ( Covar(l,p) > 1 ) { Covar(l,p) = -999.0 ; }  // MA quick fix for values bigger than 1
+      //if (Covar(l,p) > maxcorr && Covar(l,p)<1.09)// MCC: Covar(l,p)<1.09 fixed a problem for amplitude = 0, which produces covar = Inf
+        if (Covar(l,p) > maxcorr && maxCorrAmp>minIfgAmp)
+        {
+        maxcorr  = Covar(l,p);
+        maxcorrL = l;// local index in Covar of maxcorr
+        maxcorrP = p;// local index in Covar of maxcorr
+        if (maxcorr > 1 ) continue; // [MA] stop checking this chip further for maxcorr
+        }
+      }
+    }
+  //INFO << "PowMaster : " << sqrt(real(Master2(maxcorrL,maxcorrP))*imag(Master2(maxcorrL,maxcorrP)));
+  //INFO.print();
+  offsetL = -halfL + maxcorrL; // update by reference
+  offsetP = -halfP + maxcorrP; // update by reference
+  DEBUG << "Pixel level offset:     "
+        << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
+  DEBUG.print();
+
+  // ====== (4) oversample to find peak sub-pixel ======
   // ====== Estimate shift by oversampling estimated correlation ======
-  uint offL;
-  uint offP;
-  const real4 maxcorr = (ovsfactor==1) ?
-    max(Covar,offL,offP) :
-    max(oversample(Covar,ovsfactor,ovsfactor),offL,offP);
-  offsetL = -real4(AccL) + real4(offL)/real4(ovsfactor);// update by reference
-  offsetP = -real4(AccP) + real4(offP)/real4(ovsfactor);// update by reference
+  if (ovsfactor>1)
+    {
+    // --- (4a) get little chip around max. corr, if possible ---
+    // --- make sure that we can copy the data ---
+    if (maxcorrL<AccL)
+      {
+      DEBUG << "Careful, decrease AccL or increase winsizeL";
+      DEBUG.print();
+      maxcorrL = AccL;
+      }
+    if (maxcorrP<AccP)
+      {
+      DEBUG << "Careful, decrease AccP or increase winsizeP";
+      DEBUG.print();
+      maxcorrP = AccP;
+      }
+    if (maxcorrL>(L-AccL))
+      {
+      DEBUG << "Careful, decrease AccL or increase winsizeL";
+      DEBUG.print();
+      maxcorrL = L-AccL;
+      }
+    if (maxcorrP>(P-AccP))
+      {
+      DEBUG << "Careful, decrease AccP or increase winsizeP";
+      DEBUG.print();
+      maxcorrP = P-AccP;
+      }
+    // --- Now get the chip around max corr ---
+    //matrix<real4> chip(2*AccL,2*AccP);// locally oversample corr
+    //for (l=maxcorrL-AccL; l<maxcorrL+AccL; ++l)
+    //  for (p=maxcorrP-AccP; p<maxcorrP+AccP; ++p)
+    //    chip(l-(maxcorrL-AccL),p-(maxcorrP-AccP)) = Covar(l,p);
+    window win3(maxcorrL-AccL,maxcorrL+AccL-1, maxcorrP-AccP,maxcorrP+AccP-1);
+    const matrix<real4> chip(win3,Covar);// construct as part
+    // --- (4b) oversample chip to obtain sub-pixel max ---
+    uint offL;
+    uint offP;
+    maxcorr =  max(oversample(chip, ovsfactor, ovsfactor), offL,offP);
+    offsetL = -halfL + maxcorrL - AccL + real4(offL)/real4(ovsfactor);
+    offsetP = -halfP + maxcorrP - AccP + real4(offP)/real4(ovsfactor);
+    DEBUG << "Sub-pixel level offset: "
+          << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
+    DEBUG.print();
+    }
   return maxcorr;
-  } // END coherencefft
+  } // END crosscorrelate
+
 
 
 
 /****************************************************************
- * crosscorrelate                                               *
+ * intensity                                                    *
  *                                                              *
- * cross correlation of zero-meaned magnitude of two patches    *
+ * cross correlation of normalized intensity of two patches     *
  *  uses ffts, some tricks for speed-up.                        *
  *  optionally improves peak position to sub-pixel.             *
  * This is an improvement upon coherencefft: faster and local peak *
@@ -2771,9 +3616,10 @@ real4 coherencefft(
  *    positive offsetL: Mask is shifted up                      *
  *    positive offsetP: Mask is shifted left                    *
  *                                                              *
- * Bert Kampes, 12-Aug-2005                                     *
+ * Bert Kampes, 12-Aug-2005                                     *
+ * MCC: changed magnitude to intensity, Dec 2014                *
  ****************************************************************/
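+// Relative to crosscorrelate above, the main change in this routine (as far
+// as this patch shows) is the preprocessing: patches are converted to
+// intensity and normalized by their mean, instead of zero-mean magnitude.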
-real4 crosscorrelate(
+real4 intensity(
         const matrix<complr4> &Master,  // data
         const matrix<complr4> &Mask,    // data
         const int32 ovsfactor,          // ovs factor (1 for not) (not uint)
@@ -2790,7 +3636,7 @@ real4 crosscorrelate(
   const int32 twoP  = 2*P;
   const int32 halfL = L/2;
   const int32 halfP = P/2;
-
+  const int32 minIfgAmp = -99999;       // minimum amplitude of an ifg to be considered as a window for coarse coreg
   // ______ Check input ______
   if (Master.lines() != Mask.lines() || Master.pixels() != Mask.pixels())
     {
@@ -2810,17 +3656,19 @@ real4 crosscorrelate(
 
-  // ______ Zero mean magnitude images ______
-  DEBUG.print("Using de-meaned magnitude patches for incoherent cross-correlation");
+  // ______ Mean-normalized intensity images ______
+  DEBUG.print("Using mean-normalized intensity patches for incoherent cross-correlation");
-  matrix<real4> magMaster = magnitude(Master);
-  matrix<real4> magMask   = magnitude(Mask);
-  magMaster              -= mean(magMaster);
-  magMask                -= mean(magMask);
+  //matrix<real4> magMaster = magnitude(Master);
+  //matrix<real4> magMask   = magnitude(Mask);
+  matrix<real4> magMaster = intensity(Master);
+  matrix<real4> magMask   = intensity(Mask);
+  magMaster              /= mean(magMaster);
+  magMask                /= mean(magMask);
 
   // ====== (1) Compute cross-products of Master/Mask ======
   // ______ Pad with N zeros to prevent periodical convolution ______
   matrix<complr4> Master2(twoL,twoP);           // initial 0
   matrix<complr4> Mask2(twoL,twoP);             // initial 0
   window windef(0,0,0,0);                       // defaults to total matrix
-  window win1(0, L-1, 0, P-1);  
+  window win1(0, L-1, 0, P-1);
   window win2(halfL, halfL+L-1, halfP, halfP+P-1);
-  Master2.setdata(win1,mat2cr4(magMaster),windef);      // zero-mean magnitude
-  Mask2.setdata(win2,mat2cr4(magMask),windef);          // zero-mean magnitude
+  Master2.setdata(win1,mat2cr4(magMaster),windef);      // mean-normalized intensity
+  Mask2.setdata(win2,mat2cr4(magMask),windef);          // mean-normalized intensity
@@ -2858,7 +3706,7 @@ real4 crosscorrelate(
     BLOCK.resize(twoL,twoP);
     for (l=halfL; l<halfL+L; ++l)
       for (p=halfP; p<halfP+P; ++p)
-        BLOCK(l,p) = complr4(1.0); 
+        BLOCK(l,p) = complr4(1.0);
     fft2d(BLOCK);
     BLOCK.conj();// static variable: keep this for re-use
     }
@@ -2872,16 +3720,21 @@ real4 crosscorrelate(
   // ====== (3) find maximum correlation at pixel level ======
   matrix<real4> Covar(L+1,P+1);// correlation for each shift
   real4 maxcorr  = -999.0;
+  real4 maxCorrAmp = 0;
   int32 maxcorrL = 0;// local index in Covar of maxcorr
   int32 maxcorrP = 0;// local index in Covar of maxcorr
-  for (l=0; l<=L; ++l)         // all shifts
+  for (register int32 l=halfL-AccL; l<halfL+AccL; ++l)         // all shifts
     {
-    for (p=0; p<=P; ++p)       // all shifts
+    for (register int32 p=halfP-AccP; p<halfP+AccP; ++p)       // all shifts
       {
-      Covar(l,p) = real(Mask2(l,p)) /
-                   sqrt(real(Master2(l,p))*imag(Master2(l,p)));
+        maxCorrAmp =  sqrt(real(Master2(l,p))*imag(Master2(l,p)));
+        Covar(l,p) = real(Mask2(l,p)) /maxCorrAmp;
+                   //sqrt(real(Master2(l,p))*imag(Master2(l,p)));
+
+
       //if ( Covar(l,p) > 1 ) { Covar(l,p) = -999.0 ; }  // MA quick fix for values bigger then 1
-      if (Covar(l,p) > maxcorr)
+      //if (Covar(l,p) > maxcorr && Covar(l,p)<1.09)// MCC: Covar(l,p)<1.09 fixed a problem for amplitude = 0, which produces covar = Inf
+        if (Covar(l,p) > maxcorr && maxCorrAmp>minIfgAmp)
         {
         maxcorr  = Covar(l,p);
         maxcorrL = l;// local index in Covar of maxcorr
@@ -2890,9 +3743,11 @@ real4 crosscorrelate(
         }
       }
     }
+  //INFO << "PowMaster : " << sqrt(real(Master2(maxcorrL,maxcorrP))*imag(Master2(maxcorrL,maxcorrP)));
+  //INFO.print();
   offsetL = -halfL + maxcorrL; // update by reference
   offsetP = -halfP + maxcorrP; // update by reference
-  DEBUG << "Pixel level offset:     " 
+  DEBUG << "Pixel level offset:     "
         << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
   DEBUG.print();
 
@@ -2939,12 +3794,12 @@ real4 crosscorrelate(
     maxcorr =  max(oversample(chip, ovsfactor, ovsfactor), offL,offP);
     offsetL = -halfL + maxcorrL - AccL + real4(offL)/real4(ovsfactor);
     offsetP = -halfP + maxcorrP - AccP + real4(offP)/real4(ovsfactor);
-    DEBUG << "Sub-pixel level offset: " 
+    DEBUG << "Sub-pixel level offset: "
           << offsetL << ", " << offsetP << " (corr=" << maxcorr << ")";
     DEBUG.print();
     }
   return maxcorr;
-  } // END crosscorrelate
+  } // END intensity
 
 
 
@@ -3016,9 +3871,9 @@ real4 coherencespace(
       {
       matrix<real4> magMask   = magnitude(Mask);        // magnitude
       magMask                -= mean(magMask);          // subtract mean
-      matrix<real4> Mask2(winmask,magMask);             // construct as part 
+      matrix<real4> Mask2(winmask,magMask);             // construct as part
       real4 normmask          = norm2(Mask2);
-      matrix<real4> Master2(MasksizeL, MasksizeP); 
+      matrix<real4> Master2(MasksizeL, MasksizeP);
       matrix<real4> magMaster = magnitude(Master);
       magMaster              -= mean(magMaster);
       window winmaster;
@@ -3047,7 +3902,7 @@ real4 coherencespace(
         }
       break;
       }
-   
+
     default:
       PRINT_ERROR("unknown method")
       throw(unhandled_case_error);
@@ -3116,14 +3971,14 @@ void coregpm(
   const real8 maxP     = master.originalwindow.pixhi;
 
   // ______ A priori sigma of  offset ______
-  // ______ Read this factor from the result file 
+  // ______ Read this factor from the result file
   // ______ "Oversampling factor: 32"
   // ______ "Window_size_L_for_correlation: 4"
   // ______ "Window_size_P_for_correlation: 121"
   DEBUG.print("Reading oversampling factor from result file");
  uint osfactor  = 32;// oversampling factor
-  int32 corrwinL = 64;// window size to compute FINE correlation 
-  int32 corrwinP = 64;// window size to compute FINE correlation 
+  int32 corrwinL = 64;// window size to compute FINE correlation
+  int32 corrwinP = 64;// window size to compute FINE correlation
   char c4osfactor[4];
   char c10corrwinL[10];
   char c10corrwinP[10];
@@ -3133,7 +3988,7 @@ void coregpm(
   if (found) corrwinL = int32(atoi(c10corrwinL));
   found = readres(c10corrwinP,sizeof(c10corrwinP),i_resfile, "Window_size_P_for_correlation:", 0);
   if (found) corrwinP = int32(atoi(c10corrwinP));
-  corrwinL = max(10,corrwinL-8);// if fft method peak is not at center 
+  corrwinL = max(10,corrwinL-8);// if fft method peak is not at center
   corrwinP = max(10,corrwinP-8);//  +then effective number of samples is smaller
   // _____ oversampling factor is bin in which maximum can be found _____
   // _____ ovsf=16-->apriorisigma=0.03
@@ -3160,9 +4015,9 @@ void coregpm(
   // ______ Find #points > threshold ______
   matrix<real4> Data   = getofffile(i_resfile, THRESHOLD);
   // ______ Data contains the following: ______
-  // Data(i,0) = winnumber; Data(i,1) = posL; Data(i,2) = posP; 
+  // Data(i,0) = winnumber; Data(i,1) = posL; Data(i,2) = posP;
   // Data(i,3) = offL;      Data(i,4) = offP; Data(i,5) = corr;
-  
+
   // ______ start added by FvL ______
   ifstream DeltaLfile, DeltaPfile;
   streampos pos;
@@ -3173,7 +4028,7 @@ void coregpm(
       bk_assert(DeltaLfile,"dac_delta_line.raw",__FILE__,__LINE__);
       openfstream(DeltaPfile,"dac_delta_pixel.raw");
       bk_assert(DeltaPfile,"dac_delta_pixel.raw",__FILE__,__LINE__);
-      
+
       int32 posL, posP;
       real4 offL, offP;
       real8 deltaL,deltaP;
@@ -3182,18 +4037,18 @@ void coregpm(
       real4 ms_r_timing_error_P = real4(slave.r_timing_error);
 
       for (register int32 ii=0; ii<Data.lines(); ii++)
-        {        
+        {
           posL = int32(Data(ii,1));
           posP = int32(Data(ii,2));
           offL = Data(ii,3);
           offP = Data(ii,4);
-          pos = (streampos)((posL-master.currentwindow.linelo)*                 // [MA] (streampos) define in the lhs to eliminate int wrapping 
+          pos = (streampos)((posL-master.currentwindow.linelo)*                 // [MA] (streampos) define in the lhs to eliminate int wrapping
                             master.currentwindow.pixels() + posP - master.currentwindow.pixlo);
           pos = (streampos)(pos * sizer8);
-          
+
           DeltaLfile.seekg(pos,ios::beg);
           DeltaPfile.seekg(pos,ios::beg);
-          
+
           DeltaLfile.read((char*)&deltaL,sizer8);
           DeltaPfile.read((char*)&deltaP,sizer8);
 
@@ -3201,7 +4056,7 @@ void coregpm(
           Data(ii,4) = offP-real4(deltaP)-ms_r_timing_error_P;
         }
     }
-  
+
   // ______ end added by FvL ______
 
   int32 ITERATION = 0;
@@ -3257,19 +4112,19 @@ void coregpm(
       PRINT_ERROR("coregpm: Number of windows > threshold is smaller than parameters solved for.")
       throw(input_error);
       }
-  
+
     // ______Set up system of equations______
     // ______Order unknowns: A00 A10 A01 A20 A11 A02 A30 A21 A12 A03 for degree=3______
     matrix<real8> yL(Nobs,1);                   // observation
     matrix<real8> yP(Nobs,1);                   // observation
     matrix<real8> A(Nobs,Nunk);                 // designmatrix
     matrix<real8> Qy_1(Nobs,1);                 // a priori covariance matrix (diag)
-  
+
     // ______ Normalize data for polynomial ______
     INFO << "coregpm: polynomial normalized by factors: "
          << minL << " " << maxL << " " << minP << " " << maxP << " to [-2,2]";
     INFO.print();
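+    // A note (sketch): evaluating the 2D polynomial on coordinates scaled
+    // to roughly [-2,2] keeps the normal matrix N = A'*Qy^-1*A well
+    // conditioned; the max(abs(N*inv(N)-I)) check below guards this.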
-  
+
     // ______Fill matrices______
     DEBUG.print("Setting up design matrix for LS adjustment");
     for (i=0; i<Nobs; i++)
@@ -3278,7 +4133,7 @@ void coregpm(
       real8 posP = normalize(real8(Data(i,2)),minP,maxP);
       yL(i,0)    = real8(Data(i,3));
       yP(i,0)    = real8(Data(i,4));
-      DEBUG << "coregpm: (" << posL << ", "<< posP << "): yL=" 
+      DEBUG << "coregpm: (" << posL << ", "<< posP << "): yL="
             << yL(i,0) << " yP=" << yP(i,0);
       DEBUG.print();
       // ______Set up designmatrix______
@@ -3292,7 +4147,7 @@ void coregpm(
           }
         }
       }
-  
+
 
     // ______Weight matrix data______
     DEBUG.print("Setting up (inverse of) covariance matrix for LS adjustment");
@@ -3300,7 +4155,7 @@ void coregpm(
       {
       case 0:
         for (i=0; i<Nobs; i++)
-          Qy_1(i,0) = real8(1.0); 
+          Qy_1(i,0) = real8(1.0);
         break;
       case 1:
         DEBUG.print("Using sqrt(coherence) as weights.");
@@ -3332,7 +4187,7 @@ void coregpm(
         for (i=0; i<Nobs; i++)
           {
           // N_corr: number of samples for cross-corr; approx. FC_WINSIZE
-          // number of effictive samples depends on data ovs factor 
+          // number of effective samples depends on data ovs factor
           // Bamler 2000: also on oversampling ratio of data, but ignored here.
           const real4 N_corr   = real4(corrwinL*corrwinP)/real4(master.ovs_az*master.ovs_rg);
           const real4 coh      = Data(i,5);// estimated correlation; assume unbiased?
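
For this weighting option the variance of each offset estimate is derived from the estimated coherence and the effective sample count N_corr computed above. A hedged sketch of a Cramer-Rao style bound in that spirit (constants per Bamler 2000 as cited in the comments; the exact Doris expression is not reproduced here):

    #include <cmath>
    // variance of a correlation-based shift estimate (resolution cells)
    double offset_variance(double coh, double n_corr)
      {
      const double pi = 3.141592653589793;
      return (3.0/(2.0*n_corr)) * (1.0 - coh*coh) / (pi*pi*coh*coh);
      }
    // Qy_1(i,0) = 1.0/offset_variance(coh,N_corr): high coherence and
    // large windows yield large weights.
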
@@ -3355,7 +4210,7 @@ void coregpm(
         throw(unhandled_case_error);
       }
 
- 
+
     // ______Compute Normalmatrix, right-hand side______
     matrix<real8> N    = matTxmat(A,diagxmat(Qy_1,A));
     //matrix<real8> rhsL = matTxmat(A,diagxmat(Qy_1,yL));
@@ -3377,14 +4232,14 @@ void coregpm(
     INFO << "coregpm: max(abs(N*inv(N)-I)) = " << maxdev;
     INFO.print();
     // ___ use trace buffer to store string, remember to rewind it ___
-    if (maxdev > .01) 
+    if (maxdev > .01)
       {
       ERROR << "coregpm: maximum deviation N*inv(N) from unity = " << maxdev
             << ". This is larger than 0.01";
       ERROR.print(ERROR.get_str());
       throw(some_error);
       }
-    else if (maxdev > .001) 
+    else if (maxdev > .001)
       {
       WARNING << "coregpm: maximum deviation N*inv(N) from unity = " << maxdev
               << ". This is between 0.01 and 0.001";
@@ -3404,7 +4259,7 @@ void coregpm(
     matrix<real8> Qe_hat = -Qy_hat;
     for (i=0; i<Nobs; i++)
       Qe_hat(i,i) += (1. / Qy_1(i,0));
-  
+
     // ______Overall model test (variance factor)______
     overallmodeltestL = 0.;
     overallmodeltestP = 0.;
@@ -3419,7 +4274,7 @@ void coregpm(
     INFO.print();
     INFO << "coregpm: overallmodeltest Pixels = " << overallmodeltestP;
     INFO.print();
-  
+
     // ______Datasnooping, assume Qy diag______
     wtestL.resize(Nobs,1);
     wtestP.resize(Nobs,1);
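
The overall model test reported here is the weighted sum of squared residuals divided by the redundancy; values near 1 mean the a priori sigmas describe the data well. An illustrative sketch under that standard least-squares interpretation (not code lifted from Doris):

    // e_hat: residuals, qy_1: diagonal inverse covariance, nunk: unknowns
    real8 overall_model_test(const matrix<real8> &e_hat,
                             const matrix<real8> &qy_1, int32 nunk)
      {
      real8 t = 0.0;
      for (int32 i=0; i<int32(e_hat.lines()); i++)
        t += e_hat(i,0)*e_hat(i,0)*qy_1(i,0);       // e^T * Qy^-1 * e
      return t / real8(e_hat.lines() - nunk);       // divide by redundancy
      }
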
@@ -3433,11 +4288,11 @@ void coregpm(
     maxwL     = max(abs(wtestL),winL,dumm);     // returns winL
     maxwP     = max(abs(wtestP),winP,dumm);     // returns winP
     INFO << "maximum wtest statistic azimuth = " << maxwL
-         << " for window number: " 
+         << " for window number: "
          <<  Data(winL,0);
     INFO.print();
     INFO << "maximum wtest statistic range   = " << maxwP
-         << " for window number: " 
+         << " for window number: "
          <<  Data(winP,0);
     INFO.print();
     // --- use summed wtest for outlier detection ---
@@ -3446,7 +4301,7 @@ void coregpm(
     real8 maxwsum = max(wtestsum,winL,dumm);// idx to remove
     INFO << "Detected outlier:  summed sqr.wtest = " << maxwsum
          << "; observation: " << winL
-         << "; window number: " 
+         << "; window number: "
          <<  Data(winL,0);
     INFO.print();
 
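
The data snooping above uses w-test statistics of the standard form w_i = e_i / sqrt(sigma_i^2 * Qe_hat(i,i)); the window with the largest summed squared w-test over azimuth and range is removed and the adjustment repeated. A minimal sketch (formulation assumed from standard LS theory):

    #include <cmath>
    double wtest(double e_hat, double qe_hat_ii, double sigma2_i)
      {
      return e_hat / std::sqrt(qe_hat_ii * sigma2_i); // ~N(0,1) if no outlier
      }
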
@@ -3475,7 +4330,7 @@ void coregpm(
       }
 
     // ______ Only warn if last iteration has been done ______
-    if (DONE == 1) 
+    if (DONE == 1)
       {
       // ___ use trace buffer to store string, remember to rewind it ___
       if (overallmodeltestL > 10)
@@ -3523,7 +4378,7 @@ void coregpm(
   // ____ start added by FvL _________
   // Determine inverse transformation
   // (slave corners only, needed for overlap)
-  
+
   // ______ Normalize data for polynomial ______
   const real8 sminL     = slave.originalwindow.linelo;
   const real8 smaxL     = slave.originalwindow.linehi;
@@ -3532,7 +4387,7 @@ void coregpm(
 
   // ______Check redundancy______
   int32 Nobs = Data.lines();                          // Number of points > threshold
-  
+
   // ______Set up system of equations for slave______
   // ______Order unknowns: A00 A10 A01 A20 A11 A02 A30 A21 A12 A03 for degree=3______
   matrix<real8> srhsL;
@@ -3541,12 +4396,12 @@ void coregpm(
   matrix<real8> yP(Nobs,1);                   // observation
   matrix<real8> A(Nobs,Nunk);                 // designmatrix
   matrix<real8> Qy_1(Nobs,1);                 // a priori covariance matrix (diag)
-  
+
   // ______ Normalize data for polynomial ______
   INFO << "coregpm: slave polynomial normalized by factors: "
        << sminL << " " << smaxL << " " << sminP << " " << smaxP << " to [-2,2]";
   INFO.print();
-  
+
   // ______Fill matrices______
   DEBUG.print("Setting up design matrix for LS adjustment");
   for (i=0; i<Nobs; i++)
@@ -3555,7 +4410,7 @@ void coregpm(
       real8 posP = normalize(real8(Data(i,2)+Data(i,4)),sminP,smaxP);
       yL(i,0)    = real8(-Data(i,3));
       yP(i,0)    = real8(-Data(i,4));
-      DEBUG << "coregpm: (" << posL << ", "<< posP << "): yL=" 
+      DEBUG << "coregpm: (" << posL << ", "<< posP << "): yL="
             << yL(i,0) << " yP=" << yP(i,0);
       DEBUG.print();
       // ______Set up designmatrix______
@@ -3576,7 +4431,7 @@ void coregpm(
       {
       case 0:
         for (i=0; i<Nobs; i++)
-          Qy_1(i,0) = real8(1.0); 
+          Qy_1(i,0) = real8(1.0);
         break;
       case 1:
         DEBUG.print("Using sqrt(coherence) as weights.");
@@ -3608,7 +4463,7 @@ void coregpm(
         for (i=0; i<Nobs; i++)
           {
           // N_corr: number of samples for cross-corr; approx. FC_WINSIZE
-          // number of effictive samples depends on data ovs factor 
+          // number of effective samples depends on data ovs factor
           // Bamler 2000: also on oversampling ratio of data, but ignored here.
           const real4 N_corr   = real4(corrwinL*corrwinP)/real4(master.ovs_az*master.ovs_rg);
           const real4 coh      = Data(i,5);// estimated correlation; assume unbiased?
@@ -3677,9 +4532,9 @@ void coregpm(
   deltapixel_slaveNN = polyval(normalize(slave_lN,sminL,smaxL),
                           normalize(slave_pN,sminP,smaxP),
                           srhsP,DEGREE);
- 
+
   // ____ end added by FvL _________
- 
+
   // ______ Create dump file for making plots ______
   ofstream cpmdata("CPM_Data", ios::out | ios::trunc);
   bk_assert(cpmdata,"coregpm: CPM_DATA",__FILE__,__LINE__);
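
Both the forward and the inverse (slave-to-master) transformations are evaluated through polyval with coefficients packed in the order A00 A10 A01 A20 A11 A02 ... noted earlier. An illustrative re-implementation of that evaluation (the real polyval signature is assumed):

    #include <cmath>
    real8 polyval2d(real8 l, real8 p, const matrix<real8> &coeff, int32 degree)
      {
      real8 sum = 0.0;
      int32 k   = 0;
      for (int32 d=0; d<=degree; d++)       // total degree of each term
        for (int32 j=0; j<=d; j++)          // coefficient A(d-j,j)
          sum += coeff(k++,0) * std::pow(l,real8(d-j)) * std::pow(p,real8(j));
      return sum;
      }
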
@@ -3701,10 +4556,10 @@ void coregpm(
   //for (i=0; i<Nobs; i++)
   for (i=0; i<Data.lines(); i++)
     fprintf(cpm,
-    "%4.0f %5.0f %5.0f %# 9.2f %# 9.2f %# 6.2f %6.2f %6.2f %6.2f %6.2f\n",
-            Data(i,0), Data(i,1), Data(i,2), 
+    "%4.0f %5.4f %5.4f %# 9.5f %# 9.5f %# 6.2f %6.2f %6.2f %6.2f %6.2f\n",
+            Data(i,0), Data(i,1), Data(i,2),
             Data(i,3), Data(i,4), Data(i,5),
-            eL_hat(i,0), eP_hat(i,0), 
+            eL_hat(i,0), eP_hat(i,0),
             abs(wtestL(i,0)), abs(wtestP(i,0)));
   fclose(cpm);
 
@@ -3714,10 +4569,10 @@ void coregpm(
   bk_assert(scratchlogfile,"coregpm: scratchlogcpm",__FILE__,__LINE__);
   scratchlogfile
     << "\n\n*******************************************************************"
-    << "\n* COMP_COREGPM:" 
+    << "\n* COMP_COREGPM:"
     << "\n*******************************************************************"
     << "\nA polynomial model is weighted least squares estimated"
-    << "\nfor azimuth and range through the FINE offset vectors."  
+    << "\nfor azimuth and range through the FINE offset vectors."
     << "\nThe number of coefficient are the unknowns, the number of"
     << "\nobservations are the offset vectors above the THRESHOLD"
     << "\nspecified in the input file.  To estimate the unknowns, at"
@@ -3732,9 +4587,9 @@ void coregpm(
     << "\nthe observations and errors, which can be done with the utility"
     << "\nscripts provided by Doris (calls to GMT)."
     << "\nAlso see any book on LS methods."
-    << "\n\nDegree of model:\t\t\t\t" 
+    << "\n\nDegree of model:\t\t\t\t"
     << DEGREE
-    << "\nThreshold on data (correlation):\t\t\t" 
+    << "\nThreshold on data (correlation):\t\t\t"
     << THRESHOLD
     << "\nOversmaplings factor used in fine:           \t"
     << osfactor
@@ -3744,13 +4599,13 @@ void coregpm(
     << SIGMAL
     << "\nA priori sigma range (based on experience): \t"
     << SIGMAP
-    << "\nNumber of observations: \t\t\t" 
+    << "\nNumber of observations: \t\t\t"
     << Data.lines()
-    << "\nNumber of rejected observations: \t\t\t" 
+    << "\nNumber of rejected observations: \t\t\t"
     << ITERATION
-    << "\nNumber of unknowns: \t\t\t\t" 
+    << "\nNumber of unknowns: \t\t\t\t"
     << Nunk
-    << "\nOverall model test in Azimuth direction: \t" 
+    << "\nOverall model test in Azimuth direction: \t"
     << overallmodeltestL
     << "\nOverall model test in Range direction: \t\t"
     << overallmodeltestP
@@ -3758,7 +4613,7 @@ void coregpm(
     << maxwL
     << "\n  for window number: \t\t\t\t"
     <<  Data(winL,0)
-    << "\nLargest w test statistic in Range direction: \t" 
+    << "\nLargest w test statistic in Range direction: \t"
     << maxwP
     << "\n  for window number: \t\t\t\t"
     <<  Data(winP,0)
@@ -3827,7 +4682,11 @@ void coregpm(
     << "\n*_Start_" << processcontrol[pr_i_coregpm]
     << "\n*******************************************************************"
     << "\nDegree_cpm:\t" << DEGREE
+    << "\nNormalization_Lines:   \t" <<sminL<< " " <<smaxL<< ""
+    << "\nNormalization_Pixels:  \t" <<sminP<< " " <<smaxP<< ""
     << "\nEstimated_coefficientsL:\n";
+
+
   int32 coeffL = 0;
   int32 coeffP = 0;
   for (i=0; i<Nunk; i++)
@@ -3900,7 +4759,7 @@ void coregpm(
   y_axis(1,0) = maxP;
   normalize(x_axis,minL,maxL);
   normalize(y_axis,minP,maxP);
-  matrix<real4> offsetcornersL = polyval<real4>(x_axis,y_axis,Lcoeff);  // MA 
+  matrix<real4> offsetcornersL = polyval<real4>(x_axis,y_axis,Lcoeff);  // MA
   matrix<real4> offsetcornersP = polyval<real4>(x_axis,y_axis,Pcoeff);
   INFO.print(" ");
   INFO.print("Modeled transformation in azimuth:");
@@ -3929,8 +4788,8 @@ void coregpm(
     DEBUG.print("And account for currentwindow, not orig window...");
     PROGRESS.print("Started dumping evaluated model azimuth.");
     TRACE.print();// empty buffer to be sure
-    TRACE << "offsetazi_" << master.originalwindow.lines() 
-               <<          "_" << master.originalwindow.pixels() 
+    TRACE << "offsetazi_" << master.originalwindow.lines()
+               <<          "_" << master.originalwindow.pixels()
                << ".r4";
     char fileazi[ONE27];
     strcpy(fileazi,TRACE.get_str());
@@ -3963,11 +4822,11 @@ void coregpm(
          << " number of pixels: "
          << master.originalwindow.pixels();
     INFO.print();
-    
+
     // ______ same for range ______
     PROGRESS.print("Started dumping evaluated model range.");
     TRACE.print();// empty buffer to be sure
-    TRACE << "offsetrange_" << master.originalwindow.lines() 
+    TRACE << "offsetrange_" << master.originalwindow.lines()
              << "_" << master.originalwindow.pixels() << ".r4";
     char filerange[ONE27];
     strcpy(filerange,TRACE.get_str());
@@ -4050,7 +4909,7 @@ matrix<real4> getofffile(
         infile >> winnumber >> posL >> posP >> offL >> offP >> corr;
         infile.getline(dummyline,ONE27,'\n');           // goto next data record
         if (corr > threshold)
-          Nobs++; 
+          Nobs++;
         }
 
       if (Nobs == 0)
@@ -4129,7 +4988,7 @@ matrix<real4> cc4(
       y(i,0) = (alpha+2)*xx2*xx - (alpha+3)*xx2 + 1;
     else if (xx < 2)
       y(i,0) = alpha*xx2*xx - 5*alpha*xx2 + 8*alpha*xx - 4*alpha;
-    else 
+    else
       y(i,0) = 0.0;
     }
   return y;
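
Usage sketch for cc4 above: the input axis holds the distances from the interpolation point to its four neighbouring samples, e.g. for a 0.3-pixel shift (values below are illustrative):

    matrix<real4> x_axis(4,1);              // distances for a 0.3-pixel shift
    x_axis(0,0) =  1.3;   x_axis(1,0) =  0.3;
    x_axis(2,0) = -0.7;   x_axis(3,0) = -1.7;
    matrix<real4> weights = cc4(x_axis);    // 4 interpolation weights
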
@@ -4172,11 +5031,11 @@ matrix<real4> cc6(
       y(i,0) = (alpha-beta+2)*xx2*xx - (alpha-beta+3)*xx2 + 1;
     //y(i,0) = (alpha+beta+2)*xx2*xx - (alpha+beta+3)*xx2 + 1;??wrong in paper?
     else if (xx < 2)
-      y(i,0) =   alpha*xx2*xx - (5*alpha-beta)*xx2 
+      y(i,0) =   alpha*xx2*xx - (5*alpha-beta)*xx2
                + (8*alpha-3*beta)*xx - (4*alpha-2*beta);
     else if (xx < 3)
       y(i,0) = beta*xx2*xx - 8*beta*xx2 + 21*beta*xx - 18*beta;
-    else 
+    else
       y(i,0) = 0.;
     }
   return y;
@@ -4427,7 +5286,7 @@ matrix<real4> rc_kernel(
  * back afterwards. (see e.g. thesis Geudtner)                  *
  #%// BK 09-Nov-2000                                            *
  * Seems to be a bug in shifting the data spectrum if more      *
- * buffers are used, working on it.                             *
+ * buffers are used, working on it.                             *
  * (Increase FORSURE variable if crash)                         *
  #%// BK 19-Nov-2000                                            *
  ****************************************************************/
@@ -4438,15 +5297,17 @@ void resample(
         const slcimage          &slave,
         const matrix<real8>     &cpmL,          // coregistration parameters
         const matrix<real8>     &cpmP,          // coregistration parameters
-        const int16             &demassist
+        const int16             &demassist,
+        const matrix<real8>     &minMaxL,
+        const matrix<real8>     &minMaxP
 )
   {
   TRACE_FUNCTION("resample (BK 16-Mar-1999; BK 09-Nov-2000)")
-  if (resampleinput.shiftazi==true)
+  if (resampleinput.shiftazi == 1)
     DEBUG.print("shifting kernelL to data fDC BK 26-Oct-2002");
   // ___ Handle input ___
-  //const uint BUFFERMEMSIZE = generalinput.memory;       // Bytes  500MB --> 500 000 000 bytes 
-  const real8 BUFFERMEMSIZE = generalinput.memory;       // Bytes  500MB --> 500 000 000 bytes 
+  //const uint BUFFERMEMSIZE = generalinput.memory;       // Bytes  500MB --> 500 000 000 bytes
+  const real8 BUFFERMEMSIZE = generalinput.memory;       // Bytes  500MB --> 500 000 000 bytes
   const int32 Npoints      = resampleinput.method%100;  // #pnts interpolator
   if (isodd(Npoints))
     {
@@ -4458,12 +5319,18 @@ void resample(
   //const uint  Sfilelines   = slave.currentwindow.lines();
   const uint sizeofci16    = sizeof(compli16);
   const uint sizeofcr4     = sizeof(complr4);
+  const uint sizeofr4      = sizeof(real4); //[FvL]
 
   // ______ Normalize data for polynomial ______
-  const real8 minL         = master.originalwindow.linelo;
-  const real8 maxL         = master.originalwindow.linehi;
-  const real8 minP         = master.originalwindow.pixlo;
-  const real8 maxP         = master.originalwindow.pixhi;
+ // const real8 minL         = master.originalwindow.linelo;
+ // const real8 maxL         = master.originalwindow.linehi;
+ // const real8 minP         = master.originalwindow.pixlo;
+ // const real8 maxP         = master.originalwindow.pixhi;
+  const real8 minL           = minMaxL(0,0);
+  const real8 maxL           = minMaxL(1,0);
+  const real8 minP           = minMaxP(0,0);
+  const real8 maxP           = minMaxP(1,0);
+
   INFO << "resample: polynomial normalized by factors: "
        << minL << " " << maxL << " " << minP << " " << maxP << " to [-2,2]";
   INFO.print();
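
With the new signature the normalization bounds travel as 2x1 matrices instead of being derived from master.originalwindow (the commented-out lines above). A caller-side sketch that would reproduce the old behaviour (names illustrative):

    matrix<real8> minMaxL(2,1), minMaxP(2,1);
    minMaxL(0,0) = master.originalwindow.linelo;   // minL
    minMaxL(1,0) = master.originalwindow.linehi;   // maxL
    minMaxP(0,0) = master.originalwindow.pixlo;    // minP
    minMaxP(1,0) = master.originalwindow.pixhi;    // maxP
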
@@ -4494,7 +5361,8 @@ void resample(
   // ______  table[0]= 0 1 0 0 ;table[INTERVAL]= 0 0 1 0
   // ______ intervals in lookup table: dx
   // ______ for high doppler 100 is OK (fdc=3prf; 6pi --> 10deg error?)
-  const int32 INTERVAL  = 127;                          // precision: 1./INTERVAL [pixel]
+  // ______ 2047? 4095? which one is better for Sentinel-1 (Wu Wenhao)
+  const int32 INTERVAL  = 2047;                          // precision: 1./INTERVAL [pixel]
   const int32 Ninterval = INTERVAL + 1;                 // size of lookup table
   const real8 dx        = 1.0/INTERVAL;                 // interval look up table
   INFO << "resample: lookup table size: " << Ninterval;
@@ -4526,70 +5394,70 @@ void resample(
       {
       // --- Extremely simple kernels (not good, but fast) ---
       case rs_rect:
-        (*pntKernelAz[i]) = mat2cr4(rect(x_axis)); 
-        (*pntKernelRg[i]) = mat2cr4(rect(x_axis)); 
+        (*pntKernelAz[i]) = mat2cr4(rect(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(rect(x_axis));
         break;
       case rs_tri:
-        (*pntKernelAz[i]) = mat2cr4(tri(x_axis));  
-        (*pntKernelRg[i]) = mat2cr4(tri(x_axis));  
+        (*pntKernelAz[i]) = mat2cr4(tri(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(tri(x_axis));
         break;
       // --- Truncated sinc ---
       case rs_ts6p:
-        (*pntKernelAz[i]) = mat2cr4(ts6(x_axis));  
-        (*pntKernelRg[i]) = mat2cr4(ts6(x_axis));  
+        (*pntKernelAz[i]) = mat2cr4(ts6(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(ts6(x_axis));
         break;
       case rs_ts8p:
-        (*pntKernelAz[i]) = mat2cr4(ts8(x_axis));  
-        (*pntKernelRg[i]) = mat2cr4(ts8(x_axis));  
+        (*pntKernelAz[i]) = mat2cr4(ts8(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(ts8(x_axis));
         break;
       case rs_ts16p:
-        (*pntKernelAz[i]) = mat2cr4(ts16(x_axis)); 
-        (*pntKernelRg[i]) = mat2cr4(ts16(x_axis)); 
+        (*pntKernelAz[i]) = mat2cr4(ts16(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(ts16(x_axis));
         break;
       // --- Cubic Convolution kernel: theoretical better than truncated sinc. ---
       case rs_cc4p:
-        (*pntKernelAz[i]) = mat2cr4(cc4(x_axis));  
-        (*pntKernelRg[i]) = mat2cr4(cc4(x_axis));  
+        (*pntKernelAz[i]) = mat2cr4(cc4(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(cc4(x_axis));
         break;
       case rs_cc6p:
-        (*pntKernelAz[i]) = mat2cr4(cc6(x_axis));  
-        (*pntKernelRg[i]) = mat2cr4(cc6(x_axis));  
+        (*pntKernelAz[i]) = mat2cr4(cc6(x_axis));
+        (*pntKernelRg[i]) = mat2cr4(cc6(x_axis));
         break;
       // --- KNAB kernel: theoretical better than cubic conv. ---
       case rs_knab4p:
-        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,4)); 
-        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,4)); 
+        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,4));
+        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,4));
         break;
       case rs_knab6p:
-        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,6)); 
-        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,6)); 
+        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,6));
+        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,6));
         break;
       case rs_knab8p:
-        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,8)); 
-        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,8)); 
+        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,8));
+        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,8));
         break;
       case rs_knab10p:
-        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,10)); 
-        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,10)); 
+        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,10));
+        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,10));
         break;
       case rs_knab16p:
-        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,16)); 
-        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,16)); 
+        (*pntKernelAz[i]) = mat2cr4(knab(x_axis,CHI_az,16));
+        (*pntKernelRg[i]) = mat2cr4(knab(x_axis,CHI_rg,16));
         break;
       // --- Raised cosine: theoretical best ---
       case rs_rc6p:
-        (*pntKernelAz[i]) = mat2cr4(rc_kernel(x_axis,CHI_az,6)); 
-        (*pntKernelRg[i]) = mat2cr4(rc_kernel(x_axis,CHI_rg,6)); 
+        (*pntKernelAz[i]) = mat2cr4(rc_kernel(x_axis,CHI_az,6));
+        (*pntKernelRg[i]) = mat2cr4(rc_kernel(x_axis,CHI_rg,6));
         break;
       case rs_rc12p:
-        (*pntKernelAz[i]) = mat2cr4(rc_kernel(x_axis,CHI_az,12)); 
-        (*pntKernelRg[i]) = mat2cr4(rc_kernel(x_axis,CHI_rg,12)); 
+        (*pntKernelAz[i]) = mat2cr4(rc_kernel(x_axis,CHI_az,12));
+        (*pntKernelRg[i]) = mat2cr4(rc_kernel(x_axis,CHI_rg,12));
         break;
       default:
         PRINT_ERROR("impossible.")
         throw(unhandled_case_error);
       }//kernel selector
-    (*pntAxis[i]) = x_axis;// to shift kernelL use: k*=exp(-i*2pi*axis*fdc/prf) 
+    (*pntAxis[i]) = x_axis;// to shift kernelL use: k*=exp(-i*2pi*axis*fdc/prf)
     x_axis       -= dx;    // Note: 'wrong' way (mirrored)
     }
   // ====== Usage: pntKernelAz[0]->showdata(); or (*pntKernelAz[0][0]).showdata(); ======
@@ -4634,7 +5502,7 @@ void resample(
   real4 ms_r_timing_error_P = real4(slave.r_timing_error);
   const int32 sizer8  = sizeof(real8);
   ifstream DeltaLfile, DeltaPfile;
-    
+
   if (demassist)
     {
       openfstream(DeltaLfile,"dac_delta_line.raw");
@@ -4642,18 +5510,18 @@ void resample(
       openfstream(DeltaPfile,"dac_delta_pixel.raw");
       bk_assert(DeltaPfile,"dac_delta_pixel.raw",__FILE__,__LINE__);
     }
-  
+
   streampos pos;
 
   // ______Corners of overlap in master system______
   // changed by FvL
-  
+
   window overlap;
   if (demassist)
     overlap = getoverlap(master,slave,real8(Npointsd2),real8(ms_az_timing_error_L),real8(ms_r_timing_error_P));
   else
     overlap = getoverlap(master,slave,real8(Npointsd2),real8(0),real8(0));
-  
+
 
   // ====== Adjust overlap possibly for RS_DBOW card ======
   int32 write0lines1  = 0;                      // DBOW card, 0's at start
@@ -4754,6 +5622,8 @@ void resample(
   // ______ Declare/allocate matrices ______
   matrix<complr4> BUFFER;                       // load after output is written
   matrix<complr4> RESULT(nlines,overlap.pixhi-overlap.pixlo+1);
+  matrix<real4> SLAVE_LINE(nlines,overlap.pixhi-overlap.pixlo+1); // for output final shifts [FvL]
+  matrix<real4> SLAVE_PIXEL(nlines,overlap.pixhi-overlap.pixlo+1); // for output final shifts [FvL]
   matrix<complr4> PART(Npoints,Npoints);
 
 #ifdef __USE_VECLIB_LIBRARY__
@@ -4766,12 +5636,27 @@ void resample(
 #endif
 
 
-  // ====== Open output file ======
+  // ====== Open output files ======
   ofstream ofile;
   openfstream(ofile,resampleinput.fileout,generalinput.overwrit);
   bk_assert(ofile,resampleinput.fileout,__FILE__,__LINE__);
 
+  ofstream slavelineofile;
+  openfstream(slavelineofile,"rsmp_orig_slave_line.raw",generalinput.overwrit);
+  bk_assert(slavelineofile,"rsmp_orig_slave_line.raw",__FILE__,__LINE__);
+
+  ofstream slavepixelofile;
+  openfstream(slavepixelofile,"rsmp_orig_slave_pixel.raw",generalinput.overwrit);
+  bk_assert(slavepixelofile,"rsmp_orig_slave_pixel.raw",__FILE__,__LINE__);
+
+  //ofstream slavelineofile("rsmp_orig_slave_line.raw", ios::out | ios::trunc); //[FvL]
+  //bk_assert(slavelineofile,"rsmp_orig_slave_line.raw",__FILE__,__LINE__);
+
+  //ofstream slavepixelofile("rsmp_orig_slave_pixel.raw", ios::out | ios::trunc); //[FvL]
+  //bk_assert(slavepixelofile,"rsmp_orig_slave_pixel.raw",__FILE__,__LINE__);
+
   // ________ First write zero lines if appropriate (DBOW) ______
+  const real4 zeror4(0); //[FvL]
   switch (resampleinput.oformatflag)
     {
     case FORMATCR4:
@@ -4781,17 +5666,25 @@ void resample(
         for (int32 thispixel=0;
              thispixel<int32(RESULT.pixels())+write0pixels1+write0pixelsN;
              ++thispixel)
+          {
           ofile.write((char*)&zerocr4,sizeofcr4);
+          slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+          slavepixelofile.write((char*)&zeror4,sizeofr4);
+          }
       break;
       }
     case FORMATCI2:
       {
       const compli16 zeroci16(0,0);
       for (int32 thisline=0; thisline<write0lines1; ++thisline)
-        for (int32 thispixel=0; 
+        for (int32 thispixel=0;
              thispixel<int32(RESULT.pixels())+write0pixels1+write0pixelsN;
              ++thispixel)
+          {
           ofile.write((char*)&zeroci16,sizeofci16);
+          slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+          slavepixelofile.write((char*)&zeror4,sizeofr4);
+          }
       break;
       }
     default:
@@ -4852,14 +5745,20 @@ void resample(
             for (int32 thispixel=0; thispixel<write0pixels1; ++thispixel)
               {
               ofile.write((char*)&zerocr4,sizeofcr4);
+              slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+              slavepixelofile.write((char*)&zeror4,sizeofr4);
               }
             // ______ WRITE the interpolated data per row ______
             ofile.write((char*)&RESULT[thisline][0],RESULT.pixels()*sizeof(RESULT(0,0)));
+            slavelineofile.write((char*)&SLAVE_LINE[thisline][0],SLAVE_LINE.pixels()*sizeof(SLAVE_LINE(0,0))); //[FvL]
+            slavepixelofile.write((char*)&SLAVE_PIXEL[thisline][0],SLAVE_PIXEL.pixels()*sizeof(SLAVE_PIXEL(0,0)));
             // ______ Write zero pixels at end ______
             for (int32 thispixel=0; thispixel<write0pixelsN; ++thispixel)
               {
               ofile.write((char*)&zerocr4,sizeofcr4);
-              }
+              slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+              slavepixelofile.write((char*)&zeror4,sizeofr4);
+            }
             }
           break;
           }
@@ -4873,6 +5772,8 @@ void resample(
             for (int32 thispixel=0; thispixel<write0pixels1; ++thispixel)
               {
               ofile.write((char*)&zeroci16,sizeofci16);
+              slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+              slavepixelofile.write((char*)&zeror4,sizeofr4);
               }
             // ______ Write the interpolated data per row ______
             for (int32 thispixel=0; thispixel<int32(RESULT.pixels()); ++thispixel)
@@ -4880,11 +5781,15 @@ void resample(
               // no default conversion, this seems slow, test this (BK)
               castedresult = cr4toci2(RESULT(thisline,thispixel));
               ofile.write((char*)&castedresult,sizeofci16);
+              slavelineofile.write((char*)&SLAVE_LINE(thisline,thispixel),sizeofr4); //[FvL] one value per pixel
+              slavepixelofile.write((char*)&SLAVE_PIXEL(thisline,thispixel),sizeofr4);
               }
             // ______ Write zero pixels at end ______
             for (int32 thispixel=0; thispixel<write0pixelsN; ++thispixel)
               {
               ofile.write((char*)&zeroci16,sizeofci16);
+              slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+              slavepixelofile.write((char*)&zeror4,sizeofr4);
               }
             }
           break;
@@ -4893,7 +5798,7 @@ void resample(
           PRINT_ERROR("impossible format")
           throw(unhandled_case_error);
         }
-      }
+      }//end if linecnt
 
     else // output buffer not full yet
       {
@@ -4911,22 +5816,22 @@ void resample(
           pos = (streampos)(pos * sizer8);
           DeltaLfile.seekg(pos,ios::beg);                  // [MA] better to check for failbit
           DeltaLfile.read((char*)&deltaL_dem,sizer8);
-          
+
           deltaL_poly = polyval(normalize(real4(line),minL,maxL),
                                 normalize(real4(overlap.pixlo),minP,maxP),
                                 cpmL,degree_cpmL);
-          
+
           real4 firstline_pixlo  = real4(line  + deltaL_dem + deltaL_poly + ms_az_timing_error_L);
 
           pos = (streampos)((line-master.currentwindow.linelo)*master.currentwindow.pixels() + overlap.pixhi - master.currentwindow.pixlo);
           pos = (streampos)(pos * sizer8);
           DeltaLfile.seekg(pos,ios::beg);
           DeltaLfile.read((char*)&deltaL_dem,sizer8);
-          
+
           deltaL_poly = polyval(normalize(real4(line),minL,maxL),
                                 normalize(real4(overlap.pixhi),minP,maxP),
                                 cpmL,degree_cpmL);
-          
+
           real4 firstline_pixhi  = real4(line  + deltaL_dem + deltaL_poly + ms_az_timing_error_L);
 
 
@@ -4935,7 +5840,7 @@ void resample(
           // LAST BUFFER FIX
           // [DON] Davide Nitti,  the overrun of last line due to buffer nlines.
           // start added by don
-          if (line2 > int32(master.currentwindow.linehi)) 
+          if (line2 > int32(master.currentwindow.linehi))
           {
              DEBUG << "Variable line2: [ACTUAL Value: " << line2 << " - NEW Value: " << master.currentwindow.linehi << "]";
              DEBUG.print();
@@ -4947,50 +5852,50 @@ void resample(
           pos = (streampos)(pos * sizer8);
           DeltaLfile.seekg(pos,ios::beg);
           DeltaLfile.read((char*)&deltaL_dem,sizeof(deltaL_dem)); // [MA] sizer8 --> sizeof(deltaL_dem)
-          
+
           if ( DeltaLfile.fail() ) { // [MA]  put it to a proper class
           WARNING << "Failed to read position: " << pos  ; // coherence will be lost in lastbuffer
           WARNING.print() ;
-          // exit(1) 
+          // exit(1)
           }
 
           deltaL_poly = polyval(normalize(real4(line2),minL,maxL),
                                 normalize(real4(overlap.pixlo),minP,maxP),
                                 cpmL,degree_cpmL);
-          
+
           real4 lastline_pixlo  = (real4)(line2  + deltaL_dem + deltaL_poly + ms_az_timing_error_L);
 
           pos = (streampos)((line2-master.currentwindow.linelo)*master.currentwindow.pixels() + overlap.pixhi - master.currentwindow.pixlo);
           pos = (streampos)(pos * sizer8);
           DeltaLfile.seekg(pos,ios::beg);
           DeltaLfile.read((char*)&deltaL_dem,sizer8);
-          
+
           deltaL_poly = polyval(normalize(real4(line2),minL,maxL),
                                 normalize(real4(overlap.pixhi),minP,maxP),
                                 cpmL,degree_cpmL);
-          
+
           real4 lastline_pixhi  = (real4)(line2  + deltaL_dem + deltaL_poly + ms_az_timing_error_L);
 
           firstline = int32(ceil(min(firstline_pixlo,firstline_pixhi)))-Npoints;
-          lastline  = int32(ceil(min(lastline_pixlo,lastline_pixhi)))+Npoints;        
+          lastline  = int32(ceil(min(lastline_pixlo,lastline_pixhi)))+Npoints;
         }
       else
         {
-          firstline = int32(ceil(min(line + 
+          firstline = int32(ceil(min(line +
                        polyval(normalize(real4(line),minL,maxL),
                                normalize(real4(overlap.pixlo),minP,maxP),
                                              cpmL,degree_cpmL),
-                       line + 
+                       line +
                        polyval(normalize(real4(line),minL,maxL),
                                normalize(real4(overlap.pixhi),minP,maxP),
                                cpmL,degree_cpmL))))
                            - Npoints;
           int32 line2 = line + nlines - 1;
-          lastline  = int32(ceil(min(line2 + 
+          lastline  = int32(ceil(min(line2 +
                         polyval(normalize(real4(line2),minL,maxL),
                                 normalize(real4(overlap.pixlo),minP,maxP),
                                 cpmL,degree_cpmL),
-                                 line2 + 
+                                 line2 +
                         polyval(normalize(real4(line2),minL,maxL),
                                 normalize(real4(overlap.pixhi),minP,maxP),
                                 cpmL,degree_cpmL))))
@@ -5000,7 +5905,7 @@ void resample(
       //const int32 FORSURE = 25;         // extend buffer by 2*FORSURE start/end
       int32 FORSURE = 25;         // extend buffer by 2*FORSURE start/end
       if ( master.ovs_az > 1 && master.ovs_az < 32  ) // [MA] To avoid any extreme value in the result file.
-       { 
+       {
         FORSURE = FORSURE*master.ovs_az;              // [MA] the value should scale with oversampling otherwise it may fail.
         DEBUG << "FORSURE: " << FORSURE << " extra lines before and after each buffer (oversampled)";
         DEBUG.print();
@@ -5009,8 +5914,8 @@ void resample(
        {
         DEBUG << "FORSURE: " << FORSURE << " extra lines before and after each buffer (zero-looked)";
         DEBUG.print();
-       } 
-       
+       }
+
       firstline -= FORSURE; // extend buffer
       lastline  += FORSURE; // extend buffer
 
@@ -5019,7 +5924,7 @@ void resample(
       if (firstline < int32(slave.currentwindow.linelo))
         firstline = slave.currentwindow.linelo;
 
-      if (lastline > int32(slave.currentwindow.linehi)) 
+      if (lastline > int32(slave.currentwindow.linehi))
         lastline = slave.currentwindow.linehi;
       // ______ Fill slave BUFFER from disk ______
       window winslavefile(firstline, lastline,  // part of slave loaded
@@ -5038,17 +5943,17 @@ void resample(
       {
         if (demassist)
           {
-            
+
             //pos = overlap.pixels() * ( line - overlap.linelo ) + pixel - overlap.pixlo;
             pos = (streampos)((line-master.currentwindow.linelo)*master.currentwindow.pixels() + pixel - master.currentwindow.pixlo);
             pos = (streampos)(pos * sizer8);
 
             DeltaLfile.seekg(pos,ios::beg);
             DeltaPfile.seekg(pos,ios::beg);
-            
+
             DeltaLfile.read((char*)&deltaL_dem,sizer8);
             DeltaPfile.read((char*)&deltaP_dem,sizer8);
-            
+
             deltaL_poly = polyval(normalize(real4(line),minL,maxL),
                       normalize(real4(pixel),minP,maxP),
                       cpmL,degree_cpmL);
@@ -5068,7 +5973,7 @@ void resample(
             //interpL = line  + polyval(line,pixel,cpmL,degree_cpmL); // e.g. 255.35432
             //interpP = pixel + polyval(line,pixel,cpmP,degree_cpmP); // e.g. 2.5232
             // ______ BK USE normalized coordinates, do this smarter .... !!!!
-            interpL = line  + 
+            interpL = line  +
               polyval(normalize(real4(line),minL,maxL),
                       normalize(real4(pixel),minP,maxP),
                       cpmL,degree_cpmL);                              // e.g. 255.35432
@@ -5107,7 +6012,7 @@ void resample(
         }
       if (firstL+Npointsm1 > slave.currentwindow.linehi)
         {
-        WARNING << "lastL larger than on disk (required for interpolation). continuing" 
+        WARNING << "lastL larger than on disk (required for interpolation). continuing"
         << "lineL: " << firstL+Npointsm1 << " > " << slave.currentwindow.linehi ;
         WARNING.print();
         RESULT(linecnt,pixel-overlap.pixlo) = complr4(0.,0.);
@@ -5128,7 +6033,7 @@ void resample(
 #endif
 
       // ______ Shift azimuth kernel with fDC before interpolation ______
-      if (resampleinput.shiftazi==true)
+      if (resampleinput.shiftazi == 1)
         {
         // ___ Doppler centroid is function of range only ____
         const real4 tmp = 2.0*PI*slave.pix2fdc(interpP)/slave.prf;
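
The azimuth kernel is shifted here by a phase ramp so that its passband follows the Doppler centroid, as the comment k*=exp(-i*2pi*axis*fdc/prf) describes elsewhere. A self-contained sketch of that operation (assumed equivalent form):

    #include <cmath>
    #include <complex>
    #include <vector>
    void shift_kernel(std::vector<std::complex<float> > &kernelL,
                      const std::vector<float> &axis,  // relative line coords
                      float fdc, float prf)
      {
      const float w = 2.0f*3.14159265f*fdc/prf;
      for (size_t k=0; k<kernelL.size(); ++k)          // k *= exp(-j*w*axis)
        kernelL[k] *= std::exp(std::complex<float>(0.0f, -w*axis[k]));
      }
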
@@ -5161,9 +6066,16 @@ void resample(
         RESULT(linecnt,pixel-overlap.pixlo) = complr4(ANS.re,ANS.im);
         #else // do not use VECLIB
         // ______ NO VECLIB: slower, but works ______
-        RESULT(linecnt,pixel-overlap.pixlo) = 
+        RESULT(linecnt,pixel-overlap.pixlo) =
              ((matTxmat(PART*kernelP, kernelL))(0,0));
         #endif // VECLIB y/n
+
+	    // ========== collect final shifts ====================== [FvL]
+	    // (required for re-ramping of the spectrum for TOPS data
+
+	    SLAVE_LINE(linecnt,pixel-overlap.pixlo) = interpL;
+	    SLAVE_PIXEL(linecnt,pixel-overlap.pixlo) = interpP;
+
       } // for all pixels in overlap
     } // for all lines in overlap
 
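
The two new rasters record, for every resampled master-grid sample, the original slave (line,pixel) that was interpolated, so that a later step (e.g. re-ramping of TOPS data, per the comment above) can recover the azimuth phase ramp. A hedged consumer-side sketch reading them back as plain real4 streams:

    #include <fstream>
    std::ifstream lf("rsmp_orig_slave_line.raw",  std::ios::binary);
    std::ifstream pf("rsmp_orig_slave_pixel.raw", std::ios::binary);
    float l, p;                     // one sample; loop over the whole raster
    lf.read(reinterpret_cast<char*>(&l), sizeof(float));
    pf.read(reinterpret_cast<char*>(&p), sizeof(float));
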
@@ -5175,26 +6087,50 @@ void resample(
        << "] (master coord. system)";
   DEBUG.print();
 
+   const int16 nofLinesBuf = 1; // DO NOT change it to more than ONE line
+ //  const int32 maxNofBuf = int32(floor(real4(linecnt)/real4(nofLinesBuf)));
   // ______ Actually write ______
   switch (resampleinput.oformatflag)
     {
     case FORMATCR4:
       {
-      const complr4 zerocr4(0,0);
+
+
+          // This buffer is one line: RESULT.pixels() plus the zero-border pixels
+       matrix<complr4> thisBuffer(nofLinesBuf,RESULT.pixels()+write0pixels1+write0pixelsN);
+       matrix<real4> thisBuffer_line(nofLinesBuf,SLAVE_LINE.pixels()+write0pixels1+write0pixelsN);
+       matrix<real4> thisBuffer_pixel(nofLinesBuf,SLAVE_PIXEL.pixels()+write0pixels1+write0pixelsN);
+       
+       DEBUG << "thisBuffer pixels : " << thisBuffer.pixels() << "\n";
+       DEBUG << "thisBuffer lines:   " << thisBuffer.lines();
+       DEBUG.print();
+       
       for (int32 thisline=0; thisline<=linecnt; thisline++)
         {
-        // ______ Write zero pixels at start ______
-        for (int32 thispixel=0; thispixel<write0pixels1; ++thispixel)
-          {
-          ofile.write((char*)&zerocr4,sizeofcr4);
-          }
-        // ______ WRITE the interpolated data per row ______
-        ofile.write((char*)&RESULT[thisline][0],RESULT.pixels()*sizeofcr4);
-        // ______ Write zero pixels at end ______
-        for (int32 thispixel=0; thispixel<write0pixelsN; ++thispixel)
-          {
-          ofile.write((char*)&zerocr4,sizeofcr4);
-          }
+        // Buffer one full line rather than writing each pixel separately
+
+          // Write the results per line:
+          // first copy the results into the corresponding window
+           window windef(0,0,0,0);                       // default, thus copy to total matrix
+
+           // The allocation window covers line 0 only, from pixel
+           // write0pixels1 up to pixel write0pixels1+RESULT.pixels()-1;
+           // the rest are automatically set to zero
+           window win1(0, 0, write0pixels1, write0pixels1+RESULT.pixels()-1);
+
+           // Copy the corresponding line into this buffer
+          thisBuffer.setdata(win1,RESULT.getrow(thisline),windef);
+          thisBuffer_line.setdata(win1,SLAVE_LINE.getrow(thisline),windef);
+          thisBuffer_pixel.setdata(win1,SLAVE_PIXEL.getrow(thisline),windef);
+
+            // Dump data to file
+            // ______ WRITE the interpolated data per row ______
+              ofile.write((char*)&thisBuffer[0][0],thisBuffer.pixels()*sizeof(thisBuffer(0,0)));
+              slavelineofile.write((char*)&thisBuffer_line[0][0],thisBuffer_line.pixels()*sizeof(thisBuffer_line(0,0))); //[FvL]
+              slavepixelofile.write((char*)&thisBuffer_pixel[0][0],thisBuffer_pixel.pixels()*sizeof(thisBuffer_pixel(0,0)));
+
+
+
         }
       break;
       }
@@ -5202,24 +6138,41 @@ void resample(
       {
       const compli16 zeroci16(0,0);
       compli16 castedresult;
+       matrix<compli16> thisBuffer(nofLinesBuf,RESULT.pixels()+write0pixels1+write0pixelsN);
+       matrix<real4> thisBuffer_line(nofLinesBuf,SLAVE_LINE.pixels()+write0pixels1+write0pixelsN);
+       matrix<real4> thisBuffer_pixel(nofLinesBuf,SLAVE_PIXEL.pixels()+write0pixels1+write0pixelsN);
+       
+       DEBUG << "thisBuffer pixels : " << thisBuffer.pixels() << "\n";
+       DEBUG << "thisBuffer lines:   " << thisBuffer.lines();
+       DEBUG.print();
+       
       for (int32 thisline=0; thisline<=linecnt; thisline++)
         {
-        // ______ Write zero pixels at start ______
-        for (int32 thispixel=0; thispixel<write0pixels1; ++thispixel)
-          {
-          ofile.write((char*)&zeroci16,sizeofci16);
-          }
-        // ______ Write the interpolated data per row ______
-        for (int32 thispixel=0; thispixel<int32(RESULT.pixels()); thispixel++)
-          {
-          castedresult = cr4toci2(RESULT(thisline,thispixel));
-          ofile.write((char*)&castedresult,sizeofci16);
-          }
-        // ______ Write zero pixels at end ______
-        for (int32 thispixel=0; thispixel<write0pixelsN; ++thispixel)
-          {
-          ofile.write((char*)&zeroci16,sizeofci16);
-          }
+        // Buffer one full line rather than writing each pixel separately
+         // Write the results per line:
+          // first copy the results into the corresponding window
+           window windef(0,0,0,0);                       // default, thus copy to total matrix
+           window win1(0, 0, write0pixels1, write0pixels1+RESULT.pixels()-1);
+
+           // The allocation window covers line 0 only, from pixel
+           // write0pixels1 up to pixel write0pixels1+RESULT.pixels()-1;
+           // the rest are automatically set to zero.
+           // A loop is needed for the casted data
+          for (int32 thisPx = write0pixels1; thisPx <= write0pixels1+int32(RESULT.pixels())-1; thisPx++)
+           thisBuffer(0,thisPx) = cr4toci2(RESULT(thisline,thisPx-write0pixels1));
+
+
+          thisBuffer_line.setdata(win1,SLAVE_LINE.getrow(thisline),windef);
+          thisBuffer_pixel.setdata(win1,SLAVE_PIXEL.getrow(thisline),windef);
+
+            // Dump data to file
+            // ______ WRITE the interpolated data per row ______
+
+              ofile.write((char*)&thisBuffer[0][0],thisBuffer.pixels()*sizeof(thisBuffer(0,0)));
+              slavelineofile.write((char*)&thisBuffer_line[0][0],thisBuffer_line.pixels()*sizeof(thisBuffer_line(0,0))); //[FvL]
+              slavepixelofile.write((char*)&thisBuffer_pixel[0][0],thisBuffer_pixel.pixels()*sizeof(thisBuffer_pixel(0,0)));
+
+
         }
       break;
       }
@@ -5239,7 +6192,11 @@ void resample(
         for (int32 thispixel=0;
              thispixel<int32(RESULT.pixels())+write0pixels1+write0pixelsN;
              ++thispixel)
+          {
           ofile.write((char*)&zerocr4,sizeofcr4);
+          slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+          slavepixelofile.write((char*)&zeror4,sizeofr4);
+          }
       break;
       }
     case FORMATCI2:
@@ -5249,7 +6206,11 @@ void resample(
         for (int32 thispixel=0;
              thispixel<int32(RESULT.pixels())+write0pixels1+write0pixelsN;
              ++thispixel)
+          {
           ofile.write((char*)&zeroci16,sizeofci16);
+          slavelineofile.write((char*)&zeror4,sizeofr4); //[FvL]
+          slavepixelofile.write((char*)&zeror4,sizeofr4);
+          }
       break;
       }
     default:
@@ -5257,8 +6218,10 @@ void resample(
       throw(unhandled_case_error);
     }
   ofile.close();
+  slavelineofile.close(); //[FvL]
+  slavepixelofile.close(); //[FvL]
 
-
+
 
   // ====== Write results to slave resfile ======
   char rsmethod[EIGHTY];
@@ -5274,37 +6237,37 @@ void resample(
       strcpy(rsmethod,"4 point cubic convolution");
       break;
     case rs_cc6p:
-      strcpy(rsmethod,"6 point cubic convolution"); 
+      strcpy(rsmethod,"6 point cubic convolution");
       break;
     case rs_ts6p:
-      strcpy(rsmethod,"6 point truncated sinc"); 
+      strcpy(rsmethod,"6 point truncated sinc");
       break;
     case rs_ts8p:
-      strcpy(rsmethod,"8 point truncated sinc"); 
+      strcpy(rsmethod,"8 point truncated sinc");
       break;
     case rs_ts16p:
-      strcpy(rsmethod,"16 point truncated sinc"); 
+      strcpy(rsmethod,"16 point truncated sinc");
       break;
     case rs_knab4p:
-      strcpy(rsmethod,"4 point knab kernel"); 
+      strcpy(rsmethod,"4 point knab kernel");
       break;
     case rs_knab6p:
-      strcpy(rsmethod,"6 point knab kernel"); 
+      strcpy(rsmethod,"6 point knab kernel");
       break;
     case rs_knab8p:
-      strcpy(rsmethod,"8 point knab kernel"); 
+      strcpy(rsmethod,"8 point knab kernel");
       break;
     case rs_knab10p:
-      strcpy(rsmethod,"10 point knab kernel"); 
+      strcpy(rsmethod,"10 point knab kernel");
       break;
     case rs_knab16p:
-      strcpy(rsmethod,"16 point knab kernel"); 
+      strcpy(rsmethod,"16 point knab kernel");
       break;
     case rs_rc6p:
-      strcpy(rsmethod,"6 point raised cosine kernel"); 
+      strcpy(rsmethod,"6 point raised cosine kernel");
       break;
     case rs_rc12p:
-      strcpy(rsmethod,"12 point raised cosine kernel"); 
+      strcpy(rsmethod,"12 point raised cosine kernel");
       break;
     default:
       PRINT_ERROR("impossible.")
@@ -5349,10 +6312,12 @@ void resample(
 
   ofstream scratchresfile("scratchresresample", ios::out | ios::trunc);
   bk_assert(scratchresfile,"resample: scratchresresample",__FILE__,__LINE__);
-  scratchresfile 
+  scratchresfile
     << "\n\n*******************************************************************"
     << "\n*_Start_" << processcontrol[pr_s_resample]
     << "\n*******************************************************************"
+    << "\nNormalization_Lines:   \t" <<minL<< " " <<maxL<< ""
+    << "\nNormalization_Pixels:  \t" <<minP<< " " <<maxP<< ""
     << "\nShifted azimuth spectrum:             \t\t"
     <<  resampleinput.shiftazi
     << "\nData_output_file:                     \t\t"
@@ -5415,7 +6380,7 @@ void ms_timing_error(
         int32                 &coarse_orbit_offsetP)
   {
   TRACE_FUNCTION("ms_timing_error (FvL 6-SEP-2007)")
-    
+
     INFO << coarse_orbit_offsetL;
     INFO.print();
     INFO << coarse_orbit_offsetP;
@@ -5434,14 +6399,14 @@ void ms_timing_error(
   const real8 maxP     = master.originalwindow.pixhi;
 
   // ______ A priori sigma of  offset ______
-  // ______ Read this factor from the result file 
+  // ______ Read this factor from the result file
   // ______ "Oversampling factor: 32"
   // ______ "Window_size_L_for_correlation: 4"
   // ______ "Window_size_P_for_correlation: 121"
   DEBUG.print("Reading oversampling factor from result file");
   uint osfactor  = 32;// oversamplingsfactor
-  int32 corrwinL = 64;// window size to compute FINE correlation 
-  int32 corrwinP = 64;// window size to compute FINE correlation 
+  int32 corrwinL = 64;// window size to compute FINE correlation
+  int32 corrwinP = 64;// window size to compute FINE correlation
   char c4osfactor[4];
   char c10corrwinL[10];
   char c10corrwinP[10];
@@ -5451,7 +6416,7 @@ void ms_timing_error(
   if (found) corrwinL = int32(atoi(c10corrwinL));
   found = readres(c10corrwinP,sizeof(c10corrwinP),i_resfile, "Window_size_P_for_correlation:", 0);
   if (found) corrwinP = int32(atoi(c10corrwinP));
-  corrwinL = max(10,corrwinL-8);// if fft method peak is not at center 
+  corrwinL = max(10,corrwinL-8);// if fft method peak is not at center
   corrwinP = max(10,corrwinP-8);//  +then effective number of samples is smaller
   // _____ oversampling factor is bin in which maximum can be found _____
   // _____ ovsf=16-->apriorisigma=0.03
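
These comments tie the a priori sigma to the oversampling factor: the correlation peak is located on a grid of 1/osfactor pixel, and a uniform error over one such bin has sigma = (1/osfactor)/sqrt(12), about 0.018 for osfactor 16, the same order as the 0.03 quoted. A sketch of that reasoning (an assumption about the rationale, not the exact Doris formula):

    #include <cmath>
    real4 apriori_sigma_offset(uint osfactor)
      {
      return real4(1.0/(real8(osfactor)*std::sqrt(12.0)));  // ~0.018 for 16
      }
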
@@ -5473,7 +6438,7 @@ void ms_timing_error(
   // ______ Find #points > threshold ______
   matrix<real4> Data   = getofffile(i_resfile, THRESHOLD);
   // ______ Data contains the following: ______
-  // Data(i,0) = winnumber; Data(i,1) = posL; Data(i,2) = posP; 
+  // Data(i,0) = winnumber; Data(i,1) = posL; Data(i,2) = posP;
   // Data(i,3) = offL;      Data(i,4) = offP; Data(i,5) = corr;
 
 
@@ -5530,19 +6495,19 @@ void ms_timing_error(
       PRINT_ERROR("ms_timing_error: Number of windows > threshold is smaller than parameters solved for.")
       throw(input_error);
       }
-  
+
     // ______Set up system of equations______
     // ______Order unknowns: A00 A10 A01 A20 A11 A02 A30 A21 A12 A03 for degree=3______
     matrix<real8> yL(Nobs,1);                   // observation
     matrix<real8> yP(Nobs,1);                   // observation
     matrix<real8> A(Nobs,Nunk);                 // designmatrix
     matrix<real8> Qy_1(Nobs,1);                 // a priori covariance matrix (diag)
-  
+
     // ______ Normalize data for polynomial ______
     DEBUG << "ms_timing_error: polynomial normalized by factors: "
          << minL << " " << maxL << " " << minP << " " << maxP << " to [-2,2]";
     DEBUG.print();
-  
+
     // ______Fill matrices______
     DEBUG.print("Setting up design matrix for LS adjustment");
     for (i=0; i<Nobs; i++)
@@ -5551,7 +6516,7 @@ void ms_timing_error(
       real8 posP = normalize(real8(Data(i,2)),minP,maxP);
       yL(i,0)    = real8(Data(i,3));
       yP(i,0)    = real8(Data(i,4));
-      DEBUG << "ms_timing_error: (" << posL << ", "<< posP << "): yL=" 
+      DEBUG << "ms_timing_error: (" << posL << ", "<< posP << "): yL="
             << yL(i,0) << " yP=" << yP(i,0);
       DEBUG.print();
       // ______Set up designmatrix______
@@ -5565,14 +6530,14 @@ void ms_timing_error(
           }
         }
       }
-  
+
 
     // ______Weight matrix data______
     DEBUG.print("Setting up (inverse of) covariance matrix for LS adjustment");
     for (i=0; i<Nobs; i++)
       Qy_1(i,0) = real8(1.0); //unweighted, could be changed later
 
- 
+
     // ______Compute Normalmatrix, right-hand side______
     matrix<real8> N    = matTxmat(A,diagxmat(Qy_1,A));
     rhsL = matTxmat(A,diagxmat(Qy_1,yL));
@@ -5591,14 +6556,14 @@ void ms_timing_error(
     DEBUG << "ms_timing_error: max(abs(N*inv(N)-I)) = " << maxdev;
     DEBUG.print();
     // ___ use trace buffer to store string, remember to rewind it ___
-    if (maxdev > .01) 
+    if (maxdev > .01)
       {
       ERROR << "ms_timing_error: maximum deviation N*inv(N) from unity = " << maxdev
             << ". This is larger than 0.01";
       ERROR.print(ERROR.get_str());
       throw(some_error);
       }
-    else if (maxdev > .001) 
+    else if (maxdev > .001)
       {
       WARNING << "ms_timing_error: maximum deviation N*inv(N) from unity = " << maxdev
               << ". This is between 0.01 and 0.001";
@@ -5615,7 +6580,7 @@ void ms_timing_error(
     matrix<real8> Qe_hat = -Qy_hat;
     for (i=0; i<Nobs; i++)
       Qe_hat(i,i) += (1. / Qy_1(i,0));
-  
+
     // ______Overall model test (variance factor)______
     overallmodeltestL = 0.;
     overallmodeltestP = 0.;
@@ -5630,7 +6595,7 @@ void ms_timing_error(
     DEBUG.print();
     DEBUG << "ms_timing_error: overallmodeltest Pixels = " << overallmodeltestP;
     DEBUG.print();
-  
+
     // ______Datasnooping, assume Qy diag______
     wtestL.resize(Nobs,1);
     wtestP.resize(Nobs,1);
@@ -5644,11 +6609,11 @@ void ms_timing_error(
     maxwL     = max(abs(wtestL),winL,dumm);     // returns winL
     maxwP     = max(abs(wtestP),winP,dumm);     // returns winP
     DEBUG << "maximum wtest statistic azimuth = " << maxwL
-          << " for window number: " 
+          << " for window number: "
           <<  Data(winL,0);
     DEBUG.print();
     DEBUG << "maximum wtest statistic range   = " << maxwP
-          << " for window number: " 
+          << " for window number: "
           <<  Data(winP,0);
     DEBUG.print();
     // --- use summed wtest for outlier detection ---
@@ -5657,7 +6622,7 @@ void ms_timing_error(
     real8 maxwsum = max(wtestsum,winL,dumm);// idx to remove
     DEBUG << "Detected outlier:  summed sqr.wtest = " << maxwsum
           << "; observation: " << winL
-          << "; window number: " 
+          << "; window number: "
           <<  Data(winL,0);
     DEBUG.print();
 
@@ -5686,7 +6651,7 @@ void ms_timing_error(
       }
 
     // ______ Only warn if last iteration has been done ______
-    if (DONE == 1) 
+    if (DONE == 1)
       {
       // ___ use trace buffer to store string, remember to rewind it ___
       if (overallmodeltestL > 10)
diff --git a/src/coregistration.hh b/doris_core/coregistration.hh
similarity index 83%
rename from src/coregistration.hh
rename to doris_core/coregistration.hh
index 6c17048..88aeb96 100755
--- a/src/coregistration.hh
+++ b/doris_core/coregistration.hh
@@ -110,7 +110,9 @@ void mtiming_correlfft(
 
 
 // ______ Distribute nW windows over win ______
-matrix<uint> distributepoints(
+//matrix<uint> distributepoints(
+// [FvL] for correct folding of points outside overlap window when inserted by file
+matrix<int> distributepoints(
         real4                     numberofpoints,
         const window             &win);
 
@@ -129,20 +131,27 @@ void getmodeoffset(
 
 
 // ______ Fine coregistration ______
-void finecoreg(
-        const input_fine        &fineinput,
-        const slcimage  &minfo,
-        const slcimage  &sinfo);
+//void finecoreg(
+//        const input_fine        &fineinput,
+//        const slcimage          &minfo,
+//        const slcimage          &sinfo);
 
+void finecoreg(
+        const input_fine &fineinput,
+        const slcimage   &minfo,
+        const slcimage   &sinfo,
+        const input_ell &ell,
+        orbit           &masterorbit,  // cannot be const for spline
+        orbit           &slaveorbit,   // cannot be const for spline
+        const BASELINE  &baseline);
 
-// ______ Correlation with FFT ______
+// ______ Correlation with FFT [MCC] ______
 real4 coherencefft(
-        //const input_fine      &fineinput, 
         const matrix<complr4>   &Master,
         const matrix<complr4>   &Mask,
-        const uint factor,              // ovs factor (1 for not)
-        const uint AccL,                // search window to oversample
-        const uint AccP,                // search window to oversample
+        const int32 factor,              // ovs factor (1 for not)
+        const int32 AccL,                // search window to oversample
+        const int32 AccP,                // search window to oversample
         real4                   &offsetL,
         real4                   &offsetP);
 
@@ -157,6 +166,16 @@ real4 crosscorrelate(
         real4                   &offsetL,
         real4                   &offsetP);
 
+// ______ Intensity correlation with FFT ______
+real4 intensity(
+        const matrix<complr4>   &Master,
+        const matrix<complr4>   &Mask,
+        const int32 factor,              // ovs factor (1 for not)
+        const int32 AccL,                // search window to oversample
+        const int32 AccP,                // search window to oversample
+        real4                   &offsetL,
+        real4                   &offsetP);
+
 
 // ______ Correlation in space domain ______
 real4 coherencespace(
@@ -192,7 +211,9 @@ void resample(
         const slcimage          &slave,
         const matrix<real8>     &cpmL,
         const matrix<real8>     &cpmP,
-        const int16             &demassist);
+        const int16             &demassist,
+        const matrix<real8>     &minMaxL,//[MCC]
+        const matrix<real8>     &minMaxP);//[MCC]
 
 // ______ Compute master-slave timing error ______
 void ms_timing_error(
diff --git a/src/estorbit.cc b/doris_core/estorbit.cc
similarity index 100%
rename from src/estorbit.cc
rename to doris_core/estorbit.cc
diff --git a/src/estorbit.hh b/doris_core/estorbit.hh
similarity index 100%
rename from src/estorbit.hh
rename to doris_core/estorbit.hh
diff --git a/src/exceptions.cc b/doris_core/exceptions.cc
similarity index 100%
rename from src/exceptions.cc
rename to doris_core/exceptions.cc
diff --git a/src/exceptions.hh b/doris_core/exceptions.hh
similarity index 100%
rename from src/exceptions.hh
rename to doris_core/exceptions.hh
diff --git a/src/filtering.cc b/doris_core/filtering.cc
similarity index 100%
rename from src/filtering.cc
rename to doris_core/filtering.cc
diff --git a/src/filtering.hh b/doris_core/filtering.hh
similarity index 100%
rename from src/filtering.hh
rename to doris_core/filtering.hh
diff --git a/src/geocode.cc b/doris_core/geocode.cc
similarity index 99%
rename from src/geocode.cc
rename to doris_core/geocode.cc
index 648fe72..a32ef6f 100755
--- a/src/geocode.cc
+++ b/doris_core/geocode.cc
@@ -114,7 +114,9 @@ void slant2hschwabisch(
 
   // ______ Distribute points in original master system (not multilooked) ______
   // ______ (i,0): line, (i,1): pixel, (i,2) flagfromdisk (not used here) ______
-  matrix<uint> Position = distributepoints(Npoints,unwrappedinterf.win);
+  //matrix<uint> Position = distributepoints(Npoints,unwrappedinterf.win);
+  // [FvL] for correct folding of points outside overlap window when inserted by file
+  matrix<int> Position = distributepoints(Npoints,unwrappedinterf.win);
 
 
 // ====== STEP 1 ======
diff --git a/src/geocode.hh b/doris_core/geocode.hh
similarity index 100%
rename from src/geocode.hh
rename to doris_core/geocode.hh
diff --git a/src/ioroutines.cc b/doris_core/ioroutines.cc
similarity index 95%
rename from src/ioroutines.cc
rename to doris_core/ioroutines.cc
index b36eed9..e14ef91 100755
--- a/src/ioroutines.cc
+++ b/doris_core/ioroutines.cc
@@ -66,7 +66,7 @@ char *strptime(const char *s, const char  *format,  struct tm *tm);
  *  - screen: cpu and wallclock time                            *
  *                                                              *
  *    Bert Kampes, 11-Dec-1998                                  *
- ****************************************************************/ 
+ ****************************************************************/
 void printcpu(
         bool init)
   {
@@ -104,12 +104,12 @@ void printcpu(
   const real4 cputsec = cput_total - cputmin*60;
   DEBUG << " cputime used for process: \t"
        <<  setw(6) << real4(cput_process)/real4(CLOCKS_PER_SEC)
-       << " sec (total: " 
+       << " sec (total: "
        << setw(4) << cputmin << " min "
        << setw(3) << cputsec << " sec)\n"
-       << "\t   wallclock: \t\t\t" 
+       << "\t   wallclock: \t\t\t"
        << setw(6) << wct_process
-       << " sec (total: " 
+       << " sec (total: "
        << setw(4) << wct_total/60 << " min "
        << setw(3) << wct_total%60 << " sec)";
   DEBUG.print();
@@ -190,13 +190,13 @@ void inittest()
   if (0x0001==*(int *)(&test[0])) littleendian=1;
   #ifdef __X86PROCESSOR__
     DEBUG.print("\"__X86PROCESSOR__\"        defined (little Endian machine)");
-    if (littleendian == 1) 
+    if (littleendian == 1)
       INFO.print("Little Endian machine defined and this is correct.");
     else
       WARNING.print("Little Endian machine defined and this is NOT correct.");
   #else
     DEBUG.print("\"__X86PROCESSOR__\"        not defined (big Endian machine)");
-    if (littleendian == 0) 
+    if (littleendian == 0)
       INFO.print("Big Endian machine defined and this is correct.");
     else
       WARNING.print("Big Endian machine defined and this is NOT correct.");
@@ -219,59 +219,59 @@ void inittest()
   if (tm_ref.tm_hour != 1)  status=1;
   if (tm_ref.tm_min  != 2)  status=1;
   if (tm_ref.tm_sec  != 3)  status=1;
-  if (status == 0) 
+  if (status == 0)
     INFO.print("strptime function works fine.");
   else
     WARNING.print("strptime function seems NOT TO BE OK.");
 
 
   // ______ Some info ______
-  if (sizeof(int16) != 2) 
+  if (sizeof(int16) != 2)
     {
     PRINT_ERROR("code: 900: sizeof int16(short) != 2: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(int32) != 4) 
+  if (sizeof(int32) != 4)
     {
     PRINT_ERROR("code: 900: sizeof int32(int) != 4: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(uint) != 4) 
+  if (sizeof(uint) != 4)
     {
     PRINT_ERROR("code: 900: sizeof uint(unsigned int) != 4: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(real8) != 8) 
+  if (sizeof(real8) != 8)
     {
     PRINT_ERROR("code: 900: sizeof real8(double) != 8: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(real4) != 4) 
+  if (sizeof(real4) != 4)
     {
     PRINT_ERROR("code: 900: sizeof real4(float) != 4: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(compli16) != 4) 
+  if (sizeof(compli16) != 4)
     {
     PRINT_ERROR("code: 900: sizeof compli16(complex short) != 4: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(compli32) != 8) 
+  if (sizeof(compli32) != 8)
     {
     PRINT_ERROR("code: 900: sizeof compli32(complex int) != 8: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(complr4) != 8) 
+  if (sizeof(complr4) != 8)
     {
     PRINT_ERROR("code: 900: sizeof complr4(complex float) != 8: see typedefs in constants.h")
     throw(some_error);
     }
-  if (sizeof(complr8) != 16) 
+  if (sizeof(complr8) != 16)
     {
     PRINT_ERROR("code: 900: sizeof complr16(complex double) != 16: see typedefs in constants.h")
     throw(some_error);
     }
-  if (int32(7.5) != 7) 
+  if (int32(7.5) != 7)
     {
     PRINT_ERROR("code: 900: it is assumed that int(7.5)==7")
     throw(some_error);
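
The run-time size checks above predate C++11; on a newer compiler the same
invariants can be asserted at compile time. A sketch, assuming the constants.h
typedefs map to the usual built-in types:

    #include <complex>

    static_assert(sizeof(short)                == 2,  "int16 must be 2 bytes");
    static_assert(sizeof(int)                  == 4,  "int32 must be 4 bytes");
    static_assert(sizeof(unsigned int)         == 4,  "uint must be 4 bytes");
    static_assert(sizeof(float)                == 4,  "real4 must be 4 bytes");
    static_assert(sizeof(double)               == 8,  "real8 must be 8 bytes");
    static_assert(sizeof(std::complex<float>)  == 8,  "complr4 must be 8 bytes");
    static_assert(sizeof(std::complex<double>) == 16, "complr8 must be 16 bytes");
    static_assert(int(7.5) == 7, "float-to-int conversion must truncate");
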
@@ -324,7 +324,7 @@ bool doinitwrite(
     {
     if (!(generalinput.process[pr_m_readfiles]  ||
           generalinput.process[pr_m_crop]       ||
-          generalinput.process[pr_m_oversample] || 
+          generalinput.process[pr_m_oversample] ||
           generalinput.process[pr_m_porbits]    ||
 	  generalinput.process[pr_m_morbits]    || //[HB]
           generalinput.process[pr_m_simamp]     || //[MA] 2008
@@ -339,7 +339,7 @@ bool doinitwrite(
     {
     if (!(generalinput.process[pr_s_readfiles]  ||
           generalinput.process[pr_s_crop]       ||
-          generalinput.process[pr_s_oversample] || 
+          generalinput.process[pr_s_oversample] ||
           generalinput.process[pr_s_porbits]    ||
 	  generalinput.process[pr_s_morbits]    || //[HB]
           generalinput.process[pr_s_simamp]     || //[MA] 200903. fake entry
@@ -509,7 +509,7 @@ void initwrite(
               << processcontrol[pr_m_crop]        <<   " \t\t\t0\n"
               << processcontrol[pr_m_simamp]      <<   " \t\t0\n"    //[MA] 2008
               << processcontrol[pr_m_mtiming]     <<   " \t\t0\n"    //[MA] 2008
-              << processcontrol[pr_m_oversample]  <<   " \t\t0\n" 
+              << processcontrol[pr_m_oversample]  <<   " \t\t0\n"
               << processcontrol[pr_m_resample]    <<   " \t\t0\n"    //[MA] 2009. fake entry to make slc.res files equivalent
               << processcontrol[pr_m_filtazi]     <<   " \t\t0\n"
               << processcontrol[pr_m_filtrange]   <<   " \t\t0\n"
@@ -526,7 +526,7 @@ void initwrite(
               << processcontrol[pr_s_crop]        <<   " \t\t\t0\n"
               << processcontrol[pr_s_simamp]      <<   " \t\t0\n"    //[MA] 2009. fake entry
               << processcontrol[pr_s_mtiming]     <<   " \t\t0\n"    //[MA] 2009. fake entry no processing is defined
-              << processcontrol[pr_s_oversample]  <<   " \t\t0\n" 
+              << processcontrol[pr_s_oversample]  <<   " \t\t0\n"
               << processcontrol[pr_s_resample]    <<   " \t\t0\n"
               << processcontrol[pr_s_filtazi]     <<   " \t\t0\n"
               << processcontrol[pr_s_filtrange]   <<   " \t\t0\n"
@@ -652,10 +652,10 @@ void getanswer(
   {
   TRACE_FUNCTION("getanswer (BK 11-Dec-1998)")
   char dummychar;
-  cerr << "\n Press <ENTER> to continue."; 
+  cerr << "\n Press <ENTER> to continue.";
   cin.unsetf(ios::skipws);      // ignore ws (just enter)
   cin >> dummychar;
-  cerr << " continuing...\n"; 
+  cerr << " continuing...\n";
   } // END getanswer
 
 
@@ -686,7 +686,7 @@ void getanswer(
  ****************************************************************/
 bool readres(
         char* returnword,
-        const int16 sizeofrw, 
+        const int16 sizeofrw,
         const char* file,
         const char* pattern,
         const int16 skipwords,      //=0 default
@@ -831,7 +831,7 @@ void updateprocesscontrol(
   //ifstream tmpfile2("scratchcopy", ios::in | ios::nocreate);  // temporary copy
   ifstream tmpfile2("scratchcopy", ios::in);    // temporary copy
   bk_assert(tmpfile2,"updateprocesscontrols: scratchcopy",__FILE__,__LINE__);
-  ofstream resfile2(file, ios::out | ios::trunc);                  // do replace ! 
+  ofstream resfile2(file, ios::out | ios::trunc);                  // do replace !
   bk_assert(resfile2,file,__FILE__,__LINE__);
 
 
@@ -849,7 +849,7 @@ void updateprocesscontrol(
     tmpfile2 >> word;
     resfile2 << word;
     tmpfile2.getline(dummyline,4*ONE27,'\n');     // go to next line
-    if (!strcmp(word,"End_process_control")) 
+    if (!strcmp(word,"End_process_control"))
       {
       resfile2 << endl;
       break;
@@ -871,7 +871,7 @@ void updateprocesscontrol(
       else if (!strcmp(word,processcontrol[pr_m_mtiming]))
         (checkprocess[pr_m_mtiming])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";      //[MA] 2008
       else if (!strcmp(word,processcontrol[pr_m_oversample]))
-        (checkprocess[pr_m_oversample])   ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n"; 
+        (checkprocess[pr_m_oversample])   ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
       else if (!strcmp(word,processcontrol[pr_m_resample]))                                 //[MA] 2009, fake entry no processing is defined
         (checkprocess[pr_m_resample])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
       else if (!strcmp(word,processcontrol[pr_m_filtazi]))
@@ -880,7 +880,7 @@ void updateprocesscontrol(
         (checkprocess[pr_m_filtrange])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
       else if (!strcmp(word,processcontrol[pr_m_EXTRA]))
         (checkprocess[pr_m_EXTRA])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
-      else 
+      else
         {
         ERROR << "PANIC: forgotten to update routine? " << word
              << " not recognized in master resultfile.";
@@ -912,7 +912,7 @@ void updateprocesscontrol(
         (checkprocess[pr_s_filtrange])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
       else if (!strcmp(word,processcontrol[pr_s_EXTRA]))
         (checkprocess[pr_s_EXTRA])  ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
-      else 
+      else
         {
         ERROR << "PANIC: forgotten to update routine? " << word
              << " not recognized in slave resultfile.";
@@ -960,7 +960,7 @@ void updateprocesscontrol(
         (checkprocess[pr_i_dinsar]) ? resfile2 << "\t\t\t1\n" : resfile2 << "\t\t\t0\n";
       else if (!strcmp(word,processcontrol[pr_i_EXTRA2]))
         (checkprocess[pr_i_EXTRA2]) ? resfile2 << "\t\t1\n" : resfile2 << "\t\t0\n";
-      else 
+      else
         {
         ERROR << "PANIC: forgotten to update routine? " << word
              << " not recognized in interferogram resultfile.";
@@ -1063,17 +1063,17 @@ void checkprocessing(
         while (strcmp(dummyline,"Start_process_control"))
           {
           resfile.getline(dummyline,4*ONE27,'\n');
-          DEBUG << "read line: " << dummyline << ends; 
+          DEBUG << "read line: " << dummyline << ends;
           DEBUG.print();
           linecnt++;
-          if (linecnt==100) 
+          if (linecnt==100)
             {
             WARNING << "Checked first 100 lines, did not find: \"Start_process_control\" in file: " << file ;
             WARNING.print();
             break;
             }
           }
-  
+
         // ______ Read processcontrols into array checkprocess ______
         linecnt=0;
         while (strcmp(dummyline,"End_process_control"))
@@ -1083,14 +1083,14 @@ void checkprocessing(
           DEBUG.print();
           fillprocessed(dummyline,checkprocess,fileid);
           linecnt++;
-          if (linecnt==100) 
+          if (linecnt==100)
             {
             WARNING << "Checked first 100 lines, did not find: \"End_process_control\"   in file: " << file ;
             WARNING.print();
             break;
             }
           }
-       
+
         // ______ Read resultsections in file for array checkprocess2 ______
         resfile.getline(dummyline,4*ONE27,'\n');          // read line
         while(resfile)
@@ -1101,7 +1101,7 @@ void checkprocessing(
           DEBUG.print();
           }
         resfile.close();
-      
+
         // ______ Check resultsections with process control flags ______
         bool dofixprocesscontrol=false;
         for (i=0;i<NUMPROCESSES;i++)
@@ -1109,31 +1109,31 @@ void checkprocessing(
           if (checkprocess[i] != checkprocess2[i])
             {
             dofixprocesscontrol=true;
-            WARNING << "Step: " << i << " (" << processcontrol[i] 
+            WARNING << "Step: " << i << " (" << processcontrol[i]
                  << ") ";
             if (checkprocess[i]==1)
-              WARNING << "in process control flag, but result is not in \"" 
+              WARNING << "in process control flag, but result is not in \""
                    << file << "\".";
             else
-              WARNING << "not in process control flag, but result is in \"" 
+              WARNING << "not in process control flag, but result is in \""
                    << file << "\".";
             WARNING.print();
             if (generalinput.interactive) getanswer();
             } // something is wrong
           } // for all steps
-  
+
         // ______ Check if repairs have to be made ______
         if (dofixprocesscontrol)
           {
           if (breakit == 1)
             {
-            cerr << "\nAlready tried to fix process controls. Should I try again?\n"; 
+            cerr << "\nAlready tried to fix process controls. Should I try again?\n";
             getanswer();
             }
           updateprocesscontrol(file, fileid);                   // repair routine
           breakit += 1;                                         // only one try
           }
-        else // nothing strange 
+        else // nothing strange
           breakit = 10;                                         // > stop condition
       } // try to repair
     } // existed(file)
@@ -1156,19 +1156,19 @@ void checkprocessing(
     checkrequest(pr_m_crop,checkprocesstmp,
                  1,pr_m_readfiles);                               // required (RN)?
     //checkrequest(pr_m_crop,checkprocesstmp,0);                  // no requirements
-  if (generalinput.process[pr_m_oversample])                      // requested 
-    checkrequest(pr_m_oversample,checkprocesstmp,1,pr_m_crop);    // oversample requires a cropped image 
+  if (generalinput.process[pr_m_oversample])                      // requested
+    checkrequest(pr_m_oversample,checkprocesstmp,1,pr_m_crop);    // oversample requires a cropped image
   if (generalinput.process[pr_m_porbits])                         // requested
     checkrequest(pr_m_porbits,checkprocesstmp, 1,pr_m_readfiles); // required for time info
     if (checkprocess2[NUMPROCESSES])                              // extra check
       DEBUG.print("orbits from leader file will be deleted.");
   if (generalinput.process[pr_m_morbits])                         // requested [HB]
-    checkrequest(pr_m_morbits,checkprocesstmp,                    // required 
+    checkrequest(pr_m_morbits,checkprocesstmp,                    // required
 		 1,pr_m_readfiles);
   if (generalinput.process[pr_m_simamp])                          // requested [MA]
-    checkrequest(pr_m_simamp,checkprocesstmp, 1,pr_m_crop);       // amplitude simulation requires crop step 
+    checkrequest(pr_m_simamp,checkprocesstmp, 1,pr_m_crop);       // amplitude simulation requires crop step
   if (generalinput.process[pr_m_mtiming])                         // requested [MA]
-    checkrequest(pr_m_mtiming,checkprocesstmp, 1,pr_m_simamp);       // correlation with simulated amplitude requires a simulated amplitude image 
+    checkrequest(pr_m_mtiming,checkprocesstmp, 1,pr_m_simamp);       // correlation with simulated amplitude requires a simulated amplitude image
   if (generalinput.process[pr_m_filtazi])                         // requested
     checkrequest(pr_m_filtazi,checkprocesstmp,
                  1,pr_m_crop);                                    // required
@@ -1181,13 +1181,13 @@ void checkprocessing(
   if (generalinput.process[pr_s_crop])                          // requested
     checkrequest(pr_s_crop,checkprocesstmp,
                  1,pr_s_readfiles);                             // required for check
-  if (generalinput.process[pr_s_oversample])                    // requested  
-    checkrequest(pr_s_oversample,checkprocesstmp,1,pr_s_crop);  // oversample requires a cropped image 
+  if (generalinput.process[pr_s_oversample])                    // requested
+    checkrequest(pr_s_oversample,checkprocesstmp,1,pr_s_crop);  // oversample requires a cropped image
   if (generalinput.process[pr_s_porbits])                       // requested
     checkrequest(pr_s_porbits,checkprocesstmp,
                  1,pr_s_readfiles);                             // required for time info
   if (generalinput.process[pr_s_morbits])                         // requested  [HB]
-    checkrequest(pr_s_morbits,checkprocesstmp,                    // required 
+    checkrequest(pr_s_morbits,checkprocesstmp,                    // required
 		 1,pr_s_readfiles);
     if (checkprocess2[NUMPROCESSES])                            // extra check
       DEBUG.print("orbits from leader file will be deleted.");
@@ -1226,7 +1226,7 @@ void checkprocessing(
     3,pr_m_crop,pr_s_crop,pr_i_fine); //[FvL]
   if (generalinput.process[pr_i_coregpm])                       // requested
     checkrequest(pr_i_coregpm,checkprocesstmp,
-                 1,pr_i_fine); 
+                 1,pr_i_fine);
 
   // this should go here...
   // BK 24-Aug-2000
@@ -1260,7 +1260,7 @@ void checkprocessing(
     checkrequest(pr_i_unwrap,checkprocesstmp,
     1,pr_i_interfero);                                          // required
   if (generalinput.process[pr_i_estorbits])                     // requested [HB]
-    checkrequest(pr_i_estorbits,checkprocesstmp,                // required 
+    checkrequest(pr_i_estorbits,checkprocesstmp,                // required
 		 1,pr_i_subtrrefpha);
   if (generalinput.process[pr_i_slant2h])                       // requested
     checkrequest(pr_i_slant2h,checkprocesstmp,
@@ -1307,8 +1307,8 @@ void checkrequest(
     ERROR << "Results of step: "
          << step << " (" << processcontrol[step]
          << ") already in result file.";
-    ERROR << "\n TIP    : use \'doris.rmstep.sh\' to cleanup " <<  processcontrol[step] 
-          << " entries in result file." ;  // [MA] TODO report result filename file  or general.? 
+    ERROR << "\n TIP    : use \'doris.rmstep.sh\' to cleanup " <<  processcontrol[step]
+          << " entries in result file." ;  // [MA] TODO report result filename file  or general.?
     PRINT_ERROR(ERROR.get_str())
     throw(input_error);
     }
@@ -1320,7 +1320,7 @@ void checkrequest(
   va_list arglist;                              // use ellipses
   va_start(arglist,alreadyprocess);
   /* *** SOME compiler required the second form, though it seems wrong,
-     *** in order to compile doris comment out the second, and put a comment before 
+     *** in order to compile doris comment out the second, and put a comment before
      *** the first form. */
   // seems that while passing '...' type is converted to int, so use that here...
   //int16 N = va_arg(arglist, int16);             // number of arguments=first ellipses
@@ -1330,7 +1330,7 @@ void checkrequest(
   for (register int32 i=0; i<N; i++)
     {
     /* *** SOME compiler required the second form, though it seems wrong,
-       *** in order to compile doris comment out the second, and put a comment before 
+       *** in order to compile doris comment out the second, and put a comment before
        *** the first form. */
     //requiredstep = va_arg(arglist, int16);
     requiredstep = va_arg(arglist, int);
@@ -1340,7 +1340,7 @@ void checkrequest(
       ERROR << "Requested step: "
             << step << " (" << processcontrol[step]
             << ") seems impossible, because step "
-            << requiredstep << " (" << processcontrol[requiredstep] 
+            << requiredstep << " (" << processcontrol[requiredstep]
             << ") is not in resultfile.";
       PRINT_ERROR(ERROR.get_str())
       throw(input_error);// exit
@@ -1629,11 +1629,11 @@ void fillcheckprocess(
     else if (!strcmp(line,endnormal[pr_m_crop]))
       checkprocess[pr_m_crop]=1;
     else if (!strcmp(line,endnormal[pr_m_simamp]))             // [MA]
-      checkprocess[pr_m_simamp]=1; 
+      checkprocess[pr_m_simamp]=1;
     else if (!strcmp(line,endnormal[pr_m_mtiming]))            // [MA]
-      checkprocess[pr_m_mtiming]=1; 
-    else if (!strcmp(line,endnormal[pr_m_oversample])) 
-      checkprocess[pr_m_oversample]=1; 
+      checkprocess[pr_m_mtiming]=1;
+    else if (!strcmp(line,endnormal[pr_m_oversample]))
+      checkprocess[pr_m_oversample]=1;
     else if (!strcmp(line,endnormal[pr_m_filtazi]))
       checkprocess[pr_m_filtazi]=1;
     else if (!strcmp(line,endnormal[pr_m_filtrange]))
@@ -1654,8 +1654,8 @@ void fillcheckprocess(
       checkprocess[pr_s_morbits]=1;
     else if (!strcmp(line,endnormal[pr_s_crop]))
       checkprocess[pr_s_crop]=1;
-    else if (!strcmp(line,endnormal[pr_s_oversample])) 
-      checkprocess[pr_s_oversample]=1; 
+    else if (!strcmp(line,endnormal[pr_s_oversample]))
+      checkprocess[pr_s_oversample]=1;
     else if (!strcmp(line,endnormal[pr_s_filtazi]))
       checkprocess[pr_s_filtazi]=1;
     else if (!strcmp(line,endnormal[pr_s_filtrange]))
@@ -1748,7 +1748,7 @@ void fillprocessed(
   int32         tin;
 
   int32 linesz = strlen(line);                  // w/o \0
-  if (linesz > 30) 
+  if (linesz > 30)
     return;
   char  word[4*ONE27];                            // should be enough
 // ______ Disect line ______
@@ -1764,12 +1764,12 @@ void fillprocessed(
         word[i]='\0';                                   // replace space by \0
         break;                                          // for
         }
-      else 
+      else
         {
         return;
         }
       }
-    }                                   
+    }
   if (!space) return;                           // must be a space in line
 
   int16 processflag;
@@ -1777,7 +1777,7 @@ void fillprocessed(
     processflag=0;
   else if (line[linesz-1]=='1')
     processflag=1;
-  else 
+  else
     return;
 
   // ====== Fill process control ======
@@ -1793,11 +1793,11 @@ void fillprocessed(
       else if (!strcmp(word,processcontrol[pr_m_crop]))
         checkprocess[pr_m_crop]=processflag;
       else if (!strcmp(word,processcontrol[pr_m_simamp]))        // [MA]
-        checkprocess[pr_m_simamp]=processflag; 
+        checkprocess[pr_m_simamp]=processflag;
       else if (!strcmp(word,processcontrol[pr_m_mtiming]))       // [MA]
-        checkprocess[pr_m_mtiming]=processflag; 
-      else if (!strcmp(word,processcontrol[pr_m_oversample])) 
-        checkprocess[pr_m_oversample]=processflag; 
+        checkprocess[pr_m_mtiming]=processflag;
+      else if (!strcmp(word,processcontrol[pr_m_oversample]))
+        checkprocess[pr_m_oversample]=processflag;
       else if (!strcmp(word,processcontrol[pr_m_filtazi]))
         checkprocess[pr_m_filtazi]=processflag;
       else if (!strcmp(word,processcontrol[pr_m_filtrange]))
@@ -1820,8 +1820,8 @@ void fillprocessed(
         checkprocess[pr_s_morbits]=processflag;
       else if (!strcmp(word,processcontrol[pr_s_crop]))
         checkprocess[pr_s_crop]=processflag;
-      else if (!strcmp(word,processcontrol[pr_s_oversample])) 
-        checkprocess[pr_s_oversample]=processflag; 
+      else if (!strcmp(word,processcontrol[pr_s_oversample]))
+        checkprocess[pr_s_oversample]=processflag;
       else if (!strcmp(word,processcontrol[pr_s_filtazi]))
         checkprocess[pr_s_filtazi]=processflag;
       else if (!strcmp(word,processcontrol[pr_s_filtrange]))
@@ -1997,7 +1997,7 @@ void removedatleader(
     {
     ifile.getline(dummyline,4*ONE27,'\n');
     }
-  ifile.getline(dummyline,4*ONE27,'\n');                                          
+  ifile.getline(dummyline,4*ONE27,'\n');
 
   // ______Copy rest of file______
   while (ifile)
@@ -2103,11 +2103,11 @@ window getoverlap(
   real8 approxoffL = cpmL(0,0);                         // zero order term;
   real8 approxoffP = cpmP(0,0);                         // zero order term;
 
-//  real8 sL00 = slave.currentwindow.linelo - 
+//  real8 sL00 = slave.currentwindow.linelo -
 //                polyval(slave.currentwindow.linelo - approxoffL,
 //                        slave.currentwindow.pixlo  - approxoffP, cpmL);
 // ______ Use normalized polynomial ______
-  const real8 sL00 = slave.currentwindow.linelo - 
+  const real8 sL00 = slave.currentwindow.linelo -
        polyval(normalize(real8(slave.currentwindow.linelo)-approxoffL,minL,maxL),
                normalize(real8(slave.currentwindow.pixlo) -approxoffP,minP,maxP),
                cpmL);
@@ -2186,12 +2186,12 @@ window getoverlap(
         const real8         &timing_P)
   {
     TRACE_FUNCTION("getoverlap (FvL 22-SEP-07)")
-      
+
     real8 ml0 = master.currentwindow.linelo;
     real8 mlN = master.currentwindow.linehi;
     real8 mp0 = master.currentwindow.pixlo;
     real8 mpN = master.currentwindow.pixhi;
-    
+
     real8 sl00 = slave.currentwindow.linelo+slave.slavemasteroffsets.l00+Npointsd2-timing_L;
     real8 sp00 = slave.currentwindow.pixlo+slave.slavemasteroffsets.p00+Npointsd2-timing_P;
     real8 sl0N = slave.currentwindow.linelo+slave.slavemasteroffsets.l0N+Npointsd2-timing_L;
@@ -2200,7 +2200,7 @@ window getoverlap(
     real8 spN0 = slave.currentwindow.pixlo+slave.slavemasteroffsets.pN0+Npointsd2-timing_P;
     real8 slNN = slave.currentwindow.linehi+slave.slavemasteroffsets.lNN-Npointsd2-timing_L;
     real8 spNN = slave.currentwindow.pixhi+slave.slavemasteroffsets.pNN-Npointsd2-timing_P;
-    
+
     matrix<real8> mh1sv1(2,1), mh1sv2(2,1), mh2sv1(2,1), mh2sv2(2,1),
       mv1sh1(2,1), mv1sh2(2,1), mv2sh1(2,1), mv2sh2(2,1);
     lineintersect(ml0,mp0,ml0,mpN,sl00,sp00,slN0,spN0,mh1sv1);
@@ -2211,19 +2211,19 @@ window getoverlap(
     lineintersect(ml0,mp0,mlN,mp0,slN0,spN0,slNN,spNN,mv1sh2);
     lineintersect(ml0,mpN,mlN,mpN,sl00,sp00,sl0N,sp0N,mv2sh1);
     lineintersect(ml0,mpN,mlN,mpN,slN0,spN0,slNN,spNN,mv2sh2);
-    
+
     real8 overlap_l0 = max(max(max(max(max(max(ml0,sl00),sl0N),mh1sv1(0,0)),mh1sv2(0,0)),mv1sh1(0,0)),mv2sh1(0,0));
     real8 overlap_p0 = max(max(max(max(max(max(mp0,sp00),spN0),mh1sv1(1,0)),mh2sv1(1,0)),mv1sh1(1,0)),mv1sh2(1,0));
     real8 overlap_lN = min(min(min(min(min(min(mlN,slN0),slNN),mh2sv1(0,0)),mh2sv2(0,0)),mv1sh2(0,0)),mv2sh2(0,0));
     real8 overlap_pN = min(min(min(min(min(min(mpN,sp0N),spNN),mh1sv2(1,0)),mh2sv2(1,0)),mv2sh1(1,0)),mv2sh2(1,0));
-    
+
     // ______Corners of overlap master,slave in master system______
     window overlap;
     overlap.linelo = int32(ceil(overlap_l0));
     overlap.linehi = int32(floor(overlap_lN));
     overlap.pixlo = int32(ceil(overlap_p0));
     overlap.pixhi = int32(floor(overlap_pN));
-    
+
     return overlap;
   } // END getoverlap
 
@@ -2247,14 +2247,14 @@ void lineintersect(
                    matrix<real8> &exy)
       {
         TRACE_FUNCTION("lineintersect (FvL 22-SEP-2007)")
-  
+
         real8 u1 = bx-ax;
         real8 u2 = by-ay;
         real8 v1 = dx-cx;
         real8 v2 = dy-cy;
         real8 w1 = ax-cx;
         real8 w2 = ay-cy;
-        
+
         real8 s = (v2*w1-v1*w2)/(v1*u2-v2*u1);
         exy(0,0) = ax+s*u1;
         exy(1,0) = ay+s*u2;
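
lineintersect() solves the parametric form: with u = b-a, v = d-c, w = a-c,
the crossing parameter is s = (v2*w1 - v1*w2) / (v1*u2 - v2*u1) and the point
is a + s*u. For (near-)parallel segments the denominator vanishes; a guarded
standalone sketch (the guard and its threshold are an addition, not part of
the Doris routine):

    #include <cmath>
    #include <stdexcept>

    void lineintersect_guarded(double ax, double ay, double bx, double by,
                               double cx, double cy, double dx, double dy,
                               double &ex, double &ey)
      {
      const double u1 = bx - ax, u2 = by - ay;
      const double v1 = dx - cx, v2 = dy - cy;
      const double w1 = ax - cx, w2 = ay - cy;
      const double denom = v1*u2 - v2*u1;
      if (std::fabs(denom) < 1e-12)
        throw std::runtime_error("lineintersect: segments (near-)parallel");
      const double s = (v2*w1 - v1*w2) / denom;
      ex = ax + s*u1;
      ey = ay + s*u2;
      }
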
@@ -2299,7 +2299,7 @@ matrix<real8> readcoeff(
     infile >> word;
     if (strcmp(pattern,word))                           // no pattern match.
       {
-      infile.getline(dummyline,ONE27,'\n');             // goto next line.
+      infile.getline(dummyline,4*ONE27,'\n');             // goto next line.
       }
     else                                                // pattern match.
       {
@@ -2332,6 +2332,74 @@ matrix<real8> readcoeff(
   } // END readcoeff
 
 
+/****************************************************************
+ *    readnormcoeff                                             *
+ *                                                              *
+ * Pattern is searched in file (1st word);                      *
+ * the two normalization coefficients (min, max) are assumed    *
+ * to follow Pattern on the same line                           *
+ *                                                              *
+ * e.g.: readnormcoeff(resfile,"Normalization_Lines:")          *
+ *                                                              *
+ * input:                                                       *
+ *  - file name to search                                       *
+ *  - pattern to search for                                     *
+ *                                                              *
+ * output:                                                      *
+ *  - matrix<real8> coefficients(2,1)                           *
+ *                                                              *
+ *    Bert Kampes, 12-Mar-1999                                  *
+ ****************************************************************/
+matrix<real8> readnormcoeff(
+        const char* file,
+        const char* pattern)
+  {
+  TRACE_FUNCTION("readcoeff (BK 12-Mar-1999)")
+  char                  dummyline[ONE27];
+  char                  word[EIGHTY];
+  bool                  foundword = false;
+  matrix<real8>         coeffs(2,1);               // store coefficients
+
+  ifstream infile(file, ios::in);
+  bk_assert(infile,file,__FILE__,__LINE__);
+
+  // ====== Search infile ======
+  while (infile)
+    {
+    infile >> word;
+    if (strcmp(pattern,word))                           // no pattern match.
+      {
+      infile.getline(dummyline,4*ONE27,'\n');             // goto next line.
+      }
+    else                                                // pattern match.
+      {
+      foundword = true;
+      infile >> coeffs(0,0) >> coeffs(1,0);
+      break;                                            // file
+      }                                                 // else
+    }                                                   // file
+  infile.close();
+
+  if (!foundword)
+    {
+    ERROR << "readcoeff: file: " << file
+         << ": could not find string \"" << pattern << "\".";
+    PRINT_ERROR(ERROR.get_str());
+    throw(file_error);
+    }
+  else
+    {
+    INFO << "read: " << 2 << " coefficients after: \""
+         << pattern << "\"";
+    INFO.print();
+    }
+  return coeffs;
+  } // END readnormcoeff
+
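
Unlike readcoeff(), which reads Ncoeff coefficients from the lines following
the pattern, readnormcoeff() expects exactly two values (min and max) on the
same line as the pattern. A simplified standalone sketch of that lookup (the
file name is hypothetical; unlike the real routine, this matches the keyword
anywhere, not only as the first word of a line):

    #include <fstream>
    #include <iostream>
    #include <string>

    int main() {
      std::ifstream res("master_slave.res");
      std::string word;
      double mn = 0.0, mx = 0.0;
      while (res >> word)
        if (word == "Normalization_Lines:") { res >> mn >> mx; break; }
      std::cout << "lines normalized from [" << mn << "," << mx << "]\n";
      return 0;
    }
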
 
 /****************************************************************
  *    openfstream                                               *
@@ -2350,6 +2418,8 @@ void openfstream(
         const char* ifilename)
   {
   TRACE_FUNCTION("openfstream (BK 11-Sep-2004)")
+  INFO << "Opening input file: " << ifilename;
+  INFO.print();
   DEBUG << "Opening input file: " << ifilename;
   DEBUG.print();
   #ifdef __NO_IOS_BINARY__
@@ -2394,7 +2464,7 @@ void openfstream(
     {
     if (existed(ofilename) == true)
       {
-      ERROR << "output file \": " 
+      ERROR << "output file \": "
             << ofilename << "\" exists, use OVERWRITE ON";
       PRINT_ERROR(ERROR.get_str())
       throw(file_error);
@@ -2488,7 +2558,7 @@ void bk_assert(
 /****************************************************************
  *    tolower                                                   *
  *                                                              *
- * Convert string to lower case                                 * 
+ * Convert string to lower case                                 *
  * input:                                                       *
  *  - __LINE__                                                  *
  * output:                                                      *
@@ -2512,7 +2582,7 @@ void tolower(char *s)
 /****************************************************************
  *    toupper                                                   *
  *                                                              *
- * Convert string to upper case                                 * 
+ * Convert string to upper case                                 *
  * input:                                                       *
  *  - __LINE__                                                  *
  * output:                                                      *
@@ -2526,8 +2596,11 @@ void toupper(char *s)
   #ifdef WIN32
     s = _strupr(s);// Jia
   #else
-  while (*s != '\0')
-    *s++ = toupper(*s);                 // cctype
+  while (*s != '\0') {
+    //*s++ = toupper(*s);  // [AV] does not work correctly with g++ 4.8
+    *s = std::toupper(*s);                 // cctype
+    ++s;
+  }
   #endif
   }
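
The replaced one-liner `*s++ = toupper(*s);` reads and modifies s in the same
expression; before C++17 the two accesses are unsequenced, so the result is
compiler-dependent (hence the [AV] note about g++ 4.8). A standalone sketch of
the corrected loop, additionally casting through unsigned char, which
std::toupper requires for characters outside the ASCII range:

    #include <cctype>

    void str_toupper(char *s)
      {
      for (; *s != '\0'; ++s)
        *s = static_cast<char>(std::toupper(static_cast<unsigned char>(*s)));
      }
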
 
@@ -2535,7 +2608,7 @@ void toupper(char *s)
 /****************************************************************
  *    int2str                                                   *
  *                                                              *
- * Convert integer to string                                    * 
+ * Convert integer to string                                    *
  * input:                                                       *
  *  - integer                                                   *
  * output:                                                      *
@@ -2543,7 +2616,7 @@ void toupper(char *s)
  *                                                              *
  *    Mahmut Arikan, 23-Oct-2008                                *
  ****************************************************************/
-string int2str(const int &integer) 
+string int2str(const int &integer)
         {
         TRACE_FUNCTION("int2str (MA 23-Oct-2008)")
         ostringstream datastream;
diff --git a/src/ioroutines.hh b/doris_core/ioroutines.hh
similarity index 97%
rename from src/ioroutines.hh
rename to doris_core/ioroutines.hh
index 4de8b2e..83c4f09 100755
--- a/src/ioroutines.hh
+++ b/doris_core/ioroutines.hh
@@ -105,6 +105,10 @@ matrix<real8> readcoeff(
         const char* pattern,
         const int16 Ncoefficients);
 
+// ______ Read normalization coefficients from ifgs result file ______
+matrix<real8> readnormcoeff(
+        const char* file,
+        const char* pattern);
 
 // ______ Updates process_control in resultfiles ______
 void updateprocesscontrol(
diff --git a/src/matrix_test.cc b/doris_core/matrix_test.cc
similarity index 100%
rename from src/matrix_test.cc
rename to doris_core/matrix_test.cc
diff --git a/src/matrixbk.cc b/doris_core/matrixbk.cc
similarity index 100%
rename from src/matrixbk.cc
rename to doris_core/matrixbk.cc
diff --git a/src/matrixbk.hh b/doris_core/matrixbk.hh
similarity index 99%
rename from src/matrixbk.hh
rename to doris_core/matrixbk.hh
index 37c62d8..03685a5 100755
--- a/src/matrixbk.hh
+++ b/doris_core/matrixbk.hh
@@ -647,6 +647,8 @@ friend void readfile(matrix<Type> &Result, const char *file,
 
   const uint lines  = win.lines();
   const uint pixels = win.pixels();
+  //INFO << "lines " << lines <<"  "  << pixels;
+  //INFO.print();
   //const uint start  = ((win.linelo-1)*filepixels+win.pixlo-1)*sizepixel; [MA]
   const uint64 start  = (uint64)((win.linelo-1)*filepixels+win.pixlo-1)*sizepixel; // both sides should have the same type to
                                                                                   //  detect/eliminate integer overflow [MA]
@@ -660,6 +662,11 @@ friend void readfile(matrix<Type> &Result, const char *file,
   for (register uint lin=0; lin<lines; ++lin)
     {
     // read data at row: lin
+    // INFO << "lin  " << lin << endl;
+    // INFO << "seek " << start+filepixels*lin*sizepixel << endl;
+    // INFO << "read " << pixels*sizepixel << endl;
+    // INFO.print();
+
     ifile.seekg(start+filepixels*lin*sizepixel,ios::beg);
     ifile.read((char*)&Result.data[lin][0],pixels*sizepixel);
     }
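
The uint64 cast above is what keeps the seek offset from wrapping: the byte
offset ((linelo-1)*filepixels + pixlo-1)*sizepixel easily exceeds 2^32 for
large rasters. A sketch with hypothetical but realistic sizes:

    #include <cstdint>
    #include <iostream>

    int main() {
      std::uint32_t linelo = 60000, filepixels = 30000, pixlo = 1, sizepixel = 8;
      std::uint32_t bad  = ((linelo - 1) * filepixels + pixlo - 1) * sizepixel;
      std::uint64_t good = (std::uint64_t(linelo - 1) * filepixels + pixlo - 1)
                           * sizepixel;
      std::cout << "32-bit offset: " << bad            // wrapped modulo 2^32
                << "\n64-bit offset: " << good << '\n';
      return 0;
    }
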
diff --git a/src/matrixspecs.cc b/doris_core/matrixspecs.cc
similarity index 100%
rename from src/matrixspecs.cc
rename to doris_core/matrixspecs.cc
diff --git a/src/newsincev2.4 b/doris_core/newsincev2.4
similarity index 100%
rename from src/newsincev2.4
rename to doris_core/newsincev2.4
diff --git a/src/orbitbk.cc b/doris_core/orbitbk.cc
similarity index 100%
rename from src/orbitbk.cc
rename to doris_core/orbitbk.cc
diff --git a/src/orbitbk.hh b/doris_core/orbitbk.hh
similarity index 100%
rename from src/orbitbk.hh
rename to doris_core/orbitbk.hh
diff --git a/src/processor.cc b/doris_core/processor.cc
similarity index 97%
rename from src/processor.cc
rename to doris_core/processor.cc
index fcd107e..9ce084a 100755
--- a/src/processor.cc
+++ b/doris_core/processor.cc
@@ -130,7 +130,7 @@ int main(
   #endif
 
   // ====== ident string for `what doris` ======
-  char ident[] = "@(#)Doris InSAR software, $Revision: 4.06.2 $, $Author: TUDelft $";
+  char ident[] = "@(#)Doris InSAR software, $Revision: 4.0.8 $, $Author: TUDelft $";
   cerr << endl;
   INFO.print(ident);//use ident so it is not optimized away
 
@@ -426,7 +426,7 @@ int main(
       if (status != 0)                                                          // [MA] TODO make it a function
         {
         ERROR << "tsx_dump_header2doris.py: failed with exit code: " << status;
-        PRINT_ERROR(ERROR.get_str())
+        PRINT_ERROR(ERROR.get_str());
         throw(some_error);
         }
       INFO.reset();
@@ -526,10 +526,16 @@ int main(
     DEBUG.print("Time spent for reading files master:");
     printcpu(); 
     }
-
+
+  // INFO << "\n master res : " << input_general.m_resfile << "\n";
+  // INFO.print();
+  // MCC
+
 // ______Fill slcimage struct______
   if (existed(input_general.m_resfile))
     {
+  //  INFO << "\n master res : " << input_general.m_resfile << " EXISTS!\n";
+  //  INFO.print();
     master.fillslcimage(input_general.m_resfile);
     interferogram.win = master.currentwindow;
     }
@@ -1922,6 +1928,30 @@ int main(
     master.coarseoffsetL = -slave.coarseoffsetL;        // (never used)
     master.coarseoffsetP = -slave.coarseoffsetP;        // azifilt
 
+    //MCC
+    char c6slopeP[25];
+    readres(c6slopeP,sizeof(c6slopeP),input_general.i_resfile,
+            "Slope_CoarseCorr_pixels:", 0);
+    slave.slopeP = atof(c6slopeP);              // initial slope, pixels
+
+    char c6slopeL[25];
+    readres(c6slopeL,sizeof(c6slopeL),input_general.i_resfile,
+            "Slope_CoarseCorr_lines:", 0);
+    slave.slopeL = atof(c6slopeL);              // initial slope, lines
+
+    char c6realoffsetL[25];
+    readres(c6realoffsetL,sizeof(c6realoffsetL),input_general.i_resfile,
+            "Initial_Offset_CoarseCorr_lines:", 0);
+    slave.realoffsetL = atof(c6realoffsetL);    // initial offset, lines
+
+    char c6realoffsetP[25];
+    readres(c6realoffsetP,sizeof(c6realoffsetP),input_general.i_resfile,
+            "Initial_Offset_CoarseCorr_pixels:", 0);
+    slave.realoffsetP = atof(c6realoffsetP);    // initial offset, pixels
+    //MCC
+
     // ______ corners of current slave in master coordinate system ______
     const int32 sL0 = slave.currentwindow.linelo - slave.coarseoffsetL;
     const int32 sLN = slave.currentwindow.linehi - slave.coarseoffsetL;
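
Each of the [MCC] values above arrives through the readres()/atof pattern:
the number is returned as a fixed-width character field and converted
afterwards. A standalone sketch of just that lookup (file name and error
handling are hypothetical simplifications):

    #include <cstdlib>
    #include <fstream>
    #include <iostream>
    #include <string>

    double read_value(const char *file, const std::string &pattern) {
      std::ifstream res(file);
      std::string word;
      while (res >> word)
        if (word == pattern) { res >> word; return std::atof(word.c_str()); }
      return 0.0;   // simplification; error handling omitted
    }

    int main() {
      std::cout << read_value("master_slave.res", "Slope_CoarseCorr_pixels:")
                << '\n';
      return 0;
    }
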
@@ -2090,20 +2120,66 @@ int main(
       input_i_fine.initoffsetL = slave.coarseoffsetL;
       input_i_fine.initoffsetP = slave.coarseoffsetP;
       }
-    finecoreg(input_i_fine, master, slave);// selects other methods within
-
-//    // ______ Select method ______
-//    if (input_i_fine.method == fc_magfft  ||
-//        input_i_fine.method == fc_magspace  )
-//      finecoreg(input_i_fine, master, slave);    // selects other methods within
-//    else if (input_i_fine.method == fc_oversample) // still 2b implemented in place
-//      //finecoreg2(input_i_fine, master, slave)
-//      ;
-//    else
-//      {
-//      PRINT_ERROR("Method FC not implemented.")
-//      throw(input_error);// exit
-//      }
+    //MCC
+    //Computes a radar-coded DEM using the code of demassist
+    // setunspecified(input_i_fine.firefdem);   //testing
+    //  setunspecified(input_i_fine.forefdem);  //testing
+      if (input_i_fine.method == fc_coherence && specified( input_i_fine.firefdem)) 
+  {
+
+     input_comprefdem input_fine_dem;
+     setunspecified(input_fine_dem.firefdem);             // check later, mandatory
+     setunspecified(input_fine_dem.fodemi);               // check later, then set default
+     //_____ added by FvL
+     setunspecified(input_fine_dem.foh2ph);               // check later, then set default
+     // ____ end added by FvL
+     setunspecified(input_fine_dem.forefdemhei);          // check later, then set default
+     
+     strcpy(input_fine_dem.fodem, "demcrop.raw");
+     strcpy(input_fine_dem.forefdem, "refPhaseDEM.raw");
+     strcpy(input_fine_dem.firefdem, input_i_fine.firefdem);
+     INFO << "file In : " << input_fine_dem.firefdem << endl;
+     INFO.print();
+     input_fine_dem.iformatflag     = input_i_fine.iformatflag;
+     input_fine_dem.demrows         = input_i_fine.demrows;
+     input_fine_dem.demcols         = input_i_fine.demcols;
+      
+     input_fine_dem.demdeltalat     = input_i_fine.demdeltalat;
+     input_fine_dem.demdeltalon     = input_i_fine.demdeltalon;
+     input_fine_dem.demlatleftupper = input_i_fine.demlatleftupper;
+     input_fine_dem.demlonleftupper = input_i_fine.demlonleftupper;
+     input_fine_dem.demnodata       = input_i_fine.demnodata;
+     input_fine_dem.includerefpha   = false;
+     
+     input_fine_dem.isCCC = true;
+     productinfo dummyinterferogram = interferogram;
+     dummyinterferogram.multilookL = 1;
+     dummyinterferogram.multilookP = 1;
+     dummyinterferogram.win = master.currentwindow;
+     try
+     {
+         
+        radarcodedem(input_general, input_ellips, input_fine_dem,
+                   master, slave,dummyinterferogram, masterorbit, slaveorbit);
+      strcpy(input_i_fine.forefdem, input_fine_dem.forefdem);
+     
+     }
+     catch  (int e)
+     {
+       WARNING << "I could NOT radar-code your DEM. Exception: " << e << endl;
+       WARNING <<"Continuing CCC without DEM \n";
+       WARNING.print();
+       setunspecified(input_i_fine.forefdem);
+     }
+  }
+
+    // INFO << "master.Ks" << master.Ks;
+    // INFO.print();
+    finecoreg(input_i_fine, master, slave, input_ellips, masterorbit, slaveorbit, baseline);
+
 
     // ______ Update log files ______
     updatefile("scratchlogfine",input_general.logfile);
@@ -2332,10 +2408,13 @@ int main(
     char c10offL[11];
     readres(c10offL,sizeof(c10offL),input_general.i_resfile,"Degree_cpm:");
     int32 degreecpm = atoi(c10offL);
+
     coeff_cpmL = readcoeff(input_general.i_resfile,
                  "Estimated_coefficientsL:",Ncoeffs(degreecpm));
     coeff_cpmP = readcoeff(input_general.i_resfile,
                  "Estimated_coefficientsP:",Ncoeffs(degreecpm));
+
     // bk 1 sep 2000, req. for resample...
     //interferogram.win = getoverlap(master,slave,coeff_cpmL,coeff_cpmP);
 
@@ -2406,6 +2485,19 @@ int main(
       cerr << "\nProcessing resampling of slave image. (might take some time.)";
       getanswer();
       }
+    matrix<real8> minMaxL(2,1);
+    minMaxL = readnormcoeff(input_general.i_resfile,
+                 "Normalization_Lines:");
+
+    matrix<real8> minMaxP(2,1);
+    minMaxP = readnormcoeff(input_general.i_resfile,
+                 "Normalization_Pixels:");
+
+    INFO << "\n Normalized lines to [-2,2] from: " << minMaxL(0,0) << "," << minMaxL(1,0);
+    INFO << "\n Normalized pixels to [-2,2] from: " << minMaxP(0,0) << "," << minMaxP(1,0);
+    INFO << "\n ";
+    INFO.print();
 
      if (input_s_resample.dbow_geo.pixhi != 0)
         {
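
Assuming the usual linear map, the [-2,2] normalization the INFO lines report
is x_n = (x - (min+max)/2) / ((max-min)/4), which sends min to -2 and max to
+2. A sketch with hypothetical bounds:

    #include <iostream>

    double normalize(double x, double mn, double mx)
      { return (x - 0.5*(mn + mx)) / (0.25*(mx - mn)); }

    int main() {
      std::cout << normalize(1.0, 1.0, 26819.0) << ' '        // -2
                << normalize(26819.0, 1.0, 26819.0) << '\n';  // +2
      return 0;
    }
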
@@ -2477,9 +2569,12 @@ int main(
       // Batu - End KML generation.
 
     // ______ (interf.win contains approx. offset) ______
-    resample(input_general, input_s_resample,
+ //   resample(input_general, input_s_resample,
+ //            master, slave, coeff_cpmL, coeff_cpmP,
+ //            alreadyprocessed[pr_i_demassist]);
+    resample(input_general, input_s_resample,
              master, slave, coeff_cpmL, coeff_cpmP,
-             alreadyprocessed[pr_i_demassist]);
+             alreadyprocessed[pr_i_demassist], minMaxL, minMaxP);
     // ______ Update log files ______
     updatefile("scratchlogresample",input_general.logfile);
     updatefile("scratchresresample",input_general.s_resfile);
@@ -2633,7 +2728,7 @@ int main(
     // ______ slave.currentwin is in master system (after resample) and is ______
     // ______ smaller than master.currentwin ______
     if (input_i_interfero.method==int_oldmethod)
-      compinterfero(master, slave, input_general, input_i_interfero, coeff_flat);
+      compinterfero(master, slave, input_general, input_i_interfero);
     else if (input_i_interfero.method==int_oversample)
       {
       PRINT_ERROR("NOT IMPLEMENTED IN THIS VERSION.")
@@ -3675,7 +3770,8 @@ void usage(char *programname)
        << "\t  -c:           return copyright notice.\n"
        << "\t  -q:           return random quote (not so random).\n"
        << "\t  -v:           return version number.\n\n\n"
-       << "  LIBRARIES (used): " << libs << endl << endl;
+       << "  LIBRARIES (used): " << libs << "\n\n"
+       << "  Compiled on DATE: " __DATE__ << " at: " << __TIME__ << endl << endl;
   throw(usage_error);// normal exit
   } // END usage
 
diff --git a/src/productinfo.cc b/doris_core/productinfo.cc
similarity index 100%
rename from src/productinfo.cc
rename to doris_core/productinfo.cc
diff --git a/src/productinfo.hh b/doris_core/productinfo.hh
similarity index 100%
rename from src/productinfo.hh
rename to doris_core/productinfo.hh
diff --git a/src/products.cc b/doris_core/products.cc
similarity index 98%
rename from src/products.cc
rename to doris_core/products.cc
index 83ed28c..dfd36a4 100755
--- a/src/products.cc
+++ b/doris_core/products.cc
@@ -67,7 +67,7 @@
  *    compinterfero                                             *
  *                                                              *
  * Compute products:                                            *
- *  - (compex) interferogram, evaluate reference phase model    *
+ *  - (complex) interferogram                                   *
  * note: master-slave                                           *
  * Assumed that slave.currentwin is in master coord. system     *
  * and is smaller than or equal to maste.currentwin.            *
@@ -81,13 +81,15 @@
  * bugfix computations, subtract reference phase                *
  * for all points before multilooking.                          *
  *    Bert Kampes, 06-Oct-1999                                  *
+ *                                                              *
+ * Change log: - FvL, 23-Jul-2014, removed unwanted automatic   *
+ *   removal of pre-calculated reference phase                  *
  ****************************************************************/
 void compinterfero(
         const slcimage         &master,
         const slcimage         &slave,
         const input_gen        &input_general,
-        const input_interfero  &input_i_interfero,
-        const matrix<real8>    &coeff_flatearth)
+        const input_interfero  &input_i_interfero)
   {
   TRACE_FUNCTION("compinterfero (BK 06-Oct-1999)");
   INFO << "INTERFERO: master input file: " << master.file;
@@ -122,26 +124,16 @@ void compinterfero(
   const int32 multiP        = input_i_interfero.multilookP;
   bool nocint               = true;                     // output complex phase image
   bool noint                = true;                     // no output real phase image
-  bool noflatearthcorrection = false;                   // do correction
   if (specified(input_i_interfero.focint))
     nocint  = false;
   if (specified(input_i_interfero.foint))
     noint  = false;
-  if (coeff_flatearth.size() == 0)                      // step flatearth not done or degree=0
-    noflatearthcorrection = true;
 
   // ______ Normalize data for polynomial ______
   const real8 minL = master.originalwindow.linelo;
   const real8 maxL = master.originalwindow.linehi;
   const real8 minP = master.originalwindow.pixlo;
   const real8 maxP = master.originalwindow.pixhi;
-  if (!noflatearthcorrection)
-    {
-    INFO << "compinterfero: polynomial normalized by factors: "
-         << minL << " " << maxL << " " << minP << " " << maxP
-         << " to [-2,2]";
-    INFO.print();
-    }
 
   // ====== Open output files ======
   ofstream ofilecint;
@@ -161,7 +153,7 @@ void compinterfero(
   // ====== allocate matrices ======
   const int32 numpixels      = (slave.currentwindow.pixels());
   const int32 bytesperline   = numpixels * sizeof(complr4);
-  const real4 numbigmatrices = (noflatearthcorrection) ? 3.2 : 4.2;             // M, S, R
+  const real4 numbigmatrices = 3.2;             // M, S
   //int32 numlines             = int32((BUFFERMEMSIZE/numbigmatrices)/bytesperline);// lines in buffer
   int32 numlines             = int32(ceil( (BUFFERMEMSIZE/numbigmatrices) / bytesperline )); // lines in buffer [MA] see reference.cc BUFFERMEMSIZE
   while (numlines%multiL)                               // correct numlines to multiple of multiL
@@ -201,14 +193,6 @@ void compinterfero(
   const int32 nummlrestlines = numrestlines/multiL;     // floor...
   const int32 EXTRABUFFER    = nummlrestlines ? 1 : 0;
 
-  matrix<real4> p_axis(numpixels,1);
-  if (!noflatearthcorrection)
-    {
-    for (int32 i=0; i<numpixels; i++)
-      p_axis(i,0) = winfile.pixlo  + i;
-    normalize(p_axis,minP,maxP);// ______ Normalize data ______
-    }
-
   for (int32 blocks=1; blocks<=numfullbuffers+EXTRABUFFER; blocks++)
     {
     // ______ Progress info ______
@@ -228,20 +212,7 @@ void compinterfero(
     matrix<complr4> MASTER = master.readdata(winfile);
     matrix<complr4> SLAVE  = slave.readdata(winfile);
 
-    // ====== Compute method 1. S=S.R 2. M=M.S* ======
-    // ______ Compute S = S.R if there is a reference phase ______
-    if (!noflatearthcorrection)
-      {
-      matrix<real4> l_axis(numlines,1);
-      for (int32 i=0; i<numlines; i++)
-        l_axis(i,0) = winfile.linelo + i;
-      // ______ Normalize data ______
-      normalize(l_axis,minL,maxL);
-      matrix<real4> REFPHASE = polyval<real4>(l_axis, p_axis, coeff_flatearth);
-      SLAVE *= fast_angle2cmplx(REFPHASE);
-      } // compute S=S.R
-
-    // ______ Compute M = M* conj(S.R) ______
+    // ______ Compute M = M* conj(S) ______
     MASTER *= conj(SLAVE);              // ?better SLAVE.conj(); for speed and memory
 
     // ====== Multilook if appropriate ======
@@ -309,11 +280,6 @@ void compinterfero(
                  << "\nData_output_format_real_interferogram: \t\t"
                  << "real4";
     }
-  scratchresfile << "\nFlatearth correction subtracted: \t";
-  if (!noflatearthcorrection)
-    scratchresfile << "yes";
-  else
-    scratchresfile << "no";
   scratchresfile 
                  << "\nFirst_line (w.r.t. original_master): \t"
                  <<  slave.currentwindow.linelo
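
After this change compinterfero() forms the interferogram purely as
M * conj(S) per pixel, before multilooking; reference-phase subtraction is
left to the dedicated subtrrefpha step. A minimal sketch of the per-buffer
core (toy data; 1x2 multilook for brevity, Doris uses multilookL x multilookP):

    #include <complex>
    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::complex<float> > M = {{1,1},{2,0},{0,3},{1,-1}};
      std::vector<std::complex<float> > S = {{1,0},{1,1},{2,1},{0,1}};
      std::vector<std::complex<float> > I(M.size());
      for (std::size_t i = 0; i < M.size(); ++i)
        I[i] = M[i] * std::conj(S[i]);                // phase = phi_M - phi_S
      for (std::size_t i = 0; i + 1 < I.size(); i += 2)
        std::cout << (I[i] + I[i+1]) * 0.5f << '\n';  // 1x2 multilook
      return 0;
    }
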
diff --git a/src/products.hh b/doris_core/products.hh
similarity index 96%
rename from src/products.hh
rename to doris_core/products.hh
index eaefce5..dd038b4 100755
--- a/src/products.hh
+++ b/doris_core/products.hh
@@ -56,14 +56,11 @@ void sim_amplitude(
         orbit                  &masterorbit);
 
 // ______ Compute the (complex) interferogram ______
-// ______ Subtract ref.pha if present ______
 void compinterfero(
         const slcimage          &master,
         const slcimage          &slave,
         const input_gen         &input_general,
-        const input_interfero   &input_i_interfero,
-        const matrix<real8>     &coeff_refpha);
-
+        const input_interfero   &input_i_interfero);
 
 // ______ Subtract ref.pha from complex interferogram ______
 // ______ evaluate polynomial from comprefpha ______
diff --git a/src/readdata.cc b/doris_core/readdata.cc
similarity index 95%
rename from src/readdata.cc
rename to doris_core/readdata.cc
index e14af17..00627d5 100755
--- a/src/readdata.cc
+++ b/doris_core/readdata.cc
@@ -66,7 +66,7 @@ int32 julday(int32 id, int32 mm, int32 iyyy)
   {
   int32 jul;
   int32 ja,jy,jm;
-  if (iyyy==0) 
+  if (iyyy==0)
     {
     //PRINT_ERROR("julday: error")
     //throw(some_error);
@@ -85,7 +85,7 @@ int32 julday(int32 id, int32 mm, int32 iyyy)
     jm=mm+13;
     }
   jul = int32(floor(365.25*jy)+floor(30.6001*jm)+id+1720995);
-  if (id+31L*(mm+12L*iyyy) >= IGREG) 
+  if (id+31L*(mm+12L*iyyy) >= IGREG)
     {
     ja   = int32(0.01*jy);
     jul += 2-ja+int32(0.25*ja);
@@ -119,7 +119,7 @@ int32 julday(int32 id, int32 mm, int32 iyyy)
  #%// Davide Nitti (Don), 11-Nov-2008 Reader update for ALOS    *
  ****************************************************************/
 void readvolume(
-        input_readfiles &readfiles_arg, 
+        input_readfiles &readfiles_arg,
         const char* checkvol1,
         const char* checkvol2,
         const char* checkvol3)
@@ -166,7 +166,7 @@ void readvolume(
   uint rec_seq;// type B4
   unsigned char rec_sub1, rec_type, rec_sub2, rec_sub3;// type B1
 
-// ======Open files====== 
+// ======Open files======
   // ___ check if opened correctly, if not, try to use uppercase
   // ___ from SCENE1/lea_01.001 #%// BK 27-Nov-2003
   ifstream volumefile;
@@ -240,7 +240,7 @@ void readvolume(
   volumefile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   volumefile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("Expecting record 1 with code {192,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -343,7 +343,7 @@ void readvolume(
   volumefile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   volumefile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("Expecting record 2 with code {219,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -380,7 +380,7 @@ void readvolume(
   volumefile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   volumefile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("Expecting record 3 with code {219,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -426,7 +426,7 @@ void readvolume(
   volumefile.read((char*)&rec_type,sizeb1);// record type code
   volumefile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   volumefile.read((char*)&rec_sub3,sizeb1);// third record sub type code
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -441,7 +441,7 @@ void readvolume(
     {
     //WARNING.print("This is not the expected text record, trying next one.")
     readfiles_arg.sar_processor = SARPR_ATL;// set determined sar processor/format
-    
+
         // Modified by LG for reading ALOS Fine
         if ( (readfiles_arg.sensor_id == SLC_ALOS) && (int(rec_type)==192) )
                          readfiles_arg.sar_processor = SARPR_JAX;
@@ -470,7 +470,7 @@ void readvolume(
     WARNING.print();
     }
   volumefile.seekg(startrec4+16,ios::beg);
-  volumefile.read((char*)&c40typespec,sizea40);         // product type specifier 
+  volumefile.read((char*)&c40typespec,sizea40);         // product type specifier
   c40typespec[40]='\0';
   volumefile.read((char*)&c60product,sizea60);          // loc&date product gen.
   c60product[60]='\0';
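
The numeric record fields readvolume() reads (rec_seq, the record lengths) are
CEOS big-endian integers; on x86 they must be byte-swapped after reading,
which this file does with ntohl() (see the lenrec4 = ntohl(lenrec4) call
further down). A standalone sketch of the swap (POSIX <arpa/inet.h>; the
4-byte field content is hypothetical):

    #include <arpa/inet.h>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      unsigned char raw[4] = {0x00, 0x00, 0x00, 0x01};  // big-endian 1
      std::uint32_t rec_seq = 0;
      std::memcpy(&rec_seq, raw, 4);
      std::printf("raw read: %u   after ntohl: %u\n",
                  rec_seq, ntohl(rec_seq));
      return 0;
    }
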
@@ -490,7 +490,7 @@ void readvolume(
 
 // ______Write information to scratchfiles______
   scratchlogfile << "\n*******************************************************************"
-                 << "\n* EXTRACTED DATA FROM VOLUME FILE: " 
+                 << "\n* EXTRACTED DATA FROM VOLUME FILE: "
                  <<  readfiles_arg.volfile << " *"
                  << "\n*******************************************************************"
 
@@ -560,7 +560,7 @@ void readvolume(
     scratchresfile << "\n*_Start_" << processcontrol[pr_m_readfiles];
   scratchresfile
     << "\n*******************************************************************"
-    << "\nVolume file: \t\t\t\t\t" 
+    << "\nVolume file: \t\t\t\t\t"
     <<  readfiles_arg.volfile
     << "\nVolume_ID: \t\t\t\t\t"
     <<  c16physid
@@ -637,7 +637,7 @@ void readvolume(
  * Included RSAT format based on document of ASF                *
  #%// Bert Kampes, 03-Aug-2004                                  *
  #%// Davide Nitti (Don), 11-Nov-2008  fixes for doppler        *
- #     coefficient unit for Radarsat1 and ALOS                  * 
+ #     coefficient unit for Radarsat1 and ALOS                  *
  ****************************************************************/
 void readleader(
         input_readfiles &readfiles_arg,
@@ -664,7 +664,7 @@ void readleader(
                         lenrec3,                // length of record3
                         lenrec4,                // bc length of record4
                         lenrec5,                // bc/gk length of record5
-                        lenrec6,                // gk length of record6 
+                        lenrec6,                // gk length of record6
                         lenrec7;                // bk rsat record
  char                   c2motioncomp[3],
                         c4dummy[5],             // correctly 5 for \0
@@ -699,8 +699,8 @@ void readleader(
                         c16latNN[17], c16lonNN[17],
                         c16latN1[17], c16lonN1[17],
                         c16leafilename[17],
-                        c16centerlat[17], 
-                        c16centerlon[17], 
+                        c16centerlat[17],
+                        c16centerlon[17],
                         c16centerheading[17],
                         c16ellipsoid[17],
                         c16semimajor[17],
@@ -802,7 +802,7 @@ void readleader(
 
 
 
-// ======Open files====== 
+// ======Open files======
   ifstream leaderfile;
   openfstream(leaderfile,readfiles_arg.leaderfile);
   bk_assert(leaderfile,readfiles_arg.leaderfile,__FILE__,__LINE__);
@@ -817,7 +817,7 @@ void readleader(
   leaderfile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   leaderfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("Expecting record 1 with code {63,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -857,7 +857,7 @@ void readleader(
   DEBUG.print("RSAT record length is 4096, ERS 1886, but");
   DEBUG.print("ERS contains more info on zero doppler times, etc.");
   DEBUG.print("RSAT seems to have that info in the data file.");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -902,7 +902,7 @@ void readleader(
 
   // ______Scene parameters______
  // Modified by LG for reading ALOS Fine
- if (readfiles_arg.sensor_id == SLC_ALOS) 
+ if (readfiles_arg.sensor_id == SLC_ALOS)
  {
          leaderfile.seekg(startrec2+20,ios::beg);
          leaderfile.read((char*)&c32sceneref,sizea32);
@@ -923,7 +923,7 @@ void readleader(
   c16centerlon[16]='\0';
   leaderfile.read((char*)&c16centerheading,sizef16);    // center true heading
   c16centerheading[16]='\0';
-  leaderfile.read((char*)&c16ellipsoid,sizea16);        // ell. designated 
+  leaderfile.read((char*)&c16ellipsoid,sizea16);        // ell. designated
   c16ellipsoid[16]='\0';
   leaderfile.read((char*)&c16semimajor,sizef16);        // ell. semi major
   c16semimajor[16]='\0';
@@ -1115,9 +1115,9 @@ void readleader(
   c4clutterlock[4]='\0';
   leaderfile.read((char*)&c4autofocus,sizea4);          // flag
   c4autofocus[4]='\0';
-  leaderfile.read((char*)&c16linespace,sizef16);        // 
+  leaderfile.read((char*)&c16linespace,sizef16);        //
   c16linespace[16]='\0';
-  leaderfile.read((char*)&c16pixspace,sizef16);         // 
+  leaderfile.read((char*)&c16pixspace,sizef16);         //
   c16pixspace[16]='\0';
   leaderfile.read((char*)&c16rcompdes,sizea16);         // range compression designator
   c16rcompdes[16]='\0';
@@ -1184,7 +1184,7 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
   DEBUG.print("RSAT record length is 4096, ERS 1886, but");
   DEBUG.print("ERS contains more info on zero doppler times, etc.");
   DEBUG.print("RSAT seems to have that in data file");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1253,7 +1253,7 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
     c16platincl[16]='\0';
     leaderfile.read((char*)&c16platascn,sizef16);       // actual ascending node
     c16platascn[16]='\0';
-    leaderfile.read((char*)&c16geocenter,sizef16);      // 
+    leaderfile.read((char*)&c16geocenter,sizef16);      //
     c16geocenter[16]='\0';
     leaderfile.read((char*)&c16platalt,sizef16);        // altitude
     c16platalt[16]='\0';
@@ -1267,7 +1267,7 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
     c16refmajor[16]='\0';
     leaderfile.read((char*)&c16refminor,sizef16);       // semi minor
     c16refminor[16]='\0';
-  
+
     // ______ Coordinates of four corner points ______
     leaderfile.seekg(startrec3+1072,ios::beg);
     leaderfile.read((char*)&c16lat11,sizef16);          // lat. 1st line 1st pix.
@@ -1295,7 +1295,7 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
   leaderfile.seekg(startrec4+8,ios::beg);               //  slc platform position data record
   leaderfile.read((char*)&lenrec4,sizeb4);              // length of record4
   lenrec4 = ntohl(lenrec4);     // bk 6 jul 2000, byteorder x86 machines.
-  DEBUG << "readleader::record 4: start at: " << startrec4 
+  DEBUG << "readleader::record 4: start at: " << startrec4
         << "; length (variable): " << lenrec4;
   DEBUG.print();
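
Every record-length read in this function goes through the same byte-order step: CEOS stores B4 integers big-endian, and ntohl() produces the correct value on little-endian x86 hosts (and is a no-op on big-endian ones). A sketch of that step, assuming the length field sits at byte offset 8 of the record, as it does in the reads above:

    #include <arpa/inet.h>   // ntohl
    #include <cstdint>
    #include <fstream>

    uint32_t read_record_length(std::ifstream &f, std::streamoff startrec)
    {
      uint32_t lenrec = 0;
      f.seekg(startrec + 8, std::ios::beg);                      // B4 length field
      f.read(reinterpret_cast<char*>(&lenrec), sizeof(lenrec));
      return ntohl(lenrec);   // big-endian on disk -> host byte order
    }
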
 
@@ -1354,13 +1354,13 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
   // --- RECORD 5 (Bianca Cassee) slc facility related data record [general type]) ---
   DEBUG.print("record 5 of leader file.");              //bc 18 dec 2003
   uint startrec5=lenrec1+lenrec2+lenrec3+lenrec4;       //bc
-  leaderfile.seekg(startrec5+8,ios::beg);       //slc facility related 
+  leaderfile.seekg(startrec5+8,ios::beg);       //slc facility related
   leaderfile.read((char*)&lenrec5,sizeb4);      //bc length of record4
   lenrec5 = ntohl(lenrec5);                     //byteorder x86 machines.
-  DEBUG << "readleader::record 5: start at: " << startrec5 
+  DEBUG << "readleader::record 5: start at: " << startrec5
         << "; length: " << lenrec5;
   DEBUG.print();
- 
+
 
   // ______ Calibration information  ______              //bc
   leaderfile.seekg(startrec5+582,ios::beg);             //bc
@@ -1416,7 +1416,7 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
   c4numinvalid[4]='\0';
   if (atoi(c4numinvalid) != 0)
     {
-    WARNING << "Number of invalid samples " << c4numinvalid 
+    WARNING << "Number of invalid samples " << c4numinvalid
             << " not 0: rsr may be wrongly computed.";
     WARNING.print();
     }
@@ -1425,10 +1425,10 @@ if (readfiles_arg.sensor_id!=SLC_RSAT)
   //  // --- RECORD 6 (slc facility related data record [pcs type]) ---
   //  DEBUG.print("record 6 of leader file.");                 //gk 28 jan 2004
   //  uint startrec6=lenrec1+lenrec2+lenrec3+lenrec4+lenrec5;  //gk
-  //  leaderfile.seekg(startrec6+8,ios::beg);       //slc facility related 
+  //  leaderfile.seekg(startrec6+8,ios::beg);       //slc facility related
   //  leaderfile.read((char*)&lenrec6,sizeb4);      //gk length of record5
   //  lenrec6 = ntohl(lenrec6);                     //byteorder x86 machines.
-  //  DEBUG << "readleader::record 6: start at: " << startrec6 
+  //  DEBUG << "readleader::record 6: start at: " << startrec6
   //        << "; length: " << lenrec6;
   //  DEBUG.print();
 
@@ -1449,7 +1449,7 @@ else//RSAT method specified
   DEBUG.print("ERS:  Expecting record 3 with code {10,20,31,20}");
   DEBUG.print("RSAT: Expecting record 3 with code {18,60,18,20}");
   DEBUG.print("RSAT record length should be 1620, ERS 1620");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1458,7 +1458,7 @@ else//RSAT method specified
   leaderfile.seekg(startrec3+8,ios::beg);//
   leaderfile.read((char*)&lenrec3,sizeb4);// length of record3
   lenrec3 = ntohl(lenrec3);     // bk 6 jul 2000, byteorder x86 machines.
-  DEBUG << "readleader::record 3: start at: " << startrec3 
+  DEBUG << "readleader::record 3: start at: " << startrec3
         << "; length: " << lenrec3;
   DEBUG.print();
   if (int(rec_sub1)==18 && int(rec_type)==60 && int(rec_sub2)==18 && int(rec_sub3)==20)
@@ -1480,7 +1480,7 @@ else//RSAT method specified
   DEBUG.print("ERS:  Expecting record 4 with code {10,30,31,20}");
   DEBUG.print("RSAT: Expecting record 4 with code {18,70,18,20}");
   DEBUG.print("RSAT record length should be 16920");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1489,7 +1489,7 @@ else//RSAT method specified
   leaderfile.seekg(startrec4+8,ios::beg);//
   leaderfile.read((char*)&lenrec4,sizeb4);// length of record4
   lenrec4 = ntohl(lenrec4);     // bk 6 jul 2000, byteorder x86 machines.
-  DEBUG << "readleader::record 4: start at: " << startrec4 
+  DEBUG << "readleader::record 4: start at: " << startrec4
         << "; length: " << lenrec4;
   DEBUG.print();
   if (int(rec_sub1)==18 && int(rec_type)==70 && int(rec_sub2)==18 && int(rec_sub3)==20)
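
Each of these record checks parses the same 12-byte CEOS prefix: a B4 sequence number, the four B1 type codes compared against the expected {sub1,type,sub2,sub3} set, and a B4 record length. A sketch of that layout as reconstructed from the reads in this function (struct and function names are illustrative):

    #include <arpa/inet.h>
    #include <cstdint>
    #include <fstream>

    struct CeosRecPrefix
    {
      uint32_t seq;                          // record sequence number (B4)
      unsigned char sub1, type, sub2, sub3;  // record type codes (B1 each)
      uint32_t len;                          // record length in bytes (B4)
    };

    CeosRecPrefix read_prefix(std::ifstream &f, std::streamoff start)
    {
      CeosRecPrefix p;
      f.seekg(start, std::ios::beg);
      f.read(reinterpret_cast<char*>(&p.seq),  4);
      f.read(reinterpret_cast<char*>(&p.sub1), 1);
      f.read(reinterpret_cast<char*>(&p.type), 1);
      f.read(reinterpret_cast<char*>(&p.sub2), 1);
      f.read(reinterpret_cast<char*>(&p.sub3), 1);
      f.read(reinterpret_cast<char*>(&p.len),  4);
      p.seq = ntohl(p.seq);                  // both B4 fields are big-endian
      p.len = ntohl(p.len);
      return p;
    }
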
@@ -1511,7 +1511,7 @@ else//RSAT method specified
   DEBUG.print("ERS:  Expecting record 5 with code {10,200,31,50}");
   DEBUG.print("RSAT: Expecting record 5 with code {18,70,18,20}");
   DEBUG.print("RSAT record length should be 16920");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1520,7 +1520,7 @@ else//RSAT method specified
   leaderfile.seekg(startrec5+8,ios::beg);//
   leaderfile.read((char*)&lenrec5,sizeb4);// length of record5
   lenrec5 = ntohl(lenrec5);     // bk 6 jul 2000, byteorder x86 machines.
-  DEBUG << "readleader::record 5: start at: " << startrec5 
+  DEBUG << "readleader::record 5: start at: " << startrec5
         << "; length: " << lenrec5;
   DEBUG.print();
   if (int(rec_sub1)==18 && int(rec_type)==70 && int(rec_sub2)==18 && int(rec_sub3)==20)
@@ -1542,7 +1542,7 @@ else//RSAT method specified
   DEBUG.print("ERS:  Expecting record 6 with code {10,200,31,50}");
   DEBUG.print("RSAT: Expecting record 6 with code {18,120,18,20}");
   DEBUG.print("RSAT record length should be 7726");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1577,7 +1577,7 @@ else//RSAT method specified
   leaderfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("RSAT: Expecting record 7 with code {18,30,18,20}");
   DEBUG.print("RSAT record length should be 8960");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -1691,7 +1691,7 @@ else//RSAT method specified
       }
     }
   // --- if this is an inertial system (rsat) we need to convert to earth fixed ---
-  // --- Moreover: we want to reduce the number of data points to local arc ------- 
+  // --- Moreover: we want to reduce the number of data points to local arc -------
   WARNING.print("Convert orbit data to earth fixed (please check this).");
   if (!(strcmp(c64rcs,"INERTIAL")))
     INFO.print("Inertial system for orbit: transforming to Earth fixed.");
@@ -1706,11 +1706,11 @@ else//RSAT method specified
   // can we simply use the given Greenwich mean angle to rotate around Z?
  // or do we need to take precession, nutation, polar wobble into account?
   // to what (time) does the annotated GMHA refer?
-  DEBUG << "GMHA [rad]: " << GMHA; 
+  DEBUG << "GMHA [rad]: " << GMHA;
   DEBUG.print();
-  DEBUG << "Convertion from inertial to earth fixed [1/-1]: " << csi2cse; 
+  DEBUG << "Conversion from inertial to earth fixed [1/-1]: " << csi2cse;
   DEBUG.print();
-  DEBUG << "earthrot [rad/s]: " << earthrot; 
+  DEBUG << "earthrot [rad/s]: " << earthrot;
   DEBUG.print();
   // --- Create a new state vector matrix ---
   // --- these computation could be checked by using TIEPOINT, the center point -----
@@ -1721,7 +1721,7 @@ else//RSAT method specified
     {
     real8 dt       = real8(i)*atof(c22interval);// since annotated GMHA???
     real8 angle    = csi2cse*(GMHA+earthrot*dt);// current angle of Greenwich
-    DEBUG << "current angle for this data point [deg]: " << rad2deg(angle); 
+    DEBUG << "current angle for this data point [deg]: " << rad2deg(angle);
     DEBUG.print();
     STATE(0,i)     = cos(angle)*STATE_INERTIAL(0,i)-sin(angle)*STATE_INERTIAL(1,i);// x
     STATE(1,i)     = sin(angle)*STATE_INERTIAL(0,i)+cos(angle)*STATE_INERTIAL(1,i);// y
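
The loop above is a plain rotation about the Z axis: positions annotated in an inertial frame are turned by the current Greenwich angle csi2cse*(GMHA+earthrot*dt) into an earth-fixed frame. A minimal sketch of the per-point operation (the open questions about precession/nutation noted above apply here as well):

    #include <cmath>

    // Rotate an inertial position (x,y,z) about Z by 'angle' [rad] to get
    // earth-fixed coordinates; the z component is unchanged by the rotation.
    void inertial_to_earthfixed(double angle, const double in[3], double out[3])
    {
      out[0] = std::cos(angle)*in[0] - std::sin(angle)*in[1]; // x
      out[1] = std::sin(angle)*in[0] + std::cos(angle)*in[1]; // y
      out[2] = in[2];                                         // z
    }
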
@@ -1809,7 +1809,7 @@ else//RSAT method specified
       real8(tijdend.tm_sec + 60*tijdend.tm_min + 3600*tijdend.tm_hour);
     // BK 28-Sep-2000: numlin -1 !
     prfcomputed = (atof(c16numlin) - 1.) / (taN-ta1);
-  
+
     // ______ compute rsr ______
     //  const real8 tr1     = atof(c16zd1strange);              // 2way ms
     //  const real8 trN     = atof(c16zdlstrange);              // 2way ms
@@ -1817,7 +1817,7 @@ else//RSAT method specified
     rsrcomputed = 0.001 * (atof(c16numpix)-1.0) /
                   (atof(c16zdlstrange)-atof(c16zd1strange));    // MHz
     } // else skipped map projection record
-   
+
   // BK 28-Oct-2003, for Atlantis processor
   // ___ Check if rsr is in MHz, assume about 20 MHz ---
   //if (rsrcomputed>10000000.0 && rsrcomputed<30000000.0)
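
The two sanity values computed above follow directly from the annotations: PRF from (number of lines - 1) over the azimuth time span, and the range sampling rate from (number of pixels - 1) over the two-way range-time span, where the 0.001 factor converts the millisecond annotation into MHz. A sketch with illustrative ERS-like numbers (values are not from any real scene):

    #include <iostream>

    int main()
    {
      const double ta1 = 66180.0, taN = 66195.82;   // first/last line [s of day]
      const double numlin = 26577.0;
      const double prfcomputed = (numlin - 1.0) / (taN - ta1);       // ~1680 Hz

      const double tr1 = 5.5430, trN = 5.8014;      // 2-way range time [ms]
      const double numpix = 4900.0;
      const double rsrcomputed = 0.001*(numpix - 1.0)/(trN - tr1);   // ~19 MHz
      std::cout << "prf " << prfcomputed << " Hz, rsr "
                << rsrcomputed << " MHz\n";
      return 0;
    }
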
@@ -1854,9 +1854,9 @@ else//RSAT method specified
   ofstream scratchlogfile("scratchloglea", ios::out | ios::trunc);
   bk_assert(scratchlogfile,"readleader: scratchloglea",__FILE__,__LINE__);
 
-  scratchlogfile 
+  scratchlogfile
     << "\n\n*******************************************************************"
-    << "\n* EXTRACTED DATA FROM LEADER FILE: " 
+    << "\n* EXTRACTED DATA FROM LEADER FILE: "
     <<  readfiles_arg.leaderfile << " *"
     << "\n*******************************************************************"
 
@@ -1903,7 +1903,7 @@ else//RSAT method specified
     <<  c16scenelength
     << "\nProcessed scene width incl. zero fill (km): \t\t"
     <<  c16scenewidth
-   
+
     << "\n\nSLC data set summary record: general mission/sensor parameters"
     << "\n--------------------------------------------------------------"
     << "\nSensor platform mission identifier: \t\t\t"
@@ -1993,7 +1993,7 @@ else//RSAT method specified
 
     << "\n\nSLC data set summary record: sensor specific parameters"
     << "\n-------------------------------------------------------"
-  
+
     << "\nSatellite encoded binary time code: \t\t\t"
     <<  c16sattimecode
     << "\nSatellite clock time (UTC) (YYYYMMDDhhmmssttt): \t"
@@ -2083,7 +2083,7 @@ else//RSAT method specified
     <<  c16pixspace
     << "\nProcessor range compression designator: \t\t"
     <<  c16rcompdes
-   
+
     << "\n\nSLC data set summary record: sensor specific local use segment"
     << "\n--------------------------------------------------------------"
     << "\nZero-doppler range time (two-way)"
@@ -2100,7 +2100,7 @@ else//RSAT method specified
     <<  c24zdcenazitime
     << "\n +of last azimuth pixel (UTC): \t\t\t"
     <<  c24zdlstazitime
-   
+
     << "\n\nMap projection data record: general information"
     << "\n-----------------------------------------------"
     << "\nMap projection descriptor: \t\t\t\t"
@@ -2138,7 +2138,7 @@ else//RSAT method specified
     <<  c16refmajor
     << "\nSemiminor axis of ref.ellipsoid (km): \t\t\t"
     <<  c16refminor
-   
+
     << "\n\nMap projection data record: coordinates of four corner points"
     << "\n-------------------------------------------------------------"
     << "\n1st line 1st pixel geodetic latitude"
@@ -2159,7 +2159,7 @@ else//RSAT method specified
     <<  c16latN1
     << "\nlast line 1st pixel geodetic longitude (degrees): \t"
     <<  c16lonN1
-   
+
     << "\n\nSLC platform position data record: positional data points"
     << "\n---------------------------------------------------------"
     << "\nNumber of data points: \t\t\t\t"
@@ -2186,12 +2186,12 @@ else//RSAT method specified
     <<  c16ctposerr
     << "\nRadial position error (meters): \t\t"
     <<  c16rposerr
-    
+
     << "\n\nSLC facility related data record [general type]: calibration information"
-    << "\n------------------------------------------------------------------------" 
+    << "\n------------------------------------------------------------------------"
     << "\nIncidence angle at first range pixel (at mid-azimuth): \t"
     <<  c16incangle1strange //bc
-    << "\nIncidence angle at centre range pixel (at mid-azimuth): " 
+    << "\nIncidence angle at centre range pixel (at mid-azimuth): "
     <<  c16incanglecenrange //bc
     << "\nIncidence angle at last range pixel (at mid-azimuth): \t"
     <<  c16incanglelstrange //bc
@@ -2219,25 +2219,25 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
   scratchlogfile << "\nSLC platform position data record: data point: " << point
                  << "\n------------------------------------------------\n"
                  << point << " data point - Seconds of day (s):    \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << secofday << endl
                  << point << " data point - Position vector X (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(0,k) << endl
                  << point << " data point - Position vector Y (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(1,k) << endl
                  << point << " data point - Position vector Z (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(2,k) << endl
                  << point << " data point - Velocity vector X (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(3,k) << endl
                  << point << " data point - Velocity vector Y (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(4,k) << endl
                  << point << " data point - Velocity vector Z (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE_INERTIAL(5,k) << endl;
     }
   scratchlogfile << "\nRSAT: and converted to earth fixed using GMHA:";
@@ -2251,25 +2251,25 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
   scratchlogfile << "\nSLC platform position data record: data point: " << point
                  << "\n------------------------------------------------\n"
                  << point << " data point - Seconds of day (s):    \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << secofday << endl
                  << point << " data point - Position vector X (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(0,k) << endl
                  << point << " data point - Position vector Y (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(1,k) << endl
                  << point << " data point - Position vector Z (m): \t\t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(2,k) << endl
                  << point << " data point - Velocity vector X (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(3,k) << endl
                  << point << " data point - Velocity vector Y (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(4,k) << endl
                  << point << " data point - Velocity vector Z (mm/s): \t"
-                 << setprecision(13) 
+                 << setprecision(13)
                  << STATE(5,k) << endl;
     }
 
@@ -2347,13 +2347,13 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
   ofstream scratchresfile("scratchreslea", ios::out | ios::trunc);
   bk_assert(scratchresfile,"readleader: scratchreslea",__FILE__,__LINE__);
   // Modified by LG for reading ALOS Fine
-  if(!strcmp(c16centerlat,"                ")) 
+  if(!strcmp(c16centerlat,"                "))
           strcpy(c16centerlat,"0");
-  if(!strcmp(c16centerlon,"                ")) 
+  if(!strcmp(c16centerlon,"                "))
           strcpy(c16centerlon,"0");
 
   scratchresfile
-    << "Leader file:                                 \t" 
+    << "Leader file:                                 \t"
     <<  readfiles_arg.leaderfile
    << "\nSensor platform mission identifier:        \t"
     <<  c32sensorid
@@ -2376,7 +2376,7 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
      << c16plathead;                                      // heading: more digits, but not true heading (PGS lacks true heading, VMP has it). TODO: add switch for VMP vs PGS
       }
     //<< c8platformheading;                               // heading
-    //<< c16centerheading;                                 // MA for VMP true heading  
+    //<< c16centerheading;                                 // MA for VMP true heading
   // start_added_by_don
   if (readfiles_arg.sensor_id == SLC_ALOS)
     {
@@ -2493,7 +2493,7 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
     << endl;
 
   scratchresfile.setf(ios::fixed | ios::floatfield);
-  scratchresfile 
+  scratchresfile
     << "\n\n*******************************************************************"
     << "\n*_Start_leader_datapoints"
     << "\n*******************************************************************"
@@ -2504,19 +2504,19 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
 
   for (register int32 l=0;l<numdatapoints;l++)          // number of data points
     {
-    scratchresfile 
+    scratchresfile
       << setprecision(11)                                // [MA] 10 --> 11 since we have iiiii.dddddd decimal digits for day of seconds
       << atof(c22seconds)+real4(l)*atof(c22interval);
     for (register int32 m=0;m<3;m++)                    // no velocities
       {
-      scratchresfile 
-        << " \t" 
-        << setprecision(13) 
+      scratchresfile
+        << " \t"
+        << setprecision(13)
         << STATE(m,l);
       }
     scratchresfile << endl;
     }
-  scratchresfile 
+  scratchresfile
     << "\n*******************************************************************"
     << "\n* End_leader_datapoints:_NORMAL"      // fixed string...
     << "\n*******************************************************************\n";
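
The block above fixes the on-disk format that later steps grep for: one line per state vector, seconds of day at 11 significant digits followed by tab-separated X, Y, Z at 13 (velocities are dropped). A sketch of the writer, with the state vectors passed in as a plain array for illustration:

    #include <fstream>
    #include <iomanip>

    void write_leader_datapoints(std::ofstream &res, int numdatapoints,
                                 double t0, double dt, const double state[][3])
    {
      for (int l = 0; l < numdatapoints; ++l)
        {
        res << std::setprecision(11) << t0 + l*dt;     // seconds of day
        for (int m = 0; m < 3; ++m)                    // positions only
          res << " \t" << std::setprecision(13) << state[l][m];
        res << std::endl;
        }
    }
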
@@ -2545,13 +2545,13 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
   INFO << "Pulse repetition frequency (Hz):       "
        << c16prf << ends;
   INFO.print();
-  INFO << "Pulse repetition frequency (computed): " 
+  INFO << "Pulse repetition frequency (computed): "
        << setprecision(16) << prfcomputed;
   INFO.print();
   INFO << "Range sampling rate (Mhz):             "
        << c16samplingrate << ends;
   INFO.print();
-  INFO << "Range sampling rate (computed Mhz):    " 
+  INFO << "Range sampling rate (computed Mhz):    "
        << setprecision(16) << rsrcomputed;
   INFO.print();
   INFO << "UTC of first azimuth line:             "
@@ -2597,7 +2597,7 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
     WARNING << INFO.get_str() << " not HAMMING.";
     WARNING.print();
     }
-    
+
   PROGRESS.print("readleader finished.");
   } // END READLEADER
 
@@ -2616,7 +2616,7 @@ if (readfiles_arg.sensor_id==SLC_RSAT && readfiles_arg.sar_processor==SARPR_ATL)
 void readnull(
         const input_readfiles &readfiles_arg)
   {
-// ======Open files====== 
+// ======Open files======
   ifstream nullfile;
   openfstream(nullfile,readfiles_arg.nullfile);
   bk_assert(nullfile,readfiles_arg.nullfile,__FILE__,__LINE__);
@@ -2682,7 +2682,7 @@ void readdat(
 
 
 
-// ______Open files______ 
+// ______Open files______
   ifstream datfile;
   openfstream(datfile,readfiles_arg.datfile);
   bk_assert(datfile,readfiles_arg.datfile,__FILE__,__LINE__);
@@ -2698,7 +2698,7 @@ void readdat(
   datfile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   datfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("ERS/RSAT: Expecting record 1 with code {63,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -2762,7 +2762,7 @@ void readdat(
   bk_assert(scratchlogdat,"readdat: scratchlogdat",__FILE__,__LINE__);
 
   scratchlogdat << "\n\n*******************************************************************"
-                << "\n* EXTRACTED DATA FROM DATA FILE: " 
+                << "\n* EXTRACTED DATA FROM DATA FILE: "
                 <<  readfiles_arg.datfile << " *"
                 << "\n*******************************************************************"
                 << "\nNumber of SAR channels in file:         \t"
@@ -2811,7 +2811,7 @@ void readdat(
   datfile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   datfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("RSAT: Expecting record 2 with code {50,11,18,20}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -2832,7 +2832,7 @@ void readdat(
   uint startrec3 = lenrec1+lenrec2;
   uint startrecN = lenrec1+(numlines-1)*lenrec2;// start of last record
   // --- azimuth time to first line (depends on decrease/increase): ---
-  uint zdmsecofday1 = 99999;// B4 
+  uint zdmsecofday1 = 99999;// B4
   uint zdmsecofday2 = 99999;// B4
   uint zdmsecofdayN = 99999;// B4
   datfile.seekg(startrec2+44,ios::beg);//
@@ -2854,7 +2854,7 @@ void readdat(
   real8 prf_check = real8(numlines-1)/(abs(real8(zdmsecofday1)-real8(zdmsecofdayN))/1000.0);
   INFO << "PRF check (computed [Hz]): " << prf_check;
   INFO.print();
-  
+
 
   // format should be: "22-AUG-1997 18:22:10.246"
   if (zdmsecofday1 < zdmsecofdayN)// increase, use ZD time of first line
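
The check above uses the zero-doppler azimuth times stored per line record as milliseconds of day; taking the absolute difference makes it tolerate either line order (increasing or decreasing). In sketch form:

    #include <cmath>

    // PRF from first/last-line zero-doppler times in milliseconds of day.
    double prf_check(unsigned numlines, unsigned zdmsecofday1, unsigned zdmsecofdayN)
    {
      const double span_s =
        std::fabs(double(zdmsecofday1) - double(zdmsecofdayN)) / 1000.0;
      return double(numlines - 1) / span_s;
    }
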
@@ -2932,7 +2932,7 @@ void readdat(
   real8 rsr_check = real8(numpixels-1)/abs(2000000.0*(real8(range1st)-real8(rangelst))/SOL);
   INFO << "RSR check (computed [MHz]): " << rsr_check;
   INFO.print();
-  }  
+  }
   //char dummydate[] = "01-JAN-1990 ";// not used except maybe getorb later
   //WARNING.print("RSAT: using a dummy date for orbit, only secofday important.");
   scratchresdat
@@ -2958,7 +2958,7 @@ void readdat(
   if (numchannels != 1)                         // ??
     {
     WARNING << "code 904: Number of channels in file: "
-         << readfiles_arg.datfile << " = " 
+         << readfiles_arg.datfile << " = "
          << numchannels << " != 1 ";
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -2978,17 +2978,17 @@ void readdat(
     }
 
 // ______ Check with previous section ______
-  if (numlines != numdatarec) 
+  if (numlines != numdatarec)
     {
-    WARNING << "code 904: Number of lines seems not to be consistent in file: " 
+    WARNING << "code 904: Number of lines seems not to be consistent in file: "
          << readfiles_arg.datfile << " : " << numlines << " != " << numdatarec;
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
     }
  if (bottomborder != 0 || topborder != 0 || leftborder != 0 || rightborder != 0)
     {
-    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: " 
-         << leftborder << "," << rightborder << "," << bottomborder << "," 
+    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: "
+         << leftborder << "," << rightborder << "," << bottomborder << ","
          << topborder << " in file: " << readfiles_arg.datfile;
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -2999,8 +2999,8 @@ void readdat(
      {
      if ((numbytesdata / 8) != numpixels)
        {
-       WARNING << "code 904AAA: Number of pixels seems to be inconsistent in file: "  
-            << readfiles_arg.datfile << ": " 
+       WARNING << "code 904AAA: Number of pixels seems to be inconsistent in file: "
+            << readfiles_arg.datfile << ": "
      << numpixels << " != " << (numbytesdata / 8);
        WARNING.print();
       WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -3010,8 +3010,8 @@ void readdat(
      {
   if ((numbytesdata / 4) != numpixels)
     {
-    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "  
-         << readfiles_arg.datfile << ": " 
+    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "
+         << readfiles_arg.datfile << ": "
          << numpixels << " != " << (numbytesdata / 4);
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -3055,7 +3055,7 @@ void writeslc(
                         c6[7],                  // correctly 7 for \0
                         c8[9];                  // correctly 9 for \0
 
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("writeslc (BK 11-Dec-1998)")
   PROGRESS.print("Start cropping slc data.");
   #ifdef __X86PROCESSOR__
@@ -3064,7 +3064,7 @@ void writeslc(
   INFO.print("NO byte swapping performed, you must be on Big Endian platform.");
   #endif
 
-  // ______ Open files ______ 
+  // ______ Open files ______
   ifstream datfile;
   openfstream(datfile,crop_arg.filein1);
   bk_assert(datfile,crop_arg.filein1,__FILE__,__LINE__);
@@ -3156,17 +3156,17 @@ void writeslc(
     }
 
 // ______ Check with previous section ______
-  if (numlines != numdatarec) 
+  if (numlines != numdatarec)
     {
-    WARNING << "code 904: Number of lines seems not to be consistent in file: " 
+    WARNING << "code 904: Number of lines seems not to be consistent in file: "
          << crop_arg.filein1 << " : " << numlines << " != " << numdatarec;
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
     }
   if ((numbytesdata / 4) != numpixels)
     {
-    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << numpixels << " != " << (numbytesdata / 4);
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -3181,7 +3181,7 @@ void writeslc(
   uint pixelstart = 1;
   uint pixelend   = numpixels;                          // only for resultfile
 
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
     {
     window tempdbow(crop_arg.dbow.linelo, crop_arg.dbow.linehi,
@@ -3226,7 +3226,7 @@ void writeslc(
   matrix <int16> TMPSHORT(1,2);
   datfile >> TMPSHORT;          // read in first complex pixel for test
   real8 tmpmag = sqrt(
-    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) + 
+    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) +
     real8(int16(ntohs(TMPSHORT(0,1)))*int16(ntohs(TMPSHORT(0,1)))));
   DEBUG << "First complex element in datafile: ("
        << int16(ntohs(TMPSHORT(0,0))) << ","
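
This first-pixel test is a cheap format guard: an SLC pixel is a big-endian (int16 re, int16 im) pair, so after ntohs the magnitude should come out as a plausible amplitude rather than garbage. A sketch of the same computation on one pixel:

    #include <arpa/inet.h>   // ntohs
    #include <cmath>
    #include <cstdint>

    double pixel_magnitude(uint16_t re_be, uint16_t im_be)
    {
      const int16_t re = int16_t(ntohs(re_be));   // byte-swap on little-endian
      const int16_t im = int16_t(ntohs(im_be));
      return std::sqrt(double(re)*re + double(im)*im);
    }
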
@@ -3259,8 +3259,8 @@ void writeslc(
 
   if (lenrec2 != lendatarec2)
     {
-    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << lenrec2 << " != " << lendatarec2;
     WARNING.print(ERROR.get_str());
     ERROR.reset();
@@ -3306,19 +3306,19 @@ void writeslc(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
 
 // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  linestart
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  lineend
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  pixelstart
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  pixelend
     << "\nNumber of lines (non-multilooked): \t\t" <<  lineend-linestart+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  pixelend-pixelstart+1
@@ -3336,15 +3336,15 @@ void writeslc(
   if (numchannels != 1)                         // ??
     {
     WARNING << "code 904: Number of channels in file: "
-         << crop_arg.filein1 << " = " 
+         << crop_arg.filein1 << " = "
          << numchannels << " != 1 ";
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
     }
  if (bottomborder != 0 || topborder != 0 || leftborder != 0 || rightborder != 0)
     {
-    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: " 
-         << leftborder << "," << rightborder << "," << bottomborder << "," 
+    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: "
+         << leftborder << "," << rightborder << "," << bottomborder << ","
          << topborder << " in file: " << crop_arg.filein1;
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -3374,7 +3374,7 @@ void gammaprocessor_crop(
                         c6[7],                  // correctly 7 for \0
                         c8[9];                  // correctly 9 for \0
 
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("gammaprocessor_crop (Batu 15-Dec-2009)")
   PROGRESS.print("Start cropping Gamma focused slc data.");
   INFO.print("Data assumed in the host platform format. NO byte swapping performed.");
@@ -3386,14 +3386,14 @@ void gammaprocessor_crop(
   if (numlines <= 0)
     {
     WARNING << "Number of lines (less than zero?) ="
-         <<  numlines 
+         <<  numlines
          <<  "Check input result file.";
     WARNING.print();
     }
   if (numpixels <= 0)
     {
     WARNING << "Number of pixels (less than zero?) ="
-         <<  numpixels 
+         <<  numpixels
          <<  "Check input result file.";
     WARNING.print();
     }
@@ -3409,7 +3409,7 @@ void gammaprocessor_crop(
 
   lenOneLine      = numpixels*4;			// each complex pixel is 4 bytes.BO.
 
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
     {
     window tempdbow(crop_arg.dbow.linelo, crop_arg.dbow.linehi,
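
This DBOW test is the convention shared by all the crop routines in this file: a requested window counts only when all four bounds are nonzero; otherwise the full scene is taken. A sketch of the rule, with an illustrative window type standing in for the upstream one:

    struct Window { unsigned linelo, linehi, pixlo, pixhi; };

    Window effective_window(const Window &dbow,
                            unsigned numlines, unsigned numpixels)
    {
      if (dbow.linehi != 0 && dbow.linelo != 0 &&
          dbow.pixhi != 0 && dbow.pixlo != 0)
        return dbow;                             // user-requested crop
      return Window{1, numlines, 1, numpixels};  // whole image
    }
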
@@ -3458,7 +3458,7 @@ void gammaprocessor_crop(
   matrix <int16> TMPSHORT(1,2);
   datfile >> TMPSHORT;          // read in first complex pixel for test
   real8 tmpmag = sqrt(
-    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) + 
+    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) +
     real8(int16(ntohs(TMPSHORT(0,1)))*int16(ntohs(TMPSHORT(0,1)))));
   DEBUG << "First complex element in datafile: ("
        << int16(ntohs(TMPSHORT(0,0))) << ","
@@ -3502,7 +3502,7 @@ void gammaprocessor_crop(
     datoutfile << LINE;
     }
   datfile.close();                                      // close files
-  datoutfile.close();  
+  datoutfile.close();
 
 // ====== Write results to scratchfile ======
   ofstream scratchresfile("scratchres2raw", ios::out | ios::trunc);
@@ -3518,19 +3518,19 @@ void gammaprocessor_crop(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
 
 // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  linestart
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  lineend
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  pixelstart
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  pixelend
     << "\nNumber of lines (non-multilooked): \t\t" <<  lineend-linestart+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  pixelend-pixelstart+1
@@ -3560,22 +3560,22 @@ void gammaprocessor_crop(
 void envisat_dump_data(
         const input_crop &crop_arg)
   {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("envisat_dump_data (BK 16-Jun-2003)")
   // ______ Build command ______
   // ______ make sure l0 etc. are correctly defined ______
   // ____ assume these are filled correctly ___
   INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "envisat_dump_data " << crop_arg.filein1 
+    INFO << "envisat_dump_data " << crop_arg.filein1
          << " " << crop_arg.fileout1
-         << " " << crop_arg.dbow.linelo 
+         << " " << crop_arg.dbow.linelo
          << " " << crop_arg.dbow.linehi
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "envisat_dump_data " << crop_arg.filein1 
+    INFO << "envisat_dump_data " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
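
envisat_dump_data, like the tsx/rs2/csk variants further below, shells out to an external helper: the command line is composed in the INFO stream, copied to a fixed buffer, and run with system(), with the window arguments appended only when a DBOW crop was requested. A simplified sketch of that pattern (helper name as used here; error handling reduced to a status check):

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    int dump_window(const std::string &in, const std::string &out,
                    int l0, int lN, int p0, int pN)
    {
      char cmd[512];
      std::snprintf(cmd, sizeof(cmd), "envisat_dump_data %s %s %d %d %d %d",
                    in.c_str(), out.c_str(), l0, lN, p0, pN);
      const int status = std::system(cmd);    // helper must be on PATH
      if (status != 0)
        std::fprintf(stderr, "dump failed, status %d\n", status);
      return status;
    }
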
@@ -3598,18 +3598,18 @@ void envisat_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -3629,22 +3629,22 @@ void envisat_dump_data(
 void envisat_dump_VV(
         const input_crop &crop_arg)
   {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("envisat_dump_VV (MCC 16-Jun-2003)")
   // ______ Build command ______
   // ______ make sure l0 etc. are correctly defined ______
   // ____ assume these are filled correctly ___
   INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "envisat_dump_VV " << crop_arg.filein1 
+    INFO << "envisat_dump_VV " << crop_arg.filein1
          << " " << crop_arg.fileout1
-         << " " << crop_arg.dbow.linelo 
+         << " " << crop_arg.dbow.linelo
          << " " << crop_arg.dbow.linehi
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "envisat_dump_VV " << crop_arg.filein1 
+    INFO << "envisat_dump_VV " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
@@ -3667,18 +3667,18 @@ void envisat_dump_VV(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -3697,22 +3697,22 @@ void envisat_dump_VV(
 void envisat_dump_HH(
         const input_crop &crop_arg)
   {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("envisat_dump_HH (BK 16-Jun-2003)")
   // ______ Build command ______
   // ______ make sure l0 etc. are correctly defined ______
   // ____ assume these are filled correctly ___
   INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "envisat_dump_HH " << crop_arg.filein1 
+    INFO << "envisat_dump_HH " << crop_arg.filein1
          << " " << crop_arg.fileout1
-         << " " << crop_arg.dbow.linelo 
+         << " " << crop_arg.dbow.linelo
          << " " << crop_arg.dbow.linehi
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "envisat_dump_HH " << crop_arg.filein1 
+    INFO << "envisat_dump_HH " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
@@ -3735,18 +3735,18 @@ void envisat_dump_HH(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -3775,16 +3775,16 @@ void envisat_dump_HH(
 void tsx_dump_data(
        const input_crop &crop_arg)
 {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("tsx_dump_data (PM 06-Apr-2009)")
     // ______ Build command ______
     // ______ make sure l0 etc. are correctly defined ______
     // ____ assume these are filled correctly ___
     int16 status = 0;    // [MA] check exit status of system calls for proper error handling
     INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "tsx_dump_data.py " << crop_arg.filein1 
+    INFO << "tsx_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1
          //<< " " << crop_arg.dbow.linelo - 1
          << " " << crop_arg.dbow.linelo
@@ -3793,7 +3793,7 @@ void tsx_dump_data(
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "tsx_dump_data.py " << crop_arg.filein1 
+    INFO << "tsx_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
@@ -3809,7 +3809,7 @@ void tsx_dump_data(
     }
   INFO.reset();
   INFO.print();
-  
+
   // ====== Write results to scratchfile ======
   ofstream scratchresfile("scratchres2raw", ios::out | ios::trunc);
   bk_assert(scratchresfile,"writeslc: scratchres2raw",__FILE__,__LINE__);
@@ -3822,18 +3822,18 @@ void tsx_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -3865,16 +3865,16 @@ void tsx_dump_data(
 void rs2_dump_data(
        const input_crop &crop_arg)
 {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("rs2_dump_data (MA,PM 04-Oct-2009)")
     // ______ Build command ______
     // ______ make sure l0 etc. are correctly defined ______
     // ____ assume these are filled correctly ___
     int16 status = 0;                                                       // [MA] check exit status of system calls for proper error handling
     INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "rs2_dump_data.py " << crop_arg.filein1 
+    INFO << "rs2_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1
          //<< " " << crop_arg.dbow.linelo - 1
          << " " << crop_arg.dbow.linelo
@@ -3883,7 +3883,7 @@ void rs2_dump_data(
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "rs2_dump_data.py " << crop_arg.filein1 
+    INFO << "rs2_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
@@ -3899,7 +3899,7 @@ void rs2_dump_data(
     }
   INFO.reset();
   INFO.print();
-  
+
   // ====== Write results to scratchfile ======
   ofstream scratchresfile("scratchres2raw", ios::out | ios::trunc);
   bk_assert(scratchresfile,"writeslc: scratchres2raw",__FILE__,__LINE__);
@@ -3912,18 +3912,18 @@ void rs2_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -3955,16 +3955,16 @@ void rs2_dump_data(
 void csk_dump_data(
        const input_crop &crop_arg)
 {
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("csk_dump_data (MA,PD 23-Jun-2010)")
     // ______ Build command ______
     // ______ make sure l0 etc. are correctly defined ______
     // ____ assume these are filled correctly ___
     int16 status = 0;                                                       // [MA] check exit status of system calls for proper error handling
     INFO.reset();
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
-    INFO << "csk_dump_data.py " << crop_arg.filein1 
+    INFO << "csk_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1
          //<< " " << crop_arg.dbow.linelo - 1
          << " " << crop_arg.dbow.linelo
@@ -3973,7 +3973,7 @@ void csk_dump_data(
          << " " << crop_arg.dbow.pixlo
          << " " << crop_arg.dbow.pixhi << ends;
   else
-    INFO << "csk_dump_data.py " << crop_arg.filein1 
+    INFO << "csk_dump_data.py " << crop_arg.filein1
          << " " << crop_arg.fileout1 << ends;
   char cmd[512];// command string
   strcpy(cmd, INFO.get_str());
@@ -3989,7 +3989,7 @@ void csk_dump_data(
     }
   INFO.reset();
   INFO.print();
-  
+
   // ====== Write results to scratchfile ======
   ofstream scratchresfile("scratchres2raw", ios::out | ios::trunc);
   bk_assert(scratchresfile,"writeslc: scratchres2raw",__FILE__,__LINE__);
@@ -4002,18 +4002,18 @@ void csk_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linelo
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.linehi
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixlo
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  crop_arg.dbow.pixhi
     << "\nNumber of lines (non-multilooked): \t\t" <<  crop_arg.dbow.linehi-crop_arg.dbow.linelo+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  crop_arg.dbow.pixhi-crop_arg.dbow.pixlo+1
@@ -4058,7 +4058,7 @@ void radarsat_dump_data(
   uint rec_seq;// type B4
   unsigned char rec_sub1, rec_type, rec_sub2, rec_sub3;// type B1
 
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("radarsat_dump_data (Bert Kampes 04-Aug-2004)")
   PROGRESS.print("Start cropping slc data for RADARSAT.");
   #ifdef __X86PROCESSOR__
@@ -4067,7 +4067,7 @@ void radarsat_dump_data(
   INFO.print("NO byte swapping performed, you must be on Big Endian platform.");
   #endif
 
-  // ______ Open files ______ 
+  // ______ Open files ______
   ifstream datfile;
   openfstream(datfile,crop_arg.filein1);
   bk_assert(datfile,crop_arg.filein1,__FILE__,__LINE__);
@@ -4083,7 +4083,7 @@ void radarsat_dump_data(
   datfile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   datfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("RSAT: Expecting record 1 with code {63,192,18,18}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -4161,17 +4161,17 @@ void radarsat_dump_data(
   DEBUG.print();
 
 // ______ Check with previous section ______
-  if (numlines != numdatarec) 
+  if (numlines != numdatarec)
     {
-    WARNING << "code 904: Number of lines seems not to be consistent in file: " 
+    WARNING << "code 904: Number of lines seems not to be consistent in file: "
          << crop_arg.filein1 << " : " << numlines << " != " << numdatarec;
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
     }
   if ((numbytesdata / 4) != numpixels)
     {
-    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << numpixels << " != " << (numbytesdata / 4);
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -4188,7 +4188,7 @@ void radarsat_dump_data(
   uint orig_numlines = numlines;
   uint orig_numpixels = numpixels;
 
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
     {
     WARNING.print("cropping data may be difficult, due to INC/DECREASE storage");
@@ -4233,7 +4233,7 @@ void radarsat_dump_data(
   datfile.read((char*)&rec_sub2,sizeb1);// second record sub type code
   datfile.read((char*)&rec_sub3,sizeb1);// third record sub type code
   DEBUG.print("RSAT: Expecting record 2 with code {50,11,18,20}");
-  DEBUG << "rec_seq: " << rec_seq 
+  DEBUG << "rec_seq: " << rec_seq
         << "; rec_sub1: " << int(rec_sub1)
         << "; rec_type: " << int(rec_type)
         << "; rec_sub2: " << int(rec_sub2)
@@ -4253,7 +4253,7 @@ void radarsat_dump_data(
   uint startrec3 = lenrec1+lenrec2;
   uint startrecN = lenrec1+(numlines-1)*lenrec2;// start of last record
   // --- azimuth time to first line (depends on decrease/increase): ---
-  uint zdmsecofday1 = 99999;// B4 
+  uint zdmsecofday1 = 99999;// B4
   uint zdmsecofday2 = 99999;// B4
   uint zdmsecofdayN = 99999;// B4
   datfile.seekg(startrec2+44,ios::beg);//
@@ -4306,7 +4306,7 @@ void radarsat_dump_data(
     WARNING.print("DECREASE pixel direction detected: I will flip left-right the RSAT data");
     increasing_pix = false;
     }
- 
+
 
 
   // ====== Process requested lines ======
@@ -4320,7 +4320,7 @@ void radarsat_dump_data(
   matrix <int16> TMPSHORT(1,2);
   datfile >> TMPSHORT;          // read in first complex pixel for test
   real8 tmpmag = sqrt(
-    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) + 
+    real8(int16(ntohs(TMPSHORT(0,0)))*int16(ntohs(TMPSHORT(0,0)))) +
     real8(int16(ntohs(TMPSHORT(0,1)))*int16(ntohs(TMPSHORT(0,1)))));
   DEBUG << "First complex element in datafile: ("
        << int16(ntohs(TMPSHORT(0,0))) << ","
@@ -4352,8 +4352,8 @@ void radarsat_dump_data(
   lenrec2 = ntohl(lenrec2);     // bk 6 jul 2000, byteorder x86 machines.
   if (lenrec2 != lendatarec2)
     {
-    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << lenrec2 << " != " << lendatarec2;
     WARNING.print(ERROR.get_str());
     ERROR.reset();
@@ -4408,19 +4408,19 @@ void radarsat_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_short"
 
     // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  linestart
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  lineend
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  pixelstart
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  pixelend
     << "\nNumber of lines (non-multilooked): \t\t" <<  lineend-linestart+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  pixelend-pixelstart+1
@@ -4438,15 +4438,15 @@ void radarsat_dump_data(
   if (numchannels != 1)                         // ??
     {
     WARNING << "code 904: Number of channels in file: "
-         << crop_arg.filein1 << " = " 
+         << crop_arg.filein1 << " = "
          << numchannels << " != 1 ";
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
    }
  if (bottomborder != 0 || topborder != 0 || leftborder != 0 || rightborder != 0)
     {
-    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: " 
-         << leftborder << "," << rightborder << "," << bottomborder << "," 
+    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: "
+         << leftborder << "," << rightborder << "," << bottomborder << ","
          << topborder << " in file: " << crop_arg.filein1;
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -4482,7 +4482,7 @@ void  OversampleSLC(
   strcpy(outfile,oversampleinput.fileoutovs);
   const int32 OsrRange   = oversampleinput.OsrRange;   // Range oversampling ratio.
   const int32 OsrAzimuth = oversampleinput.OsrAzimuth; // Azimuth oversampling ratio.
-  const int32 FilterSize = oversampleinput.FilterSize; // Length of the kernel for the oversampling in range. 
+  const int32 FilterSize = oversampleinput.FilterSize; // Length of the kernel for the oversampling in range.
   if (OsrAzimuth!=1 && OsrAzimuth!=2)
     {
     ERROR.print("oversampling in azimuth: only factor 2");
@@ -4513,16 +4513,16 @@ void  OversampleSLC(
   // ______ Interpolation kernel section ______
   matrix <RN_DSP_CPXACCURACY> LINE_IN(1,OsrRange*(numpixels-1)+1);// zero alternating
   matrix <RN_DSP_CPXACCURACY> LINE_OUT(1,OsrRange*numpixels);// ovs line
-  const int32 interp_size         = FilterSize  * OsrRange - 1; 
-  matrix <RN_DSP_CPXACCURACY> INTERP_KERNEL(1,interp_size);     
+  const int32 interp_size         = FilterSize  * OsrRange - 1;
+  matrix <RN_DSP_CPXACCURACY> INTERP_KERNEL(1,interp_size);
 
   // ______ Generate the range interpolator impulse response ______
-  const RN_DSP_ACCURACY invosr    = 1.0/RN_DSP_ACCURACY(OsrRange); 
-  RN_DSP_ACCURACY interpsamplepos = invosr - RN_DSP_ACCURACY(FilterSize)/2.0; 
-  for (int32 i=0; i<interp_size; i++) 
+  const RN_DSP_ACCURACY invosr    = 1.0/RN_DSP_ACCURACY(OsrRange);
+  RN_DSP_ACCURACY interpsamplepos = invosr - RN_DSP_ACCURACY(FilterSize)/2.0;
+  for (int32 i=0; i<interp_size; i++)
     {
-    INTERP_KERNEL(0,i) = RN_DSP_CPXACCURACY(sinc(interpsamplepos),0); 
-    interpsamplepos   += invosr; 
+    INTERP_KERNEL(0,i) = RN_DSP_CPXACCURACY(sinc(interpsamplepos),0);
+    interpsamplepos   += invosr;
     }
 
   // ______ Normalize kernel (BK) ______
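The loop above fills INTERP_KERNEL with FilterSize*OsrRange - 1 sinc taps spaced 1/OsrRange apart and centred on the filter. A minimal standalone sketch of the same construction, assuming unit DC gain after zero-insertion as the normalization target (the helper name and the exact normalization Doris applies in the step below are assumptions):

    #include <cmath>
    #include <vector>

    // sinc(x) = sin(pi*x)/(pi*x); M_PI is the POSIX constant from <cmath>.
    static double sinc_d(double x)
      { return (x == 0.0) ? 1.0 : std::sin(M_PI*x)/(M_PI*x); }

    // FilterSize*OsrRange - 1 taps, sampled every 1/OsrRange, centred on the
    // filter; scaled so the taps sum to OsrRange (unit DC gain after
    // zero-insertion upsampling).
    std::vector<double> make_range_kernel(int OsrRange, int FilterSize)
      {
      const int    n      = FilterSize*OsrRange - 1;
      const double invosr = 1.0/OsrRange;
      std::vector<double> k(n);
      double pos = invosr - FilterSize/2.0;
      double sum = 0.0;
      for (int i = 0; i < n; ++i, pos += invosr)
        { k[i] = sinc_d(pos); sum += k[i]; }
      for (int i = 0; i < n; ++i)
        k[i] *= OsrRange/sum;
      return k;
      }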
@@ -4549,7 +4549,7 @@ void  OversampleSLC(
     matrix<real4> x_axis(NP_kernel_az,1);
     for (int32 i=0; i<NP_kernel_az; ++i)
       x_axis(i,0) = -NP_kernel_az/2 + 0.5 + i;// [-2.5 -1.5 -0.5 0.5 1.5 2.5]
-    matrix<complr4> tmp_kernel = mat2cr4(rc_kernel(x_axis, CHI, NP_kernel_az)); 
+    matrix<complr4> tmp_kernel = mat2cr4(rc_kernel(x_axis, CHI, NP_kernel_az));
     DEBUG.print("Normalizing kernel");
     real4 qsum = 0.0;
     for (int32 i=0; i<NP_kernel_az; ++i)
@@ -4602,7 +4602,7 @@ void  OversampleSLC(
           LINE_IN(0,OsrRange*ii) = RN_DSP_CPXACCURACY(bufferrreal4(0,0),bufferrimag4(0,0));
           }
         break;
-        } 
+        }
       // ______ Convert first to ci2 before writing to file ______
       case FORMATCI2:
         {
@@ -4630,7 +4630,7 @@ void  OversampleSLC(
     maxpos = (interp_size-1)/2 + (LINE_IN.pixels() - 1) + OsrRange - 1;
     for (int ii=minpos; ii<=maxpos; ii++)
       {
-      LINE_OUT(0,RN_k) = 0; 
+      LINE_OUT(0,RN_k) = 0;
       jmin = max(int32(0), int32(ii-interp_size+1));
       jmax = min(int32(ii),int32(LINE_IN.pixels()-1));
       for (int j=jmin; j<=jmax; j++)
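The loop above (cut off by the hunk boundary) is a direct convolution of the zero-stuffed line with the kernel: jmin and jmax clip the running index so both operands stay inside their arrays, and only the central OsrRange*numpixels output samples between minpos and maxpos are kept. A self-contained sketch of the same bounded convolution (the names in, kernel and convolve_line are illustrative):

    #include <algorithm>
    #include <complex>
    #include <vector>

    // Full convolution out[ii] = sum_j in[j]*kernel[ii-j]; the bounds on j
    // play the role of jmin/jmax in the loop above.
    std::vector<std::complex<float> > convolve_line(
            const std::vector<std::complex<float> > &in,     // zero-stuffed line
            const std::vector<std::complex<float> > &kernel)
      {
      const int nin = int(in.size());
      const int nk  = int(kernel.size());
      std::vector<std::complex<float> > out(nin + nk - 1);
      for (int ii = 0; ii < nin + nk - 1; ++ii)
        {
        std::complex<float> acc(0.0f, 0.0f);
        const int jmin = std::max(0, ii - nk + 1);
        const int jmax = std::min(ii, nin - 1);
        for (int j = jmin; j <= jmax; ++j)
          acc += in[j]*kernel[ii - j];
        out[ii] = acc;
        }
      return out;
      }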
@@ -4665,7 +4665,7 @@ void  OversampleSLC(
       for (int32 x=0; x<LINE_OUT.pixels(); ++x)
         BUFFER_AZ(NP_kernel_az-1,x) = complr4(LINE_OUT(0,x));// add new line
       // ______ Oversample in azimuth (interpolate) ______
-      LINE_OUT2 = sum(dotmult(BUFFER_AZ,KERNEL_AZ),1);// at half+0.5 
+      LINE_OUT2 = sum(dotmult(BUFFER_AZ,KERNEL_AZ),1);// at half+0.5
       }
 
     // ______ Write LINE_OUT in the output file ______
@@ -4721,13 +4721,13 @@ void  OversampleSLC(
             {
             // _____ write line 2.0 ______
             for (int ii=0; ii<LINE_OUT.pixels(); ii++)
-              {  
+              {
               bufferci16 = cr4toci2(complr4(BUFFER_AZ(NP_kernel_az/2-1,ii)));
               ofile.write((char*)&bufferci16,sizeof(compli16));
               }
             // _____ write line 2.5 ______
             for (int ii=0; ii<LINE_OUT.pixels(); ii++)
-              {  
+              {
               bufferci16 = cr4toci2(complr4(LINE_OUT2(0,ii)));
               ofile.write((char*)&bufferci16,sizeof(compli16));
               }
@@ -4760,8 +4760,8 @@ void  OversampleSLC(
 
 
   // ______ Close files ______
-  #undef RN_DSP_ACCURACY 
-  #undef RN_DSP_CPXACCURACY 
+  #undef RN_DSP_ACCURACY
+  #undef RN_DSP_CPXACCURACY
   ifile.close();
   ofile.close();
 
@@ -4785,21 +4785,21 @@ void  OversampleSLC(
   // ______ [BK] write new file size, etc. here ______
   scratchresfile
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  outfile
     << "\nData_output_format: \t\t\t\t";
     if (oversampleinput.oformatflag==FORMATCR4)
       scratchresfile << "complex_real4";
     if (oversampleinput.oformatflag==FORMATCI2)
       scratchresfile << "complex_short";
-  scratchresfile 
+  scratchresfile
     << "\nFirst_line (w.r.t. ovs_image):       \t\t"
     <<  (imageinfo.currentwindow.linelo-1)*OsrAzimuth+1
-    << "\nLast_line (w.r.t. ovs_image):        \t\t" 
+    << "\nLast_line (w.r.t. ovs_image):        \t\t"
     <<  imageinfo.currentwindow.linehi*OsrAzimuth
-    << "\nFirst_pixel (w.r.t. ovs_image):      \t\t" 
+    << "\nFirst_pixel (w.r.t. ovs_image):      \t\t"
     <<  (imageinfo.currentwindow.pixlo-1)*OsrRange+1
-    << "\nLast_pixel (w.r.t. ovs_image):       \t\t" 
+    << "\nLast_pixel (w.r.t. ovs_image):       \t\t"
     <<  imageinfo.currentwindow.pixhi*OsrRange
     << "\nMultilookfactor_azimuth_direction:   \t\t"
     << 1.0/OsrAzimuth
@@ -4812,13 +4812,13 @@ void  OversampleSLC(
     << OsrRange*numpixels
     << "\n#First_line (w.r.t. original_image): \t\t"
     <<  imageinfo.currentwindow.linelo
-    << "\n#Last_line (w.r.t. original_image):  \t\t" 
+    << "\n#Last_line (w.r.t. original_image):  \t\t"
     <<  imageinfo.currentwindow.linehi
-    << "\n#First_pixel (w.r.t. original_image):\t\t" 
+    << "\n#First_pixel (w.r.t. original_image):\t\t"
     <<  imageinfo.currentwindow.pixlo
-    << "\n#Last_pixel (w.r.t. original_image): \t\t" 
+    << "\n#Last_pixel (w.r.t. original_image): \t\t"
     <<  imageinfo.currentwindow.pixhi;
-  scratchresfile 
+  scratchresfile
     << "\n*******************************************************************";
   if (fileid == MASTERID)
     scratchresfile <<  "\n* End_" << processcontrol[pr_m_oversample] << "_NORMAL";
@@ -4836,7 +4836,7 @@ void  OversampleSLC(
 // Code copied from writeslc
 // with some modifications
 // Modified by LG for reading ALOS Fine
-// To read complex real4 data 
+// To read complex real4 data
 void palsar_fine_dump_data(
         const input_gen &generalinput,
         const input_crop &crop_arg,
@@ -4853,11 +4853,11 @@ void palsar_fine_dump_data(
                         c6[7],                  // correctly 7 for \0
                         c8[9];                  // correctly 9 for \0
 
-  // ______ Write some info ______ 
+  // ______ Write some info ______
   TRACE_FUNCTION("palsar_fine_dump_data (LG 28-Dec-2005)")
   PROGRESS.print("Start cropping slc data.");
-  
-  // ______ Open files ______ 
+
+  // ______ Open files ______
   ifstream datfile;
   openfstream(datfile,crop_arg.filein1);
   bk_assert(datfile,crop_arg.filein1,__FILE__,__LINE__);
@@ -4872,7 +4872,7 @@ void palsar_fine_dump_data(
     WARNING << "palsar_fine_dump_data : length of record 1 = \""
          <<  lenrec1 << "\"; expected \"720\" for PALSAR FINE SLC (CEOS, full scene).";
     WARNING.print();
-    
+
     }
 
   datfile.seekg(180,ios::beg);
@@ -4936,7 +4936,7 @@ void palsar_fine_dump_data(
 
 // ====== Check with volumefile / internal ======
// It seems that the line count (N+1) obtained from the volume file is wrong;
-  // it seems to be the pixle number in one line 
+  // it seems to be the pixel number in one line
   if (numlines != checklines)
     {
     WARNING << "code 902: data file: "
@@ -4950,17 +4950,17 @@ void palsar_fine_dump_data(
     }
 
 // ______ Check with previous section ______
-  if (numlines != numdatarec) 
+  if (numlines != numdatarec)
     {
-    WARNING << "code 904: Number of lines seems not to be consistent in file: " 
+    WARNING << "code 904: Number of lines seems to be inconsistent in file: "
          << crop_arg.filein1 << " : " << numlines << " != " << numdatarec;
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
     }
   if ((numbytesdata / 8) != numpixels)
     {
-    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    WARNING << "code 904: Number of pixels seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << numpixels << " != " << (numbytesdata / 8);
     WARNING.print();
    WARNING.print(" +this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
@@ -4975,7 +4975,7 @@ void palsar_fine_dump_data(
   uint pixelstart = 1;
   uint pixelend   = numpixels;                          // only for resultfile
 
-  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 && 
+  if (crop_arg.dbow.linehi!=0 && crop_arg.dbow.linelo!=0 &&
       crop_arg.dbow.pixhi!=0 && crop_arg.dbow.pixlo!=0)
     {
     window tempdbow(crop_arg.dbow.linelo, crop_arg.dbow.linehi,
@@ -5007,9 +5007,9 @@ void palsar_fine_dump_data(
     }
 
 // ______ Note complex<short> not in ANSI c ______
-  // 
+  //
       matrix <real4>    LINE(1,2*numpixels);            // size of real4
-  
+
 // ====== Process requested lines ======
   ofstream datoutfile;
   openfstream(datoutfile,crop_arg.fileout1,generalinput.overwrit);
@@ -5017,14 +5017,14 @@ void palsar_fine_dump_data(
 
   // ______ info on data, to avoid X86 problems ______
   // ______ according to CEOS specs, byte 413 is first complex pixel, etc. ______
-  // 720 + 84124*linenum + 412 
+  // 720 + 84124*linenum + 412
   datfile.seekg(lenrec1 + 412,ios::beg);
 
   matrix <real4> TMPREAL4(1,2);
   datfile >> TMPREAL4;          // read in first complex pixel for test
 
   real8 tmpmag = sqrt(
-    real8(real4(ntohl(TMPREAL4(0,0)))*real4(ntohl(TMPREAL4(0,0)))) + 
+    real8(real4(ntohl(TMPREAL4(0,0)))*real4(ntohl(TMPREAL4(0,0)))) +
     real8(real4(ntohl(TMPREAL4(0,1)))*real4(ntohl(TMPREAL4(0,1)))));
 
   DEBUG << "First complex element in datafile: ("
@@ -5059,8 +5059,8 @@ void palsar_fine_dump_data(
 
   if (lenrec2 != lendatarec2)
     {
-    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "  
-         << crop_arg.filein1 << ": " 
+    ERROR << "code 904: Length of datarecords seems to be inconsistent in file: "
+         << crop_arg.filein1 << ": "
          << lenrec2 << " != " << lendatarec2;
     WARNING.print(ERROR.get_str());
     ERROR.reset();
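Every seek in this routine follows the CEOS layout noted in the comment "720 + 84124*linenum + 412" earlier: a lenrec1-byte file header, one lenrec2-byte data record per line, and a 412-byte record header before the complex real4 pixels (8 bytes each). A hedged helper expressing that arithmetic (name and signature illustrative, not part of Doris):

    #include <ios>

    // Byte offset of complex pixel p (0-based) on line l (0-based) in a
    // PALSAR FINE SLC CEOS file: lenrec1 = 720, lenrec2 = e.g. 84124,
    // 412-byte record header, 8 bytes per complex real4 pixel.
    inline std::streamoff ceos_pixel_offset(std::streamoff lenrec1,
                                            std::streamoff lenrec2,
                                            int l, int p)
      { return lenrec1 + std::streamoff(l)*lenrec2 + 412 + std::streamoff(p)*8; }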
@@ -5112,19 +5112,19 @@ void palsar_fine_dump_data(
   scratchresfile
     << "\t\t\t" <<  crop_arg.idcrop
     << "\n*******************************************************************"
-    << "\nData_output_file: \t\t\t\t" 
+    << "\nData_output_file: \t\t\t\t"
     <<  crop_arg.fileout1
-    << "\nData_output_format: \t\t\t\t" 
+    << "\nData_output_format: \t\t\t\t"
     << "complex_real4"
 
 // ______ updateslcimage greps these ______
     << "\nFirst_line (w.r.t. original_image): \t\t"
     <<  linestart
-    << "\nLast_line (w.r.t. original_image): \t\t" 
+    << "\nLast_line (w.r.t. original_image): \t\t"
     <<  lineend
-    << "\nFirst_pixel (w.r.t. original_image): \t\t" 
+    << "\nFirst_pixel (w.r.t. original_image): \t\t"
     <<  pixelstart
-    << "\nLast_pixel (w.r.t. original_image): \t\t" 
+    << "\nLast_pixel (w.r.t. original_image): \t\t"
     <<  pixelend
     << "\nNumber of lines (non-multilooked): \t\t" <<  lineend-linestart+1
     << "\nNumber of pixels (non-multilooked): \t\t" <<  pixelend-pixelstart+1
@@ -5142,15 +5142,15 @@ void palsar_fine_dump_data(
   if (numchannels != 1)                         // ??
     {
     WARNING << "code 904: Number of channels in file: "
-         << crop_arg.filein1 << " = " 
+         << crop_arg.filein1 << " = "
          << numchannels << " != 1 ";
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
    }
  if (bottomborder != 0 || topborder != 0 || leftborder != 0 || rightborder != 0)
     {
-    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: " 
-         << leftborder << "," << rightborder << "," << bottomborder << "," 
+    WARNING << "code 904: Not implemented: offset border: left,right,bottom,top: "
+         << leftborder << "," << rightborder << "," << bottomborder << ","
          << topborder << " in file: " << crop_arg.filein1;
     WARNING.print();
    WARNING.print("this means SLC FORMAT IS DIFFERENT THAN EXPECTED.");
diff --git a/src/readdata.hh b/doris_core/readdata.hh
similarity index 100%
rename from src/readdata.hh
rename to doris_core/readdata.hh
diff --git a/src/readinput.cc b/doris_core/readinput.cc
similarity index 97%
rename from src/readinput.cc
rename to doris_core/readinput.cc
index 477cf8b..2c45931 100755
--- a/src/readinput.cc
+++ b/doris_core/readinput.cc
@@ -153,6 +153,7 @@ void readinput(
 //____RaffaeleNutricato END MODIFICATION SECTION 3
         input_filtazi     &filtaziinput,
         input_coarsecorr  &coarsecorrinput,
+ //       input_deramp      &derampinput, //MCC
         input_fine        &fineinput,
         input_reltiming   &reltiminginput, //[FvL]
         input_demassist   &demassistinput, //[FvL]
@@ -344,6 +345,7 @@ void readinput(
   coarsecorrinput.initoffsetL   = 0;                    // default initial offset
   coarsecorrinput.initoffsetP   = 0;                    // default initial offset
 
+  //deramp.filein1
   setunspecified(fineinput.ifpositions);                // check later, then set default
   const int32 def_fc_nwin       = 601;                  // default #windows
   fineinput.MasksizeL           = 64;                   // default correlation size
@@ -356,7 +358,26 @@ void readinput(
   fineinput.plotoffsets         = false;                // default no plotting
   fineinput.plotmagbg           = false;                // default no plotting
   fineinput.plotthreshold       = 0.3;                  // default no plotting
-
+  fineinput.shiftazi            = 1;                 // [1] shift spectrum to 0
+  
+  // Added by MCC for fine CCC coregistration
+  
+  setunspecified(fineinput.forefdem);
+  setunspecified(fineinput.firefdem);
+  fineinput.iformatflag   = FORMATI2;              // default gtopo30
+  fineinput.demrows       = 6000;                  // default gtopo30
+  fineinput.demcols       = 4800;                  // default gtopo30
+  fineinput.demnodata     = -9999;                 // default gtopo30
+  fineinput.demdeltalat   = deg2rad(0.00833333333333333333);// default gtopo30
+  fineinput.demdeltalon   = deg2rad(0.00833333333333333333);// default gtopo30
+  fineinput.demlatleftupper = deg2rad(89.995833333333333333);      // w020n90.DEM
+  fineinput.demlonleftupper = deg2rad(-19.995833333333333333);// w020n90.DEM
+ 
+  // end added by MCC for fine CCC coregistration
+  
+ 
+  
+  
   //____ added by FvL ____
   reltiminginput.threshold         = 0.4;               // default threshold data
   reltiminginput.maxiter           = 10000;             // default max. 10000 outliers removed
@@ -412,7 +433,7 @@ void readinput(
   resampleinput.dbow_geo.linehi = 0;                    // max. line coord. initialization
   resampleinput.dbow_geo.pixlo  = 0;                    // min. pixel coord. initialization
   resampleinput.dbow_geo.pixhi  = 0;                    // max. pixel coord. initialization
-  resampleinput.shiftazi        = true;                 // default apply shift
+  resampleinput.shiftazi        = 1;                    // default [1] apply shift
 
   interferoinput.method         = int_oldmethod;        // default method
   setunspecified(interferoinput.focint);                // check later, then set default
@@ -476,7 +497,9 @@ void readinput(
   comprefdeminput.demdeltalon   = deg2rad(0.00833333333333333333);// default gtopo30
   comprefdeminput.demlatleftupper = deg2rad(89.995833333333333333);     // w020n90.DEM
   comprefdeminput.demlonleftupper = deg2rad(-19.995833333333333333);// w020n90.DEM
-
+  
+  comprefdeminput.isCCC = false; // MCC
+  
   strcpy(subtrrefdeminput.focint,"cint.minrefdem.raw"); // default name
   subtrrefdeminput.offsetL      = 0;                    // default no offset
   subtrrefdeminput.offsetP      = 0;                    // default no offset
@@ -2688,6 +2711,10 @@ void readinput(
         fineinput.method=fc_magspace;
       else if (!strcmp(keyword,"OVERSAMPLE"))
         fineinput.method=fc_oversample;
+      else if (!strcmp(keyword,"COHERENCE"))
+        fineinput.method=fc_coherence;
+      else if (!strcmp(keyword,"INTENSITY"))
+        fineinput.method=fc_intensity;
       else
         {
         ERROR << "FC_METHOD: method " <<  keyword
@@ -2725,7 +2752,132 @@ void readinput(
       else
         WARNING.print("FC_PLOT: missing argument(s). (default: 0.4 NOBG)");
       }
+    
+    else if (!strcmp(keyword,"FC_DEM"))          // input file
+      {
+      strcpy(fineinput.firefdem,  word[1] );       // pass keyword
+      writearg(fineinput.firefdem);
+      }
+
+    // **********************************************************************
+    else if (!strcmp(keyword,"FC_DEM_FORMAT"))       //  format input file
+      {
+      keyword =  word[1];       // pass keyword
+      writearg(keyword);
+      toupper(keyword);
+      if (!strcmp(keyword,"R4") || !strcmp(keyword,"REAL4"))
+        fineinput.iformatflag = FORMATR4;
+      else if (!strcmp(keyword,"I2") || !strcmp(keyword,"SHORT"))
+        fineinput.iformatflag = FORMATI2;  // default
+      else if (!strcmp(keyword,"I2_BIGENDIAN") || 
+               !strcmp(keyword,"SHORT_BIGENDIAN"))
+        fineinput.iformatflag = FORMATI2_BIGENDIAN;
+      else if (!strcmp(keyword,"R8") || !strcmp(keyword,"REAL8"))
+        fineinput.iformatflag = FORMATR8;
+      else
+        {
+        ERROR << "FC_DEM_FORMAT: input format "
+             <<  keyword
+             << " not known (R4 R8 I2 (native) I2_BIGENDIAN); line "
+             << linecnt << ".";
+        PRINT_ERROR(ERROR.get_str())
+        throw(keyword_error);
+        }
+      }
 
+// **********************************************************************
+    else if (!strcmp(keyword,"FC_DEM_SIZE"))         // nrow ncols (lat lon)
+      {
+      char *pLast1, *pLast2 = NULL;
+      fineinput.demrows = strtoul(word[1], &pLast1, BASE10); 
+      fineinput.demcols = strtoul(word[2], &pLast2, BASE10);
+      if ( pLast1 == word[1] || pLast2 == word[2] ) // fails to convert one of them to an integer.
+       {
+        ERROR << "FC_DEM_SIZE: "  << word[1] << " : " 
+              << word[2] << " are not valid.";
+        PRINT_ERROR(ERROR.get_str())
+        throw(keyword_error);
+       }
+      writearg(fineinput.demrows);
+      writearg(fineinput.demcols);
+      }
+
+// **********************************************************************
+    else if (!strcmp(keyword,"FC_DEM_DELTA"))        // degrees delta lat lon
+      {
+      fineinput.demdeltalat = atof(word[1]);       // delta latitude [deg]
+      keyword                    =  word[2] ;         // update keyword
+      writearg(fineinput.demdeltalat);
+      writearg(keyword);  // lon
+      if (isdigit(keyword[0]) || keyword[0]=='.')    // likely to be 2 numbers
+        fineinput.demdeltalon = atof(keyword);
+      else // default same gridsize
+        fineinput.demdeltalon = fineinput.demdeltalat;
+
+      // ______ Store as radians ______
+      fineinput.demdeltalat = deg2rad(fineinput.demdeltalat);
+      fineinput.demdeltalon = deg2rad(fineinput.demdeltalon);
+      }
+
+// **********************************************************************
+    else if (!strcmp(keyword,"FC_DEM_UL"))           // upperleft coordinates
+      {
+      char *pLast1, *pLast2 = NULL;
+      fineinput.demlatleftupper = strtod(word[1], &pLast1);
+      fineinput.demlonleftupper = strtod(word[2], &pLast2);
+      if ( pLast1 == word[1] || pLast2 == word[2] ) // fails to convert
+       {
+        ERROR << "FC_DEM_UL: "  << word[1] << " : "
+                 << word[2] << " are not valid.";
+        PRINT_ERROR(ERROR.get_str())
+        throw(keyword_error);
+       }
+
+      writearg(fineinput.demlatleftupper);
+      writearg(fineinput.demlonleftupper);
+      fineinput.demlatleftupper = deg2rad(fineinput.demlatleftupper);
+      fineinput.demlonleftupper = deg2rad(fineinput.demlonleftupper);
+      }
+
+// **********************************************************************
+    else if (!strcmp(keyword,"FC_DEM_NODATA"))       // flag for no data
+      {
+      fineinput.demnodata = atof(word[1]);         // nodata flag value
+      writearg(fineinput.demnodata);
+      }
+   
+
+    //added by MCC for fine CCCoregistration
+  
+// **********************************************************************
+    else if (!strcmp(keyword,"FC_SHIFTAZI"))            // azimuth spectrum shift mode
+      {
+      keyword =  word[1] ;      // pass keyword      
+      writearg(keyword);
+      toupper(keyword);
+      if (!strcmp(keyword,"ON") ||                   // consistent with previous versions
+          (keyword[0] == '\0')  || // no keyword
+          !strcmp(keyword,"DC"))                        // Doppler centroid polynomial
+        fineinput.shiftazi = 1;
+      else if (!strcmp(keyword,"DERAMP")) //Only for TOPS
+        fineinput.shiftazi = 2;
+      else if (!strcmp(keyword,"OFF")   ||
+               !strncmp(keyword,"//",2) ||              // comment
+               !strncmp(keyword,"#",1)   ||  //comment
+                !strcmp(keyword,"NONE"))                 // just in case                
+        fineinput.shiftazi = 0;
+      else 
+        {
+        fineinput.shiftazi = 1;
+        WARNING << "FC_SHIFTAZI: line: " << linecnt << ": unknown argument: "
+             << keyword << "; Set to ON (do shift azimuth spectrum).";
+        WARNING.print();
+        }        
+      } 
+    
+//  ***************************************
+    
+    
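+    // Hedged example of an input-card fragment exercising the new FC_*
+    // keywords above (path and values illustrative only):
+    //   FC_METHOD     COHERENCE
+    //   FC_SHIFTAZI   DERAMP
+    //   FC_DEM        /some/path/w020n90.DEM
+    //   FC_DEM_FORMAT I2_BIGENDIAN
+    //   FC_DEM_SIZE   6000 4800
+    //   FC_DEM_DELTA  0.00833333333
+    //   FC_DEM_UL     89.9958333 -19.9958333
+    //   FC_DEM_NODATA -9999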
 // ____ start added by FvL ____
 
 // **********************************************************************
@@ -3326,27 +3478,32 @@ void readinput(
       }
 
 // **********************************************************************
-    else if (!strcmp(keyword,"RS_SHIFTAZI"))            // true: shift before rs.
+   
+    else if (!strcmp(keyword,"RS_SHIFTAZI"))            // azimuth spectrum shift mode
       {
-      keyword =  word[1] ;      // pass keyword
+      keyword =  word[1] ;      // pass keyword      
       writearg(keyword);
       toupper(keyword);
-      if (!strcmp(keyword,"ON"))
-        resampleinput.shiftazi = true;
-      else if (!strcmp(keyword,"OFF")   ||
-               !strncmp(keyword,"//",2) ||              // comment
-               !strncmp(keyword,"#",1)  ||              // comment
-               !(keyword[0] == '\0'))                   // no keyword
-        resampleinput.shiftazi = false;
-      else
+      
+      if (!strcmp(keyword,"OFF")   ||
+          !strncmp(keyword,"//",2) ||              // comment
+          !strncmp(keyword,"#",1)  ||              //comment
+          !strcmp(keyword,"NONE"))                 // just in case                
+        resampleinput.shiftazi = 0;
+      else if (!strcmp(keyword,"DERAMP")) //Only for TOPS
+        resampleinput.shiftazi = 2;
+      else if (!strcmp(keyword,"ON")  ||                   // consistent with previous versions          
+               !strcmp(keyword,"DC") ||                   // Doppler centroid polynomial
+               (keyword[0] == '\0'))                        // no keyword
+        resampleinput.shiftazi = 1;            
+      else 
         {
-        resampleinput.shiftazi = true;
+        resampleinput.shiftazi = 1;
         WARNING << "RS_SHIFTAZI: line: " << linecnt << ": unknown argument: "
              << keyword << "; Set to ON (do shift azimuth spectrum).";
         WARNING.print();
-        }
-      }
-
+        } 
+      } 
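+    // Hedged summary of the shared FC_SHIFTAZI / RS_SHIFTAZI encoding
+    // (stored as a plain int32):
+    //   0 : OFF / NONE  - leave the azimuth spectrum untouched
+    //   1 : ON / DC     - shift the spectrum from the Doppler centroid to 0 (default)
+    //   2 : DERAMP      - deramp using the TOPS (Sentinel-1) parameters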
 
 // **********************************************************************
 // *** COMPUTATION OF INTERFEROGRAM
@@ -5735,6 +5892,8 @@ void checkfine(
   {
   TRACE_FUNCTION("checkfine (BK 29-Sep-1999)")
   INFO.print("\n\t*** Input for step FINE ***");
+  INFO << "FC_METHOD id: " << fineinput.method;
+  INFO.print();
   switch (fineinput.method)
     {
 //    case fc_cmplxfft:
@@ -5752,6 +5911,12 @@ void checkfine(
     case fc_oversample:
       INFO.print("FC_METHOD: \tOVERSAMPLE is used for fine correlation.");
       break;
+    case fc_coherence:
+      INFO.print("FC_METHOD: \tCOHERENCE is used for fine correlation.");
+      break;
+    case fc_intensity:
+      INFO.print("FC_METHOD: \tINTENSITY is used for fine correlation.");
+      break;
     default:
       PRINT_ERROR("panic.");
       throw(keyword_error);
diff --git a/src/readinput.hh b/doris_core/readinput.hh
similarity index 94%
rename from src/readinput.hh
rename to doris_core/readinput.hh
index 43e4dac..b08f48f 100755
--- a/src/readinput.hh
+++ b/doris_core/readinput.hh
@@ -66,7 +66,7 @@ enum {
                 pr_m_EXTRA        ,     // 10  for future use
 
                 pr_s_readfiles    ,     // 11   flag for reading leader etc slave
-                pr_s_crop         ,     // 12  writing to raw slave   
+                pr_s_crop         ,     // 12  writing to raw slave
                 pr_s_oversample   ,     // 13  oversample slave image   //____RaffaeleNutricato added this line
                 pr_s_porbits      ,     // 14  calling getorb slave
                 pr_s_morbits      ,     // 15  modify orbits by correction parameters [HB]
@@ -119,19 +119,19 @@ const char processcontrol[NUMPROCESSES][ONE27] = {
    "filt_range:",           // 8  pr_m_filtrange
    "resample:",             // 9  pr_m_resample   [MA] 200903, fake entry
    "NOT_USED:",             // 10 pr_m_EXTRA
-                                 
+
    "readfiles:",            // 11 pr_s_readfiles
    "crop:",                 // 12 pr_s_crop
    "oversample:",           // 13 pr_s_oversample //____RaffaeleNutricato added this line
    "precise_orbits:",       // 14 pr_s_porbits
    "modify_orbits:",        // 15 pr_s_morbits [HB]
-   "sim_amplitude:",        // 16 pr_s_simamp   [MA] 2009, fake entry 
-   "master_timing:",        // 17 pr_s_mtiming  [MA] 2009, fake entry 
+   "sim_amplitude:",        // 16 pr_s_simamp   [MA] 2009, fake entry
+   "master_timing:",        // 17 pr_s_mtiming  [MA] 2009, fake entry
    "filt_azi:",             // 18 pr_s_filtazi: must be same as m_
    "filt_range:",           // 19 pr_s_filtrange
    "resample:",             // 20 pr_s_resample !slave
    "NOT_USED:",             // 21 pr_s_EXTRA
-                                 
+
    "coarse_orbits:",        // 22 pr_i_coarse
    "coarse_correl:",        // 23 pr_i_coarse2
    "fine_coreg:",           // 24 pr_i_fine
@@ -172,6 +172,8 @@ const int16     fc_cmplxspace   = 32;           // method for fine coreg
 const int16     fc_magfft       = 33;           // method for fine coreg
 const int16     fc_magspace     = 34;           // method for fine coreg
 const int16     fc_oversample   = 35;           // method oversample signal,not corr.
+const int16     fc_coherence    = 36;           // method oversample signal, CCC.
+const int16     fc_intensity    = 37;           // same as fc_oversample, but using intensity instead of amplitude
 
 const int16     fe_porbits      = 41;           // method for flat earth correction
 const int16     fe_method2      = 42;           // method for flat earth correction
@@ -192,9 +194,9 @@ const int16     rs_rc6p         = 606;          // raised cosine, 6 point
 const int16     rs_rc12p        = 612;          // raised cosine, 12 point
 
 const int16     int_oldmethod   = 91;           // method no overs. interf. gen.
-const int16     int_oversample  = 92;           // method oversample for int. computation 
+const int16     int_oversample  = 92;           // method oversample for int. computation
 const int16     coh_oldmethod   = 101;          // coherence computation up to refphase
-const int16     coh_newmethod   = 102;          // coherence computation including refdem 
+const int16     coh_newmethod   = 102;          // coherence computation including refdem
 
 const int16     fp_goldstein    = 81;           // method goldstein
 const int16     fp_spatialconv  = 82;           // method spatial conv. with kernel
@@ -320,7 +322,7 @@ struct input_simamp                     // arguments for simulation of amplitude
 // ______ ______
 
 
-struct input_mtiming                    // arguments of correlation for master timing error 
+struct input_mtiming                    // arguments of correlation for master timing error
   {
   char          ifpositions[4*ONE27];   // input file name for positions
   int16         method;                 // method selector, [MA] rm if not nec.
@@ -341,7 +343,7 @@ struct input_oversample          // arguments of m/s_oversample
   char      fileoutovs[4*ONE27];  // Name of the oversampled image
   int32     OsrRange;            // Oversampling ratio in range.
   int32     OsrAzimuth;          // Oversampling ratio in azimuth.
-  int32     FilterSize;          // Length of the interpolation kernel. 
+  int32     FilterSize;          // Length of the interpolation kernel.
   int32     oformatflag;         // Output format [cr4] ci16, I suggest [cr4].
   };
 //____RaffaeleNutricato END MODIFICATION SECTION 3
@@ -377,7 +379,25 @@ struct input_coarsecorr                 // arguments for correlation
   int32         initoffsetP;            // initial offset pixels
   };
 
+  
+//  struct input_deramp                       // arguments for deramping (placeholder)
+//  {
+//   
+ // char          filein1[4*ONE27];  //name input file
+ // char          fileout1[4*ONE27];// name output file
+//  
+//  };
 
+
+struct input_reramp                       // arguments for reramping (placeholder, no members yet)
+  {
+   
+ // char          filein1[4*ONE27];  //name input file
+ // char          fileout1[4*ONE27];// name output file
+  
+  };
+  
+  
 // ______ ______
 struct input_fine                       // arguments for fine coreg.
   {
@@ -390,10 +410,26 @@ struct input_fine                       // arguments for fine coreg.
   uint          AccP;                   // #pixels to be searched in p direction
   int32         initoffsetL;            // initial offset lines
   int32         initoffsetP;            // initial offset pixels
-  uint          osfactor;               // oversampling factor 
+  uint          osfactor;               // oversampling factor
   bool          plotoffsets;            // call script
   bool          plotmagbg;              // call script
-  real4         plotthreshold;          // call script
+  real4         plotthreshold;          // call script  
+  int32         shiftazi;               // [1] shift spectrum to 0; 0: no shift; 2: TOPS deramp
+  
+  // * added by MCC
+  // For coherence method only
+  char          firefdem[4*ONE27];      // input filename reference dem
+  char          forefdem[4*ONE27];      // output filename DEM in radarcoord.
+  int16         iformatflag;            // input format [signed short]
+  uint          demrows;                // number of
+  uint          demcols;                // number of
+  real8         demdeltalat;            // radians
+  real8         demdeltalon;            // radians
+  real8         demlatleftupper;        // radians
+  real8         demlonleftupper;        // radians
+  real8         demnodata;              // identifier/flag
+
+  // * added by MCC  
   };
 
 
@@ -420,7 +456,7 @@ struct input_demassist                  // arguments for DEM assisted coregistra
   real8         demnodata;              // identifier/flag
   char          forefdemhei[4*ONE27];   // output filename DEM in radarcoord.
   char          fodem[4*ONE27];         // flag+name output of cropped dem
-  char          fodemi[4*ONE27];                // flag+name output of interpolated dem
+  char          fodemi[4*ONE27];                // flag+name output of interpolated dem       
   };
 
 // ______ ______
@@ -477,7 +513,7 @@ struct input_resample                   // arguments for resampling slave
   int16         oformatflag;            // output format [cr4] ci16
   window        dbow_geo;               // cut out of original master.geo
   window        dbow;                   // cut out of original master.radar
-  bool          shiftazi;               // [true] shift spectrum to 0
+  int32         shiftazi;               // [1] center spectrum around zero; 0: do nothing; 2: deramp using Sentinel-1 parameters
   };
 
 
@@ -571,6 +607,10 @@ struct input_comprefdem                 // arguments for reference phase from DE
   bool          includerefpha;          // flag to include_flatearth correction
   char          fodem[4*ONE27];         // flag+name output of cropped dem
   char          fodemi[4*ONE27];                // flag+name output of interpolated dem
+  bool          isCCC;                 // true if demassist is used for coherent complex coregistration (CCC); default false
+  
+  
+  
   };
 
 // ______ ______
@@ -607,7 +647,7 @@ struct input_unwrap                     // arguments for unwrapping
   };
 
 // ______ ______ [HB]
-struct input_estorbits                    // arguments for orbit error estimation		
+struct input_estorbits                    // arguments for orbit error estimation
 {
   int16 	method; 		  // method selector
   char          fiheightmap[4*ONE27];     // file with heightmap in radar coordinates, obtained with CRD_OUT_DEM_LP
@@ -716,7 +756,7 @@ void readinput(
       input_unwrap      &unwrapinput,
       input_estorbits   &estorbitsinput,   // [HB]
       input_slant2h     &slant2hinput,
-      input_geocode     &geocodeinput);  
+      input_geocode     &geocodeinput);
 //____RaffaeleNutricato END MODIFICATION SECTION 4
 
 
diff --git a/src/referencephase.cc b/doris_core/referencephase.cc
similarity index 99%
rename from src/referencephase.cc
rename to doris_core/referencephase.cc
index 862c9c4..9f1b728 100755
--- a/src/referencephase.cc
+++ b/doris_core/referencephase.cc
@@ -129,7 +129,9 @@ void flatearth(
   // ______ Distribute points wel distributed over win ______
   // ______ or read from ascii file ______
   // ______(i,0): line, (i,1): pixel, (i,2) flagfromdisk______
-  matrix<uint> Position;
+  //matrix<uint> Position;
+  // [FvL] for correct folding of points outside overlap window when inserted by file
+  matrix<int> Position;
   const uint  Npoints = comprefphainput.Npoints;
   register int32 i,j,k,index;
 
@@ -148,9 +150,13 @@ void flatearth(
     for (i=0; i<Npoints; ++i)
       {
       ifpos >> ll >> pp;
-      Position(i,0) = uint(ll);
-      Position(i,1) = uint(pp);
-      Position(i,2) = uint(1);                // flag from file
+      //Position(i,0) = uint(ll);
+      //Position(i,1) = uint(pp);
+      //Position(i,2) = uint(1);                // flag from file
+      // [FvL]
+      Position(i,0) = int(ll);
+      Position(i,1) = int(pp);
+      Position(i,2) = int(1);                // flag from file
       ifpos.getline(dummyline,2*ONE27,'\n');       // goto next line.
       }
     ifpos.close();
@@ -177,7 +183,7 @@ void flatearth(
   // ______Check redundancy______
   if (Npoints < Nunk)
     {
-    PRINT_ERROR("flatearth: Number of points is smaller than parameters solved for.")
+    PRINT_ERROR("flatearth: Number of points is smaller than parameters solved for.");
     throw(input_error);
     }
 
@@ -1071,7 +1077,7 @@ void demassist(
   //============ (radar geometry)                         =============
   //===================================================================
 
-
+  
   //===================================================================
   //============ Determine inverse transformation         =============
   //============ (slave corners only, needed for overlap) =============
@@ -1409,7 +1415,7 @@ void demassist(
   scratchresfile.close();
 
 
-  } // END demassist
+  } // END demassist
 
  
 /****************************************************************
@@ -1452,6 +1458,7 @@ void radarcodedem(
   
   // _____ start added by MA _____
   bool mlookedIFG         =  false;                           // true: ifg is multilooked
+ 
   int32 mlL               = interferogram.multilookL;         // initialize multilookfactor
   int32 mlP               = interferogram.multilookP;
   const int32 &ifgmlL     = interferogram.multilookL;         // multilookfactor of interferogram
@@ -1464,6 +1471,8 @@ void radarcodedem(
     }
   // _____ end added by MA _____
 
+  
+  
   const real8 m_min4picdivlam = (-4.0*PI*SOL)/master.wavelength;
   const real8 s_min4picdivlam = (-4.0*PI*SOL)/slave.wavelength;
   DEBUG << "master wavelength = " << master.wavelength;
@@ -1595,6 +1604,8 @@ void radarcodedem(
 
   // ______ Open (temporary) output files ______
   // DEM heights 
+  INFO << "OutputFile fodem: " << refdeminput.fodem << endl;
+  INFO.print();
   ofstream demofile;
   openfstream(demofile,refdeminput.fodem,generalinput.overwrit);
   bk_assert(demofile,refdeminput.fodem,__FILE__,__LINE__);
@@ -1650,6 +1661,9 @@ void radarcodedem(
     PROGRESS << STEP << "Reading crop of DEM for buffer: " << buffer+1;
     PROGRESS.print();
     DEBUG.print("Reading input DEM into real4 matrix (buffer).");
+    INFO << "file info: name: " << refdeminput.firefdem << ", nof flat pixels: " << numberoflatpixels << endl;
+    INFO << "file info, format: " << refdeminput.iformatflag << endl;
+    INFO.print();
     switch (refdeminput.iformatflag)
       {
       // ______ Read as short BE, then convert to host order ______
@@ -1940,7 +1954,8 @@ if (outputh2ph==true)
        << restlines << " lines.";
   INFO.print();
 
-
+  INFO << "OutputFile forefdem: " << refdeminput.forefdem;
+  INFO.print();
   // ______ Open output files ______
   ofstream refdemofile;       // refdem phase
   openfstream(refdemofile,refdeminput.forefdem,generalinput.overwrit);
@@ -1970,6 +1985,9 @@ if (outputh2ph==true)
   ofstream refdemheiofile;// Radarcoded DEM (Z.Perski)
   if (outputrefdemhei==true)
     {
+      INFO << "OutputFile forefdemhei: "<< refdeminput.forefdemhei;
+      INFO.print();
+
     openfstream(refdemheiofile,refdeminput.forefdemhei,generalinput.overwrit);
     bk_assert(refdemheiofile,refdeminput.forefdemhei,__FILE__,__LINE__);
     }
@@ -2094,8 +2112,9 @@ if (outputh2ph==true)
         output_layer(i,j) = real4(output_buffer(i,j));   // real8 --> real4 
    //     convert_type(output_buffer,output_layer); // TODO MA, should replace above 3 lines but this one doesn't work properly yet
 
-//cerr << "refphase: blines: " << blines << " Npixelsml " << Npixelsml << endl;
-
+    cerr << "refphase: blines: " << blines << " Npixelsml " << Npixelsml << endl;
+    INFO << "size buffer: lines: " << output_layer.lines() << ", pixels: " << output_layer.pixels() << endl;
+    INFO.print();
   // _____ start added by MA _____
     if ( mlookedIFG == true)                             // [MA] if ifg is multilooked by a factor
       {
diff --git a/src/referencephase.hh b/doris_core/referencephase.hh
similarity index 100%
rename from src/referencephase.hh
rename to doris_core/referencephase.hh
diff --git a/src/slcimage.cc b/doris_core/slcimage.cc
similarity index 86%
rename from src/slcimage.cc
rename to doris_core/slcimage.cc
index 94a3a7c..5e3860b 100755
--- a/src/slcimage.cc
+++ b/doris_core/slcimage.cc
@@ -80,6 +80,19 @@ slcimage::slcimage()
   f_DC_a0                 = 0.0;                // [Hz] default ERS2
   f_DC_a1                 = 0.0;
   f_DC_a2                 = 0.0;
+  
+  f_DC_t_ref_az            = 0.0;             // DC_reference_azimuth_time
+  f_DC_t_ref_rn           = 0.0;              // DC_reference_range_time
+// ________________  TOPS  ONLY__________________
+    // FM polynomial
+  FM_t_ref_az            = 0.0 ;        // azimuth time reference for frequency modulation rate 
+  FM_t_ref_rn            = 0.0 ;        // range time reference for frequency modulation rate 
+  FM_a0                  = 0.0 ;        // constant term Hz
+  FM_a1                  = 0.0 ;        // linear term Hz/s
+  FM_a2                  = 0.0 ;        // quadratic term Hz/s/s
+  Ks                     = 0.0 ;        // azimuth steering rate [deg/s]
+  dt_az                  = 0.0 ;        // Azimuth_time_interval [s]
+  
   rsr2x                   = 18.9624680*2.0e6;   // [Hz] default ERS2
   rbw                     = 15.55e6;            // [Hz] default ERS2
   currentwindow.linelo    = 1;
@@ -108,6 +121,7 @@ slcimage::slcimage()
   slavemasteroffsets.pN0  = 0;
   slavemasteroffsets.lNN  = 0;
   slavemasteroffsets.pNN  = 0;
+ 
     } // END slcimage::slcimage()
 
 
@@ -138,10 +152,10 @@ void slcimage::fillslcimage(const char* file)
   bk_assert(resfile,file,__FILE__,__LINE__);
 
   // ______ Lookfor identifiers (strings) ______
-  char  dummyline[4*ONE27];
+  char  dummyline[6*ONE27];
   real8 latitude;
   real8 longitude;
-  char  word[4*ONE27]     = " ";
+  char  word[6*ONE27]     = " ";
   int32 linecounter     = 0;
   // ______ check all fields are found _______________
   bool found_lat        = false;
@@ -158,6 +172,16 @@ void slcimage::fillslcimage(const char* file)
   bool found_fdc_a0     = false;
   bool found_fdc_a1     = false;
   bool found_fdc_a2     = false;
+  bool found_f_DC_t_ref  = false;     
+  bool found_f_DC_t_ref_rn = false;  
+  bool found_FM_t_ref    = false;        // azimuth time reference for frequency modulation rate 
+  bool found_FM_t_ref_rn = false;        // range time reference for frequency modulation rate 
+  bool found_FM_a0      = false;        // constant term Hz
+  bool found_FM_a1      = false;        // linear term Hz/s
+  bool found_FM_a2      = false;        // quadratic term Hz/s/s
+  bool found_Ks         = false;        // azimuth steering rate [deg/s]
+  bool found_dt_az      = false;        // Azimuth_time_interval [s]
+  
   bool found_sarp       = false;
   bool found_product    = false;
 
@@ -189,22 +213,22 @@ void slcimage::fillslcimage(const char* file)
         }
       }
     // ______ Check if all parameters are read ______
-    if (found_lat        == true &&
-        found_lon        == true &&
-        found_wavelength == true &&
-        found_t_range1   == true &&
-        found_t_azi1     == true &&
-        found_prf        == true &&
-        found_rsr        == true &&
-        found_nlines     == true &&
-        found_npixels    == true &&
-        found_abw        == true &&
-        found_rbw        == true &&
-        found_fdc_a0     == true &&
-        found_fdc_a1     == true &&
-        found_fdc_a2     == true &&
-        found_sarp       == true &&
-        found_product    == true)     break;
+ //   if (found_lat        == true &&
+ //       found_lon        == true &&
+ //       found_wavelength == true &&
+ //       found_t_range1   == true &&
+ //       found_t_azi1     == true &&
+ //       found_prf        == true &&
+ //       found_rsr        == true &&
+ //       found_nlines     == true &&
+  //      found_npixels    == true &&
+ //       found_abw        == true &&
+ //       found_rbw        == true &&
+ //       found_fdc_a0     == true &&
+ //       found_fdc_a1     == true &&
+ //       found_fdc_a2     == true &&
+ //       found_sarp       == true &&
+ //       found_product    == true)     break;
 
     // ___ read parameters ___
     resfile  >> word;                                   // read word
@@ -261,8 +285,8 @@ void slcimage::fillslcimage(const char* file)
       {
       found_t_azi1 = true;
       struct tm tijdstart;
-      char c12tijd0[13];
-      char c12tijd0_tmp[16];// allow for .123456 ms ASAR in reading
+      char c12tijd0[20];
+      char c12tijd0_tmp[20];// allow for .123456 ms ASAR in reading
       resfile >> word >> utc1 >> c12tijd0_tmp;  // (UTC): 26-JUL-1995 09:49:23.394
       // ______ utc1 should be including time ______
      // ______ make sure not to put in ms since what are the consequences
@@ -295,8 +319,7 @@ void slcimage::fillslcimage(const char* file)
            << "]: string: \"First_pixel_azimuth_time\",    (derived) value: "
            << t_azi1;
       DEBUG.print();
-      INFO << "sec of day of first azimuth line: " << t_azi1;
-      INFO.print();
+      
       }
 
     else if (!strcmp(word,"Pulse_Repetition_Frequency"))
@@ -367,22 +390,41 @@ void slcimage::fillslcimage(const char* file)
       {
       found_fdc_a0 = true;
       resfile >> word >> word >> word >> f_DC_a0;// also works for ATL: 1.0E+02
+      
       DEBUG << "[" << linecounter
            << "]: string: \"Xtrack_f_DC_constant\",        (derived) value: "
            << f_DC_a0;
       DEBUG.print();
       }
+    
+    else if (!strcmp(word,"Azimuth_steering_rate"))     // (TOPS only):
+      {
+      found_Ks= true;
+      resfile >> word >> word;
+      Ks = atof(word);
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"Azimuth_steering_rate\",       (derived) value: "
+           << word << ", "
+           << Ks;
+      DEBUG.print();
+      }  
+    
     else if (!strcmp(word,"Xtrack_f_DC_linear"))        // (Hz/s, early edge):
       {
       found_fdc_a1 = true;
-      resfile >> word >> word >> word >> f_DC_a1;
+      //resfile >> word >> word >> word >> f_DC_a1;
+      resfile >> word >> word >> word >> word;
+          
+      f_DC_a1 = atof(word);
       if (abs(f_DC_a1)<1.0)
         {
         WARNING << "Strange value for f_DC_a1: " << f_DC_a1 
                 << "; setting f_DC_a1=0 (expected ~1e7)";
         WARNING.print();
         WARNING.print("other definition of ATLANTIS? or Focused SLC?");
-        f_DC_a1 = 0.0;
+        if (!found_Ks)//do not change anything for TOPS
+          f_DC_a1 = 0.0;
         }
       DEBUG << "[" << linecounter
            << "]: string: \"Xtrack_f_DC_linear\",          (derived) value: "
@@ -393,20 +435,142 @@ void slcimage::fillslcimage(const char* file)
       {
       found_fdc_a2 = true;
       resfile >> word >> word >> word >> f_DC_a2;
+   
+      
       if (abs(f_DC_a2)<1.0)
         {
         WARNING << "strange value for f_DC_a2: " << f_DC_a2 
                 << "; setting f_DC_a2=0 (expected ~1e12)";
         WARNING.print();
         WARNING.print("other definition of ATLANTIS? or Focused SLC?");
-        f_DC_a2 = 0.0;
+        if (!found_Ks)//do not change anything for TOPS
+          f_DC_a2 = 0.0;
         }
       DEBUG << "[" << linecounter
            << "]: string: \"Xtrack_f_DC_quadratic\",       (derived) value: "
            << f_DC_a2;
       DEBUG.print();
       }
-
+    
+    else if (!strcmp(word,"DC_reference_azimuth_time:"))     // (TOPS only):
+      {
+      found_f_DC_t_ref = true;
+      struct tm tijdstart;
+      char c12tijd0[20];
+      char c12tijd0_tmp[20];// allow for .123456 ms ASAR in reading
+      resfile >> utc1 >> c12tijd0_tmp;  // (UTC): 26-JUL-1995 09:49:23.394
+      // ______ utc1 should be including time ______
+      // ______ make sure not to put in ms since what are the consequences
+      // ______ for getorb, strptime, etc. ______
+      strncpy(c12tijd0,c12tijd0_tmp,12);
+      c12tijd0[12]='\0';
+      
+      
+      strcat(utc1," ");
+      strcat(utc1,c12tijd0);
+      // ______ Compute sec. of day for this date ______
+      strptime(c12tijd0,"%T",&tijdstart);
+      char c12frac0[20]="0.";
+      register int32 i;
+      for (i=0; i<20; i++)
+        if (c12tijd0_tmp[i]=='.') break;
+      int32 j = 2;
+      while (c12tijd0_tmp[i] != '\0')                       // use old value of i
+        {
+        i++;
+        c12frac0[j]=c12tijd0_tmp[i];
+        j++;
+        }
+       f_DC_t_ref_az = tijdstart.tm_sec +
+                     atof(c12frac0) +
+                     60 * tijdstart.tm_min +
+                     3600 * tijdstart.tm_hour;
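+      // e.g. 09:49:23.394 -> 9*3600 + 49*60 + 23 + 0.394 = 35363.394 s of day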
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"DC_reference_azimuth_time:\",       (derived) value: "
+           << f_DC_t_ref_az;
+      DEBUG.print();
+      }
+     
+    else if (!strcmp(word,"DC_reference_range_time:"))     // (TOPS only):
+      {
+      found_f_DC_t_ref_rn = true;
+      resfile >>   f_DC_t_ref_rn;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"DC_reference_range_time:\",       (derived) value: "
+           << f_DC_t_ref_rn;
+      DEBUG.print();
+      }
+    
+    else if (!strcmp(word,"FM_reference_azimuth_time:"))     // (TOPS only):
+      {
+      found_FM_t_ref = true;
+      resfile >>  FM_t_ref_az;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"FM_reference_azimuth_time\",       (derived) value: "
+           << FM_t_ref_az;
+      DEBUG.print();
+      }  
+    
+    else if (!strcmp(word,"FM_reference_range_time:"))     // (TOPS only):
+      {
+      found_FM_t_ref_rn = true;
+      resfile >>  FM_t_ref_rn;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"FM_reference_range_time\",       (derived) value: "
+           << FM_t_ref_rn;
+      DEBUG.print();
+      }  
+    
+    else if (!strcmp(word,"FM_polynomial_constant_coeff"))     // (TOPS only):
+      {
+      found_FM_a0= true;
+      resfile >> word >> word >> word >> FM_a0;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"FM_polynomial_constant_coeff\",       (derived) value: "
+           << FM_a0;
+      DEBUG.print();
+      }  
+    
+    
+    else if (!strcmp(word,"FM_polynomial_linear_coeff"))     // (TOPS only):
+      {
+      found_FM_a1= true;
+      resfile >> word >> word >> word >> FM_a1;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"FM_polynomial_linear_coeff\",       (derived) value: "
+           << FM_a1;
+      DEBUG.print();
+      }  
+    else if (!strcmp(word,"FM_polynomial_quadratic_coeff"))     // (TOPS only):
+      {
+      found_FM_a2= true;
+      resfile >> word >> word >> word >> FM_a2;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"FM_polynomial_quadratic_coeff\",       (derived) value: "
+           << FM_a2;
+      DEBUG.print();
+      }  
+     
+      else if (!strcmp(word,"Azimuth_time_interval"))     // (TOPS only):
+      {
+      found_dt_az= true;
+      resfile >> word >> dt_az;
+      
+      DEBUG << "[" << linecounter
+           << "]: string: \"Azimuth_time_interval\",       (derived) value: "
+           << dt_az;
+      DEBUG.print();
+      }  
+    
+    
+// TODO: update and add the FM rate
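+// For reference, a hedged sketch of result-file lines the TOPS branches above
+// can consume (values illustrative only; token layout mirrors the stream
+// extractions in each branch):
+//   Azimuth_steering_rate (deg/s):                      1.590368784
+//   DC_reference_azimuth_time:                          26-JUL-1995 09:49:23.394
+//   DC_reference_range_time:                            5.337e-03
+//   FM_reference_azimuth_time:                          35363.394
+//   FM_reference_range_time:                            5.337e-03
+//   FM_polynomial_constant_coeff (Hz, early edge):      -2350.0
+//   FM_polynomial_linear_coeff (Hz/s, early edge):      450000.0
+//   FM_polynomial_quadratic_coeff (Hz/s/s, early edge): -790.0
+//   Azimuth_time_interval (s):                          2.0555e-03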
     // ___ SAR_PROCESSOR key added, written in readleader() ___
     // ___ if you want to force behavior, change this in master.res ___
     else if (!strcmp(word,"SAR_PROCESSOR:"))     // VMP ATLANTIS TUD
@@ -612,6 +776,13 @@ void slcimage::fillslcimage(const char* file)
           sensor = SLC_RS2;
           continue;
         }
+      pch = strstr (word,"S1A");
+      if (pch != NULL) 
+        {
+          DEBUG.print("Substring \"S1A\" (Sentinel-1A) found in Product type specifier.");
+          sensor = SLC_S1A;
+          continue;
+        }
       pch = strstr (word,"RSAT");
       if (pch != NULL) 
         {
@@ -731,7 +902,7 @@ void slcimage::fillslcimage(const char* file)
     }
   if (sensor==SLC_CSK)
     {
-      INFO.print("Yeah, Cosmo-SkyMed!");
+      INFO.print("Really, Cosmo-SkyMed!");
       if (abs(wavelength-0.031) > 0.01)
        WARNING.print("wavelength seems to deviate more than 1 cm from CSK nominal.");
     }
diff --git a/src/slcimage.hh b/doris_core/slcimage.hh
similarity index 90%
rename from src/slcimage.hh
rename to doris_core/slcimage.hh
index 35bdbc8..b071f70 100755
--- a/src/slcimage.hh
+++ b/doris_core/slcimage.hh
@@ -77,7 +77,7 @@ class slcimage                          // info per image
     real8       abw;                    // azimuth band width (Hz)
     real8       rsr2x;                  // 2 times range sampling rate (Hz)
     real8       rbw;                    // range band width (Hz)
-    real8       t_azi1;                 // sec. of day of first azimuth line
+    real8       t_azi1;                 // sec. of day of first pixel azimuth time
     real8       t_range1;               // one way time (s) to first range pixel
     real8       wavelength;             // meters
     // real8 hamming_azi;               // weighting function designator/alpha
@@ -87,7 +87,23 @@ class slcimage                          // info per image
     real8       f_DC_a0;                // constant term Hz
     real8       f_DC_a1;                // linear term Hz/s
     real8       f_DC_a2;                // quadratic term Hz/s/s
-
+    
+    // ________________  TOPS  ONLY__________________
+    real8       f_DC_t_ref_az;             // DC_reference_azimuth_time
+    real8       f_DC_t_ref_rn;            // DC_reference_range_time
+    
+    // ________________  TOPS  ONLY__________________
+    // FM polynomial
+    real8       FM_t_ref_az;             // azimuth time reference for frequency modulation rate
+    real8       FM_t_ref_rn;             // range time reference for frequency modulation rate
+    
+    real8       FM_a0;                // constant term (Hz) for polynomial of FM rate
+    real8       FM_a1;                // linear term (Hz/s) for polynomial of FM rate
+    real8       FM_a2;                // quadratic term (Hz/s/s) for polynomial of FM rate
+    
+    real8       Ks;                   // azimuth steering rate
+    real8       dt_az;                // azimuth time interval [s]; for most systems equal to 1/PRF
+    
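+    // Hedged note: both polynomials above are conventionally evaluated in
+    // range time relative to their reference time, e.g.
+    //   f_DC(t) = f_DC_a0 + f_DC_a1*(t-f_DC_t_ref_rn) + f_DC_a2*(t-f_DC_t_ref_rn)^2
+    //   FM(t)   = FM_a0   + FM_a1*(t-FM_t_ref_rn)     + FM_a2*(t-FM_t_ref_rn)^2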
     // ______ offset = X(l,p) - X(L,P) ______
     // ______ Where l,p are in the local slave coordinate system and ______
     // ______ where L,P are in the local master coordinate system ______
@@ -96,6 +112,12 @@ class slcimage                          // info per image
     int32       coarseoffsetP;          // offset in pixel (range) direction
     int32       coarseorbitoffsetL;     // orbit offset in line (azimuth) direction [FvL]
     int32       coarseorbitoffsetP;     // orbit offset in pixel (range) direction [FvL]
+    
+    real4         slopeP;            // initial slope pixels
+    real4         slopeL;            // initial slope lines
+    real4         realoffsetL;            // initial offset lines
+    real4         realoffsetP;            // initial offset pixels
+    
     int32       ovs_az;                 // oversampling of SLC
     //real8       ovs_az;                 // oversampling of SLC, multilook test [TODO]
     int32       ovs_rg;                 // oversampling of SLC
diff --git a/src/tmp_strptime.cc b/doris_core/tmp_strptime.cc
similarity index 100%
rename from src/tmp_strptime.cc
rename to doris_core/tmp_strptime.cc
diff --git a/src/unwrap.cc b/doris_core/unwrap.cc
similarity index 100%
rename from src/unwrap.cc
rename to doris_core/unwrap.cc
diff --git a/src/unwrap.hh b/doris_core/unwrap.hh
similarity index 100%
rename from src/unwrap.hh
rename to doris_core/unwrap.hh
diff --git a/src/utilities.cc b/doris_core/utilities.cc
similarity index 99%
rename from src/utilities.cc
rename to doris_core/utilities.cc
index 0c62c29..25b75c3 100755
--- a/src/utilities.cc
+++ b/doris_core/utilities.cc
@@ -101,7 +101,7 @@ void getorb(
     strcpy(orbdir,inputorb.s_orbdir);
   else
     {
-    PRINT_ERROR("panic, impossible.")
+    PRINT_ERROR("panic, impossible.");
     throw(unhandled_case_error);
     }
 
@@ -1486,6 +1486,17 @@ void BalphaBhBvBparBperpTheta(
 
 
 
+
+
+
+
+
+
+
+
+
+
+
 /****************************************************************
  *    shiftazispectrum                                          *
  * Shift spectrum of input matrix data either from fDC to zero, *
diff --git a/src/utilities.hh b/doris_core/utilities.hh
similarity index 99%
rename from src/utilities.hh
rename to doris_core/utilities.hh
index e29b345..a66c350 100755
--- a/src/utilities.hh
+++ b/doris_core/utilities.hh
@@ -574,6 +574,8 @@ void BalphaBhBvBparBperpTheta(
         real8 &Bpar, real8 &Bperp, real8 &theta,
         const cn M, const cn P, const cn S);
 
+
+
 // ______ Shift azimuth spectrum from fDC to zero, and vv. ______
 void shiftazispectrum(
         matrix<complr4> &data,  // slcdata sin space domain
diff --git a/doris_stack/functions/ESD_functions.py b/doris_stack/functions/ESD_functions.py
new file mode 100755
index 0000000..e3daaa8
--- /dev/null
+++ b/doris_stack/functions/ESD_functions.py
@@ -0,0 +1,285 @@
+import os
+
+import numpy as np
+from doris.doris_stack.functions.get_ramp import get_ramp
+
+
+########################################################################################################################
+# Function to get parameters from files
+# Parameter = get_parameter(First_param,file_name,format_flag=1,Second_param=None,Third_param=None)
+def get_parameter(First_param, file_name, format_flag=1, Second_param=None, Third_param=None):
+    Read_continue_flag = 0
+    class set_class(object):
+        pass
+    orbit_info = set_class()
+    time_temp = []
+    x_temp = []
+    y_temp = []
+    z_temp = []
+    value = None
+
+    # Note: line.find(param) is 0 (falsy) only when the line starts with the
+    # parameter name, so "not line.find(param)" tests for a matching line.
+    for line in open(file_name):
+        if format_flag == 1:
+            if not line.find(First_param):
+                index = line.find(':')
+                value = line[(index + 1):].strip(' \n\t')
+                return value
+
+        if format_flag == 2:
+            # Return the value of First_param found between Second_param and Third_param
+            if not line.find(Second_param):
+                Read_continue_flag = 1
+            if Read_continue_flag == 1 and not line.find(First_param):
+                index = line.find(':')
+                value = line[(index + 1):].strip(' \n\t')
+            if not line.find(Third_param):
+                Read_continue_flag = 0
+                return value
+
+        if format_flag == 3:
+            if not line.find(First_param):
+                index = line.find(':')
+                pixel_time = line[(index + 1):].strip(' \n\t').split(' ')[1].split(':')
+                return pixel_time
+
+        if format_flag == 4:
+            # Read an orbit block: a count on the parameter line, followed by
+            # that many "time x y z" data lines.
+            if not line.find(First_param):
+                index = line.find(':')
+                value = int(line[(index + 1):].strip(' \n\t'))
+                Read_continue_flag = 1
+                continue
+            if Read_continue_flag >= 1:
+                new_line = line.strip('\n').split()
+                time_temp.append(float(new_line[0]))
+                x_temp.append(float(new_line[1]))
+                y_temp.append(float(new_line[2]))
+                z_temp.append(float(new_line[3]))
+                Read_continue_flag = Read_continue_flag + 1
+                if Read_continue_flag == (value + 1):
+                    setattr(orbit_info, 'x', x_temp)
+                    setattr(orbit_info, 'y', y_temp)
+                    setattr(orbit_info, 'z', z_temp)
+                    setattr(orbit_info, 'time', time_temp)
+                    return orbit_info
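+
+# A minimal usage sketch (hedged; the file name and parameter line are
+# illustrative, not taken from a real stack). With format_flag=1 the value
+# after the colon is returned as a string:
+#
+#   # master.res contains: 'Pulse_Repetition_Frequency (computed, Hz): 486.486'
+#   prf = float(get_parameter('Pulse_Repetition_Frequency (computed, Hz)', 'master.res', 1))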
+
+########################################################################################################################
+# Definition to extract a block of data from a raw file
+# thisBurstData = freadbk(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32', lines=0, pixels=0, memmap=True)
+def freadbk(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32', lines=0, pixels=0, memmap=True):
+    # First use memmap to get a memory map of the full file, then extract the requested part.
+    # (The memmap argument is currently unused; the file is always memory-mapped.)
+
+    if dt == 'cpxint16':
+        dtype = np.dtype([('re', np.int16), ('im', np.int16)])
+        file_dat = np.memmap(path_file, dtype=dtype, mode='r', shape=(lines, pixels)).view(np.int16).astype(np.float32).view(np.complex64)
+        data = file_dat[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1].astype(
+            'complex64', subok=False)
+    elif dt == 'cpxshort':
+
+        file_dat = np.memmap(path_file, dtype=np.dtype(np.float16), mode='r', shape=(lines, pixels * 2))
+        data = 1j * file_dat[:, 1::2].astype('float32', subok=False)
+        data += file_dat[:, 0::2].astype('float32', subok=False)
+        data = data[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1]
+
+    else:
+        dt = np.dtype(dt)
+        file_dat = np.memmap(path_file, dtype=dt, mode='r', shape=(lines, pixels))
+        data = file_dat[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1].astype(
+            dt, subok=False)
+
+    return data
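+
+# Usage sketch (hedged; 'cint.raw' and the 100x200 file size are illustrative).
+# lines/pixels give the shape of the full file for the memory map, while
+# line_start, pixel_start, nofLines and nofPixels select the one-based block:
+#
+#   block = freadbk('cint.raw', line_start=1, pixel_start=1, nofLines=50,
+#                   nofPixels=100, dt='complex64', lines=100, pixels=200)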
+
+
+########################################################################################################################
+# Function to calculate the Doppler centroid frequency difference in the burst overlap
+# Df_DC = get_f_DC_difference(nBurst)
+def get_f_DC_difference(nBurst):
+
+    burst1 = 'burst_' + str(nBurst) + '/'
+    burst2 = 'burst_' + str(nBurst + 1) + '/'
+
+    this_m_resData = burst1 + 'master.res'
+    next_m_resData = burst2 + 'master.res'
+
+    os.chdir(os.getcwd() + '/' + burst1)
+    f_DC_1 = get_ramp(os.path.basename(this_m_resData), resampled=0, type='DC')
+    os.chdir(os.path.dirname(os.getcwd()))
+
+    os.chdir(os.getcwd() + '/' + burst2)
+    f_DC_2 = get_ramp(os.path.basename(next_m_resData), resampled=0, type='DC')
+    os.chdir(os.path.dirname(os.getcwd()))
+
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    Df_DC = f_DC_1[line_start - 1:line_start + line_length - 1, first_pixel_this - 1:first_pixel_this + pixel_length - 1] - \
+            f_DC_2[0:line_length, first_pixel_next - 1: first_pixel_next + pixel_length - 1]
+
+    return Df_DC
+
+def get_coordinates(nBurst):
+
+    burst1 = 'burst_' + str(nBurst) + '/'
+    burst2 = 'burst_' + str(nBurst+1) + '/'
+    this_m_resData = burst1 + 'master.res'
+    next_m_resData = burst2 + 'master.res'
+
+    # Get variables from first burst
+    this_line_first     = int(get_parameter('First_line (w.r.t. output_image)',this_m_resData,1))
+    this_line_last      = int(get_parameter('Last_line (w.r.t. output_image)',this_m_resData,1))
+    this_nr_oflines     = int(this_line_last) - int(this_line_first) +1
+    this_pixel_first    = int(get_parameter('First_pixel (w.r.t. output_image)',this_m_resData,1))
+    this_pixel_last     = int(get_parameter('Last_pixel (w.r.t. output_image)',this_m_resData,1))
+    this_nr_ofpixels    = int(this_pixel_last) - int(this_pixel_first) +1
+    PRF_1               = float(get_parameter('Pulse_Repetition_Frequency (computed, Hz)',this_m_resData,1))
+
+    # Get variables from second burst
+    next_line_first     = int(get_parameter('First_line (w.r.t. output_image)',next_m_resData,1))
+    next_line_last      = int(get_parameter('Last_line (w.r.t. output_image)',next_m_resData,1))
+    next_nr_oflines     = int(next_line_last) - int(next_line_first) +1
+    next_pixel_first    = int(get_parameter('First_pixel (w.r.t. output_image)',next_m_resData,1))
+    next_pixel_last     = int(get_parameter('Last_pixel (w.r.t. output_image)',next_m_resData,1))
+    next_nr_ofpixels    = int(next_pixel_last) - int(next_pixel_first) +1
+
+    PRF = PRF_1
+
+    # Read only the Burstoverlap
+    if this_pixel_first < next_pixel_first:
+        first_pixel = next_pixel_first
+    elif this_pixel_first >= next_pixel_first:
+        first_pixel = this_pixel_first
+    if this_pixel_last > next_pixel_last:
+        pixel_length = next_pixel_last - first_pixel + 1
+    elif this_pixel_last <= next_pixel_last:
+        pixel_length = this_pixel_last - first_pixel + 1
+
+    first_pixel_this = first_pixel - this_pixel_first + 1
+    first_pixel_next = first_pixel - next_pixel_first + 1
+
+    line_length = this_line_last - next_line_first + 1
+    line_start = this_nr_oflines - line_length + 1
+
+    return line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, this_nr_ofpixels,\
+           next_nr_oflines, next_nr_ofpixels, PRF
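+
+# The returned coordinates describe the overlap in the local grids of both bursts.
+# A hedged worked example: if burst n ends at output line 1500 (this_line_last)
+# and burst n+1 starts at output line 1400 (next_line_first), then
+# line_length = 1500 - 1400 + 1 = 101, and with this_nr_oflines = 1500 we get
+# line_start = 1500 - 101 + 1 = 1400: the last 101 lines of burst n overlap the
+# first 101 lines of burst n+1.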
+
+
+########################################################################################################################
+# Function to calculate the ESD offset from the double difference of the burst-overlap interferograms
+# offset, W, angle_pixel = get_offset(nBurst, Df_DC, coh_threshold=0.3)
+def get_offset(nBurst, Df_DC, coh_threshold=0.3):
+
+    burst1 = 'burst_' + str(nBurst) + '/'
+    burst2 = 'burst_' + str(nBurst+1) + '/'
+
+    # cpxint16 and cpxfloat32
+    dataFormat_s = 'complex64'
+
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    ifgs_1  = freadbk(burst1 + 'cint.raw.old', line_start, first_pixel_this, line_length, pixel_length , dataFormat_s,  this_nr_oflines, this_nr_ofpixels)
+    ESD_coh_1  = freadbk(burst1 + 'coherence.raw', line_start, first_pixel_this, line_length, pixel_length , 'float32',  this_nr_oflines, this_nr_ofpixels)
+    ifgs_2  = freadbk(burst2 + 'cint.raw.old', 1, first_pixel_next, line_length, pixel_length, dataFormat_s, next_nr_oflines, next_nr_ofpixels)
+    ESD_coh_2 = freadbk(burst2 + 'coherence.raw', 1, first_pixel_next, line_length,
+                     pixel_length, 'float32', next_nr_oflines, next_nr_ofpixels)
+    ESD_coh = (ESD_coh_1 + ESD_coh_2) / 2
+
+    #ifgs_1_total = freadbk(burst1 + 'cint.raw.old', 1, 1, this_nr_oflines, this_nr_ofpixels, dataFormat_s,  this_nr_oflines, this_nr_ofpixels)
+    #ifgs_2_total = freadbk(burst2 + 'cint.raw.old', 1, 1, next_nr_oflines, next_nr_ofpixels, dataFormat_s,  next_nr_oflines, next_nr_ofpixels)
+
+    # Remove invalid data both in range and azimuth
+    valid_range = []
+    valid_azimuth = []
+    for i in range(0,len(ifgs_1[0,:])):
+        if np.nanmean(abs(ifgs_1[:,i])) != 0 and np.nanmean(abs(ifgs_2[:,i])) != 0:
+            valid_range.append(i)
+
+    for i in range(0,len(ifgs_1[:,0])):
+        if np.nanmean(abs(ifgs_1[i,:])) != 0 and np.nanmean(abs(ifgs_2[i,:])) != 0:
+            valid_azimuth.append(i)
+
+    if valid_range and valid_azimuth:
+        ifgs_1 = ifgs_1[:, valid_range[:]]
+        ifgs_2 = ifgs_2[:, valid_range[:]]
+        ESD_coh = ESD_coh[:, valid_range[:]]
+
+        ifgs_1 = ifgs_1[valid_azimuth[:], :]
+        ifgs_2 = ifgs_2[valid_azimuth[:], :]
+        ESD_coh = ESD_coh[valid_azimuth[:], :]
+
+        Df_DC = Df_DC[:, valid_range[:]]
+        Df_DC = Df_DC[valid_azimuth[:], :]
+
+    # First multilook with a factor 2 (azimuth) * 10 (range)
+    Nra = 10
+    Naz = 2
+    new_ra = ESD_coh.shape[1] / Nra
+    new_az = ESD_coh.shape[0] / Naz
+
+    ESD_coh = ESD_coh[0:new_az*Naz-1:Naz, 0:new_ra*Nra-1:Nra]
+    ifgs_1_multilook = ifgs_1[:new_az*Naz, :new_ra*Nra].reshape([new_az, Naz, new_ra, Nra]).mean(3).mean(1)
+    ifgs_2_multilook = ifgs_2[:new_az*Naz, :new_ra*Nra].reshape([new_az, Naz, new_ra, Nra]).mean(3).mean(1)
+    Df_DC_multilook = Df_DC[:new_az*Naz, :new_ra*Nra].reshape([new_az, Naz, new_ra, Nra]).mean(3).mean(1)
+
+    # Double difference and calculate weights according to the Cramer-Rao bound
+    diffBursts = ifgs_1_multilook * ifgs_2_multilook.conj()
+    weights = 2 * ESD_coh*ESD_coh / (1 - ESD_coh*ESD_coh)
+
+    W = np.sum(weights[ESD_coh > coh_threshold])
+    angle = (PRF / (2 * np.pi * np.nanmean(Df_DC_multilook[ESD_coh > coh_threshold] * weights[ESD_coh > coh_threshold] /
+                                            np.mean(weights[ESD_coh > coh_threshold]))))
+    offset = np.angle(np.sum(diffBursts[ESD_coh > coh_threshold] * weights[ESD_coh > coh_threshold]) / W) * angle
+
+    angle_pixel = angle * (line_start - 1)
+
+    return offset, W, angle_pixel
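+
+# Hedged sketch of the phase-to-pixel conversion used above: a double-difference
+# phase phi at a Doppler centroid difference Df_DC corresponds to an azimuth
+# mis-registration of phi * PRF / (2 * pi * Df_DC) pixels (values illustrative):
+#
+#   phi, PRF_ex, df_dc = 0.1, 486.486, 4000.0
+#   shift = phi * PRF_ex / (2 * np.pi * df_dc)   # ~0.002 pixel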
+
+########################################################################################################################
+# Function to calculate the pixel offset for each burst, according to Nida's proposed method. The threshold can be
+# assigned by the user.
+# offset, pix_offset = apply_ESD_Nida(diffBursts, Df_DC, PRF, threshold)
+def apply_ESD_Nida(diffBursts, Df_DC, PRF, threshold=0.0001):
+
+    # Determine phase of interferogram
+    ph_esd = np.angle(diffBursts)
+
+    # Do an initial estimation based on the peak of the histogram
+    N, X = np.histogram(ph_esd[:], 50)
+    idx = np.argmax(N)
+    D_az_init = X[idx] * (PRF / (2 * np.pi * np.nanmean(Df_DC[:])))
+
+    # Create a search window of 7 trial shifts around the initial estimate
+    D_az_span = -0.3 * np.pi * (PRF / (2 * np.pi * np.nanmean(Df_DC[:])))
+    D_azs = np.linspace(D_az_init - D_az_span, D_az_init + D_az_span, num=7)
+    del D_az_span
+
+    c = -1
+    D_az_min = []
+    # Keep refining until the change between successive estimates drops below the threshold
+    while True:
+        c += 1
+        ph_test = np.ones(len(D_azs))
+        # Calculate estimated phase, residual phase and test phase
+        for k in range(0,len(D_azs)):
+            D_az = D_azs[k]
+            ph_est = (2*np.pi*Df_DC*D_az)/PRF
+            ph_res = ph_esd - ph_est
+
+            # Circular mean of the residual phase; the trial shift that brings
+            # this closest to zero best explains the observed ESD phase
+            ph_test[k] = np.nanmean(np.angle(np.exp(1j * ph_res[:])))
+
+        ind = np.argmin(abs(ph_test))
+        D_az_min.append(D_azs[ind])
+
+        # Break the iteration when the estimate has converged over the last three steps
+        if c > 2 and abs(D_az_min[c]-D_az_min[c-1]) < threshold and abs(D_az_min[c-1]-D_az_min[c-2]) < threshold:
+            ph_est_opt = np.nanmean(ph_est[:])
+            offset = -D_az_min[c]
+            break
+
+        # Use a smaller difference for next iteration
+        D_az_span = D_azs[1] - D_azs[0]
+        D_azs = np.linspace(D_azs[ind]-D_az_span, D_azs[ind]+D_az_span,num=7)
+        del ph_test
+
+    #print 'amount of loops in iteration ' + str(c)
+
+    pix_offset = offset / (PRF/(2*np.pi*np.nanmean(Df_DC[:])))
+
+    return offset, pix_offset
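+
+# Hedged usage sketch for the functions above, run from a directory that contains
+# burst_1/ and burst_2/ subdirectories with master.res, cint.raw.old and
+# coherence.raw (directory layout assumed, as described by the functions above):
+#
+#   Df_DC = get_f_DC_difference(1)
+#   offset, W, angle_pixel = get_offset(1, Df_DC, coh_threshold=0.3)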
diff --git a/doris_stack/functions/ESD_ps_ds.py b/doris_stack/functions/ESD_ps_ds.py
new file mode 100755
index 0000000..62682f8
--- /dev/null
+++ b/doris_stack/functions/ESD_ps_ds.py
@@ -0,0 +1,493 @@
+import numpy as np
+import os, sys
+from datetime import datetime
+from datetime import timedelta
+from doris.doris_stack.functions.ESD_functions import get_f_DC_difference, get_coordinates, freadbk
+
+
+def save_overlapping(stack_folder, master_date, dates, overlap):
+
+    nBurst, burst, next_burst = get_burst(overlap)
+
+    esd_folder = os.path.join(stack_folder, 'esd')
+    if not os.path.exists(esd_folder):
+        print('ESD folder does not exist')
+        return
+    overlap_path = os.path.join(stack_folder, 'esd', overlap)
+    if not os.path.exists(overlap_path):
+        os.mkdir(overlap_path)
+
+    path = swath_path(stack_folder, dates[0], burst)
+    print('reading metadata from ' + path)
+    os.chdir(path)
+
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, \
+    this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    burst1 = 'burst_' + str(nBurst) + '/'
+    burst2 = 'burst_' + str(nBurst + 1) + '/'
+
+    # First get the data for the master and df_dc
+    master_path = os.path.join(overlap_path, master_date)
+    df_dc_path = os.path.join(overlap_path, 'df_dc')
+
+    master_1 = master_file(burst)
+    master_2 = master_file(next_burst)
+
+    if not os.path.exists(master_path + '_1') or not os.path.exists(master_path + '_2'):
+        master_1 = freadbk(burst1 + master_1, line_start, first_pixel_this, line_length,
+                          pixel_length, 'cpxint16', this_nr_oflines, this_nr_ofpixels)
+        master_2 = freadbk(burst2 + master_2, 1, first_pixel_next, line_length, pixel_length,
+                          'cpxint16', next_nr_oflines, next_nr_ofpixels)
+        master_1_file = np.memmap(master_path + '_1', 'complex64', shape=master_1.shape, mode='w+')
+        master_2_file = np.memmap(master_path + '_2', 'complex64', shape=master_2.shape, mode='w+')
+        master_1_file[:] = master_1
+        master_2_file[:] = master_2
+        master_1_file.flush()
+        master_2_file.flush()
+
+    if not os.path.exists(df_dc_path):
+        df_dc = get_f_DC_difference(nBurst)
+        df_dc_file = np.memmap(df_dc_path, 'float32', shape=df_dc.shape, mode='w+')
+        df_dc_file[:,:] = df_dc[:,:]
+
+    # Then loop over the slaves
+    for date in dates:
+        if date == master_date:
+            continue
+
+        path = swath_path(stack_folder, date, burst)
+        os.chdir(path)
+
+        print(path)
+        data_path = os.path.join(overlap_path, date)
+
+        burst1 = 'burst_' + str(nBurst) + '/'
+        burst2 = 'burst_' + str(nBurst + 1) + '/'
+
+        if not os.path.exists(data_path + '_1') or not os.path.exists(data_path + '_2'):
+            slave_1 = freadbk(burst1 + 'slave_rsmp_reramped.raw', line_start, first_pixel_this, line_length, pixel_length , 'complex64',  this_nr_oflines, this_nr_ofpixels)
+            slave_2 = freadbk(burst2 + 'slave_rsmp_reramped.raw', 1, first_pixel_next, line_length, pixel_length, 'complex64', next_nr_oflines, next_nr_ofpixels)
+            slave_1_file = np.memmap(data_path + '_1', 'complex64', shape=slave_1.shape, mode='w+')
+            slave_2_file = np.memmap(data_path + '_2', 'complex64', shape=slave_2.shape, mode='w+')
+            slave_1_file[:] = slave_1
+            slave_2_file[:] = slave_2
+            slave_1_file.flush()
+            slave_2_file.flush()
+
+
+def find_ps_overlapping(stack_folder, overlap):
+    # This is used to find the ps points in overlapping areas
+
+    nBurst, burst, next_burst = get_burst(overlap)
+    esd_folder = os.path.join(stack_folder, 'esd')
+    overlap_path = os.path.join(esd_folder, overlap)
+    files = os.listdir(os.path.join(overlap_path))
+    dates = sorted([f[:-2] for f in files if (f.endswith('_1') and len(f) > 10)])
+    folder = dates[0][0:4] + dates[0][5:7] + dates[0][8:10]
+    os.chdir(os.path.join(stack_folder, folder, overlap[0:7]))
+
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, \
+    this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    # Remove earlier generated files for ps
+    files = next(os.walk(overlap_path))[2]
+    files = [os.path.join(overlap_path, name) for name in files if (name.endswith('ps') or name.startswith('ps'))]
+    for filename in files:
+        os.remove(filename)
+
+    # Gather data in one matrix
+    first, second = gather_stack(overlap_path, line_length, pixel_length)
+
+    # First calculate the ps point for first overlap
+    mean = np.mean(first, axis=2)
+    std = np.std(first, axis=2)
+
+    with np.errstate(divide='ignore', invalid='ignore'):
+        c = np.true_divide(std, mean)
+        c[~ np.isfinite(c)] = 10000
+    ps1 = (c < 0.3)
+    ps1_file = os.path.join(overlap_path, 'ps_1')
+    ps1_dat = np.memmap(ps1_file, 'bool', mode= 'w+', shape=(line_length, pixel_length))
+    ps1_dat[:,:] = ps1[:,:]
+
+    # Then calculate the ps point for second overlap
+    mean = np.mean(second, axis=2)
+    std = np.std(second, axis=2)
+
+    with np.errstate(divide='ignore', invalid='ignore'):
+        c = np.true_divide(std, mean)
+        c[~ np.isfinite(c)] = 10000
+    ps2 = (c < 0.3)
+    ps2_file = os.path.join(overlap_path, 'ps_2')
+    ps2_dat = np.memmap(ps2_file, 'bool', mode= 'w+', shape=(line_length, pixel_length))
+    ps2_dat[:,:] = ps2[:,:]
+
+    ps_file = os.path.join(overlap_path, 'ps')
+    ps_dat = np.memmap(ps_file, 'bool', mode='w+', shape=(line_length, pixel_length))
+    ps_dat[:, :] = ((ps1_dat * ps2_dat) == 1)
+
+
+def select_ps_data(stack_folder, overlap):
+    # This function creates separate files for the values of the ps points only.
+
+    esd_folder = os.path.join(stack_folder, 'esd')
+    overlap_path = os.path.join(esd_folder, overlap)
+    files = os.listdir(os.path.join(overlap_path))
+    dates = sorted([f[:-2] for f in files if (f.endswith('_1') and len(f) > 10)])
+
+    folder = dates[0][0:4] + dates[0][5:7] + dates[0][8:10]
+    os.chdir(os.path.join(stack_folder, folder, overlap[0:7]))
+
+    nBurst = int(overlap.split('_')[3])
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, \
+    this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    ps_file = os.path.join(overlap_path, 'ps')
+    ps_dat = np.memmap(ps_file, 'bool', mode='r', shape=(line_length, pixel_length))
+    ps_num = np.sum(ps_dat)
+
+    # Save only the ps points to file.
+    for date in dates:
+        slave_ps_name = os.path.join(overlap_path, date + '_1_ps')
+        master_ps_name = os.path.join(overlap_path, date + '_2_ps')
+
+        if not os.path.exists(slave_ps_name) or not os.path.exists(master_ps_name):
+            slave_ps = np.memmap(slave_ps_name, 'complex64', mode='w+', shape=(ps_num))
+            master_ps = np.memmap(master_ps_name, 'complex64', mode='w+', shape=(ps_num))
+            slave = np.memmap(os.path.join(overlap_path, date + '_1'), 'complex64', mode='r',
+                                     shape=(line_length, pixel_length))
+            master = np.memmap(os.path.join(overlap_path, date + '_2'), 'complex64', mode='r',
+                                    shape=(line_length, pixel_length))
+            if ps_num > 0:
+                slave_ps[:] = slave[ps_dat]
+                master_ps[:] = master[ps_dat]
+    # Do the same for the df_dc file
+    if not os.path.exists(os.path.join(overlap_path, 'df_dc_ps')):
+        df_dc_ps = np.memmap(os.path.join(overlap_path, 'df_dc_ps'), 'float32', mode='w+', shape=(ps_num))
+        df_dc = np.memmap(os.path.join(overlap_path, 'df_dc'), 'float32', mode='r',
+                          shape=(line_length, pixel_length))
+        if ps_num > 0:
+            df_dc_ps[:] = df_dc[ps_dat]
+
+
+def network_esd_ps(stack_folder, overlap, master_date, max_baseline, max_offset=0.02):
+    # Based on the ps points, esd is calculated using a network approach
+
+    dates, overlap_path, diff_matrix, var_matrix, to_angle_matrix, weight_matrix, processing = prepare_esd(stack_folder, overlap)
+    folder = dates[0][0:4] + dates[0][5:7] + dates[0][8:10]
+    os.chdir(os.path.join(stack_folder, folder, overlap[0:7]))
+
+    nBurst = int(overlap.split('_')[3])
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, \
+    this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    ps_file = os.path.join(overlap_path, 'ps')
+    ps_dat = np.memmap(ps_file, 'bool', mode='r', shape=(line_length, pixel_length))
+
+    ps_id = np.where(ps_dat == 1)
+    if len(ps_id[0]) == 0:  # If there are no ps points
+        return diff_matrix, var_matrix, to_angle_matrix, weight_matrix, dates
+    else:
+        ps_num = len(ps_id[0])
+
+    df_dc_ps = np.memmap(os.path.join(overlap_path, 'df_dc_ps'), 'float32', mode='r+', shape=ps_num)[:]
+
+    for date, n in zip(dates, range(len(dates))):
+        for date_2, num in zip(dates, range(len(dates))):
+            # Only calculate the upper triangle, as the others will be the same
+            if processing[n, num] == 1:
+                continue
+
+            timediff = datetime.strptime(date_2, '%Y-%m-%d') - datetime.strptime(date, '%Y-%m-%d')
+            if timediff > timedelta(minutes=1) and timediff < timedelta(days=max_baseline):
+
+                first_master = np.memmap(os.path.join(overlap_path, date + '_1_ps'), 'complex64', mode='r',
+                                         shape=ps_num)
+                first_slave = np.memmap(os.path.join(overlap_path, date_2 + '_1_ps'), 'complex64', mode='r',
+                                        shape=ps_num)
+                second_master = np.memmap(os.path.join(overlap_path, date + '_2_ps'), 'complex64', mode='r',
+                                          shape=ps_num)
+                second_slave = np.memmap(os.path.join(overlap_path, date_2 + '_2_ps'), 'complex64', mode='r',
+                                         shape=ps_num)
+
+                double_diff = (first_master * first_slave.conj()) * (second_master * second_slave.conj()).conj()
+
+                # Now select only the pixels with a double-difference phase smaller than max_offset (radians)
+                double_diff[np.isnan(double_diff)] = 0.050
+                val = (np.abs(np.angle(double_diff)) < max_offset)
+                w = np.sum(val)
+
+                if w > 0:
+                    pixel_diff = np.angle(np.sum(double_diff[val])) * (PRF / (2 * np.pi * np.nanmean(df_dc_ps[val])))
+                    pixel_var = np.var(np.angle(double_diff[val]) * (PRF/(2*np.pi*df_dc_ps[val])))
+                    temp_baseline_w = np.exp(-(float(timediff.days) / 100))
+                    weight_matrix[0, n, num] = temp_baseline_w * w
+                    var_matrix[0, n, num] = pixel_var * temp_baseline_w
+                    diff_matrix[0, n, num] = pixel_diff
+                    # Phase ramp per pixel
+                    to_angle_matrix[0, n, num] = (PRF/(2*np.pi*np.nanmean(df_dc_ps[val]))) * (line_start - 1)
+
+    return diff_matrix, var_matrix, to_angle_matrix, weight_matrix, dates
+
+
+def network_esd_coh(stack_folder, overlap, master_date, max_baseline, ra=10, az=2):
+
+    dates, overlap_path, diff_matrix, var_matrix, to_angle_matrix, weight_matrix, processed = prepare_esd(stack_folder, overlap)
+    folder = master_date[0:4] + master_date[5:7] + master_date[8:10] + "_" + dates[0][0:4] + dates[0][5:7] + dates[0][8:10]
+    os.chdir(os.path.join(stack_folder, folder, overlap[0:7]))
+
+    nBurst = int(overlap.split('_')[3])
+    line_start, line_length, first_pixel_this, first_pixel_next, pixel_length, this_nr_oflines, \
+    this_nr_ofpixels, next_nr_oflines, next_nr_ofpixels, PRF = get_coordinates(nBurst)
+
+    # Gather data in one matrix
+    first, second = gather_stack(overlap_path, line_length, pixel_length, abs=False)
+
+    # Remove the empty rows / columns
+    columns = np.where((np.min(np.abs(np.sum(first, axis=0)), axis=1) != 0) *
+                       (np.min(np.abs(np.sum(second, axis=0)), axis=1) != 0) == True)[0]
+    rows = np.where((np.min(np.abs(np.sum(first, axis=1)), axis=1) != 0) *
+                       (np.min(np.abs(np.sum(second, axis=1)), axis=1) != 0) == True)[0]
+    first = first[rows[0]:rows[-1]+1, columns[0]:columns[-1]+1, :]
+    second = second[rows[0]:rows[-1]+1, columns[0]:columns[-1]+1, :]
+
+    # Multilook the df_dc
+    df_dc = np.memmap(os.path.join(overlap_path, 'df_dc'), 'float32', mode='r+', shape=(line_length, pixel_length))
+    df_dc_ml = multilook(df_dc[rows[0]:rows[-1]+1, columns[0]:columns[-1]+1], az, ra)
+
+    for date, n in zip(dates, range(len(dates))):
+
+        # First select the dates we want to compare with
+        c_dates = []
+        nums = []
+        for date_2, num in zip(dates, range(len(dates))):
+            # Only calculate the upper triangle, as the others will be the same
+            timediff = datetime.strptime(date_2, '%Y-%m-%d') - datetime.strptime(date, '%Y-%m-%d')
+
+            if timediff > timedelta(minutes=1) and timediff < timedelta(days=max_baseline) and processed[n, num] != 1:
+                c_dates.append(date_2)
+                nums.append(num)
+
+        if len(c_dates) != 0:
+            # Then create ifgs of first and second
+            shape_ifg = (first.shape[0], first.shape[1], len(nums))
+            first_ifg = np.memmap(os.path.join(overlap_path, 'first_ifg'), 'complex64', shape=shape_ifg, mode='w+')
+            first_ifg[:] = first[:, :, n][:, :, None] * first[:, :, nums].conj()
+            second_ifg = np.memmap(os.path.join(overlap_path, 'second_ifg'), 'complex64', shape=shape_ifg, mode='w+')
+            second_ifg[:] = second[:, :, n][:, :, None] * second[:, :, nums].conj()
+
+            # And the double difference
+            double_diff = np.memmap(os.path.join(overlap_path, 'double_diff'), 'complex64', shape=shape_ifg, mode='w+')
+            double_diff[:] = first_ifg * second_ifg.conj()
+            double_diff = multilook(double_diff, az, ra, summation=True)
+            diff_phase = np.angle(double_diff)
+            diff_amp = np.abs(double_diff)
+
+            # Calculate coherence
+            amp_sq = np.memmap(os.path.join(overlap_path, 'amp_sq'), 'float32', shape=shape_ifg, mode='w+')
+            amp_sq[:] = (np.real(first_ifg) ** 2 + np.imag(first_ifg) ** 2) * (np.real(second_ifg) ** 2 + np.imag(second_ifg) ** 2)
+            coh_amp_sq = multilook(amp_sq, az, ra, summation=True)
+            coh = diff_amp / np.sqrt(coh_amp_sq)
+
+            # Calculate weights
+            coh[coh == 1] = 0
+            weight = 2 * az * ra * coh**2 / (1 - coh**2)
+            tot_weight = np.sum(np.sum(weight, axis=0), axis=0)
+
+            to_angle = PRF / (2 * np.pi * df_dc_ml)
+            shift_pix = diff_phase * to_angle[:, :, None]
+            shift = np.sum(np.sum(shift_pix * weight, axis=0), axis=0) / tot_weight
+            to_angle_weighted = np.sum(np.sum(to_angle[:, :, None] * weight, axis=0), axis=0) / tot_weight
+            var = np.sum(np.sum((shift[None, None, :] - shift_pix)**2 * weight, axis=0), axis=0) / tot_weight
+
+            var_matrix[0, n, nums] = var
+            diff_matrix[0, n, nums] = shift
+            to_angle_matrix[0, n, nums] = to_angle_weighted
+            weight_matrix[0, n, nums] = tot_weight
+
+    return diff_matrix, var_matrix, to_angle_matrix, weight_matrix, dates
+
+
+def prepare_esd(stack_folder, overlap, esd_type='ps', load_existing=False):
+    # Get some standard variables for the esd processing, used in both methods.
+
+    esd_folder = os.path.join(stack_folder, 'esd')
+    overlap_path = os.path.join(esd_folder, overlap)
+    files = os.listdir(os.path.join(overlap_path))
+    dates = sorted([f[:-2] for f in files if (f.endswith('_1') and len(f) > 10)])
+
+    diff_matrix = np.zeros(shape=(1, len(dates), len(dates)))
+    var_matrix = np.zeros(shape=(1, len(dates), len(dates)))
+    to_angle_matrix = np.zeros(shape=(1, len(dates), len(dates)))
+    weight_matrix = np.zeros(shape=(1, len(dates), len(dates)))
+    processed = np.zeros(shape=(len(dates), len(dates)))
+
+    # Now load the existing results if they are available.
+    if load_existing == True:
+        diff_m = np.load(os.path.join(overlap_path, esd_type + '_diff_matrix.npy'))
+        var_m = np.load(os.path.join(overlap_path, esd_type + '_var_matrix.npy'))
+        to_angle_m = np.load(os.path.join(overlap_path, esd_type + '_to_angle_matrix.npy'))
+        weight_m = np.load(os.path.join(overlap_path, esd_type + '_weight_matrix.npy'))
+        old_dates = np.load(os.path.join(overlap_path, esd_type + '_dates.npy'))
+
+        dates_overlap = [id for id, date in zip(range(len(dates)), old_dates) if date in dates]
+        diff_matrix[np.ix_([0], dates_overlap, dates_overlap)] = diff_m
+        var_matrix[np.ix_([0], dates_overlap, dates_overlap)] = var_m
+        to_angle_matrix[np.ix_([0], dates_overlap, dates_overlap)] = to_angle_m
+        weight_matrix[np.ix_([0], dates_overlap, dates_overlap)] = weight_m
+
+        processed[np.ix_(dates_overlap, dates_overlap)] = 1
+
+    return dates, overlap_path, diff_matrix, var_matrix, to_angle_matrix, weight_matrix, processed
+
+
+def gather_stack(overlap_path, line_length, pixel_length, abs=True):
+    # This function gathers the data from all dates into two 3D matrices
+
+    files = os.listdir(overlap_path)
+    first_files = [os.path.join(overlap_path, f) for f in files if f.endswith('_1') and len(f) > 10]
+    first_name = os.path.join(overlap_path, 'first')
+    if os.path.exists(first_name):
+        os.remove(first_name)
+    if abs:
+        first = np.memmap(first_name, 'float32', shape=(line_length, pixel_length, len(first_files)), mode='w+')
+    else:
+        first = np.memmap(first_name, 'complex64', shape=(line_length, pixel_length, len(first_files)), mode='w+')
+
+    for f, n in zip(first_files, range(len(first_files))):
+        first_dat = np.memmap(f, 'complex64', mode='r', shape=(line_length, pixel_length))
+        if abs:
+            first[:, :, n] = np.abs(first_dat[:, :])
+        else:
+            first[:, :, n] = first_dat[:, :]
+
+    second_files = [os.path.join(overlap_path, f) for f in files if f.endswith('_2') and len(f) > 10]
+    second_name = os.path.join(overlap_path, 'second')
+    if os.path.exists(second_name):
+        os.remove(second_name)
+    if abs:
+        second = np.memmap(second_name, 'float32', shape=(line_length, pixel_length, len(second_files)), mode='w+')
+    else:
+        second = np.memmap(second_name, 'complex64', shape=(line_length, pixel_length, len(second_files)), mode='w+')
+
+    for f, n in zip(second_files, range(len(second_files))):
+        second_dat = np.memmap(f, 'complex64', mode='r', shape=(line_length, pixel_length))
+        if abs:
+            second[:, :, n] = np.abs(second_dat[:, :])
+        else:
+            second[:, :, n] = second_dat[:, :]
+
+    return first, second
+
+
+def multilook(matrix, az, ra, summation=False):
+    # This function multilooks a matrix, which is either in 2D or 3D. In the case of 3D the third dimension is
+    # considered the time dimension. If summation is True we do not average but sum the values in the multilooking area.
+    # Multilooking always starts at the first range/azimuth pixel.
+
+    # First downsample 2 * 10
+    new_ra = matrix.shape[1] / ra
+    new_az = matrix.shape[0] / az
+
+    size = matrix.shape
+    if len(size) == 3 and summation == False:
+        matrix_multilook = matrix[:new_az * az, :new_ra * ra, :].reshape([new_az, az, new_ra, ra, size[2]]).mean(3).mean(1)
+    elif len(size) == 2 and summation == False:
+        matrix_multilook = matrix[:new_az * az, :new_ra * ra].reshape([new_az, az, new_ra, ra]).mean(3).mean(1)
+    elif len(size) == 3 and summation == True:
+        matrix_multilook = matrix[:new_az * az, :new_ra * ra, :].reshape([new_az, az, new_ra, ra, size[2]]).sum(3).sum(1)
+    elif len(size) == 2 and summation == True:
+        matrix_multilook = matrix[:new_az * az, :new_ra * ra].reshape([new_az, az, new_ra, ra]).sum(3).sum(1)
+    else:
+        print('matrix does not have the right size')
+        return []
+
+    matrix_multilook = matrix_multilook.astype(matrix_multilook.dtype, subok=False)
+
+    return matrix_multilook
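+
+# Hedged example: multilooking a 100x200 array by az=2, ra=10 returns a 50x20
+# array of window means (window sums when summation=True):
+#
+#   ml = multilook(np.ones((100, 200)), az=2, ra=10)   # ml.shape == (50, 20)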
+
+
+def get_burst(overlap):
+    s = overlap.split('_')
+    burst = s[0] + '_' + s[1] + '_' + s[2] + '_' + s[3]
+    next_burst = s[4] + '_' + s[5] + '_' + s[6] + '_' + s[7]
+
+    nBurst = int(s[3])
+
+    return nBurst, burst, next_burst
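+
+# Hedged sketch of the overlap key format this assumes, e.g.
+# 'swath_1_burst_3_swath_1_burst_4' yields burst 'swath_1_burst_3',
+# next_burst 'swath_1_burst_4' and nBurst = 3:
+#
+#   nBurst, burst, next_burst = get_burst('swath_1_burst_3_swath_1_burst_4')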
+
+
+def swath_path(stack_folder, date, key):
+    date_folder = date[:4] + date[5:7] + date[8:10]
+    swath_burst = key.split('_')
+    file_path = os.path.join(stack_folder, date_folder, swath_burst[0] + '_' + swath_burst[1])
+
+    return file_path
+
+
+def master_file(key):
+    # This function converts combinations of dates and keys to a datafile name
+    string = '_iw_' + key[6] + '_burst_' + key[14:]
+    string = 'master' + string + '.raw'
+
+    return string
+
+# Actually execute the code...
+if __name__ == "__main__":
+
+    ps_select = '1'
+    if len(sys.argv) == 7:
+        stack_folder  = sys.argv[1]
+        overlap       = sys.argv[2]
+        esd_type      = sys.argv[3]
+        max_baseline  = sys.argv[4]
+        master_date   = sys.argv[5]
+        ps_select     = sys.argv[6]  # Use 1 if needed, use 0 if not
+    elif len(sys.argv) == 6:
+        stack_folder  = sys.argv[1]
+        overlap       = sys.argv[2]
+        esd_type      = sys.argv[3]
+        max_baseline  = sys.argv[4]
+        master_date   = sys.argv[5]
+    else:
+        sys.exit('usage: stack_folder overlap esd_type max_baseline master_date [ps_select]')
+
+    print('stack folder is ' + stack_folder)
+    print('burst is ' + overlap)
+    print('ps select is ' + ps_select)
+    print('max baseline is ' + max_baseline)
+    print('master date is ' + master_date)
+    print('type ESD is ' + esd_type)
+
+    # first get all the dates from the stack:
+    master_key = master_date[:4] + master_date[5:7] + master_date[8:]
+
+    ifgs = [f for f in os.listdir(stack_folder) if len(f) == 8]
+    dates = [f[:4] + '-' + f[4:6] + '-' + f[6:8] for f in ifgs if f != master_key]
+
+    # Then run the overlap cutout / ps selection and
+    save_overlapping(stack_folder, master_date, dates, overlap)
+
+    # If we want to select ps points
+    if ps_select == '1':
+        find_ps_overlapping(stack_folder, overlap)
+
+    # Get the esd results for the overlapping areas either based on ps or coherence
+    max_baseline = int(max_baseline)
+    if esd_type == 'ps':
+        select_ps_data(stack_folder, overlap)
+        diff_matrix, var_matrix, to_angle_matrix, weight, dates = network_esd_ps(stack_folder, overlap, master_date, max_baseline)
+    elif esd_type == 'coh':
+        diff_matrix, var_matrix, to_angle_matrix, weight, dates = network_esd_coh(stack_folder, overlap, master_date, max_baseline)
+    else:
+        sys.exit('Type should either be coh or ps')
+
+    # And save them in the corresponding folder:
+    folder = os.path.join(stack_folder, 'esd', overlap)
+
+    np.save(os.path.join(folder, esd_type + '_diff_matrix'), diff_matrix)
+    np.save(os.path.join(folder, esd_type + '_var_matrix'), var_matrix)
+    np.save(os.path.join(folder, esd_type + '_to_angle_matrix'), to_angle_matrix)
+    np.save(os.path.join(folder, esd_type + '_weight_matrix'), weight)
+    np.save(os.path.join(folder, esd_type + '_dates'), dates)
diff --git a/doris_stack/functions/__init__.py b/doris_stack/functions/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/doris_stack/functions/baselines.py b/doris_stack/functions/baselines.py
new file mode 100755
index 0000000..2e841a5
--- /dev/null
+++ b/doris_stack/functions/baselines.py
@@ -0,0 +1,99 @@
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+import warnings
+from shutil import copyfile
+from doris.doris_stack.main_code.resdata import ResData
+import datetime
+import subprocess
+
+
+def baselines(dir_in,inputfile,start_date='2014-01-01',end_date='2018-01-01',doris=''):
+    # This function calculates the baselines and plots a baseline plot.
+
+    # Define doris path (note: doris_path is assumed to be defined in the calling
+    # environment, e.g. set from the project settings; it is not defined here)
+    if not doris:
+        doris = doris_path
+
+    if not os.path.exists(dir_in):
+        warnings.warn('The input directory does not exist!')
+        return
+
+    os.chdir(dir_in)
+    process_folder = os.path.join(dir_in, 'baseline_process')
+    if not os.path.exists(process_folder):
+        os.mkdir(process_folder)
+    os.chdir(process_folder)
+
+    try:
+        first = np.datetime64(start_date)
+        last = np.datetime64(end_date)
+    except ValueError:
+        warnings.warn('Input dates could not be converted, use "yyyy-mm-dd"')
+        return
+
+    # Search for folders and take only the first burst.
+    folders = next(os.walk(dir_in))[1]
+    folders = sorted(folders)
+
+    # Initialize... (Search for folders / resfiles / dates)
+    n = 0
+    res = []; date = []
+    for fold in folders:
+        # Select only the folders with a name like yyyymmdd that fall within the requested date range
+        if len(fold) == 8:
+            # define date of folder
+            date_prod = np.datetime64((fold[:4] + '-' + fold[4:6] + '-' + fold[6:]))
+
+            if date_prod >= first and date_prod <= last:
+                # Select the first swath
+                date_fold = os.path.join(dir_in,fold)
+                swath_fold = os.path.join(date_fold,next(os.walk(date_fold))[1][0])
+                # Select the first burst
+                prod_files = next(os.walk(swath_fold))[2]
+                for file in prod_files:
+                    if file.endswith('1.res'):
+                        res.extend([os.path.join(swath_fold,file)])
+                        date.extend([date_prod])
+                        n = n + 1
+                        break
+
+    # Now create a set of baselines
+
+    baselines = np.zeros([len(res),len(res)])
+    resfiles = dict()
+
+    # First create the ifgs.res files and store the data in a res data class.
+    master = res[0]
+    copyfile(master,os.path.join(process_folder,'master.res'))
+
+    for resultfile, dat in zip(res, date):
+        copyfile(resultfile,os.path.join(process_folder,'slave.res'))
+        subprocess.call([doris + ' ' + inputfile], shell=True)
+
+        dat = dat.astype(datetime.datetime).strftime('%Y-%m-%d')
+        resfiles[dat] = ResData(type='interferogram',filename='ifgs.res')
+        resfiles[dat].read()
+        os.remove(os.path.join(process_folder,'ifgs.res'))
+
+    # Then gather the baselines
+    for dat, j in zip(date, range(len(date))):
+        dat = dat.astype(datetime.datetime).strftime('%Y-%m-%d')
+        baselines[j,0] = resfiles[dat].processes['coarse_orbits']['Bperp'][1]
+
+
+
+    # Create figure of baselines.
+    days = (date[0] - date).astype(float)
+    plt.figure(111)
+    plt.plot(baselines[:,0], days, marker='o')
+
+    # Annotate
+    for dat, x, y in zip(date, baselines[:,0], days):
+        dat = dat.astype(datetime.datetime).strftime('%Y-%m-%d')
+        plt.annotate(
+            dat,
+            xy = (x, y), xytext = (0, 0),
+            textcoords = 'offset points', size = 8)
+
+    plt.savefig('baseline_plot.pdf')
+
diff --git a/doris_stack/functions/burst_metadata.py b/doris_stack/functions/burst_metadata.py
new file mode 100755
index 0000000..e90dfd6
--- /dev/null
+++ b/doris_stack/functions/burst_metadata.py
@@ -0,0 +1,153 @@
+# Based on the orbit of the swath, the orbit of each individual burst is calculated.
+
+from orbit_coordinates import lph2xyz, xyz2ell, intrp_orbit
+import os
+import numpy as np
+import collections
+from datetime import datetime
+from doris.doris_stack.functions.resdata import ResData
+from shapely.geometry import Polygon
+
+
+def burst_header(resID):
+
+    meta = collections.OrderedDict()
+
+    meta['row_1'] = ['===============================================\n']
+    meta['MASTER RESULTFILE:'] = resID
+    meta['Created by'] = 'G.Mulder TU Delft'
+    meta['row_2'] = 'Doris (Delft o-o Radar Interferometric Software)'
+    meta['Version'] = 'Version (2015) (For TOPSAR)'
+    meta['FFTW library'] = 'used'
+    meta['VECLIB library'] = 'not used'
+    meta['LAPACK library'] = 'not used'
+    meta['Compiled at'] = 'XXXXXXXX'
+    meta['By GNU gcc'] = 'XXXXXXXX'
+    meta['row_3'] = ['===============================================\n']
+
+    return meta
+
+
+def burst_readfiles(meta, burst_num, burst_center, burst_border, swath_data):
+    # First copy swath metadata for burst and create a georef dict which stores information about the geo reference of
+    # the burst.
+    meta['Burst_number_index'] = str(burst_num)
+    aux = meta['aux']
+    aux['azimuthPRF'] = [meta['Pulse_Repetition_Frequency (computed, Hz)']]
+    meta.pop('aux')
+
+    # First find coordinates of center and optionally the corners
+    meta['Scene_centre_longitude'] = str(burst_center[0])
+    meta['Scene_centre_latitude'] = str(burst_center[1])
+    meta['Scene_ul_corner_latitude'] = str(burst_border[0][1])
+    meta['Scene_ur_corner_latitude'] = str(burst_border[1][1])
+    meta['Scene_lr_corner_latitude'] = str(burst_border[2][1])
+    meta['Scene_ll_corner_latitude'] = str(burst_border[3][1])
+    meta['Scene_ul_corner_longitude'] = str(burst_border[0][0])
+    meta['Scene_ur_corner_longitude'] = str(burst_border[1][0])
+    meta['Scene_lr_corner_longitude'] = str(burst_border[2][0])
+    meta['Scene_ll_corner_longitude'] = str(burst_border[3][0])
+
+    # Find doppler centroid frequency and azimuth reference time
+    doppler_times = [np.datetime64(aux['doppler_azimuth_Time'][i] + '-00') for i in range(len(aux['doppler_azimuth_Time']))]
+    frequency_times = [np.datetime64(aux['azimuthFmRate_reference_Azimuth_time'][i] + '-00') for i in range(len(aux['azimuthFmRate_reference_Azimuth_time']))]
+    burst_start_time = np.datetime64(aux['azimuthTimeStart'][burst_num-1] + '-00')
+    meta['First_pixel_azimuth_time (UTC)'] = burst_start_time.astype(datetime).strftime('%Y-%b-%d %H:%M:%S.%f')
+
+    # First index after start burst for doppler and azimuth
+    doppler_id = np.where(doppler_times > burst_start_time)
+    frequency_id = np.where(frequency_times > burst_start_time)
+
+    # Assign DC values to metadata
+    if len(doppler_id[0]) > 0:
+        doppler_id = np.min(doppler_id)
+        parameter = aux['dopplerCoeff'][doppler_id].split()
+        meta['DC_reference_azimuth_time'] = np.datetime64(aux['doppler_azimuth_Time'][doppler_id] + '-00').astype(datetime).strftime('%Y-%b-%d %H:%M:%S.%f')
+        meta['DC_reference_range_time'] = aux['doppler_range_Time'][doppler_id]
+    else:
+        parameter = ['0.0','0.0','0.0']
+        meta['DC_reference_azimuth_time'] = '0.0 0.0'
+        meta['DC_reference_range_time'] = aux['doppler_range_Time'][-1]
+    # Assign parameters
+    meta['Xtrack_f_DC_constant (Hz, early edge)'] = parameter[0]
+    meta['Xtrack_f_DC_linear (Hz/s, early edge)'] = parameter[1]
+    meta['Xtrack_f_DC_quadratic (Hz/s/s, early edge)'] = parameter[2]
+
+    # Assign FM values to metadata
+    if len(frequency_id[0]) > 0:
+        frequency_id = np.min(frequency_id)
+        if aux['azimuthFmRate_c0']:
+            parameter = [aux['azimuthFmRate_c0'][frequency_id],aux['azimuthFmRate_c1'][frequency_id],aux['azimuthFmRate_c2'][frequency_id]]
+        else:
+            parameter = aux['azimuthFmRatePolynomial'][frequency_id].split()
+        meta['FM_reference_azimuth_time'] = np.datetime64(aux['azimuthFmRate_reference_Azimuth_time'][frequency_id] + '-00').astype(datetime).strftime('%Y-%b-%d %H:%M:%S.%f')
+        meta['FM_reference_range_time'] = aux['azimuthFmRate_reference_Range_time'][frequency_id]
+    else:
+        parameter = ['0.0','0.0','0.0']
+        meta['FM_reference_azimuth_time'] = '0.0 0.0'
+        meta['FM_reference_range_time'] = aux['azimuthFmRate_reference_Range_time'][-1]
+    # Assign parameters
+    meta['FM_polynomial_constant_coeff (Hz, early edge)'] = parameter[0]
+    meta['FM_polynomial_linear_coeff (Hz/s, early edge)'] = parameter[1]
+    meta['FM_polynomial_quadratic_coeff (Hz/s/s, early edge)'] = parameter[2]
+
+    # Add information about swath
+    meta['row_1'] = ['******************************************************************']
+    meta['Datafile'] = os.path.basename(swath_data)
+    meta['Dataformat'] = 'tiff'
+    meta['Number_of_lines_original'] = aux['imageLines'][0]
+    meta['Number_of_pixels_original'] = aux['imagePixels'][0]
+    meta['deramp'] = '0'
+    meta['reramp'] = '0'
+    meta['ESD_correct'] = '0'
+
+    return meta
+
+
+def burst_crop(meta,burst_num,swath_data,new_burst_num):
+    # This function returns a description of the crop files part, which defines how the burst is cropped out of the
+    # swath data. This function is generally called when the .res and raw data is written to the datastack and uses one
+    # of the outputs from the burst_readfiles
+
+    crop = collections.OrderedDict()
+
+    last_sample =  [int(x) for x in meta['aux']['lastValidSample'][burst_num-1].split()]
+    first_sample = [int(x) for x in meta['aux']['firstValidSample'][burst_num-1].split()]
+
+    swath_data = os.path.basename(swath_data)
+    crop['Data_output_file'] = 'slave_iw_' + swath_data[6] + '_burst_' + str(new_burst_num) + '.raw'
+    crop['Data_output_format'] = 'complex_short'
+
+    # Start line of this burst in total swath product
+    lines = meta['aux']['imageLines'][0]
+    extra_lines = int(lines) * (burst_num-1)
+
+    # All coordinates are one based (e.g. we start at pixel 1)
+    halfway = int(len(last_sample)/2)
+
+    crop['First_line (w.r.t. tiff_image)'] = str(1 + last_sample[:halfway].count(-1) + extra_lines)
+    crop['Last_line (w.r.t. tiff_image)'] = str(len(last_sample) - last_sample[halfway:].count(-1) + extra_lines)
+    crop['First_line (w.r.t. original_image)'] = str(1 + last_sample[:halfway].count(-1))
+    crop['Last_line (w.r.t. original_image)'] = str(len(last_sample) - last_sample[halfway:].count(-1))
+    crop['First_pixel (w.r.t. original_image)'] = str(max(first_sample))
+    crop['Last_pixel (w.r.t. original_image)'] = str(max(last_sample))
+
+    return crop
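+
+# Hedged sketch of the valid-sample bookkeeping above: lastValidSample holds one
+# value per line, with -1 marking invalid lines, so counting the -1 entries in
+# the first and second half gives the invalid lines at the top and bottom:
+#
+#   last_sample = [-1, -1, 340, 341, 342, -1]                  # illustrative values
+#   first_line = 1 + last_sample[:3].count(-1)                 # -> 3
+#   last_line = len(last_sample) - last_sample[3:].count(-1)   # -> 5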
+
+
+def center_shape_from_res(resfile):
+    # This function reads the shape and center of a burst from a .res file.
+
+    res = ResData(resfile)
+    res.res_read()
+    meta = res.processes['readfiles']
+
+    center = (float(meta['Scene_centre_longitude']), float(meta['Scene_centre_latitude']))
+    ul = (float(meta['Scene_ul_corner_longitude']), float(meta['Scene_ul_corner_latitude']))
+    ur = (float(meta['Scene_ur_corner_longitude']), float(meta['Scene_ur_corner_latitude']))
+    lr = (float(meta['Scene_lr_corner_longitude']), float(meta['Scene_lr_corner_latitude']))
+    ll = (float(meta['Scene_ll_corner_longitude']), float(meta['Scene_ll_corner_latitude']))
+
+    coverage = Polygon([ul, ur, lr, ll])
+
+    return center, coverage
\ No newline at end of file
diff --git a/doris_stack/functions/cleanup_stack.py b/doris_stack/functions/cleanup_stack.py
new file mode 100644
index 0000000..132449c
--- /dev/null
+++ b/doris_stack/functions/cleanup_stack.py
@@ -0,0 +1,125 @@
+# This script can be used to clean up a datastack.
+# In this script we use the following rationale:
+# 1. Data from the master date and ESD are always kept (can change when these scripts evolve)
+# 2. The last processing step is saved (or, if different processing steps are connected and the last one is not
+#    finished yet, the step before it)
+# 3. All .res and .ras files are kept.
+# 4. Output of the following steps is never deleted > resampled slave / subt_refdem / filtphase (multilooked) /
+#    coherence (multilooked) / unwrap
+
+import os
+import sys
+from doris.doris_stack.main_code.resdata import ResData
+
+def res_file_selection(path, dat_type='burst'):
+    # Give an overview of all files connected to the different steps:
+
+    del_files = []
+    res_dat = dict()
+
+    # First check if slave / master / ifgs res files are there
+    for res_type in ['master', 'slave', 'ifgs']:
+        if os.path.exists(os.path.join(path, res_type + '.res')):
+            res_dat[res_type] = ResData(os.path.join(path, res_type + '.res'), type=res_type)
+        else:
+            print('No data found in ' + path)
+            return []
+
+    if res_dat['slave'].process_control['readfiles'] == '1' and res_dat['slave'].process_control['crop'] == '1':
+        if res_dat['slave'].processes['readfiles']['deramp'] == '1':
+            # Remove the ramped image data.
+            del_files.append(res_dat['slave'].processes['crop']['Data_output_file'][:-12] + '.raw')
+    if res_dat['ifgs'].process_control['dem_assist'] == '1':
+        # Remove the temporary files for dem_assist
+        del_files.extend(['dac_delta_demline.temp', 'dac_delta_dempixel.temp', 'dac_m_demline.temp', 'dac_m_dempixel.temp'])
+    if res_dat['slave'].process_control['resample'] == '1' and res_dat['ifgs'].process_control['dem_assist'] == '1':
+        del_files.extend(['dac_delta_line.raw', 'dac_delta_pixel.raw'])
+    if res_dat['slave'].process_control['resample'] == '1':
+        # After resampling the deramped file is not needed anymore. Also processing data from resampling is not needed.
+        del_files.append(res_dat['slave'].processes['crop']['Data_output_file'])
+        del_files.extend(['rsmp_orig_slave_line.raw', 'rsmp_orig_slave_pixel.raw'])
+
+    # Now the resampled slave stays.
+    if res_dat['ifgs'].process_control['subtr_refphase'] == '1':
+        # If the reference phase is subtracted both the reramped slave and interferogram can be removed
+        del_files.extend(['cint.raw', 'slave_rsmp_reramped.raw'])
+    if res_dat['ifgs'].process_control['subtr_refdem'] == '1':
+        # When the dem phase is removed, the interferogram with subtracted reference phase can be removed.
+        del_files.extend(['cint.raw', 'cint_srp.raw', 'demcrop.raw', 'dem_radar.raw', 'master_slave.crd'])
+    if res_dat['ifgs'].process_control['filtphase'] == '1':
+        # When the filtphase step is done after the removal of the reference dem, the subtr_refdem ifgs is removed.
+        del_files.extend(['cint.raw', 'cint_srp.raw', 'cint_srd.raw', 'demcrop.raw', 'dem_radar.raw', 'master_slave.crd'])
+
+    # Finally if it is about a burst image, we can check whether the filtered and coherence files are already
+    # concatenated, which means they can be removed.
+    image_ifgs = os.path.join(os.path.dirname(os.path.dirname(path)), 'ifgs.res')
+    if dat_type == 'burst' and os.path.exists(image_ifgs):
+        # If there is a ifgs.res file for the full image.
+
+        image_res = ResData(image_ifgs)
+        if image_res.process_control['coherence'] == '1':
+            del_files.append('coherence.raw')
+        if image_res.process_control['filtphase'] == '1':
+            del_files.append('cint.0.2filtered')
+
+    elif dat_type == 'image':
+        # If we are looking at the full image, it is important to remove the non-multilooked coherence or filtered image
+        if res_dat['ifgs'].process_control['filtphase'] == '1':
+            if res_dat['ifgs'].processes['filtphase']['Data_output_file'].endswith('ml.raw'):
+                del_files.append('cint.0.2filtered')
+        if res_dat['ifgs'].process_control['coherence'] == '1':
+            if res_dat['ifgs'].processes['coherence']['Data_output_file'].endswith('ml.raw'):
+                del_files.append('coherence.raw')
+
+    del_files = set(del_files)
+    del_files = [os.path.join(path, del_file) for del_file in del_files]
+    del_files = [del_file for del_file in del_files if os.path.exists(del_file)]
+
+    return del_files
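+
+# Hedged usage sketch (the path is illustrative): collect the files that can be
+# removed from one burst directory without deleting any end products:
+#
+#   removable = res_file_selection('/stack/20150101/swath_1/burst_1', dat_type='burst')
+#   for f in removable:
+#       os.remove(f)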
+
+def cleanup_stack(path, master_key):
+    # This is the main cleanup function.
+
+    folders = next(os.walk(path))[1]
+    if not master_key in folders:
+        print('master folder not found in path')
+        return
+    else:
+        folders.remove(master_key)
+
+    del_files = []
+
+    for folder in folders:
+        del_dat = res_file_selection(os.path.join(path, folder), dat_type='image')
+        del_files.extend(del_dat)
+
+        burst_folders = find_burst_folders(os.path.join(path, folder))
+        for burst_folder in burst_folders:
+            del_dat = res_file_selection(os.path.join(path, burst_folder), dat_type='burst')
+            del_files.extend(del_dat)
+
+    for filename in del_files:
+        print('removing: ' + filename)
+        os.remove(filename)
+    print('Success! path ' + path + ' is cleaned of temporary files.')
+
+
+def find_burst_folders(folder):
+    # This function finds all the burst folders in an image folder
+    folders = []
+    swaths = next(os.walk(folder))[1]
+    for swath in swaths:
+        bursts = next(os.walk(os.path.join(folder, swath)))[1]
+        for burst in bursts:
+            folders.append(os.path.join(folder, swath, burst))
+    return folders
+
+# Actually execute the code...
+if __name__ == "__main__":
+    path = sys.argv[1]
+    master_key = sys.argv[2]
+
+    print('path to be cleaned ' + path)
+    print('master key is ' + master_key)
+
+    cleanup_stack(path, master_key)
diff --git a/doris_stack/functions/compassbearing.py b/doris_stack/functions/compassbearing.py
new file mode 100755
index 0000000..4a40319
--- /dev/null
+++ b/doris_stack/functions/compassbearing.py
@@ -0,0 +1,26 @@
+import math
+
+
+def calculate_initial_compass_bearing(pointA, pointB):
+
+    if (type(pointA) != tuple) or (type(pointB) != tuple):
+        raise TypeError("Only tuples are supported as arguments")
+
+    lat1 = math.radians(pointA[0])
+    lat2 = math.radians(pointB[0])
+
+    diffLong = math.radians(pointB[1] - pointA[1])
+
+    x = math.sin(diffLong) * math.cos(lat2)
+    y = math.cos(lat1) * math.sin(lat2) - (math.sin(lat1)
+            * math.cos(lat2) * math.cos(diffLong))
+
+    initial_bearing = math.atan2(x, y)
+
+    # Now we have the initial bearing, but math.atan2 returns values
+    # from -180 to +180, which is not what we want for a compass bearing.
+    # The solution is to normalize the initial bearing as shown below.
+    initial_bearing = math.degrees(initial_bearing)
+    compass_bearing = (initial_bearing + 360) % 360
+
+    return compass_bearing
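+
+# Hedged usage example with (lat, lon) tuples; Delft (52.0, 4.4) to Paris
+# (48.9, 2.4) points south-southwest, giving a bearing of roughly 203 degrees:
+#
+#   bearing = calculate_initial_compass_bearing((52.0, 4.4), (48.9, 2.4))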
\ No newline at end of file
diff --git a/doris_stack/functions/concatenate_decatenate.py b/doris_stack/functions/concatenate_decatenate.py
new file mode 100755
index 0000000..13efee4
--- /dev/null
+++ b/doris_stack/functions/concatenate_decatenate.py
@@ -0,0 +1,215 @@
+from doris.doris_stack.main_code.resdata import ResData
+import numpy as np
+import os, sys
+
+
+def decatenate(date_folder, image_file, burst_file, datatype, multilooked='none', res_type='master'):
+    # Decatenate data: split the full image into the individual burst files.
+    # multilooked is either 'none' (use the original resolution) or a 7-character
+    # string 'aaa_rrr' with the azimuth/range multilook factors (e.g. '004_020').
+
+    if len(multilooked) == 7:
+        master = os.path.join(date_folder, image_file[:-4] + '_' + multilooked + '.raw')
+    else:
+        master = os.path.join(date_folder, image_file)
+
+    # Load .res files
+    image_res, burst_res = read_res(date_folder, type=res_type)
+
+    # Read image size
+    bursts = burst_res.keys()
+    if multilooked != 'none':
+        try:
+            no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_lines_output_image'])
+            no_pixels = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_pixels_output_image'])
+        except:
+            print('Not able to load multilooking parameters for ' + image_file)
+            return
+    else:
+        no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_lines_output_image'])
+        no_pixels = int(burst_res[bursts[0]].processes['readfiles']['Number_of_pixels_output_image'])
+
+    # First use memmap to get a memory map of the full file.
+    full_image = np.memmap(master, dtype=datatype, mode='r', shape=(no_lines, no_pixels))
+
+    for burst in bursts:
+        # Copy the data for every burst out of the full image into its own file.
+
+        burst_dat, line_0, line_1, pix_0, pix_1, burst_pix, burst_line, az_offset, ra_offset = \
+            burst_info(burst, burst_file, burst_res, date_folder, multilooked)
+
+        # Cut out the full burst extent and write it to its own file.
+        burst_image = np.memmap(burst_dat, dtype=datatype, mode='w+', shape=(burst_line, burst_pix))
+        burst_image[:, :] = full_image[line_0-1:line_1, pix_0-1:pix_1]
+        burst_image.flush()
+
+
+def concatenate(date_folder, image_file, burst_file, datatype, multilooked='none', res_type='master'):
+    # Concatenate data: assemble the individual burst files into the full image.
+
+    if len(multilooked) == 7:
+        master = os.path.join(date_folder, image_file[:-4] + '_' + multilooked + '.raw')
+    else:
+        master = os.path.join(date_folder, image_file)
+
+    # Load .res files
+    image_res, burst_res = read_res(date_folder, type=res_type)
+
+    # Read image size
+    bursts = burst_res.keys()
+    if multilooked != 'none':
+        try:
+            no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_lines_output_image'])
+            no_pixels = int(burst_res[bursts[0]].processes['readfiles']['Number_of_ml_pixels_output_image'])
+        except:
+            print('Not able to load multilooking parameters for ' + image_file)
+            return
+    else:
+        no_lines = int(burst_res[bursts[0]].processes['readfiles']['Number_of_lines_output_image'])
+        no_pixels = int(burst_res[bursts[0]].processes['readfiles']['Number_of_pixels_output_image'])
+
+    # First use memmap to get a memory map of the full file.
+    full_image = np.memmap(master, dtype=datatype, mode='w+', shape=(no_lines, no_pixels))
+
+    for burst in bursts:
+        # Write the data of each burst to the full image. A fixed border (20 lines in
+        # azimuth, 100 pixels in range, scaled by the multilook factors) is skipped to
+        # avoid copying samples without information.
+
+        burst_dat, line_0, line_1, pix_0, pix_1, burst_pix, burst_line, daz, dra = \
+            burst_info(burst, burst_file, burst_res, multilooked)
+
+        # Skip the invalid border of the burst and write the rest to the full image.
+        burst_image = np.memmap(burst_dat, dtype=datatype, mode='r', shape=(burst_line, burst_pix))
+        full_image[(line_0+(daz-1)):(line_1-daz), (pix_0+(dra-1)):(pix_1-dra)] = burst_image[daz:-daz, dra:-dra]
+
+
+def burst_info(burst, burst_file, burst_res, date_folder, multilooked='none'):
+    # Information about this specific burst
+
+    # Base file name; the multilook suffix is appended below where needed.
+    if burst_file == 'master.raw':
+        string = '_iw_' + burst[6] + '_burst_' + burst[17:] + '.raw'
+    elif burst_file == 'master_deramped.raw':
+        string = '_iw_' + burst[6] + '_burst_' + burst[17:] + '_deramped.raw'
+    else:
+        string = burst_file
+
+    if len(multilooked) == 7:
+        burst_dat = os.path.join(date_folder, burst[0:7], burst[8:], string[:-4] + '_' + multilooked + '.raw')
+        line_0 = int(burst_res[burst].processes['readfiles']['First_line_' + multilooked])
+        line_1 = int(burst_res[burst].processes['readfiles']['Last_line_ml_' + multilooked])
+        pix_0 = int(burst_res[burst].processes['readfiles']['First_pixel_ml_' + multilooked])
+        pix_1 = int(burst_res[burst].processes['readfiles']['Last_pixel_ml_' + multilooked])
+        ra = int(burst_res[burst].processes['readfiles']['Multilook_range_' + multilooked])
+        az = int(burst_res[burst].processes['readfiles']['Multilook_azimuth_' + multilooked])
+    else:
+        burst_dat = os.path.join(date_folder, burst[0:7], burst[8:], string)
+        line_0 = int(burst_res[burst].processes['readfiles']['First_line (w.r.t. output_image)'])
+        line_1 = int(burst_res[burst].processes['readfiles']['Last_line (w.r.t. output_image)'])
+        pix_0 = int(burst_res[burst].processes['readfiles']['First_pixel (w.r.t. output_image)'])
+        pix_1 = int(burst_res[burst].processes['readfiles']['Last_pixel (w.r.t. output_image)'])
+        ra = 1
+        az = 1
+
+    az_offset = int(np.ceil(20 / float(az)))
+    ra_offset = int(np.ceil(100 / float(ra)))
+
+    burst_pix = pix_1 - pix_0 + 1
+    burst_line = line_1 - line_0 + 1
+
+    return burst_dat, line_0, line_1, pix_0, pix_1, burst_pix, burst_line, az_offset, ra_offset
+
+
+def read_res(date_folder, type='master'):
+    # Read .res data to the burst objects. Generally done after a processing step.
+
+    swaths = next(os.walk(date_folder))[1]
+    swaths = [fol for fol in swaths if len(fol) == 7]
+
+    res_burst = dict()
+
+    for swath in swaths:
+
+        bursts = next(os.walk(os.path.join(date_folder, swath)))[1]
+        bursts = [burst for burst in bursts if burst.startswith('burst')]
+
+        for burst in bursts:
+            slave_res = os.path.join(date_folder, swath, burst, 'slave.res')
+            master_res = os.path.join(date_folder, swath, burst, 'master.res')
+
+            burst_name = swath + '_' + burst
+
+            if type == 'master' and os.path.exists(master_res):
+                res_burst[burst_name] = ResData(filename=master_res)
+            elif type == 'slave' and os.path.exists(slave_res):
+                res_burst[burst_name] = ResData(filename=slave_res)
+            elif os.path.exists(master_res):
+                res_burst[burst_name] = ResData(filename=master_res)
+            elif os.path.exists(slave_res):
+                res_burst[burst_name] = ResData(filename=slave_res)
+            else:
+                print('No burst master or slave image available')
+                return
+
+    slave_res = os.path.join(date_folder, 'slave.res')
+    master_res = os.path.join(date_folder, 'master.res')
+
+    if type == 'master' and os.path.exists(master_res):
+        res_image = ResData(filename=master_res)
+    elif type == 'slave' and os.path.exists(slave_res):
+        res_image = ResData(filename=slave_res)
+    elif os.path.exists(master_res):
+        res_image = ResData(filename=master_res)
+    elif os.path.exists(slave_res):
+        res_image = ResData(filename=slave_res)
+    else:
+        print('No image master or slave image available')
+        return
+
+    return res_image, res_burst
+
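+# Example (hypothetical path): load the master metadata of one date folder:
+#   image_res, burst_res = read_res('/data/stack/20141116', type='master')
+#   burst_res.keys()  # e.g. ['swath_1_burst_1', ...]
+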
+# Actually execute the code...
+if __name__ == "__main__":
+
+    date_folder         = sys.argv[1]
+    type                = sys.argv[2]
+    burst_file          = sys.argv[3]
+    datatype            = sys.argv[4]
+    if len(sys.argv) > 5:
+        multilooked         = sys.argv[5]
+    else:
+        multilooked = 'none'
+    if len(sys.argv) > 6:
+        res_type = sys.argv[6]
+    else:
+        res_type = 'master'
+
+    print('concatenate folder is ' + date_folder)
+    print('burst_file is ' + burst_file)
+    print('datatype is ' + datatype)
+    print('concatenating multilooked image is ' + multilooked)
+
+    # multilooked should be a 7-character string: azimuth multilook factor, underscore,
+    # range multilook factor, for example '004_020'. The script detects whether this
+    # multilooked data is available; otherwise it will produce an error.
+    if len(multilooked) != 7:
+        multilooked = 'none'
+    image_file = burst_file
+
+    if datatype == 'cpxint16' or datatype == 'complex_short':
+        datatype = np.dtype([('re', np.int16), ('im', np.int16)])
+
+    if type == 'decatenate':
+        decatenate(date_folder, image_file, burst_file, datatype, multilooked, res_type)
+    elif type == 'concatenate':
+        concatenate(date_folder, image_file, burst_file, datatype, multilooked, res_type)
+    else:
+        sys.exit('type should either be decatenate or concatenate')
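+
+# Example invocation (hypothetical paths; the multilook string and res type are optional):
+#   python concatenate_decatenate.py /data/stack/20141116 concatenate cint.raw complex64 004_020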
diff --git a/doris_stack/functions/correct_ESD.py b/doris_stack/functions/correct_ESD.py
new file mode 100755
index 0000000..dbf8fae
--- /dev/null
+++ b/doris_stack/functions/correct_ESD.py
@@ -0,0 +1,51 @@
+# This script is used to remove the residual ramp in the ifgs of individual bursts based on ESD estimates.
+
+import numpy as np
+import os, sys
+
+if __name__ == "__main__":
+    # If calling script directly we have to load the package first to our python path
+    folder = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+    print(folder)
+    sys.path.extend([folder])
+
+from resdata import ResData
+
+
+def remove_ramp(file, angle_pixel):
+    # Remove ramp from burst
+
+    res_file = 'master.res'
+    res_dat = ResData(res_file, 'master')
+    crop = res_dat.processes['crop']
+    lines = int(crop['Last_line (w.r.t. original_image)']) - int(crop['First_line (w.r.t. original_image)']) + 1
+    pixels = int(crop['Last_pixel (w.r.t. original_image)']) - int(crop['First_pixel (w.r.t. original_image)']) + 1
+
+    n = np.arange(lines)
+    phase_diff = n * angle_pixel
+    complex_diff = np.cos(phase_diff).astype('complex64') + 1j * np.sin(phase_diff).astype('complex64')
+
+    dat_file = np.memmap(file, dtype='complex64', mode='r+', shape=(lines, pixels))
+    p_before = np.nanmean(np.angle(dat_file))
+    print('Average phase before is ' + str(p_before))
+
+    dat_file[:, :] = dat_file * complex_diff.conj()[:, None]
+    p_after = np.nanmean(np.angle(dat_file))
+    print('Average phase after is ' + str(p_after))
+    dat_file.flush()
+
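+# Usage sketch (file name and ramp value are illustrative): remove a residual ESD
+# ramp of 1e-4 radians per azimuth line from a burst interferogram:
+#   remove_ramp('cint_srd.raw', 1e-4)
+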
+if __name__ == "__main__":
+    # If calling script directly we run the remove_ramp function.
+
+    if len(sys.argv) == 3:
+        file            = sys.argv[1]
+        angle_pixel     = sys.argv[2]  # phase ramp in radians per azimuth line
+    else:
+        sys.exit('usage: file angle_per_pixel')
+
+    print('file we will deramp is ' + file)
+    print('the angle per pixel is ' + angle_pixel)
+
+    angle_pixel = float(angle_pixel)
+    remove_ramp(file, angle_pixel)
+
diff --git a/doris_stack/functions/create_image.py b/doris_stack/functions/create_image.py
new file mode 100644
index 0000000..a3b77f1
--- /dev/null
+++ b/doris_stack/functions/create_image.py
@@ -0,0 +1,13 @@
+# This function creates images based on complex or real input data.
+# Input parameters are:
+# - Input matrix
+# - datatype
+# - Image scaling (are pixels for example 1x2 km or 3x1 km?) scaling is azimuth / range.
+# - Use of logscaling?
+# - Plot amplitude / phase / both (not relevant for real values...)
+
+# If you want to save your data as a geotiff or netcdf file use the read_write_data.py script. This will enable
+# visualization in for example QGIS or ArcGIS
+# If you want to do multilooking first apply the multilook.py script.
+
+def create_image():
+    pass  # plotting routine not implemented in this version
diff --git a/doris_stack/functions/do_deramp_SLC.py b/doris_stack/functions/do_deramp_SLC.py
new file mode 100755
index 0000000..c63a2e5
--- /dev/null
+++ b/doris_stack/functions/do_deramp_SLC.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+import numpy as np
+from get_ramp import get_ramp
+from doris.doris_stack.functions.ESD_functions import freadbk
+from doris.doris_stack.main_code.resdata import ResData
+import sys
+
+
+def usage():
+    print '\nUsage: python do_deramp_SLC.py dataFilename resFilename'
+    print '  where dataFilename     is the name of the burst you want to deramp'
+    print '        resFilename      is the .res file of the burst'
+    print ' This function removes the phase ramp (Doppler centroid variations) from a single'
+    print ' burst of an RS2 or S1 TOPS acquisition. The deramped image is written next to'
+    print ' DATAFILENAME with the suffix _deramped.raw. The .res file RESFILENAME is required.'
+    print '  for example'
+    print ' python do_deramp_SLC.py 20140821_iw_2_burst_1.raw slave.res'
+    print ' created by Gert Mulder'
+    print ' Part of the code adapted from Lorenzo Iannini and Wu Wenhao'
+
+try:
+    dataFilename = sys.argv[1]
+    resFilename = sys.argv[2]
+except:
+    print 'Unrecognized input'
+    usage()
+    sys.exit(1)
+
+#*****************************************************************************#
+# Calculate chirp for deramping
+ChirpFilt = get_ramp(resFilename, resampled=0, type='chirp')
+
+res = ResData(resFilename)
+
+# Image size properties
+if res.process_control['oversample'] == '1': #oversampled data
+    l0 = int(res.processes['oversample']['First_line (w.r.t. ovs_image)'])
+    lN = int(res.processes['oversample']['Last_line (w.r.t. ovs_image)'])
+    p0 = int(res.processes['oversample']['First_pixel (w.r.t. ovs_image)'])
+    pN = int(res.processes['oversample']['Last_pixel (w.r.t. ovs_image)'])
+    dataFormat = 'cpxfloat32'
+else: # original data
+    l0 = int(res.processes['crop']['First_line (w.r.t. original_image)'])
+    lN = int(res.processes['crop']['Last_line (w.r.t. original_image)'])
+    p0 = int(res.processes['crop']['First_pixel (w.r.t. original_image)'])
+    pN = int(res.processes['crop']['Last_pixel (w.r.t. original_image)'])
+    dataFormat = 'cpxint16'
+
+# Image size
+Naz_res = lN-l0+1
+Nrg_res = pN-p0+1
+
+################################################################################
+# Read data
+
+slc = freadbk(dataFilename, 1, 1, int(Naz_res), int(Nrg_res), dataFormat, int(Naz_res), int(Nrg_res))
+
+#######################################################################################
+
+newFilename = dataFilename[:-4] + '_deramped.raw'
+fid = open(newFilename, 'wb')
+slc_deramped = np.conj(ChirpFilt)*slc
+del ChirpFilt
+del slc
+
+# Write the deramped data to disk.
+if dataFormat == 'cpxfloat32':  # oversampled float data
+    slc_dat = slc_deramped.astype(np.complex64)
+else:  # cpxint16
+    slc_dat = np.zeros(shape=(int(Naz_res), int(Nrg_res) * 2)).astype('int16')
+    slc_dat[:, 0::2] = np.real(slc_deramped)
+    slc_dat[:, 1::2] = np.imag(slc_deramped)
+
+fid.write(slc_dat)
+fid.close()
+
+print "\nDeramp operation completed\n"
diff --git a/doris_stack/functions/do_reramp_SLC.py b/doris_stack/functions/do_reramp_SLC.py
new file mode 100755
index 0000000..a7e266e
--- /dev/null
+++ b/doris_stack/functions/do_reramp_SLC.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+import numpy as np
+from doris.doris_stack.functions.get_ramp import get_ramp
+from doris.doris_stack.functions.ESD_functions import freadbk
+from doris.doris_stack.main_code.resdata import ResData
+import sys
+
+def usage():
+    print '\nUsage: python do_reramp_SLC.py dataFilename resFilename'
+    print '  where dataFilename        is the name of the burst you want to reramp'
+    print '        resFilename         is the .res file of the burst'
+    print ' This script applies the inverse phase ramp to the burst pointed to by DATAFILENAME'
+    print ' (slc) and RESFILENAME (res) that was deramped by do_deramp_SLC.py. For resampled'
+    print ' data the phase screen is evaluated on the slave line/pixel grids resampled into'
+    print ' the master geometry.'
+    print '  for example'
+    print ' python do_reramp_SLC.py slave_rsmp.raw slave.res'
+    print ' created by Gert Mulder'
+    print ' Part of the code adapted from Lorenzo Iannini and Wu Wenhao'
+try:
+    dataFilename = sys.argv[1]
+    resFilename = sys.argv[2]
+except:
+    print 'Unrecognized input'
+    usage()
+    sys.exit(1)
+
+# Whether the data is resampled is detected from the .res file below, so no extra
+# command-line argument is needed.
+
+# Read information
+################################################################################
+
+res = ResData(resFilename)
+
+# Image size properties
+if res.process_control['resample'] == '1':  # resampled data
+    l0 = int(res.processes['resample']['First_line (w.r.t. original_master)'])
+    lN = int(res.processes['resample']['Last_line (w.r.t. original_master)'])
+    p0 = int(res.processes['resample']['First_pixel (w.r.t. original_master)'])
+    pN = int(res.processes['resample']['Last_pixel (w.r.t. original_master)'])
+    dataFormat = 'complex64'
+    resampled = True
+else: # original data
+    l0 = int(res.processes['crop']['First_line (w.r.t. original_image)'])
+    lN = int(res.processes['crop']['Last_line (w.r.t. original_image)'])
+    p0 = int(res.processes['crop']['First_pixel (w.r.t. original_image)'])
+    pN = int(res.processes['crop']['Last_pixel (w.r.t. original_image)'])
+    dataFormat = 'cpxint16'
+    resampled = False
+
+# Get resampled Slv size
+Naz_res = lN-l0+1
+Nrg_res = pN-p0+1
+
+# Obtain chirp
+
+if resampled:
+    ChirpFilt = get_ramp(resFilename, resampled=1, type='chirp')
+else:
+    ChirpFilt = get_ramp(resFilename, resampled=0, type='chirp')
+
+################################################################################
+# Read data
+
+slc = freadbk(dataFilename, 1, 1, int(Naz_res), int(Nrg_res), dataFormat, int(Naz_res), int(Nrg_res))
+
+#######################################################################################
+
+newFilename = dataFilename[:-4] + '_reramped.raw'
+fid = open(newFilename, 'wb')
+slc_reramped = slc * ChirpFilt
+del ChirpFilt
+del slc
+
+# Write the reramped data to disk.
+if dataFormat == 'complex64':
+    slc_dat = slc_reramped.astype(np.complex64)
+else:  # cpxint16
+    slc_dat = np.zeros(shape=(int(Naz_res), int(Nrg_res) * 2)).astype('int16')
+    slc_dat[:, 0::2] = np.real(slc_reramped)
+    slc_dat[:, 1::2] = np.imag(slc_reramped)
+
+fid.write(slc_dat)
+fid.close()
+
+print "\nReramp operation completed\n"
diff --git a/doris_stack/functions/get_ramp.py b/doris_stack/functions/get_ramp.py
new file mode 100755
index 0000000..923a512
--- /dev/null
+++ b/doris_stack/functions/get_ramp.py
@@ -0,0 +1,282 @@
+import os
+import numpy as np
+import gdal
+from gdalconst import *
+import sys
+
+def get_ramp(res_file, resampled=0, type='chirp'):
+    # Read information
+    ################################################################################
+
+    #FM
+    t0_FM = np.float64(get_parameter('FM_reference_range_time', res_file,1))
+    c0_FM = np.float64(get_parameter('FM_polynomial_constant_coeff (Hz, early edge)', res_file,1))
+    c1_FM = np.float64(get_parameter('FM_polynomial_linear_coeff (Hz/s, early edge)', res_file,1))
+    c2_FM = np.float64(get_parameter('FM_polynomial_quadratic_coeff (Hz/s/s, early edge)', res_file,1))
+
+    #DC
+    azimuthTime_DC = get_parameter('DC_reference_azimuth_time', res_file,3)
+    azimuthTime_DC = np.float64(azimuthTime_DC[0])*3600+float(azimuthTime_DC[1])*60+float(azimuthTime_DC[2])
+
+    t0_DC = np.float64(get_parameter('DC_reference_range_time',res_file,1))
+    c0_DC = np.float64(get_parameter('Xtrack_f_DC_constant (Hz, early edge)', res_file,1))
+    c1_DC = np.float64(get_parameter('Xtrack_f_DC_linear (Hz/s, early edge)', res_file,1))
+    c2_DC = np.float64(get_parameter('Xtrack_f_DC_quadratic (Hz/s/s, early edge)', res_file,1))
+
+    Ks = np.float64(get_parameter('Azimuth_steering_rate (deg/s)', res_file,1))
+
+    # Image sampling parameters
+    Taz_start = get_parameter('First_pixel_azimuth_time (UTC)', res_file,3)
+    Taz_start = np.float64(Taz_start[0])*3600+float(Taz_start[1])*60+float(Taz_start[2])
+
+    Trg_start = np.float64(get_parameter('Range_time_to_first_pixel (2way) (ms)', res_file,1))*1e-3
+    fsRg = np.float64(get_parameter('Range_sampling_rate (computed, MHz)', res_file,1))
+
+    dt_az = np.float64(get_parameter('Azimuth_time_interval (s)', res_file,1))
+    dt_rg = 1/fsRg/1e6
+
+    # Number of lines
+    lNum = int(get_parameter('Number_of_lines_original', res_file,1))
+
+    if resampled == 1:
+        l0 = int(get_parameter('First_line (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL'))
+        lN = int(get_parameter('Last_line (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL'))
+        p0 = int(get_parameter('First_pixel (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL'))
+        pN = int(get_parameter('Last_pixel (w.r.t. original_master)', res_file,2,'*_Start_resample','* End_resample:_NORMAL'))
+    else:
+        l0 = int(get_parameter('First_line (w.r.t. original_image)', res_file, 1))
+        lN = int(get_parameter('Last_line (w.r.t. original_image)', res_file, 1))
+        p0 = int(get_parameter('First_pixel (w.r.t. original_image)', res_file, 1))
+        pN = int(get_parameter('Last_pixel (w.r.t. original_image)', res_file, 1))
+
+    # Get resampled Slv size
+    Naz_res = lN-l0+1
+    Nrg_res = pN-p0+1
+
+    if resampled == 1:
+        # Read the resampled image and slave coordinates in master geometry
+        #################################################################################
+
+        Path_MFF_HDR   ='rsmp_orig_slave_pixel'+'.hdr'
+        Link_DATA      ='rsmp_orig_slave_pixel'+'.r00'  # the default format should be r00
+        Link_rsmp_orig_slave_pixel ='rsmp_orig_slave_pixel.raw'
+
+        if (os.path.isfile(Path_MFF_HDR)):
+            os.remove(Path_MFF_HDR)
+        if (os.path.isfile(Link_DATA)):
+            os.remove(Link_DATA)
+
+        RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_pixel)
+        print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH
+        os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA)
+
+        outStream      = open(Path_MFF_HDR,'w')
+        outStream.write('IMAGE_FILE_FORMAT = MFF\n')
+        outStream.write('FILE_TYPE = IMAGE\n')
+        outStream.write('IMAGE_LINES = %d\n' % int(Naz_res))
+        outStream.write('LINE_SAMPLES = %d\n'% int(Nrg_res))
+        outStream.write('BYTE_ORDER = LSB\n')
+        outStream.write('END\n')
+        outStream.close()
+
+        PixRgGrid = freadbk(Path_MFF_HDR,1, 1,int(Naz_res),int(Nrg_res))
+        PixRgGrid = PixRgGrid.astype(np.float64)
+
+        if (os.path.isfile(Path_MFF_HDR)):
+            os.remove(Path_MFF_HDR)
+        if (os.path.isfile(Link_DATA)):
+            os.remove(Link_DATA)
+        #################################################################################
+
+        Path_MFF_HDR   ='rsmp_orig_slave_line'+'.hdr'
+        Link_DATA      ='rsmp_orig_slave_line'+'.r00'
+        Link_rsmp_orig_slave_line ='rsmp_orig_slave_line.raw'
+
+        if (os.path.isfile(Path_MFF_HDR)):
+            os.remove(Path_MFF_HDR)
+        if (os.path.isfile(Link_DATA)):
+            os.remove(Link_DATA)
+
+
+        RAW_DATA_ABSOLUTE_PATH=os.path.abspath(Link_rsmp_orig_slave_line)
+        print "RAW_DATA_ABSOLUTE_PATH=", RAW_DATA_ABSOLUTE_PATH
+        os.symlink(RAW_DATA_ABSOLUTE_PATH,Link_DATA)
+
+        outStream      = open(Path_MFF_HDR,'w')
+        outStream.write('IMAGE_FILE_FORMAT = MFF\n')
+        outStream.write('FILE_TYPE = IMAGE\n')
+        outStream.write('IMAGE_LINES = %d\n' % int(Naz_res))
+        outStream.write('LINE_SAMPLES = %d\n'% int(Nrg_res))
+        outStream.write('BYTE_ORDER = LSB\n')
+        outStream.write('END\n')
+        outStream.close()
+
+        PixAzGrid = freadbk(Path_MFF_HDR,1, 1,int(Naz_res),int(Nrg_res))
+        PixAzGrid=PixAzGrid.astype(np.float64)
+
+        if (os.path.isfile(Path_MFF_HDR)):
+            os.remove(Path_MFF_HDR)
+        if (os.path.isfile(Link_DATA)):
+            os.remove(Link_DATA)
+
+        # Prepare azimuth and range grids
+
+        TrgGrid = Trg_start + (PixRgGrid-1) * dt_rg
+        TazGrid = (PixAzGrid-1) * dt_az - (lNum/2 * dt_az)
+
+        del PixAzGrid, PixRgGrid
+
+    elif resampled == 0:
+        Tvect_rg = Trg_start + np.arange(p0-1,pN) * dt_rg
+        Tvect_az = np.arange(l0-1,lN) * dt_az - (lNum/2 * dt_az)
+        Tvect_az = Tvect_az[:, None]
+
+        TrgGrid = np.tile(Tvect_rg, (Naz_res, 1))
+        TazGrid = np.tile(Tvect_az, (1, Nrg_res))
+        
+    else:
+        print 'variable resampled can only be 0 or 1!'
+        return
+
+    # From S-1 steering rate and orbit information
+    # Orbit information
+
+    orbit_number     = int(get_parameter('NUMBER_OF_DATAPOINTS',res_file,1))
+    orbit_time       = np.zeros(orbit_number,dtype=np.float64)
+    orbit_pos_x      = np.zeros(orbit_number,dtype=np.float64)
+    orbit_pos_y      = np.zeros(orbit_number,dtype=np.float64)
+    orbit_pos_z      = np.zeros(orbit_number,dtype=np.float64)
+    orbit_info       = get_parameter('NUMBER_OF_DATAPOINTS',res_file,4)
+
+    orbit_info = orbit_info.split('\n')
+
+    for row in range(orbit_number):
+        state_vector = orbit_info[row].strip().split()
+        orbit_time[row]  = float(state_vector[0])
+        orbit_pos_x[row] = float(state_vector[1])
+        orbit_pos_y[row] = float(state_vector[2])
+        orbit_pos_z[row] = float(state_vector[3])
+    # The state vectors hold positions; differentiate numerically to get the sensor speed.
+    orbit_velocity = np.sqrt(np.diff(orbit_pos_x)**2+np.diff(orbit_pos_y)**2+np.diff(orbit_pos_z)**2)/np.diff(orbit_time)
+
+    # Compute Nominal DC for the whole burst
+    # Compute FM rate along range
+    Kfm = c0_FM + c1_FM*(TrgGrid-t0_FM) + c2_FM*(TrgGrid-t0_FM)**2
+    Kfm_0 = c0_FM + c1_FM*(Trg_start-t0_FM) + c2_FM*(Trg_start-t0_FM)**2
+
+    # Compute DC along range at reference azimuth time (azimuthTime)
+    Df_AzCtr = c0_DC + c1_DC*(TrgGrid-t0_DC) + c2_DC*(TrgGrid-t0_DC)**2
+    f_DC_ref_0 = c0_DC + c1_DC*(Trg_start-t0_DC) + c2_DC*(Trg_start-t0_DC)**2
+    del TrgGrid
+
+    # Compute the sensor velocity from the orbit and combine it with the S-1 steering rate
+    C_lambda=np.float64(get_parameter('Radar_wavelength (m)',res_file,1))
+
+    # Frequency rate
+    Ks_hz = 2*np.mean(orbit_velocity)/C_lambda*Ks/180*np.pi
+
+    # Time ratio
+    alpha_nom = 1 - Ks_hz/Kfm
+
+    # DC Azimuth rate [Hz/s]
+    DR_est = Ks_hz/alpha_nom
+    del Ks_hz, alpha_nom
+
+    # Reference time
+    az_DC = -(Df_AzCtr / Kfm) + (f_DC_ref_0 / Kfm_0)
+    del Kfm, Kfm_0
+    Taz_vec = TazGrid - az_DC
+    del az_DC
+
+    # Generate inverse chirp
+    if type == 'chirp':
+        data = np.exp(1j*2*np.pi*(DR_est/2*Taz_vec+Df_AzCtr)*Taz_vec)
+    elif type == 'DC':
+        data = Df_AzCtr + Taz_vec * DR_est 
+    else:
+        print 'Choose either chirp or DC for type'
+        return
+
+    return data
+
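+# Usage sketch (mirrors do_deramp_SLC.py; the .res name is illustrative): the chirp
+# is a unit-modulus phase screen, so deramping is a conjugate multiplication:
+#   chirp = get_ramp('slave.res', resampled=0, type='chirp')
+#   slc_deramped = np.conj(chirp) * slc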
+
+def get_parameter(First_param,file_name,format_flag=1,Second_param=None,Third_param=None):
+    Read_contine_flag=0
+    orbit_info=""
+    value=None
+    for line in open(file_name):
+        if format_flag==1:
+            if not (line.find(First_param)):
+                index=line.find(':')
+                value=(line[(index+1):].strip(' \n\t'))
+                return value
+
+        if format_flag==2:
+            if not(line.find(Second_param)):
+                Read_contine_flag=1
+            if (Read_contine_flag==1) and (not (line.find(First_param))):  #Be careful
+                index=line.find(':')
+                value=(line[(index+1):].strip(' \n\t'))
+                continue
+            if Read_contine_flag==1  and (not(line.find(Third_param))):  #Be careful
+                Read_contine_flag=0
+                return value
+
+
+        if format_flag==3:
+            if not (line.find(First_param)):
+                index=line.find(':')
+                pixel_time=(line[(index+1):].strip(' \n\t')).split(' ')[1].split(':')
+                return pixel_time
+
+
+        if format_flag==4:
+
+            if not (line.find(First_param)):
+                index=line.find(':')
+                value=int(line[(index+1):].strip(' \n\t'))
+                Read_contine_flag=1
+                continue
+            if (Read_contine_flag>=1):
+                orbit_info=orbit_info+line
+                Read_contine_flag=Read_contine_flag+1
+                if (Read_contine_flag==(value+1)):
+                    return orbit_info
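+
+# Example (the key appears in the calls above; get_parameter always returns a string):
+#   wavelength = float(get_parameter('Radar_wavelength (m)', res_file, 1))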
+################################################################################
+
+
+###############################################################################
+
+def freadbk(path_file,line_start=1, pixels_start=1,nofLines1=None,nofPixels1=None):
+    #Driver
+    driver=gdal.GetDriverByName('MFF')
+    driver.Register()
+    gdal.AllRegister()
+    thisBurstData_file=gdal.Open(path_file,GA_ReadOnly)
+    if thisBurstData_file is None:
+        print 'Could not open ' + path_file
+        sys.exit(1)
+    #print 'Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
+    #      thisBurstData_file.GetDriver().LongName
+    #print 'Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
+    #      'x',thisBurstData_file.RasterCount
+    #print 'Projection is ',thisBurstData_file.GetProjection()
+    geotransform = thisBurstData_file.GetGeoTransform()
+    if geotransform is not None:
+        print 'Origin = (',geotransform[0], ',',geotransform[3],')'
+        print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
+
+    cint_srd=thisBurstData_file.GetRasterBand(1)
+    #print 'Band Type=',gdal.GetDataTypeName(cint_srd.DataType)
+
+    if cint_srd.GetOverviewCount() > 0:
+        print 'Band has ', cint_srd.GetOverviewCount(), ' overviews.'
+    thisBurstData= cint_srd.ReadAsArray(int(pixels_start-1),int(line_start-1),nofPixels1,nofLines1)
+    return thisBurstData
+##################################################################################
diff --git a/doris_stack/functions/get_winpos.py b/doris_stack/functions/get_winpos.py
new file mode 100755
index 0000000..9cf09d3
--- /dev/null
+++ b/doris_stack/functions/get_winpos.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+import os
+import sys
+import numpy as np
+from numpy import *
+import gdal
+from gdalconst import *
+from scipy import ndimage
+
+def usage():
+    print '\nUsage: python get_winpos.py dataFile resFile Nwin outFile                        '
+    print 'where   dataFile           is the burst data file to select windows from          '
+    print '        resFile            is the .res file of burst                               '
+    print '        Nwin               number of windows to be distributed over the total image'
+    print '        outFile            output file name                                        '
+    print '  for example                                                                      '
+    print ' python get_winpos.py 20141003_iw_1_burst_1.raw 20141003_iw_1_burst_1.res 2001 winpos_fine.asc'
+    print ' matlab: TU Delft                                                                  '
+    print ' Python: Wu Wenhao   Wuhan QQ:460249274                                            '
+try:
+    dataFile   = sys.argv[1]
+    resFile    = sys.argv[2]
+    Nwin       = sys.argv[3]
+    outFile    = sys.argv[4]
+  
+except:
+    print 'Unrecognized input'
+    usage()
+    sys.exit(1)
+
+
+################################################################################
+def get_parameter(First_param,file_name,format_flag=1,Second_param=None,Third_param=None):
+    Read_contine_flag=0
+    orbit_info=""
+    value=None
+    for line in open(file_name):
+        if format_flag==1:
+            if not (line.find(First_param)):
+                index=line.find(':')
+                value=(line[(index+1):].strip(' \n\t'))
+                return value 
+
+        if format_flag==2:
+            if not(line.find(Second_param)):
+                Read_contine_flag=1             
+            if (Read_contine_flag==1) and (not (line.find(First_param))):  #Be careful
+                index=line.find(':')
+                value=(line[(index+1):].strip(' \n\t'))
+                continue               
+            if Read_contine_flag==1  and (not(line.find(Third_param))):  #Be careful               
+                Read_contine_flag=0         
+                return value
+               
+
+        if format_flag==3:            
+            if not (line.find(First_param)):
+                index=line.find(':')
+                pixel_time=(line[(index+1):].strip(' \n\t')).split(' ')[1].split(':')
+                return pixel_time  
+
+
+        if format_flag==4:
+            
+            if not (line.find(First_param)):
+                index=line.find(':')                
+                value=int(line[(index+1):].strip(' \n\t'))
+                Read_contine_flag=1
+                continue                 
+            if (Read_contine_flag>=1):               
+                orbit_info=orbit_info+line
+                Read_contine_flag=Read_contine_flag+1
+                if (Read_contine_flag==(value+1)):
+                    return orbit_info      
+###############################################################################
+#thisBurstData = freadbk(['burst' num2str(nBurst)   '/cint_srd.raw'],nofLines1,formatData1, line1, nofLines1,1,nofPixels1);
+def freadbk(path_file,line_start=1, Pixels_start=1,nofLines1=None,nofPixels1=None):
+    #Driver
+    driver=gdal.GetDriverByName('MFF')
+    driver.Register()
+    gdal.AllRegister()
+    thisBurstData_file=gdal.Open(path_file,GA_ReadOnly)
+    if thisBurstData_file is None:
+        print 'Could not open ' + path_file
+        sys.exit(1)
+    #print 'Driver: ', thisBurstData_file.GetDriver().ShortName,'/', \
+    #      thisBurstData_file.GetDriver().LongName
+    #print 'Size is ',thisBurstData_file.RasterXSize,'x',thisBurstData_file.RasterYSize, \
+    #      'x',thisBurstData_file.RasterCount
+    #print 'Projection is ',thisBurstData_file.GetProjection()
+    geotransform = thisBurstData_file.GetGeoTransform()
+    #if not geotransform is None:
+    #    print 'Origin = (',geotransform[0], ',',geotransform[3],')'
+    #    print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
+
+    cint_srd=thisBurstData_file.GetRasterBand(1)
+    #print 'Band Type=',gdal.GetDataTypeName(cint_srd.DataType)
+
+    if cint_srd.GetOverviewCount() > 0:
+            print 'Band has ', cint_srd.GetOverviewCount(), ' overviews.'
+    thisBurstData= cint_srd.ReadAsArray(int(Pixels_start-1),int(line_start-1),nofPixels1,nofLines1)
+    return thisBurstData
+###############################################################################
+
+
+NwinGrid = 5
+
+azSpacing = 20
+rSpacing  = 5
+
+
+resData   = resFile
+
+if get_parameter('First_line (w.r.t. ovs_image)',resData,1):  # oversampled data
+    l0 = int(get_parameter('First_line (w.r.t. ovs_image)',resData,1))
+    lN = int(get_parameter('Last_line (w.r.t. ovs_image)',resData,1))
+    p0 = int(get_parameter('First_pixel (w.r.t. ovs_image)',resData,1))
+    pN = int(get_parameter('Last_pixel (w.r.t. ovs_image)',resData,1))
+    dataFormat = 'cpxfloat32'
+else:  # original data
+    l0 = int(get_parameter('First_line (w.r.t. original_image)',resData,1))
+    lN = int(get_parameter('Last_line (w.r.t. original_image)',resData,1))
+    p0 = int(get_parameter('First_pixel (w.r.t. original_image)',resData,1))
+    pN = int(get_parameter('Last_pixel (w.r.t. original_image)',resData,1))
+    dataFormat = 'cpxint16'
+
+
+# Image size
+Nlines = lN-l0+1
+Npixels = pN-p0+1
+print "Nlines =", Nlines
+print "Npixels =", Npixels
+
+
+Ngrid = float(Nwin)/NwinGrid
+daz = Nlines*azSpacing
+dr = Npixels*rSpacing
+
+ratio = float(dr)/daz
+
+Ngrid_az = sqrt(Ngrid/ratio)
+Ngrid_r = round(Ngrid_az*ratio)
+Ngrid_az = round(Ngrid_az)
+
+
+Nlines_grid = ceil(Nlines/Ngrid_az)
+Nlines_grid_orig = Nlines_grid
+
+
+Npixels_grid = ceil(Npixels/Ngrid_r)
+Npixels_grid_orig = Npixels_grid
+
+
+RAW_CINT_SRD  = dataFile
+Path_MFF_HDR  = dataFile.split('.')[0]+'.hdr'
+
+
+if dataFormat == 'cpxint16':
+   Link_CINT_SRD=dataFile.split('.')[0]+'.j00'
+else:
+   Link_CINT_SRD=dataFile.split('.')[0]+'.x00'
+
+
+outStream      = open(Path_MFF_HDR,'w')
+outStream.write('IMAGE_FILE_FORMAT = MFF\n')
+outStream.write('FILE_TYPE = IMAGE\n')
+outStream.write('IMAGE_LINES = %d\n' % int(Nlines))
+outStream.write('LINE_SAMPLES = %d\n'% int(Npixels))
+outStream.write('BYTE_ORDER = LSB\n')
+outStream.write('END\n')
+outStream.close()
+    
+if (os.path.exists(Link_CINT_SRD)):
+    os.remove(Link_CINT_SRD)
+RAW_CINT_SRD_ABSOLUTE_PATH=os.path.abspath(RAW_CINT_SRD)
+print "RAW_CINT_SRD_ABSOLUTE_PATH=", RAW_CINT_SRD_ABSOLUTE_PATH
+os.symlink(RAW_CINT_SRD_ABSOLUTE_PATH,Link_CINT_SRD)
+
+
+winpos=np.array([],dtype='int32').reshape(0,2)
+
+
+for v in range(1,int(Ngrid_az)+1):
+
+    if v==Ngrid_az:
+       Nlines_grid = (Nlines-1)%Nlines_grid_orig+1
+    else:
+       Nlines_grid = Nlines_grid_orig   
+    ampArray = abs(freadbk(Path_MFF_HDR,int((v-1)*Nlines_grid_orig+1),1,int(Nlines_grid),Npixels ))
+    for w in range(1,int(Ngrid_r)+1):
+        if w==Ngrid_r:
+            Npixels_grid = (Npixels-1)%Npixels_grid_orig+1
+        else:
+            Npixels_grid = Npixels_grid_orig
+        amp = ampArray[:,(w-1)*Npixels_grid_orig:(w-1)*Npixels_grid_orig+Npixels_grid]
+        locMaxsInd = amp == ndimage.grey_dilation(amp, size=(5*rSpacing, 5*azSpacing))       
+        locMaxs = amp[locMaxsInd]
+        [az,r] = where(locMaxsInd)
+        sortIdx =np.argsort(-locMaxs)        
+        sortIdx = sortIdx[0:NwinGrid]     
+        add_winpos=np.array([az[sortIdx]+l0-1+(v-1)*Nlines_grid_orig,r[sortIdx]+p0-1+(w-1)*Npixels_grid_orig]).transpose()     
+        winpos=np.vstack([winpos,add_winpos])
+
+fidRes        = open(outFile,'w')
+cols = winpos.shape[1]
+rows = winpos.shape[0]        
+#print "cols = ",cols
+print "rows = ", rows
+for i_temp in range(0,rows):       
+    fidRes.write( '%d   %d\n' % (winpos[i_temp,0]+1,winpos[i_temp,1]+1))      
+fidRes.close()
+
+if (os.path.exists(Link_CINT_SRD)):
+    os.remove(Link_CINT_SRD)
+if (os.path.exists(Path_MFF_HDR)):
+    os.remove(Path_MFF_HDR)
diff --git a/doris_stack/functions/job.started b/doris_stack/functions/job.started
new file mode 100644
index 0000000..e69de29
diff --git a/doris_stack/functions/load_shape_unzip.py b/doris_stack/functions/load_shape_unzip.py
new file mode 100755
index 0000000..e192d56
--- /dev/null
+++ b/doris_stack/functions/load_shape_unzip.py
@@ -0,0 +1,220 @@
+import os, sys
+import zipfile
+import shutil
+import warnings
+import xml.etree.cElementTree as etree
+from shapely.geometry import Polygon, shape
+import fiona
+import numpy as np
+
+
+def unzip_folder(zipped_folder, dest_folder, shapefile='', pol='', data=True, swath='all', overwrite=False, check_valid=False):
+    # This function unzips a data folder. The total amount of data to unpack can be reduced by selecting which
+    # data files are unpacked. First all files except the data files are extracted. Then the data files are
+    # extracted based on the given input:
+    # - pol > only this polarisation is unpacked ('vv', 'hh', 'hv', 'vh')
+    # - shapefile > only the swaths which overlap with this shapefile are unpacked
+    # - swath > you can choose either 1/2/3
+    # If you do not want to extract any data file use data=False. Finally, the script will skip files which are
+    # already unpacked, unless you set overwrite to True.
+
+    zipdat = zipfile.ZipFile(zipped_folder)
+
+    # First check zipfile quality
+    if check_valid == True:
+        test = zipdat.testzip()
+        if test:
+            print('Some files in ' + zipped_folder + ' are corrupted.')
+            return
+
+    # First check whether the shape overlaps with the image footprint from the .kml preview
+    kml_file, png_file = extract_kml_preview(zipped_folder, dir=dest_folder, overwrite=overwrite)
+
+    if shapefile:
+        shp = load_shape(shapefile, buffer=0.02)
+        overlap = shape_im_kml(shp, kml_file)
+        if not overlap:
+            print('The image and kml_file do not overlap')
+            return
+
+    swaths = []
+    for filename in zipdat.namelist():
+        absname = os.path.join(dest_folder, filename)
+
+        if not os.path.exists(absname) or overwrite == True:
+            if not filename.endswith('.tiff'):
+                zipdat.extract(filename, dest_folder)
+            else:
+                swaths.append(filename)
+
+    if swath in ['1', '2', '3']:  # If only one of the swaths is extracted.
+        swaths = [s for s in swaths if os.path.basename(s)[6] == str(swath)]
+    if pol in ['vv','vh','hh','hv']:
+        swaths = [s for s in swaths if os.path.basename(s)[12:14] == pol]
+    if not swaths or data == False: # If there is nothing left, stop unpacking.
+        return
+    if shapefile:
+        d_swath = []
+        for s in swaths:
+            xml_file = os.path.join(dest_folder, os.path.dirname(os.path.dirname(s)), 'annotation', os.path.basename(s)[:-4] + 'xml')
+            if shape_swath_xml(shp, xml_file):
+                d_swath.append(s)
+        swaths = d_swath
+
+    # Finally unpack the needed swaths
+    for s in swaths:
+        zipdat.extract(s, dest_folder)
+
+
+def extract_kml_preview(zipped_folder, dir='', kml=True, png=True, overwrite=False):
+    # Extracts quicklook and/or .kml files.
+
+    zipdat = zipfile.ZipFile(zipped_folder)
+    if not dir:
+        dir = os.path.dirname(zipped_folder)
+
+    png_name = ''
+    kml_name = ''
+
+    for filename in zipdat.namelist():  # Unzip and save .kml file
+        if filename.endswith('map-overlay.kml') and kml == True:
+            kml_name = os.path.join(dir, os.path.basename(zipped_folder)[:-4] + '.kml')
+            zipped_kml = zipdat.open(filename)
+            if not os.path.exists(kml_name) or overwrite == True:
+                kml_file = file(kml_name, "wb")
+                with zipped_kml, kml_file:
+                    shutil.copyfileobj(zipped_kml, kml_file)
+
+    for filename in zipdat.namelist():  # Unzip and save quicklook
+        if filename.endswith('quick-look.png') and png == True:
+            png_name = os.path.join(dir, os.path.basename(zipped_folder)[:-4] + '.png')
+            zipped_png = zipdat.open(filename)
+            if not os.path.exists(png_name) or overwrite == True:
+                png_file = file(png_name, "wb")
+                with zipped_png, png_file:
+                    shutil.copyfileobj(zipped_png, png_file)
+
+    return kml_name, png_name
+
+
+def shape_im_kml(shp, kml_file):
+    # This function extracts a polygon of the image footprint from the .kml preview file and checks
+    # whether it overlaps with the given shape.
+
+    # First check if the .kml file exists
+    if not os.path.exists(kml_file):
+        warnings.warn('.kml file does not exist.')
+        return False
+
+    try:
+        in_kml = etree.parse(kml_file)
+        in_kml = in_kml.getroot()
+        coor = in_kml[0][1][1][2][0].text
+        coor = [i.split(',') for i in coor.split(' ')]
+        coverage = Polygon([[float(i[0]),float(i[1])] for i in coor])
+    except:
+        warnings.warn('.kml file is corrupt')
+        return False
+
+    if coverage.intersects(shp):
+        return True
+    else:
+        return False
+
+
+def shape_swath_xml(shp, xml_file):
+    # This function extracts a polygon of the swath footprint from the .xml annotation file and checks
+    # whether it overlaps with the given shape.
+
+    # First check if the .xml file exists
+    if not os.path.exists(xml_file):
+        warnings.warn('.xml file does not exist.')
+        return False
+
+    try:
+        in_xml = etree.parse(xml_file)
+        in_xml = in_xml.getroot()
+        coor = in_xml.find('.geolocationGrid').find('.geolocationGridPointList').findall('.geolocationGridPoint')
+        lats = []
+        lons = []
+        line = []
+        pixel = []
+        for c in coor:
+            lats.append(float(c.find('.latitude').text))
+            lons.append(float(c.find('.longitude').text))
+            pixel.append(int(c.find('.pixel').text))
+            line.append(int(c.find('.line').text))
+        maxpixel = np.max(pixel)
+        coor = [[lat, lon] for lon, lat, p in zip(lats, lons, pixel) if p == 0]
+        coor.extend([[lat, lon] for lon, lat, p in zip(lats[::-1], lons[::-1], pixel[::-1]) if p == maxpixel])
+        coverage = Polygon(coor)
+
+    except:
+        warnings.warn('.xml file is corrupt')
+        return False
+
+    if coverage.intersects(shp):
+        return True
+    else:
+        return False
+
+
+def load_shape(shapefile, buffer=0.02):
+    # This function creates a shape to make a selection of usable bursts later on. Buffer around shape is in
+    # degrees.
+
+    if not shapefile:
+        warnings.warn('Please provide a shapefile or coordinates.')
+
+    try:
+        if isinstance(shapefile, list):  # If the coordinates are already loaded. (for example bounding box)
+            shp = Polygon(shapefile)
+        else:  # It should be a shape file. We always select the first shape.
+            sh = fiona.open(shapefile).next()
+            shp = shape(sh['geometry'])
+
+        # Now we have the shape we add a buffer and simplify first to save computation time.
+        shp = shp.simplify(buffer / 2)
+        shp = shp.buffer(buffer)
+    except:
+        warnings.warn('Unrecognized shape')
+        return
+
+    return shp
+
+
+# Testing ---------------------------------------------
+# zipped_folder = '/media/gert/Data/radar_database/sentinel-1/s1_asc_t88/IW_SLC__1SDV_VVVH/20141116/S1A_IW_SLC__1SDV_20141116T172443_20141116T172510_003310_003D5C_E92F.SAFE.zip'
+# dest_folder = '/media/gert/Data/radar_database/sentinel-1/s1_asc_t88/IW_SLC__1SDV_VVVH/20141116/test'
+# shapefile = '/media/gert/Data/shapes/netherlands/netherland.shp'
+# pol = ''
+# data = True
+# swath = 'all'
+# overwrite = False
+# check_valid = True
+# ------------------------------------------------------
+
+# Actually execute the code to unzip one data file.
+if __name__ == "__main__":
+
+    data = True
+    check_valid = False
+    swath = 'all'
+
+    zipped_folder = sys.argv[1]
+    dest_folder = sys.argv[2]
+    shapefile = sys.argv[3]
+    pol = sys.argv[4]
+    overwrite = sys.argv[5]
+
+    if overwrite == 'False':
+        overwrite = False
+    else:
+        overwrite = True
+
+    print('zipfile is ' + zipped_folder)
+    print('shapefile is ' + shapefile)
+    print('destination folder is ' + dest_folder)
+
+    unzip_folder(zipped_folder, shapefile=shapefile, pol=pol, dest_folder=dest_folder, overwrite=overwrite, swath=swath, check_valid=check_valid, data=data)
+
diff --git a/doris_stack/functions/orbit_coordinates.py b/doris_stack/functions/orbit_coordinates.py
new file mode 100755
index 0000000..1a3e598
--- /dev/null
+++ b/doris_stack/functions/orbit_coordinates.py
@@ -0,0 +1,226 @@
+# The functions here are used to transform coordinates and times in the original xml files to a useful format
+# for Doris. They are generally used to read the metadata of Sentinel-1 files.
+import math
+import numpy as np
+
+def lph2xyz(line,pixel,container,norm_orbit_line,centroid_lon,centroid_lat,height):
+
+    # initialization
+    MAXITER=500
+    CRITERPOS=1.0e-16
+    # WGS84 ellipsoid:
+    ellipsoid=[6378137.0 , 6356752.3141]
+    num_points = np.array([line]).shape[0]
+    xyz       = np.zeros((num_points,3))
+    #$$$ parameter of the image
+    tr1  = float(container['rangeTimePix'][0])/2 # one way range time [sec]
+    RSR  = float(container['rangeRSR'][0])*2 # one way range sampling rate [Hz]
+    centerphi = float(centroid_lat)
+    centerlambda= float(centroid_lon)
+    SOL=299792458
+    # $$$ reference surface: WGS84
+
+    ell_a  = ellipsoid[0]                  # semimajor of the ellipsoid
+    ell_b  = ellipsoid[1]                  # semiminor of the ellipsoid
+    ell_e2 = (ell_a**2-ell_b**2)/ell_a**2    # squared first eccentricity(derived)
+    # $$$ ell_e2b=(ell_a^2-ell_b^2)/ell_b^2;  % squared second eccentricity(derived)
+
+    # $$$ [lat,long,h] of scene center to [x,y,z]
+    h            = height        # this is only for initial values
+
+    centerphi    = centerphi*np.pi/180
+    centerlambda = centerlambda*np.pi/180
+
+    Ncenter      = ell_a/np.sqrt(1-ell_e2*(np.sin(centerphi)**2)) # radius of curvature in the prime vertical
+    scenecenterx = (Ncenter+h)*np.cos(centerphi)*np.cos(centerlambda)
+    scenecentery = (Ncenter+h)*np.cos(centerphi)*np.sin(centerlambda)
+    scenecenterz = (Ncenter+h-ell_e2*Ncenter)*np.sin(centerphi)
+
+    for n in range(0,num_points):   # loop through points
+
+        posonellx = scenecenterx
+        posonelly = scenecentery
+        posonellz = scenecenterz
+        ratime = tr1 + (pixel-1.0)/RSR
+
+    #get position and velocity of the satellite
+        possatx = norm_orbit_line[n,1]
+        possaty = norm_orbit_line[n,2]
+        possatz = norm_orbit_line[n,3]
+        velsatx = norm_orbit_line[n,4]
+        velsaty = norm_orbit_line[n,5]
+        velsatz = norm_orbit_line[n,6]
+
+        equationset=np.zeros((3,1))
+        partialsxyz=np.zeros((3,3))
+        for iter in range(1, MAXITER+1):
+
+            # update the equations and solve the system
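+            # The solution P is the intersection of three surfaces:
+            # (1) the zero-Doppler plane: satellite velocity perpendicular to the line of sight,
+            # (2) the range sphere: |P - satellite| equals c times the one-way range time,
+            # (3) the ellipsoid inflated by the given height.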
+            dsat_Px = posonellx - possatx    #vector of 'satellite to P on ellipsoid'
+            dsat_Py = posonelly - possaty
+            dsat_Pz = posonellz - possatz
+
+            equationset[0,0] = -(velsatx*dsat_Px+velsaty*dsat_Py+velsatz* dsat_Pz)
+
+            equationset[1,0] = -(dsat_Px*dsat_Px+dsat_Py*dsat_Py+dsat_Pz*dsat_Pz-(SOL*ratime)**2)
+
+            equationset[2,0] = -((posonellx*posonellx+posonelly*posonelly)/((ell_a+height)**2)+(posonellz/(ell_b+height))**2-1.0)
+
+            partialsxyz[0,0] = velsatx
+            partialsxyz[0,1] = velsaty
+            partialsxyz[0,2] = velsatz
+            partialsxyz[1,0] = 2*dsat_Px
+            partialsxyz[1,1] = 2*dsat_Py
+            partialsxyz[1,2] = 2*dsat_Pz
+            partialsxyz[2,0] = (2*posonellx)/((ell_a+height)**2)
+            partialsxyz[2,1] = (2*posonelly)/((ell_a+height)**2)
+            partialsxyz[2,2] = (2*posonellz)/((ell_b+height)**2)
+
+        # solve system [NOTE] orbit_normalized, otherwise close to
+        # singular
+            solpos = np.linalg.solve(partialsxyz,equationset)
+
+            solx = solpos[0,0]
+            soly = solpos[1,0]
+            solz = solpos[2,0]
+
+        # update solution
+            posonellx = posonellx + solx
+            posonelly = posonelly + soly
+            posonellz = posonellz + solz
+
+            # check convergence
+            if abs(solx) < CRITERPOS and abs(soly) < CRITERPOS and abs(solz) < CRITERPOS:
+                break
+        # final solution: array of XYZ coordinates
+        xyz[n,:]=np.array([posonellx, posonelly, posonellz]).copy()
+
+    return xyz
+
+
+def xyz2ell(position):
+
+    ellipsoid=[6378137.0 , 6356752.3141]
+    ell_a = ellipsoid[0]
+    ell_b = ellipsoid[1]
+
+    ell_e2  = (ell_a**2-ell_b**2)/ell_a**2    #squared first eccentricity(derived)
+    ell_e2b = (ell_a**2-ell_b**2)/ell_b**2    # squared second eccentricity(derived)
+
+    posx = position[:,0]
+    posy = position[:,1]
+    posz = position[:,2]
+
+    r    = math.sqrt(posx**2 + posy**2)
+    mu   = math.atan2(posz*ell_a, r*ell_b)
+
+    sin3 = (math.sin(mu))**3
+    cos3 = (math.cos(mu))**3
+    phi  = math.atan2((posz + ell_e2b * ell_b * sin3),(r - ell_e2 * ell_a* cos3))
+
+    Radar_lambda = math.atan2(posy,posx)
+    N = ell_a / math.sqrt(1 - ell_e2 * (math.sin(phi))**2)  # for every point, no approximation with the scene center
+    height = (r/math.cos(phi)) - N
+
+    phi_lam_height = np.zeros(3)
+    phi_lam_height[0] = phi*180/math.pi
+    phi_lam_height[1] = Radar_lambda*180/math.pi
+    phi_lam_height[2] = height
+
+    return phi_lam_height
+
+def intrp_orbit(line,container,burst_number):
+
+    intrpOrder = 'spline'
+    orbit_time       = np.zeros(len(container['orbitTime']),dtype=np.float64)
+    orbit_x = np.zeros(len(container['orbitTime']),dtype=np.float64)
+    orbit_y = np.zeros(len(container['orbitTime']),dtype=np.float64)
+    orbit_z = np.zeros(len(container['orbitTime']),dtype=np.float64)
+    for row in range(len(container['orbitTime'])):
+
+        #orbit_time_position=precorb[row]
+        orbit_time[row]    =hms2sec(container['orbitTime'][row].split('T')[1])
+        orbit_x[row]       =float( container['orbitX'][row])
+        orbit_y[row]       =float( container['orbitY'][row])
+        orbit_z[row]       =float( container['orbitZ'][row])
+
+    # compute normalization factors
+    px = orbit_time # time
+
+    f  = min(px)
+    g  = (max(px)-min(px))
+    px = (px-f)/g
+    # polyDegree
+    polyDegree = 2
+
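+    # np.polyfit on the normalized time t' = (t-f)/g returns [c, b, a] for c*t'^2 + b*t' + a;
+    # the coef_* lists below expand this polynomial back to unnormalized time t, so that
+    # np.polyder/np.polyval can be evaluated directly at orbit_time.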
+    coef_x1 = np.polyfit(px,orbit_x,polyDegree)
+    a = coef_x1[2]
+    b = coef_x1[1]
+    c = coef_x1[0]
+    coef_x = [c/(g**2), b/g-(2*c*f)/(g**2), a-b*f/g+c*(f/g)**2]
+
+    coef_y1 = np.polyfit(px,orbit_y,polyDegree)
+    a = coef_y1[2]
+    b = coef_y1[1]
+    c = coef_y1[0]
+    coef_y = [c/(g**2), b/g-(2*c*f)/(g**2), a-b*f/g+c*(f/g)**2]
+
+    coef_z1 = np.polyfit(px,orbit_z,polyDegree)
+    a = coef_z1[2]
+    b = coef_z1[1]
+    c = coef_z1[0]
+    coef_z = [c/(g**2), b/g-(2*c*f)/(g**2), a-b*f/g+c*(f/g)**2]
+
+    vel_x = np.polyval(np.polyder(coef_x),orbit_time)
+    vel_y = np.polyval(np.polyder(coef_y),orbit_time)
+    vel_z = np.polyval(np.polyder(coef_z),orbit_time)
+
+    acc_x = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_x)))
+    acc_y = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_y)))
+    acc_z = np.kron(np.ones(len(container['orbitTime'])),np.polyder(np.polyder(coef_z)))
+    #print 'acc_x.shape=',acc_x.shape
+
+    # interpolated orbit
+    norm_orbit = np.array([orbit_time, orbit_x, orbit_y, orbit_z, vel_x, vel_y, vel_z, acc_x, acc_y, acc_z])
+
+    # interpolated orbit for l_aztime
+    PRF       = float(container['azimuthPRF'][0])  # pulse repetition frequency [Hz]
+    Taz_start = hms2sec(container['azimuthTimeStart'][burst_number].split('T')[1])
+
+    ta1          = Taz_start  # start time in UTC [sec]
+
+    l_aztime     = (line-1)/PRF + ta1
+    pos_orbit_x = np.interp(l_aztime,orbit_time,orbit_x)
+    pos_orbit_y = np.interp(l_aztime,orbit_time,orbit_y)
+    pos_orbit_z = np.interp(l_aztime,orbit_time,orbit_z)
+
+    vel_orbit_x = np.interp(l_aztime,orbit_time,vel_x)
+    vel_orbit_y = np.interp(l_aztime,orbit_time,vel_y)
+    vel_orbit_z = np.interp(l_aztime,orbit_time,vel_z)
+
+    #acc = np.interp(orbit_time,[acc_x, acc_y, acc_z],    l_aztime,intrpOrder)
+    acc_orbit_x = np.interp(l_aztime,orbit_time,acc_x)
+    acc_orbit_y = np.interp(l_aztime,orbit_time,acc_y)
+    acc_orbit_z = np.interp(l_aztime,orbit_time,acc_z)
+
+
+    norm_orbit_line = np.array([l_aztime, pos_orbit_x,pos_orbit_y,pos_orbit_z,vel_orbit_x,vel_orbit_y,vel_orbit_z,acc_orbit_x,acc_orbit_y,acc_orbit_z])
+
+    return norm_orbit.transpose(),norm_orbit_line.reshape(1,-1,order='F')
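+
+# Minimal usage sketch (container as returned by the swath xml reader; the keys
+# 'orbitTime', 'orbitX/Y/Z', 'azimuthPRF' and 'azimuthTimeStart' are assumed):
+#   norm_orbit, norm_orbit_line = intrp_orbit(np.arange(1, 1501), container, 0)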
+
+####################################################################################################
+
+def hms2sec(hmsString,convertFlag='float'):
+    # input hmsString syntax: XX:XX:XX.xxxxxx
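+    # e.g. hms2sec('12:30:15.500000') -> 45015.5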
+    seconds = int(hmsString[0:2])*3600 + \
+        int(hmsString[3:5])*60 + \
+        float(hmsString[6:])
+    if convertFlag == 'int':
+        return int(seconds)
+    elif convertFlag == 'float':
+        return float(seconds)
+    else:
+        return int(seconds)
\ No newline at end of file
diff --git a/doris_stack/functions/precise_read.py b/doris_stack/functions/precise_read.py
new file mode 100755
index 0000000..bcb8ffe
--- /dev/null
+++ b/doris_stack/functions/precise_read.py
@@ -0,0 +1,155 @@
+# This file contains several functions to read data from precise orbit files of Sentinel-1 data.
+# The scripts were created by Wu Wenhao, Wuhan University, and adapted by Gert Mulder, TU Delft.
+
+import time
+import calendar
+import numpy as np
+import os, sys
+from scipy.interpolate import interp1d
+import scipy.interpolate as inter
+
+
+def orbit_read(input_EOF_FileName):
+
+    try:
+        import xml.etree.cElementTree as etree
+    except ImportError:
+        try:
+            from lxml import etree
+        except ImportError:
+            # import xml.etree.ElementTree as etree
+            print 'Failed to load lxml.etree or xml.etree.cElementTree'
+            sys.exit(1)
+
+    inTree = etree.parse(input_EOF_FileName)
+    queryList = {
+                # orbit inf
+               'Mission'  : './/Earth_Explorer_Header/Fixed_Header/Mission',\
+               'Validity_Start': './/Earth_Explorer_Header/Fixed_Header/Validity_Period/Validity_Start',\
+               'Validity_Stop': './/Earth_Explorer_Header/Fixed_Header/Validity_Period/Validity_Stop',\
+               'orbitABS' : './/Data_Block/List_of_OSVs/OSV/Absolute_Orbit',\
+               'orbitTime': './/Data_Block/List_of_OSVs/OSV/UTC',\
+               'orbitX'   : './/Data_Block/List_of_OSVs/OSV/X',\
+               'orbitY'   : './/Data_Block/List_of_OSVs/OSV/Y',\
+               'orbitZ'   : './/Data_Block/List_of_OSVs/OSV/Z',\
+               }
+
+    # Collect the text of all matching nodes for every query.
+    container = {}
+    for key in queryList.keys():
+        container[key] = [nodes.text for nodes in inTree.findall(queryList[key])]
+
+    return container
+
+#--------------------------------------------------------
+def interpolate_orbit(input_orbit_dir, date, input_orbit_type, input_interpolation_method, satellite='S1A'):
+
+    orbit_time = calendar.timegm(time.strptime(date,'%Y-%m-%dT%H:%M:%S.%f'))
+    date_start = calendar.timegm(time.strptime(date[:10],'%Y-%m-%d'))
+
+    if input_orbit_type == 'POE':
+        input_orbit_dir = os.path.join(input_orbit_dir, 'precise')
+    elif input_orbit_type == 'RES':
+        input_orbit_dir = os.path.join(input_orbit_dir, 'restituted')
+
+    L = os.listdir(input_orbit_dir)
+    Orbit_info = []
+
+    if input_orbit_type == 'POE' and satellite == 'S1A':
+        orbit_type = 'S1A_OPER_AUX_POEORB_OPOD_'
+    elif input_orbit_type == 'RES' and satellite == 'S1A':
+        orbit_type = 'S1A_OPER_AUX_RESORB_OPOD_'
+    elif input_orbit_type == 'POE' and satellite == 'S1B':
+        orbit_type = 'S1B_OPER_AUX_POEORB_OPOD_'
+    elif input_orbit_type == 'RES' and satellite == 'S1B':
+        orbit_type = 'S1B_OPER_AUX_RESORB_OPOD_'
+
+    for d in L:
+        if d.startswith(orbit_type):
+            start_time = calendar.timegm(time.strptime(d[42:57], '%Y%m%dT%H%M%S'))
+            end_time = calendar.timegm(time.strptime(d[58:73], '%Y%m%dT%H%M%S'))
+            if (start_time < orbit_time) and (end_time > orbit_time):
+
+                meta = orbit_read(os.path.join(input_orbit_dir, d))
+                print(d)
+
+                for i in range(len(meta['orbitTime'])):
+
+                    point_time = calendar.timegm(time.strptime(meta['orbitTime'][i][4:-7], '%Y-%m-%dT%H:%M:%S'))
+                    if (point_time > orbit_time-290) and (point_time < orbit_time+290):
+
+                        Tuple_orbit=(float(hms2sec(meta['orbitTime'][i][4:].split('T')[1])),\
+                                     float(meta['orbitX'][i]), float(meta['orbitY'][i]),\
+                                     float(meta['orbitZ'][i]))
+                        Orbit_info.append(Tuple_orbit)
+
+    set_list=[]
+    Orbit_info=sorted(Orbit_info)
+
+    # Remove points that fall within the same second (the time window can be covered by more than
+    # one orbit file); keep the last point of every second.
+    for element in range(len(Orbit_info)-1):
+        temp_element     = Orbit_info[element][0]
+        temp_element_add = Orbit_info[element+1][0]
+        if int(temp_element) != int(temp_element_add):
+            set_list.append(Orbit_info[element])
+    if Orbit_info:
+        set_list.append(Orbit_info[-1])
+
+    Orbit_info = set_list
+
+    orbit_Time=[]
+    orbit_X   =[]
+    orbit_Y   =[]
+    orbit_Z   =[]
+
+    for element in Orbit_info:
+        orbit_Time.append(element[0])
+        orbit_X.append(element[1])
+        orbit_Y.append(element[2])
+        orbit_Z.append(element[3])
+
+    if len(orbit_X) == 0:
+        return [], orbit_X, orbit_Y, orbit_Z
+
+    del Orbit_info
+    orbit_Time=np.array(orbit_Time)
+    orbit_X   =np.array(orbit_X)
+    orbit_Y   =np.array(orbit_Y)
+    orbit_Z   =np.array(orbit_Z)
+    if input_interpolation_method == 'cubic':
+        spl_x = interp1d(orbit_Time, orbit_X, kind='cubic')
+        spl_y = interp1d(orbit_Time, orbit_Y, kind='cubic')
+        spl_z = interp1d(orbit_Time, orbit_Z, kind='cubic')
+    elif input_interpolation_method == 'spline':
+        spl_x = inter.InterpolatedUnivariateSpline(orbit_Time, orbit_X)
+        spl_y = inter.InterpolatedUnivariateSpline(orbit_Time, orbit_Y)
+        spl_z = inter.InterpolatedUnivariateSpline(orbit_Time, orbit_Z)
+    else:
+        raise ValueError('Unknown interpolation method: ' + input_interpolation_method)
+
+    input_time = np.arange((orbit_time - date_start) - 100, (orbit_time - date_start) + 100).astype(dtype='int32')
+    out_orbit_X=spl_x(input_time)
+    out_orbit_Y=spl_y(input_time)
+    out_orbit_Z=spl_z(input_time)
+
+    return input_time, out_orbit_X, out_orbit_Y, out_orbit_Z
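+
+# Minimal usage sketch (hypothetical folder and epoch): interpolate the orbit at
+# one-second spacing around the given acquisition time:
+#   t, x, y, z = interpolate_orbit('/data/orbits', '2016-03-05T17:33:21.123456', 'POE', 'spline', satellite='S1A')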
+
+#----------------------------------------------------------------
+def hms2sec(hmsString,convertFlag='float'):
+    # input hmsString syntax: XX:XX:XX.xxxxxx
+    seconds = int(hmsString[0:2])*3600 + \
+        int(hmsString[3:5])*60 + \
+        float(hmsString[6:])
+    if convertFlag == 'int':
+        return int(seconds)
+    elif convertFlag == 'float':
+        return float(seconds)
+    else:
+        return int(seconds)
\ No newline at end of file
diff --git a/doris_stack/functions/read_write_data.py b/doris_stack/functions/read_write_data.py
new file mode 100755
index 0000000..06bcf7b
--- /dev/null
+++ b/doris_stack/functions/read_write_data.py
@@ -0,0 +1,62 @@
+import numpy as np
+import os
+
+
+def freadbk(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32', lines=0, pixels=0):
+    # First use memmap to get a memory map of the full file, then extract the requested part.
+
+    if dt == 'cpxint16':
+        dtype = np.dtype([('re', np.int16), ('im', np.int16)])
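+        # View the int16 (re, im) pairs as plain int16, promote to float32 and reinterpret each pair as one complex64.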
+        file_dat = np.memmap(path_file, dtype=dtype, mode='r', shape=(lines, pixels)).view(np.int16).astype(np.float32).view(np.complex64)
+        data = file_dat[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1].astype(
+            'complex64', subok=False)
+    elif dt == 'cpxshort':
+        dtype = np.dtype([('re', np.float16), ('im', np.float16)])
+        file_dat = np.memmap(path_file, dtype=dtype, mode='r', shape=(lines, pixels)).view(np.float16).astype(np.float32).view(np.complex64)
+        data = file_dat[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1].astype(
+            'complex64', subok=False)
+    else:
+        dt = np.dtype(dt)
+        file_dat = np.memmap(path_file, dtype=dt, mode='r', shape=(lines, pixels))
+        data = file_dat[line_start - 1:line_start + nofLines - 1, pixel_start - 1:pixel_start + nofPixels - 1].astype(
+            dt, subok=False)
+
+    return data, file_dat
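+
+# Minimal usage sketch (hypothetical file and sizes): read a 100x200 crop of a complex-int16 image
+# of 1500x2000 pixels, starting at line 50, pixel 10:
+#   crop, full_map = freadbk('slave.raw', 50, 10, 100, 200, dt='cpxint16', lines=1500, pixels=2000)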
+
+
+def fwritebk(path_file, data, dt):
+    # First define the output dtype, then write the data to file using memmap.
+
+    if dt == 'cpxint16':
+        dtype = np.dtype([('re', np.int16), ('im', np.int16)])
+        file_dat = np.memmap(path_file, dtype=dtype, mode='w+', shape=data.shape)
+        file_dat[:, :] = data.view(np.float32).astype(np.int16).view(dtype)
+    elif dt == 'cpxshort':
+        dtype = np.dtype([('re', np.float16), ('im', np.float16)])
+        file_dat = np.memmap(path_file, dtype=dtype, mode='w+', shape=data.shape)
+        file_dat[:, :] = data.view(np.float32).astype(np.float16).view(dtype)
+    else:
+        file_dat = np.memmap(path_file, dtype=dt, mode='w+', shape=data.shape)
+        file_dat[:, :] = data
+
+    return data, file_dat
+
+def read_tiff(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32'):
+    print('under construction')
+
+
+def write_tiff(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32'):
+    print('under construction')
+
+
+def read_nc(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32'):
+    print('under construction')
+
+
+def write_nc(path_file, line_start=1, pixel_start=1, nofLines=None, nofPixels=None, dt='float32'):
+    print('under construction')
+
+
+
+def python_gdal_convert():
+    print('under construction')
\ No newline at end of file
diff --git a/doris_stack/functions/resdata.py b/doris_stack/functions/resdata.py
new file mode 100755
index 0000000..507a746
--- /dev/null
+++ b/doris_stack/functions/resdata.py
@@ -0,0 +1,404 @@
+import warnings
+import os
+import collections
+
+
+class ResData(object):
+    # This class holds the metadata of a doris datafile and its processing chain, and it is capable of reading from
+    # and writing to the .res files used by the doris software.
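+    #
+    # Minimal usage sketch (hypothetical file name):
+    #   res = ResData(filename='master.res', type='single')
+    #   crop_info = res.request('crop')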
+
+    def __init__(self,filename='',type=''):
+        # Initialize variables
+
+        # Filename of resfile and type (single, interferogram)
+        self.res_path = []
+        self.res_type = ''
+
+        # Processes, process_control and header of resfile
+        self.processes = collections.OrderedDict()
+        self.process_control = {}
+        self.process_timestamp = {}
+        self.process_time = {}
+        self.header = {}
+
+        #####################################################
+
+        # Create a ResData object (single/interferogram)
+        if type not in ['single', 'interferogram'] and not filename:
+            warnings.warn('Define whether the result file is of type single or interferogram')
+            return
+        else:
+            self.res_type = type
+        if filename:
+            if not os.path.exists(filename):
+                warnings.warn('This filename does not exist: ' + filename)
+            else:
+                self.res_path = filename
+                self.res_read()
+        else:
+            if type == 'single':
+                self.process_control = collections.OrderedDict([('readfiles', '0'),('leader_datapoints', '0'), ('precise_orbits', '0'), ('crop', '0'), ('sim_amplitude', '0'), ('master_timing' , '0'),
+                                       ('oversample', '0'), ('resample', '0') , ('filt_azi', '0'), ('filt_range', '0'), ('NOT_USED' , '0')])
+            elif type == 'interferogram':
+                self.process_control = collections.OrderedDict([('coarse_orbits','0'),('coarse_correl','0'),('fine_coreg','0'),('timing_error','0'),('dem_assist','0'),
+                                   ('comp_coregpm','0'),('interfero','0'),('coherence','0'),('comp_refphase','0'),('subtr_refphase','0'),
+                                   ('comp_refdem','0'),('subtr_refdem','0'),('filtphase','0'),('unwrap','0'),('est_orbits','0'),('slant2h','0'),
+                                   ('geocoding','0'),('dinsar','0'),('NOT_USED2','0')])
+
+    def res_read(self):
+        self.meta_reader()
+        self.process_reader()
+
+    def meta_reader(self):
+        # This function reads the header and the process_control block of a .res file.
+        with open(self.res_path) as resfile:
+            splitter = ':'
+            temp = collections.OrderedDict()
+            row = 0
+            for line in resfile:
+                try:
+                    ## Filter out rubbish
+                    if line == '\n':
+                        continue
+                    elif 'Start_process_control' in line:
+                        self.header = temp
+                        temp = collections.OrderedDict()
+                    elif 'End_process_control' in line:
+                        self.process_control = temp
+                        break
+                    elif splitter in line and line[0] != '|' and line[0] != '\t':
+                        # Split line if possible and add to dictionary
+                        l_split = line.split(splitter)
+                        temp[l_split[0].strip()] = l_split[1].strip()
+                    else:
+                        name = 'row_' + str(row)
+                        row += 1
+                        temp[name] = [line]
+
+                except:
+                    print 'Error occurred at line: ' + line
+
+    def process_reader(self, processes=''):
+        # This function reads arbitrary processes based on the standard buildup of processes in res files.
+        # leader_datapoints can be one of the processes, although it will not appear in the process_control of a
+        # .res file.
+
+        if not processes:
+            processes = self.process_control.keys()
+
+        processes.append('leader_datapoints')
+        process = ''
+
+        with open(self.res_path) as resfile:
+            # Start at row zero and with empty list
+            temp = collections.OrderedDict()
+            row = 0
+            line_no = -1
+            timestamp = False
+            timestamp_line = 0
+            for line in resfile:
+                try:
+                    line_no += 1
+                    # Filter out rubbish
+                    if line[0] == '|':
+                        continue
+                    elif '**' in line:
+                        continue
+                    elif line == '\n':
+                        continue
+
+                    # Check if timestamp
+                    if ' *===========' in line:
+                        # First line of time stamp
+                        temp = collections.OrderedDict()
+                        timestamp = True
+                        row = 0
+                        continue
+                    elif ' *-----------' in line:
+                        timestamp = False
+                        timestamp_data = temp
+                        timestamp_line = line_no + 5
+                        continue
+
+                    # Check if process
+                    if '*' in line[0]:
+                        if line.replace('*_Start_', '').split(':')[0].strip() in processes:
+                            process = line.replace('*_Start_', '').split(':')[0].strip()
+                            temp = collections.OrderedDict()
+                            row = 0; space = [0]; space_r = [0,0,0,0,0,0,0,0]
+
+                            # Finally save the timestamp if it exists
+                            if line_no == timestamp_line:
+                                self.process_timestamp[process] = timestamp_data
+                            else:
+                                self.process_timestamp[process] = ''
+
+                        elif line.replace('* End_', '').split(':')[0] == process:
+                            self.processes[process] = temp
+                            temp = collections.OrderedDict()
+                            process = ''
+                        continue
+
+                    # Save line
+                    if timestamp is True:
+                        # Save rows in timestamp
+                        row_name = 'row_' + str(row)
+                        temp[row_name] = line
+                        if row == 1:
+                            self.process_time[process] = line.split(':', 1)[1].strip()
+                        row += 1
+                    elif process:
+                        # If we are in a process output line
+                        # Split line using ':' , '=' or spaces (tables)
+                        # variable space and space row define the future spacing in every processing step in a res file.
+
+                        if process == 'coarse_orbits':
+                            # Add some code for a strange exception in coarse_orbits
+                            if '//' in line:
+                                temp[line.split()[0]] = line.split()[1:]
+                            else:
+                                l_split = line.replace('=',':').split(':')
+                                temp[l_split[0].strip()] = l_split[1].strip()
+
+                        elif ':' in line:
+                            l_split = line.split(':',1)
+                            temp[l_split[0].strip()] = l_split[1].strip()
+                        else:
+                            # If the line does not contain a : it is likely a table.
+                            l_split = line.replace('\t',' ').split()
+                            row_name = 'row_' + str(row)
+                            temp[row_name] = [l_split[i].strip() for i in range(len(l_split))]
+                            row += 1
+
+                except:
+                    print 'Error occurred at line: ' + line
+
+    def process_spacing(self,process=''):
+
+        spacing = 0
+        table_spacing = [0,0,0,0,0,0,0]
+
+        dat = self.processes[process]
+
+        for key in dat.keys():
+            spacing = max(len(key) + 8, spacing)
+
+            if key.startswith('row'):
+                n=0
+                for val in self.processes[process][key]:
+                    table_spacing[n] = max(len(val) + 3, table_spacing[n])
+                    n += 1
+        spacing = [spacing]
+
+        return spacing, table_spacing
+
+    def del_process(self,process=''):
+        # function deletes one or multiple processes from the corresponding res file
+
+        if isinstance(process, basestring):  # one process
+            if not process in self.process_control.keys():
+                warnings.warn('The requested process does not exist (or processes are not read yet, use self.process_reader): ' + str(process))
+                return
+        elif isinstance(process, list):  # If we use a list
+            for proc in process:
+                if not proc in self.process_control.keys():
+                    warnings.warn('The requested process does not exist (or processes are not read yet, use self.process_reader): ' + str(proc))
+                    return
+        else:
+            warnings.warn('process should contain either a string of one process or a list of multiple processes: ' + str(process))
+            return
+
+        # Now remove the process and write the file again.
+        if isinstance(process, basestring): # Only one process should be removed
+            self.process_control[process] = '0'
+            del self.processes[process]
+        else:
+            for proc in process:
+                self.process_control[proc] = '0'
+                del self.processes[proc]
+
+    def write(self,new_filename=''):
+        # Here all the available information is written to a new res file. Use this when information was manually
+        # added or removed and the file has to be (re)created, for example for the Sentinel-1 readfiles, which are
+        # not added yet.
+
+        if not new_filename and not self.res_path:
+            warnings.warn('Please specify filename: ' + str(new_filename))
+            return
+        elif not new_filename:
+            new_filename = self.res_path
+        if not self.process_control or not self.processes:
+            warnings.warn('Every result file needs at least a process control and one process to make any sense: ' + str(new_filename))
+
+        # Open file and write header, process control and processes
+        self.res_path = new_filename
+        f = open(new_filename, "w")
+
+        # Write the header:
+        if self.header:
+            spacing = [40]
+            for key in self.header.keys():
+                if 'row' in key:       # If it is just a string
+                    f.write(self.header[key][0])
+                else:                   # If the key should be included
+                    f.write((key + ':').ljust(spacing[0]) + self.header[key] + '\n')
+
+        # Write the process control
+        for i in range(3):
+            f.write('\n')
+        f.write('Start_process_control\n')
+        for process in self.process_control.keys():
+            if process != 'leader_datapoints':  # leader_datapoints is left out in process control
+                f.write((process + ':\t\t') + str(self.process_control[process]) + '\n')
+        f.write('End_process_control\n')
+
+        # Then loop through all the processes
+        for process in self.processes.keys():
+            # First check for a timestamp and add it if needed.
+            if self.process_timestamp[process]:
+                for i in range(2):
+                    f.write('\n')
+                f.write('   *====================================================================* \n')
+                for key in self.process_timestamp[process].keys():
+                    f.write(self.process_timestamp[process][key])
+                f.write('   *--------------------------------------------------------------------* \n')
+
+            # Then write the process itself
+            if process == 'coarse_orbits':
+                spacing = [45]
+                spacing_row = [15,10,15]
+            else:
+                spacing, spacing_row = self.process_spacing(process)
+            data = self.processes[process]
+
+            for i in range(3):
+                f.write('\n')
+            f.write('******************************************************************* \n')
+            f.write('*_Start_' + process + ':\n')
+            f.write('******************************************************************* \n')
+
+            for line_key in self.processes[process].keys():
+                if 'row' in line_key:  # If it is a table or consists of several different parts
+                    line = ''.join([(' ' + data[line_key][i]).replace(' -','-').ljust(spacing_row[i]) for i in range(len(data[line_key]))])
+                    f.write(line + '\n')
+                elif process == 'coarse_orbits':  # the coarse orbits output is different from the others.
+                    if 'Control point' in line_key: # Special case coarse orbits...
+                        f.write((line_key + ' =').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+                    elif not isinstance(data[line_key], basestring): # Another special case
+                        f.write(line_key.ljust(spacing_row[0]) + (data[line_key][0]).ljust(spacing_row[1]) +
+                                data[line_key][1].ljust(spacing_row[2]) + ' '.join(data[line_key][2:]) + '\n')
+                    elif isinstance(data[line_key], basestring): # Handle as in normal cases
+                        f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+                else: # If it consists out of two parts
+                    f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+
+            f.write('******************************************************************* \n')
+            f.write('* End_' + process + ':_NORMAL\n')
+            f.write('******************************************************************* \n')
+        f.close()
+
+        # Read the locations in the new file
+        self.process_reader()
+
+    def insert(self,data,process,variable=''):
+        # This function inserts a variable or a process which does not exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if process not in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is added
+        if not variable:
+            if self.process_control[process] == '1':
+                warnings.warn('This process already exists! Use the update function: ' + str(process))
+                return
+            elif self.process_control[process] == '0':
+                self.process_control[process] = '1'
+                self.processes[process] = data
+                self.process_timestamp[process] = ''
+
+        # A variable is added
+        if variable:
+            if variable in self.processes[process].keys():
+                warnings.warn('This variable already exists! Use the update function: ' + str(variable))
+                return
+            else:
+                self.processes[process][variable] = data
+
+    def delete(self,process,variable=''):
+        # This function deletes a variable or a process which does exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if process not in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is deleted
+        if not variable:
+            if self.process_control[process] == '0':
+                warnings.warn('This process does not exist: ' + str(process))
+                return
+            elif self.process_control[process] == '1':
+                self.process_control[process] = '0'
+                del self.processes[process]
+                del self.process_timestamp[process]
+
+        # A variable is deleted
+        if variable:
+            if not variable in self.processes[process].keys():
+                warnings.warn('This variable does not exist: ' + str(variable))
+                return
+            else:
+                del self.processes[process][variable]
+
+    def update(self,data,process,variable=''):
+        # This function updates a variable or a process which does exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if not process in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is updated
+        if not variable:
+            if self.process_control[process] == '1':
+                self.processes[process] = data
+            elif self.process_control[process] == '0':
+                warnings.warn('This process does not exist. Use the insert function: ' + str(process))
+                return
+        # A variable is updated
+        if variable:
+            if variable in self.processes[process].keys():
+                self.processes[process][variable] = data
+            else:
+                warnings.warn('This variable does not exist. Use the insert function: ' + str(variable))
+                return
+
+    def request(self,process,variable=''):
+        # This function returns the data of a variable or a process that exists at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if not process in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is requested
+        if not variable:
+            if self.process_control[process] == '1':
+                data = self.processes[process]
+            elif self.process_control[process] == '0':
+                warnings.warn('This process does not exist: ' + str(process))
+                return
+        # A variable is requested
+        if variable:
+            if variable in self.processes[process].keys():
+                data = self.processes[process][variable]
+            else:
+                warnings.warn('This variable does not exist: ' + str(variable))
+                return
+
+        return data
\ No newline at end of file
diff --git a/doris_stack/functions/sentinel_dump_data_function.py b/doris_stack/functions/sentinel_dump_data_function.py
new file mode 100755
index 0000000..b4b1688
--- /dev/null
+++ b/doris_stack/functions/sentinel_dump_data_function.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+import os,sys,time
+
+if __name__ == "__main__":
+    # If calling script directly we have to load the package first to our python path
+    folder = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+    print(folder)
+    sys.path.extend([folder])
+
+import resdata as resdata
+
+
+def dump_data(input_file,res_file, output_file='', coordinates=[]):
+    # This function dumps a .raw file from the original .tif sentinel data. The input_file is the .tif file and the
+    # res_file is the .res file that corresponds with the output file. Coordinates is an optional variable which can be
+    # called if the lines and pixels are not yet defined in the .res file.
+
+    res_vars = resdata.ResData(filename=res_file)
+    res_vars.res_read()
+
+    # Check if information about crop is available
+
+    if not coordinates:
+        if res_vars.process_control['crop'] == '0':
+            print 'There is no information available about how to crop this file!'
+            return
+        else:
+            outputWinFirstPix = int(res_vars.processes['crop']['First_pixel (w.r.t. original_image)'])
+            outputWinLastPix = int(res_vars.processes['crop']['Last_pixel (w.r.t. original_image)'])
+            outputWinFirstLine = int(res_vars.processes['crop']['First_line (w.r.t. tiff_image)'])
+            outputWinLastLine = int(res_vars.processes['crop']['Last_line (w.r.t. tiff_image)'])
+
+    else:
+        outputWinFirstPix = coordinates[0]
+        outputWinLastPix = coordinates[1]
+        outputWinFirstLine = coordinates[2]
+        outputWinLastLine = coordinates[3]
+
+    if not output_file:
+        if res_vars.process_control['crop'] == '1':
+            if 'Data_output_file' in res_vars.processes['crop'].keys():
+                output_file = os.path.join(os.path.dirname(res_file), res_vars.processes['crop']['Data_output_file'])
+        if not output_file:
+            output_file = res_file.split(".")[0] + '.raw'
+
+    # System parameter: the gdal_translate executable is assumed to be available on this system.
+
+    gdalCall = 'gdal_translate'
+
+    # Output data parameters
+    outputDataFormat = 'MFF'
+    outputDataType   = 'CInt16'
+
+    cmd = '%s %s -ot %s -of %s %s' % (gdalCall,input_file,outputDataType,outputDataFormat,output_file)
+
+    if outputWinFirstPix is not None:
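+        # gdal_translate -srcwin takes: xoff yoff xsize ysize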
+        cmd = cmd + (' -srcwin %s %s %s %s' % (outputWinFirstPix,outputWinFirstLine,outputWinLastPix-outputWinFirstPix+1,outputWinLastLine-outputWinFirstLine+1))
+
+    failure = os.system(cmd)
+    if failure:
+        print '%s: running %s failed' % (sys.argv[0],cmd)
+        sys.exit(1)
+    else:
+        os.rename(os.path.splitext(output_file)[0]+'.j00',output_file)
+        os.remove(os.path.splitext(output_file)[0]+'.hdr')
+        os.remove(os.path.splitext(output_file)[0]+'.hdr.aux.xml')
+
+
+# Actually execute the code to unzip one data file.
+if __name__ == "__main__":
+
+    input_file = sys.argv[1]
+    res_file = sys.argv[2]
+    output_file = sys.argv[3]
+
+    dump_data(input_file, res_file, output_file, coordinates=[])
+
+
diff --git a/doris_stack/functions/stack_cleanup.py b/doris_stack/functions/stack_cleanup.py
new file mode 100755
index 0000000..3e1b4d9
--- /dev/null
+++ b/doris_stack/functions/stack_cleanup.py
@@ -0,0 +1,184 @@
+"""
+This script is meant to cleanup datastacks and remove unnecessary intermediate results. The most general options are the
+settings for a PS approach and a distributed scatterers approach, which is also used for atmospheric observations.
+"""
+
+import os
+import shutil
+
+# Test / example (note: cleanup() is defined below, so this block can only be run after the definitions have been
+# loaded; with remove=False only a listing of the files that would be deleted is returned):
+# stack_folder = '/media/gert/Data/datastacks/netherlands/asc_t88/stack'
+# files = cleanup(stack_folder, cleanup_ps=False, cleanup_ds=True, full_swath_rm=[], full_swath_keep=[],
+#                 burst_rm=[], burst_keep=[], remove=False)
+
+def cleanup(stack_folder, cleanup_ps=True, cleanup_ds=False, full_swath_rm=[], full_swath_keep=[], burst_rm=[],
+            burst_keep=[], remove=True):
+    """
+    This is the main script to decide what should be removed and what not for an individual full swath.
+    Basically a list is made of the files that should be removed from the full swath and burst folders based on the
+    ending of the file name.
+    The function returns a dictionary in which all deleted files are listed. If you only want to list the files that
+    would be deleted, use the remove=False option.
+
+    Default options are:
+
+    remove                          abbreviation            PS          DS
+    interferogram                   ifg                     yes         yes
+    ifg earth phase corrected       ifg_srp                 yes         yes
+    ifg dem phase corrected         ifg_srd                 no          yes
+    ifg phase filtered              ifg_filt                NA (yes)    no
+    ifg coherence                   ifg_coh                 yes         no
+    ifg unwrapped                   ifg_unw                 NA (yes)    no
+    dem pixel shift (pixel)         dac_delta_p             yes         yes
+    dem pixel shift (line)          dac_delta_l             yes         yes
+    slave image                     s_ramp                  yes         yes
+    slave image deramped            s_deramp                no          yes
+    master image                    m_ramp                  yes         yes
+    master image deramped           m_deramp                no (*)      yes
+    dem phase                       dem                     no          no
+    latitude                        phi                     no (*)      no
+    longitude                       lam                     no (*)      no
+
+    burst folder                    b_folder                yes         yes
+        burst raw files             b_raw                   yes         yes
+        burst ras files             b_ras                   yes         yes
+        burst res files             b_res                   yes         yes
+
+    * Only one needed in a single master stack. Is not implemented yet.
+    """
+
+    # First check what should be removed.
+    if cleanup_ps:
+        swath_clean = {'ifg': True, 'ifg_srp': True, 'ifg_srd': False, 'ifg_filt': True, 'ifg_coh': True,
+                       'ifg_unw': True, 's_ramp': True, 's_deramp': False, 'm_ramp': True, 'm_deramp': False,
+                       'dem': False, 'phi': False, 'lam': False, 'dac_delta_p': True, 'dac_delta_l': True}
+        burst_clean = {'b_folder': True, 'b_raw': True, 'b_ras': True, 'b_res': True}
+    elif cleanup_ds:
+        swath_clean = {'ifg': True, 'ifg_srp': True, 'ifg_srd': True, 'ifg_filt': False, 'ifg_coh': False,
+                       'ifg_unw': False, 's_ramp': True, 's_deramp': True, 'm_ramp': True, 'm_deramp': True,
+                       'dem': False, 'phi': False, 'lam': False, 'dac_delta_p': True, 'dac_delta_l': True}
+        burst_clean = {'b_folder': True, 'b_raw': True, 'b_ras': True, 'b_res': True}
+    else:  # Otherwise nothing is removed unless indicated
+        swath_clean = {'ifg': False, 'ifg_srp': False, 'ifg_srd': False, 'ifg_filt': False, 'ifg_coh': False,
+                       'ifg_unw': False, 's_ramp': False, 's_deramp': False, 'm_ramp': False, 'm_deramp': False,
+                       'dem': False, 'phi': False, 'lam': False, 'dac_delta_p': False, 'dac_delta_l': False}
+        burst_clean = {'b_folder': False, 'b_raw': False, 'b_ras': False, 'b_res': False}
+
+    # Add additional options.
+    for dat in full_swath_rm:
+        swath_clean[dat] = True
+    for dat in full_swath_keep:
+        swath_clean[dat] = False
+    for dat in burst_rm:
+        burst_clean[dat] = True
+    for dat in burst_keep:
+        burst_clean[dat] = False
+
+    # Then create the strings with which these parts end
+    swath_endings = {'ifg': 'int.raw', 'ifg_srp': 'srp.raw', 'ifg_srd': 'srd.raw', 'ifg_filt': 'filt.raw',
+                     'ifg_coh': 'rence.raw', 'ifg_unw': 'unwrapped.raw', 's_ramp': 'rsmp.raw',
+                     's_deramp': 'rsmp_deramped.raw', 'm_ramp': 'ster.raw', 'm_deramp': 'ster_deramped.raw',
+                     'dem': 'dem_radar.raw', 'phi': 'phi.raw', 'lam': 'lam.raw',
+                     'dac_delta_p': 'delta_pixel.raw', 'dac_delta_l': 'delta_line.raw'}
+    burst_endings = {'b_folder': '', 'b_raw': '.raw', 'b_ras': '.ras', 'b_res': '.res'}
+
+    # Finally, make a list of which endings should be deleted
+    swath_remove = [dat for key, dat in swath_endings.iteritems() if swath_clean[key]]
+    burst_remove = [dat for key, dat in burst_endings.iteritems() if burst_clean[key]]
+
+    # Check the total ifgs in the stack
+    swath_folders = scan_stack(stack_folder)
+
+    # Go over these ifgs and remove intermediate steps in full swath and bursts.
+    deleted = dict()
+    for swath_folder in swath_folders:
+        deleted[swath_folder] = dict()
+
+        if burst_clean['b_folder']:
+            folders = remove_burst_folders(swath_folder, remove)
+            deleted[swath_folder]['folders'] = folders
+        else:
+            filenames = remove_burst_files(swath_folder, burst_remove, remove)
+            deleted[swath_folder]['burst_files'] = filenames
+
+        filenames = remove_file(swath_folder, swath_remove, remove)
+        deleted[swath_folder]['swath_files'] = filenames
+
+    return deleted
+
+
+def scan_stack(stack_folder):
+    # This function enters the children directories and checks whether a master.res, ifgs.res, swath folder and ifg
+    # exist.
+
+    swath_folders = []
+    root, dirs, files = os.walk(stack_folder).next()
+
+    for folder in dirs:
+        r, folders, files = os.walk(os.path.join(root, folder)).next()
+
+        if 'swath_1' in folders and 'master.res' in files and 'ifgs.res' in files and 'cint.raw' in files:
+            swath_folders.append(os.path.join(root, folder))
+
+    return swath_folders
+
+
+def remove_burst_folders(swath_folder, remove):
+    # Remove all burst folders from swath folder
+
+    folder_names = []
+    root, dirs, files = os.walk(swath_folder).next()
+
+    for folder in dirs:
+        if folder.startswith('swath'):
+            if remove:
+                shutil.rmtree(os.path.join(root, folder))
+            folder_names.append(os.path.join(root, folder))
+
+    return folder_names
+
+
+def remove_file(swath_folder, file_endings, remove):
+    # Remove the files in the main folder.
+
+    file_names = []
+    root, dirs, files = os.walk(swath_folder).next()
+
+    for filename in files:
+        for end in file_endings:
+            if filename.endswith(end):
+                if remove:
+                    os.remove(os.path.join(root, filename))
+                file_names.append(os.path.join(root, filename))
+
+    return file_names
+
+
+def remove_burst_files(swath_folder, file_endings, remove):
+    # Remove the files in the burst folders.
+
+    file_names = []
+    root1, swaths, files = os.walk(swath_folder).next()
+
+    if len(swaths) == 0:
+        print 'Files seem to be deleted already!'
+        return file_names
+
+    for swath in swaths:
+        root2, bursts, files = os.walk(os.path.join(root1, swath)).next()
+        for burst in bursts:
+            root3, burst_fold, files = os.walk(os.path.join(root2, burst)).next()
+            for filename in files:
+                for end in file_endings:
+                    if filename.endswith(end):
+                        if remove:
+                            os.remove(os.path.join(root3, filename))
+                        file_names.append(os.path.join(root3, filename))
+
+    return file_names
diff --git a/doris_stack/functions/swath_metadata.py b/doris_stack/functions/swath_metadata.py
new file mode 100755
index 0000000..984eac2
--- /dev/null
+++ b/doris_stack/functions/swath_metadata.py
@@ -0,0 +1,155 @@
+from datetime import datetime
+import collections
+from doris.doris_stack.functions.precise_read import interpolate_orbit
+from shapely.geometry import Polygon
+import numpy as np
+from scipy.interpolate import RectBivariateSpline
+import time
+
+
+def swath_datapoints(meta):
+    # First check which datapoints should be included.
+
+    datapoints = collections.OrderedDict()
+
+    datapoints['row_1'] = ['t(s)','X(m)','Y(m)','Z(m)']
+    t = meta['aux']['orbitTime']
+    x = meta['aux']['orbitX']
+    y = meta['aux']['orbitY']
+    z = meta['aux']['orbitZ']
+    datapoints['NUMBER_OF_DATAPOINTS'] = str(len(t))
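+    # The key is created here to fix its position in the OrderedDict (the .res writer preserves
+    # key order); its value is set again after the loop.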
+
+    for n in range(len(t)):
+        t_s = datetime.strptime(t[n],'%Y-%m-%dT%H:%M:%S.%f')
+        t_s = float(t_s.hour * 3600 + t_s.minute * 60 + t_s.second) + float(t_s.microsecond) / 1000000
+        t_s = "{:.6f}".format(t_s)
+        datapoints['row_' + str(n + 2)] = [t_s, "{:.7f}".format(float(x[n])), "{:.7f}".format(float(y[n])), "{:.7f}".format(float(z[n]))]
+
+    datapoints['NUMBER_OF_DATAPOINTS'] = str(len(t))
+
+    return datapoints
+
+
+def swath_precise(meta, precise_folder, dat_type='POE'):
+    # This function utilizes the orbit_read script to read precise orbit files and export them to the resfile format.
+    # Additionally it removes the burst_datapoints part, as it is not needed anymore.
+
+    # First check whether the precise orbit file exists and load data if that is the case.
+
+    date = meta['aux']['azimuthTimeStart'][0]
+    X = []
+
+    if dat_type not in ['POE', 'RES', 'XML']:
+        print('Choose either POE, RES or XML as data type')
+        return
+
+    if dat_type == 'POE' or dat_type == 'RES':
+        input_time, X, Y, Z = interpolate_orbit(precise_folder, date, dat_type, 'spline', satellite=meta['Product type specifier'])
+
+        if len(X) == 0 and dat_type == 'POE':
+            dat_type = 'RES'
+            input_time, X, Y, Z = interpolate_orbit(precise_folder, date, 'RES', 'spline', satellite=meta['Product type specifier'])
+            print('There is no precise orbit file available, we try the restituted files')
+
+    if len(X) == 0 or dat_type == 'XML':
+        print('There is no precise or restituted orbit file available we use the datapoints from the .xml file')
+        datapoints = swath_datapoints(meta)
+        datatype = 'leader_datapoints'
+        return datapoints, datatype
+
+    datapoints = collections.OrderedDict()
+    datapoints['row_1'] = ['t(s)','X(m)','Y(m)','Z(m)']
+    datapoints['NUMBER_OF_DATAPOINTS'] = str(len(input_time))
+
+    for n in range(len(input_time)):
+        datapoints['row_' + str(n + 2)] = [str(input_time[n]), str(X[n]), str(Y[n]), str(Z[n])]
+
+    return datapoints, 'precise_orbits'
+
+
+def swath_pixel_line_coordinate(meta):
+    # This function converts the given datapoints from xml to line/pixel coordinates with respect to the upper left
+    # pixel of the swath. This information can be used to make a first guess of the corner and center locations of
+    # the different bursts.
+
+    aux = meta['aux']
+    lats = np.array([float(l) for l in aux['sceneCenLat']])
+    lons = np.array([float(l) for l in aux['sceneCenLon']])
+    lines = [int(l) for l in aux['sceneCenLine_number']]
+    pixels = [int(l) for l in aux['sceneCenPixel_number']]
+
+    az_time = [np.datetime64(t) - np.datetime64(aux['azimuthTime'][0]) for t in aux['azimuthTime']]
+    az_step = np.timedelta64(int(float(meta['Azimuth_time_interval (s)']) * 1000000000000), 'ps')
+    new_lines = [int(round(t/az_step)) for t in az_time]
+
+    lines, count_l = np.unique(new_lines, return_counts=True)
+    pixels, count_p = np.unique(pixels, return_counts=True)
+    lats = lats.reshape((count_p[0], count_l[0]))
+    lons = lons.reshape((count_p[0], count_l[0]))
+
+    lat_interp = RectBivariateSpline(lines, pixels, lats)
+    lon_interp = RectBivariateSpline(lines, pixels, lons)
+    # These functions can be called using: lat_interp.ev(li, pi). Where li and pi are lists of pixels and lines.
+
+    return lat_interp, lon_interp
+
+
+def burst_coverage(meta, corners=True, shape=True):
+    # This function returns the lat/lon of the corners of all bursts in this swath. If shape is True the polygons
+    # are generated as well.
+
+    # First get the interpolation from pix/line to lat/lon
+    lat_interp, lon_interp = swath_pixel_line_coordinate(meta)
+
+    # Now calculate the centre pixels of individual bursts.
+    l_b = int(meta['aux']['imageLines'][0])
+    p_b = int(meta['aux']['imagePixels'][0])
+
+    # Calculate first lines
+    start_times = meta['aux']['azimuthTimeStart']
+    az_time = [np.datetime64(t) - np.datetime64(start_times[0]) for t in start_times]
+    az_step = np.timedelta64(int(float(meta['Azimuth_time_interval (s)']) * 1000000000000), 'ps')
+    start_lines = [int(round(t/az_step)) for t in az_time]
+
+    burst_center = []
+    burst_corners = []
+    burst_shapes = []
+
+    # Now loop over all the bursts and calc the center pixel / corners / polygon
+    for l in start_lines:
+        center = [(lon_interp(l+np.floor(l_b/2), np.floor(p_b/2))[0][0], lat_interp(l+np.floor(l_b/2), np.floor(p_b/2))[0][0])]
+        burst_center.append(center)
+        if corners or shape:
+            ul = (lon_interp(l, 0)[0][0], lat_interp(l, 0)[0][0])
+            ur = (lon_interp(l, p_b-1)[0][0], lat_interp(l, p_b-1)[0][0])
+            lr = (lon_interp(l+l_b-1, p_b-1)[0][0], lat_interp(l+l_b-1, p_b-1)[0][0])
+            ll = (lon_interp(l+l_b-1, 0)[0][0], lat_interp(l+l_b-1, 0)[0][0])
+            burst_corners.append([ul, ur, lr, ll])
+
+            if shape:
+                burst_shapes.append(Polygon([ul, ur, lr, ll]))
+
+    return burst_center, burst_corners, burst_shapes
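+
+# Minimal usage sketch (meta as returned by xml_query):
+#   centers, corners, shapes = burst_coverage(meta)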
+
+
+def swath_coverage(meta):
+    # This function calculates the total coverage of the swath.
+
+    aux = meta['aux']
+    lats = np.array([float(l) for l in aux['sceneCenLat']])
+    lons = np.array([float(l) for l in aux['sceneCenLon']])
+    lines = [int(l) for l in aux['sceneCenLine_number']]
+    pixels = [int(l) for l in aux['sceneCenPixel_number']]
+
+    lines, count_l = np.unique(lines, return_counts=True)
+    pixels, count_p = np.unique(pixels, return_counts=True)
+    lats = lats.reshape((count_p[0], count_l[0]))
+    lons = lons.reshape((count_p[0], count_l[0]))
+
+    # ul, ur, lr, ll
+    swath_corners = [(lons[0,0],lats[0,0]), (lons[0,-1],lats[0,-1]), (lons[-1,-1],lats[-1,-1]), (lons[-1,0],lats[-1,0])]
+    swath_shapes = Polygon(swath_corners)
+
+    return swath_corners, swath_shapes
diff --git a/doris_stack/functions/xml_query.py b/doris_stack/functions/xml_query.py
new file mode 100755
index 0000000..bffe924
--- /dev/null
+++ b/doris_stack/functions/xml_query.py
@@ -0,0 +1,162 @@
+# This function performs an xml query on a provided xml file.
+
+import sys
+import collections
+
+
+def xml_query(input_xml):
+
+    try:
+        import xml.etree.cElementTree as etree
+    except ImportError:
+        try:
+            from lxml import etree
+        except ImportError:
+            # import xml.etree.ElementTree as etree
+            print 'Failed to load lxml.etree or xml.etree.cElementTree'
+            sys.exit(1)
+
+    inTree = etree.parse(input_xml)
+
+    queryList = collections.OrderedDict([
+        ('Volume_file'                                  , 'dummy'),
+        ('Volume_ID'                                    , './/adsHeader/missionDataTakeId'),
+        ('Volume_identifier'                            , 'dummy'),
+        ('Volume_set_identifier'                        , 'dummy'),
+        ('Number of records in ref. file'               , 'dummy'),
+        ('SAR_PROCESSOR'                                , 'update_1'),
+        ('SWATH'                                        , './/adsHeader/swath'),
+        ('PASS'                                         , './/generalAnnotation/productInformation/pass'),
+        ('IMAGE_MODE'                                   , './/adsHeader/mode'),
+        ('polarisation'                                 , './/adsHeader/polarisation'),
+        ('Product type specifier'                       , './/adsHeader/missionId'),
+        ('Logical volume generating facility'           , 'dummy'),
+        ('Location and date/time of product creation'   , 'dummy'),
+        ('Number_of_lines_Swath'                        , './/imageAnnotation/imageInformation/numberOfLines'),
+        ('number_of_pixels_Swath'                       , './/imageAnnotation/imageInformation/numberOfSamples'),
+        ('rangePixelSpacing'                            , './/imageAnnotation/imageInformation/rangePixelSpacing'),
+        ('azimuthPixelSpacing'                          , './/imageAnnotation/imageInformation/azimuthPixelSpacing'),
+        ('total_Burst'                                  , 'update_1'),
+        ('Burst_number_index'                           , 'update_2'),
+        ('RADAR_FREQUENCY (HZ)'                         , './/generalAnnotation/productInformation/radarFrequency'),
+        ('Scene identification'                         , 'update_1'),
+        ('Scene location'                               , 'update_1'),
+        ('Sensor platform mission identifer'            , './/adsHeader/missionId'),
+        ('Scene_center_heading'                         , './/generalAnnotation/productInformation/platformHeading'),
+        ('Scene_centre_latitude'                        , 'update_2'),
+        ('Scene_centre_longitude'                       , 'update_2'),
+        ('Radar_wavelength (m)'                         , 'update_1'),
+        ('Azimuth_steering_rate (deg/s)'                , './/generalAnnotation/productInformation/azimuthSteeringRate'),
+        ('Pulse_Repetition_Frequency_raw_data(TOPSAR)'  , './/generalAnnotation/downlinkInformationList/downlinkInformation/prf'),
+        ('First_pixel_azimuth_time (UTC)'               , 'update_2'),
+        ('Pulse_Repetition_Frequency (computed, Hz)'    , './/imageAnnotation/imageInformation/azimuthFrequency'),
+        ('Azimuth_time_interval (s)'                    , './/imageAnnotation/imageInformation/azimuthTimeInterval'),
+        ('Total_azimuth_band_width (Hz)'                , './/imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing/totalBandwidth'),
+        ('Weighting_azimuth'                           , './/imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing/windowType'),
+        ('Range_time_to_first_pixel (2way) (ms)'        , 'update_1'),
+        ('Range_sampling_rate (computed, MHz)'          , 'update_1'),
+        ('Total_range_band_width (MHz)'                 , 'update_1'),
+        ('Weighting_range'                              , './/imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing/windowType'),
+        ('DC_reference_azimuth_time'                    , 'update_2'),
+        ('DC_reference_range_time'                      , 'update_2'),
+        ('Xtrack_f_DC_constant (Hz, early edge)'        , 'update_2'),
+        ('Xtrack_f_DC_linear (Hz/s, early edge)'        , 'update_2'),
+        ('Xtrack_f_DC_quadratic (Hz/s/s, early edge)'   , 'update_2'),
+        ('FM_reference_azimuth_time'                    , 'update_2'),
+        ('FM_reference_range_time'                      , 'update_2'),
+        ('FM_polynomial_constant_coeff (Hz, early edge)', 'update_2'),
+        ('FM_polynomial_linear_coeff (Hz/s, early edge)', 'update_2'),
+        ('FM_polynomial_quadratic_coeff (Hz/s/s, early edge)', 'update_2'),
+        ('Datafile'                                     , 'update_2'),
+        ('Dataformat'                                   , 'update_2'),
+        ('Number_of_lines_original'                     , 'update_2'),
+        ('Number_of_pixels_original'                    , 'update_2')
+    ])
+
+    queryList_aux = collections.OrderedDict([
+        ('Swath_startTime'                      , './/adsHeader/startTime'),
+        ('Swath_stopTime'                       , './/adsHeader/stopTime'),
+        ('imageLines'                           , './/swathTiming/linesPerBurst'),
+        ('imagePixels'                          , './/swathTiming/samplesPerBurst'),
+        ('firstValidSample'                     , './/swathTiming/burstList/burst/firstValidSample'),
+        ('lastValidSample'                      , './/swathTiming/burstList/burst/lastValidSample'),
+        ('productSpec'                          , './/generalHeader/referenceDocument'),
+        ('productVolDate'                       , './/setup//IOCSAuxProductGenerationTimeUTC'),
+        ('productDate'                          , './/generalHeader/generationTime'),
+        ('productFacility'                      , './/productInfo/generationInfo/level1ProcessingFacility'),
+        ('scenePol'                             , './/adsHeader/polarisation'),
+        ('sceneMode'                            , './/adsHeader/mode'),
+        ('sceneCenLine_number'                  , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/line'),
+        ('sceneCenPixel_number'                 , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/pixel'),
+        ('sceneCenLat'                          , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/latitude'),
+        ('sceneCenLon'                          , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/longitude'),
+        ('height'                               , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/height'),
+        ('azimuthTime'                          , './/geolocationGrid/geolocationGridPointList/geolocationGridPoint/azimuthTime'),
+        ('sceneRecords'                         , './/imageDataInfo/imageRaster/numberOfRows'),
+        ('orbitABS'                             , './/adsHeader/absoluteOrbitNumber'),
+        ('orbitTime'                            , './/generalAnnotation/orbitList/orbit/time'),
+        ('orbitX'                               , './/generalAnnotation/orbitList/orbit/position/x'),
+        ('orbitY'                               , './/generalAnnotation/orbitList/orbit/position/y'),
+        ('orbitZ'                               , './/generalAnnotation/orbitList/orbit/position/z'),
+        ('rangeRSR'                             , './/generalAnnotation/productInformation/rangeSamplingRate'),
+        ('rangeBW'                              , './/imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing/processingBandwidth'),
+        ('rangeTimePix'                         , './/imageAnnotation/imageInformation/slantRangeTime'),
+        ('azimuthTimeStart'                     , './/swathTiming/burstList/burst/azimuthTime'),
+        ('heading'                              , './/generalAnnotation/productInformation/platformHeading'),
+        ('doppler_azimuth_Time'                 , './/dopplerCentroid/dcEstimateList/dcEstimate/azimuthTime'),
+        ('doppler_range_Time'                   , './/dopplerCentroid/dcEstimateList/dcEstimate/t0'),
+        ('dopplerCoeff'                         , './/dopplerCentroid/dcEstimateList/dcEstimate/dataDcPolynomial'),
+        ('azimuthFmRate_reference_Azimuth_time' , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthTime'),
+        ('azimuthFmRate_reference_Range_time'   , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/t0'),
+        ('azimuthFmRate_c0'                     , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/c0'),
+        ('azimuthFmRate_c1'                     , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/c1'),
+        ('azimuthFmRate_c2'                     , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/c2'),
+        ('azimuthFmRatePolynomial'              , './/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthFmRatePolynomial')
+
+    ])
+
+    # Now find the variables of queryList and queryList_aux in the xml data.
+
+    for key in queryList.keys():
+        try:
+            vars()[key]
+        except (KeyError, NameError):
+            vars()[key] = []
+
+        if queryList[key][0:3] == './/':
+            for nodes in inTree.findall(queryList[key]):
+                vars()[key].append(nodes.text)
+            dat = vars()[key]
+            if isinstance(dat,list):
+                dat = dat[0]
+            if isinstance(dat,(int,float)):
+                dat = str(dat)
+            queryList[key] = dat
+
+    for key in queryList_aux.keys():
+        try:
+            vars()[key]
+        except (KeyError, NameError):
+            vars()[key] = []
+
+        for nodes in inTree.findall(queryList_aux[key]):
+            vars()[key].append(nodes.text)
+        queryList_aux[key] = vars()[key]
+
+    # Finally, fill in the derived fields of queryList
+    queryList['SAR_PROCESSOR'] = 'Sentinel-' + queryList['Sensor platform mission identifer'][-2:]
+    queryList['total_Burst'] = str(len(queryList_aux['azimuthTimeStart']))
+    queryList['Scene identification'] = 'Orbit: '+ queryList_aux['orbitABS'][0]
+    queryList['Scene location'] = 'lat: ' + queryList_aux['sceneCenLat'][0] + ' lon:' + queryList_aux['sceneCenLon'][0]
+    queryList['Radar_wavelength (m)'] = "{:.9f}".format(299792458.0/float(queryList['RADAR_FREQUENCY (HZ)']))
+    queryList['Range_time_to_first_pixel (2way) (ms)'] = "{:.15f}".format(float(queryList_aux['rangeTimePix'][0])*1000)
+    queryList['Range_sampling_rate (computed, MHz)'] = "{:.9f}".format(float(queryList_aux['rangeRSR'][0])/1000000)
+    queryList['Total_range_band_width (MHz)'] = "{:.9f}".format(float(queryList_aux['rangeBW'][0])/1000000)
+
+    # Drop the auxiliary entries that were only needed for the derived fields above
+    del_keys = ['orbitABS', 'rangeBW']
+    for d in del_keys:
+        queryList_aux.pop(d)
+
+    queryList['aux'] = queryList_aux
+
+    return queryList
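+
+# A minimal usage sketch (hypothetical annotation file name): the XPath strings in
+# queryList_aux above resolve directly with ElementTree, e.g. to list the azimuth
+# start time of every burst in a swath:
+#
+#   import xml.etree.ElementTree as ET
+#   inTree = ET.parse('s1a-iw1-slc-vv-example.xml')  # hypothetical file
+#   bursts = [n.text for n in inTree.findall('.//swathTiming/burstList/burst/azimuthTime')]
+#   print len(bursts)  # number of bursts, cf. queryList['total_Burst']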
diff --git a/doris_stack/main_code/__init__.py b/doris_stack/main_code/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/doris_stack/main_code/burst.py b/doris_stack/main_code/burst.py
new file mode 100644
index 0000000..9118f9c
--- /dev/null
+++ b/doris_stack/main_code/burst.py
@@ -0,0 +1,95 @@
+# This file defines a class for metadata objects of Sentinel-1 images. A large part of the work depends on the
+# Python readers from the TOPS toolbox.
+
+from doris.doris_stack.functions.burst_metadata import burst_header, burst_readfiles, burst_crop
+from doris.doris_stack.main_code.resdata import ResData
+import warnings
+import os
+import copy
+
+
+class BurstMeta(ResData):
+    # Class which holds and gathers information of a specific burst for sentinel 1 data.
+
+    def __init__(self, path='', swath_no='1', pol='vv', burst_num=1, xml='', data=''):
+        # Initialize variables
+
+        # These variables hold the burst number in the swath, the acquisition date, the centre location and the coverage of the burst (if available).
+        self.burst_num = []
+        self.new_burst_num = []
+        self.swath_num = []
+        self.burst_date = []
+        self.burst_center = []
+        self.burst_coverage = []
+        self.burst_corners = []
+        self.swath_meta = []
+
+        # The following contain the path of xml and data file for swath and burst.
+        self.swath_xml = ''
+        self.swath_data = ''
+        self.burst_res = ''
+        self.burst_data = ''
+
+        # orbits
+        self.datapoints = []
+        self.orbit_type = ''
+
+        #############################################################
+
+        # This function creates a swath object and searches for available data and xml files. It gives an error when
+        # either the path does not exist, no data or xml files can be found, or the data and xml files do not match.
+
+        # Create resdata for this object
+        super(BurstMeta, self).__init__(type='single')
+
+        if not xml or not data:
+            xml_dir = os.path.join(path, 'annotation')
+            xml = [f for f in os.listdir(xml_dir) if os.path.isfile(os.path.join(xml_dir, f))]
+
+            data_dir = os.path.join(path, 'measurement')
+            data = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
+
+            # Select polarisation
+            if not any(s in pol for s in ('hh','vv','hv','vh')):
+                warnings.warn('Polarisation not recognized, using default (vv)')
+                pol = 'vv'
+            if swath_no not in ('1', '2', '3'):
+                warnings.warn('Swath number not recognized, using default (1)')
+                swath_no = '1'
+
+            xml = [os.path.join(path,'annotation',x) for x in xml if x[12:14] in pol and x[6] == swath_no]
+            data = [os.path.join(path,'measurement',x) for x in data if x[12:14] in pol and x[6] == swath_no]
+
+        if type(xml) is str:
+            xml = [xml]
+        if type(data) is str:
+            data = [data]
+        if (len(xml) != 1 and type(xml) is list) or len(data) != 1:
+            warnings.warn('Total number of files should be one!')
+        if not os.path.exists(xml[0]) or not os.path.exists(data[0]):
+            warnings.warn('Either xml or data path does not exist')
+        if os.path.basename(xml[0])[:-4] != os.path.basename(data[0])[:-5]:  # compare basenames without .xml / .tiff
+            warnings.warn('xml and data file do not correspond.')
+        if not burst_num:
+            warnings.warn('No burst number given')
+
+        self.swath_xml = xml[0]
+        self.swath_data = data[0]
+        self.burst_num = burst_num
+        self.swath_num = int(os.path.basename(xml[0])[6])
+
+    def meta_burst(self, swath_meta=[], corners=True):
+        # This function reads and stores the metadata of the burst, based on the swath xml file.
+
+        self.swath_meta = swath_meta
+        readfiles = burst_readfiles(copy.deepcopy(self.swath_meta), self.burst_num, self.burst_center, self.burst_corners, self.swath_data)
+        crop = burst_crop(self.swath_meta, self.burst_num, self.swath_data, self.new_burst_num)
+
+        # Read the metadata from the xml and insert it in the resdata of the burst:
+        # the different steps are readfiles, orbits and crop.
+        self.header = burst_header('master.res')
+        self.insert(readfiles, process='readfiles')
+        self.insert(self.datapoints, process=self.orbit_type)
+        self.insert(crop, process='crop')
+
+
+
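+# Usage sketch (hypothetical SAFE directory): build the metadata object for the first
+# burst of sub-swath 1, vv polarisation, and fill its resdata; swath_resdata is assumed
+# to come from a SwathMeta object:
+#
+#   burst = BurstMeta(path='/data/S1A_example.SAFE', swath_no='1', pol='vv', burst_num=1)
+#   burst.meta_burst(swath_meta=swath_resdata)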
diff --git a/doris_stack/main_code/doris_config.py b/doris_stack/main_code/doris_config.py
new file mode 100644
index 0000000..43520b8
--- /dev/null
+++ b/doris_stack/main_code/doris_config.py
@@ -0,0 +1,27 @@
+'''
+	DorisConfig defines paths that are local to the source tree.
+	They are copied into DorisParameters for use in Doris python scripts.
+'''
+
+import xml.etree.ElementTree as ET
+import sys, os
+
+class DorisConfig(object):
+
+    def __init__(self):
+
+        xml_file = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'install/doris_config.xml')
+        tree = ET.parse(xml_file)
+        settings = tree.getroot()
+
+        self.source_path = settings.find('.source_path').text
+        self.doris_path = settings.find('.doris_path').text
+        self.cpxfiddle_path = settings.find('.cpxfiddle_path').text
+
+        self.job_handler_script = self.source_path + "/doris_stack/main_code/jobHandlerScript"
+        self.function_path = self.source_path + "/doris_stack/functions/"
+        self.main_code_path = self.source_path + "/doris_stack/main_code/"
+
+        # Extend path
+        sys.path.extend([self.function_path])
+        sys.path.extend([self.main_code_path])
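+
+# A minimal doris_config.xml sketch matching the tags read above; the values are
+# placeholders and the root tag name is an assumption:
+#
+#   <doris_config>
+#     <source_path>/home/user/doris</source_path>
+#     <doris_path>/usr/local/bin/doris</doris_path>
+#     <cpxfiddle_path>/usr/local/bin/cpxfiddle</cpxfiddle_path>
+#   </doris_config>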
diff --git a/doris_stack/main_code/doris_main.py b/doris_stack/main_code/doris_main.py
new file mode 100644
index 0000000..60e3c10
--- /dev/null
+++ b/doris_stack/main_code/doris_main.py
@@ -0,0 +1,28 @@
+import argparse
+import os
+import xml.etree.ElementTree as ET
+from doris.doris_stack.main_code.doris_sentinel_1 import DorisSentinel1
+
+"""Doris processing
+argument:  --parameterfilepath, -p
+"""
+
+# parse arguments here
+parser = argparse.ArgumentParser(description='Doris processing.')
+parser.add_argument('--parameterfilepath', '-p', default='./',
+                    help='Path to dorisParameter.py file, this file contains case specific parameters')
+
+args = parser.parse_args()
+
+xml_file = os.path.join(args.parameterfilepath, 'doris_input.xml')
+print('Reading ' + xml_file)
+tree = ET.parse(xml_file)
+settings = tree.getroot()[0]
+
+start_date = settings.find('.start_date').text
+end_date = settings.find('.end_date').text
+master_date = settings.find('.master_date').text
+
+#start doris sentinel1 run
+doris_sentinel_1 = DorisSentinel1()
+doris_sentinel_1.run(args.parameterfilepath, start_date, end_date, master_date)
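+
+# Invocation sketch (hypothetical stack folder): the given path must contain a
+# doris_input.xml with start_date, end_date and master_date set:
+#
+#   python doris_main.py -p /data/stack/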
diff --git a/doris_stack/main_code/doris_parameters.py b/doris_stack/main_code/doris_parameters.py
new file mode 100644
index 0000000..266d7aa
--- /dev/null
+++ b/doris_stack/main_code/doris_parameters.py
@@ -0,0 +1,114 @@
+from datetime import datetime
+import os
+from doris_config import DorisConfig
+import xml.etree.ElementTree as ET
+
+
+class DorisParameters():
+
+    """This class contains all parameters that are used in the execution of test_dat_EDS_5.py.
+       Path parameters are checked for existence;
+       all parameters are printed to stdout.
+    """
+
+    def __init__(self, stack_path):
+
+        grs_config = DorisConfig()
+
+        self.doris_path = grs_config.doris_path
+        self.cpxfiddle_path = grs_config.cpxfiddle_path
+        self.job_handler_script = grs_config.job_handler_script
+        self.function_path = grs_config.function_path
+        self.source_path = grs_config.source_path
+
+        self.verbose = True
+
+        tree = ET.parse(os.path.join(stack_path, 'doris_input.xml'))
+        self.settings = tree.getroot()
+
+        project_path = self._settings_get('.datastack_folder')
+        self.project_path = project_path
+        data_path = self._settings_get('.sar_data_folder')
+        self.data_path = data_path
+        #
+        # used in single_master.py
+        #
+        #
+        # used in test_dat_ESD
+        #
+        # TODO DLE FIX shape path
+        self.shape_dat = self._settings_get('.shape_file_path')
+        self.track_dir = data_path
+        self.stack_path = project_path + '/stack/'
+        self.precise_orbits = self._settings_get('.orbits_folder')
+        # Start date of the datastack. If no end date is given, the search runs until the current date.
+        self.input_files = project_path + '/input_files/'
+
+        self.parallel = self._settings_compare('.parallel', 'yes')
+        self.nr_of_jobs = int(self._settings_get('.cores'))
+        self.initialize_flag = self._settings_compare('.initialize_flag', 'yes')
+
+        self.profile_log = project_path + '/profile_log'
+        self.doris_parallel_flag_dir = project_path + '/.Doris_parallel'
+        self.between_sleep_time = 1
+        self.end_sleep_time = 1
+
+        self.do_coarse_orbits = self._settings_compare('.do_coarse_orbits', 'yes')
+        self.do_deramp = self._settings_compare('.do_deramp', 'yes')
+        self.do_fake_fine_coreg_bursts = self._settings_compare('.do_fake_fine_coreg_bursts', 'yes')
+        self.do_dac_bursts = self._settings_compare('.do_dac_bursts', 'yes')
+        self.do_fake_coreg_bursts = self._settings_compare('.do_fake_coreg_bursts', 'yes')
+        self.do_resample = self._settings_compare('.do_resample', 'yes')
+        self.do_reramp = self._settings_compare('.do_reramp', 'yes')
+        self.do_interferogram = self._settings_compare('.do_interferogram', 'yes')
+        self.do_compref_phase = self._settings_compare('.do_compref_phase', 'yes')
+        self.do_compref_dem = self._settings_compare('.do_compref_dem', 'yes')
+        self.do_coherence = self._settings_compare('.do_coherence', 'yes')
+        self.do_esd = self._settings_compare('.do_esd', 'yes')
+        self.do_network_esd = self._settings_compare('.do_network_esd', 'yes')
+        self.do_ESD_correct = self._settings_compare('.do_ESD_correct', 'yes')
+        self.do_ref_phase = self._settings_compare('.do_ref_phase', 'yes')
+        self.do_ref_dem = self._settings_compare('.do_ref_dem', 'yes')
+        self.do_phasefilt = self._settings_compare('.do_phasefilt', 'yes')
+        self.do_calc_coordinates = self._settings_compare('.do_calc_coordinates', 'yes')
+        self.do_multilooking = self._settings_compare('.do_multilooking', 'yes')
+        self.do_unwrap = self._settings_compare('.do_unwrap', 'yes')
+        #
+        # used in Jobs
+        #
+        # self.job_handler_script = source_path + "/sentinel1/main_code/jobHandlerScript"
+        #
+        # Print parameters, check if paths exist
+        #
+
+        print 'self.shape_dat: ' + self.shape_dat
+        # self._check_path_exists(self.shape_dat)
+        print 'self.track_dir:	' + self.track_dir
+        self._check_path_exists(self.track_dir)
+        print 'self.stack_path:	' + self.stack_path
+        # self._check_path_exists(self.stack_path)
+        print 'self.precise_orbits:	' + self.precise_orbits
+        self._check_path_exists(self.precise_orbits)
+        print 'self.input_files:	' + self.input_files
+        # self._check_path_exists(self.input_files)
+#        print 'self.main_code_folder:	' + self.main_code_folder
+#        self._check_path_exists(self.main_code_folder)
+#        print 'self.script_folder:	' + self.script_folder
+#        self._check_path_exists(self.script_folder)
+        print 'self.nr_of_jobs:	' + str(self.nr_of_jobs)
+        print 'self.initialize_flag:	' + str(self.initialize_flag)
+        print 'self.jobHandlerScript:	' + self.job_handler_script
+        self._check_path_exists(self.job_handler_script)
+
+    def _check_path_exists(self, path):
+        if not(os.path.exists(path)):
+            print 'Error Doris_Parameters: path ' + path + ' does not exist'
+            
+    def _settings_get(self, string):
+        return self.settings.find('*/' + string).text
+
+
+    def _settings_compare(self, string, comp_string):
+        if (self.settings.find('*/' + string).text.lower()==comp_string.lower()):
+            return True
+        return False
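+
+# A doris_input.xml sketch for the tags read via _settings_get above; the leaf tag
+# names come from the code, while the root and group element names and all values
+# are assumptions (the reader only requires the leaves one level below the root):
+#
+#   <doris_input>
+#     <general>
+#       <datastack_folder>/data/stack</datastack_folder>
+#       <sar_data_folder>/data/slc</sar_data_folder>
+#       <shape_file_path>/data/shapes/netherlands.shp</shape_file_path>
+#       <orbits_folder>/data/orbits</orbits_folder>
+#       <parallel>yes</parallel>
+#       <cores>4</cores>
+#       <do_coarse_orbits>yes</do_coarse_orbits>
+#       <!-- ... one do_* switch per processing step ... -->
+#     </general>
+#   </doris_input>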
diff --git a/doris_stack/main_code/doris_parameters_path.py b/doris_stack/main_code/doris_parameters_path.py
new file mode 100644
index 0000000..782536a
--- /dev/null
+++ b/doris_stack/main_code/doris_parameters_path.py
@@ -0,0 +1,12 @@
+import os
+import sys
+
+
+class DorisParameters_Path(object):
+
+    def set(self, doris_parameters_path):
+        if os.path.exists(doris_parameters_path):
+            sys.path.append(os.path.split(doris_parameters_path)[0])
+            print 'dorisparameter path: ' + doris_parameters_path
+        else:
+            print 'dorisparameter path: ' + doris_parameters_path + ' not a valid path'
diff --git a/doris_stack/main_code/doris_profile.py b/doris_stack/main_code/doris_profile.py
new file mode 100644
index 0000000..7341c57
--- /dev/null
+++ b/doris_stack/main_code/doris_profile.py
@@ -0,0 +1,17 @@
+import sys
+import time
+
+class Doris_Profile():
+
+    def __init__(self, logfile, verbose):
+        self.verbose = verbose
+        if(verbose):
+            self.start_time = time.localtime()
+            self.logfile = logfile + "." + time.strftime("%a, %d %b %Y %H:%M:%S +0000",self.start_time).replace(" ", "_").replace(",", "").replace("+", "")
+
+    def log_time_stamp(self, logtxt):
+        if(self.verbose):
+            fileHandle = open(self.logfile, 'a')
+            message = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.localtime()) + ' : ' + logtxt + '\n'
+            fileHandle.write(message)
+            fileHandle.close()
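+
+# Usage sketch (hypothetical log path): timestamps are appended to a log file whose
+# name is derived from the start time of the run:
+#
+#   profile = Doris_Profile('/data/stack/profile_log', True)
+#   profile.log_time_stamp('start')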
diff --git a/doris_stack/main_code/doris_sentinel_1.py b/doris_stack/main_code/doris_sentinel_1.py
new file mode 100644
index 0000000..a94a438
--- /dev/null
+++ b/doris_stack/main_code/doris_sentinel_1.py
@@ -0,0 +1,179 @@
+# This file is created by:
+# Gert Mulder
+# TU Delft
+# 04-07-2016
+##############################
+
+import sys
+from doris.doris_stack.main_code.stack import StackData
+from doris.doris_stack.main_code.dorisparameters import DorisParameters
+from doris.doris_stack.main_code.grs_profile import GRS_Profile
+from doris.doris_stack.main_code.single_master_stack import SingleMaster
+
+class DorisSentinel1(object):
+
+    def run(self, doris_parameters_path, start_date, end_date, master_date):
+
+        print 'start sentinel 1 processing'
+
+        #Set your input variables here. You should use absolute paths.
+        dorisParameters = DorisParameters(doris_parameters_path)
+        # fix DLE
+        sys.path.extend([dorisParameters.function_path])
+
+        profile = GRS_Profile(dorisParameters.profile_log + '_' + str(dorisParameters.nr_of_jobs), dorisParameters.verbose)
+
+        # The shapefile to select the area of interest. You can easily find a shapefile of countries or regions on
+        # the internet, for example via diva-gis.org. A shapefile for the Netherlands can be found in the same folder under shapes.
+        shape_dat = dorisParameters.shape_dat # '/...../test.shp'
+
+        # The folder where SLC images from a specific track are stored. This data will be used as input for the script
+        track_dir = dorisParameters.track_dir # '/......'
+
+        # This is the output folder.
+        stack_path = dorisParameters.stack_path  #'/.....'
+
+        # Folder where the precise or restituted orbits are stored. Precise orbits can be found via the following link:
+        # 'https://qc.sentinel1.eo.esa.int/aux_poeorb/'. The script will assume that there are precise orbits if this folder is
+        # defined, but falls back to other algorithms if needed.
+        precise_orbits = dorisParameters.precise_orbits #'/......'
+        polarisation = dorisParameters.polarisation
+
+        # Here the doris input files are stored. (These come with the python scripts.)
+        input_files = dorisParameters.input_files  #'/......'
+
+
+        profile.log_time_stamp('start')
+        # Create a datastack using the stack function
+        stack = StackData(track_dir=track_dir, shape_dat=shape_dat, start_date=start_date, end_date=end_date,
+                          polarisation=polarisation, path=stack_path, db_type=2, precise_dir=precise_orbits)
+        #profile.log_time_stamp('StackData')
+        # All images which correspond with the start and end date are selected
+        stack.select_image()
+        # Check which of these images are new in the datastack
+        stack.check_new_images(master=master_date)
+        # Then these images are unzipped
+        stack.unpack_image()
+        # Based on the shape file, bursts are selected for the master date
+
+        print('master date is ' + master_date)
+        stack.select_burst(date=master_date)
+        # And also for the other dates the needed bursts are selected
+        stack.extend_burst()
+        # Remove the images which are not fully present
+        stack.remove_incomplete_images()
+        # Now the exact coordinates of the different bursts in the concatenated image are calculated
+        stack.define_burst_coordinates(slaves=True)
+        # Write the datastack to the stack_path folder
+        stack.write_stack(write_path=stack_path,no_data=False)
+        # A few auxiliary functions which are not strictly necessary.
+        # Calculate the coverage of the different sub-swaths
+        stack.swath_coverage()
+        # Write the shapes from the bursts and swaths to a shapefile to check in a GIS program like Qgis.
+        stack.write_shapes()
+        profile.log_time_stamp('stack preparation finished')
+        # Finally delete unzipped images
+        stack.del_unpacked_image()
+
+        # Now use the SingleMaster class (imported at the top) to create a single master interferogram stack
+        processing = SingleMaster(master_date=master_date, start_date=start_date,
+                                                      end_date=end_date, stack_folder=stack_path,
+                                                      input_files=input_files, processing_folder=stack_path)
+
+        processing.remove_finished(step='dinsar')
+        # Copy the necessary files to start processing
+        profile.log_time_stamp('initialize')
+        processing.initialize()
+
+        # Calculate the coarse orbits of individual bursts
+        if(dorisParameters.do_coarse_orbits):
+            profile.log_time_stamp('coarse_orbits')
+            processing.coarse_orbits()
+        # Deramp the data of both slave and master
+        if(dorisParameters.do_deramp):
+            profile.log_time_stamp('deramp')
+            processing.deramp(master=True) # Still needed for coherence...
+        # Fake the use of fine window coregistration, which is officially not needed
+        if(dorisParameters.do_fake_fine_coreg_bursts):
+            profile.log_time_stamp('fake_fine_coreg_bursts')
+            processing.fake_fine_coreg()
+        # Perform DEM coregistration for individual bursts
+        if(dorisParameters.do_dac_bursts):
+            profile.log_time_stamp('dac_bursts')
+            processing.dac_bursts()
+        # Fake the use of coregmp, as the orbits are good enough for coregistration
+        if(dorisParameters.do_fake_coreg_bursts):
+            profile.log_time_stamp('fake_coreg_bursts')
+            processing.fake_coregmp()
+        # Resample individual bursts
+        if(dorisParameters.do_resample):
+            profile.log_time_stamp('resample')
+            processing.resample()
+        # Reramp burst
+        if(dorisParameters.do_reramp):
+            profile.log_time_stamp('reramp')
+            processing.reramp()
+
+        # Perform enhanced spectral diversity for full swath
+        if(dorisParameters.do_esd):
+            profile.log_time_stamp('esd')
+            processing.esd()
+        if (dorisParameters.do_network_esd):
+            profile.log_time_stamp('network esd')
+            processing.network_esd()
+            
+        # Make interferograms for individual bursts
+        if(dorisParameters.do_interferogram):
+            profile.log_time_stamp('interferogram')
+            processing.interferogram()
+        # Calculate earth reference phase from interferograms and combine for full swath
+        if(dorisParameters.do_compref_phase):
+            profile.log_time_stamp('compref_phase')
+            processing.compref_phase()
+        # Calculate height effects from interferograms and combine for full swath
+        if(dorisParameters.do_compref_dem):
+            profile.log_time_stamp('compref_dem')
+            processing.compref_dem()
+        # Remove earth reference phase from interferograms and combine for full swath
+        if(dorisParameters.do_ref_phase):
+            profile.log_time_stamp('ref_phase')
+            processing.ref_phase(concatenate=False)
+        # Remove height effects from interferograms and combine for full swath
+        if(dorisParameters.do_ref_dem):
+            profile.log_time_stamp('ref_dem')
+            processing.ref_dem(concatenate=True, ras=True)
+        # Geocode data
+        if(dorisParameters.do_calc_coordinates):
+            profile.log_time_stamp('calc_coordinates')
+            processing.calc_coordinates()
+
+        # Correct using ramp ifgs based on ESD
+        if(dorisParameters.do_ESD_correct):
+            profile.log_time_stamp('ESD_correct')
+            processing.ESD_correct_ramp(filename='cint_srd.raw')
+        # Compute coherence
+        if(dorisParameters.do_coherence):
+            profile.log_time_stamp('coherence')
+            processing.coherence(ras=True)
+
+        if(dorisParameters.do_phasefilt):
+            profile.log_time_stamp('phasefilt')
+            processing.phasefilt(ras=True)
+        # Multilook filtered image and coherence image
+
+        if(dorisParameters.do_multilooking):
+            profile.log_time_stamp('multilooking')
+            processing.multilook(step='coherence')
+            processing.multilook(step='filtphase')
+        # Unwrap image
+        # processing.del_process('unwrap', type='ifgs', images=True)
+        if(dorisParameters.do_unwrap):
+            profile.log_time_stamp('unwrap')
+            processing.unwrap()
+
+        profile.log_time_stamp('end')
+
+        print 'end sentinel 1 processing'
+
diff --git a/doris_stack/main_code/dorisparameters.py b/doris_stack/main_code/dorisparameters.py
new file mode 100644
index 0000000..ba8cadc
--- /dev/null
+++ b/doris_stack/main_code/dorisparameters.py
@@ -0,0 +1,120 @@
+from datetime import datetime
+import os
+from doris.doris_stack.main_code.grs_config import GrsConfig
+import xml.etree.ElementTree as ET
+
+
+class DorisParameters():
+
+    """This class contains all parameters that are used in the execution of test_dat_EDS_5.py.
+       Path parameters are checked for existence;
+       all parameters are printed to stdout.
+    """
+
+    def __init__(self, stack_path):
+
+        grs_config = GrsConfig()
+
+        self.doris_path = grs_config.doris_path
+        self.cpxfiddle_path = grs_config.cpxfiddle_path
+        self.job_handler_script = grs_config.job_handler_script
+        self.function_path = grs_config.function_path
+        self.source_path = grs_config.source_path
+
+        self.verbose = True
+
+        tree = ET.parse(os.path.join(stack_path, 'doris_input.xml'))
+        self.settings = tree.getroot()
+
+        archive_path = self._settings_get('.sar_data_folder')
+        self.archive_path = archive_path
+        project_path = self._settings_get('.datastack_folder')
+        self.project_path = project_path
+        data_path = self._settings_get('.sar_data_folder')
+        self.data_path = data_path
+        polarisation = self._settings_get('.polarisation')
+        self.polarisation = polarisation
+        track = self._settings_get('.track')
+        self.track = track
+        direction = self._settings_get('.direction')
+        self.direction = direction
+        #
+        # used in single_master.py
+        #
+        #
+        # used in test_dat_ESD
+        #
+        # TODO DLE FIX shape path
+        self.shape_dat = self._settings_get('.shape_file_path')
+        self.track_dir = data_path
+        self.stack_path = os.path.join(project_path, 'stack')
+        self.precise_orbits = self._settings_get('.orbits_folder')
+        # Start date of the datastack. If no end date is given, the search runs until the current date.
+        self.input_files = os.path.join(project_path, 'input_files')
+
+        self.parallel = self._settings_compare('.parallel', 'yes')
+        self.nr_of_jobs = int(self._settings_get('.cores'))
+
+        self.profile_log = project_path + '/profile_log'
+        self.doris_parallel_flag_dir = project_path + '/.Doris_parallel'
+        self.between_sleep_time = 1
+        self.end_sleep_time = 1
+
+        self.do_coarse_orbits = self._settings_compare('.do_coarse_orbits', 'yes')
+        self.do_deramp = self._settings_compare('.do_deramp', 'yes')
+        self.do_fake_fine_coreg_bursts = self._settings_compare('.do_fake_fine_coreg_bursts', 'yes')
+        self.do_dac_bursts = self._settings_compare('.do_dac_bursts', 'yes')
+        self.do_fake_coreg_bursts = self._settings_compare('.do_fake_coreg_bursts', 'yes')
+        self.do_resample = self._settings_compare('.do_resample', 'yes')
+        self.do_reramp = self._settings_compare('.do_reramp', 'yes')
+        self.do_interferogram = self._settings_compare('.do_interferogram', 'yes')
+        self.do_compref_phase = self._settings_compare('.do_compref_phase', 'yes')
+        self.do_compref_dem = self._settings_compare('.do_compref_dem', 'yes')
+        self.do_coherence = self._settings_compare('.do_coherence', 'yes')
+        self.do_esd = self._settings_compare('.do_esd', 'yes')
+        self.do_network_esd = self._settings_compare('.do_network_esd', 'yes')
+        self.do_ESD_correct = self._settings_compare('.do_ESD_correct', 'yes')
+        self.do_ref_phase = self._settings_compare('.do_ref_phase', 'yes')
+        self.do_ref_dem = self._settings_compare('.do_ref_dem', 'yes')
+        self.do_phasefilt = self._settings_compare('.do_phasefilt', 'yes')
+        self.do_calc_coordinates = self._settings_compare('.do_calc_coordinates', 'yes')
+        self.do_multilooking = self._settings_compare('.do_multilooking', 'yes')
+        self.do_unwrap = self._settings_compare('.do_unwrap', 'yes')
+        #
+        # used in Jobs
+        #
+        # self.job_handler_script = source_path + "/sentinel1/main_code/jobHandlerScript"
+        #
+        # Print parameters, check if paths exist
+        #
+
+        print 'self.shape_dat: ' + self.shape_dat
+        # self._check_path_exists(self.shape_dat)
+        print 'self.track_dir:	' + self.track_dir
+        self._check_path_exists(self.track_dir)
+        print 'self.stack_path:	' + self.stack_path
+        # self._check_path_exists(self.stack_path)
+        print 'self.precise_orbits:	' + self.precise_orbits
+        self._check_path_exists(self.precise_orbits)
+        print 'self.input_files:	' + self.input_files
+        # self._check_path_exists(self.input_files)
+#        print 'self.main_code_folder:	' + self.main_code_folder
+#        self._check_path_exists(self.main_code_folder)
+#        print 'self.script_folder:	' + self.script_folder
+#        self._check_path_exists(self.script_folder)
+        print 'self.nr_of_jobs:	' + str(self.nr_of_jobs)
+        print 'self.jobHandlerScript:	' + self.job_handler_script
+        self._check_path_exists(self.job_handler_script)
+
+    def _check_path_exists(self, path):
+        if not(os.path.exists(path)):
+            print 'Error Doris_Parameters: path ' + path + ' does not exist'
+            
+    def _settings_get(self, string):
+        return self.settings.find('*/' + string).text
+
+
+    def _settings_compare(self, string, comp_string):
+        if (self.settings.find('*/' + string).text.lower()==comp_string.lower()):
+            return True
+        return False
diff --git a/doris_stack/main_code/grs_config.py b/doris_stack/main_code/grs_config.py
new file mode 100644
index 0000000..80e61fc
--- /dev/null
+++ b/doris_stack/main_code/grs_config.py
@@ -0,0 +1,27 @@
+'''
+	GrsConfig defines paths that are local to the source tree.
+	They are copied into DorisParameters for use in Doris python scripts
+'''
+
+import xml.etree.ElementTree as ET
+import sys, os
+
+class GrsConfig(object):
+
+    def __init__(self):
+
+        xml_file = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'install/doris_config.xml')
+        tree = ET.parse(xml_file)
+        settings = tree.getroot()
+
+        self.source_path = settings.find('.source_path').text
+        self.doris_path = settings.find('.doris_path').text
+        self.cpxfiddle_path = settings.find('.cpxfiddle_path').text
+
+        self.job_handler_script = self.source_path + "/doris_stack/main_code/jobHandlerScript"
+        self.function_path = self.source_path + "/doris_stack/functions/"
+        self.main_code_path = self.source_path + "/doris_stack/main_code/"
+
+        # Extend path
+        sys.path.extend([self.function_path])
+        sys.path.extend([self.main_code_path])
diff --git a/doris_stack/main_code/grs_profile.py b/doris_stack/main_code/grs_profile.py
new file mode 100644
index 0000000..f435c7a
--- /dev/null
+++ b/doris_stack/main_code/grs_profile.py
@@ -0,0 +1,17 @@
+import sys
+import time
+
+class GRS_Profile():
+
+    def __init__(self, logfile, verbose):
+        self.verbose = verbose
+        if(verbose):
+            self.start_time = time.localtime()
+            self.logfile = logfile + "." + time.strftime("%a, %d %b %Y %H:%M:%S +0000",self.start_time).replace(" ", "_").replace(",", "").replace("+", "")
+
+    def log_time_stamp(self, logtxt):
+        if(self.verbose):
+            fileHandle = open(self.logfile, 'a')
+            message = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.localtime()) + ' : ' + logtxt + '\n'
+            fileHandle.write(message)
+            fileHandle.close()
diff --git a/doris_stack/main_code/image.py b/doris_stack/main_code/image.py
new file mode 100644
index 0000000..a3b3402
--- /dev/null
+++ b/doris_stack/main_code/image.py
@@ -0,0 +1,124 @@
+# This file defines a class for metadata objects of Sentinel-1 images. A large part of the work depends on the
+# Python readers from the TOPS toolbox.
+import os
+import warnings
+import zipfile
+import copy
+
+from doris.doris_stack.main_code.swath import SwathMeta
+
+
+class ImageMeta(object):
+    # Object for image files for sentinel data
+
+    def __init__(self, path='', pol='all', swath_no=['1','2','3']):
+        # Initialize function variables
+
+        # This will contain a list of swath objects
+        self.swaths = []
+        self.pol = pol
+        self.swath_no = swath_no
+
+        # The following contain the path of xml and data files
+        self.swaths_xml = []
+        self.swaths_data = []
+        self.image_kml = ''
+
+        # This variable contains the convex hull of all swaths together
+        self.metadata = []
+        self.coverage = []
+
+        # The following variables store data on which processing steps are performed and the results of these steps.
+        self.steps = []
+        self.steps_res = []
+
+        # The following variable is to check the total number of bursts for this image. This is used to remove time
+        # slots with less bursts.
+        self.burst_no = 0
+
+        # Check if the data is unzipped or not. If unzipped run the further initialization.
+        self.zip_path = ''
+        self.unzip_path = ''
+        if path.endswith('.zip'):
+            self.zip_path = path
+        else:
+            self.unzip_path = path
+
+        # orbit information for this image
+        self.orbit = ''
+
+    ######################################################
+
+    def init_unzipped(self, unzip_path=''):
+        # This function creates an image object and searches for available data and xml files. It gives an error when
+        # either the path does not exist, no data or xml files can be found or the data and xml files do not match.
+        # It is possible to choose one of the available polarisations or swaths using the pol and swath variables.
+        xml_dir = os.path.join(self.unzip_path, 'annotation')
+        xml = [f for f in os.listdir(xml_dir) if os.path.isfile(os.path.join(xml_dir, f))]
+
+        data_dir = os.path.join(self.unzip_path, 'measurement')
+        data = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
+
+        # Select polarisation
+        if any(s in self.pol for s in ('hh','vv','hv','vh')):
+            xml = [x for x in xml if x[12:14] in self.pol]
+            data = [x for x in data if x[12:14] in self.pol]
+        elif self.pol != 'all':
+            warnings.warn('Polarisation not recognized, using default (all)')
+
+        # Select swaths
+        xml = sorted([os.path.join(self.unzip_path,'annotation',x) for x in xml if x[6] in self.swath_no])
+        data = sorted([os.path.join(self.unzip_path,'measurement',x) for x in data if x[6] in self.swath_no])
+
+        # Initialize function values
+        dat = [os.path.basename(d) for d in data]
+        self.swaths_xml = [x for x in xml if os.path.basename(x)[:-4] + '.tiff' in dat]
+        self.swaths_data = data
+
+        # Check if the data is there and if the filenames coincide.
+        if len(self.swaths_xml) == 0:
+            warnings.warn('There are no xml files')
+
+
+    def unzip(self, unzip_path=''):
+        # This function unzips the corresponding image, based on some requirements.
+        # Note that this is a backup function, while most unpacking is done by load_shape_unzip.py
+        if not os.path.exists(self.unzip_path):
+            try:
+                zip = zipfile.ZipFile(self.zip_path)
+                # Extract next to the zip file, as set in __init__
+                path = os.path.abspath(os.path.join(self.zip_path, os.pardir))
+                zip.extractall(path)
+                return True
+            except:
+                print('Failed to unpack!')
+                return False
+        else:
+            return True
+
+    def meta_swath(self, precise_folder=''):
+        # This function reads and stores metadata of different swaths in the swath objects.
+        orbits = []
+        orb_type = ''
+
+        if not self.swaths_data:
+            self.init_unzipped()
+
+        if not self.swaths:
+            for i in range(len(self.swaths_data)):
+                data = self.swaths_data[i]
+                xml = os.path.join(os.path.dirname(os.path.dirname(data)), 'annotation', os.path.basename(data)[:-5] + '.xml')
+
+                # Initialize swath and load data from xml file
+                swath = SwathMeta(xml=xml, data=data)
+                swath.meta_swath()
+
+                # Calculate the orbits for this swath and reuse it for other swaths if it is already calculated
+                if not orbits or not orb_type:
+                    orbits, orb_type = swath.orbits_swath(precise_folder=precise_folder)
+                else:
+                    swath.orbits = copy.deepcopy(orbits)
+                    swath.orbit_type = orb_type
+
+                # Define the resdata for the individual burst. And append the swath to the image object.
+                swath.meta_burst()
+                self.swaths.append(swath)
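+
+# Usage sketch (hypothetical paths): read the metadata of all swaths of an unzipped
+# Sentinel-1 image, reusing precise orbits where available:
+#
+#   image = ImageMeta(path='/data/S1A_example.SAFE', pol='vv')
+#   image.meta_swath(precise_folder='/data/orbits')
+#   print len(image.swaths)  # one SwathMeta object per selected swath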
diff --git a/doris_stack/main_code/jobHandlerScript b/doris_stack/main_code/jobHandlerScript
new file mode 100755
index 0000000..58d6122
--- /dev/null
+++ b/doris_stack/main_code/jobHandlerScript
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+
+echo ${3} >> ${1}/${2}".started"
+if [[ "${4}" == "True" ]]; then
+      echo "DORIS PARALLEL:"
+      export PYTHONPATH="${5}:$PYTHONPATH"
+      echo "PYTHONPATH:"
+      echo $PYTHONPATH
+      echo "PATH:"
+      echo $PATH
+      echo ${3}" "${6}" "${7}" "${8}" "${9}" "${10}" "${11}" "${12}" "${13}" "${14}" "${15}" "${16}" "${17}" "${18}" started"
+      date +%T
+      pwd
+      echo >> "./job.started"
+fi
+return_value=1
+index=0
+while [[ $return_value -ne 0 && $index -le 5 ]]; do
+    echo "INDEX = "$index
+    if [[ $index -gt 0 ]]; then
+        echo "DORIS PARALLEL Execution FAILED, retry nr "$index
+        echo "DORIS PARALLEL "${3}" "${6}" "${7}" "${8}" "${9}" "${10}" "${11}" "${12}" "${13}" "${14}" "${15}" "${16}" "${17}" "${18}" RESTARTED"
+    fi
+    ${6} ${7} ${8} ${9} ${10} ${11} ${12} ${13} ${14} ${15} ${16} ${17} ${18} 2>&1 | tee job_${index}_${2}.log # start job
+    return_value=${PIPESTATUS[0]} # exit status of the job itself, not of tee
+    if [ "${3}" == "True" ]; then
+        echo "DORIS PARALLEL return value is:"
+        echo $return_value
+    fi
+    ((index++))
+done
+echo ${3} >> ${1}/${2}".finished"
+if [ "${4}" == "True" ]; then
+      echo "DORIS PARALLEL:"
+      echo ${3}" "${6}" "${7}" "${8}" "${9}" "${10}" "${11}" "${12}" "${13}" "${14}" "${15}" "${16}" "${17}" "${18}" finished"
+      date +%T
+      pwd
+      echo >> "./job.finished"
+fi
+
+
+
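+# Argument sketch (as passed by jobs.py):
+#   $1 flag directory    $2 job id       $3 unique job name
+#   $4 verbose flag      $5 python path  $6 and up: command to execute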
diff --git a/doris_stack/main_code/jobs.py b/doris_stack/main_code/jobs.py
new file mode 100644
index 0000000..828fab9
--- /dev/null
+++ b/doris_stack/main_code/jobs.py
@@ -0,0 +1,119 @@
+import os
+import time
+
+class Jobs(object):
+    """The Jobs class runs a list of jobs in parallel.
+       It starts the maximum number of jobs from the list in parallel and monitors for job completion.
+       When jobs are finished, new jobs are started until the maximum is reached again.
+       This is repeated until all jobs from the list are processed.
+
+       This class executes a jobHandlerScript to execute a job.
+       The job handler script sets job start and job finished flags in a flag directory on the system.
+       In verbose mode the job handler script prints status info to stdout
+
+       This class generates directories on the system that contain start and finish flags for each job that is run
+       Old flag directories are moved to timestamped directories
+
+       Methods:
+           Run: executes list of jobs
+           """
+    def __init__(self, max_jobs, dorisParameters):
+        """max_jobs: maximum number of jobs to run simultaniously
+           verbose: print status to stdout during execution of job list"""
+        self.max_jobs = max_jobs
+        self.pid = str(os.getpid())
+        self.doris_parameters = dorisParameters
+        self.verbose = self.doris_parameters.verbose
+        self.flag_dir_root = self.doris_parameters.doris_parallel_flag_dir
+        self.between_sleep_time = self.doris_parameters.between_sleep_time
+        self.end_sleep_time = self.doris_parameters.end_sleep_time
+        self.python_path = os.path.dirname(self.doris_parameters.source_path)
+        self.jobs_todo = []
+        self.jobs_active = []
+        self.jobs_finished = []
+        self.flag_dir = ''
+
+    # class-level counter used by _new_id to generate unique job ids
+    id = 0
+
+    def _new_id(self):
+        Jobs.id = Jobs.id + 1
+        return Jobs.id
+
+    def _set_id(self, job_list):
+        for job_dict in job_list:
+            job_dict['id'] = self._new_id()
+
+    def _create_flag_dir(self):
+        self.flag_dir = self.flag_dir_root + "." + time.asctime(time.localtime(time.time())).replace(" ", "_")
+        os.mkdir(self.flag_dir)
+
+
+    def _cleanup_flag_dir(self):
+        #
+        # cleans stuff from current run, if not verbose
+        #
+        if(not (self.verbose)):
+            os.system("rm -rf " + self.flag_dir)
+
+    def _get_job_id(self, job):
+        #
+        # returns machine level unique job Id
+        return job['path'].replace("/","_") + "." + job['command'].replace("/","_").replace(" ","-") + self.pid
+
+
+    def _start_jobs(self):
+        #
+        # starts a number of jobs
+        # returns list of unstarted and list of started jobs
+        #
+        jobs_to_start_count = min((self.max_jobs - len(self.jobs_active)), len(self.jobs_todo))
+        for index in range(0, jobs_to_start_count):
+            job = self.jobs_todo.pop(0)
+            os.chdir(job['path'])
+            os.system(self.doris_parameters.job_handler_script + " "
+                      + self.flag_dir + " "
+                      + str(job['id']) + " "
+                      + self._get_job_id(job) + " "
+                      + str(self.verbose) + " "
+                      + self.python_path + " "
+                      + job['command'] + " &")
+            self.jobs_active.append(job)
+        return
+
+    def _check_active_jobs(self):
+        #
+        # returns from the list of jobs, the jobs that are started, but not finished
+        #
+        jobs_active = []
+        for job in self.jobs_active:  # find active jobs
+            this_job_started = False
+            this_job_ready = False
+            for file in os.listdir(self.flag_dir):
+                if str(job['id']) + ".finished" == file:
+                    this_job_ready = True
+#                if str(job['id']) + ".started" == file:
+#                    this_job_started = True
+#            this_job_active = this_job_started & (not (this_job_ready))
+#            if (this_job_active):
+            if (not (this_job_ready)):
+                jobs_active.append(job)
+        self.jobs_active = jobs_active
+        return
+
+    def run(self, job_list):
+        """executes joblist in parallel
+        jobList: list of jobs, containing execution path and command to be executed
+        """
+        self._create_flag_dir()
+        self.jobs_todo = job_list
+        self._set_id(self.jobs_todo)
+        self._start_jobs()
+        while len(self.jobs_active):
+            if(self.verbose):
+                print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + " jobs busy"
+            time.sleep(self.between_sleep_time)
+            self._check_active_jobs()
+            self._start_jobs()
+        if (self.verbose):
+            print time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + " jobs finished"
+        time.sleep(self.end_sleep_time)
+        self._cleanup_flag_dir()
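+
+# Usage sketch (hypothetical paths and commands): each job is a dict holding the
+# working directory and the shell command to run there; run() blocks until all
+# jobs have finished:
+#
+#   jobs = Jobs(max_jobs=4, dorisParameters=dorisParameters)
+#   jobs.run([{'path': '/data/stack/20160101', 'command': 'doris coarseorb.dorisin'},
+#             {'path': '/data/stack/20160113', 'command': 'doris coarseorb.dorisin'}])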
diff --git a/doris_stack/main_code/resdata.py b/doris_stack/main_code/resdata.py
new file mode 100644
index 0000000..cd28c96
--- /dev/null
+++ b/doris_stack/main_code/resdata.py
@@ -0,0 +1,404 @@
+import warnings
+import os
+import collections
+
+
+class ResData(object):
+    # This class holds the metadata of a doris datafile and processing chain, and is capable of reading from and writing to a
+    # .res file used by the doris software.
+
+    def __init__(self,filename='',type=''):
+        # Initialize variables
+
+        # Filename of resfile and type (single, interferogram)
+        self.res_path = []
+        self.res_type = ''
+
+        # Processes, process_control and header of resfile
+        self.processes = collections.OrderedDict()
+        self.process_control = {}
+        self.process_timestamp = {}
+        self.process_time = {}
+        self.header = {}
+
+        #####################################################
+
+        # Create a ResData object (single/interferogram)
+        if type not in ['single','interferogram'] and not filename:
+            warnings.warn('Define whether the result data is single or interferogram')
+            return
+        else:
+            self.res_type = type
+        if filename:
+            if not os.path.exists(filename):
+                warnings.warn('This filename does not exist: ' + filename)
+            else:
+                self.res_path = filename
+                self.res_read()
+        else:
+            if type == 'single':
+                self.process_control = collections.OrderedDict([('readfiles', '0'),('leader_datapoints', '0'), ('precise_orbits', '0'), ('crop', '0'), ('sim_amplitude', '0'), ('master_timing' , '0'),
+                                       ('oversample', '0'), ('resample', '0') , ('filt_azi', '0'), ('filt_range', '0'), ('NOT_USED' , '0')])
+            elif type == 'interferogram':
+                self.process_control = collections.OrderedDict([('coarse_orbits','0'),('coarse_correl','0'),('fine_coreg','0'),('timing_error','0'),('dem_assist','0'),
+                                   ('comp_coregpm','0'),('interfero','0'),('coherence','0'),('comp_refphase','0'),('subtr_refphase','0'),
+                                   ('comp_refdem','0'),('subtr_refdem','0'),('filtphase','0'),('unwrap','0'),('est_orbits','0'),('slant2h','0'),
+                                   ('geocoding','0'),('dinsar','0'),('NOT_USED2','0')])
+
+    def res_read(self):
+        self.meta_reader()
+        self.process_reader()
+
+    def meta_reader(self):
+        # This function reads the header and the process control block of the res file.
+        with open(self.res_path) as resfile:
+            splitter = ':'
+            temp = collections.OrderedDict()
+            row = 0
+            for line in resfile:
+                try:
+                    ## Filter out rubbish
+                    if line == '\n':
+                        continue
+                    elif 'Start_process_control' in line:
+                        self.header = temp
+                        temp = collections.OrderedDict()
+                    elif 'End_process_control' in line:
+                        self.process_control = temp
+                        break
+                    elif splitter in line and line[0] != '|' and line[0] != '\t':
+                        # Split line if possible and add to dictionary
+                        l_split = line.split(splitter)
+                        temp[l_split[0].strip()] = l_split[1].strip()
+                    else:
+                        name = 'row_' + str(row)
+                        row += 1
+                        temp[name] = [line]
+
+                except:
+                    print 'Error occurred at line: ' + line
+
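+    # Usage sketch (hypothetical file name): read an existing res file and access one
+    # of its processing steps:
+    #
+    #   res = ResData(filename='master.res', type='single')
+    #   res.processes['readfiles']  # OrderedDict with the readfiles results
+    #   res.write('master_copy.res')
+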
+    def process_reader(self,processes = ''):
+        # This function reads arbitrary processes based on the standard buildup of processes in res files.
+        # leader_datapoints can be one of the processes, although it will not appear in the process_control of a .res file.
+
+        if not processes:
+            processes = self.process_control.keys()
+
+        processes.append('leader_datapoints')
+        process = ''
+
+        with open(self.res_path) as resfile:
+            # Start at row zero and with empty list
+            temp = collections.OrderedDict()
+            row = 0
+            line_no = -1
+            timestamp = False
+            timestamp_line = 0
+            for line in resfile:
+                try:
+                    line_no += 1
+                    # Filter out rubbish
+                    if '|' in line[0]:
+                        continue
+                    elif '**' in line:
+                        continue
+                    elif line == '\n':
+                        continue
+
+                    # Check if timestamp
+                    if ' *===========' in line:
+                        # First line of time stamp
+                        temp = collections.OrderedDict()
+                        timestamp = True
+                        row = 0
+                        continue
+                    elif ' *-----------' in line:
+                        timestamp = False
+                        timestamp_data = temp
+                        timestamp_line = line_no + 5
+                        continue
+
+                    # Check if process
+                    if '*' in line[0]:
+                        if line.replace('*_Start_', '').split(':')[0].strip() in processes:
+                            process = line.replace('*_Start_', '').split(':')[0].strip()
+                            temp = collections.OrderedDict()
+                            row = 0; space = [0]; space_r = [0,0,0,0,0,0,0,0]
+
+                            # Finally save the timestamp if it exists
+                            if line_no == timestamp_line:
+                                self.process_timestamp[process] = timestamp_data
+                            else:
+                                self.process_timestamp[process] = ''
+
+                        elif line.replace('* End_', '').split(':')[0] == process:
+                            self.processes[process] = temp
+                            temp = collections.OrderedDict()
+                            process = ''
+                        continue
+
+                    # Save line
+                    if timestamp is True:
+                        # Save rows in timestamp
+                        row_name = 'row_' + str(row)
+                        temp[row_name] = line
+                        if row == 1:
+                            self.process_time[process] = line.split(':', 1)[1].strip()
+                        row += 1
+                    elif process:
+                        # If we are in a process output line
+                        # Split line using ':' , '=' or spaces (tables)
+                        # variable space and space row define the future spacing in every processing step in a res file.
+
+                        if process == 'coarse_orbits':
+                            # Add some code for a strange exception in coarse_orbits
+                            if '//' in line:
+                                temp[line.split()[0]] = line.split()[1:]
+                            else:
+                                l_split = line.replace('=',':').split(':')
+                                temp[l_split[0].strip()] = l_split[1].strip()
+
+                        elif ':' in line:
+                            l_split = line.split(':',1)
+                            temp[l_split[0].strip()] = l_split[1].strip()
+                        else:
+                            # If the line does not contain a : it is likely a table.
+                            l_split = line.replace('\t',' ').split()
+                            row_name = 'row_' + str(row)
+                            temp[row_name] = [l_split[i].strip() for i in range(len(l_split))]
+                            row += 1
+
+                except:
+                    print 'Error occurred at line: ' + line
+
+    def process_spacing(self,process=''):
+
+        spacing = 0
+        table_spacing = [0,0,0,0,0,0,0]
+
+        dat = self.processes[process]
+
+        for key in dat.keys():
+            spacing = max(len(key) + 8, spacing)
+
+            if key.startswith('row'):
+                n=0
+                for val in self.processes[process][key]:
+                    table_spacing[n] = max(len(val) + 3, table_spacing[n])
+                    n += 1
+        spacing = [spacing]
+
+        return spacing, table_spacing
+
+    def del_process(self,process=''):
+        # function deletes one or multiple processes from the corresponding res file
+
+        if isinstance(process, basestring): # one process
+            if not process in self.process_control.keys():
+                warnings.warn('The requested process does not exist! (or processes are not read yet, use self.process_reader): ' + str(process))
+                return
+        elif isinstance(process, list): # If we use a list
+            for proc in process:
+                if not proc in self.process_control.keys():
+                    warnings.warn('The requested process does not exist! (or processes are not read yet, use self.process_reader): ' + str(proc))
+                    return
+        else:
+            warnings.warn('process should contain either a string of one process or a list of multiple processes: ' + str(process))
+
+        # Now remove the process and write the file again.
+        if isinstance(process, basestring): # Only one process should be removed
+            self.process_control[process] = '0'
+            del self.processes[process]
+        else:
+            for proc in process:
+                self.process_control[proc] = '0'
+                del self.processes[proc]
+
+    def write(self,new_filename=''):
+        # Here all the available information acquired is written to a new resfile. Generally if information is manually
+        # added or removed and the file should be created or created again. (For example the readfiles for Sentinel 1
+        # which are not added yet..)
+
+        if not new_filename and not self.res_path:
+            warnings.warn('Please specify filename: ' + str(new_filename))
+            return
+        elif not new_filename:
+            new_filename = self.res_path
+        if not self.process_control or not self.processes:
+            warnings.warn('Every result file needs at least a process control and one process to make any sense: ' + str(new_filename))
+
+        # Open file and write header, process control and processes
+        self.res_path = new_filename
+        f = open(new_filename,"w")
+
+        # Write the header:
+        if self.header:
+            spacing = [40]
+            for key in self.header.keys():
+                if 'row' in key:       # If it is just a string
+                    f.write(self.header[key][0])
+                else:                   # If the key should included
+                    f.write((key + ':').ljust(spacing[0]) + self.header[key] + '\n')
+
+        # Write the process control
+        for i in range(3):
+            f.write('\n')
+        f.write('Start_process_control\n')
+        for process in self.process_control.keys():
+            if process != 'leader_datapoints':  # leader_datapoints is left out in process control
+                f.write((process + ':\t\t') + str(self.process_control[process]) + '\n')
+        f.write('End_process_control\n')
+
+        # Then loop through all the processes
+        for process in [p for p in self.processes.keys()]:
+            # First check for a timestamp and add it if needed.
+            if self.process_timestamp[process]:
+                for i in range(2):
+                    f.write('\n')
+                f.write('   *====================================================================* \n')
+                for key in self.process_timestamp[process].keys():
+                    f.write(self.process_timestamp[process][key])
+                f.write('   *--------------------------------------------------------------------* \n')
+
+            # Then write the process itself
+            if process == 'coarse_orbits':
+                spacing = [45]
+                spacing_row = [15,10,15]
+            else:
+                spacing, spacing_row = self.process_spacing(process)
+            data = self.processes[process]
+
+            for i in range(3):
+                f.write('\n')
+            f.write('******************************************************************* \n')
+            f.write('*_Start_' + process + ':\n')
+            f.write('******************************************************************* \n')
+
+            for line_key in self.processes[process].keys():
+                if 'row' in line_key:  # If it is a table or consists of several different parts
+                    line = ''.join([(' ' + data[line_key][i]).replace(' -','-').ljust(spacing_row[i]) for i in range(len(data[line_key]))])
+                    f.write(line + '\n')
+                elif process == 'coarse_orbits':  # the coarse orbits output is different from the others.
+                    if 'Control point' in line_key: # Special case coarse orbits...
+                        f.write((line_key + ' =').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+                    elif not isinstance(data[line_key], basestring): # Another special case
+                        f.write(line_key.ljust(spacing_row[0]) + (data[line_key][0]).ljust(spacing_row[1]) +
+                                data[line_key][1].ljust(spacing_row[2]) + ' '.join(data[line_key][2:]) + '\n')
+                    elif isinstance(data[line_key], basestring): # Handle as in normal cases
+                        f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+                else: # If it consists of two parts
+                    f.write((line_key + ':').ljust(spacing[0]) + str(self.processes[process][line_key]) + '\n')
+
+            f.write('******************************************************************* \n')
+            f.write('* End_' + process + ':_NORMAL\n')
+            f.write('******************************************************************* \n')
+        f.close()
+
+        # Read the locations in the new file
+        self.process_reader()
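+
+    # A minimal sketch of rewriting a res file (the path is hypothetical and
+    # assumes header, process_control and processes were read before):
+    #
+    #   res.processes['crop']['Data_output_file'] = 'master.raw'
+    #   res.write(new_filename='/tmp/master.res')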
+
+    def insert(self,data,process,variable=''):
+        # This function inserts a variable or a process which does not exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if process not in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is added
+        if not variable:
+            if self.process_control[process] == '1':
+                warnings.warn('This process already exists! Use the update function: ' + str(process))
+                return
+            elif self.process_control[process] == '0':
+                self.process_control[process] = '1'
+                self.processes[process] = data
+                self.process_timestamp[process] = ''
+
+        # A variable is added
+        if variable:
+            if variable in self.processes[process].keys():
+                warnings.warn('This variable already exists! Use the update function: ' + str(variable))
+                return
+            else:
+                self.processes[process][variable] = data
+
+    def delete(self,process,variable=''):
+        # This function deletes a variable or a process which does exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if process not in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # If a full process is deleted
+        if not variable:
+            if self.process_control[process] == '0':
+                warnings.warn('This process does not exist: ' + str(process))
+                return
+            elif self.process_control[process] == '1':
+                self.process_control[process] = '0'
+                del self.processes[process]
+                del self.process_timestamp[process]
+
+        # A variable is deleted
+        if variable:
+            if not variable in self.processes[process].keys():
+                warnings.warn('This variable does not exist: ' + str(variable))
+                return
+            else:
+                del self.processes[process][variable]
+
+    def update(self,data,process,variable=''):
+        # This function updates a variable or a process which does exist at the moment
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if not process in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # A full process is updated
+        if not variable:
+            if self.process_control[process] == '1':
+                self.processes[process] = data
+            elif self.process_control[process] == '0':
+                warnings.warn('This process does not exist. Use the insert function: ' + str(process))
+                return
+        # A variable is updated
+        if variable:
+            if variable in self.processes[process].keys():
+                self.processes[process][variable] = data
+            else:
+                warnings.warn('This variable does not exist. Use the insert function: ' + str(variable))
+                return
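+
+    # Sketch of the insert/update/request trio (the dict 'coreg' is a
+    # hypothetical OrderedDict of process values; request is defined below):
+    #
+    #   res.insert(coreg, 'fine_coreg')    # add a process that is absent
+    #   res.update(coreg, 'fine_coreg')    # overwrite an existing process
+    #   win = res.request('fine_coreg', variable='Number_of_correlation_windows')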
+
+    def request(self,process,variable=''):
+        # This function returns a variable or a process, if it exists
+        processes = self.process_control.keys()
+        processes.extend(['header','leader_datapoints'])
+
+        if not process in processes:
+            warnings.warn('This process does not exist for this datatype: ' + str(process))
+            return
+
+        # A full process is requested
+        if not variable:
+            if self.process_control[process] == '1':
+                data = self.processes[process]
+            elif self.process_control[process] == '0':
+                warnings.warn('This process does not exist: ' + str(process))
+                return
+        # A variable is requested
+        if variable:
+            if variable in self.processes[process].keys():
+                data = self.processes[process][variable]
+            else:
+                warnings.warn('This variable does not exist: ' + str(variable))
+                return
+
+        return data
\ No newline at end of file
diff --git a/doris_stack/main_code/single_master_stack.py b/doris_stack/main_code/single_master_stack.py
new file mode 100644
index 0000000..11f49bc
--- /dev/null
+++ b/doris_stack/main_code/single_master_stack.py
@@ -0,0 +1,2411 @@
+import os
+import numpy as np
+from datetime import datetime
+from collections import OrderedDict
+import copy
+from copy import deepcopy
+from doris.doris_stack.main_code.resdata import ResData
+from doris.doris_stack.main_code.dorisparameters import DorisParameters
+import collections
+from jobs import Jobs
+from doris.doris_stack.functions.baselines import baselines
+
+
+class SingleMaster(object):
+
+    def __init__(self, start_date='', end_date='', master_date='', stack_folder='', processing_folder='',
+                 input_files=''):
+        # This function loads in a datastack to create a single master stack. Optional are the start date, end date and
+        # master date. If they are not defined all dates will be loaded. The master date can be loaded later using the
+        # master function. If you want a random master value, choose master_date='random'
+        # Dates should be given as 'yyyy-mm-dd'. If stack_read is True, information is read from the stack_folder;
+        # otherwise the datastack from a StackData object should be given as input.
+
+        Jobs.id = 0
+
+        doris_parameters = DorisParameters(os.path.dirname(processing_folder))  # (assuming it is stored in the stack folder)
+        self.doris_parameters = doris_parameters
+
+        if not start_date:
+            self.start_date = doris_parameters.start_date_default
+        else:
+            self.start_date = start_date
+        if not end_date:
+            self.end_date = doris_parameters.end_date_default
+        else:
+            self.end_date = end_date
+
+        self.start_date = datetime.strptime(self.start_date,'%Y-%m-%d')
+        self.end_date = datetime.strptime(self.end_date, '%Y-%m-%d')
+
+        self.nr_of_jobs = doris_parameters.nr_of_jobs
+        self.parallel = doris_parameters.parallel
+
+        self.stack = dict()
+        self.full_swath = dict()
+        self.master_date = ''
+        self.master_key = ''
+        self.folder = processing_folder
+        self.stack_folder = stack_folder
+
+        self.doris_path = doris_parameters.doris_path
+        self.cpxfiddle = doris_parameters.cpxfiddle_path  # '/...../cpxfiddle'
+        self.function_path = doris_parameters.function_path
+        self.input_files = input_files
+        self.ESD_shift = dict()
+        self.ESD_angle_pixel = dict()
+        self.swath = dict()
+        self.overlapping = []
+
+        # Initialize ESD variables
+        self.diff_matrix = dict()
+        self.var_matrix = dict()
+        self.to_angle_matrix = dict()
+        self.weight_matrix = dict()
+
+        if master_date:
+            master_date = datetime.strptime(master_date, '%Y-%m-%d')
+            self.master(master_date)
+
+        if self.folder:
+            self.processing_read()
+
+        self.coreg_dates = [d for d in self.stack.keys() if d != self.master_date]
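+
+    # A minimal construction sketch (all dates and paths are hypothetical):
+    #
+    #   stack = SingleMaster(start_date='2016-01-01', end_date='2016-12-31',
+    #                        master_date='2016-06-15',
+    #                        stack_folder='/data/stack',
+    #                        processing_folder='/data/processing',
+    #                        input_files='/data/input_files')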
+
+    def processing_read(self):
+        # This function reads a processing datastack based on the processing folder
+
+        folders = next(os.walk(self.folder))[1]
+        folders = [fold for fold in folders if len(fold) == 8]
+
+        self.stack = dict()
+
+        for fold in folders:
+            s_date = fold[:4] + '-' + fold[4:6] + '-' + fold[6:8]
+            s_datetime = datetime.strptime(s_date,'%Y-%m-%d')
+
+            if self.start_date <= s_datetime <= self.end_date:
+                self.stack[s_date] = dict()
+                self.full_swath[s_date] = dict()
+
+                swaths = next(os.walk(os.path.join(self.folder, fold)))[1]
+                swaths = [fol for fol in swaths if len(fol) == 7]
+
+                for swath in swaths:
+                    bursts = next(os.walk(os.path.join(self.folder, fold, swath)))[1]
+
+                    for burst in bursts:
+                        burst_name = swath + '_' + burst
+                        self.stack[s_date][burst_name] = dict()
+
+        self.read_res()
+
+    def remove_finished(self, step='unwrap'):
+        # Checks which processing folders are already finished based on the final step.
+        # Possible steps are: 'dem_assist', 'comp_coregpm', 'interfero', 'coherence', 'subtr_refphase',
+        # 'subtr_refdem', 'filtphase', 'unwrap', 'geocoding'.
+        self.read_res()
+
+        for date in self.stack.keys():
+            if 'ifgs' in self.full_swath[date].keys():
+                if self.full_swath[date]['ifgs'].process_control[step] == '1':
+                    self.coreg_dates.remove(date)
+
+        print('Dates that will be processed are:')
+        for date in self.coreg_dates:
+            print(date)
+        print('End list')
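+
+    # Sketch: skip dates whose interferogram already reached a given step, e.g.
+    #
+    #   stack.remove_finished(step='coherence')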
+
+    def master(self,master_date):
+        # Load master date
+        self.master_date = master_date.strftime('%Y-%m-%d')
+        self.master_key = self.master_date[:4] + self.master_date[5:7] + self.master_date[8:10]
+
+        if self.master_date not in self.stack.keys():
+            print('Master date is not part of the datastack. If you do not need to initialize the stack anymore, this is not a problem.')
+
+    def baseline(self):
+        # Create a baseline plot of the datastack. Useful for selecting the right master.
+        baselines(self.stack_folder,self.start_date,self.end_date)
+
+    def initialize(self, path='',cascade=False):
+        # If path is not defined the stack will be initialized in the same folder as the datastack. This function does:
+        # - copy .res and .raw files
+
+        os.chdir(self.folder)
+
+        for date in self.stack.keys():
+            date_folder = self.image_path(date)
+            if not os.path.exists(date_folder):
+                os.mkdir(date_folder)
+
+            for burst in self.stack[date].keys():
+                swath_folder = self.swath_path(date, burst)
+                if not os.path.exists(swath_folder):
+                    os.mkdir(swath_folder)
+
+                burst_folder = self.burst_path(date, burst, full_path=True)
+                if not os.path.exists(burst_folder):
+                    os.mkdir(burst_folder)
+                os.chdir(burst_folder)
+
+                # Copy data files
+                m_source = self.burst_path(key=burst, date=self.master_date, dat_type='slave', full_path=True)
+                m_dest = self.burst_path(key=burst, date=date, dat_type='master', full_path=True)
+
+                if not os.path.exists(m_dest):
+                    os.symlink(m_source, m_dest)
+
+                # Write res files
+                new_filename = os.path.join(burst_folder, 'master.res')
+                if not os.path.exists(new_filename):
+                    res = deepcopy(self.stack[self.master_date][burst]['slave'])
+                    res.processes['crop']['Data_output_file'] = 'master' + res.processes['crop']['Data_output_file'][5:]
+                    res.write(new_filename=new_filename)
+
+        self.read_res()
+
+        self.create_full_swath()
+        self.coarse_orbits(dates=[self.master_date])   # Create ifgs.res files for master.
+
+        del self.stack[self.master_date]
+        del self.full_swath[self.master_date]
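+
+    # One plausible processing order, a hedged sketch following the order in
+    # which the methods below are defined:
+    #
+    #   stack.initialize()
+    #   stack.coarse_orbits()
+    #   stack.coarse_correlation()
+    #   stack.correct_coarse_correlation()
+    #   stack.deramp()
+    #   stack.icc_burst()
+    #   stack.coreg_full_swath()
+    #   stack.dac_bursts()
+    #   stack.coreg_bursts()
+    #   stack.resample()
+    #   stack.reramp()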
+
+    def create_full_swath(self):
+        # Create folders with full swath for individual interferogram.
+
+        dates = self.stack.keys()
+
+        # Change the res files.
+        for date in dates:
+            print(date)
+            bursts = self.stack[date].keys()
+
+            if 'slave' in self.full_swath[date].keys() and 'master' in self.full_swath[date].keys():
+                continue
+
+            for dat_type in ['master', 'slave']:
+                self.full_swath[date][dat_type] = copy.deepcopy(self.stack[date][bursts[0]][dat_type])
+
+                # Information slave images
+                az_time = self.full_swath[date][dat_type].processes['readfiles']['First_pixel_azimuth_time (UTC)']
+                az_time = datetime.strptime(az_time,'%Y-%b-%d %H:%M:%S.%f')
+                range_time = float(self.full_swath[date][dat_type].processes['readfiles']['Range_time_to_first_pixel (2way) (ms)'])
+
+                # First adjust pixel and range times for pixel (1,1)
+                for burst in bursts:
+                    az_burst = self.stack[date][burst][dat_type].processes['readfiles']['First_pixel_azimuth_time (UTC)']
+                    az_burst = datetime.strptime(az_burst,'%Y-%b-%d %H:%M:%S.%f')
+                    range_burst = float(self.stack[date][burst][dat_type].processes['readfiles']['Range_time_to_first_pixel (2way) (ms)'])
+
+                    if az_burst < az_time:
+                        az_time = az_burst
+                    if range_burst < range_time:
+                        range_time = range_burst
+
+                az_time = az_time.strftime('%Y-%b-%d %H:%M:%S.%f')
+                range_time = "{0:.15f}".format(range_time)
+                self.full_swath[date][dat_type].processes['readfiles']['First_pixel_azimuth_time (UTC)'] = az_time
+                self.full_swath[date][dat_type].processes['readfiles']['Range_time_to_first_pixel (2way) (ms)'] = range_time
+
+                # Then change information on image size and crop.
+                no_lines = self.full_swath[date][dat_type].processes['readfiles']['Number_of_lines_output_image']
+                no_pixels = self.full_swath[date][dat_type].processes['readfiles']['Number_of_pixels_output_image']
+
+                self.full_swath[date][dat_type].processes['readfiles']['Number_of_lines_original'] = no_lines
+                self.full_swath[date][dat_type].processes['readfiles']['Number_of_pixels_original'] = no_pixels
+
+                # Change readfiles
+                self.full_swath[date][dat_type].processes['readfiles']['First_line (w.r.t. output_image)'] = '1'
+                self.full_swath[date][dat_type].processes['readfiles']['Last_line (w.r.t. output_image)'] = no_lines
+                self.full_swath[date][dat_type].processes['readfiles']['First_pixel (w.r.t. output_image)'] = '1'
+                self.full_swath[date][dat_type].processes['readfiles']['Last_pixel (w.r.t. output_image)'] = no_pixels
+
+                # Change in crop
+                self.full_swath[date][dat_type].processes['crop']['First_line (w.r.t. original_image)'] = '1'
+                self.full_swath[date][dat_type].processes['crop']['Last_line (w.r.t. original_image)'] = no_lines
+                self.full_swath[date][dat_type].processes['crop']['First_pixel (w.r.t. original_image)'] = '1'
+                self.full_swath[date][dat_type].processes['crop']['Last_pixel (w.r.t. original_image)'] = no_pixels
+                if dat_type == 'master':
+                    self.full_swath[date][dat_type].processes['crop']['Data_output_file'] = self.master_date + '.raw'
+                else:
+                    self.full_swath[date][dat_type].processes['crop']['Data_output_file'] = date + '.raw'
+                self.full_swath[date][dat_type].processes['crop'].pop('First_line (w.r.t. tiff_image)')
+                self.full_swath[date][dat_type].processes['crop'].pop('Last_line (w.r.t. tiff_image)')
+
+                # Write data to folder
+                folder = self.image_path(date)
+                os.chdir(folder)
+
+                # Create res_file
+                master_path = self.image_path(date, dat_type + '.res')
+                self.full_swath[date][dat_type].write(new_filename=master_path)
+
+        self.read_res()
+
+    def coarse_orbits(self, dates=[]):
+        # Run coarse orbits for all bursts.
+
+        if not dates:
+            if len(self.coreg_dates) == 0:
+                return
+            dates = self.coreg_dates
+            self.read_res(dates=self.coreg_dates)
+            stack = self.stack
+            full_swath = self.full_swath
+        else:  # In the case we process for another date.
+            stack = dict()
+            full_swath = dict()
+            self.read_res(dates=dates, image_stack=full_swath, burst_stack=stack)
+
+        bursts = self.stack[dates[0]].keys()
+
+        for date in dates:
+            # Reset the job lists per date, otherwise jobs from earlier dates would be run again.
+            job_list1 = []
+            job_list2 = []
+            for burst in bursts:
+                if 'ifgs' not in self.stack[date][burst].keys(): # If there is an ifgs file, coarse orbits are done...
+                    path = self.burst_path(date, burst, full_path=True)
+                    command2 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coarseorb')
+                    job_list2.append({"path": path, "command": command2})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command2)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list2)
+
+            # Run coarse orbits for full swath
+            if 'ifgs' not in self.full_swath[date].keys():  # If there is an ifgs file, coarse orbits are done...
+                path = self.image_path(date)
+                command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coarseorb')
+                job_list1.append({"path": path, "command": command1})
+                if not self.parallel:
+                    os.chdir(path)
+                    os.system(command1)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list1)
+
+    def coarse_correlation(self, ps=False):
+        # Run coarse correlation.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            job_list1 = []
+            job_list2 = []
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['coarse_correl'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    os.chdir(path)
+                    if ps is True:
+                        master_file = self.burst_path(key=burst,dat_type='master',full_path=False)
+                        command1 = 'python -m ' + 'get_winpos' + ' ' + master_file + ' master.res 21 winpos_cc.asc'
+                        job_list1.append({"path": path, "command": command1})
+                        command2 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coarsecorr')
+                        job_list2.append({"path": path, "command": command2})
+                        if not self.parallel:
+                            os.system(command1)
+                            os.system(command2)
+                    if ps is False:
+                        command = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coarsecorr')
+                        job_list1.append({"path": path, "command": command})
+                        if not self.parallel:
+                            os.system(command)
+
+            if (self.parallel):
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list1)
+                jobs.run(job_list2)
+
+        self.fake_master_steps(step='coarse_correl', full_swath=False)
+
+    def correct_coarse_correlation(self):
+        # Correct coarse orbits to same reference system for whole image.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+
+            bursts = self.stack[date].keys()
+            real_trans_p = []
+            real_trans_l = []
+            crop_shift_p = []
+            crop_shift_l = []
+
+            for burst in bursts:
+
+                s_first_pix = self.stack[date][burst]['slave'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                s_first_line = self.stack[date][burst]['slave'].processes['readfiles']['First_line (w.r.t. output_image)']
+                m_first_pix = self.stack[date][burst]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                m_first_line = self.stack[date][burst]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+
+                s_first_pix_c = self.stack[date][burst]['slave'].processes['crop']['First_pixel (w.r.t. original_image)']
+                s_first_line_c = self.stack[date][burst]['slave'].processes['crop']['First_line (w.r.t. original_image)']
+                m_first_pix_c = self.stack[date][burst]['master'].processes['crop']['First_pixel (w.r.t. original_image)']
+                m_first_line_c = self.stack[date][burst]['master'].processes['crop']['First_line (w.r.t. original_image)']
+
+                coarse_p = self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_pixels']
+                coarse_l = self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_lines']
+
+                crop_p = int(s_first_pix) - int(m_first_pix) - int(s_first_pix_c) + int(m_first_pix_c)
+                crop_l = int(s_first_line) - int(m_first_line) - int(s_first_line_c) + int(m_first_line_c)
+                crop_shift_p.append(crop_p)
+                crop_shift_l.append(crop_l)
+                real_trans_p.append(int(coarse_p) + crop_p)
+                real_trans_l.append(int(coarse_l) + crop_l)
+
+            im_trans_p = int(round(np.median(real_trans_p)))
+            im_trans_l = int(round(np.median(real_trans_l)))
+
+            for burst, p_shift, l_shift in zip(bursts, crop_shift_p, crop_shift_l):
+
+                trans_l = str(im_trans_l - l_shift)
+                trans_p = str(im_trans_p - p_shift)
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_pixels'] = trans_p
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_lines'] = trans_l
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Initial_Offset_CoarseCorr_pixels'] = trans_p
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Initial_Offset_CoarseCorr_lines'] = trans_l
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Slope_CoarseCorr_pixels'] = '0'
+                self.stack[date][burst]['ifgs'].processes['coarse_correl']['Slope_CoarseCorr_lines'] = '0'
+            self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_pixels'] = str(im_trans_p)
+            self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_lines'] = str(im_trans_l)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='coarse_correl', burst_proc=False)
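+
+    # Worked example of the correction above (made-up numbers): with per-burst
+    # coarse translations of [12, 13, 12] pixels and crop shifts of [0, 1, 0],
+    # real_trans_p = [12, 14, 12], so im_trans_p = round(median) = 12 and the
+    # bursts are rewritten with trans_p = 12 - crop_shift, i.e. [12, 11, 12].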
+
+    def deramp(self, master=True):
+        # Deramp the masters and slaves of all bursts.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        job_list1 = []
+        job_list2 = []
+
+        # Deramp slaves
+        bursts = self.stack[self.coreg_dates[0]].keys()
+
+        for date in self.coreg_dates:
+            for burst in bursts:
+                path = self.burst_path(date, burst, full_path=True)
+                slave_file = self.burst_path(key=burst, dat_type='slave', full_path=False)
+                slave_deramped = self.burst_path(key=burst, dat_type='slave_deramped', full_path=False)
+
+                if not os.path.exists(os.path.join(path, slave_deramped)):
+                    command2 = 'python ' + os.path.join(self.function_path, 'do_deramp_SLC.py') + ' ' + slave_file + ' slave.res'
+                    job_list2.append({"path": path, "command": command2})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command2)
+
+                if self.stack[date][burst]['slave'].processes['crop']['Data_output_file'] != os.path.basename(slave_deramped):
+                    self.stack[date][burst]['slave'].processes['crop']['Data_output_file'] = os.path.basename(slave_deramped)
+                if self.stack[date][burst]['slave'].processes['readfiles']['deramp'] != '1':
+                    self.stack[date][burst]['slave'].processes['readfiles']['deramp'] = '1'
+                if self.stack[date][burst]['master'].processes['readfiles']['reramp'] != '0':
+                    self.stack[date][burst]['master'].processes['readfiles']['reramp'] = '0'
+
+        # Deramp master
+        date = self.master_date
+
+        for burst in bursts:
+            path = self.burst_path(date, burst, full_path=True)
+            master_file = self.burst_path(key=burst, dat_type='slave', full_path=False)
+            master_deramped = self.burst_path(key=burst, dat_type='slave_deramped', full_path=False)
+
+            if not os.path.exists(os.path.join(path, master_deramped)) or not master:
+                command1 = 'python ' + os.path.join(self.function_path, 'do_deramp_SLC.py') + ' ' + master_file + ' slave.res'
+                job_list1.append({"path": path, "command": command1})
+                if not self.parallel:
+                    os.chdir(path)
+                    os.system(command1)
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+            jobs.run(job_list2)
+
+        # Create links for master if needed.
+        for burst in bursts:
+            master_file = self.burst_path(key=burst, date=date, dat_type='slave_deramped', full_path=True)
+
+            for date_slave in self.coreg_dates:
+                slave_file = self.burst_path(key=burst, date=date_slave, dat_type='master_deramped', full_path=True)
+                if not os.path.exists(slave_file):
+                    os.symlink(master_file, slave_file)
+                if self.stack[date_slave][burst]['master'].processes['crop']['Data_output_file'] != os.path.basename(slave_file):
+                    self.stack[date_slave][burst]['master'].processes['crop']['Data_output_file'] = os.path.basename(slave_file)
+                if self.stack[date_slave][burst]['master'].processes['readfiles']['deramp'] != '1':
+                    self.stack[date_slave][burst]['master'].processes['readfiles']['deramp'] = '1'
+                if self.stack[date_slave][burst]['master'].processes['readfiles']['reramp'] != '0':
+                    self.stack[date_slave][burst]['master'].processes['readfiles']['reramp'] = '0'
+
+        self.update_res(dates=self.coreg_dates)
+
+    def icc_burst(self, ps=False):
+        # Do the icc per burst
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        job_list1 = []
+        job_list2 = []
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['fine_coreg'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    master_file = self.burst_path(key=burst,dat_type='master',full_path=False)
+                    if not(self.parallel):
+                        os.chdir(path)
+                    if ps == True:
+                        command1 = 'python -m ' + 'get_winpos' + ' ' + master_file + ' master.res 101 winpos_fine.asc'
+                        job_list1.append({"path": path, "command": command1})
+                        command2 = self.doris_path + ' ' + os.path.join(self.input_files,'input.finecoreg_icc_pointscat')
+                        job_list2.append({"path": path, "command": command2})
+                        if (not(self.parallel)):
+                            os.system(command1)
+                            os.system(command2)
+                    elif ps == False:
+                        command = self.doris_path + ' ' + os.path.join(self.input_files,'input.finecoreg')
+                        job_list1.append({"path": path, "command": command})
+                        if not (self.parallel):
+                            os.system(command)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+            jobs.run(job_list2)
+
+        self.fake_master_steps(step='fine_coreg', full_swath=False)
+
+    def coreg_full_swath(self):
+        # Do the combined icc and dem coregistration for the full swath
+
+        # First read all .res files again.
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            # We start by adding the windows of the first burst.
+            no_offset = 0
+            bursts = self.stack[date].keys()
+            new_icc = copy.deepcopy(self.stack[date][bursts[0]]['ifgs'].processes['fine_coreg'])
+
+            im_trans_p = self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_pixels']
+            im_trans_l = self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_lines']
+
+            for burst in bursts:
+
+                icc = self.stack[date][burst]['ifgs'].processes['fine_coreg']
+                position = self.stack[date][burst]['master'].processes['readfiles']
+
+                trans_p = self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_pixels']
+                trans_l = self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_lines']
+                p_shift_offset = int(im_trans_p) - int(trans_p)
+                l_shift_offset = int(im_trans_l) - int(trans_l)
+
+                p_offset = int(position['First_pixel (w.r.t. output_image)']) - 1
+                l_offset = int(position['First_line (w.r.t. output_image)']) - 1
+                window_no = int(icc['Number_of_correlation_windows'])
+
+                for row in range(1,window_no+1):
+                    dat = copy.deepcopy(icc['row_' + str(row)])
+                    dat[0] = str(no_offset)
+                    dat[1] = str(int(dat[1]) + l_offset)
+                    dat[2] = str(int(dat[2]) + p_offset)
+                    dat[3] = str(float(dat[3]) + float(l_shift_offset))
+                    dat[4] = str(float(dat[4]) + float(p_shift_offset))
+                    new_icc['row_' + str(no_offset + 1)] = dat
+
+                    no_offset += 1
+
+            new_icc['Number_of_correlation_windows'] = str(no_offset)
+
+            # Finally save to .res file
+            self.full_swath[date]['ifgs'].insert(new_icc,'fine_coreg')
+            # And write .res file
+            res_path = self.image_path(date,file_path='ifgs.res')
+            self.full_swath[date]['ifgs'].write(new_filename=res_path)
+
+        self.fake_master_steps(step='fine_coreg', burst_proc=False)
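+
+    # Illustration of the bookkeeping above (made-up numbers): a window at
+    # burst-local (line, pixel) = (100, 200) in a burst whose first line/pixel
+    # w.r.t. the output image is (1501, 1) ends up at (1600, 200) in full-swath
+    # coordinates, and its offsets are shifted by the difference between the
+    # image-wide and burst-wise coarse translations.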
+
+    def dac_bursts(self):
+        # Do the DEM coregistration and coregpm for the full swath
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            job_list = []
+            for burst in self.stack[date].keys():
+                # If this step is not run yet.
+                if self.stack[date][burst]['ifgs'].process_control['dem_assist'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command = self.doris_path + ' ' + os.path.join(self.input_files,'input.dembased')
+                    job_list.append({"path": path, "command": command})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command)
+            if (self.parallel):
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list)
+
+        self.fake_master_steps(step='dem_assist', full_swath=False)
+
+    def coreg_bursts(self,no_poly=True):
+        # Write coregistration results from full swath to individual bursts
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            # First read the polynomials and normalization lines/pixels from full swath
+
+            path = self.image_path(date)
+            os.chdir(path)
+            os.system(self.doris_path + ' ' + os.path.join(self.input_files,'input.coregpm'))
+
+            self.read_res(dates=[date])
+
+            coreg = copy.deepcopy(self.full_swath[date]['ifgs'].processes['comp_coregpm'])
+            norm_line = [float(coreg['Normalization_Lines'].split()[0]),float(coreg['Normalization_Lines'].split()[1])]
+            norm_pix = [float(coreg['Normalization_Pixels'].split()[0]),float(coreg['Normalization_Pixels'].split()[1])]
+            degree = int(coreg['Degree_cpm'])
+
+            La = 0; Lb = 0; Lc = 0; Ld = 0; Le = 0; Lf = 0
+            Pa = 0; Pb = 0; Pc = 0; Pd = 0; Pe = 0; Pf = 0
+
+            # Load the polynomial from the full swath
+            if degree == 0 and no_poly == False:
+                Lf = float(coreg['row_0'][0])
+                Pf = float(coreg['row_1'][0])
+            if degree == 1 and no_poly == False:
+                Lf = float(coreg['row_0'][0])
+                Le = float(coreg['row_1'][0])
+                Ld = float(coreg['row_2'][0])
+                Pf = float(coreg['row_3'][0])
+                Pe = float(coreg['row_4'][0])
+                Pd = float(coreg['row_5'][0])
+            if degree == 2 and no_poly == False:
+                Lf = float(coreg['row_0'][0])
+                Le = float(coreg['row_1'][0])
+                Ld = float(coreg['row_2'][0])
+                Lc = float(coreg['row_4'][0])
+                Lb = float(coreg['row_3'][0])
+                La = float(coreg['row_5'][0])
+                Pf = float(coreg['row_6'][0])
+                Pe = float(coreg['row_7'][0])
+                Pd = float(coreg['row_9'][0])
+                Pc = float(coreg['row_8'][0])
+                Pb = float(coreg['row_10'][0])
+                Pa = float(coreg['row_11'][0])
+
+            for burst in self.stack[date].keys():
+                # Now convert to burst coreg using the pixel and line offset
+                line_burst = int(self.stack[date][burst]['master'].processes['readfiles']['First_line (w.r.t. output_image)'])
+                pixel_burst = int(self.stack[date][burst]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)'])
+
+                # And convert to an offset in the [-2,2] domain
+                l0 = (line_burst-norm_line[0]) / (norm_line[1]-norm_line[0]) * 4
+                p0 = (pixel_burst-norm_pix[0]) / (norm_pix[1]-norm_pix[0]) * 4
+
+                # Finally convert variables. We assume a 2nd degree polynomial.
+                # y = a*x'^2 + b*z'^2 + c*x'*z' + (2*a*x0 + c*z0 + d)*x' + (2*b*z0 + c*x0 + e)*z'
+                #     + (a*x0^2 + b*z0^2 + c*x0*z0 + d*x0 + e*z0 + f)
+                p_poly = [0,0,0,0,0,0]
+                p_poly[0] = Pa*p0**2 + Pb*l0**2 + Pc*p0*l0 + Pd*p0 + Pe*l0 + Pf
+                p_poly[1] = 2*Pb*l0 + Pc*p0 + Pe
+                p_poly[2] = 2*Pa*l0 + Pc*p0 + Pd
+                p_poly[3] = Pc
+                p_poly[4] = Pb
+                p_poly[5] = Pa
+
+                l_poly = [0,0,0,0,0,0]
+                l_poly[0] = La*l0**2 + Lb*p0**2 + Lc*p0*l0 + Ld*l0 + Le*p0 + Lf
+                l_poly[1] = 2*Lb*p0 + Lc*l0 + Le
+                l_poly[2] = 2*La*p0 + Lc*l0 + Ld
+                l_poly[3] = Lc
+                l_poly[4] = Lb
+                l_poly[5] = La
+
+                # lambda function for pixel and line coordinates
+                l_eq = lambda l,p: l_poly[5]*l**2 + l_poly[4]*p**2 + l_poly[3]*l*p + l_poly[2]*l + l_poly[1]*p + l_poly[0]
+                p_eq = lambda l,p: p_poly[5]*p**2 + p_poly[4]*l**2 + p_poly[3]*l*p + p_poly[2]*p + p_poly[1]*l + p_poly[0]
+
+                # Save new coregistration function to burst
+                coreg['Degree_cpm'] = str(degree)
+                if degree == 0:
+                    coreg['row_0'] = ["{0:.8e}".format(l_poly[0]), '0', '0']
+                    coreg['row_1'] = ["{0:.8e}".format(p_poly[0]), '0', '0']
+                if degree == 1:
+                    coreg['row_0'] = ["{0:.8e}".format(l_poly[0]), '0', '0']
+                    coreg['row_1'] = ["{0:.8e}".format(l_poly[1]), '1', '0']
+                    coreg['row_2'] = ["{0:.8e}".format(l_poly[2]), '0', '1']
+                    coreg['row_3'] = ["{0:.8e}".format(p_poly[0]), '0', '0']
+                    coreg['row_4'] = ["{0:.8e}".format(p_poly[1]), '1', '0']
+                    coreg['row_5'] = ["{0:.8e}".format(p_poly[2]), '0', '1']
+                if degree == 2:
+                    coreg['row_0'] = ["{0:.8e}".format(l_poly[0]), '0', '0']
+                    coreg['row_1'] = ["{0:.8e}".format(l_poly[1]), '1', '0']
+                    coreg['row_2'] = ["{0:.8e}".format(l_poly[2]), '0', '1']
+                    coreg['row_3'] = ["{0:.8e}".format(l_poly[4]), '2', '0']
+                    coreg['row_4'] = ["{0:.8e}".format(l_poly[3]), '1', '1']
+                    coreg['row_5'] = ["{0:.8e}".format(l_poly[5]), '0', '2']
+                    coreg['row_6'] = ["{0:.8e}".format(p_poly[0]), '0', '0']
+                    coreg['row_7'] = ["{0:.8e}".format(p_poly[1]), '1', '0']
+                    coreg['row_8'] = ["{0:.8e}".format(p_poly[2]), '0', '1']
+                    coreg['row_9'] = ["{0:.8e}".format(p_poly[4]), '2', '0']
+                    coreg['row_10'] = ["{0:.8e}".format(p_poly[3]), '1', '1']
+                    coreg['row_11'] = ["{0:.8e}".format(p_poly[5]), '0', '2']
+
+                coreg['Deltaline_slave00_poly'] = "{0:.8e}".format(-l_eq(-2.0, -2.0))
+                coreg['Deltapixel_slave00_poly'] = "{0:.8e}".format(-p_eq(-2.0, -2.0))
+                coreg['Deltaline_slave0N_poly'] = "{0:.8e}".format(-l_eq(-2.0, 2.0))
+                coreg['Deltapixel_slave0N_poly'] = "{0:.8e}".format(-p_eq(-2.0, 2.0))
+                coreg['Deltaline_slaveN0_poly'] = "{0:.8e}".format(-l_eq(2.0, -2.0))
+                coreg['Deltapixel_slaveN0_poly'] = "{0:.8e}".format(-p_eq(2.0, -2.0))
+                coreg['Deltaline_slaveNN_poly'] = "{0:.8e}".format(-l_eq(2.0, 2.0))
+                coreg['Deltapixel_slaveNN_poly'] = "{0:.8e}".format(-p_eq(2.0, 2.0))
+
+                # Finally add the Normalization lines / pixels
+                lines = (int(self.stack[date][burst]['master'].processes['crop']['Last_line (w.r.t. original_image)']) -
+                         int(self.stack[date][burst]['master'].processes['crop']['First_line (w.r.t. original_image)']))
+                pixels = (int(self.stack[date][burst]['master'].processes['crop']['Last_pixel (w.r.t. original_image)']) -
+                         int(self.stack[date][burst]['master'].processes['crop']['First_pixel (w.r.t. original_image)']))
+
+                # Save pixels lines
+                coreg['Normalization_Lines'] = "{0:.8e}".format(1) + ' ' + "{0:.8e}".format(lines)
+                coreg['Normalization_Pixels'] = "{0:.8e}".format(1) + ' ' + "{0:.8e}".format(pixels)
+
+                # Copy coregistration from full swath to burst
+                try:
+                    self.stack[date][burst]['ifgs'].insert(coreg,'comp_coregpm')
+                except:
+                    self.stack[date][burst]['ifgs'].update(coreg,'comp_coregpm')
+
+            self.update_res(dates=[date])
+
+        # Save .res files.
+        self.update_res(dates=self.coreg_dates)
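+
+    # One-dimensional analogue of the substitution above (illustrative only):
+    # for y = a*x^2 + d*x + f and x = x' + x0, expanding gives
+    # y = a*x'^2 + (2*a*x0 + d)*x' + (a*x0**2 + d*x0 + f),
+    # which is the pattern p_poly/l_poly follow, extended with the cross and
+    # second-order terms in both coordinates.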
+
+    def fake_fine_coreg(self):
+        # This function is used if only geometrical coregistration is used.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        coreg = OrderedDict()
+        coreg['Initial offsets (l,p)'] = '0, 0'
+        coreg['Window_size_L_for_correlation'] = '64'
+        coreg['Window_size_P_for_correlation'] = '64'
+        coreg['Max. offset that can be estimated'] = '32'
+        coreg['Peak search ovs window (l,p)'] = '16 , 16'
+        coreg['Oversampling factor'] = '32'
+        coreg['Number_of_correlation_windows'] = '0'
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                # Insert fake coregistration
+                if not self.stack[date][burst]['ifgs'].process_control['fine_coreg'] == '1':
+                    self.stack[date][burst]['ifgs'].insert(coreg,'fine_coreg')
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='fine_coreg', full_swath=False)
+
+    def fake_coregmp(self):
+        # This function is used if only geometrical coregistration is used.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        coreg = OrderedDict()
+        coreg['Degree_cpm'] = '0'
+        coreg['Normalization_Lines'] = ''
+        coreg['Normalization_Pixels'] = ''
+        coreg['Estimated_coefficientsL'] = ''
+        coreg['row_0'] = ["{0:.8e}".format(0), '0', '0']
+        coreg['Estimated_coefficientsP'] = ''
+        coreg['row_1'] = ["{0:.8e}".format(0), '0', '0']
+
+        coreg['Deltaline_slave00_poly'] = "{0:.8e}".format(0)
+        coreg['Deltapixel_slave00_poly'] = "{0:.8e}".format(0)
+        coreg['Deltaline_slave0N_poly'] = "{0:.8e}".format(0)
+        coreg['Deltapixel_slave0N_poly'] = "{0:.8e}".format(0)
+        coreg['Deltaline_slaveN0_poly'] = "{0:.8e}".format(0)
+        coreg['Deltapixel_slaveN0_poly'] = "{0:.8e}".format(0)
+        coreg['Deltaline_slaveNN_poly'] = "{0:.8e}".format(0)
+        coreg['Deltapixel_slaveNN_poly'] = "{0:.8e}".format(0)
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                # Now convert to burst coreg using the pixel and line offset
+                lines = (int(self.stack[date][burst]['master'].processes['crop']['Last_line (w.r.t. original_image)']) -
+                         int(self.stack[date][burst]['master'].processes['crop']['First_line (w.r.t. original_image)']))
+                pixels = (int(self.stack[date][burst]['master'].processes['crop']['Last_pixel (w.r.t. original_image)']) -
+                         int(self.stack[date][burst]['master'].processes['crop']['First_pixel (w.r.t. original_image)']))
+
+                # Save pixels lines
+                coreg['Normalization_Lines'] = "{0:.8e}".format(1) + ' ' + "{0:.8e}".format(lines)
+                coreg['Normalization_Pixels'] = "{0:.8e}".format(1) + ' ' + "{0:.8e}".format(pixels)
+
+                # Copy coregistration from full swath to burst
+                if not self.stack[date][burst]['ifgs'].process_control['comp_coregpm'] == '1':
+                    self.stack[date][burst]['ifgs'].insert(coreg,'comp_coregpm')
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='comp_coregpm', full_swath=False)
+
+    def dac_full_swath(self):
+        # This function reads the dem shift result files from the full swath and saves them to both data and result
+        # files of individual bursts.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+
+                master_dat = self.stack[date][burst]['master'].processes['crop']
+                lines = int(master_dat['Last_line (w.r.t. original_image)']) - int(master_dat['First_line (w.r.t. original_image)'])
+                pixels = int(master_dat['Last_pixel (w.r.t. original_image)']) - int(master_dat['First_pixel (w.r.t. original_image)'])
+                ref_offset_p = int(self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_pixels'])
+                ref_offset_l = int(self.full_swath[date]['ifgs'].processes['coarse_orbits']['Coarse_orbits_translation_lines'])
+                offset_p = int(self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_pixels'])
+                offset_l = int(self.stack[date][burst]['ifgs'].processes['coarse_correl']['Coarse_correlation_translation_lines'])
+
+                file_path = self.burst_path(date, burst, file_path='dac_delta_pixel.raw', full_path=True)
+
+                if not os.path.exists(file_path + '.new'):
+                    d_pixel = np.memmap(file_path, dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+                    n_pixel = np.memmap(file_path + '.new', mode='w+', dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+                    n_pixel[:,:] = d_pixel[:,:] - (offset_p - ref_offset_p)
+                    n_pixel.flush()
+
+                file_path = self.burst_path(date, burst, file_path='dac_delta_line.raw', full_path=True)
+
+                if not os.path.exists(file_path + '.new'):
+                    d_line = np.memmap(file_path, dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+                    n_line = np.memmap(file_path + '.new', mode='w+', dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+                    n_line[:,:] = d_line[:,:] - (offset_l - ref_offset_l)
+                    n_line.flush()
+
+        # Write delta line/pixel to burst folder
+        self.concatenate('dac_delta_line.raw.new', 'dac_delta_line.raw.new',dt=np.dtype('float64'))
+        self.concatenate('dac_delta_pixel.raw.new', 'dac_delta_pixel.raw.new',dt=np.dtype('float64'))
+
+        for date in self.coreg_dates:
+
+            bursts = self.stack[date].keys()
+
+            res_dem = deepcopy(self.stack[date][bursts[0]]['ifgs'].processes['dem_assist'])
+            master_crop = deepcopy(self.full_swath[date]['master'].processes['crop'])
+
+            # Update fields to dimensions of master burst.
+            res_dem['First_line (w.r.t. original_master)'] = master_crop['First_line (w.r.t. original_image)']
+            res_dem['Last_line (w.r.t. original_master)'] = master_crop['Last_line (w.r.t. original_image)']
+            res_dem['First_pixel (w.r.t. original_master)'] = master_crop['First_pixel (w.r.t. original_image)']
+            res_dem['Last_pixel (w.r.t. original_master)'] = master_crop['Last_pixel (w.r.t. original_image)']
+            lines = int(master_crop['Last_line (w.r.t. original_image)']) - int(master_crop['First_line (w.r.t. original_image)'])
+            res_dem['Number of lines'] = str(lines + 1)
+            pixels = int(master_crop['Last_pixel (w.r.t. original_image)']) - int(master_crop['First_pixel (w.r.t. original_image)'])
+            res_dem['Number of pixels'] = str(pixels + 1)
+
+            # Load image data
+            file_path = self.image_path(date, file_path='dac_delta_pixel.raw')
+            command = 'mv ' + file_path + '.new ' + file_path
+            os.system(command)
+            d_pixel = np.memmap(file_path, dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+            file_path = self.image_path(date, file_path='dac_delta_line.raw')
+            command = 'mv ' + file_path + '.new ' + file_path
+            os.system(command)
+            d_line = np.memmap(file_path, dtype=np.dtype('float64'), shape=(lines+1,pixels+1))
+
+            # Correct for corner information.
+            res_dem['Deltaline_slave00_dem'] = str(-d_line[0,0])
+            res_dem['Deltapixel_slave00_dem'] = str(-d_pixel[0,0])
+            res_dem['Deltaline_slave0N_dem'] = str(-d_line[0,-1])
+            res_dem['Deltapixel_slave0N_dem'] = str(-d_pixel[0,-1])
+            res_dem['Deltaline_slaveN0_dem'] = str(-d_line[-1,0])
+            res_dem['Deltapixel_slaveN0_dem'] = str(-d_pixel[-1,0])
+            res_dem['Deltaline_slaveNN_dem'] = str(-d_line[-1,-1])
+            res_dem['Deltapixel_slaveNN_dem'] = str(-d_pixel[-1,-1])
+
+            self.full_swath[date]['ifgs'].insert(res_dem,process='dem_assist')
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='dem_assist', burst_proc=False)
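+
+    # Numeric illustration of the correction above (made-up values): with a
+    # burst translation of 14 pixels against an image-wide reference of 12,
+    # every sample of the burst's dac_delta_pixel.raw is lowered by
+    # (14 - 12) = 2, so all bursts share the full-swath reference before
+    # concatenation.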
+
+    def resample(self, type=''):
+        # Resample slave bursts
+
+        if len(self.coreg_dates) == 0:
+            return
+
+        jobList1 = []
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+
+                if self.stack[date][burst]['slave'].process_control['resample'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.resample')
+
+                    jobList1.append({"path": path, "command": command1})
+
+                    if not self.parallel:
+                        os.chdir(path)
+                        # Resample
+                        os.system(command1)
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList1)
+
+    def reramp(self, type=''):
+        # This function reramps the radar data. If master is True, we assume that there is still an original master
+        # file, which means that it does not need to be reramped. If master is False, only the slave is reramped.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        bursts = self.stack[self.coreg_dates[0]].keys()
+
+        jobList1 = []
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                path = self.burst_path(date, burst, full_path=True)
+
+                if not os.path.exists(os.path.join(path, 'slave_rsmp_reramped.raw')):
+                    # If we are before the ESD step and reramp is not yet done.
+                    command1 = 'python ' + os.path.join(self.function_path, 'do_reramp_SLC.py') + ' slave_rsmp.raw slave.res'
+                    jobList1.append({"path": path, "command": command1})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command1)
+
+                if self.stack[date][burst]['slave'].processes['resample']['Data_output_file'] != 'slave_rsmp_reramped.raw':
+                    self.stack[date][burst]['slave'].processes['resample']['Data_output_file'] = 'slave_rsmp_reramped.raw'
+                if self.stack[date][burst]['slave'].processes['readfiles']['reramp'] != '1':
+                    self.stack[date][burst]['slave'].processes['readfiles']['reramp'] = '1'
+
+        # Create links for master if needed.
+        for burst in bursts:
+            for date in self.coreg_dates:
+                # TODO If steps like simamp are added, we have to link back to these files.
+                slave_file = self.burst_path(key=burst, date=date, dat_type='master', full_path=True)
+                if self.stack[date][burst]['master'].processes['crop']['Data_output_file'] != os.path.basename(slave_file):
+                    self.stack[date][burst]['master'].processes['crop']['Data_output_file'] = os.path.basename(slave_file)
+                if self.stack[date][burst]['master'].processes['readfiles']['reramp'] != '1':
+                    self.stack[date][burst]['master'].processes['readfiles']['reramp'] = '1'
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList1)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='resample')
+
+    def fake_master_resample(self):
+        # This script fakes a resample step for the master file (this is of course not really needed)
+        # This will save us a lot of time in exception handling in steps later on....
+        # - create a master.res / copy slave.res
+        # - add resampling step (We assume a reramped result)
+        # - add slave_rsmp.raw and slave_rsmp_reramped.raw in complex64 format.
+        # - add the master original and deramp step same as the slave file.
+
+        date = self.master_date
+        date_1 = self.stack.keys()[0]
+        bursts = self.stack[date_1].keys()
+        burst_res = dict()
+        image_res = dict()
+        self.read_res(dates=[self.master_date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
+
+        for burst in self.stack[self.coreg_dates[0]].keys():
+            # burst_path
+            if not burst_res[date][burst]['slave'].process_control['resample'] == '1':
+                burst_res[date][burst]['slave'].insert(self.stack[date_1][burst]['slave'].processes['resample'], 'resample')
+
+            # Now create symlink to master data
+            slave_dat = self.burst_path(self.master_date, burst, full_path=True, dat_type='slave')
+            slave_deramped_dat = self.burst_path(self.master_date, burst, full_path=True, dat_type='slave_deramped')
+            master_dat = self.burst_path(self.master_date, burst, full_path=True, dat_type='master')
+            master_deramped_dat = self.burst_path(self.master_date, burst, full_path=True, dat_type='master_deramped')
+
+            if not os.path.exists(master_dat):
+                os.symlink(slave_dat, master_dat)
+            if not os.path.exists(master_deramped_dat):
+                os.symlink(slave_deramped_dat, master_deramped_dat)
+
+            # Finally copy the resampled files in complex64 format.
+            resample_dat = self.burst_path(self.master_date, burst, full_path=True, file_path='slave_rsmp.raw')
+            resample_reramped_dat = self.burst_path(self.master_date, burst, full_path=True, file_path='slave_rsmp_reramped.raw')
+
+            lines = int(burst_res[date][burst]['slave'].processes['readfiles']['Last_line (w.r.t. output_image)']) - \
+                    int(burst_res[date][burst]['slave'].processes['readfiles']['First_line (w.r.t. output_image)']) + 1
+            pixels = int(burst_res[date][burst]['slave'].processes['readfiles']['Last_pixel (w.r.t. output_image)']) - \
+                     int(burst_res[date][burst]['slave'].processes['readfiles']['First_pixel (w.r.t. output_image)']) + 1
+            dtype = np.dtype([('re', np.int16), ('im', np.int16)])
+
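+            # The raw bursts are stored as cpxint16 (interleaved int16 real/imaginary pairs). Viewing the
+            # structured array as int16, casting to float32 and viewing as complex64 converts them to the
+            # complex_real4 format of a resampled slave.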
+            if not os.path.exists(resample_dat):
+                resample = np.memmap(resample_dat, dtype='complex64', mode='w+', shape=(lines, pixels))
+                slc_dat = np.memmap(slave_deramped_dat, dtype=dtype, mode='r', shape=(lines, pixels)).view(
+                    np.int16).astype(np.float32).view(np.complex64)
+                resample[:, :] = slc_dat
+                resample.flush()
+
+            if not os.path.exists(resample_reramped_dat):
+                resample_reramped = np.memmap(resample_reramped_dat, dtype='complex64', mode='w+', shape=(lines, pixels))
+                slc_ramped_dat = np.memmap(slave_dat, dtype=dtype, mode='r', shape=(lines, pixels)).view(
+                    np.int16).astype(np.float32).view(np.complex64)
+                resample_reramped[:, :] = slc_ramped_dat
+                resample_reramped.flush()
+
+        self.update_res(dates=[date], image_stack=image_res, burst_stack=burst_res)
+
+    def fake_master_steps(self, step='subtr_refdem', network=True, burst_proc=True, full_swath=True):
+        # This fakes different processing steps of the ifgs for the master date. These are needed for further
+        # processing in a network setup. Note that we just copy metadata from one of the other resampled
+        # datasets, which means that the actual values in these .res files are not correct.
+
+        steps = ['coarse_correl', 'fine_coreg', 'dem_assist', 'comp_coregpm', 'interfero', 'comp_refphase',
+                 'subtr_refphase', 'comp_refdem', 'subtr_refdem', 'filtphase', 'unwrap', 'coherence']
+        file_steps = {'subtr_refphase': 'cint_srp.raw', 'subtr_refdem': 'cint_srd.raw',
+                      'filtphase': 'cint.0.2filtered', 'unwrap': 'unwrapped.raw', 'coherence': 'coherence.raw'}
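+        # file_steps maps each processing step to the output file that later steps expect on disk.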
+
+        if step == 'resample':
+            self.fake_master_resample()
+            return
+        elif step not in steps:
+            print('Step ' + step + ' does not exist in processing')
+            return
+
+        date = self.master_date
+        date_1 = self.stack.keys()[0]
+        bursts = self.stack[date_1].keys()
+        burst_res = dict()
+        image_res = dict()
+        self.read_res()  # Read the information from other steps first.
+        self.read_res(dates=[self.master_date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
+
+        if burst_proc:
+            for burst in bursts:
+                if burst_res[date][burst]['ifgs'].process_control[step] != '1':
+                    lines = int(burst_res[date][burst]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']) - \
+                            int(burst_res[date][burst]['master'].processes['readfiles']['First_line (w.r.t. output_image)']) + 1
+                    pixels = int(burst_res[date][burst]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']) - \
+                             int(burst_res[date][burst]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']) + 1
+
+                    rsmp_file = self.burst_path(self.master_date, burst, full_path=True, file_path='slave_rsmp_reramped.raw')
+                    ifg_file = self.burst_path(self.master_date, burst, full_path=True, file_path='cint.raw')
+
+                    # Cases where we create output files relevant for further processing
+                    if not network and step == 'interfero':
+                        # If we do not work with a network the interferogram consists of zeros.
+                        ifg = np.memmap(ifg_file, dtype='complex64', mode='w+', shape=(lines, pixels))
+                        ifg[:, :] = 0
+                        ifg.flush()
+                    elif network and step == 'interfero' and not os.path.exists(ifg_file):
+                        os.symlink(rsmp_file, ifg_file)
+
+                    # Check whether we have to create a file and, if so, which file that should be.
+                    elif step in file_steps.keys():
+                        if step == 'coherence':
+                            # Coherence of the master ifg with itself is 1.
+                            coh_file = self.burst_path(self.master_date, burst, full_path=True, file_path=file_steps[step])
+                            coh = np.memmap(coh_file, dtype='float32', mode='w+', shape=(lines, pixels))
+                            coh[:, :] = 1
+                            coh.flush()
+                        else:
+                            # For the other steps we only have to link to the interferogram. This does not make
+                            # sense when a network is used for unwrapping and filtphase, but then it is not used anyway.
+                            step_file = self.burst_path(self.master_date, burst, full_path=True, file_path=file_steps[step])
+                            if network and not os.path.exists(step_file) and os.path.exists(rsmp_file):
+                                os.symlink(rsmp_file, step_file)
+                            elif not os.path.exists(step_file) and os.path.exists(ifg_file):
+                                os.symlink(ifg_file, step_file)
+
+                    # Then copy the information from the other burst files. This holds for all interferogram steps.
+                    res_step = copy.deepcopy(self.stack[date_1][burst]['ifgs'].processes[step])
+                    burst_res[date][burst]['ifgs'].insert(res_step, step)   # This is generally the same.
+
+        # And do the same for the full image if needed. This only works if the resampled (reramped) master is also concatenated...
+        if full_swath:
+            if image_res[date]['ifgs'].process_control[step] != '1':
+                # Follow a similar procedure for the full swath
+                lines = int(image_res[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']) - \
+                        int(image_res[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']) + 1
+                pixels = int(image_res[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']) - \
+                         int(image_res[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']) + 1
+
+                rsmp_file = self.image_path(self.master_date, file_path='slave_rsmp_reramped.raw')
+                ifg_file = self.image_path(self.master_date, file_path='cint.raw')
+
+                if not os.path.exists(rsmp_file) and network:
+                    print('Please concatenate the resampled reramped master if you want to do network processing!')
+
+                # Cases where we create output files relevant for further processing
+                if not network and step == 'interfero':
+                    # If we do not work with a network the interferogram consists of zeros.
+                    ifg = np.memmap(ifg_file, dtype='complex64', mode='w+', shape=(lines, pixels))
+                    ifg[:, :] = 0
+                    ifg.flush()
+                elif network and step == 'interfero' and not os.path.exists(ifg_file) and os.path.exists(rsmp_file):
+                    os.symlink(rsmp_file, ifg_file)
+
+                # Check whether we have to create a file and, if so, which file that should be.
+                elif step in file_steps.keys():
+                    if step == 'coherence':
+                        # Coherence of the master ifg with itself is 1.
+                        coh_file = self.image_path(self.master_date, file_path=file_steps[step])
+                        coh = np.memmap(coh_file, dtype='float32', mode='w+', shape=(lines, pixels))
+                        coh[:, :] = 1
+                        coh.flush()
+                    else:
+                        # For the other steps we only have to link to the interferogram. This does not make
+                        # sense when a network is used for unwrapping and filtphase, but then it is not used anyway.
+                        step_file = self.image_path(self.master_date, file_path=file_steps[step])
+                        if network and not os.path.exists(step_file) and os.path.exists(rsmp_file):
+                            os.symlink(rsmp_file, step_file)
+                        elif not os.path.exists(step_file) and os.path.exists(ifg_file):
+                            os.symlink(ifg_file, step_file)
+
+                # Then copy the information from the other burst files. This holds for all interferogram steps.
+                res_step = copy.deepcopy(self.full_swath[date_1]['ifgs'].processes[step])
+                image_res[date]['ifgs'].insert(res_step, step)  # This is generally the same.
+
+        self.update_res(dates=[date], image_stack=image_res, burst_stack=burst_res)
+
+    def fake_interferogram(self):
+        # This step fakes the creation of an interferogram by renaming the resampled slave
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        interfero_dummy = OrderedDict()
+        interfero_dummy['Data_output_file'] = 'slave_rsmp_reramped.raw'
+        interfero_dummy['Data_output_format'] = 'complex_real4'
+        interfero_dummy['First_line (w.r.t. original_master)'] = ''
+        interfero_dummy['First_pixel (w.r.t. original_master)'] = ''
+        interfero_dummy['Last_line (w.r.t. original_master)'] = ''
+        interfero_dummy['Last_pixel (w.r.t. original_master)'] = ''
+        interfero_dummy['Multilookfactor_azimuth_direction'] = '1'
+        interfero_dummy['Multilookfactor_range_direction'] = '1'
+        interfero_dummy['Number of lines (multilooked)'] = ''
+        interfero_dummy['Number of pixels (multilooked)'] = ''
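+        # In a network setup the 'interferogram' of a slave is simply its resampled (reramped) SLC;
+        # interferograms between arbitrary pairs can then be formed later by differencing these.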
+
+        # Fake an interferogram for the different bursts.
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+
+                if self.stack[date][burst]['ifgs'].process_control['interfero'] != '1':
+
+                    m_dat = self.stack[date][burst]['master'].processes['crop']
+
+                    interfero = copy.deepcopy(interfero_dummy)
+                    interfero['First_line (w.r.t. original_master)'] = m_dat['First_line (w.r.t. original_image)']
+                    interfero['First_pixel (w.r.t. original_master)'] = m_dat['First_pixel (w.r.t. original_image)']
+                    interfero['Last_line (w.r.t. original_master)'] = m_dat['Last_line (w.r.t. original_image)']
+                    interfero['Last_pixel (w.r.t. original_master)'] = m_dat['Last_pixel (w.r.t. original_image)']
+                    n_lines = int(m_dat['Last_line (w.r.t. original_image)']) - int(
+                        m_dat['First_line (w.r.t. original_image)']) + 1
+                    n_pixels = int(m_dat['Last_pixel (w.r.t. original_image)']) - int(
+                        m_dat['First_pixel (w.r.t. original_image)']) + 1
+                    interfero['Number of lines (multilooked)'] = str(n_lines)
+                    interfero['Number of pixels (multilooked)'] = str(n_pixels)
+
+                    self.stack[date][burst]['ifgs'].insert(interfero, 'interfero')
+
+        # The master and slave results files are switched to get the right correction of the slave file.
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='interfero', network=True, full_swath=False)
+
+    def interferogram(self, concatenate=True, overwrite=False, ras=False):
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        # Make an interferogram for the different bursts. (Not always necessary)
+        jobList1 = []
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+
+                if self.stack[date][burst]['ifgs'].process_control['interfero'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    os.chdir(path)
+
+                    command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.interferogram')
+                    jobList1.append({"path": path, "command": command1})
+
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command1)
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList1)
+
+        self.read_res(dates=self.coreg_dates)
+
+        if concatenate:
+            cint_name = 'cint.raw'
+            self.concatenate(cint_name, cint_name, dt=np.dtype('complex64'), overwrite=overwrite)
+
+            for date in self.coreg_dates:
+                if self.full_swath[date]['ifgs'].process_control['interfero'] != '1' or overwrite:
+                    # Add res file information
+                    no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original']
+                    no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+                    line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+                    line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']
+                    pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                    pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
+
+                    burst = self.stack[date].keys()[0]
+                    res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['interfero'])
+
+                    res['First_line (w.r.t. original_master)'] = line_0
+                    res['Last_line (w.r.t. original_master)'] = line_1
+                    res['First_pixel (w.r.t. original_master)'] = pix_0
+                    res['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res['Number of lines (multilooked)'] = no_lines
+                    res['Number of pixels (multilooked)'] = no_pixels
+
+                    self.full_swath[date]['ifgs'].insert(res, 'interfero')
+
+                    path = self.image_path(date)
+                    os.chdir(path)
+                    # Finally show preview based on cpxfiddle
+
+                    if ras:
+                        pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
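+                        # cpxfiddle flags used below: -w data width in pixels, -q output quantity
+                        # (mag/phase/mixed), -e/-s exponent and scale for the magnitude, -o sunraster
+                        # output, -c colormap, -M multilook factors, -f input format (cr4 = complex
+                        # real4), -l1/-p1 first line/pixel and -P last pixel.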
+                        if not os.path.exists('interferogram_mag.ras') or overwrite:
+                            mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' ' + cint_name + ' > interferogram_mag.ras'
+                            os.system(self.cpxfiddle + mag)
+                        if not os.path.exists('interferogram_mix.ras') or overwrite:
+                            mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' ' + cint_name + ' > interferogram_mix.ras'
+                            os.system(self.cpxfiddle + mix)
+                        if not os.path.exists('interferogram_pha.ras') or overwrite:
+                            pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' ' + cint_name + ' > interferogram_pha.ras'
+                            os.system(self.cpxfiddle + pha)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='interfero', network=False, full_swath=concatenate)
+
+    def overlapping(self, im_type='master'):
+        # This function calculates the overlapping areas between the different bursts and returns them as a
+        # list per date. The pairwise bounding-box intersection below is a minimal sketch based on the
+        # burst coordinates gathered here.
+
+        overlaps = dict()
+        for date in self.stack.keys():
+            # First make a list of all min/max coordinates of all bursts.
+            x0 = []; x1 = []; y0 = []; y1 = []
+            bursts = self.stack[date].keys()
+            for burst in bursts:
+                y0.append(int(self.stack[date][burst][im_type].processes['readfiles']['First_line (w.r.t. output_image)']))
+                y1.append(int(self.stack[date][burst][im_type].processes['readfiles']['Last_line (w.r.t. output_image)']))
+                x0.append(int(self.stack[date][burst][im_type].processes['readfiles']['First_pixel (w.r.t. output_image)']))
+                x1.append(int(self.stack[date][burst][im_type].processes['readfiles']['Last_pixel (w.r.t. output_image)']))
+
+            overlaps[date] = []
+            for b1 in range(len(bursts)):
+                for b2 in range(b1 + 1, len(bursts)):
+                    # Two bursts overlap when their bounding boxes intersect in both directions.
+                    if x0[b1] <= x1[b2] and x0[b2] <= x1[b1] and y0[b1] <= y1[b2] and y0[b2] <= y1[b1]:
+                        overlaps[date].append((bursts[b1], bursts[b2],
+                                               max(y0[b1], y0[b2]), min(y1[b1], y1[b2]),
+                                               max(x0[b1], x0[b2]), min(x1[b1], x1[b2])))
+        return overlaps
+
+    def esd(self, esd_type='ps', max_baseline='200'):
+
+        esd_folder = os.path.join(self.folder, 'esd')
+        if not os.path.exists(esd_folder):
+            os.mkdir(esd_folder)
+
+        jobList = []
+        # First run all the ESD calculations in parallel
+        for date in [self.stack.keys()[0]]:
+            bursts = self.stack[date].keys()
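+            # Burst names are assumed to follow the 'swath_x_burst_y' pattern; sort by swath, then burst number.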
+            sort_id = [int(dat[6]) * 100 + int(dat[14:]) for dat in bursts]
+            bursts = [x for (y, x) in sorted(zip(sort_id, bursts))]
+
+            for burst in bursts:
+
+                nBurst = int(burst[14:])
+                next_burst = burst[:14] + str(nBurst + 1)
+                if next_burst in bursts:
+                    stack_folder = self.folder
+                    overlap = burst + '_' + next_burst
+                    ps_select = '1'
+                    master_date = self.master_date
+                    command = 'python ' + os.path.join(self.function_path, 'ESD_ps_ds.py') + ' ' + stack_folder + ' ' \
+                              + overlap + ' ' + esd_type + ' ' + max_baseline + ' ' + master_date + ' ' + ps_select
+                    jobList.append({"path": stack_folder, "command": command})
+
+                    if not self.parallel:
+                        os.chdir(stack_folder)
+                        os.system(command)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList)
+
+        # Now load the different matrices again.
+        # Find all the overlaps and corresponding results:
+        esd_folder = os.path.join(self.stack_folder, 'esd')
+        folders = os.listdir(esd_folder)
+
+        self.diff_matrix[esd_type] = np.zeros(shape=(len(folders), len(self.stack.keys()) + 1, len(self.stack.keys()) + 1))
+        self.var_matrix[esd_type] = np.zeros(shape=(len(folders), len(self.stack.keys()) + 1, len(self.stack.keys()) + 1))
+        self.to_angle_matrix[esd_type] = np.zeros(shape=(len(folders), len(self.stack.keys()) + 1, len(self.stack.keys()) + 1))
+        self.weight_matrix[esd_type] = np.zeros(shape=(len(folders), len(self.stack.keys()) + 1, len(self.stack.keys()) + 1))
+
+        for folder, n in zip(folders, range(len(folders))):
+            f = os.path.join(esd_folder, folder)
+            diff_m = np.load(os.path.join(f, esd_type + '_diff_matrix.npy'))
+            var_m = np.load(os.path.join(f, esd_type + '_var_matrix.npy'))
+            to_angle_m = np.load(os.path.join(f, esd_type + '_to_angle_matrix.npy'))
+            w = np.load(os.path.join(f, esd_type + '_weight_matrix.npy'))
+
+            self.diff_matrix[esd_type][n, :, :] = diff_m
+            self.var_matrix[esd_type][n, :, :] = var_m
+            self.to_angle_matrix[esd_type][n, :, :] = to_angle_m
+            self.weight_matrix[esd_type][n, :, :] = w
+
+    def network_esd(self, esd_type='ps', var_calc=False):
+        # This function calculates the ESD values using a network approach
+
+        dates = self.stack.keys()
+        dates.append(self.master_date)
+        dates = sorted(dates)
+
+        w_matrix = np.sum(self.weight_matrix[esd_type], 0)
+        diff_matrix = np.sum(self.diff_matrix[esd_type] * self.weight_matrix[esd_type], 0)
+        diff_matrix[w_matrix > 0] = diff_matrix[w_matrix > 0] / w_matrix[w_matrix > 0]
+
+        angle_pixel = np.sum(self.to_angle_matrix[esd_type] * self.weight_matrix[esd_type], 0)
+        angle_pixel[w_matrix > 0] = angle_pixel[w_matrix > 0] / w_matrix[w_matrix > 0]
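+        # diff_matrix and angle_pixel now hold weighted averages over all burst overlaps: one pairwise ESD
+        # difference and one shift-to-angle factor per date pair.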
+
+        # In case we want to use the variances...
+        if var_calc:
+            var_matrix = np.zeros(w_matrix.shape)
+            idx = np.where(w_matrix != 0)
+            for n, m in zip(idx[0], idx[1]):
+                w = self.weight_matrix[esd_type][:, n, m][None, :]
+                v = self.var_matrix[esd_type][:, n, m][None, :]
+                var_matrix[n, m] = np.sum(np.dot(w.transpose(), w) * np.dot(v.transpose(), v))
+            var_matrix[w_matrix != 0] = var_matrix[w_matrix != 0] / w_matrix[w_matrix != 0]
+            std_calc = np.sqrt(var_matrix)
+
+        # Finally calculate the network
+
+        # Find the connections in the difference matrix
+        m_s = np.where(diff_matrix != 0)
+        weight = w_matrix[diff_matrix != 0]
+
+        # Find the master date
+        master_num = dates.index(self.master_date)
+        slave_nums = range(len(dates))
+        slave_nums.remove(master_num)
+
+        # Create the A matrix
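+        # Every row of A describes one date pair: +1 in the column of the first date and -1 in that of the
+        # second. The master column is removed below, so the master acts as the zero-reference datum.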
+        A = np.zeros(shape=(len(m_s[0]), np.max([np.max(m_s[0]), np.max(m_s[1])]) + 1))
+        A[range(len(m_s[0])), m_s[0]] = 1
+        A[range(len(m_s[0])), m_s[1]] = -1
+        A = np.hstack((A[:, :master_num], A[:, master_num + 1:]))
+
+        # Create the weight matrix
+        W = np.zeros((len(m_s[0]), len(m_s[0])))
+        idx = range(len(m_s[0]))
+
+        W[idx, idx] = 1 / weight
+        W = np.linalg.inv(W)
+
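+        # Weighted least-squares estimate of the per-date shifts: esd_diff = (A' W A)^-1 A' W d,
+        # with d the vector of pairwise ESD differences.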
+        esd_diff = np.dot(np.dot(np.dot(np.linalg.inv(np.dot(np.dot(A.T, W), A)), A.T), W), diff_matrix[diff_matrix != 0])
+        esd_residue = np.dot(A, esd_diff) - diff_matrix[diff_matrix != 0]
+
+        print('Mean absolute ESD residue: ' + str(np.nanmean(np.abs(esd_residue))))
+        sigma = np.std(esd_residue)
+
+        dates = sorted(self.stack.keys())
+        for date, shift, n in zip(dates, esd_diff, slave_nums):
+            self.ESD_shift[date] = shift
+            self.ESD_angle_pixel[date] = np.max([angle_pixel[n, master_num], angle_pixel[master_num, n]])
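+
+    # A usage sketch of the ESD chain (hedged example; assumes the stack and all earlier processing
+    # steps have been run):
+    #   self.esd(esd_type='ps', max_baseline='200')   # pairwise ESD estimates per burst overlap
+    #   self.network_esd(esd_type='ps')               # network inversion to one shift per date
+    #   self.ESD_correct_ramp('cint_srd.raw')         # remove the corresponding phase ramp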
+
+    def ESD_correct_ramp(self, filename='cint_srd.raw', network=False):
+        # This function corrects for ESD using the expected ramp in the resampled slave image.
+
+        self.read_res()
+        jobList = []
+
+        for date in self.stack.keys():
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['slave'].processes['readfiles']['ESD_correct'] == '0':
+                    path = self.burst_path(date, burst, full_path=True)
+
+                    offset = self.ESD_shift[date]
+                    angle = self.ESD_angle_pixel[date]
+                    if not network and filename.startswith('cint'):
+                        # Because after interferogram the slave is subtracted from the master we have to compensate.
+                        angle_pixel = str(-offset / angle)
+                    else:
+                        angle_pixel = str(offset / angle)
+                    script = os.path.join(self.function_path, 'correct_ESD.py')
+                    command = 'python ' + script + ' ' + filename + ' ' + angle_pixel
+
+                    jobList.append({"path": path, "command": command})
+                    self.stack[date][burst]['slave'].processes['readfiles']['ESD_correct'] = '1'
+
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command)
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList)
+
+        self.update_res()
+
+    def combine_slave(self, overwrite=False, ramped=False, deramped=True, ras=False):
+        # This function concatenates the resampled slave bursts, both ramped and deramped.
+
+        # Add the resample step to the .res file
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        if deramped:
+            self.concatenate('slave_rsmp.raw', 'slave_rsmp.raw', dt=np.dtype('complex64'), overwrite=overwrite)
+        if ramped:
+            self.concatenate('slave_rsmp_reramped.raw', 'slave_rsmp_reramped.raw', dt=np.dtype('complex64'), overwrite=overwrite)
+
+        for date in self.coreg_dates:
+
+            if self.full_swath[date]['slave'].process_control['resample'] != '1':
+                path = self.image_path(date)
+                os.chdir(path)
+
+                burst = self.stack[date].keys()[0]
+                slave_res = copy.deepcopy(self.stack[date][burst]['slave'].processes['resample'])
+
+                # Read number of lines
+                lines = int(self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original'])
+                pixels = int(self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original'])
+
+                # Add information about lines and pixels to the resample step.
+                slave_res['First_line (w.r.t. original_master)'] = str(1)
+                slave_res['Last_line (w.r.t. original_master)'] = str(lines)
+                slave_res['First_pixel (w.r.t. original_master)'] = str(1)
+                slave_res['Last_pixel (w.r.t. original_master)'] = str(pixels)
+                slave_res['Data_output_file'] = 'slave_rsmp_reramped.raw'
+
+                # Finally add to result file
+                self.full_swath[date]['slave'].insert(slave_res, 'resample')
+
+            pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+            if ras:
+                if deramped and (not os.path.exists('slave_rsmp.ras') or overwrite):
+                    mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                             '-p1 -P' + pixels + ' slave_rsmp.raw > slave_rsmp.ras'
+                    os.system(self.cpxfiddle + mag)
+                if ramped and (not os.path.exists('slave_rsmp_reramped.ras') or overwrite):
+                    mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                 '-p1 -P' + pixels + ' slave_rsmp_reramped.raw > slave_rsmp_reramped.ras'
+                    os.system(self.cpxfiddle + mag)
+
+        self.update_res(dates=self.coreg_dates)
+
+    def combine_master(self, overwrite=False, ramped=False, deramped=True, ras=False):
+        # This function concatenates the master files to one image. Afterwards the full master files are linked using
+        # symbolic links.
+
+        date = self.master_date
+        date_1 = self.stack.keys()[0]
+        bursts = self.stack[date_1].keys()
+        burst_res = dict()
+        image_res = dict()
+
+        self.read_res(dates=[date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
+        if deramped:
+            self.concatenate('slave_rsmp.raw', 'slave_rsmp.raw', dt=np.dtype('complex64'),
+                             overwrite=overwrite, dates=[date])
+        if ramped:
+            self.concatenate('slave_rsmp_reramped.raw', 'slave_rsmp_reramped.raw', dt=np.dtype('complex64'),
+                             overwrite=overwrite, dates=[date])
+
+        path = self.image_path(date)
+        os.chdir(path)
+
+        if image_res[date]['slave'].process_control['resample'] != '1':
+            burst = burst_res[date].keys()[0]
+            slave_res = copy.deepcopy(burst_res[date][burst]['slave'].processes['resample'])
+
+            # Read number of lines
+            lines = int(image_res[date]['master'].processes['readfiles']['Number_of_lines_original'])
+            pixels = int(image_res[date]['master'].processes['readfiles']['Number_of_pixels_original'])
+
+            # Add information about lines and pixels to the resample step.
+            slave_res['First_line (w.r.t. original_master)'] = str(1)
+            slave_res['Last_line (w.r.t. original_master)'] = str(lines)
+            slave_res['First_pixel (w.r.t. original_master)'] = str(1)
+            slave_res['Last_pixel (w.r.t. original_master)'] = str(pixels)
+            slave_res['Data_output_file'] = 'slave_rsmp_reramped.raw'
+
+            # Finally add to result file
+            image_res[date]['slave'].insert(slave_res, 'resample')
+
+        pixels = image_res[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+        if ras:
+            if deramped and (not os.path.exists('slave_rsmp.ras') or overwrite):
+                mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                         '-p1 -P' + pixels + ' slave_rsmp.raw > slave_rsmp.ras'
+                os.system(self.cpxfiddle + mag)
+            if ramped and (not os.path.exists('slave_rsmp_reramped.ras') or overwrite):
+                mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                             '-p1 -P' + pixels + ' slave_rsmp_reramped.raw > slave_rsmp_reramped.ras'
+                os.system(self.cpxfiddle + mag)
+
+        self.update_res(dates=[date], image_stack=image_res, burst_stack=burst_res)
+
+    def compref_phase(self, network=False):
+        # This function computes the reference phase for the individual bursts.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+        if network:
+            self.update_res(dates=self.coreg_dates, switch=True)
+
+        for date in self.coreg_dates:
+            job_list1 = []
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['comp_refphase'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.comprefpha')
+                    job_list1.append({"path": path, "command": command1})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command1)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list1)
+
+        if network:
+            self.read_res(dates=self.coreg_dates)
+            self.update_res(dates=self.coreg_dates, switch=True)
+
+        self.fake_master_steps(step='comp_refphase', full_swath=False)
+
+    def ref_phase(self, concatenate=True, overwrite=False, network=False, ras=False):
+        # This function subtracts the reference phase from the burst interferograms and optionally concatenates the result.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        job_list2 = []
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['subtr_refphase'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command2 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.subtrrefpha')
+                    job_list2.append({"path": path, "command": command2})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command2)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list2)
+
+        self.read_res(dates=self.coreg_dates)
+
+        if concatenate:
+            self.concatenate('cint_srp.raw', 'cint_srp.raw', dt=np.dtype('complex64'), overwrite=overwrite)
+            for date in self.coreg_dates:
+
+                if self.full_swath[date]['ifgs'].process_control['subtr_refphase'] != '1' or overwrite:
+                    # Add res file information
+                    no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original']
+                    no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+                    line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+                    line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']
+                    pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                    pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
+
+                    burst = self.stack[date].keys()[0]
+                    res_1 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['comp_refphase'])
+                    res_2 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['subtr_refphase'])
+
+                    res_2['First_line (w.r.t. original_master)'] = line_0
+                    res_2['Last_line (w.r.t. original_master)'] = line_1
+                    res_2['First_pixel (w.r.t. original_master)'] = pix_0
+                    res_2['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res_2['Number of lines (multilooked)'] = no_lines
+                    res_2['Number of pixels (multilooked)'] = no_pixels
+
+                    self.full_swath[date]['ifgs'].insert(res_1, 'comp_refphase')
+                    self.full_swath[date]['ifgs'].insert(res_2, 'subtr_refphase')
+
+                    path = self.image_path(date)
+                    os.chdir(path)
+                    # Finally show preview based on cpxfiddle
+
+                    if ras:
+                        pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+                        if not os.path.exists('interferogram_srp_mag.ras') or overwrite:
+                            mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srp.raw > interferogram_srp_mag.ras'
+                            os.system(self.cpxfiddle + mag)
+                        if not os.path.exists('interferogram_srp_mix.ras') or overwrite:
+                            mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srp.raw > interferogram_srp_mix.ras'
+                            os.system(self.cpxfiddle + mix)
+                        if not os.path.exists('interferogram_srp_pha.ras') or overwrite:
+                            pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srp.raw > interferogram_srp_pha.ras'
+                            os.system(self.cpxfiddle + pha)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='subtr_refphase', full_swath=concatenate)
+
+    def compref_dem(self, network=False):
+        # This function computes the reference DEM phase for the individual bursts.
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            job_list1 = []
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['comp_refdem'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.comprefdem')
+                    job_list1.append({"path": path, "command": command1})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command1)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list1)
+
+        self.fake_master_steps(step='comp_refdem', full_swath=False)
+
+    def ref_dem(self, concatenate=True, overwrite=False, network=False, ras=False):
+        # This function subtracts the reference DEM phase from the burst interferograms and optionally concatenates the result.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        job_list1 = []
+        job_list2 = []
+
+        for date in self.coreg_dates:
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['subtr_refdem'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command1 = self.doris_path + ' ' + os.path.join(self.input_files, 'input.subtrrefdem')
+                    job_list1.append({"path": path, "command": command1})
+                    if network:
+                        command2 = 'python ' + os.path.join(self.function_path, 'remove_dem_earth_phase.py') + ' ' + \
+                                   self.stack_folder + ' ' + date + ' ' + burst
+                        job_list2.append({"path": path, "command": command2})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command1)
+                        if network:
+                            os.system(command2)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list2)
+
+        self.read_res(dates=self.coreg_dates)
+
+        if concatenate:
+            self.concatenate('cint_srd.raw', 'cint_srd.raw', dt=np.dtype('complex64'), overwrite=overwrite)
+
+            for date in self.coreg_dates:
+
+                if self.full_swath[date]['ifgs'].process_control['subtr_refdem'] != '1' or overwrite:
+                    # Add res file information
+                    no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original']
+                    no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+                    line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+                    line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']
+                    pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                    pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
+
+                    burst = self.stack[date].keys()[0]
+                    res_1 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['comp_refdem'])
+                    res_2 = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['subtr_refdem'])
+
+                    res_1['First_line (w.r.t. original_master)'] = line_0
+                    res_1['Last_line (w.r.t. original_master)'] = line_1
+                    res_1['First_pixel (w.r.t. original_master)'] = pix_0
+                    res_1['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res_1['Number of lines (multilooked)'] = no_lines
+                    res_1['Number of pixels (multilooked)'] = no_pixels
+
+                    res_2['First_line (w.r.t. original_master)'] = line_0
+                    res_2['Last_line (w.r.t. original_master)'] = line_1
+                    res_2['First_pixel (w.r.t. original_master)'] = pix_0
+                    res_2['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res_2['Number of lines (multilooked)'] = no_lines
+                    res_2['Number of pixels (multilooked)'] = no_pixels
+
+                    self.full_swath[date]['ifgs'].insert(res_1, 'comp_refdem')
+                    self.full_swath[date]['ifgs'].insert(res_2, 'subtr_refdem')
+
+                    path = self.image_path(date)
+                    os.chdir(path)
+                    # Finally show preview based on cpxfiddle
+
+                    if ras:
+                        pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+                        if not os.path.exists('interferogram_srd_mag.ras') or overwrite:
+                            mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_mag.ras'
+                            os.system(self.cpxfiddle + mag)
+                        if not os.path.exists('interferogram_srd_mix.ras') or overwrite:
+                            mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_mix.ras'
+                            os.system(self.cpxfiddle + mix)
+                        if not os.path.exists('interferogram_srd_pha.ras') or overwrite:
+                            pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_srd.raw > interferogram_srd_pha.ras'
+                            os.system(self.cpxfiddle + pha)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='comp_refdem', full_swath=concatenate)
+        self.fake_master_steps(step='subtr_refdem', full_swath=concatenate)
+
+    def coherence(self, concatenate=True, overwrite=False, coh_type='single_master', ras=False):
+        # This function computes the coherence for the individual bursts.
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            job_list = []
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['coherence'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    if coh_type == 'single_master':
+                        command = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coherence')
+                    elif coh_type == 'network':
+                        command = self.doris_path + ' ' + os.path.join(self.input_files, 'input.coherence_network')
+                    else:
+                        print('coh_type ' + coh_type + ' is not recognized; use single_master or network')
+                        continue
+                    job_list.append({"path": path, "command": command})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list)
+
+        self.read_res(dates=self.coreg_dates)
+
+        if concatenate:
+            self.concatenate('coherence.raw', 'coherence.raw', dt=np.dtype('float32'), overwrite=overwrite)
+
+            for date in self.coreg_dates:
+
+                if self.full_swath[date]['ifgs'].process_control['coherence'] != '1' or overwrite:
+                    # Add res file information
+                    no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original']
+                    no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+                    line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+                    line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']
+                    pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                    pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
+
+                    burst = self.stack[date].keys()[0]
+                    res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['coherence'])
+
+                    res['First_line (w.r.t. original_master)'] = line_0
+                    res['Last_line (w.r.t. original_master)'] = line_1
+                    res['First_pixel (w.r.t. original_master)'] = pix_0
+                    res['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res['Number of lines (multilooked)'] = no_lines
+                    res['Number of pixels (multilooked)'] = no_pixels
+
+                    self.full_swath[date]['ifgs'].insert(res, 'coherence')
+
+                    path = self.image_path(date)
+                    os.chdir(path)
+                    # Finally show preview based on cpxfiddle
+
+                    if ras:
+                        pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+                        if not os.path.exists('coherence.ras') or overwrite:
+                            mag = ' -w ' + pixels + ' -q normal -o sunraster -b -c gray -M 20/5 -r 0.0/1.0 -f r4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' coherence.raw > coherence.ras'
+                            os.system(self.cpxfiddle + mag)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='coherence', full_swath=concatenate)
+
+    def phasefilt(self, concatenate=True, overwrite=False, ras=False):
+        # This function performs the phase filtering of the individual bursts.
+
+        if len(self.coreg_dates) == 0:
+            return
+        self.read_res(dates=self.coreg_dates)
+
+        for date in self.coreg_dates:
+            job_list = []
+            for burst in self.stack[date].keys():
+                if self.stack[date][burst]['ifgs'].process_control['filtphase'] != '1':
+                    path = self.burst_path(date, burst, full_path=True)
+                    command = self.doris_path + ' ' + os.path.join(self.input_files, 'input.phasefilt')
+                    job_list.append({"path": path, "command": command})
+                    if not self.parallel:
+                        os.chdir(path)
+                        os.system(command)
+            if self.parallel:
+                jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+                jobs.run(job_list)
+
+        self.read_res(dates=self.coreg_dates)
+
+        if concatenate:
+
+            self.concatenate('cint.0.2filtered', 'cint_filt.raw', dt=np.dtype('complex64'), overwrite=overwrite)
+            for date in self.coreg_dates:
+
+                if self.full_swath[date]['ifgs'].process_control['filtphase'] != '1' or overwrite:
+                    # Add res file information
+                    no_lines = self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original']
+                    no_pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+                    line_0 = self.full_swath[date]['master'].processes['readfiles']['First_line (w.r.t. output_image)']
+                    line_1 = self.full_swath[date]['master'].processes['readfiles']['Last_line (w.r.t. output_image)']
+                    pix_0 = self.full_swath[date]['master'].processes['readfiles']['First_pixel (w.r.t. output_image)']
+                    pix_1 = self.full_swath[date]['master'].processes['readfiles']['Last_pixel (w.r.t. output_image)']
+
+                    burst = self.stack[date].keys()[0]
+                    res = copy.deepcopy(self.stack[date][burst]['ifgs'].processes['filtphase'])
+
+                    res['First_line (w.r.t. original_master)'] = line_0
+                    res['Last_line (w.r.t. original_master)'] = line_1
+                    res['First_pixel (w.r.t. original_master)'] = pix_0
+                    res['Last_pixel (w.r.t. original_master)'] = pix_1
+                    res['Number of lines (multilooked)'] = no_lines
+                    res['Number of pixels (multilooked)'] = no_pixels
+
+                    self.full_swath[date]['ifgs'].insert(res, 'filtphase')
+
+                    path = self.image_path(date)
+                    os.chdir(path)
+                    # Finally show preview based on cpxfiddle
+
+                    if ras:
+                        pixels = self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original']
+
+                        if not os.path.exists('interferogram_filt_mag.ras') or overwrite:
+                            mag = ' -w ' + pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_filt.raw > interferogram_filt_mag.ras'
+                            os.system(self.cpxfiddle + mag)
+                        if not os.path.exists('interferogram_filt_mix.ras') or overwrite:
+                            mix = ' -w ' + pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_filt.raw > interferogram_filt_mix.ras'
+                            os.system(self.cpxfiddle + mix)
+                        if not os.path.exists('interferogram_filt_pha.ras') or overwrite:
+                            pha = ' -w ' + pixels + ' -q phase -o sunraster -b -c jet -M 20/5 -f cr4 -l1 ' \
+                                                             '-p1 -P' + pixels + ' cint_filt.raw > interferogram_filt_pha.ras'
+                            os.system(self.cpxfiddle + pha)
+
+        self.update_res(dates=self.coreg_dates)
+
+        self.fake_master_steps(step='filtphase', full_swath=concatenate)
+
+    def unwrap(self, ras=True):
+        # This function is used to call the unwrapping program snaphu via doris.
+
+        for date in self.coreg_dates:
+            path = self.image_path(date)
+            os.chdir(path)
+
+            # First create a phase input file for unwrapping
+            pixels = self.full_swath[date]['ifgs'].processes['filtphase']['Number of pixels (multilooked)']
+            print('Number of pixels: ' + pixels)
+            pha = ' -w ' + pixels + ' -q phase -o float -M 1/1 -f cr4 -l1 ' \
+                                    '-p1 -P' + pixels + ' cint_filt_ml.raw > unwrap_input.raw'
+            os.system(self.cpxfiddle + pha)
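+            # cpxfiddle writes the wrapped phase as plain float32 (unwrap_input.raw), which is the input
+            # for the snaphu unwrapping configured in input.unwrap.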
+
+            command = self.doris_path + ' ' + os.path.join(self.input_files, 'input.unwrap')
+            os.system(command)
+
+            # And create an image using cpxfiddle
+            if ras:
+                pha = ' -w ' + pixels + ' -q normal -o sunraster -b -c jet -M 1/1 -f r4 -l1 ' \
+                                        '-p1 -P' + pixels + ' unwrapped.raw > unwrapped.ras'
+                os.system(self.cpxfiddle + pha)
+
+        self.fake_master_steps(step='unwrap', burst_proc=False)
+
+    def calc_coordinates(self, createdem=True):
+        # Calculate the coordinates of grid cells
+
+        # The master date is used as the reference for the coordinate grids.
+        date = self.master_date
+        date_1 = self.stack.keys()[0]
+        bursts = self.stack[date_1].keys()
+        burst_res = dict()
+        image_res = dict()
+
+        self.read_res(dates=[self.master_date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
+
+        print('Coordinates are created for master date ' + date)
+
+        doris_dir = self.doris_path
+
+        for burst in burst_res[date].keys():
+            dem_path = self.burst_path(date, burst, 'dem_radar.raw', full_path=True)
+            dem_path_s = self.burst_path(date_1, burst, 'dem_radar.raw', full_path=True)
+
+            if not os.path.exists(dem_path) and createdem:
+                if os.path.exists(dem_path_s):
+                    os.symlink(dem_path_s, dem_path)
+
+                # path = self.burst_path(date, burst, full_path=True)
+
+                # Create grid coordinates and heights
+                # dem_inputfile = os.path.join(self.input_files, 'input.createdem')
+
+                # Run if one of the files does not exist...
+                # geocode_master(folder, geocode_inputfile, dem_inputfile, doris_dir)
+                # this function geocode the bursts of a master file, based on a DEM
+                # Run the create DEM command
+
+                # command1 = self.doris_path + ' ' + dem_inputfile
+                # job_list1.append({"path": path, "command": command1})
+                #if not self.parallel:
+                #    os.chdir(path)
+                #    os.system(command1)
+        #if self.parallel:
+        #    jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+        #    jobs.run(job_list1)
+
+        self.read_res(dates=[self.master_date], bursts=bursts, burst_stack=burst_res, image_stack=image_res)
+
+        for burst in burst_res[date].keys():
+            if burst_res[date][burst]['ifgs'].process_control['slant2h'] != '1':
+                resultfile = copy.deepcopy(self.stack[date_1][burst]['ifgs'])
+
+                # Add the slant2height information. This is meant to fake the doris script
+                sl2h_dat = collections.OrderedDict()
+                sl2h_dat['Method'] = 'schwabisch'
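+                # 'schwabisch' is the name of the Doris slant2height method whose output is faked here.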
+                sl2h_dat['Data_output_file'] = 'dem_radar.raw'
+                sl2h_dat['Data_output_format'] = 'real4'
+                sl2h_dat['First_line (w.r.t. original_master)'] = resultfile.processes['comp_refdem'][
+                    'First_line (w.r.t. original_master)']
+                sl2h_dat['Last_line (w.r.t. original_master)'] = resultfile.processes['comp_refdem'][
+                    'Last_line (w.r.t. original_master)']
+                sl2h_dat['First_pixel (w.r.t. original_master)'] = resultfile.processes['comp_refdem'][
+                    'First_pixel (w.r.t. original_master)']
+                sl2h_dat['Last_pixel (w.r.t. original_master)'] = resultfile.processes['comp_refdem'][
+                    'Last_pixel (w.r.t. original_master)']
+                sl2h_dat['Multilookfactor_azimuth_direction'] = resultfile.processes['comp_refdem'][
+                    'Multilookfactor_azimuth_direction']
+                sl2h_dat['Multilookfactor_range_direction'] = resultfile.processes['comp_refdem'][
+                    'Multilookfactor_range_direction']
+                sl2h_dat['Ellipsoid (name,a,b)'] = 'WGS84 6.37814e+06 6.35675e+06'
+
+                # Add this to all date .res files
+                burst_res[date][burst]['ifgs'].insert(sl2h_dat, process='slant2h')
+        self.update_res(dates=[date], image_stack=image_res, burst_stack=burst_res)
+
+        job_list1 = []
+        for burst in burst_res[date].keys():
+            if burst_res[date][burst]['ifgs'].process_control['geocoding'] != '1':
+                path = self.burst_path(date, burst, full_path=True)
+                geocode_inputfile = os.path.join(self.input_files, 'input.geocode')
+                # Generate lat / lon files
+                command1 = doris_dir + ' ' + geocode_inputfile
+                job_list1.append({"path": path, "command": command1})
+
+                if not self.parallel:
+                    os.chdir(path)
+                    os.system(command1)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+
+        self.concatenate('phi.raw', 'phi.raw', dt=np.dtype('float32'), dates=[date])
+        self.concatenate('lam.raw', 'lam.raw', dt=np.dtype('float32'), dates=[date])
+        self.concatenate('dem_radar.raw', 'dem_radar.raw', dt=np.dtype('float32'), dates=[date])
+
+        dates = self.coreg_dates
+
+        # create links
+        for burst in burst_res[date].keys():
+            if burst_res[date][burst]['ifgs'].process_control['slant2h'] == 1 and \
+                            burst_res[date][burst]['ifgs'].process_control['geocoding'] == 1:
+
+                res_sl2h = copy.deepcopy(burst_res[date][burst]['ifgs'].processes['slant2h'])
+                res_geo = copy.deepcopy(burst_res[date][burst]['ifgs'].processes['geocoding'])
+                for date_s in dates:
+                    if not self.stack[date_s][burst]['ifgs'].process_control['slant2h'] == '1':
+                        self.stack[date_s][burst]['ifgs'].insert(res_sl2h, process='slant2h')
+                    if not self.stack[date_s][burst]['ifgs'].process_control['geocoding'] == '1':
+                        self.stack[date_s][burst]['ifgs'].insert(res_geo, process='geocoding')
+
+        # Create symlinks for bursts
+        for burst in burst_res[date].keys():
+            dat_lam = self.burst_path(self.master_date, burst, 'lam.raw', full_path=True)
+            dat_phi = self.burst_path(self.master_date, burst, 'phi.raw', full_path=True)
+            dat_dem = self.burst_path(self.master_date, burst, 'dem_radar.raw', full_path=True)
+
+            for date_s in dates:
+                link_lam = self.burst_path(date_s, burst, 'lam.raw', full_path=True)
+                link_phi = self.burst_path(date_s, burst, 'phi.raw', full_path=True)
+                link_dem = self.burst_path(date_s, burst, 'dem_radar.raw', full_path=True)
+
+                if not os.path.exists(link_lam):
+                    os.symlink(dat_lam, link_lam)
+                if not os.path.exists(link_phi):
+                    os.symlink(dat_phi, link_phi)
+                if not os.path.exists(link_dem):
+                    os.symlink(dat_dem, link_dem)
+
+        # Create symlinks for images
+        for date in dates:
+            dat_lam = self.image_path(self.master_date, 'lam.raw')
+            dat_phi = self.image_path(self.master_date, 'phi.raw')
+            dat_dem = self.image_path(self.master_date, 'dem_radar.raw')
+
+            link_lam = self.image_path(date, 'lam.raw')
+            link_phi = self.image_path(date, 'phi.raw')
+            link_dem = self.image_path(date, 'dem_radar.raw')
+
+            if not os.path.exists(link_lam):
+                os.symlink(dat_lam, link_lam)
+            if not os.path.exists(link_phi):
+                os.symlink(dat_phi, link_phi)
+            if not os.path.exists(link_dem):
+                os.symlink(dat_dem, link_dem)
+
+    def concatenate(self, burst_file, master_file, dt=np.dtype(np.float32), overwrite=False, dates=[], multilooked='False', res_type='master'):
+        # Concatenate all bursts into a single full swath product. If burst_file = 'master' the input master files are read.
+        # This function also accepts the cpxint16 datatype.
+
+        if not dates:
+            dates = self.stack.keys()
+        job_list1 = []
+
+        for date in dates:
+            path = self.image_path(date)
+            final_path = os.path.join(path, master_file)
+
+            if not os.path.exists(final_path) or overwrite:
+                command1 = 'python ' + os.path.join(self.function_path, 'concatenate_decatenate.py') + ' ' + path \
+                           + ' concatenate ' + burst_file + ' ' + dt.name + ' ' + multilooked + ' ' + res_type
+                job_list1.append({"path": path, "command": command1})
+
+                if not self.parallel:
+                    os.chdir(path)
+                    os.system(command1)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+
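+    # Example of the command assembled above, with hypothetical paths standing in
+    # for the real stack layout:
+    #
+    #   python /d/functions/concatenate_decatenate.py /d/stack/20150101 \
+    #       concatenate cint.raw float32 False master
+    #
+    # i.e. <script> <image path> concatenate <burst file> <dtype name> <multilooked> <res type>.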
+    def multilook(self, ra=40, az=10, step='filtphase'):
+        # This function does the multilooking using cpxfiddle and updates the resolution of the given step. Be
+        # careful: if you want to continue processing with the smaller multilooked file, e.g. for unwrapping, this
+        # should be the last mentioned step.
+
+        if step == 'filtphase':
+            filename = 'cint.0.2filtered'
+            filename2 = 'cint_filt_ml.raw'
+            type = 'cr4'
+        elif step == 'coherence':
+            filename = 'coherence.raw'
+            filename2 = 'coherence_ml.raw'
+            type = 'r4'
+        elif step == 'subtr_refdem':
+            filename = 'cint_srd.raw'
+            filename2 = 'cint_srd_ml.raw'
+            type = 'cr4'
+        elif step == 'subtr_refpha':
+            filename = 'cint_srp.raw'
+            filename2 = 'cint_srp_ml.raw'
+            type = 'cr4'
+        elif step == 'interfero':
+            filename = 'cint.raw'
+            filename2 = 'cint_ml.raw'
+            type = 'cr4'
+        else:
+            print('Choose for step between filtphase, coherence, subtr_refdem, subtr_refpha and interfero')
+            return
+
+        self.read_res()
+
+        for date in self.coreg_dates:
+            print(date)
+
+            lines = int(self.full_swath[date]['master'].processes['readfiles']['Number_of_lines_original'])
+            pixels = int(self.full_swath[date]['master'].processes['readfiles']['Number_of_pixels_original'])
+
+            date_path = self.image_path(date)
+            os.chdir(date_path)
+
+            # Create cpxfiddle command
+            command = ' -w ' + str(pixels) + ' -o float -M ' + str(ra) + '/'+ str(az) + ' -f ' + type + ' ' \
+                                             '-l1 -p1 -P' + str(pixels) + ' -q normal ' + filename + ' > ' + filename2
+            os.system(self.cpxfiddle + command)
+
+            # Update res file
+            new_lines = str(int(np.floor(lines / az)))
+            new_pixels = str(int(np.floor(pixels / ra)))
+
+            res = self.full_swath[date]['ifgs'].processes[step]
+
+            res['Data_output_file'] = filename2
+            res['Multilookfactor_azimuth_direction'] = str(az)
+            res['Multilookfactor_range_direction'] = str(ra)
+            res['Number of lines (multilooked)'] = new_lines
+            res['Number of pixels (multilooked)'] = new_pixels
+
+            self.full_swath[date]['ifgs'].processes[step] = res
+
+            # Finally create an image using cpxfiddle (full resolution)
+            if type == 'r4':
+                # Only show the magnitude
+                if step == 'coherence':
+                    mag = ' -w ' + new_pixels + ' -q normal -o sunraster -b -c gray -M 20/5 -r 0.0/1.0 -f r4 -l1 ' \
+                                            '-p1 -P' + new_pixels + ' ' + filename2 + ' > ' + filename2[:-4] + '.ras'
+                else:
+                    mag = ' -w ' + new_pixels + ' -e 0.3 -s 1.0 -q normal -o sunraster -b -c gray -M 1/1 -f r4 -l1 ' \
+                                            '-p1 -P' + new_pixels + ' ' + filename2 + ' > ' + filename2[:-4] + '.ras'
+                os.system(self.cpxfiddle + mag)
+            elif type == 'cr4':
+                # Show the 3 images
+                mag = ' -w ' + new_pixels + ' -e 0.3 -s 1.0 -q mag -o sunraster -b -c gray -M 1/1 -f cr4 -l1 ' \
+                                                 '-p1 -P' + new_pixels + ' ' + filename2 + ' > ' + filename2[:-4] + '_mag.ras'
+                os.system(self.cpxfiddle + mag)
+                mix = ' -w ' + new_pixels + ' -e 0.3 -s 1.2 -q mixed -o sunraster -b -c jet -M 1/1 -f cr4 -l1 ' \
+                                                 '-p1 -P' + new_pixels + ' ' + filename2 + ' > ' + filename2[:-4] + '_mix.ras'
+                os.system(self.cpxfiddle + mix)
+                pha = ' -w ' + new_pixels + ' -q phase -o sunraster -b -c jet -M 1/1 -f cr4 -l1 ' \
+                                                 '-p1 -P' + new_pixels + ' ' + filename2 + ' > ' + filename2[:-4] + '_pha.ras'
+                os.system(self.cpxfiddle + pha)
+
+        self.update_res()
+
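+    # Example: self.multilook(ra=40, az=10, step='interfero') reduces cint.raw to
+    # cint_ml.raw and would run a cpxfiddle command along these lines (assuming a
+    # hypothetical width of 68000 pixels):
+    #
+    #   cpxfiddle -w 68000 -o float -M 40/10 -f cr4 -l1 -p1 -P68000 -q normal \
+    #       cint.raw > cint_ml.raw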
+    def decatenate(self, burst_file, master_file, dt=np.dtype(np.float32), dates=[], multilooked=False, res_type='master'):
+        # Split the full swath into separate burst products (used to split DEM results per burst).
+
+        if not dates:
+            dates = self.stack.keys()
+        job_list1 = []
+
+        for date in dates:
+            path = self.image_path(date)
+
+            command1 = 'python ' + os.path.join(self.function_path, 'concatenate_decatenate.py') + ' ' + path + ' decatenate ' + burst_file + ' ' + dt.str
+            job_list1.append({"path": path, "command": command1})
+            if not self.parallel:
+                os.chdir(path)
+                os.system(command1)
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(job_list1)
+
+    # The following functions are helpers which make it easier to access the correct folders and files:
+
+    def burst_path(self, date='', key='', file_path='', stack_folder=False, dat_type='', full_path=False):
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+
+        if dat_type:
+            file_path = '_iw_' + key[6] + '_burst_' + key[14:]
+            if dat_type == 'master' or dat_type == 'slave':
+                file_path = dat_type + file_path + '.raw'
+            elif dat_type == 'master_deramped' or dat_type == 'slave_deramped':
+                file_path = dat_type[:-9] + file_path + '_deramped.raw'
+
+        if full_path is True:
+            if len(date) == 10:
+                date_folder = date[:4] + date[5:7] + date[8:10]
+            else:
+                date_folder = date
+            swath_folder = key[:7]
+            burst_folder = key[8:]
+            if file_path:
+                file_path = os.path.join(stack_folder, date_folder, swath_folder, burst_folder, file_path)
+            else:
+                file_path = os.path.join(stack_folder, date_folder, swath_folder, burst_folder)
+
+        return file_path
+
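+    # Example (hypothetical stack folder): for date '2015-01-01' and key
+    # 'swath_1_burst_3' with dat_type='slave',
+    #   self.burst_path('2015-01-01', 'swath_1_burst_3', dat_type='slave', full_path=True)
+    # resolves to <stack_folder>/20150101/swath_1/burst_3/slave_iw_1_burst_3.raw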
+    def burst_paths(self, stack, dates=[], stack_folder=False, dat_type='', file_path='', full_path=False):
+        # Creates a list of all burst paths for this file, given the dates.
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+        if not dates:
+            dates = stack.keys()
+
+        paths = []
+        for date in dates:
+            for key in stack[date].keys():
+                burst_path = self.burst_path(date, key, stack_folder=stack_folder, dat_type=dat_type, file_path=file_path, full_path=full_path)
+                paths.append(burst_path)
+
+        return paths
+
+    def swath_path(self, date, key, stack_folder=False):
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+
+        if len(date) == 10:
+            date_folder = date[:4] + date[5:7] + date[8:10]
+        else:
+            date_folder = date
+        swath_burst = key.split('_')
+        file_path = os.path.join(stack_folder, date_folder, swath_burst[0] + '_' + swath_burst[1])
+
+        return file_path
+
+    def swath_paths(self, stack, dates=[], stack_folder=False):
+        # Creates a list of all swath paths given the dates.
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+        if not dates:
+            dates = stack.keys()
+
+        paths = []
+        for date in dates:
+            keys = set([key[:7] for key in stack[date].keys()])
+            for key in keys:
+                swath_path = self.swath_path(date, key, stack_folder)
+                paths.append(swath_path)
+        return paths
+
+    def image_path(self, date, file_path='', stack_folder=False):
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+
+        if len(date) == 10:
+            date_folder = date[:4] + date[5:7] + date[8:10]
+        else:
+            date_folder = date
+        if file_path:
+            file_path = os.path.join(stack_folder, date_folder, file_path)
+        else:
+            file_path = os.path.join(stack_folder, date_folder)
+
+        return file_path
+
+    def image_paths(self, stack, dates=[], file_path='', stack_folder=False):
+        # Creates a list of all image paths
+        if stack_folder is False:
+            stack_folder = self.stack_folder
+        if not dates:
+            dates = stack.keys()
+
+        paths = []
+        for date in dates:
+            image_path = self.image_path(date, file_path, stack_folder)
+            paths.append(image_path)
+
+        return paths
+
+    @staticmethod
+    def read_image_paths(master_key, stack_folder):
+        # This functions reads all the current folders from a stack.
+
+        # Select different categories of directories.
+        stack_dirs = next(os.walk(stack_folder))[1]
+        master = os.path.join(stack_folder, master_key)
+        esd = os.path.join(stack_folder, 'esd')
+        slaves = [os.path.join(stack_folder, dir_s) for dir_s in stack_dirs if dir_s != master_key and len(dir_s) == 8]
+        ifg = [os.path.join(stack_folder, dir_s) for dir_s in stack_dirs if len(dir_s) == 21]
+
+        return master, slaves, ifg, esd
+
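+    # Example (hypothetical layout): with master_key '20150101' and a stack folder
+    # containing the folders 20150101, 20150113 and esd,
+    #   read_image_paths('20150101', stack_folder)
+    # returns the master folder, the 8-character slave date folders, the
+    # 21-character interferogram folders and the esd folder.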
+    def read_burst_paths(self, master_key, stack_folder, dat_type='list'):
+        # The output can be in 2 formats. A list with all
+        # folders, including image and burst folders, or a dict structure.
+        if dat_type == 'list':
+            folders = [[], [], []]
+        elif dat_type == 'dict':
+            folders = [dict(), dict(), dict()]
+        else:
+            print("dat_type should be either 'list' or 'dict'")
+            return
+
+        master, slaves, ifg = self.read_image_paths(master_key, stack_folder)[:3]
+
+        # And find the corresponding folders:
+        for fold, dirs in zip(folders, [[master], slaves, ifg]):
+            for direc in dirs:
+                if dat_type == 'list':
+                    fold.append(os.path.basename(direc))
+                elif dat_type == 'dict':
+                    fold[os.path.basename(direc)] = dict()
+                swaths = next(os.walk(direc))[1]
+                for swath in swaths:
+                    if dat_type == 'dict':
+                        fold[os.path.basename(direc)][swath] = dict()
+                    bursts = next(os.walk(os.path.join(direc, swath)))[1]
+                    for burst in bursts:
+                        if dat_type == 'dict':
+                            fold[os.path.basename(direc)][swath][burst] = []
+                        elif dat_type == 'list':
+                            fold.append(os.path.join(os.path.basename(direc), swath, burst))
+
+        # Now return master, slave and ifg setup
+        return folders[0], folders[1], folders[2]
+
+    def update_res(self, dates='default', stack_folder='', burst_stack=list(), image_stack=list()):
+        # Save to .res file based on the burst objects.
+        if not burst_stack:
+            burst_stack = self.stack
+        if not image_stack:
+            image_stack = self.full_swath
+        if dates == 'default':
+            dates = self.stack.keys()
+        if not stack_folder:
+            stack_folder = self.stack_folder
+
+        for date in dates:
+            for burst in burst_stack[date].keys():
+
+                files = burst_stack[date][burst].keys()
+                if 'slave' in files:
+                    slave_res = self.burst_path(date, burst, 'slave.res', stack_folder=stack_folder, full_path=True)
+                    burst_stack[date][burst]['slave'].write(new_filename=slave_res)
+                if 'master' in files:
+                    master_res = self.burst_path(date, burst, 'master.res', stack_folder=stack_folder, full_path=True)
+                    burst_stack[date][burst]['master'].write(new_filename=master_res)
+                if 'ifgs' in files:
+                    ifgs_res = self.burst_path(date,burst,'ifgs.res', stack_folder=stack_folder, full_path=True)
+                    burst_stack[date][burst]['ifgs'].write(new_filename=ifgs_res)
+
+            files = image_stack[date].keys()
+            if 'slave' in files:
+                slave_res = self.image_path(date, 'slave.res', stack_folder=stack_folder)
+                image_stack[date]['slave'].write(new_filename=slave_res)
+            if 'master' in files:
+                master_res = self.image_path(date, 'master.res', stack_folder=stack_folder)
+                image_stack[date]['master'].write(new_filename=master_res)
+            if 'ifgs' in files:
+                ifgs_res = self.image_path(date, 'ifgs.res', stack_folder=stack_folder)
+                image_stack[date]['ifgs'].write(new_filename=ifgs_res)
+
+    def read_res(self, coreg_dates=False, dates='default', stack_folder='', burst_stack='', image_stack='', bursts=[]):
+        # Read .res data to the burst objects. Generally done after a processing step.
+        if burst_stack == '':
+            burst_stack = self.stack
+        if image_stack == '':
+            image_stack = self.full_swath
+        if dates == 'default':
+            dates = self.stack.keys()
+        if not stack_folder:
+            stack_folder = self.stack_folder
+        if not bursts:
+            if dates[0] in burst_stack.keys():
+                bursts = burst_stack[dates[0]].keys()
+            else:
+                date_1 = self.stack.keys()[0]
+                bursts = self.stack[date_1].keys()
+        # TODO Maybe add search for folders and bursts if no specific date or burst is specified?
+
+        for date in dates:
+            if not date in burst_stack.keys():
+                burst_stack[date] = dict()
+            for burst in bursts:
+                if not burst in burst_stack[date].keys():
+                    burst_stack[date][burst] = dict()
+
+                slave_res = self.burst_path(date, burst, 'slave.res', stack_folder=stack_folder, full_path=True)
+                master_res = self.burst_path(date, burst, 'master.res', stack_folder=stack_folder, full_path=True)
+                ifgs_res = self.burst_path(date, burst,'ifgs.res', stack_folder=stack_folder, full_path=True)
+
+                if os.path.exists(slave_res):
+                    burst_stack[date][burst]['slave'] = ResData(filename=slave_res)
+                if os.path.exists(master_res):
+                    burst_stack[date][burst]['master'] = ResData(filename=master_res)
+                if os.path.exists(ifgs_res):
+                    burst_stack[date][burst]['ifgs'] = ResData(filename=ifgs_res)
+
+            if not date in image_stack.keys():
+                image_stack[date] = dict()
+
+            slave_res = self.image_path(date, 'slave.res', stack_folder=stack_folder)
+            master_res = self.image_path(date, 'master.res', stack_folder=stack_folder)
+            ifgs_res = self.image_path(date, 'ifgs.res', stack_folder=stack_folder)
+
+            if os.path.exists(slave_res):
+                image_stack[date]['slave'] = ResData(filename=slave_res)
+            if os.path.exists(master_res):
+                image_stack[date]['master'] = ResData(filename=master_res)
+            if os.path.exists(ifgs_res):
+                image_stack[date]['ifgs'] = ResData(filename=ifgs_res)
+
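+    # The .res handling follows a read -> modify -> write cycle. A minimal sketch
+    # of the intended use (see the geocoding step above):
+    #
+    #   self.read_res(dates=[date])                            # load .res files
+    #   self.stack[date][burst]['ifgs'].insert(dat, process='slant2h')
+    #   self.update_res(dates=[date])                          # write them back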
+    def del_res(self, type='ifgs', images=False, bursts=True, dates='default', stack_folder=''):
+
+        if dates == 'default':
+            dates = self.stack.keys()
+
+        for date in dates:
+            for burst in self.stack[date].keys():
+                if bursts:
+                    res = self.burst_path(date,burst, type + '.res', stack_folder=stack_folder)
+                    if os.path.exists(res):
+                        os.remove(res)
+
+            if images:
+                res = self.image_path(date, type + '.res', stack_folder=stack_folder)
+                if os.path.exists(res):
+                    os.remove(res)
+
+    def del_process(self, process, type='ifgs', images=False, bursts=True, dates='default', burst_stack=list(), image_stack=list()):
+        # Delete a process from the .res files.
+
+        if not burst_stack:
+            burst_stack = self.stack
+        if not image_stack:
+            image_stack = self.full_swath
+        if dates == 'default':
+            dates = self.stack.keys()
+
+        self.read_res(dates=dates) # Read data
+
+        for date in dates:
+            for burst in burst_stack[date].keys():
+                if bursts and burst_stack[date][burst][type].process_control[process] == '1':
+                    burst_stack[date][burst][type].delete(process)
+            if images and image_stack[date][type].process_control[process] == '1':
+                image_stack[date][type].delete(process)
+        self.update_res(dates=dates)
+
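+    # Example: removing the 'filtphase' step from all burst and image ifgs.res
+    # files so it can be rerun (a sketch; process names follow the doris
+    # process_control keys):
+    #
+    #   self.del_process('filtphase', type='ifgs', images=True)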
diff --git a/doris_stack/main_code/stack.py b/doris_stack/main_code/stack.py
new file mode 100644
index 0000000..f7a2101
--- /dev/null
+++ b/doris_stack/main_code/stack.py
@@ -0,0 +1,735 @@
+# This script gathers available sentinel files from the database and checks the coverage in space and time for this data
+# stack.
+
+import os
+import warnings
+from collections import Counter, OrderedDict
+from datetime import datetime
+import shutil
+
+import fiona
+import numpy as np
+from shapely.geometry import shape, mapping, box
+from shapely.ops import cascaded_union
+
+import image
+from doris.doris_stack.functions.load_shape_unzip import extract_kml_preview, shape_im_kml, load_shape
+from doris.doris_stack.main_code.dorisparameters import DorisParameters
+from doris.doris_stack.functions.burst_metadata import center_shape_from_res
+from doris.doris_stack.main_code.jobs import Jobs
+
+
+class StackData(object):
+    # This class holds information for a full datastack of sentinel data and is used to select relevant images and
+    # bursts based on a given area of interest.
+
+    def __init__(self, track_dir, shape_dat, buffer=0.02, start_date='2014-04-01', end_date='', polarisation='vh', path='', db_type=1, precise_dir=''):
+        # Initialize variables:
+
+        Jobs.id = 0
+
+        # Datastack folder where all data from the datastack is stored (database, shapes, burst data, res files and results)
+        # You should have write access to this folder!
+        self.path = path
+        self.unzip_path = ''
+
+        # Search path, shape, buffer and polarisation for this datastack. Currently only one polarisation is implemented. If
+        # needed this can be extended later.
+        self.search_path = []
+        self.start_date = []
+        self.end_date = []
+        self.master_date = []
+        self.shape = []
+        self.shape_filename = shape_dat
+        self.buffer = []
+        self.polarisation = ''
+        self.precise_orbits = ''
+
+        # All images included in this datastack, including lists of shapes and acquisition dates
+        self.images = []
+        self.image_files = []
+        self.image_shapes = []
+        self.image_dates = []
+
+        # The shapes and names of the swaths
+        self.swath_names = list()
+        self.swath_shapes = list()
+
+        # The resulting dates, with underlying bursts. This information can be used to create res files and images
+        # of individual bursts. Variable data includes a structure using (dates > swaths > bursts).
+        self.dates = list()
+        self.datastack = dict()
+        self.concatenated = dict()
+        self.coordinates = dict()
+        self.burst_names = list()
+        self.burst_shapes = list()
+        self.burst_centers = list()
+
+        # Some important characteristics of the dataset of bursts. (matrices with names on y and dates on x axis)
+        self.burst_no = 0
+        self.burst_availability = []
+        self.burst_lon = []
+        self.burst_lat = []
+        self.burst_baselines = []
+
+        # Temporary variable to store images which are not yet checked for shape or dates.
+        self.image_dump = []
+
+        # parallel computing:
+        doris_parameters = DorisParameters(os.path.dirname(self.path))
+        self.doris_parameters = doris_parameters
+        self.nr_of_jobs = doris_parameters.nr_of_jobs
+        self.parallel = doris_parameters.parallel
+        self.function_path = doris_parameters.function_path
+
+        ####################################################################
+
+        # This function initializes the datastack using a search path, start/end dates and a buffer shape. The start and
+        # end dates should have the format yyyy-mm-dd and the shape should either be a shapefile or a list of coordinate
+        # pairs, e.g. [[lat, lon], [lat, lon], etc.]. Minimum input is a folder which contains the .SAFE folders (a track folder)
+
+        if not track_dir or not os.path.exists(track_dir):
+            warnings.warn('This function needs an existing path as input!')
+            return
+
+        self.search_files(track_dir)
+
+        if shape_dat:
+            self.create_shape(shape_dat, buffer)
+
+        if end_date:
+            self.end_date = np.datetime64(end_date).astype('datetime64[s]') + np.timedelta64(1, 'D').astype('timedelta64[s]')
+        else:
+            self.end_date = np.datetime64('now').astype('datetime64[s]')
+        self.start_date = np.datetime64(start_date).astype('datetime64[s]')
+
+        if isinstance(polarisation, basestring):
+            polarisation = [polarisation]
+        for i in polarisation:
+            if i not in ['hh', 'vv', 'hv', 'vh']:
+                warnings.warn('This polarisation does not exist for sentinel data.')
+                return
+        self.polarisation = polarisation
+        self.search_path = track_dir
+
+        if not path:
+            warnings.warn('You did not specify an output path. Please do so later on using the add_path function')
+        else:
+            self.add_path(path)
+
+        if precise_dir:
+            if os.path.exists(precise_dir):
+                self.precise_orbits = precise_dir
+            else:
+                print('Precise orbit path does not exist')
+
+    def add_path(self,path):
+        # This function adds the output path.
+
+        if os.path.isdir(path):
+            self.path = path
+        elif os.path.isdir(os.path.dirname(path[:-1])):
+            os.mkdir(path)
+            self.path = path
+        else:
+            warnings.warn('Neither the directory itself nor its parent directory exists. Choose another path.')
+
+    def search_files(self, track_dir):
+        # This function searches for files within a certain folder. This folder should contain images from the same
+        # track. These images are added to the variable image dump.
+        images = list()
+
+        top_dir = next(os.walk(track_dir))
+
+        for data in top_dir[2]:
+            if data.endswith('.zip'):
+                images.append(os.path.join(track_dir, data))
+        for data in top_dir[1]:
+            if data.endswith('.SAFE'):
+                images.append(os.path.join(track_dir, data))
+            else:  # Likely images are stored in a folder.
+                sec_dir = next(os.walk(os.path.join(track_dir, data)))
+
+                for dat in sec_dir[1]:
+                    if dat.endswith('.SAFE'):
+                        images.append(os.path.join(track_dir, data, dat))
+                for dat in sec_dir[2]:
+                    if dat.endswith('.zip'):
+                        images.append(os.path.join(track_dir, data, dat))
+
+        if images:
+            images = sorted(images)
+        else:
+            print('No images found in track_dir = ' + str(track_dir) + ', switching to archive folder')
+
+            track_name = self.doris_parameters.direction + '_t' + self.doris_parameters.track.zfill(3)
+            track_dir = os.path.join(track_dir, track_name)
+
+            print('Searching in folder ' + track_dir)
+
+            top_dirs = next(os.walk(track_dir))[1]
+
+            if len(top_dirs) == 1:
+                track_dir = os.path.join(track_dir, top_dirs[0])
+            elif len(top_dirs) > 1:
+                for top_dir in top_dirs:
+                    user_input = raw_input("Do you want to use folder " + top_dir + " as resource folder? (yes/no)").lower()
+                    if user_input in ['yes', 'y']:
+                        track_dir = os.path.join(track_dir, top_dir)
+
+            dir_new = next(os.walk(track_dir))
+            for dat in dir_new[1]:
+                dat_files = next(os.walk(os.path.join(track_dir, dat)))[2]
+                for data in dat_files:
+                    if data.endswith('.zip'):
+                        images.append(os.path.join(track_dir, dat, data))
+
+        if images:
+            images = sorted(images)
+        else:
+            warnings.warn('No images found! Please choose another data folder. Track_dir = ' + str(track_dir))
+
+        base = []  # Remove double hits because data is already unzipped.
+        for i in images: # Drop all .zip files which are unpacked already.
+            if i.endswith('.SAFE.zip'):
+                base.append(os.path.basename(i[:-9]))
+            elif i.endswith('.zip'):
+                base.append(os.path.basename(i[:-4]))
+            elif i.endswith('.SAFE'):
+                base.append(os.path.basename(i[:-5]))
+        b, id = np.unique(base, return_index=True)
+
+        rem = []
+        for i in range(len(base)):
+            if i in id:
+                self.image_dump.append(images[i])
+            else:
+                rem.append(images[i])
+
+        if rem:
+            print('removed the following zip files from stack:')
+            for r in rem:
+                print(r)
+            print('It is advised to work with zipfiles instead of unpacked data. This saves disk space.')
+
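+    # The duplicate filtering above reduces a zipped product and its unpacked
+    # .SAFE folder to the same base name, so only one of them is kept.
+    # Illustrative:
+    #
+    #   'S1A_IW_SLC__X.zip'  -> base 'S1A_IW_SLC__X'
+    #   'S1A_IW_SLC__X.SAFE' -> base 'S1A_IW_SLC__X'
+    #
+    # np.unique(base, return_index=True) then keeps the first occurrence only.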
+    def create_shape(self,shape_dat,buffer=0.02):
+        # This function creates a shape to make a selection of usable bursts later on. Buffer around shape is in
+        # degrees.
+
+        self.shape = load_shape(shape_dat, buffer)
+        self.buffer = buffer
+
+    def check_new_images(self, master):
+        # This function checks which images are already processed, and which are not. If certain dates are already
+        # processed they are removed from the list. You have to specify the master date, otherwise the script will not
+        # know how many bursts are expected per date.
+
+        # Which dates are available?
+        image_dates = [im.astype('datetime64[D]') for im in self.image_dates]
+        # What is the master date
+        date = np.datetime64(master).astype('datetime64[D]')
+
+        date_folders = [d for d in next(os.walk(self.path))[1] if len(d) == 8]
+        rm_id = []
+
+        if date_folders:
+            dates = [np.datetime64(d[0:4] + '-' + d[4:6] + '-' + d[6:8]) for d in date_folders]
+
+            if date in dates:
+                date_folder = date_folders[np.where(dates == date)[0][0]]
+
+                # Check existing files in master folder
+                swaths = dict()
+                swath_folders = next(os.walk(os.path.join(self.path, date_folder)))[1]
+                if len(swath_folders) == 0:
+                    print('No swaths in master folder')
+                    return
+
+                for swath in swath_folders:
+                    self.swath_names.append(swath)
+                    swaths[swath] = sorted(next(os.walk(os.path.join(self.path, date_folder, swath)))[1])
+
+                # Now check if the burst also in slave folders exist....
+                for folder, d in zip(date_folders, dates):
+                    # Check existing files in master folder
+                    try:
+                        swath_folders = next(os.walk(os.path.join(self.path, folder)))[1]
+                        if not set(swath_folders) == set(swaths.keys()):
+                            raise LookupError('Number of swaths is not the same for ' + folder)
+
+                        for swath in swath_folders:
+                            bursts = sorted(next(os.walk(os.path.join(self.path, date_folder, swath)))[1])
+                            if not set(bursts) == set(swaths[swath]):
+                                raise LookupError('Number of bursts is not the same for ' + folder)
+
+                            if d == date:
+                                # If the master is already processed we have to create the list of center and coverage of
+                                # bursts.
+                                # TODO make this robust for the case where no separate input data folders are created.
+                                res_files = []
+                                for burst in bursts:
+                                    dat_file = [r for r in next(os.walk(os.path.join(self.path, date_folder, swath, burst)))[2]
+                                                if (r.startswith('slave_iw') and len(r) < 25)]
+                                    res_file = os.path.join(self.path, date_folder, swath, burst, 'slave.res')
+                                    res_files.append(res_file)
+
+                                    center, coverage = center_shape_from_res(resfile=res_file)
+                                    # Assign coverage, center coordinates and burst name.
+                                    self.burst_shapes.append(coverage)
+                                    self.burst_centers.append(center)
+                                    burst_num = os.path.basename(dat_file[0])[17:-4]
+                                    self.burst_names.append(swath + '_burst_' + burst_num)
+                                    self.burst_no += 1
+
+                        # If all bursts are the same these files are not processed.
+                        for id in np.where(image_dates == d)[0][::-1]:
+                            del self.image_dates[id]
+                            del image_dates[id]
+                            del self.images[id]
+                            del self.image_files[id]
+
+                    except LookupError as error:
+                        print(error)
+
+    def select_image(self,start_date='',end_date='', dest_folder=''):
+        # This function selects usable images based on .kml files and dates
+
+        if not dest_folder:
+            dest_folder = os.path.join(self.path, 'kml')
+        if not os.path.exists(dest_folder):
+            os.makedirs(dest_folder)
+
+        if not self.shape:
+            warnings.warn('There is no shape loaded to select images. Please use the create_shape function to do so.')
+
+        if start_date:
+            self.start_date = np.datetime64(start_date).astype('datetime64[s]')
+        if end_date:
+            self.end_date = np.datetime64(end_date).astype('datetime64[s]') + np.timedelta64(1, 'D').astype('timedelta64[s]')
+
+        # First select images based on dates and check if polygons intersect.
+        for i in self.image_dump:
+            d = os.path.basename(i)[17:32]
+            acq_time = np.datetime64(d[0:4] + '-' + d[4:6] + '-' + d[6:11] + ':' + d[11:13] + ':' + d[13:] + '-0000')
+            if acq_time >= self.start_date and acq_time <= self.end_date:
+                im = image.ImageMeta(path=i)
+                kml, png = extract_kml_preview(i, dir=dest_folder, png=False, overwrite=True)
+                success = shape_im_kml(self.shape, kml)
+
+                if success:
+                    self.images.append(im)
+                    self.image_dates.append(acq_time)
+                    self.image_files.append(os.path.basename(i))
+        self.dates = sorted(list(set([d.astype('datetime64[D]') for d in self.image_dates])))
+        for date in self.dates:
+            print(date)
+
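+    # Filename parsing above (illustrative): for a product named
+    # S1A_IW_SLC__1SDV_20150101T053000_..., characters 17:32 give
+    # '20150101T053000', which is rebuilt into the numpy datetime string
+    # '2015-01-01T05:30:00-0000' used in the date comparison.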
+    def select_burst(self, date=''):
+        # This function selects the useful bursts at one epoch (user defined or automatically selected). The
+        # extend_burst function can then be used to search for the corresponding bursts at other dates and to update
+        # the datastack later on.
+
+        if self.burst_names:
+            print('Master data is already loaded from earlier processed data.')
+            return
+
+        image_dates = [im.astype('datetime64[D]') for im in self.image_dates]
+        # First select which date will be the master
+        if date:
+            date = np.datetime64(date).astype('datetime64[D]')
+            # date = deepcopy(image_dates[min(abs(self.image_dates-date))])
+        else:  # if no date is specified
+            date = Counter(image_dates).most_common(1)[0][0]
+
+        # Load the metadata for this date for the bursts and swaths
+        for im_date in image_dates:
+            print(im_date)
+        print(date)
+        image_id = np.where(image_dates == date)[0]
+        for i in image_id:
+            print('processing data: ' + self.images[i].unzip_path)
+            self.images[i].meta_swath(precise_folder=self.precise_orbits)
+
+        # Order the selected images by acquisition time.
+        image_id = [x for (y,x) in sorted(zip([self.image_dates[i] for i in image_id],image_id))]
+        date = date.astype(datetime).strftime('%Y-%m-%d')
+        self.master_date = date
+        self.datastack[date] = dict()
+
+        swath_nums = ['1', '2', '3']
+
+        for p in self.polarisation:
+            for swath in swath_nums:
+                swath_id = 'iw' + swath + '-slc-' + p
+                swath_name = 'swath_' + swath
+                burst_no = 1
+                data = []
+
+                for i in image_id:
+                    # Check which swath should be selected.
+                    if not self.images[i].swaths_data:
+                        continue
+
+                    swath_names = [os.path.basename(data) for data in self.images[i].swaths_data]
+                    swath_no = [no for no in range(len(swath_names)) if swath_id in swath_names[no]]
+                    if not swath_no:  # If there is no data for this swath
+                        continue
+
+                    swath_no = swath_no[0]
+
+                    for burst in self.images[i].swaths[swath_no].bursts:
+                        if burst.burst_coverage.intersects(self.shape):
+
+                            # Check if we do not have a copy of former bursts...
+                            if len(self.burst_centers) != 0:
+                                dist_center = [np.sqrt((burst.burst_center[0] - center[0])**2 +
+                                               (burst.burst_center[1] - center[1])**2) for center in self.burst_centers]
+                                if min(dist_center) < 0.02:
+                                    print('Possible duplicate burst deleted')
+                                    continue
+
+                            # If there are bursts in this swath, which intersect, add burst to list.
+                            if swath_name not in self.datastack[date].keys(): # If there are burst and no burst list exists
+                                self.datastack[date][swath_name] = dict()
+                                if swath_name not in self.swath_names:
+                                    self.swath_names.append(swath_name)
+
+                            # Assign burst to data stack
+                            burst_name = 'burst_' + str(burst_no)
+                            burst.new_burst_num = burst_no
+                            self.datastack[date][swath_name][burst_name] = burst
+
+                            # Create .res file data
+                            burst.meta_burst(swath_meta=self.images[i].swaths[swath_no].metadata)
+
+                            # Assign coverage, center coordinates and burst name.
+                            self.burst_shapes.append(burst.burst_coverage)
+                            self.burst_centers.append(burst.burst_center)
+                            self.burst_names.append(swath_name + '_' + burst_name)
+                            burst_no += 1
+
+                            # Finally add also to the number of bursts from the image
+                            self.images[i].burst_no += 1
+                            self.burst_no += 1
+
+    def extend_burst(self):
+        # Searches for bursts at dates other than the dates that are already available. Note that if images are added
+        # for dates which are already indexed, this data will not be used!
+
+        image_dates = [d.astype('datetime64[D]') for d in self.image_dates]
+
+        for date in self.dates:
+            # Append data to datastack variable
+            date_str = date.astype(datetime).strftime('%Y-%m-%d')
+            self.datastack[date_str] = dict()
+            data = []
+
+            # Load the metadata for this date for the bursts and swaths
+            image_id = np.where(image_dates == date)[0]
+            if len(image_id) == 0:
+                continue
+
+            for i in image_id:
+                if date_str == self.master_date:
+                    continue
+                print('processing data: ' + self.images[i].unzip_path)
+                self.images[i].meta_swath(precise_folder=self.precise_orbits)
+
+            for swath in self.swath_names:
+                # Add swath to datastack
+                self.datastack[date_str][swath] = OrderedDict()
+
+                for i in image_id:
+                    # Select correct swath in image
+                    swath_id = swath[-1] + '-slc-' + self.polarisation[0]
+
+                    swath_names = [os.path.basename(data) for data in self.images[i].swaths_data]
+                    swath_no = [no for no in range(len(swath_names)) if swath_id in swath_names[no]]
+                    if not swath_no:  # If there is no data for this swath
+                        continue
+
+                    swath_no = swath_no[0]
+
+                    for burst in self.images[i].swaths[swath_no].bursts:
+                        x_dist = np.array([xy[0] - burst.burst_center[0] for xy in self.burst_centers])
+                        y_dist = np.array([xy[1] - burst.burst_center[1] for xy in self.burst_centers])
+
+                        dist = np.sqrt(x_dist**2 + y_dist**2)
+                        burst_id = np.argmin(dist)
+
+                        if dist[burst_id] < 0.1:
+                            # Assign burst to data stack
+                            burst.new_burst_num = int(self.burst_names[burst_id][14:])
+                            self.datastack[date_str][swath][self.burst_names[burst_id][8:]] = burst
+
+                            # Create .res file data
+                            burst.meta_burst(swath_meta=self.images[i].swaths[swath_no].metadata)
+                        else:
+                            print('No corresponding burst found! Closest is ' + str(dist[burst_id]) + ' ' +
+                                  date_str + ' ' + swath + ' ' + self.burst_names[burst_id])
+
+        # Remove all unnecessary dates from stack
+        for dat_key in self.datastack.keys():
+            if not self.datastack[dat_key]:
+                del self.datastack[dat_key]
+
+    def remove_incomplete_images(self):
+        # This function removes all images with fewer bursts than the maximum. This makes the stack more consistent.
+
+        for key in self.datastack.keys():
+            burst_no = 0
+            for key_swath in self.datastack[key].keys():
+                burst_no += len(self.datastack[key][key_swath])
+            if burst_no != self.burst_no:
+                self.datastack.pop(key)
+                print('Number of burst for ' + key + ' is ' + str(burst_no) + ' instead of ' + str(self.burst_no) +
+                      ' and is removed from the datastack.')
+
+    def define_burst_coordinates(self,slaves=False):
+        # This function defines the exact coordinates in pixels of every burst based on the lower left corner of the first
+        # burst image. In this way the total overlap of these bursts can easily be monitored. Results are written to the
+        # coordinates variable
+
+        if slaves is True:
+            dates = self.datastack.keys()
+        else:
+            dates = [self.master_date]
+
+        self.coordinates = OrderedDict()
+
+        for date in dates:
+            self.coordinates[date] = OrderedDict()
+            self.coordinates[date]['shapes'] = []
+            ref = False
+
+            min_line, min_pixel = 1, 1
+            max_line, max_pixel = 1, 1
+            for swath in self.datastack[date].keys():
+
+                self.coordinates[date][swath] = OrderedDict()
+                self.coordinates[date][swath]['corners'] = np.zeros([len(self.datastack[date][swath].keys()), 4, 2],dtype='int')
+
+                b = 0
+                for burst in sorted(self.datastack[date][swath].keys(), key = lambda x: int(x[6:])):
+
+                    if ref is False:
+                        self.coordinates[date]['ref_az_time'] = self.datastack[date][swath][burst].processes['readfiles']['First_pixel_azimuth_time (UTC)']
+                        self.coordinates[date]['ref_range_time'] = self.datastack[date][swath][burst].processes['readfiles']['Range_time_to_first_pixel (2way) (ms)']
+                        ref = True
+
+                    az_first = self.datastack[date][swath][burst].processes['readfiles']['First_pixel_azimuth_time (UTC)']
+                    az_samp = self.datastack[date][swath][burst].processes['readfiles']['Pulse_Repetition_Frequency (computed, Hz)']
+                    first_line = int(self.datastack[date][swath][burst].processes['crop']['First_line (w.r.t. original_image)'])
+                    last_line = int(self.datastack[date][swath][burst].processes['crop']['Last_line (w.r.t. original_image)'])
+
+                    range_first = self.datastack[date][swath][burst].processes['readfiles']['Range_time_to_first_pixel (2way) (ms)']
+                    range_samp = self.datastack[date][swath][burst].processes['readfiles']['Range_sampling_rate (computed, MHz)']
+                    first_pixel = int(self.datastack[date][swath][burst].processes['crop']['First_pixel (w.r.t. original_image)'])
+                    last_pixel = int(self.datastack[date][swath][burst].processes['crop']['Last_pixel (w.r.t. original_image)'])
+
+                    no_lines = int(self.datastack[date][swath][burst].processes['readfiles']['Number_of_lines_original'])
+                    no_pixels = int(self.datastack[date][swath][burst].processes['readfiles']['Number_of_pixels_original'])
+
+                    # Calculate difference w.r.t. reference point.
+                    range_time_diff = (float(range_first) - float(self.coordinates[date]['ref_range_time']))
+                    pixel_diff = int(round((float(range_samp) * 1e3) * range_time_diff))
+
+                    az_time1 = datetime.strptime(az_first, '%Y-%b-%d %H:%M:%S.%f')
+                    az_time2 = datetime.strptime(self.coordinates[date]['ref_az_time'], '%Y-%b-%d %H:%M:%S.%f')
+                    az_time_diff = (az_time1 - az_time2).total_seconds()
+                    line_diff = int(round(float(az_samp) * az_time_diff))
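+                    # Worked example (hypothetical numbers): with a range sampling
+                    # rate of 64.345 MHz and a range time difference of 0.01 ms,
+                    # pixel_diff = round(64.345e3 * 0.01) = 643 pixels; with a PRF
+                    # of 1685.8 Hz and an azimuth time difference of 2.0 s,
+                    # line_diff = round(1685.8 * 2.0) = 3372 lines.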
+
+                    # Calculate final corner coordinates.
+                    ll = np.array([line_diff + first_line, pixel_diff + first_pixel], ndmin=2)
+                    ul = np.array([line_diff + last_line, pixel_diff + first_pixel], ndmin=2)
+                    ur = np.array([line_diff + last_line, pixel_diff + last_pixel], ndmin=2)
+                    lr = np.array([line_diff + first_line, pixel_diff + last_pixel], ndmin=2)
+
+                    self.coordinates[date][swath]['corners'][b,:,:] = np.vstack([ll,ul,ur,lr])
+                    b += 1
+
+                    # Check for max/min line/pixel to prevent negative pixel numbers.
+                    min_line = min(1 + line_diff, min_line)
+                    max_line = max(line_diff + no_lines, max_line)
+                    min_pixel = min(1 + pixel_diff, min_pixel)
+                    max_pixel = max(pixel_diff + no_pixels, max_pixel)
+
+            if min_line < 1:
+                max_line = max_line + (1 - min_line)
+            if min_pixel < 1:
+                max_pixel = max_pixel + (1 - min_pixel)
+
+            for swath in self.datastack[date].keys():
+                # If one of the lines or pixels is lower than 1, correct all coordinates so that all bursts have
+                # positive coordinates.
+                if min_line < 1:
+                    self.coordinates[date][swath]['corners'][:,:,0] = self.coordinates[date][swath]['corners'][:,:,0] + (1 - min_line)
+                if min_pixel < 1:
+                    self.coordinates[date][swath]['corners'][:,:,1] = self.coordinates[date][swath]['corners'][:,:,1] + (1 - min_pixel)
+
+                for burst in range(len(self.datastack[date][swath].keys())):
+                    shape_c = self.coordinates[date][swath]['corners'][burst,:,:]
+                    shape = box(shape_c[0,1],shape_c[0,0],shape_c[2,1],shape_c[2,0])
+                    self.coordinates[date]['shapes'].append(shape)
+
+                    # Finally add information to the .res file if already loaded
+                    if self.datastack[date][swath]['burst_' + str(burst+1)].processes['readfiles']:
+
+                        read = self.datastack[date][swath]['burst_' + str(burst+1)].processes['readfiles']
+                        read['First_line (w.r.t. output_image)'] = str(shape_c[0,0])
+                        read['Last_line (w.r.t. output_image)'] = str(shape_c[2,0])
+                        read['First_pixel (w.r.t. output_image)'] = str(shape_c[0,1])
+                        read['Last_pixel (w.r.t. output_image)'] = str(shape_c[2,1])
+                        read['Number_of_pixels_output_image'] = str(max_pixel)
+                        read['Number_of_lines_output_image'] = str(max_line)
+                        self.datastack[date][swath]['burst_' + str(burst+1)].processes['readfiles'] = read
+                    else:
+                        print('No resfile available, so information is not added to resfile')
+
+    def write_stack(self,write_path='',no_data=False):
+        # This function writes the full datastack to a given folder using the dates / swaths / bursts setup. This
+        # also generates the res readfiles data.
+        if write_path and os.path.exists(write_path):
+            self.path = write_path
+        if (not write_path or not os.path.exists(write_path)) and not self.path:
+            warnings.warn('Please specify a path that exists to write the data')
+            return
+
+        write_jobs = []
+        burst_num = []
+
+        for date in self.datastack.keys():
+
+            date_basic = date.translate(None,'-')
+            date_path = os.path.join(self.path, date_basic)
+            if not os.path.exists(date_path):
+                os.mkdir(date_path)
+
+            for swath in self.datastack[date].keys():
+
+                swath_path = os.path.join(date_path, swath)
+                if not os.path.exists(swath_path):
+                    os.mkdir(swath_path)
+
+                for burst in self.datastack[date][swath].keys():
+                    burst_path = os.path.join(date_path, swath, burst)
+                    if not os.path.exists(burst_path):
+                        os.mkdir(burst_path)
+
+                    # Finally write the bursts with their res files and precise orbits
+                    xml = self.datastack[date][swath][burst].swath_xml
+                    data = self.datastack[date][swath][burst].swath_data
+                    image_no = str(self.datastack[date][swath][burst].burst_num)
+                    stack_no = burst[6:]
+                    xml_base = os.path.basename(xml)
+                    res_name = os.path.join(burst_path, 'slave.res')
+                    outdata = os.path.join(burst_path, 'slave_iw_' + xml_base[6] + '_burst_' + stack_no + '.raw')
+
+                    self.datastack[date][swath][burst].write(res_name)
+                    if not os.path.exists(res_name) or not os.path.exists(outdata):
+
+                        write_jobs.append('python ' + os.path.join(self.function_path, 'sentinel_dump_data_function.py') + ' ' + data + ' ' + res_name + ' ' + outdata)
+                        burst_num.append(stack_no + '_' + xml_base[6] + '_' + xml_base[15:23])
+
+        # Jobs are sorted by burst, then swath, then date, so that parallel jobs mainly read from different data files.
+        ids = sorted(range(len(burst_num)), key=lambda x: burst_num[x])
+
+        jobList1 = []
+        for id_val in ids:
+            jobList1.append({"path": self.path, "command": write_jobs[id_val]})
+            if not self.parallel:
+                os.chdir(self.path)
+                # Write the burst data and res file
+                os.system(write_jobs[id_val])
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList1)
+
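+    # The sort above uses keys of the form <burst>_<swath>_<date>, so jobs are
+    # grouped roughly as (illustrative):
+    #   '1_1_20150101' < '1_2_20150101' < '2_1_20150101'
+    # which spreads simultaneous reads over different measurement files.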
+    def unpack_image(self, dest_folder=''):
+
+        if not dest_folder:
+            dest_folder = os.path.join(self.path, 'slc_data_files')
+            self.unzip_path = dest_folder
+        if not os.path.exists(dest_folder):
+            os.mkdir(dest_folder)
+
+        jobList1 = []
+
+        # This program unpacks the images which are needed for processing. If unpacking fails, they are removed.
+        for imagefile in self.images:
+
+            zipped_folder = imagefile.zip_path
+            if zipped_folder.endswith('.SAFE.zip'):
+                imagefile.unzip_path = os.path.join(dest_folder, os.path.basename(zipped_folder[:-9] + '.SAFE'))
+            elif zipped_folder.endswith('.zip'):
+                imagefile.unzip_path = os.path.join(dest_folder, os.path.basename(zipped_folder[:-4] + '.SAFE'))
+            shapefile = self.shape_filename
+            pol = self.polarisation[0]
+            overwrite = False
+            command1 = ('python ' + self.function_path + 'load_shape_unzip.py ' + zipped_folder + ' ' + dest_folder +
+                        ' ' + shapefile + ' ' + pol + ' ' + str(overwrite))
+            jobList1.append({"path": self.path, "command": command1})
+            if not self.parallel:
+                os.chdir(self.path)
+                # Unpack the image
+                os.system(command1)
+
+        if self.parallel:
+            jobs = Jobs(self.nr_of_jobs, self.doris_parameters)
+            jobs.run(jobList1)
+
+    def del_unpacked_image(self):
+        # This function removes the unpacked images after processing.
+        for image_dat in self.images:
+            shutil.rmtree(image_dat.unzip_path)
+
+    def swath_coverage(self):
+        # Determine the coverage of the different swaths as the union of their burst shapes.
+
+        for swath in self.swath_names:
+            # Assign coverage of swath using the union of its burst shapes.
+            burst_id = [i for i in range(len(self.burst_names)) if swath in self.burst_names[i]]
+            swath_shape = cascaded_union([self.burst_shapes[i] for i in burst_id])
+            self.swath_shapes.append(swath_shape)
+
+    def write_shapes(self,coverage=True,images=True,swaths=True,bursts=True):
+        # This function writes shapefiles of the area of interest, images and bursts.
+
+        if not coverage and not images and not bursts:
+            warnings.warn('Select at least one of shape types')
+            return
+
+        shapes = list()
+        shape_names = list()
+        shape_files = list()
+        if coverage and self.shape:
+            shape_files.append(os.path.join(self.path, 'area_of_interest.shp'))
+            if self.shape.type == 'MultiPolygon':
+                shapes.append([sh for sh in self.shape])
+                shape_names.append([('coverage_with_buffer_of_' + str(self.buffer) + '_degrees_' + str(i)) for i in range(len(shapes[0]))])
+            elif self.shape.type == 'Polygon':
+                shape_names.append(['coverage_with_buffer_of_' + str(self.buffer) + '_degrees'])
+                shapes.append([self.shape])
+        if images and self.image_shapes:
+            shape_files.append(os.path.join(self.path, 'image_coverage.shp'))
+            shape_names.append([date.astype(datetime).strftime('%Y-%m-%dT%H:%M:%S') for date in self.image_dates])
+            shapes.append(self.image_shapes)
+        if swaths and self.swath_shapes:
+            shape_files.append(os.path.join(self.path, 'swath_coverage.shp'))
+            shape_names.append(self.swath_names)
+            shapes.append(self.swath_shapes)
+        if bursts and self.burst_shapes:
+            shape_files.append(self.path + 'burst_coverage.shp')
+            shape_names.append(self.burst_names)
+            shapes.append(self.burst_shapes)
+
+        shape_setup = {
+            'geometry': 'Polygon',
+            'properties': {'name': 'str'},
+        }
+
+        for i in range(len(shape_files)):
+            with fiona.open(shape_files[i], 'w', 'ESRI Shapefile', shape_setup) as sh:
+                for n in range(len(shapes[i])):
+                    sh.write({
+                        'geometry': mapping(shapes[i][n]),
+                        'properties': {'name': shape_names[i][n]},
+                    })
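write_shapes drives fiona with a fixed schema: one polygon geometry plus a name property per
record. The same pattern in isolation (file name and polygon are made up for the example):

    import fiona
    from shapely.geometry import Polygon, mapping

    # A hypothetical footprint standing in for a burst or swath outline.
    poly = Polygon([(4.0, 52.0), (4.5, 52.0), (4.5, 52.3), (4.0, 52.3)])

    schema = {'geometry': 'Polygon', 'properties': {'name': 'str'}}
    with fiona.open('example_coverage.shp', 'w', 'ESRI Shapefile', schema) as sh:
        sh.write({'geometry': mapping(poly), 'properties': {'name': 'example_shape'}})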
diff --git a/doris_stack/main_code/swath.py b/doris_stack/main_code/swath.py
new file mode 100644
index 0000000..6db92e3
--- /dev/null
+++ b/doris_stack/main_code/swath.py
@@ -0,0 +1,119 @@
+# This file defines a class for metadata objects of Sentinel images. A large part of the work depends on the python
+# readers from the tops toolbox.
+
+from doris.doris_stack.functions.xml_query import xml_query
+import warnings
+import os
+from doris.doris_stack.main_code.burst import BurstMeta
+from doris.doris_stack.functions.swath_metadata import burst_coverage, swath_coverage, swath_precise
+
+
+class SwathMeta(object):
+    # Class which stores and gathers information of a swath in a sentinel dataset
+
+    def __init__(self, path='', swath_no='1', pol='vv', xml='', data=''):
+        # Initialize object variables
+
+        # This will contain a list of burst objects
+        self.bursts = []
+
+        # The following contain the path of xml and data file. Also swath_no and polarisation are given.
+        self.swath_xml = ''
+        self.swath_data = ''
+        self.swath_no = ''
+        self.swath_pol = ''
+
+        # These variables contain the metadata and the convex hull of the bursts
+        self.metadata = []
+        self.coverage = []
+        self.burst_centers = []
+        self.burst_corners = []
+        self.burst_shapes = []
+        self.orbits = []
+        self.orbit_type = ''
+
+        # This function creates a swath object and searches for available data and xml files. It gives an error when
+        # either the path does not exist, no data or xml files can be found, or the data and xml files do not match.
+        if (not xml or not data) and not path:
+            warnings.warn('Please provide either a product path or xml and data path')
+            return
+
+        if not xml or not data:
+            warnings.warn('No xml/data files given; searching the product path instead')
+
+            xml_dir = os.path.join(path, 'annotation')
+            xml = [f for f in os.listdir(xml_dir) if os.path.isfile(os.path.join(xml_dir, f))]
+
+            data_dir = os.path.join(path, 'measurement')
+            data = [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
+
+            # Select polarisation
+            if not any(s in pol for s in ('hh','vv','hv','vh')):
+                warnings.warn('Polarisation not recognized, using default (vv)')
+                pol = 'vv'
+            if swath_no not in ('1', '2', '3'):
+                warnings.warn('Swath number not recognized, using default (1)')
+                swath_no = '1'
+
+            xml = [os.path.join(path,'annotation',x) for x in xml if x[12:14] in pol and x[6] == swath_no]
+            data = [os.path.join(path,'measurement',x) for x in data if x[12:14] in pol and x[6] == swath_no]
+
+        # Check if the data is there and if the filenames coincide.
+        # print xml + str(len(xml))
+        # print data + str(len(data))
+
+        if type(xml) is str:
+            xml = [xml]
+        if type(data) is str:
+            data = [data]
+        if (len(xml) != 1 and type(xml) is list) or len(data) != 1:
+            warnings.warn('Total number of files should be one!')
+        if not os.path.exists(xml[0]) or not os.path.exists(data[0]):
+            warnings.warn('Either xml or data path does not exist')
+        # Compare the filename stems (the '.xml' and '.tiff' extensions stripped).
+        if os.path.basename(xml[0])[:-4] != os.path.basename(data[0])[:-5]:
+            warnings.warn('xml and data file do not correspond.')
+
+        self.swath_xml = xml[0]
+        self.swath_data = data[0]
+        self.swath_no = swath_no
+        self.swath_pol = pol
+
+    def meta_swath(self):
+        # This function reads and stores metadata of different swaths in the swath objects.
+        self.metadata = xml_query(self.swath_xml)
+        corners, self.coverage = swath_coverage(self.metadata)
+        self.burst_centers, self.burst_corners, self.burst_shapes = burst_coverage(self.metadata)
+
+
+    def orbits_swath(self, precise_folder=''):
+        # This functions loads the precise orbits for this swath
+        if not precise_folder:
+            print('xml information on orbit is used because no precise folder is specified')
+            orbits, type_orb = swath_precise(self.metadata, precise_folder=precise_folder, dat_type='XML')
+        else:
+            orbits, type_orb = swath_precise(self.metadata, precise_folder=precise_folder, dat_type='POE')
+
+        self.orbits = orbits
+        self.orbit_type = type_orb
+
+        return orbits, type_orb
+
+
+    def meta_burst(self, precise_folder=''):
+        # This function reads and stores metadata of different bursts in the bursts objects.
+
+        if not self.metadata:
+            self.meta_swath()
+        if not self.orbits:
+            self.orbits_swath(precise_folder=precise_folder)
+
+        bursts_num = len(self.metadata['aux']['azimuthTimeStart'])
+
+        if self.bursts:
+            self.bursts = []
+
+        for no in range(bursts_num):
+            self.bursts.append(BurstMeta(path='',swath_no=self.swath_no, pol=self.swath_pol, burst_num=no + 1,
+                                         xml=self.swath_xml, data=self.swath_data))
+            self.bursts[no].burst_center = self.burst_centers[no][0]
+            self.bursts[no].burst_coverage = self.burst_shapes[no]
+            self.bursts[no].burst_corners = self.burst_corners[no]
+            self.bursts[no].datapoints = self.orbits
+            self.bursts[no].orbit_type = self.orbit_type
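The xml/data selection in SwathMeta relies on the fixed layout of Sentinel-1 SAFE filenames,
where character 6 carries the swath number and characters 12:14 the polarisation. A minimal
sketch of that filter over a made-up file list:

    # Hypothetical annotation filenames following the Sentinel-1 SAFE convention.
    names = ['s1a-iw1-slc-vv-20170801t060101-001.xml',
             's1a-iw2-slc-vh-20170801t060101-002.xml']

    swath_no, pol = '1', 'vv'
    # Index 6 is the swath number, slice 12:14 the polarisation.
    selected = [n for n in names if n[12:14] in pol and n[6] == swath_no]
    print(selected)  # ['s1a-iw1-slc-vv-20170801t060101-001.xml']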
diff --git a/editor_support/Makefile b/editor_support/Makefile
deleted file mode 100644
index cf37de2..0000000
--- a/editor_support/Makefile
+++ /dev/null
@@ -1,58 +0,0 @@
-#########################################################################
-# Makefile for the Doris syntax hilighting using vim.			#
-# 22-Mar-2010                                                           #
-#                                                                       #
-# Delft University of Technology                                        #
-# Delft Institute of Earth Observation and Space Systems                #
-# http://enterprise.lr.tudelft.nl/doris/				#
-# See also the user manual, annex installation.				#
-#									#
-# Version 1.0 - Mahmut Arikan   					#
-#									#
-#########################################################################
-###################################################################
-# Please change if required: 					###
-###################################################################
-### The shell used by this makefile ###
-SHELL   = /bin/sh
-
-### Specify compiler/installation directory ###
-SCRIPTSDIR = vim/syntax
-INSTALLDIR = ~/.vim/syntax
-
-### Define statements controlling compilation ###
-SYNTAX	   = 	doris.vim
-
-
-#####################################################
-### No need to change anything below here... ####
-#####################################################
-default:	vim
-
-vim:		install
-
-### Install executable in installdir ###
-install:	$(SCRIPTSDIR)/$(SYNTAX)
-		@echo "* Installing $(SYNTAX) in: $(INSTALLDIR)"
-		@mkdir -p $(INSTALLDIR)
-		@( cd $(SCRIPTSDIR) 1>/dev/null ;  cp -f $(SYNTAX) $(INSTALLDIR)/. )
-		@echo " "
-		@echo "*******************************"
-		@echo "* ...Installation finished... *"
-		@echo "*******************************"
-		@echo " "
-		@echo "* Check that you have enabled syntax hilighting in .vimrc by adding: "
-		@echo "*  syntax on"
-		@echo "* "
-		@echo "* Type :set syntax=doris to check whether it is working or not. "
-		@echo "* "
-		@echo " "
-
-
-#####################################################
-### Cleaners ###
-uninstall:	
-		@rm -f $(INSTALLDIR)/$(SYNTAX)
-		@echo "* Removed syntax definition $(SYNTAX) from dir: $(INSTALLDIR)."
-
-### TUDelft 22-03-2010 ###
diff --git a/editor_support/README b/editor_support/README
deleted file mode 100755
index 39fde54..0000000
--- a/editor_support/README
+++ /dev/null
@@ -1,39 +0,0 @@
-#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
-# CONTENTS:                                                          #
-#  1. Quick Start                                                    #
-#  2. Details                                                        #
-#  3. List of files.                                                 #
-#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
-
-## QUICK START: install vim syntax for doris, type:
-make
-
-## DETAILS
-Uses C++ syntax higlighting of vim with Doris .res file structure.
-It can be also used for C/C++ hilighting at the same time.
-
-Please make sure you have turned on syntax hilighting in .vimrc file.
-See vimrc_example for an example setup.
-
-To test open a doris input file and type :set syntax=doris
-or add the following line to the end of the input file 
-beginning with the # (hash):
-# vim: syntax=doris
-
-## to uninstall
-make uninstall
-
-
-Have fun!
-
-TU Delft 2000-2010
-doris_users[AT]tudelft.nl
-
-
-------------------------
-filelist:
-------------------------
--rw-r--r--  2066 2010-03-22 14:15 Makefile
--rwxr-xr-x  1220 2010-03-22 14:12 README
-drwxr-xr-x  4096 2009-04-17 09:20 vim
--rw-r--r--  3648 2010-03-22 12:59 vimrc_example
diff --git a/editor_support/vim/syntax/doris.vim b/editor_support/vim/syntax/doris.vim
deleted file mode 100644
index 3cb96d4..0000000
--- a/editor_support/vim/syntax/doris.vim
+++ /dev/null
@@ -1,70 +0,0 @@
-" Vim syntax file
-" Language:	C++ + Doris
-" Maintainer:	Ken Shan <ccshan at post.harvard.edu>
-" Last Change:	2002 Jul 15
-" Doris Part Maintainer:	Mahmut Arikan <M.Arikan at tudelft.nl>
-
-" For version 5.x: Clear all syntax items
-" For version 6.x: Quit when a syntax file was already loaded
-if version < 600
-  syntax clear
-elseif exists("b:current_syntax")
-  finish
-endif
-
-" Read the C syntax to start with
-if version < 600
-  so <sfile>:p:h/c.vim
-else
-  runtime! syntax/c.vim
-  unlet b:current_syntax
-endif
-
-" C++ extentions
-syn keyword cppStatement	new delete this friend using
-syn keyword cppAccess		public protected private
-syn keyword cppType		inline virtual explicit export bool wchar_t
-syn keyword cppExceptions	throw try catch
-syn keyword cppOperator		operator typeid
-syn keyword cppOperator		and bitor or xor compl bitand and_eq or_eq xor_eq not not_eq
-syn match cppCast		"\<\(const\|static\|dynamic\|reinterpret\)_cast\s*<"me=e-1
-syn match cppCast		"\<\(const\|static\|dynamic\|reinterpret\)_cast\s*$"
-syn keyword cppStorageClass	mutable
-syn keyword cppStructure	class typename template namespace
-syn keyword cppNumber		NPOS
-syn keyword cppBoolean		true false
-" MA: minor update to handle Doris input files as well.
-syn match xComment		"^c .*"
-syn match xComment		"^C .*"
-syn match xComment		"^#.*"
-syn match xComment		"\s#.*"ms=s+1  
-
-" The minimum and maximum operators in GNU C++
-syn match cppMinMax "[<>]?"
-
-" Default highlighting
-if version >= 508 || !exists("did_cpp_syntax_inits")
-  if version < 508
-    let did_cpp_syntax_inits = 1
-    command -nargs=+ HiLink hi link <args>
-  else
-    command -nargs=+ HiLink hi def link <args>
-  endif
-  HiLink cppAccess		cppStatement
-  HiLink cppCast		cppStatement
-  HiLink cppExceptions		Exception
-  HiLink cppOperator		Operator
-  HiLink cppStatement		Statement
-  HiLink cppType		Type
-  HiLink cppStorageClass	StorageClass
-  HiLink cppStructure		Structure
-  HiLink cppNumber		Number
-  HiLink cppBoolean		Boolean
-  HiLink xComment		Comment
-  hi Comment ctermfg=LightBlue
-  delcommand HiLink
-endif
-
-let b:current_syntax = "doris"
-
-" vim: ts=8
diff --git a/editor_support/vimrc_example b/editor_support/vimrc_example
deleted file mode 100644
index b658f31..0000000
--- a/editor_support/vimrc_example
+++ /dev/null
@@ -1,99 +0,0 @@
-" All system-wide defaults are set in $VIMRUNTIME/debian.vim (usually just
-" /usr/share/vim/vimcurrent/debian.vim) and sourced by the call to :runtime
-" you can find below.  If you wish to change any of those settings, you should
-" do it in this file (/etc/vim/vimrc), since debian.vim will be overwritten
-" everytime an upgrade of the vim packages is performed.  It is recommended to
-" make changes after sourcing debian.vim since it alters the value of the
-" 'compatible' option.
-scriptencoding latin1
-
-" This line should not be removed as it ensures that various options are
-" properly set to work with the Vim-related packages available in Debian.
-runtime! debian.vim
-
-" Uncomment the next line to make Vim more Vi-compatible
-" NOTE: debian.vim sets 'nocompatible'.  Setting 'compatible' changes numerous
-" options, so any other options should be set AFTER setting 'compatible'.
-"set compatible
-
-" Vim5 and later versions support syntax highlighting. Uncommenting the next
-" line enables syntax highlighting by default.
-syntax on
-
-" If using a dark background within the editing area and syntax highlighting
-" turn on this option as well
-"set background=dark
-
-" Uncomment the following to have Vim jump to the last position when
-" reopening a file
-"if has("autocmd")
-"  au BufReadPost * if line("'\"") > 0 && line("'\"") <= line("$")
-"    \| exe "normal g'\"" | endif
-"endif
-
-" Uncomment the following to have Vim load indentation rules according to the
-" detected filetype. Per default Debian Vim only load filetype specific
-" plugins.
-"if has("autocmd")
-"  filetype indent on
-"endif
-
-" The following are commented out as they cause vim to behave a lot
-" differently from regular Vi. They are highly recommended though.
-set showcmd		" Show (partial) command in status line.
-set showmatch		" Show matching brackets.
-set ignorecase		" Do case insensitive matching
-"set smartcase		" Do smart case matching
-"set incsearch		" Incremental search
-"set autowrite		" Automatically save before commands like :next and :make
-"set hidden             " Hide buffers when they are abandoned
-"set mouse=a		" Enable mouse usage (all modes) in terminals
-
-"scriptencoding utf-8
-set bg=dark
-set tabstop=8
-set shiftwidth=8
-set softtabstop=0       " was 8
-set expandtab 
-set diffopt+=iwhite " don't diff white-space 
-set modelines=1
-
-"copy paste from xclipboard
-vmap <F6> :!xclip -f -sel clip<CR>
-map  <F7> mz:-1r !xclip -o -sel clip<CR>
- 
-if (&termencoding == "utf-8") || has("gui_running")
-    if v:version >= 700
-        set list listchars=eol:\ ,tab:»·,trail:·,precedes:.,extends:.,nbsp:.
-    else
-        set list listchars=eol:\ ,tab:»·,trail:·,extends:.
-    endif
-else
-    if v:version >= 700
-        "set list listchars=eol:\ ,tab:>-,trail:.,extends:>,nbsp:_
-        set list listchars=eol:%,tab:>-,trail:.,extends:>,nbsp:_
-    else
-        set list listchars=eol:%,tab:>-,trail:.,extends:>
-    endif
-    "set Tlist_Inc_Winwidth=0
-endif
-
-"exe "set list listchars=tab:\xbb\xb7,trail:\xb7"
-"set list listchars=tab:\|_,eol:¶ 
-set nolist
-
-"autocmd FileType * set tabstop=2|set shiftwidth=2|set noexpandtab
-"autocmd FileType python set tabstop=4|set shiftwidth=4|set expandtab
-au FileType make setlocal noexpandtab  " for make files
-au BufEnter *.m set ai sw=2 ts=2 sts=2 sta et fo=croql
-au BufEnter *.py set ai sw=2 ts=2 sts=2 sta et fo=croql
-au BufEnter *.cc set ai sw=2 ts=2 sts=2 sta et fo=croql
-au BufEnter *.res set ai ts=8 sw=8
-
-" Source a global configuration file if available
-" XXX Deprecated, please move your changes here in /etc/vim/vimrc
-if filereadable("/etc/vim/vimrc.local")
-  " source /etc/vim/vimrc.local
-"  source /etc/vimrc
-endif
-
diff --git a/ENVISAT_TOOLS/Makefile b/envisat_tools/Makefile
similarity index 98%
rename from ENVISAT_TOOLS/Makefile
rename to envisat_tools/Makefile
index b5a1760..9126dff 100755
--- a/ENVISAT_TOOLS/Makefile
+++ b/envisat_tools/Makefile
@@ -21,7 +21,7 @@ CC = 		gcc
 #CFLAGS =	-O3
 #CFLAGS =        -m32         # for 64-bit systems, it requires compatibility lib32, no need for epr_api v2.2
 LFLAGS =	-lm
-INSTALL_DIR =	/usr/local/bin
+INSTALL_DIR =	/home/gertmulder/bin/doris/doris_v5_wu_branch
 
 
 
diff --git a/ENVISAT_TOOLS/README b/envisat_tools/README
similarity index 100%
rename from ENVISAT_TOOLS/README
rename to envisat_tools/README
diff --git a/ENVISAT_TOOLS/envisat_dump_HH.c b/envisat_tools/envisat_dump_HH.c
similarity index 100%
rename from ENVISAT_TOOLS/envisat_dump_HH.c
rename to envisat_tools/envisat_dump_HH.c
diff --git a/ENVISAT_TOOLS/envisat_dump_VV.c b/envisat_tools/envisat_dump_VV.c
similarity index 81%
rename from ENVISAT_TOOLS/envisat_dump_VV.c
rename to envisat_tools/envisat_dump_VV.c
index eb5cca5..bb47a05 100755
--- a/ENVISAT_TOOLS/envisat_dump_VV.c
+++ b/envisat_tools/envisat_dump_VV.c
@@ -29,10 +29,13 @@ int main(int argc, char** argv)
   EPR_SDatasetId* MDS1;
   EPR_SRecord*    rec1;
   EPR_SRecord*    rec5;
-  EPR_SField*     numlines_field;
-  EPR_SField*     numpixels_field;
-  EPR_SField*     line_field;
-  EPR_SField*     line_num_field;
+  //EPR_SField*     numlines_field;
+  //EPR_SField*     numpixels_field;
+  //EPR_SField*     line_field;
+  EPR_SField     numlines_field;
+  EPR_SField     numpixels_field;
+  EPR_SField     line_field;
+  //EPR_SField*     line_num_field; // not used
   int             status;
   ulong           line_num;
   ulong           numlines;
@@ -103,17 +106,21 @@ int main(int argc, char** argv)
   /* product dataset RECORD field element */
   rec1 = epr_read_record(MAIN_PROC_PM_ID,         0, NULL);
   /* product dataset record FIELD element */
-  numlines_field  = epr_get_field(rec1, "num_output_lines");
-  numpixels_field = epr_get_field(rec1, "num_samples_per_line");
+  //numlines_field  = epr_get_field(rec1, "num_output_lines");
+  //numpixels_field = epr_get_field(rec1, "num_samples_per_line");
+  numlines_field  = *(epr_get_field(rec1, "num_output_lines"));
+  numpixels_field = *(epr_get_field(rec1, "num_samples_per_line"));
   /*
   epr_free_record(rec1);
   */
-  epr_print_field(numlines_field, stdout);
-  epr_print_field(numpixels_field, stdout);
+  //epr_print_field(numlines_field, stdout);
+  //epr_print_field(numpixels_field, stdout);
+  epr_print_field(&numlines_field, stdout);
+  epr_print_field(&numpixels_field, stdout);
   //numlines        = epr_get_field_elem_as_ulong(numlines_field, 0);
   //numpixels       = epr_get_field_elem_as_ulong(numpixels_field, 0);
-  numlines        = epr_get_field_elem_as_uint(numlines_field, 0);
-  numpixels       = epr_get_field_elem_as_uint(numpixels_field, 0);
+  numlines        = epr_get_field_elem_as_uint(&numlines_field, 0);
+  numpixels       = epr_get_field_elem_as_uint(&numpixels_field, 0);
   if (argc == 3)
     {
     l0 = 1;
@@ -174,15 +181,20 @@ int main(int argc, char** argv)
     printf("line number: %i\n", line_num);
     */
 
-    line_field = epr_get_field(rec5, "proc_data");
+    //line_field = epr_get_field(rec5, "proc_data");
+    line_field = *(epr_get_field(rec5, "proc_data"));
     cnt = 2*(p0-1);/* p0 starts at 1 for first element */
     /* write out selected pixels */
     for (x=p0;x<=pN;x++)
       {
-      realpart_short = epr_get_field_elem_as_short(line_field,cnt);
-      cnt = cnt + 1;
-      imagpart_short = epr_get_field_elem_as_short(line_field,cnt);
-      cnt = cnt + 1;
+     // realpart_short = epr_get_field_elem_as_short(line_field,cnt);
+     // cnt = cnt + 1;
+     // imagpart_short = epr_get_field_elem_as_short(line_field,cnt);
+     // cnt = cnt + 1;
+      realpart_short = epr_get_field_elem_as_short(&line_field,cnt);
+      cnt++;
+      imagpart_short = epr_get_field_elem_as_short(&line_field,cnt);
+      cnt++;
 /*
 printf("%i,%i: realpart: %f  %f\n", x,y,(float)realpart_short,(float)imagpart_short);
 */
@@ -191,7 +203,7 @@ printf("%i,%i: realpart: %f  %f\n", x,y,(float)realpart_short,(float)imagpart_sh
       status = fwrite(&imagpart_short,2,1,outstream);
       if (status != 1) fprintf(stderr,"fwrite could not write to disk?");
       }
-    /* this program need memory for some reason?  try to free it */
+    /* this program seemed to fill all memory for some reason; free each record explicitly */
     epr_free_record(rec5);
     }
   fclose(outstream);
diff --git a/ENVISAT_TOOLS/envisat_dump_data.c b/envisat_tools/envisat_dump_data.c
similarity index 100%
rename from ENVISAT_TOOLS/envisat_dump_data.c
rename to envisat_tools/envisat_dump_data.c
diff --git a/ENVISAT_TOOLS/envisat_dump_header.c b/envisat_tools/envisat_dump_header.c
similarity index 100%
rename from ENVISAT_TOOLS/envisat_dump_header.c
rename to envisat_tools/envisat_dump_header.c
diff --git a/ENVISAT_TOOLS/envisat_dump_header2doris.csh b/envisat_tools/envisat_dump_header2doris.csh
similarity index 100%
rename from ENVISAT_TOOLS/envisat_dump_header2doris.csh
rename to envisat_tools/envisat_dump_header2doris.csh
diff --git a/install/INSTALL.txt b/install/INSTALL.txt
new file mode 100755
index 0000000..7a4797b
--- /dev/null
+++ b/install/INSTALL.txt
@@ -0,0 +1,116 @@
+This "INSTALL" file describes the installation of the "Doris" Delft
+radar interferometric software package.  Doris has been installed on
+virtually all operating systems world-wide (mainly UNIX-like; for
+MS Windows(c) your best option is Cygwin).
+
+If the following does not work for you, please first search the FAQ via
+  http://doris.tudelft.nl
+
+
+There are 5 components in this distribution (doris v5.0beta):
+  1. doris_stack subdirectory:     doris stack code for processing stacks of Sentinel-1 data;
+  2. prepare_stack subdirectory:   code for setting up a doris stack;
+  3. doris_core and bin subdirectory:   doris core software (C++ source code, python and shell scripts);
+  4. envisat_tools subdir:         standalone reader for ENVISAT (C source code);
+  5. sartools subdir:              standalone utilities (C++ source code).
+
+--------------------------------------------
+- INSTALLATION OF Doris 5 -
+--------------------------------------------
+
+--------------------------------------------
+- OPTIONAL: installation of FFTW library ---
+- tested for Cygwin, SUN Solaris, Linux ----
+--------------------------------------------
+1. download fftw-3.2.1 from http://www.fftw.org : wget -c http://www.fftw.org/fftw-3.2.1.tar.gz
+2. gunzip and tar xvf it in subdirectory of doris root installation.
+3. cd fftw-3.2.1
+4. ./configure --prefix=`pwd` --enable-float
+5. make
+6. make install
+
+
+--------------------------------------------
+- COMPILATION OF THE DORIS CORE ------------
+--------------------------------------------
+7. cd ../doris_core
+8. Read the README file
+9. ./configure             (creates "Makefile")  # requires the tcsh shell; on Ubuntu install it
+                            with "sudo apt-get install tcsh".
+                            ( +answer the questions about libraries, etc.)
+10. make                    (compiles the software)
+11. make install            (installs doris and bin scripts)
+
+
+--------------------------------------------
+- COMPILATION OF DORIS UTILITIES -----------
+--------------------------------------------
+12. cd ../sartools
+13. make
+14. Review/edit the Makefile if this does not work
+    (for example if you do not want to use GNU gcc/g++ as compiler)
+15. make install            (installs in /usr/local/bin unless you edit the Makefile)
+
+
+16. cd ../envisat_tools       # on 64-bit system requires libc-dev-i386 library ex: "sudo apt-get install libc-dev-i386"
+17. make
+18. Review/edit the Makefile if this does not work
+    (for example if you do not want to use gcc as compiler)
+19. make install
+
+
+--------------------------------------------
+- INSTALLATION OF USEFUL EXTERNAL SOFTWARE -
+--------------------------------------------
+The doris software depends for full functionality on:
+21.   getorb:  precise orbits for ERS/ENVISAT      # requires fortran (f77, g77 or gfortran)   ex: "sudo apt-get install gfortran", then edit the Makefile and set FC = gfortran
+               (used by Doris step M_PORBITS and S_PORBITS).
+22.   snaphu:  standalone executable used for unwrapping
+               (used by Doris step UNWRAP).
+
+These programs should also be installed on your system. Refer to the download area of the doris website
+   http://doris.tudelft.nl
+to obtain these programs.
+
+--------------------------------------------
+- INSTALLATION OF Doris - python part
+--------------------------------------------
+
+To use the Doris Python scripts you will need to install the following Python packages:
+-       numpy, scipy (for calculations)
+-       matplotlib (visualization)
+-       requests (data download)
+-       gdal, gdal-dev, shapely, fiona, pyproj, fastkml, osr (GIS)
+This can be done using:
+
+23. pip install [package_name]
+
+If you installed Doris via pip, this should be done automatically.
+
+Packages can also be installed using anaconda. This package comes with the Spyder IDE, which can be used to do some
+editing of the code. If you want better insight into the overall project structure, we recommend installing PyCharm
+as an additional IDE.
+
+After installing the python packages you have to link the doris_core, cpxfiddle and snaphu executables.
+You can do so by executing the install script, but you first have to create user accounts to be able to download
+Sentinel-1 data and SRTM DEMs.
+
+24. create an account for the downloading of SRTM DEMs at
+https://urs.earthdata.nasa.gov/users/new
+25. create an account for downloading Sentinel data at
+https://scihub.copernicus.eu/dhus/#/self-registration
+
+# move to the install directory
+26. cd ../install
+27. python init_cfg.py
+and fill in the different paths and user accounts
+
+This should be it ...
+
+To set up the processing of a datastack, see the README file in the DORIS root directory for further instructions
+on the use of DORIS.
+
+Enjoy,
+
+TUDELFT RADAR GROUP 2017
+doris_users at tudelft.nl
\ No newline at end of file
diff --git a/install/__init__.py b/install/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/install/doris_config_template.xml b/install/doris_config_template.xml
new file mode 100644
index 0000000..deab7dc
--- /dev/null
+++ b/install/doris_config_template.xml
@@ -0,0 +1,10 @@
+<paths>
+    <source_path></source_path>
+    <doris_path></doris_path>
+    <cpxfiddle_path></cpxfiddle_path>
+    <snaphu_path></snaphu_path>
+    <scihub_username></scihub_username>
+    <scihub_password></scihub_password>
+    <usgs_username></usgs_username>
+    <usgs_password></usgs_password>
+</paths>
\ No newline at end of file
diff --git a/install/init_cfg.py b/install/init_cfg.py
new file mode 100644
index 0000000..a125057
--- /dev/null
+++ b/install/init_cfg.py
@@ -0,0 +1,73 @@
+import xml.etree.ElementTree as ET
+import os
+
+def init_cfg():
+    template_xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'doris_config_template.xml')
+    xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'doris_config.xml')
+    tree = ET.parse(template_xml_file)
+    settings = tree.getroot()
+
+    settings.find('.source_path').text = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+    input = False
+    while input == False:
+        user_input = raw_input("Enter the path to doris: ")
+        if os.path.exists(user_input) and user_input.endswith('doris'):
+            settings.find('.doris_path').text = user_input
+            input = True
+        else:
+            print('The path is incorrect, use another path')
+
+    input = False
+    while input == False:
+        user_input = raw_input("Enter the path to cpxfiddle: ")
+        if os.path.exists(user_input) and user_input.endswith('cpxfiddle'):
+            settings.find('.cpxfiddle_path').text = user_input
+            input = True
+        else:
+            print('The path is incorrect, use another path')
+
+    input = False
+    while input == False:
+        user_input = raw_input("Enter the path to snaphu: ")
+        if os.path.exists(user_input) and user_input.endswith('snaphu'):
+            settings.find('.snaphu_path').text = user_input
+            input = True
+        else:
+            print('The path is incorrect, use another path')
+
+    # Now create the password file.
+    user_input = raw_input("Enter your username for scihub (https://scihub.copernicus.eu/dhus/#/self-registration)")
+    if len(user_input) > 0:
+        settings.find('.scihub_username').text = user_input
+    else:
+        print('Username field is empty, you can change it later in the doris_config.xml file')
+
+    user_input = raw_input("Enter your password for scihub ")
+    if len(user_input) > 0:
+        settings.find('.scihub_password').text = user_input
+    else:
+        print('Password field is empty, you can change it later in the doris_config.xml file')
+
+    user_input = raw_input("Enter your username for srtm download (https://urs.earthdata.nasa.gov/users/new/)")
+    if len(user_input) > 0:
+        settings.find('.usgs_username').text = user_input
+    else:
+        print('Username field is empty, you can change it later in the doris_config.xml file')
+
+    user_input = raw_input("Enter your password for srtm download ")
+    if len(user_input) > 0:
+        settings.find('.usgs_password').text = user_input
+    else:
+        print('Password field is empty, you can change it later in the doris_config.xml file')
+
+    print('Doris is initialized. If you want to make changes later, you can change the doris_config.xml file' +
+          ' or run this script again')
+
+    tree.write(open(xml_file, 'w'))
+
+# Actually execute the code...
+if __name__ == "__main__":
+
+    # Initialize...
+    init_cfg()
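init_cfg.py fills doris_config.xml next to the template shown above. Other scripts in this
commit read the values back with ElementTree; a short sketch of that read path, assuming the
reader lives in the same directory as the config file:

    import os
    import xml.etree.ElementTree as ET

    xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'doris_config.xml')
    settings = ET.parse(xml_file).getroot()

    # Tags as defined in doris_config_template.xml above.
    doris_path = settings.find('.doris_path').text
    scihub_username = settings.find('.scihub_username').text
    print(doris_path)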
diff --git a/prepare_stack/__init__.py b/prepare_stack/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/prepare_stack/create_datastack_bash.py b/prepare_stack/create_datastack_bash.py
new file mode 100644
index 0000000..532f25d
--- /dev/null
+++ b/prepare_stack/create_datastack_bash.py
@@ -0,0 +1,77 @@
+# This class creates the bash scripts in the stack folder to run the final datastack.
+# Please note that you still have to define your start and end dates!
+
+import os
+import xml.etree.ElementTree as ET
+
+class CreateBash(object):
+
+
+    def __init__(self):
+        return
+
+    def create(self, stack_folder, root_folder, nodes):
+
+        xml_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+                                'install/doris_config.xml')
+        tree = ET.parse(xml_file)
+        settings = tree.getroot()
+
+        source_path = settings.find('.source_path').text
+        doris_folder = os.path.dirname(settings.find('.doris_path').text)
+        cpxfiddle_folder = os.path.dirname(settings.find('.cpxfiddle_path').text)
+        snaphu_folder = os.path.dirname(settings.find('.snaphu_path').text)
+
+        file_path=os.path.join(stack_folder, 'doris_stack.sh')
+
+        f = open(file_path, 'w')
+
+        doris_run_script = os.path.join(source_path, 'doris_stack', 'main_code', 'doris_main.py')
+        processing = stack_folder
+
+        f.write('#!/bin/bash \n')
+        f.write('\n')
+        f.write('#PBS -l nodes=1:ppn=' + nodes + ' \n')
+        f.write('\n')
+        f.write('source_path=' + source_path + '\n')
+        f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n')
+        f.write('export PATH=' + doris_folder + ':' + cpxfiddle_folder + ':' + snaphu_folder + ':' + '$PATH \n')
+        f.write('python ' + doris_run_script + ' -p ' + processing + ' \n')
+
+        f.close()
+
+        # make sure the file is executable
+        os.chmod(file_path, 0744)
+
+        # Also create a download and dem creation bash script.
+        file_path = os.path.join(stack_folder, 'create_dem.sh')
+        f = open(file_path, 'w')
+
+        doris_run_script = os.path.join(source_path, 'prepare_stack', 'create_dem.py')
+        processing = stack_folder
+
+        f.write('#!/bin/bash \n')
+        f.write('\n')
+        f.write('source_path=' + source_path + '\n')
+        f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n')
+        f.write('python ' + doris_run_script + ' ' + processing + ' SRTM3 \n')
+        f.close()
+
+        # make sure the file is executable
+        os.chmod(file_path, 0744)
+
+        file_path = os.path.join(stack_folder, 'download_sentinel.sh')
+        f = open(file_path, 'w')
+        doris_run_script = os.path.join(source_path, 'prepare_stack', 'download_sentinel_data_orbits.py')
+        processing = stack_folder
+
+        # The shebang has to be the first line of the script.
+        f.write('#!/bin/bash \n')
+        f.write('\n')
+        f.write('source_path=' + source_path + '\n')
+        f.write('export PYTHONPATH=$source_path:$PYTHONPATH \n')
+        f.write('python ' + doris_run_script + ' ' + processing + ' \n')
+        f.close()
+
+        # make sure the file is executable
+        os.chmod(file_path, 0744)
\ No newline at end of file
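The generated scripts write the shebang as the very first line (the kernel only honours #! at
the start of the file) and are then flagged executable. The recipe as a standalone sketch,
with hypothetical paths:

    import os

    def write_wrapper(file_path, source_path, command):
        # The shebang only takes effect as the first line of the file.
        with open(file_path, 'w') as f:
            f.write('#!/bin/bash\n\n')
            f.write('export PYTHONPATH=' + source_path + ':$PYTHONPATH\n')
            f.write(command + '\n')
        # 0o744 = rwxr--r--: the owner may execute the wrapper.
        os.chmod(file_path, 0o744)

    write_wrapper('run_example.sh', '/path/to/doris', 'python doris_main.py -p /path/to/stack')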
diff --git a/prepare_stack/create_dem.py b/prepare_stack/create_dem.py
new file mode 100644
index 0000000..f8334d2
--- /dev/null
+++ b/prepare_stack/create_dem.py
@@ -0,0 +1,779 @@
+# Function created by Gert Mulder
+# Institute TU Delft
+# Date 9-11-2016
+# Part of Doris 5.0
+
+# This function creates a dem based on either a shape/kml file or a given bounding box. If a shape/kml file is given a
+# minimum offset of about 0.1 degrees is used.
+# All grids are based on the WGS84 projection. 
+# Downloaded data is based on SRTM void filled data:
+# Documentation: https://lpdaac.usgs.gov/sites/default/files/public/measures/docs/NASA_SRTM_V3.pdf
+
+# Description srtm data: https://lpdaac.usgs.gov/dataset_discovery/measures/measures_products_table/SRTMGL1_v003
+# Description srtm q data: https://lpdaac.usgs.gov/node/505
+
+import numpy as np
+import gdal
+import gdalconst
+import osr
+from HTMLParser import HTMLParser
+import pickle
+import requests
+import os
+import zipfile
+import fastkml
+import shutil
+from fiona import collection
+import xml.etree.ElementTree as ET
+import sys
+
+# These are some example values...
+# shape_filename = '/media/gert/Data/shapes/netherlands/netherland.shp'
+# data_folder = '/media/gert/Data/dem/'
+# dem_path = '/media/gert/Data/dem/test/'
+# resample = 'regular_grid'
+# doris_input = True
+# quality = 'SRTM1'
+# out_file = dem_path + 'output.dem'
+# lats = np.arange(50.1, 53, 0.0005)
+# lons = np.arange(2, 8, 0.0005)
+# rounding = 1
+# border = 0.1
+# create_binary('', out_file, resample, doris_input, lats, lons, rounding, border, data_folder, quality)
+
+
+class CreateDem:
+
+    def __init__(self):
+        return
+
+    def create(self, shape_filename='', out_file='', var_file='', resample='regular_grid', doris_input=True, lats=[], lons=[],
+                      rounding=1, border=0.1, data_folder='', quality='SRTM1', password='', username='', password_file=''):
+        # This function stitches the different tiles together. If no data is available, values will be zero, which is
+        # generally correct because those cells lie at sea level.
+        # The resampling is either
+        # - none
+        # - regular_grid (based on vectors of lats/lons)
+        # - irregular_grid (based on lats/lons)
+
+        if not out_file and not resample == 'irregular_grid':
+            print('Please specify an output filename!')
+            return
+
+        if shape_filename:
+            latlim, lonlim = self._kml_shp_2_bb(shape_filename)
+        else:
+            try:
+                latlim = [min(lats), max(lats)]
+                lonlim = [min(lons), max(lons)]
+            except:
+                print('Not possible to create DEM grid.')
+                return
+
+        # Now add the rounding and borders to add the sides of our image
+        # Please use rounding as a n/60 part of a degree (so 1/15 , 1/10 or 1/20 of a degree for example..)
+        latlim = [np.floor((latlim[0] - border) / rounding) * rounding, np.ceil((latlim[1] + border) / rounding) * rounding]
+        lonlim = [np.floor((lonlim[0] - border) / rounding) * rounding, np.ceil((lonlim[1] + border) / rounding) * rounding]
+
+        # First download the needed .hgt files. Quality is SRTM1, SRTM3 or SRTM30. Files are downloaded where
+        # possible; otherwise we fall back to the lower quality 30 arc-second data.
+        tiles, q_tiles, tiles_30 = self._download_dem_files(latlim, lonlim, quality, data_folder, password=password,
+                                                            username=username, password_xml=password_file)
+
+        # Then create the final grid. This depends on the needed data type and possible resampling...
+        if quality == 'SRTM1':
+            pixel_degree = 3600
+        elif quality == 'SRTM3':
+            pixel_degree = 1200
+        elif quality == 'SRTM30':
+            pixel_degree = 120
+        else:
+            print('quality should be either SRTM1, SRTM3 or SRTM30!')
+            return
+
+        lat_size = int((latlim[1] - latlim[0]) * pixel_degree) + 1
+        lon_size = int((lonlim[1] - lonlim[0]) * pixel_degree) + 1
+        print('Bounding box is:')
+        print('from ' + str(latlim[0]) + ' latitude to ' + str(latlim[1]))
+        print('from ' + str(lonlim[0]) + ' longitude to ' + str(lonlim[1]))
+
+        # Create final grid and add tiles.
+        if quality == 'SRTM1' or quality == 'SRTM3':
+            # Create file for DEM data
+            bin_data = np.memmap(out_file, dtype=np.int16, shape=(lat_size, lon_size), mode='w+')
+            bin_data = self._add_tiles(bin_data, tiles, quality, latlim, lonlim)
+
+            # Create file for quality data
+            bin_q_data = np.memmap(out_file + '.q', dtype=np.uint8, shape=(lat_size, lon_size), mode='w+')
+            bin_q_data = self._add_tiles(bin_q_data, q_tiles, quality, latlim, lonlim, quality_file=True)
+
+            # Save quality file to geotiff
+            temp_q = os.path.join(data_folder, 'temp_q.tiff')
+            n_latlim = [latlim[0] - 0.5 / pixel_degree, latlim[1] + 0.5 / pixel_degree]
+            n_lonlim = [lonlim[0] - 0.5 / pixel_degree, lonlim[1] + 0.5 / pixel_degree]
+            q_tiff = self._create_georeference(n_latlim, n_lonlim, 1.0 / pixel_degree, 1.0 / pixel_degree, 'uint8', temp_q)
+            q_tiff.GetRasterBand(1).WriteArray(bin_q_data)
+            q_tiff = None
+
+        else:  # We are creating a 30 seconds map
+            # Create file for DEM data
+            bin_data = np.memmap(out_file, dtype=np.uint16, shape=(lat_size, lon_size), mode='w+')
+            bin_data = self._add_tiles(bin_data, tiles_30, quality, latlim, lonlim)
+
+            n_latlim = latlim
+            n_lonlim = lonlim
+
+        # Save the binary data as a geotiff
+        print('Save data to geotiff')
+        dem_tiff = os.path.join(data_folder, 'temp_dem.tiff')
+        dem_data = self._create_georeference(n_latlim, n_lonlim, 1.0 / pixel_degree, 1.0 / pixel_degree, 'int16', dem_tiff)
+        dem_data.GetRasterBand(1).WriteArray(bin_data)
+        dem_data = None
+
+        # Apply the egm96 geoid correction to this raster.
+        # Pre-assign the egm tiff file first.
+        print('Calculate geoid correction for SRTM data')
+        egm_tiff = os.path.join(data_folder, 'egm96_resample.tiff')
+        egm_data = self._create_georeference(n_latlim, n_lonlim, 1.0 / pixel_degree, 1.0 / pixel_degree, 'float32', egm_tiff)
+        egm_data = None
+
+        print('Correct DEM for geoid')
+        self._add_egm96(dem_tiff, egm_tiff, data_folder)
+
+        if resample == 'regular_grid':
+            # If regular grid is used, we convert using gdal.
+            # First create a geotiff file, then resample geotiff file. We always use cubic interpolation.
+            print('Resampling to new regular grid')
+            dlat = lats[1] - lats[0]
+            dlon = lons[1] - lons[0]
+            dem_tiff_final = os.path.join(data_folder, 'dem.tiff')
+            dem_data_final = self._create_georeference(latlim, lonlim, dlat, dlon, 'float32', dem_tiff_final)
+            dem_data_final = None
+            dem_data_final = gdal.Open(dem_tiff_final, gdal.GA_Update)
+            dem_data = gdal.Open(dem_tiff, gdal.GA_Update)
+
+            gdal.ReprojectImage(dem_data, dem_data_final, None, None, gdal.GRA_Cubic)
+            dem_data_final = None
+            dem_tiff = dem_tiff_final
+
+        elif resample == 'irregular_grid':
+            # Use a simple bilinear approach to find values for specific points.
+            print('Resampling to new irregular grid')
+            heights = self._simple_bilinear(lats, lons, dem_tiff, data_folder)
+
+            return heights
+
+        if doris_input:
+            # Create a binary output file
+            command = 'gdal_translate -of MFF ' + dem_tiff + ' ' + dem_tiff[:-5] + '.raw'
+            os.system(command)
+
+            if not os.path.exists(os.path.dirname(out_file)):
+                os.makedirs(os.path.dirname(out_file))
+            shutil.move(dem_tiff[:-5] + '.r00', out_file)
+
+            # And create the scripts that go with them.
+            self._output_doris_inputfiles(dem_tiff, out_file, var_file)
+
+            # Finally also move .tiff file
+            shutil.move(dem_tiff, out_file[:-4] + '.tiff')
+
+        return
+
+
+    def _add_tiles(self, outputdata, tiles, quality, latlim, lonlim, quality_file=False):
+        # This function adds tiles to np.memmap file
+
+        if quality == 'SRTM1':
+            shape = (3601, 3601)
+            s_size = 1.0 / 3600.0
+            step_lat = 1
+            step_lon = 1
+        elif quality == 'SRTM3':
+            shape = (1201, 1201)
+            s_size = 1.0 / 1200.0
+            step_lat = 1
+            step_lon = 1
+        elif quality == 'SRTM30':
+            shape = (6000, 4800)
+            s_size = 1.0 / 120.0
+            step_lat = 50.0 - s_size
+            step_lon = 40.0 - s_size
+        else:
+            print('quality should be either SRTM1, SRTM3 or SRTM30!')
+            return
+
+        print('total file size is ' + str(outputdata.shape[0]) + ' in latitude and ' + str(outputdata.shape[1]) + ' in longitude')
+
+        for tile in tiles:
+            if quality_file:
+                image = np.fromfile(tile, dtype='>u1').reshape(shape)
+            else:
+                image = np.fromfile(tile, dtype='>i2').reshape(shape)
+
+            if os.path.basename(tile)[7] == 'N':
+                lat = float(os.path.basename(tile)[8:10])
+            else:
+                lat = - float(os.path.basename(tile)[8:10])
+            if os.path.basename(tile)[10] == 'E':
+                lon = float(os.path.basename(tile)[11:14])
+            else:
+                lon = - float(os.path.basename(tile)[11:14])
+            if quality == 'SRTM30':
+                lat = lat - 50 + (s_size / 2)
+                lon += (s_size / 2)
+
+            print('adding ' + tile)
+
+            # Find the coordinates of the part of the tile that should be written to the output data.
+            t_latlim = [max(lat, latlim[0]), min(lat + step_lat, latlim[1])]
+            t_lonlim = [max(lon, lonlim[0]), min(lon + step_lon, lonlim[1])]
+            t_latid = [shape[0] - int(round((t_latlim[0] - lat) / s_size)), shape[0] - (int(round((t_latlim[1] - lat) / s_size)) + 1)]
+            t_lonid = [int(round((t_lonlim[0] - lon) / s_size)), int(round((t_lonlim[1] - lon) / s_size)) + 1]
+            latsize = int(round((latlim[1] - latlim[0]) / s_size)) + 1
+            latid = [latsize - int(round((t_latlim[0] - latlim[0]) / s_size)), latsize - (int(round((t_latlim[1] - latlim[0]) / s_size)) + 1)]
+            lonid = [int(round((t_lonlim[0] - lonlim[0]) / s_size)), int(round((t_lonlim[1] - lonlim[0]) / s_size)) + 1]
+
+            print('Adding tile lat ' + str(t_latid[1] + 1) + ' to ' + str(t_latid[0]) + ' into dem file ' +
+                  str(latid[1] + 1) + ' to ' + str(latid[0]))
+            print('Adding tile lon ' + str(t_lonid[0] + 1) + ' to ' + str(t_lonid[1]) + ' into dem file ' +
+                  str(lonid[0] + 1) + ' to ' + str(lonid[1]))
+
+            # Assign values from tiles to outputdata
+            if quality == 'SRTM30':
+                outputdata[latid[1]: latid[0]-2, lonid[0]: lonid[1]-2] = image[t_latid[1]: t_latid[0]-2, t_lonid[0]: t_lonid[1]-2]
+            else:
+                outputdata[latid[1]: latid[0]-1, lonid[0]: lonid[1]-1] = image[t_latid[1]: t_latid[0]-1, t_lonid[0]: t_lonid[1]-1]
+
+        return outputdata
+
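_add_tiles converts latitude/longitude offsets into row/column indices at the tile posting
s_size, counting rows from the top of the mosaic. A small worked example of that arithmetic
for SRTM3, with a made-up bounding box:

    # SRTM3 posting: 1200 pixels per degree, 1201 samples per tile edge.
    s_size = 1.0 / 1200.0
    latlim, lonlim = [51.0, 53.0], [4.0, 6.0]

    # A tile anchored at N52 E004 covers latitudes 52..53 of this box.
    lat, lon = 52.0, 4.0
    latsize = int(round((latlim[1] - latlim[0]) / s_size)) + 1  # 2401 output rows
    row_hi = latsize - int(round((lat - latlim[0]) / s_size))   # 1201: row of the tile's lower edge
    col_lo = int(round((lon - lonlim[0]) / s_size))             # 0: left column of the tile
    print(latsize, row_hi, col_lo)  # 2401 1201 0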
+
+    def _download_dem_files(self, latlim, lonlim, quality, data_folder, username='', password='', password_xml=''):
+        # This function downloads data either in 1,3 or 30 arc seconds. When you choose either 1 or 3 seconds also 30
+        # seconds is downloaded to fill the voids.
+        # In this operation the needed files are also extracted...
+
+        # Check the username and password and load from config file if needed.
+        if not username or not password:
+            if os.path.exists(password_xml):
+                tree = ET.parse(password_xml)
+                settings = tree.getroot()
+
+                username = settings.find('.usgs_username').text
+                password = settings.find('.usgs_password').text
+            else:
+                print('You should specify a username and password to download SRTM data. ')
+                return
+
+        filelist = self._srtm_listing(data_folder, username=username, password=password)
+        outfiles = []
+        q_files = []
+        outfiles_30 = []
+
+        lats = np.arange(np.floor(latlim[0]), np.ceil(latlim[1]))
+        lons = np.arange(np.floor(lonlim[0]), np.ceil(lonlim[1]))
+
+        if quality == 'SRTM1' or quality == 'SRTM3':
+            for lat in lats:
+                for lon in lons:
+
+                    lat = int(lat)
+                    lon = int(lon)
+
+                    if lat < 0:
+                        latstr = 'S' + str(abs(lat)).zfill(2)
+                    else:
+                        latstr = 'N' + str(lat).zfill(2)
+                    if lon < 0:
+                        lonstr = 'W' + str(abs(lon)).zfill(3)
+                    else:
+                        lonstr = 'E' + str(lon).zfill(3)
+
+                    # Check if file exists in filelist
+                    if str(lat) not in filelist[quality]:
+                        continue
+                    elif str(lon) not in filelist[quality][str(lat)]:
+                        continue
+
+                    filename = os.path.join(data_folder, latstr + lonstr + 'SRTMGL3.hgt.zip')
+                    q_file = os.path.join(data_folder, latstr + lonstr + 'SRTMGL3.q.zip')
+                    extracted_file = os.path.join(data_folder, quality + '__' + latstr + lonstr + '.hgt')
+                    q_extracted = os.path.join(data_folder, quality + '__' + latstr + lonstr + '.q')
+
+                    if not os.path.exists(extracted_file) or not os.path.exists(q_extracted):
+                        # Download and unzip
+                        download_dem = filelist[quality][str(lat)][str(lon)]
+                        download_q = download_dem[:-7] + 'num.zip'
+
+                        command = 'wget ' + download_dem + ' --user ' + username + ' --password ' + password + ' -O ' + filename
+                        q_command = 'wget ' + download_q + ' --user ' + username + ' --password ' + password + ' -O ' + q_file
+                        try:
+                            os.system(command)
+                            zip_data = zipfile.ZipFile(filename)
+                            source = zip_data.open(zip_data.namelist()[0])
+                            target = open(extracted_file, 'wb')
+                            shutil.copyfileobj(source, target, length=-1)
+                            target.close()
+                            outfiles.append(extracted_file)
+                            os.remove(filename)
+
+                            os.system(q_command)
+                            zip_data = zipfile.ZipFile(q_file)
+                            source = zip_data.open(zip_data.namelist()[0])
+                            target = open(q_extracted, 'wb')
+                            shutil.copyfileobj(source, target, length=-1)
+                            target.close()
+                            q_files.append(q_extracted)
+                            os.remove(q_file)
+                        except:
+                            print('Failed to download or process ' + filename)
+
+                    else:
+                        outfiles.append(extracted_file)
+                        q_files.append(q_extracted)
+
+        for lat in lats:
+            for lon in lons:
+                # Now check with which 30 seconds file this corresponds
+                lat50 = int(np.floor(float(lat + 10) / 50)) * 50 + 40
+                lon40 = int(np.floor(float(lon + 20) / 40)) * 40 - 20
+
+                if lat50 < 0:
+                    latstr = 'S' + str(abs(lat50)).zfill(2)
+                else:
+                    latstr = 'N' + str(lat50).zfill(2)
+                if lon40 < 0:
+                    lonstr = 'W' + str(abs(lon40)).zfill(3)
+                else:
+                    lonstr = 'E' + str(lon40).zfill(3)
+
+                # Does this file exist..
+                if str(lat50) not in filelist['SRTM30']:
+                    continue
+                elif str(lon40) not in filelist['SRTM30'][str(lat50)]:
+                    continue
+
+                filename = os.path.join(data_folder, latstr + lonstr + 'SRTMGL3.hgt.zip')
+                extracted_file = os.path.join(data_folder, 'SRTM30_' + latstr + lonstr + '.hgt')
+
+                # Download and unzip file if possible and add to list
+                if not os.path.exists(extracted_file):
+                    # Download and unzip
+                    # Use the earthdata credentials passed to this function.
+                    command = 'wget ' + filelist['SRTM30'][str(lat50)][str(lon40)] + ' --user ' + username + \
+                              ' --password ' + password + ' -O ' + filename
+                    try:
+                        os.system(command)
+                        zip_data = zipfile.ZipFile(filename)
+                        source = zip_data.open(zip_data.namelist()[0])
+                        target = open(extracted_file, 'wb')
+                        shutil.copyfileobj(source, target, length=-1)
+                        target.close()
+                        outfiles_30.append(extracted_file)
+
+                    except:
+                        print('Failed to download or process ' + filename)
+                elif extracted_file not in outfiles_30:
+                    outfiles_30.append(extracted_file)
+
+        return outfiles, q_files, outfiles_30
+
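The download loop derives SRTM tile names from the integer tile corner, zero-padding latitude
to two digits and longitude to three, with N/S and E/W prefixes. The naming rule in isolation:

    def tile_prefix(lat, lon):
        # e.g. (52, 4) -> 'N52E004' and (-34, -58) -> 'S34W058'
        latstr = ('S' + str(abs(lat)).zfill(2)) if lat < 0 else ('N' + str(lat).zfill(2))
        lonstr = ('W' + str(abs(lon)).zfill(3)) if lon < 0 else ('E' + str(lon).zfill(3))
        return latstr + lonstr

    print(tile_prefix(52, 4), tile_prefix(-34, -58))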
+
+    def _simple_bilinear(self, lats, lons, dem_tiff, data_folder):
+        # To resample to datapoints, we convert the tiff file to a binary file
+        bin_file = os.path.join(data_folder, 'dem.raw')
+        command = 'gdal_translate -of MFF ' + dem_tiff + ' ' + bin_file
+        os.system(command)
+        shutil.move(bin_file[:-4] + '.r00', bin_file)
+
+        # Read it as a memmap function
+        dem_data = gdal.Open(dem_tiff, gdal.GA_Update)
+        size = (dem_data.RasterYSize, dem_data.RasterXSize)
+        data = np.memmap(bin_file, shape=size, dtype=np.dtype('float32'))
+        r = dem_data.GetGeoTransform()
+
+        # And find the values of corresponding points using bilinear interpolation.
+        x_id = np.floor((lons - r[0]) / r[1]).astype('int32')
+        x_diff = (((lons - r[0]) / r[1]) - x_id)
+        y_id = np.floor((lats - r[3]) / r[5]).astype('int32')
+        y_diff = (((lats - r[3]) / r[5]) - y_id)
+
+        # Calculate final results
+        ll_cont = data[y_id, x_id] * (1-x_diff) * (1-y_diff)
+        ul_cont = data[y_id + 1, x_id] * (1-x_diff) * y_diff
+        ur_cont = data[y_id + 1, x_id + 1] * x_diff * y_diff
+        lr_cont = data[y_id, x_id + 1] * x_diff * (1-y_diff)
+        heights = ll_cont + ul_cont + ur_cont + lr_cont
+
+        return heights
+
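_simple_bilinear weighs the four surrounding grid nodes by the fractional offsets of the
target point. A tiny numeric check of those weights on a 2x2 grid (values chosen for the
example only):

    import numpy as np

    # 2x2 grid of heights; query a point a quarter cell right, half a cell up.
    data = np.array([[10.0, 20.0],
                     [30.0, 40.0]])
    x_diff, y_diff = 0.25, 0.5

    height = (data[0, 0] * (1 - x_diff) * (1 - y_diff)   # lower-left
              + data[1, 0] * (1 - x_diff) * y_diff       # upper-left
              + data[1, 1] * x_diff * y_diff             # upper-right
              + data[0, 1] * x_diff * (1 - y_diff))      # lower-right
    print(height)  # 22.5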
+
+    def _kml_shp_2_bb(self, filename):
+        # from kml and shape file to a bounding box. We will always use a bounding box to create the final product.
+
+        if filename.endswith('.shp'):
+            with collection(filename, "r") as inputshape:
+
+                shapes = [shape for shape in inputshape]
+                # only first shape
+                dat = shapes[0]['geometry']['coordinates']
+
+                lon = [l[0] for l in dat[0]]
+                lat = [l[1] for l in dat[0]]
+
+                latlim = [min(lat), max(lat)]
+                lonlim = [min(lon), max(lon)]
+
+        elif filename.endswith('.kml'):
+            doc = open(filename).read()
+            k = fastkml.KML()
+            k.from_string(doc)
+            dat = list(list(k.features())[0].features())[0].geometry[0].exterior.coords[:]
+
+            lon = [l[0] for l in dat]
+            lat = [l[1] for l in dat]
+
+            latlim = [min(lat), max(lat)]
+            lonlim = [min(lon), max(lon)]
+        else:
+            print 'Format not recognized! Please create either a .kml or .shp file.'
+            return []
+
+        return latlim, lonlim
+
+
+    def _add_egm96(self, dem_tiff, egm_tiff, data_folder):
+        # This function adds the geoid from egm96
+
+        # Load egm96 grid and resample to input grid using gdal.
+        # (For this purpose the grid is downloaded from:
+        # http://earth-info.nga.mil/GandG/wgs84/gravitymod/egm96/binary/binarygeoid.html
+        # and converted to geotiff here.)
+
+        filename = os.path.join(data_folder, 'EGM96_15min.dat')
+        egm_source_tiff = os.path.join(data_folder, 'EGM96_15min.tiff')
+
+        if not os.path.exists(egm_source_tiff):
+            if not os.path.exists(filename):
+                # Download egm96 file
+                command = 'wget http://earth-info.nga.mil/GandG/wgs84/gravitymod/egm96/binary/WW15MGH.DAC -O ' + filename
+                os.system(command)
+
+            # Get georeference
+            latlim = [-90.125, 90.125]
+            lonlim = [-0.125, 359.875]
+            dlat = 0.25
+            dlon = 0.25
+            egm_data = self._create_georeference(latlim, lonlim, dlat, dlon, 'float32', egm_source_tiff)
+
+            # Load data
+            egm96 = np.fromfile(filename, dtype='>i2').reshape((721, 1440)).astype('float32')
+
+            # Save as geotiff
+            egm_data.GetRasterBand(1).WriteArray(egm96 / 100)
+            egm_data = None
+
+        egm_source = gdal.Open(egm_source_tiff, gdal.GA_Update)
+        egm_data = gdal.Open(egm_tiff, gdal.GA_Update)
+
+        # Now resample to new dataset. (Copy information from gdal_dem)
+        gdal.ReprojectImage(egm_source, egm_data, None, None, gdalconst.GRA_Bilinear)
+        egm_data = None
+
+        dem_new = dem_tiff + '.new'
+
+        # Finally open the original dataset and add the geoid correction
+        command = 'gdal_calc.py -A ' + dem_tiff + ' -B ' + egm_tiff + ' --outfile=' + dem_new + ' --calc="A+B"'
+        os.system(command)
+
+        shutil.move(dem_new, dem_tiff)
+
+
+    def _output_doris_inputfiles(self, dem_tiff, out_file, var_file):
+        # This script outputs doris inputfiles which can be copied for further processing.
+
+        dem = gdal.Open(dem_tiff)
+
+        xsize = dem.RasterXSize
+        ysize = dem.RasterYSize
+        georef = dem.GetGeoTransform()
+        dlat = georef[1]
+        dlon = abs(georef[5])
+        lonmin = georef[0] + (dlon * 0.5)
+        latmax = georef[3] - (dlat * 0.5)
+
+        output_txt = out_file + '.doris_inputfile'
+        output_var = var_file
+        output_var = open(output_var, 'w')
+        txtfile = open(output_txt, 'w')
+
+        dem_var = dict()
+        dem_var['in_dem'] = out_file
+        dem_var['in_format'] = 'r4'
+        dem_var['in_size'] = str(ysize) + " " + str(xsize)
+        dem_var['in_delta'] = str(dlat) + " " + str(dlon)
+        dem_var['in_ul'] = str(latmax) + " " + str(lonmin)
+        dem_var['in_nodata'] = '-32768'
+        pickle.dump(dem_var, output_var)
+        output_var.close()
+
+        txtfile.write("# The processing cards generated by the create_dem.py script. \n")
+        txtfile.write('# Copy the section(s) that is/are necessary to your processing setup. \n')
+        txtfile.write("c         ___             ___ \n")
+        txtfile.write("comment   ___SIM AMPLITUDE___ \n")
+        txtfile.write("c                             \n")
+        txtfile.write("SAM_IN_DEM     " + out_file + " \n")
+        txtfile.write("SAM_IN_FORMAT   r4 \t\t\t // default is short integer \n")
+        txtfile.write("SAM_IN_SIZE    " + str(ysize) + " " + str(xsize) + " \n")
+        txtfile.write("SAM_IN_DELTA   " + str(dlat) + " " + str(dlon) + " \n")
+        txtfile.write("SAM_IN_UL      " + str(latmax) + " " + str(lonmin) + " \n")
+        txtfile.write("SAM_IN_NODATA  -32768 \n")
+        txtfile.write("  \n")
+        txtfile.write("  \n")
+        txtfile.write("c         ___          ___ \n")
+        txtfile.write("comment   ___DEM ASSIST___ \n")
+        txtfile.write("c                             \n")
+        txtfile.write("DAC_IN_DEM     $dempath/$outfile5 \n")
+        txtfile.write("DAC_IN_FORMAT   r4 \t\t\t // default is short integer \n")
+        txtfile.write("DAC_IN_SIZE    " + str(ysize) + " " + str(xsize) + " \n")
+        txtfile.write("DAC_IN_DELTA   " + str(dlat) + " " + str(dlon) + " \n")
+        txtfile.write("DAC_IN_UL      " + str(latmax) + " " + str(lonmin) + " \n")
+        txtfile.write("DAC_IN_NODATA  -32768 \n")
+        txtfile.write("  \n")
+        txtfile.write("  \n")
+        txtfile.write("c         ___             ___ \n")
+        txtfile.write("comment   ___REFERENCE DEM___ \n")
+        txtfile.write("c                             \n")
+        txtfile.write("## CRD_METHOD   DEMINTRPMETHOD \n")
+        txtfile.write("CRD_IN_DEM     $dempath/$outfile5 \n")
+        txtfile.write("CRD_IN_FORMAT   r4 \t\t\t // default is short integer \n")
+        txtfile.write("CRD_IN_SIZE    " + str(ysize) + " " + str(xsize) + " \n")
+        txtfile.write("CRD_IN_DELTA   " + str(dlat) + " " + str(dlon) + " \n")
+        txtfile.write("CRD_IN_UL      " + str(latmax) + " " + str(lonmin) + " \n")
+        txtfile.write("CRD_IN_NODATA  -32768 \n")
+
+        txtfile.close()
+
+
+    def _srtm_listing(self, data_folder, username, password):
+        # This function makes a list of all available 1, 3 and 30 arc-second data files,
+        # which makes it easier to detect whether a tile exists before downloading.
+
+        data_file = os.path.join(data_folder, 'filelist')
+        if os.path.exists(data_file):
+            dat = open(data_file, 'r')
+            filelist = pickle.load(dat)
+            dat.close()
+            return filelist
+
+        server = "http://e4ftl01.cr.usgs.gov"
+
+        folders = 'SRTM/SRTMGL1.003/2000.02.11/', 'SRTM/SRTMGL3.003/2000.02.11/', 'SRTM/SRTMGL30.002/2000.02.11/'
+        keys = ['SRTM1', 'SRTM3', 'SRTM30']
+        filelist = dict()
+        filelist['SRTM1'] = dict()
+        filelist['SRTM3'] = dict()
+        filelist['SRTM30'] = dict()
+
+        for folder, key_value in zip(folders, keys):
+
+            conn = requests.get(server + '/' + folder, auth=(username, password))
+            if conn.status_code == 200:
+                print('Status 200: connection ok')
+            else:
+                print('An error occurred during connection')
+
+            data = conn.text
+            parser = parseHTMLDirectoryListing()
+            parser.feed(data)
+            files = parser.getDirListing()
+
+            if key_value == 'SRTM1' or key_value == 'SRTM3':
+                files = [f for f in files if f.endswith('hgt.zip')]
+                north = [int(filename[1:3]) for filename in files]
+                east = [int(filename[4:7]) for filename in files]
+                for i in [i for i, filename in enumerate(files) if filename[0] == 'S']:
+                    north[i] = north[i] * -1
+                for i in [i for i, filename in enumerate(files) if filename[3] == 'W']:
+                    east[i] = east[i] * -1
+            else:
+                files = [f for f in files if f.endswith('dem.zip')]
+                north = [int(filename[5:7]) for filename in files]
+                east = [int(filename[1:4]) for filename in files]
+                for i in [i for i, filename in enumerate(files) if filename[4] == 's']:
+                    north[i] = north[i] * -1
+                for i in [i for i, filename in enumerate(files) if filename[0] == 'w']:
+                    east[i] = east[i] * -1
+
+            for filename, n, e in zip(files, north, east):
+                if not str(n) in filelist[key_value]:
+                    filelist[key_value][str(n)] = dict()
+                filelist[key_value][str(n)][str(e)] = server + '/' + folder + filename
+
+        file_list = open(os.path.join(data_folder, 'filelist'), 'w')
+        pickle.dump(filelist, file_list)
+        file_list.close()
+
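+        # The resulting structure maps quality -> latitude -> longitude -> url, e.g.
+        # (illustrative url):
+        #   filelist['SRTM3']['51']['4']
+        #     -> 'http://e4ftl01.cr.usgs.gov/SRTM/SRTMGL3.003/2000.02.11/N51E004.SRTMGL3.hgt.zip'
+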
+        return filelist
+
+
+    def _create_georeference(self, latlim, lonlim, dlat, dlon, dtype='int16', filename=''):
+        # This function creates a geotiff file with the given georeference instead of a
+        # raw dem file, which allows resampling in later steps using gdal.
+
+        conversion = {
+            "uint8": 1,
+            "int8": 1,
+            "uint16": 2,
+            "int16": 3,
+            "uint32": 4,
+            "int32": 5,
+            "float32": 6,
+            "float64": 7,
+            "complex64": 10,
+            "complex128": 11,
+        }
+
+        if filename:
+            driver = gdal.GetDriverByName('GTiff')
+            dataset = driver.Create(filename,
+                                    int(np.round((lonlim[1] - lonlim[0]) / dlon)),
+                                    int(np.round((latlim[1] - latlim[0]) / dlat)),
+                                    1,
+                                    conversion[dtype], ['COMPRESS=LZW', 'BIGTIFF=YES'])
+        else:
+            driver = gdal.GetDriverByName('mem')
+            dataset = driver.Create('',
+                                    int((lonlim[1] - lonlim[0]) / dlon),
+                                    int((latlim[1] - latlim[0]) / dlat),
+                                    1,
+                                    conversion[dtype])
+
+        dataset.SetGeoTransform((
+            lonlim[0],  # top-left longitude
+            dlon,       # pixel width (west-east resolution)
+            0,          # rotation
+            latlim[1],  # top-left latitude
+            0,          # rotation
+            -dlat))     # pixel height (north-south resolution, negative)
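+
+        # GDAL geotransform recap (sketch): for a north-up grid,
+        #   lon = GT[0] + col * GT[1]
+        #   lat = GT[3] + row * GT[5]   # GT[5] is negative
+        # so pixel (0, 0) sits at the upper-left corner (lonlim[0], latlim[1]).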
+
+        spatial_ref = osr.SpatialReference()
+        spatial_ref.ImportFromEPSG(4326)
+
+        dataset.SetProjection(spatial_ref.ExportToWkt())
+
+        return dataset
+
+    # This function is not used because the data does not contain gaps.
+    def _fill_voids(self, outputdata, output_30sec, quality):
+        # This function fills the voids in our output data.
+
+        if quality == 'SRTM1':
+            s_size = 1.0 / 3600.0
+        elif quality == 'SRTM3':
+            s_size = 1.0 / 1200.0
+        else:
+            print('quality should be either SRTM1 or SRTM3!')
+            return
+
+        # This script assumes that the final dem grid divides evenly into 1 arc-minute
+        # blocks, so the two input fields are perfectly aligned.
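+        # Index mapping example: for SRTM3, 120.0 * s_size == 0.1, so output
+        # pixel (1200, 4500) falls back to 30-arcsec pixel (120, 450).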
+        id_void = np.argwhere(outputdata == -32767)
+
+        if id_void.size:  # If there are any voids...
+            id_30 = np.floor(id_void * 120.0 * s_size).astype(int)
+            outputdata[id_void[:, 0], id_void[:, 1]] = output_30sec[id_30[:, 0], id_30[:, 1]]
+
+        return outputdata
+
+# Following code is adapted from srtm-1.py > downloaded from
+# https://svn.openstreetmap.org/applications/utils/import/srtm2wayinfo/python/srtm.py
+class parseHTMLDirectoryListing(HTMLParser):
+    def __init__(self):
+        # print "parseHTMLDirectoryListing.__init__"
+        HTMLParser.__init__(self)
+        self.title = "Undefined"
+        self.isDirListing = False
+        self.dirList = []
+        self.inTitle = False
+        self.inHyperLink = False
+        self.currAttrs = ""
+        self.currHref = ""
+
+    def handle_starttag(self, tag, attrs):
+        # print "Encountered the beginning of a %s tag" % tag
+        if tag == "title":
+            self.inTitle = True
+        if tag == "a":
+            self.inHyperLink = True
+            self.currAttrs = attrs
+            for attr in attrs:
+                if attr[0] == 'href':
+                    self.currHref = attr[1]
+
+    def handle_endtag(self, tag):
+        # print "Encountered the end of a %s tag" % tag
+        if tag == "title":
+            self.inTitle = False
+        if tag == "a":
+            # This is to avoid us adding the parent directory to the list.
+            if self.currHref != "":
+                self.dirList.append(self.currHref)
+            self.currAttrs = ""
+            self.currHref = ""
+            self.inHyperLink = False
+
+    def handle_data(self, data):
+        if self.inTitle:
+            self.title = data
+            print "title=%s" % data
+            if "Index of" in self.title:
+                # print "it is an index!!!!"
+                self.isDirListing = True
+        if self.inHyperLink:
+            # We do not include parent directory in listing.
+            if "Parent Directory" in data:
+                self.currHref = ""
+
+    def getDirListing(self):
+        return self.dirList
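+
+# Usage sketch for the parser above (hypothetical listing):
+#   parser = parseHTMLDirectoryListing()
+#   parser.feed('<html><title>Index of /SRTM</title>'
+#               '<a href="N51E004.SRTMGL3.hgt.zip">N51E004</a></html>')
+#   parser.getDirListing()  # -> ['N51E004.SRTMGL3.hgt.zip']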
+
+# Actually execute the code...
+if __name__ == "__main__":
+
+    stack_folder = sys.argv[1]
+    if len(sys.argv) > 2:
+        quality = sys.argv[2]
+        if quality not in ['SRTM1', 'SRTM3']:
+            quality = 'SRTM3'
+    else:
+        quality = 'SRTM3'
+
+    xml_file = os.path.join(stack_folder, 'doris_input.xml')
+    print('reading xml file stack ' + xml_file)
+    tree_stack = ET.parse(xml_file)
+    settings_stack = tree_stack.getroot()[0]
+
+    xml_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'install',
+                                 'doris_config.xml')
+    print('reading xml file settings doris ' + xml_file)
+    tree_doris = ET.parse(xml_file)
+    settings_doris = tree_doris.getroot()
+
+    # Then create the dem file
+    shapefile = settings_stack.find('.shape_file_path').text
+    dem_calc_folder = settings_stack.find('.dem_processing_folder').text
+    dem_out_folder = settings_stack.find('.dem_folder').text
+
+    dem_out = os.path.join(dem_out_folder, 'dem.raw')
+    dem_var = os.path.join(dem_out_folder, 'var.raw')
+
+    srtm_username = settings_doris.find('.usgs_username').text
+    srtm_password = settings_doris.find('.usgs_password').text
+
+    dem = CreateDem()
+    dem.create(shapefile, dem_out, dem_var, resample=None,
+            doris_input=True, rounding=1, border=1,
+            data_folder=dem_calc_folder, quality=quality,
+            password=srtm_password, username=srtm_username)
diff --git a/prepare_stack/create_doris_input_xml.py b/prepare_stack/create_doris_input_xml.py
new file mode 100644
index 0000000..e63462e
--- /dev/null
+++ b/prepare_stack/create_doris_input_xml.py
@@ -0,0 +1,163 @@
+import xml.etree.ElementTree as ET
+import os
+from datetime import datetime
+
+class CreateDorisInputXml(object):
+
+    def __init__(self, input_file):
+        self.input_file_dict = {}
+        self.dem_folder = ''
+        if len(input_file) == 0:
+            self._create_xml()
+        else:
+            self._read_xml(input_file)
+
+
+    def _create_xml(self):
+        # This will first create the framework with data folders; the stack folder should
+        # contain a doris parameters file.
+
+        input = False
+        while input == False:
+            user_input = raw_input("Enter the path to the archive data folder: ")
+            if os.path.exists(user_input):
+                self.input_file_dict['sar_data_folder'] = user_input
+                input = True
+            else:
+                print('The path is incorrect, use another path')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Which polarisation do you want to use (vv,hh,vh,hv): ")
+            if user_input in ['vv', 'hh', 'vh', 'hv']:
+                self.input_file_dict['polarisation'] = user_input
+                input = True
+            else:
+                print('This polarisation does not exist')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Which track do you want to work with? (explore on https://scihub.copernicus.eu/dhus/) : ")
+            try:
+                self.input_file_dict['track'] = str(int(user_input)).zfill(3)
+                input = True
+            except ValueError:
+                print('This track does not exist')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Is this track ascending or descending? (asc/dsc) : ")
+            if user_input in ['asc', 'dsc']:
+                self.input_file_dict['direction'] = user_input
+                input = True
+            else:
+                print('Input should either be asc or dsc')
+
+        input = False
+        while input == False:
+            self.input_file_dict['datastack_folder'] = raw_input("Enter the path to the folder of new datastack: ")
+            if os.path.exists(self.input_file_dict['datastack_folder']):
+                input = True
+            else:
+                print('The path is incorrect, use another path')
+
+        input = False
+        while input == False:
+            self.input_file_dict['shape_file_path'] = raw_input("Enter full path to the shapefile: ")
+            if os.path.exists(self.input_file_dict['shape_file_path']) and self.input_file_dict['shape_file_path'].endswith('.shp'):
+                input = True
+            else:
+                print('The path is incorrect, use another path')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Enter the path to the folder of the orbit files: ")
+            if os.path.exists(user_input):
+                self.input_file_dict['orbits_folder'] = user_input
+                input = True
+            else:
+                print('The path is incorrect, use another path')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Do you want to generate the DEM file automaticly (Yes/No): ").lower()
+            if user_input == 'yes' or user_input == 'no':
+                self.input_file_dict['generate_dem'] = user_input
+                input = True
+            else:
+                print('You should use either yes or no')
+
+        input = False
+        while input == False:
+            self.input_file_dict['dem_processing_folder'] = raw_input("Enter path to the dem folder: ")
+            self.input_file_dict['dem_folder'] = os.path.join(self.input_file_dict['datastack_folder'], 'dem')
+            if os.path.exists(self.input_file_dict['dem_processing_folder']):
+                input = True
+            else:
+                print('The path is incorrect, use another path')
+
+        input = False
+        while input == False:
+            user_input = raw_input("Do you want to use parallel computing (Yes/No): ").lower()
+            if user_input == 'yes' or user_input == 'no':
+                self.input_file_dict['parallel'] = user_input
+                input = True
+            else:
+                print('You should use either yes or no')
+
+        if user_input == 'yes':
+            nodes = raw_input("How many cores do you want to use: ")
+            self.input_file_dict['cores'] = nodes
+
+        input = False
+        while input == False:
+            user_input = raw_input("What is the start date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            try:
+                date = datetime.strptime(user_input, '%Y-%m-%d')
+                self.input_file_dict['start_date'] = user_input
+                input = True
+            except:
+                print('Format not recognized, 01-01-2014 chosen')
+                self.input_file_dict['start_date'] = user_input
+
+        input = False
+        while input == False:
+            user_input = raw_input("What is the end date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            try:
+                date = datetime.strptime(user_input, '%Y-%m-%d')
+                self.input_file_dict['end_date'] = user_input
+                input = True
+            except:
+                print('Format not recognized, 01-01-2050 chosen')
+                self.input_file_dict['end_date'] = user_input
+
+        input = False
+        while input == False:
+            user_input = raw_input("What is the master date of your stack in yyyy-mm-dd (can be changed later): ").lower()
+            try:
+                date = datetime.strptime(user_input, '%Y-%m-%d')
+                self.input_file_dict['master_date'] = user_input
+                input = True
+            except:
+                print('Format not recognized, 01-01-2016 chosen. Check https://scihub.copernicus.eu/dhus/#/home for valid date')
+                self.input_file_dict['master_date'] = user_input
+
+        xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'doris_input_template.xml')
+        tree = ET.parse(xml_file)
+        self.settings = tree.getroot()
+
+        for key in self.input_file_dict.keys():
+            self.settings.find('*/' + key).text = self.input_file_dict.get(key)
+
+        tree.write(os.path.join(self.input_file_dict['datastack_folder'], 'doris_input.xml'))
+
+        return self.input_file_dict
+
+    def _read_xml(self, input_file):
+        xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), input_file)
+        tree = ET.parse(xml_file)
+        self.settings = tree.getroot()
+
+    def get_value(self, key):
+        return self.settings.find('*/' + key).text
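+
+# Usage sketch (path is hypothetical, resolved relative to this script's folder):
+#   xml = CreateDorisInputXml('doris_input.xml')
+#   print(xml.get_value('datastack_folder'))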
diff --git a/prepare_stack/create_inputfiles.py b/prepare_stack/create_inputfiles.py
new file mode 100644
index 0000000..5247141
--- /dev/null
+++ b/prepare_stack/create_inputfiles.py
@@ -0,0 +1,103 @@
+# Function created by Gert Mulder
+# Institute TU Delft
+# Date 9-11-2016
+# Part of Doris 5.0
+
+# This function will create the needed files for a datastack based on input information from the datastack.
+import xml.etree.ElementTree as ET
+import os
+import pickle
+
+# Test data
+# settings_table = '/home/gert/software/doris/Doris_s1/sentinel_1/functions/inputfile_template.xml'
+# inputfile_folder = '/media/gert/Data/dem/test/'
+# sensor = 'sentinel-1'
+# dem_info = inputfile_folder + 'output.dem.var'
+
+
+class CreateInputFiles:
+    # Inputs for this class are:
+    # - dem_info > variable or file with information on the dem.
+    # - settings_table > this table includes the general settings for different sensors. You can either choose one
+    #                    of the predefined sensors or create a custom one.
+    # - sensor > select the predefined sensor of your choice.
+    # - Other settings are mainly related to how much output information you want from the program. You can also
+    #   define the amount of memory used by the program.
+
+
+    def __init__(self, dem_info, settings_table, sensor):
+        tree = ET.parse(settings_table)
+        settings = tree.getroot()
+
+        self.xml_data = settings.find('.' + sensor)
+        self.header_data = self.xml_data.find('.header_settings')
+        dem_info = open(dem_info, 'r')
+        self.dem_var = pickle.load(dem_info)
+
+        self.inputfilenames = ['coarsecorr', 'coarseorb', 'coherence', 'coherence_network', 'comprefdem', 'comprefpha', 'coregpm',
+                          'dembased', 'finecoreg', 'geocode', 'interferogram', 'resample', 'subtrrefdem', 'subtrrefpha',
+                          'unwrap', 'phasefilt']
+        self.processes = ['coarsecorr', 'coarseorb', 'coherence', 'coherence', 'comprefdem', 'comprefpha', 'coregpm',
+                          'dembased', 'finecoreg', 'geocode', 'interferogram', 'resample', 'subtrrefdem', 'subtrrefpha',
+                          'unwrap', 'phasefilt']
+
+    def create(self, inputfile_folder):
+        for process_name, filename in zip(self.processes, self.inputfilenames):
+            # Create file
+            inputfilename = os.path.join(inputfile_folder, 'input.' + filename)
+            txtfile = open(inputfilename, 'w')
+
+            # Load xml data for processing step
+            process = self.xml_data.find('./' + process_name + '/PROCESS')
+            process_data = self.xml_data.find('.' + process_name)
+
+            # Write to file
+            txtfile = self._header(txtfile, self.header_data, process)
+            txtfile = self._create_inputfiles(txtfile, process_data, self.dem_var)
+
+            # Close file
+            txtfile.close()
+
+    def _create_inputfiles(self, txtfile, process_data, dem_var):
+        # This functions calls the different inputfile creation scripts.
+
+        for node in process_data:
+            if not node.tag == 'PROCESS':
+                if node.attrib['c'] == 'on':
+                    c = 'c '
+                else:
+                    c = ''
+
+                if 'var' in node.attrib and node.attrib['comment']:
+                    txtfile.write(c + node.tag.ljust(20) + '\t' + dem_var[node.attrib['var']].ljust(20) + '\t // ' + node.attrib['comment'] + '\n')
+                elif not 'var' in node.attrib and node.attrib['comment']:
+                    txtfile.write(c + node.tag.ljust(20) + '\t' + node.text.ljust(20) + '\t // ' + node.attrib['comment'] + '\n')
+                elif 'var' in node.attrib and not node.attrib['comment']:
+                    txtfile.write(c + node.tag.ljust(20) + '\t' + dem_var[node.attrib['var']].ljust(20) + '\n')
+                elif not 'var' in node.attrib and not node.attrib['comment']:
+                    txtfile.write(c + node.tag.ljust(20) + '\t' + node.text.ljust(20) + '\n')
+
+        txtfile.write("STOP                          \n")
+
+        return txtfile
+
+    def _header(self, txtfile, header_data, process):
+        # Function to write header
+
+        txtfile.write("c Inputfile created by Doris 5.0" + '\n')
+        txtfile.write("c         " + "___".ljust(len(process.text) + 3) + "___ \n")
+        txtfile.write("comment   ___" + process.text + "___ \n")
+        txtfile.write("c                             \n")
+
+        for node in header_data:
+            if node.tag == 'PROCESS':
+                txtfile.write("c \n")
+                txtfile.write("PROCESS ".ljust(15) + process.text + " \n")
+                txtfile.write("c \n")
+            else:
+                if node.attrib['comment']:
+                    txtfile.write(node.tag.ljust(20) + '\t' + node.text.ljust(20) + '\t // ' + node.attrib['comment'] + '\n')
+
+        txtfile.write("c                             \n")
+
+        return txtfile
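+
+# Usage sketch (paths are hypothetical):
+#   CreateInputFiles('dem.raw.var', 'inputfile_template.xml', 'sentinel-1').create('input_files')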
diff --git a/prepare_stack/doris_input_template.xml b/prepare_stack/doris_input_template.xml
new file mode 100644
index 0000000..9aac16e
--- /dev/null
+++ b/prepare_stack/doris_input_template.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" ?>
+<root>
+    <paths>
+        <datastack_folder></datastack_folder>
+        <sar_data_folder></sar_data_folder>
+        <polarisation></polarisation>
+        <shape_file_path></shape_file_path>
+        <track></track>
+        <direction></direction>
+        <orbits_folder></orbits_folder>
+        <generate_dem></generate_dem>
+        <dem_folder></dem_folder>
+        <dem_processing_folder></dem_processing_folder>
+        <parallel></parallel>
+        <cores></cores>
+        <start_date></start_date>
+        <end_date></end_date>
+        <master_date></master_date>
+    </paths>
+    <steps>
+        <do_coarse_orbits>Yes</do_coarse_orbits>
+        <do_deramp>Yes</do_deramp>
+        <do_reramp>Yes</do_reramp>
+        <do_fake_fine_coreg_bursts>Yes</do_fake_fine_coreg_bursts>
+        <do_dac_bursts>Yes</do_dac_bursts>
+        <do_fake_coreg_bursts>Yes</do_fake_coreg_bursts>
+        <do_resample>Yes</do_resample>
+        <do_reramp>Yes</do_reramp>
+        <do_interferogram>Yes</do_interferogram>
+        <do_compref_phase>Yes</do_compref_phase>
+        <do_compref_dem>Yes</do_compref_dem>
+        <do_coherence>Yes</do_coherence>
+        <do_esd>Yes</do_esd>
+        <do_network_esd>Yes</do_network_esd>
+        <do_ESD_correct>Yes</do_ESD_correct>
+        <do_ref_phase>Yes</do_ref_phase>
+        <do_ref_dem>Yes</do_ref_dem>
+        <do_phasefilt>Yes</do_phasefilt>
+        <do_calc_coordinates>Yes</do_calc_coordinates>
+        <do_multilooking>Yes</do_multilooking>
+        <do_unwrap>Yes</do_unwrap>
+    </steps>
+</root>
\ No newline at end of file
diff --git a/prepare_stack/download_sentinel_data_orbits.py b/prepare_stack/download_sentinel_data_orbits.py
new file mode 100644
index 0000000..4d18010
--- /dev/null
+++ b/prepare_stack/download_sentinel_data_orbits.py
@@ -0,0 +1,472 @@
+# This file contains a function to check which files for sentinel are available, which ones are downloaded and a quality
+# check for the files which are downloaded.
+
+import urllib
+import urllib2
+import ssl
+import re
+import os, sys
+import datetime
+import base64
+import subprocess
+from fiona import collection
+from fastkml import kml
+from lxml import etree
+import xml.etree.ElementTree as ET
+
+
+def sentinel_available(start_day='', end_day='', sensor_mode='', product='', level='', track='', polarisation='', orbit_direction='', ROI='', user='', password=''):
+    # All available sentinel 1 images are detected and printed on screen.
+    # The following variables can be used to make a selection:
+    # ROI > shape file (.shp) or .kml defining the region of interest
+    # start_day > first day for downloads (default 350 days before now) [yyyy-mm-dd]
+    # end_day > last day for downloads (default today) [yyyy-mm-dd]
+    # track > the tracks we want to check (default all)
+    # polarisation > which polarisation will be used (default all)
+
+    # string is the field we enter as url
+    string = ''
+
+    if sensor_mode:
+        string = string + ' AND ' + 'sensoroperationalmode:' + sensor_mode
+    if product:
+        string = string + ' AND ' + 'producttype:' + product
+    if level:
+        string = string + ' AND ' + level
+    if orbit_direction:
+        string = string + ' AND ' + 'orbitdirection:' + orbit_direction
+    if track:
+        string = string + ' AND ' + 'relativeorbitnumber:' + track
+    if start_day:
+        start = datetime.datetime.strptime(start_day, '%Y-%m-%d').strftime('%Y-%m-%d')
+    else:
+        start = (datetime.datetime.now() - datetime.timedelta(days=350)).strftime('%Y-%m-%d')
+    if end_day:
+        end = datetime.datetime.strptime(end_day, '%Y-%m-%d').strftime('%Y-%m-%d')
+    else:
+        end = datetime.datetime.now().strftime('%Y-%m-%d')
+    if polarisation:
+        string = string + ' AND ' + 'polarisationmode:' + polarisation
+    if ROI:
+        shape_str = load_shape_info(ROI)
+        string = string + ' AND footprint:"Intersects(POLYGON(' + shape_str + '))"'
+
+    date_string = 'beginPosition:[' + start + 'T00:00:00.000Z TO ' + end + 'T23:59:59.999Z] AND endPosition:[' + start + 'T00:00:00.000Z TO ' + end + 'T23:59:59.999Z]'
+    string = string + ' AND ' + date_string
+
+    # Finally we do the query to get the search result.
+    string = string[5:] + '&rows=1000'
+    url = 'https://scihub.copernicus.eu/dhus/search?q=' + urllib.quote_plus(string)
+    print(url)
+
+    print('Requesting available products: ' + url)
+    request = urllib2.Request(url)
+    base64string = base64.b64encode('%s:%s' % (user, password))
+    request.add_header("Authorization", "Basic %s" % base64string)
+
+    # connect to server. Hopefully this works at once
+    try:
+        dat = urllib2.urlopen(request)
+    except:
+        print('Not possible to connect this time')
+        return [], [], []
+
+    html_dat = ''
+    for line in dat:
+        html_dat = html_dat + line
+
+    parser = etree.HTMLParser()
+    tree = etree.fromstring(html_dat, parser)
+    products = [data for data in tree.iter(tag='entry')]
+    links = [data.find('link').attrib for data in tree.iter(tag='entry')]
+    dates = [data.findall('date')[1].text for data in tree.iter(tag='entry')]
+
+    print('Following products will be downloaded')
+    for link in links:
+        print(link)
+
+
+    return products, links, dates
+
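+# For reference, an assembled query string resembles (illustrative values):
+#   sensoroperationalmode:IW AND producttype:SLC AND relativeorbitnumber:88
+#   AND polarisationmode:vv AND footprint:"Intersects(POLYGON((4.1 52.0,4.4 52.0,4.4 52.2,4.1 52.0)))"
+#   AND beginPosition:[2016-01-01T00:00:00.000Z TO 2016-06-01T23:59:59.999Z]
+#   AND endPosition:[2016-01-01T00:00:00.000Z TO 2016-06-01T23:59:59.999Z]
+# followed by '&rows=1000' and URL quoting.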
+
+def load_shape_info(shapefile):
+    # This function converts .shp and .kml files to the footprint string format. If multiple
+    # shapes are available the script will select the first one.
+
+    if shapefile.endswith('.shp'):
+        with collection(shapefile, "r") as inputshape:
+            for shape in inputshape:
+                # only first shape
+                dat = shape['geometry']['coordinates']
+
+                st='('
+                for p in dat[0]:
+                    st = st + str(p[0]) + ' ' + str(p[1]) + ','
+                st = st[:-1] + ')'
+
+                break
+    elif shapefile.endswith('.kml'):
+        doc = open(shapefile).read()
+        k = kml.KML()
+        k.from_string(doc)
+        shape = list(list(k.features())[0].features())[0].geometry.exterior.coords[:]
+        st='('
+        for p in shape:
+            st = st + str(p[0]) + ' ' + str(p[1]) + ','
+        st = st[:-1] + ')'
+    else:
+        print('Format not recognized! Please create either a .kml or .shp file.')
+        return []
+
+    return st
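+
+# For example, a rectangular region of interest yields a string like (illustrative values):
+#   '(4.1 52.0,4.4 52.0,4.4 52.2,4.1 52.2,4.1 52.0)'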
+
+
+def sentinel_check_validity(products=[], destination_folder='', user='', password='', remove=True):
+    # Check if the downloaded files are valid and remove if not
+
+    valid_files = []
+    invalid_files = []
+
+    if not products:
+        print('Nothing to check')
+        return
+
+    for product in products:
+        date = str(product.findall('date')[1].text)
+        date = datetime.datetime.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
+
+        name = str(product.find('title').text)
+
+        track = str(product.find('int[@name="relativeorbitnumber"]').text)
+        data_type = str(product.find(".//str[@name='filename']").text)[4:16]
+        pol = str(product.find(".//str[@name='polarisationmode']").text).replace(' ', '')
+        direction = str(product.find(".//str[@name='orbitdirection']").text)
+        if direction == 'ASCENDING':
+            direction = 'asc'
+        elif direction == 'DESCENDING':
+            direction = 'dsc'
+
+        trackfolder = os.path.join(destination_folder, 's1_' + direction + '_t' + track)
+        typefolder = os.path.join(trackfolder, data_type + '_' + pol)
+        datefolder = os.path.join(typefolder, date.strftime('%Y%m%d'))
+
+        xml_dir = os.path.join(datefolder, name + '.xml')
+        file_dir = os.path.join(datefolder, name + '.SAFE.zip')
+        kml_dir = os.path.join(datefolder, name + '.kml')
+        preview_dir = os.path.join(datefolder, name + '.jpg')
+
+        # First check the file
+        if os.path.exists(file_dir):
+            uuid = product.find('id').text
+            valid_dat = sentinel_quality_check(file_dir, uuid, user, password)
+        else:
+            valid_dat = False
+
+        if not valid_dat:
+            if os.path.exists(file_dir) and remove == True:
+                os.system('rm ' + file_dir)
+            if os.path.exists(xml_dir) and remove == True:
+                os.system('rm ' + xml_dir)
+            if os.path.exists(kml_dir) and remove == True:
+                os.system('rm ' + kml_dir)
+            if os.path.exists(preview_dir) and remove == True:
+                os.system('rm ' + preview_dir)
+
+            invalid_files.append(file_dir)
+        else:
+            valid_files.append(file_dir)
+
+    return invalid_files, valid_files
+
+
+def sentinel_download(products=[], xml_only=False,  destination_folder='', project_folder='', user='', password=''):
+    # Download the files which are found by the sentinel_available script.
+
+    if not products:
+        print('No files to download')
+        return
+
+    wget_base = 'wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --continue --tries=20 --no-check-certificate --user=' + user + ' --password=' + password + ' '
+
+    for product in products:
+        date = str(product.findall('date')[1].text)
+        date = datetime.datetime.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
+
+        url = str('"'+product.findall('link')[0].attrib['href'][:-6]+ urllib.quote_plus('$value') +'"')
+        name = str(product.find('title').text)
+
+        track = str(product.find('int[@name="relativeorbitnumber"]').text)
+        data_type = str(product.find(".//str[@name='filename']").text)[4:16]
+        pol = str(product.find(".//str[@name='polarisationmode']").text).replace(' ', '')
+        direction = str(product.find(".//str[@name='orbitdirection']").text)
+        if direction == 'ASCENDING':
+            direction = 'asc'
+        elif direction == 'DESCENDING':
+            direction = 'dsc'
+
+        trackfolder = os.path.join(destination_folder, direction + '_t' + track.zfill(3))
+        if not os.path.exists(trackfolder):
+            os.mkdir(trackfolder)
+        typefolder = os.path.join(trackfolder, data_type + '_' + pol)
+        if not os.path.exists(typefolder):
+            os.mkdir(typefolder)
+        datefolder = os.path.join(typefolder, date.strftime('%Y%m%d'))
+        if not os.path.exists(datefolder):
+            os.mkdir(datefolder)
+
+        xml_dir = os.path.join(datefolder, name + '.xml')
+        file_dir = os.path.join(datefolder, name + '.SAFE.zip')
+        kml_dir = os.path.join(datefolder, name + '.kml')
+        preview_dir = os.path.join(datefolder, name + '.jpg')
+
+        if project_folder:
+            datefolder = os.path.join(project_folder, 's1', date.strftime('%Y%m%d') + '_t' + track)
+            if not os.path.exists(datefolder):
+                os.mkdir(datefolder)
+            sentinel_folder = os.path.join(datefolder, 'sentinel_1')
+            if not os.path.exists(sentinel_folder):
+                os.mkdir(sentinel_folder)
+
+            xml_project = os.path.join(datefolder, 'sentinel_1', name + '.xml')
+            link_project = os.path.join(datefolder, 'sentinel_1', name + '.SAFE.zip')
+            kml_project = os.path.join(datefolder, 'sentinel_1', name + '.kml')
+            preview_project = os.path.join(datefolder, 'sentinel_1', name + '.jpg')
+
+        # Save .xml files
+        prod = etree.ElementTree(product)
+
+        if not os.path.exists(xml_dir):
+            prod.write(xml_dir, pretty_print = True)
+        if project_folder:
+            if not os.path.exists(xml_project):
+                prod.write(xml_project, pretty_print = True)
+
+        prev = "'preview'"
+        png = "'quick-look.png'"
+        kml = "'map-overlay.kml'"
+        dat = "'" + name + ".SAFE'"
+
+        preview_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + png + ')/' + urllib.quote_plus('$value') + '"'
+        kml_url = url[:-10] + '/Nodes(' + dat + ')/Nodes(' + prev + ')/Nodes(' + kml + ')/' + urllib.quote_plus('$value') + '"'
+
+        # Download data files and create symbolic link
+        if xml_only == False: # So we also download the file
+            if not os.path.exists(file_dir):
+                wget_data = wget_base + url + ' -O ' + file_dir
+                print('download url is:' + wget_data)
+                os.system(wget_data)
+
+                # Finally check whether the file is downloaded correctly. Otherwise delete file and wait for next round of
+                # downloads.
+                uuid = product.find('id').text
+                valid = sentinel_quality_check(file_dir, uuid, user, password)
+            else: # If the file already exists we assume it is valid.
+                valid = True
+
+            if valid == True:
+                # First download additional files
+                if not os.path.exists(preview_dir):
+                    wget_preview = wget_base + preview_url + ' -O ' + preview_dir
+                    os.system(wget_preview)
+                if not os.path.exists(kml_dir):
+                    wget_kml = wget_base + kml_url + ' -O ' + kml_dir
+                    os.system(wget_kml)
+
+                # Then copy to user folder and create links if project folder is used
+                if project_folder:
+                    if not os.path.exists(preview_project):
+                        os.system('cp ' + preview_dir + ' ' + preview_project)
+                    if not os.path.exists(kml_project):
+                        os.system('cp ' + kml_dir + ' ' + kml_project)
+                    if not os.path.exists(link_project):
+                        os.system('ln -s ' + file_dir + ' ' + link_project)
+            else:
+                os.system('rm ' + file_dir)
+                os.system('rm ' + xml_dir)
+                if project_folder:
+                    os.system('rm ' + xml_project)
+
+def sentinel_quality_check(filename, uuid, user, password):
+    # Check whether the zip files can be unpacked or not. This is part of the download procedure.
+
+    checksum_url = "https://scihub.copernicus.eu/dhus/odata/v1/Products('" + uuid + "')/Checksum/Value/" + urllib.quote_plus('$value')
+    request = urllib2.Request(checksum_url)
+    base64string = base64.b64encode('%s:%s' % (user, password))
+    request.add_header("Authorization", "Basic %s" % base64string)
+
+    # connect to server. Hopefully this works at once
+    try:
+        dat = urllib2.urlopen(request)
+    except:
+        print('Not possible to connect this time')
+        return False
+
+    html_dat = ''
+    for line in dat:
+        html_dat = html_dat + line
+
+    # Check file on disk
+    if sys.platform == 'darwin':
+        md5 = subprocess.check_output('md5 ' + filename, shell=True)[-33:-1]
+    elif sys.platform == 'linux2':
+        md5 = subprocess.check_output('md5sum ' + filename, shell=True)[:32]
+    else:
+        print('This function only works on mac or linux systems!')
+        return False
+
+    if md5 == html_dat.lower():
+        return True
+    else:
+        return False
+
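+# A platform-independent alternative to the md5/md5sum calls above would be
+# hashlib (sketch, not used here):
+#   import hashlib
+#   h = hashlib.md5()
+#   with open(filename, 'rb') as f:
+#       for chunk in iter(lambda: f.read(1 << 20), ''):
+#           h.update(chunk)
+#   md5 = h.hexdigest()
+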
+def download_orbits(start_date, end_date, pages=30, precise_folder='', restituted_folder=''):
+    # This script downloads all orbit files from the precise orbits website when pages is set
+    # to a very high number. By default the first 30 pages are checked.
+
+    pages_res = min(pages, 60)  # every day there are 8 restituted orbit files
+    pages_poe = pages
+    last_precise = ''  # Last precise orbit file. Used to remove unused restituted orbit files.
+
+    start_num = int(start_date[0:4] + start_date[5:7] + start_date[8:10])
+    end_num = int(end_date[0:4] + end_date[5:7] + end_date[8:10])
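+
+    # Orbit file names look like (illustrative):
+    #   S1A_OPER_AUX_POEORB_OPOD_20160111T121801_V20151221T225943_20151223T005943.EOF
+    # so orb[42:50] below slices out the validity start date (here 20151221),
+    # which is compared against start_num and end_num.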
+
+    if precise_folder:
+        for i in range(pages_poe):
+            # First extract the orbitfiles from the page.
+
+            url = 'https://qc.sentinel1.eo.esa.int/aux_poeorb/?page=' + str(i + 1)
+            gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+            try:
+                page = urllib2.urlopen(url, context=gcontext)
+            except TypeError:
+                page = urllib2.urlopen(url)
+
+            html = page.read().split('\n')
+            orb_files = []
+
+            for line in html:
+                if re.search('<a .*href=.*>', line):
+                    if re.search('EOF', line):
+                        dat = re.search('<a href=.*>(.*)</a>', line)
+                        orb_files.append(dat.group(1))
+
+            if not last_precise:
+                last_precise = orb_files[0]
+
+            for orb in orb_files:
+                # Download the orbit files
+                filename = os.path.join(precise_folder, orb)
+
+                if int(orb[42:50]) + 1 <= end_num and int(orb[42:50]) + 1 >= start_num:
+                    url = 'https://qc.sentinel1.eo.esa.int/aux_poeorb/' + orb
+                    if not os.path.exists(filename):
+                        try:
+                            urllib.urlretrieve(url, filename, context=gcontext)
+                        except TypeError:
+                            urllib.urlretrieve(url, filename)
+                        print(orb + ' downloaded')
+                    else:
+                        print(orb + ' already downloaded')
+                else:
+                    print(orb + ' is out of date range')
+
+            if len(orb_files) > 0:
+                if int(orb[42:50]) < start_num:
+                    break
+
+    if restituted_folder:
+        now = datetime.datetime.now()
+        last_date = datetime.datetime.strptime(end_date, '%Y-%m-%d')
+        diff = datetime.timedelta(days=25)
+
+        print('Time difference to last date is ' + str((now - last_date).days))
+
+        if now - last_date < diff:  # only run when precise orbits will not be available
+            for i in range(pages_res):
+                # First extract the orbitfiles from the page.
+
+                url = 'https://qc.sentinel1.eo.esa.int/aux_resorb/?page=' + str(i + 1)
+                gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+                try:
+                    page = urllib2.urlopen(url, context=gcontext)
+                except TypeError:
+                    page = urllib2.urlopen(url)
+
+                html = page.read().split('\n')
+                orb_files = []
+
+                for line in html:
+                    if re.search('<a .*href=.*>', line):
+                        if re.search('EOF', line):
+                            dat = re.search('<a href=.*>(.*)</a>', line)
+                            orb_files.append(dat.group(1))
+
+                for orb in orb_files:
+                    # Download the orbit files
+                    filename = os.path.join(restituted_folder, orb)
+
+                    if int(orb[42:50]) + 1 <= end_num and int(orb[42:50]) + 1 >= start_num:
+                        url = 'https://qc.sentinel1.eo.esa.int/aux_resorb/' + orb
+                        if not os.path.exists(filename):
+                            try:
+                                urllib.urlretrieve(url, filename, context=gcontext)
+                            except TypeError:
+                                urllib.urlretrieve(url, filename)
+                            print(orb + ' downloaded')
+                        else:
+                            print(orb + ' already downloaded')
+                    else:
+                        print(orb + ' is out of date range')
+
+                if len(orb_files) > 0:
+                    if int(orb[42:50]) < start_num:
+                        break
+
+
+# Actually execute the code...
+if __name__ == "__main__":
+
+    stack_folder = sys.argv[1]
+
+    xml_file = os.path.join(stack_folder, 'doris_input.xml')
+    tree = ET.parse(xml_file)
+    settings = tree.getroot()[0]
+    print('reading xml file stack ' + xml_file)
+
+    ROI = settings.find('.shape_file_path').text
+    polarisation = settings.find('.polarisation').text
+    archive_folder = settings.find('.sar_data_folder').text
+    track = settings.find('.track').text
+    orbit_folder = settings.find('.orbits_folder').text
+
+    start_date = settings.find('.start_date').text
+    end_date = settings.find('.end_date').text
+
+    # Standard settings
+    level = 'L1'
+    sensor_mode = 'IW'
+    product = 'SLC'
+
+    # user settings
+    xml_name = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+    config_xml_file = os.path.join(xml_name, 'install', 'doris_config.xml')
+    print('reading xml file settings doris ' + config_xml_file)
+    tree = ET.parse(config_xml_file)
+    settings = tree.getroot()
+    user = settings.find('.scihub_username').text
+    password = settings.find('.scihub_password').text
+
+    products, links, dates = sentinel_available(start_day=start_date, end_day=end_date, ROI=ROI,
+                                                polarisation=polarisation, sensor_mode=sensor_mode, track=track,
+                                                orbit_direction='', level=level, product=product,user=user,
+                                                password=password)
+
+    sentinel_download(products, destination_folder=archive_folder, user=user, password=password)
+
+    precise_folder = os.path.join(orbit_folder, 'precise')
+    if not os.path.exists(precise_folder):
+        os.makedirs(precise_folder)
+    restituted_folder = os.path.join(orbit_folder, 'restituted')
+    if not os.path.exists(restituted_folder):
+        os.makedirs(restituted_folder)
+
+    download_orbits(start_date, end_date, pages=100, precise_folder=precise_folder, restituted_folder=restituted_folder)
diff --git a/prepare_stack/inputfile_template.xml b/prepare_stack/inputfile_template.xml
new file mode 100644
index 0000000..5866a5f
--- /dev/null
+++ b/prepare_stack/inputfile_template.xml
@@ -0,0 +1,154 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--Document created by: Gert Mulder additional options can be found in the doris guide-->
+<sensors>
+	<sentinel-1>sentinel-1
+		<header_settings>Settings for the headers
+			<MEMORY comment="MB" c="off">2000</MEMORY>
+			<BEEP comment="level of beeping" c="off">error</BEEP>
+			<OVERWRITE comment="overwrite existing files" c="off">on</OVERWRITE>
+			<PREVIEW comment="on" c="off">on</PREVIEW>
+			<BATCH comment="non-interactive" c="off">on</BATCH>
+			<LISTINPUT comment="prevents copy of this file to log" c="off">on</LISTINPUT>
+			<SCREEN comment="level of output to standard out" c="off">info</SCREEN>
+			<PROCESS></PROCESS>
+			<LOGFILE comment="log file" c="off">log.out</LOGFILE>
+			<I_RESFILE comment="interferogram parameter file" c="off">ifgs.res</I_RESFILE>
+			<M_RESFILE comment="master parameter file" c="off">master.res</M_RESFILE>
+			<S_RESFILE comment="slave parameter file" c="off">slave.res</S_RESFILE>
+			<HEIGHT comment="average WGS84 height" c="on">0.0</HEIGHT>
+			<ORB_INTERP comment="orbit interpolation method" c="on">POLYFIT</ORB_INTERP>
+			<ELLIPSOID comment="WGS84, GRS80, BESSEL or define major and minor axis" c="on">WGS84</ELLIPSOID>
+		</header_settings>
+		<coarsecorr>coarse correlation
+			<PROCESS>COARSECORR</PROCESS>
+			<CC_METHOD comment="magfft" c="off">magfft</CC_METHOD>
+			<CC_ACC comment="only for magspace" c="on">30 30</CC_ACC>
+			<CC_NWIN comment="number of windows only magspace" c="off">10</CC_NWIN>
+			<CC_WINSIZE comment="size of windows" c="off">1024 256</CC_WINSIZE>
+			<CC_INITOFF comment="use result of orbits for initial offset" c="off">orbit</CC_INITOFF>
+		</coarsecorr>
+		<coarseorb>coarse orbits
+			<PROCESS>COARSEORB</PROCESS>
+		</coarseorb>
+		<coherence>coherence between master and slave
+			<PROCESS>COHERENCE</PROCESS>
+			<COH_METHOD comment="" c="off">INCLUDE_REFDEM</COH_METHOD>
+			<COH_OUT_COH comment="output image" c="off">coherence.raw</COH_OUT_COH>
+			<COH_MULTILOOK comment="" c="off">1 1</COH_MULTILOOK>
+			<COH_WINSIZE comment="" c="off">2 10</COH_WINSIZE>
+		</coherence>
+		<coherence_network>coherence between master and slave
+			<PROCESS>COHERENCE</PROCESS>
+			<COH_METHOD comment="" c="off">refphase_only</COH_METHOD>
+			<COH_OUT_COH comment="output image" c="off">coherence.raw</COH_OUT_COH>
+			<COH_MULTILOOK comment="" c="off">1 1</COH_MULTILOOK>
+			<COH_WINSIZE comment="" c="off">2 10</COH_WINSIZE>
+		</coherence_network>
+		<comprefdem>compute the reference phase due to DEM
+			<PROCESS>COMPREFDEM</PROCESS>
+			<CRD_INCLUDE_FE comment="phase w.r.t. ellipsoid" c="on">OFF</CRD_INCLUDE_FE>
+			<CRD_OUT_FILE comment="" c="off">refdem.raw</CRD_OUT_FILE>
+			<CRD_OUT_DEM_LP comment="" c="off">dem_radar.raw</CRD_OUT_DEM_LP>
+			<CRD_OUT_DEM_H2PH comment="" c="off">h2ph_srd.raw</CRD_OUT_DEM_H2PH>
+			<CRD_OUT_FILE comment="" c="off">master_slave.crd</CRD_OUT_FILE>
+			<CRD_IN_DEM comment="" c="off" var='in_dem'></CRD_IN_DEM>
+			<CRD_IN_FORMAT comment="" c="off" var='in_format'></CRD_IN_FORMAT>
+			<CRD_IN_SIZE comment="" c="off" var='in_size'></CRD_IN_SIZE>
+			<CRD_IN_DELTA comment="" c="off" var='in_delta'></CRD_IN_DELTA>
+			<CRD_IN_UL comment="" c="off" var='in_ul'></CRD_IN_UL>
+			<CRD_IN_NODATA comment="" c="off" var='in_nodata'></CRD_IN_NODATA>
+		</comprefdem>
+		<comprefpha>compute reference phase due to earth curvature
+			<PROCESS>COMPREFPHA</PROCESS>
+			<FE_METHOD comment="" c="off">porbits</FE_METHOD>
+			<FE_DEGREE comment="" c="off">3</FE_DEGREE>
+			<FE_NPOINTS comment="" c="off">101</FE_NPOINTS>
+		</comprefpha>
+		<coregpm>combine DEM and windows coregistration
+			<PROCESS>COREGPM</PROCESS>
+			<CPM_THRESHOLD comment="" c="off">0.001</CPM_THRESHOLD>
+			<CPM_DEGREE comment="" c="off">1</CPM_DEGREE>
+			<CPM_WEIGHTS comment="" c="off">bamler</CPM_WEIGHTS>
+			<CPM_MAXITER comment="" c="off">1</CPM_MAXITER>
+			<CPM_PLOT comment="" c="on">NOBG</CPM_PLOT>
+		</coregpm>
+		<dembased>dem assisted coregistration
+			<PROCESS>DEMASSIST</PROCESS>
+			<DAC_IN_DEM comment="" c="off" var='in_dem'></DAC_IN_DEM>
+			<DAC_IN_FORMAT comment="" c="off" var='in_format'></DAC_IN_FORMAT>
+			<DAC_IN_SIZE comment="" c="off" var='in_size'></DAC_IN_SIZE>
+			<DAC_IN_DELTA comment="" c="off" var='in_delta'></DAC_IN_DELTA>
+			<DAC_IN_UL comment="" c="off" var='in_ul'></DAC_IN_UL>
+			<DAC_IN_NODATA comment="" c="off" var='in_nodata'></DAC_IN_NODATA>
+		</dembased>
+		<finecoreg>fine coregistration (windows)
+			<PROCESS>FINE</PROCESS>
+			<FC_METHOD comment="" c="off">coherence</FC_METHOD>
+			<FC_NWIN comment="number of windows" c="off">3</FC_NWIN>
+			<FC_IN_POS comment="used when PS points are used" c="on">fc_pos.in</FC_IN_POS>
+			<FC_WINSIZE comment="number of windows" c="off">64 64</FC_WINSIZE>
+			<FC_ACC comment="search window" c="off">8 8</FC_ACC>
+			<FC_INITOFF comment="use coarse correlation result as first guess" c="off">coarsecorr</FC_INITOFF>
+			<FC_OSFACTOR comment="oversampling factor" c="off">32</FC_OSFACTOR>
+			<FC_PLOT comment="" c="on">0.65 BG</FC_PLOT>
+		</finecoreg>
+		<geocode>calculate lat/lon of radar pixels
+			<PROCESS>GEOCODE</PROCESS>
+			<GEO_OUT_LAM comment="longitude coordinates" c="off">lam.raw</GEO_OUT_LAM>
+			<GEO_OUT_PHI comment="latitude coordinates" c="off">phi.raw</GEO_OUT_PHI>
+		</geocode>
+		<interferogram>compute the complex interferogram
+			<PROCESS>INTERFERO</PROCESS>
+			<INT_OUT_CINT comment="" c="off">cint.raw</INT_OUT_CINT>
+			<INT_MULTILOOK comment="" c="off">1 1</INT_MULTILOOK>
+		</interferogram>	
+		<resample>resample image
+			<PROCESS>RESAMPLE</PROCESS>
+			<RS_METHOD comment="" c="off">rc12p</RS_METHOD>
+			<RS_SHIFTAZI comment="" c="off">off</RS_SHIFTAZI>
+			<RS_OUT_FILE comment="" c="off">slave_rsmp.raw</RS_OUT_FILE>
+			<RS_OUT_FORMAT comment="" c="off">cr4</RS_OUT_FORMAT>
+			<RS_DBOW_GEO comment="" c="on">52.13 4.38 5000 1000</RS_DBOW_GEO>
+		</resample>
+		<subtrrefdem>remove DEM induced reference phase
+			<PROCESS>SUBTRREFDEM</PROCESS>
+			<SRD_OUT_CINT comment="" c="off">cint_srd.raw</SRD_OUT_CINT>
+			<SRD_OFFSET comment="" c="off">1 1</SRD_OFFSET>
+		</subtrrefdem>
+		<subtrrefpha>remove earth curvature induced reference phase
+			<PROCESS>SUBTRREFPHA</PROCESS>
+			<SRP_METHOD comment="" c="off">exact</SRP_METHOD>
+			<SRP_OUT_CINT comment="" c="off">cint_srp.raw</SRP_OUT_CINT>
+			<SRP_MULTILOOK comment="" c="off">1 1</SRP_MULTILOOK>
+			<SRP_DUMPREFPHA comment="" c="off">OFF</SRP_DUMPREFPHA>
+			<SRP_OUT_REFPHA comment="" c="off">refphase.raw</SRP_OUT_REFPHA>
+			<SRD_OUT_H2PH comment="" c="off">h2ph_srp.raw</SRD_OUT_H2PH>
+		</subtrrefpha>
+		<unwrap>unwrap the interferogram
+			<PROCESS>unwrap</PROCESS>
+			<UW_METHOD comment="" c="off">SNAPHU</UW_METHOD>
+			<UW_OUT_FILE comment="" c="off">unwrapped.raw</UW_OUT_FILE>
+			<UW_OUT_FORMAT comment="" c="off">REAL4</UW_OUT_FORMAT>
+			<UW_SNAPHU_LOG comment="" c="off">snaphu.log</UW_SNAPHU_LOG>
+			<UW_SNAPHU_coh comment="" c="off">unwrap_input.raw</UW_SNAPHU_coh>
+			<UW_SNAPHU_MODE comment="" c="off">SMOOTH</UW_SNAPHU_MODE>
+			<UW_SNAPHU_INIT comment="" c="off">MST</UW_SNAPHU_INIT>
+			<UW_SNAPHU_VERBOSE comment="" c="off">ON</UW_SNAPHU_VERBOSE>
+		</unwrap>
+		<phasefilt>phase filter
+			<PROCESS>FILTPHASE</PROCESS>
+			<PF_BLOCKSIZE comment="" c="off">32</PF_BLOCKSIZE>
+			<PF_OVERLAP comment="" c="off">3</PF_OVERLAP>
+			<PF_KERNEL comment="" c="off">1 2 3 2 1</PF_KERNEL>
+			<PF_METHOD comment="" c="off">goldstein</PF_METHOD>
+			<PF_ALPHA comment="" c="off">0.2</PF_ALPHA>
+			<PF_KERNEL comment="" c="off">5 1 1 1 1 1</PF_KERNEL>
+		</phasefilt>
+	</sentinel-1>
+</sensors>
+
+
+
+
+
+
diff --git a/prepare_stack/prepare_datastack.py b/prepare_stack/prepare_datastack.py
new file mode 100644
index 0000000..e414b80
--- /dev/null
+++ b/prepare_stack/prepare_datastack.py
@@ -0,0 +1,61 @@
+# Function created by Gert Mulder
+# Institute TU Delft
+# Date 9-11-2016
+# Part of Doris 5.0
+
+# This function makes a setup of the processing folder, based on a single shapefile.
+# Inputs are
+# - the satellite sensor used
+# - the shapefile
+# - the processing folder we want to create
+# - the dem source folder where the intermediate DEM data is stored
+
+import os
+from create_dem import CreateDem
+from create_inputfiles import CreateInputFiles
+from create_doris_input_xml import CreateDorisInputXml
+from create_datastack_bash import CreateBash
+import xml.etree.ElementTree as ET
+
+class PrepareDatastack(object):
+
+    def __init__(self):
+        return
+
+    def prepare(self, inputfile):
+
+        # This will first create the framework with data folders; the stack folder should
+        # contain a doris parameters file.
+
+        doris_input_xml = CreateDorisInputXml(inputfile)
+
+        folders = ['input_files', 'stack', 'dem']
+        for foldername in folders:
+            if not os.path.exists(os.path.join(doris_input_xml.get_value('datastack_folder'), foldername)):
+                os.mkdir(os.path.join(doris_input_xml.get_value('datastack_folder'), foldername))
+
+        # Then create the dem file
+        password_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),'install', 'doris_config.xml')
+        tree_doris = ET.parse(password_file)
+        settings_doris = tree_doris.getroot()
+        srtm_username = settings_doris.find('.usgs_username').text
+        srtm_password = settings_doris.find('.usgs_password').text
+        dem_out = os.path.join(doris_input_xml.get_value('dem_folder'), 'dem.raw')
+        dem_var = dem_out + '.var'
+
+        dem = CreateDem()
+        if doris_input_xml.get_value('generate_dem').lower() == 'yes':
+            dem.create(doris_input_xml.get_value('shape_file_path'), dem_out, dem_var, resample=None,
+                                    doris_input=True, rounding=0.1, border=1.5,
+                                    data_folder=doris_input_xml.get_value('dem_processing_folder'), quality='SRTM3',
+                                    password=srtm_password, username=srtm_username)
+
+        ## Then create the inputfiles
+        inputfiles_folder = os.path.join(doris_input_xml.get_value('datastack_folder'), 'input_files')
+        xml_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'inputfile_template.xml')
+        satellite = 'sentinel-1'
+        CreateInputFiles(dem_var, xml_file, satellite).create(inputfiles_folder)
+
+        root_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+        CreateBash().create(doris_input_xml.get_value('datastack_folder'), root_folder, doris_input_xml.get_value('cores'))
+
diff --git a/prepare_stack/prepare_datastack_main.py b/prepare_stack/prepare_datastack_main.py
new file mode 100644
index 0000000..8b6277e
--- /dev/null
+++ b/prepare_stack/prepare_datastack_main.py
@@ -0,0 +1,20 @@
+import argparse
+from prepare_datastack import PrepareDatastack
+
+"""Doris prepare datastack
+arguments:  --doris_input_file, -i
+"""
+
+# parse arguments here
+
+parser = argparse.ArgumentParser(description='Doris prepare datastack.')
+parser.add_argument('--doris_input_file', '-i', default='',
+                    help='Path to doris input file, this file contains case specific parameters')
+
+args = parser.parse_args()
+
+#start doris sentinel1 run
+prepare_data_stack = PrepareDatastack()
+prepare_data_stack.prepare(args.doris_input_file)
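+
+# Example invocation (paths hypothetical):
+#   python prepare_datastack_main.py -i /path/to/doris_input.xml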
+
+
diff --git a/SARtools/Makefile b/sar_tools/Makefile
similarity index 96%
rename from SARtools/Makefile
rename to sar_tools/Makefile
index d4be559..fb3e1de 100644
--- a/SARtools/Makefile
+++ b/sar_tools/Makefile
@@ -22,7 +22,8 @@
 SHELL	=	/bin/sh
 
 ### Installdirdef should exist!
-INSTALL_DIR =	/usr/local/bin
+#INSTALL_DIR =	/usr/local/bin
+INSTALL_DIR =	/home/dlevelt/src/Doris_s1_git/bin
 
 ### GCC compiler
 CC 	=	g++
@@ -47,6 +48,7 @@ PROGS	=	wrap \
 		flapjack \
 		cpxfiddle \
 		floatmult \
+		floatmask \
 		readrasterheader \
 		rasterheader \
 		dateconv
@@ -78,6 +80,8 @@ cpxfiddle:	cpxfiddle.o
 		$(CC) $(CFLAGS) $@.o -o $@
 flapjack:	flapjack.o
 		$(CC) $(CFLAGS) $@.o -o $@
+floatmask:	floatmask.o
+		$(CC) $(CFLAGS) $@.o -o $@
 floatmult:	floatmult.o
 		$(CC) $(CFLAGS) $@.o -o $@
 readrasterheader:	readrasterheader.o
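
The hardcoded INSTALL_DIR above points into a developer's home directory; since
make variables given on the command line override assignments in the Makefile,
a local build can redirect it without editing the file, e.g.:

    make INSTALL_DIR=/usr/local/bin install
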
diff --git a/SARtools/README b/sar_tools/README
similarity index 100%
rename from SARtools/README
rename to sar_tools/README
diff --git a/SARtools/bkconvert.cc b/sar_tools/bkconvert.cc
similarity index 100%
rename from SARtools/bkconvert.cc
rename to sar_tools/bkconvert.cc
diff --git a/SARtools/cpxconj.cc b/sar_tools/cpxconj.cc
similarity index 100%
rename from SARtools/cpxconj.cc
rename to sar_tools/cpxconj.cc
diff --git a/SARtools/cpxdiv.cc b/sar_tools/cpxdiv.cc
similarity index 100%
rename from SARtools/cpxdiv.cc
rename to sar_tools/cpxdiv.cc
diff --git a/SARtools/cpxfiddle.cc b/sar_tools/cpxfiddle.cc
similarity index 100%
rename from SARtools/cpxfiddle.cc
rename to sar_tools/cpxfiddle.cc
diff --git a/SARtools/cpxmult.cc b/sar_tools/cpxmult.cc
similarity index 100%
rename from SARtools/cpxmult.cc
rename to sar_tools/cpxmult.cc
diff --git a/SARtools/dateconv.cc b/sar_tools/dateconv.cc
similarity index 100%
rename from SARtools/dateconv.cc
rename to sar_tools/dateconv.cc
diff --git a/SARtools/flapjack.cc b/sar_tools/flapjack.cc
similarity index 100%
rename from SARtools/flapjack.cc
rename to sar_tools/flapjack.cc
diff --git a/SARtools/floatmult.cc b/sar_tools/floatmask.cc
similarity index 73%
copy from SARtools/floatmult.cc
copy to sar_tools/floatmask.cc
index 3101d5d..76c63a5 100644
--- a/SARtools/floatmult.cc
+++ b/sar_tools/floatmask.cc
@@ -18,6 +18,9 @@
 // MA 19-Dec-2008
 // $Revision: 3.7 $
 //  - support file size > 4GB
+//
+// BO 21-Feb-2013
+//  - Modified to generate masks
 
 using namespace std;
 #include <iostream>				// cout
@@ -37,18 +40,23 @@ typedef unsigned long long  uint64;
 void usage(char *programname)
 {
   cerr << "\nProgram: " << programname 
-       << " pixelwise float multiplication of a (complex) float complex file.\n"
-       << " To be used to scale float files, or magnitude of complex files.\n"
-       << " see also: cpxmult, flapjack, cpxfiddle... \n"
+       << " pixelwise mask generation from a float file.\n"
+       << " To be used to mask float files.\n"
+       << " see also: floatmult, cpxmult, flapjack, cpxfiddle... \n"
        << "\n  USAGE:\n\t" << programname
-       << " infile1 [factor==2.]\n\n"
-       << "  EXAMPLE:\n\t" << programname << " cint.raw 1.47"
-       << "\noutput file == infile1." << programname << ".factor\n\n"
+       << " infile1 gt|lt|eq [threshold] \n\n"
+       << "  EXAMPLE:\n\t" << programname << " interfero.coh lt 0.4 \n"
+       << "\n default threshold is 0.5\n\n"
+       << "\noutput file == infile1.gt|lt|eq.threshold\n\n"
        << "exit levels: 0:ok; -1: wrong input; 1:ifile; 2:ofile.\n"
-       << "please sent comments to: doris_users at tudelft.nl\n\n\n";
+       << "please send comments to: batu at gi.alaska.edu\n\n\n";
   exit(-1);
   }
 
+bool (*check)(float, float) = NULL;
+bool gt(float a, float b){ return a>b; }  
+bool lt(float a, float b){ return a<b; }  
+bool eq(float a, float b){ return a==b; }  
 
 int main(int argc, char* argv[])
 {
@@ -56,13 +64,23 @@ int main(int argc, char* argv[])
   const int ONE27 = 127;
   char ifile[ONE27];				// input file name
   const int sizeofelement = sizeof(float);	// data in file
-  float factor = 2.;				// default
-  
+  float factor = 0.5;				// default
   // ====== Handle input ======
   switch (argc)
     {
+    case 4:
+      factor = atof(argv[3]);
     case 3:
-      factor = atof(argv[2]);			// input filename arg1
+      if (strcmp(argv[2], "gt") == 0)
+        check=&gt;
+      else if (strcmp(argv[2], "lt") == 0)
+        check=&lt;
+      else if (strcmp(argv[2], "eq") == 0)
+        check=&eq;
+      else{
+        usage(argv[0]);
+        exit(-1);
+      }  
       //--- fall through ---//
     case 2:
       strcpy(ifile,argv[1]);			// input filename arg1
@@ -73,8 +91,8 @@ int main(int argc, char* argv[])
   
   // ______ Check / echo input ______
   cerr << "Program parameters:\n\t" << argv[0] << " " 
-       << ifile << " " << factor << endl;
-  if (factor == 1) usage(argv[0]);
+       << ifile << " " << argv[2] << " " << factor << endl;
+  //if (factor == 1) usage(argv[0]);
   
   // ______ Set defaults if required _____
   char ofile[ONE27];		// output filename == "ifile.flapjack"
@@ -83,7 +101,7 @@ int main(int argc, char* argv[])
   //  ostrstream omem(ofile,ONE27);
   ostringstream omem;
   //  omem << ifile << "." << argv[0] << factor << ends;
-  omem << ifile << "." << argv[0] << factor << endl;
+  omem << ifile << "." << argv[2] << factor << ends;
   strcpy(ofile,omem.str().c_str());
 
   //quick debugging
@@ -120,7 +138,10 @@ int main(int argc, char* argv[])
   for (register int i=0; i<numberofpixels; ++i)
     {
       infile1.read((char*)&value,sizeofelement);
-      value *= factor;
+      if ( (*check)(value, factor) )
+        value=0.0;
+      else
+        value=1.0;
       outfile.write((char*)&value,sizeofelement);
       if (!(i%tenpercent))
 	{
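
The loop above writes 0.0 wherever the chosen comparison against the threshold
holds and 1.0 everywhere else. A minimal sketch of the same rule in
Python/numpy, assuming a raw native-endian float32 raster and reusing the
example from the usage text (an illustration only, not part of the commit):

    import numpy as np

    # "floatmask interfero.coh lt 0.4": 0.0 where value < 0.4, else 1.0
    data = np.fromfile('interfero.coh', dtype=np.float32)
    mask = np.where(data < 0.4, np.float32(0.0), np.float32(1.0))
    mask.tofile('interfero.coh.lt0.4')  # output name == infile.<op><threshold>
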
diff --git a/SARtools/floatmult.cc b/sar_tools/floatmult.cc
similarity index 100%
rename from SARtools/floatmult.cc
rename to sar_tools/floatmult.cc
diff --git a/SARtools/rasterheader.cc b/sar_tools/rasterheader.cc
similarity index 100%
rename from SARtools/rasterheader.cc
rename to sar_tools/rasterheader.cc
diff --git a/SARtools/readrasterheader.cc b/sar_tools/readrasterheader.cc
similarity index 100%
rename from SARtools/readrasterheader.cc
rename to sar_tools/readrasterheader.cc
diff --git a/SARtools/wrap.cc b/sar_tools/wrap.cc
similarity index 100%
rename from SARtools/wrap.cc
rename to sar_tools/wrap.cc
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..f573e75
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,13 @@
+from setuptools import setup
+
+setup(
+    name='doris',
+    version='5.0.3',
+    packages=['install', 'doris_stack', 'doris_stack.functions', 'doris_stack.main_code', 'prepare_stack'],
+    url='https://github.com/TUDelftGeodesy/Doris',
+    license='LICENSE.txt',
+    author='Gert Mulder',
+    author_email='g.mulder- at tudelft.nl',
+    description='doris InSAR processing software',
+    install_requires=['numpy', 'shapely', 'requests', 'fiona', 'gdal', 'osr', 'scipy', 'fastkml']
+)
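
With a setup.py in place the Python layer installs in the usual setuptools
fashion, e.g. from the repository root:

    pip install .

Note that osr in install_requires is part of the GDAL Python bindings
(osgeo.osr) rather than a standalone PyPI package.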

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/doris.git


