[pyresample] 01/16: Imported Upstream version 1.1.3

Antonio Valentino a_valentino-guest at moszumanska.debian.org
Sat Jul 11 19:52:36 UTC 2015


This is an automated email from the git hooks/post-receive script.

a_valentino-guest pushed a commit to branch master
in repository pyresample.

commit ff538ddebdeaf9ab64c07b0afd980a184871fbd6
Author: Antonio Valentino <antonio.valentino at tiscali.it>
Date:   Sat Jul 11 11:12:05 2015 +0000

    Imported Upstream version 1.1.3
---
 .travis.yml                                        |  18 +
 LICENSE.txt                                        | 829 ++++---------------
 PKG-INFO                                           |  16 -
 README                                             |   4 +-
 README.md                                          |  39 +
 docs/source/_static/images/time_vs_nproc_1-12.png  | Bin 0 -> 35330 bytes
 docs/source/geo_def.rst                            |  14 +-
 docs/source/multi.rst                              |   3 +
 pyresample.egg-info/PKG-INFO                       |  16 -
 pyresample.egg-info/SOURCES.txt                    |  63 --
 pyresample.egg-info/dependency_links.txt           |   1 -
 pyresample.egg-info/not-zip-safe                   |   1 -
 pyresample.egg-info/requires.txt                   |   9 -
 pyresample.egg-info/top_level.txt                  |   1 -
 pyresample/__init__.py                             |  51 +-
 pyresample/_multi_proc.py                          | 137 ++--
 pyresample/_spatial_mp.py                          | 202 ++---
 pyresample/data_reduce.py                          | 198 ++---
 pyresample/geo_filter.py                           |  64 +-
 pyresample/geometry.py                             | 442 +++++-----
 pyresample/grid.py                                 | 153 ++--
 pyresample/image.py                                | 155 ++--
 pyresample/kd_tree.py                              | 593 +++++++-------
 pyresample/plot.py                                 | 177 ++--
 pyresample/spherical_geometry.py                   |  92 ++-
 pyresample/test/__init__.py                        |  46 ++
 {test => pyresample/test}/test_files/areas.cfg     |   0
 {test => pyresample/test}/test_files/mask_grid.dat |   0
 .../test}/test_files/mask_test_data.dat            |   0
 .../test}/test_files/mask_test_fill_value.dat      |   0
 .../test}/test_files/mask_test_full_fill.dat       |   0
 .../test}/test_files/mask_test_full_fill_multi.dat |   0
 .../test}/test_files/mask_test_mask.dat            |   0
 .../test}/test_files/mask_test_nearest_data.dat    |   0
 .../test}/test_files/mask_test_nearest_mask.dat    |   0
 .../test}/test_files/quick_mask_test.dat           |   0
 .../test}/test_files/ssmis_swath.npz               | Bin
 pyresample/test/test_geometry.py                   | 646 +++++++++++++++
 pyresample/test/test_grid.py                       | 201 +++++
 pyresample/test/test_image.py                      | 225 +++++
 pyresample/test/test_kd_tree.py                    | 907 +++++++++++++++++++++
 pyresample/test/test_plot.py                       |  87 ++
 .../test}/test_spherical_geometry.py               |  53 +-
 {test => pyresample/test}/test_swath.py            |  75 +-
 {test => pyresample/test}/test_utils.py            |  56 +-
 pyresample/utils.py                                | 213 +++--
 pyresample/version.py                              |  28 +-
 requirements.txt                                   |   2 +
 setup.cfg                                          |   5 -
 setup.py                                           |  59 +-
 test/test_geometry.py                              | 513 ------------
 test/test_grid.py                                  | 177 ----
 test/test_image.py                                 | 202 -----
 test/test_kd_tree.py                               | 836 -------------------
 test/test_plot.py                                  |  67 --
 55 files changed, 3833 insertions(+), 3843 deletions(-)

diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..c137efd
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,18 @@
+language: python
+python:
+- '2.6'
+- '2.7'
+- '3.2'
+- '3.3'
+- '3.4'
+before_install:
+- sudo add-apt-repository ppa:ubuntugis/ppa -y
+- sudo apt-get update -qq
+- sudo apt-get install libfreetype6-dev
+- sudo apt-get install libgeos-3.3.8 libgeos-c1 libgeos-dev
+install:
+- pip install -r requirements.txt
+- pip install -e ".[pykdtree]"
+- pip install coveralls
+script: coverage run --source=pyresample setup.py test
+after_success: coveralls
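
The Travis configuration above runs the test suite with the optional "pykdtree" extra installed. As a minimal sketch (not part of this commit), the get_capabilities() helper kept in pyresample/__init__.py later in this patch can be used to confirm locally that the optional back ends were actually picked up; the 'numexpr' key is assumed from the truncated hunk at the end of this message:

    # Sketch only: report which optional accelerators this environment provides,
    # mirroring what the Travis job installs via `pip install -e ".[pykdtree]"`.
    import pyresample

    caps = pyresample.get_capabilities()
    print("pykdtree available:", caps.get('pykdtree', False))
    print("numexpr available:", caps.get('numexpr', False))  # key assumed, see truncated hunk
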
diff --git a/LICENSE.txt b/LICENSE.txt
index 94a9ed0..e3acbd5 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -1,674 +1,165 @@
-                    GNU GENERAL PUBLIC LICENSE
+                   GNU LESSER GENERAL PUBLIC LICENSE
                        Version 3, 29 June 2007
 
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Copyright (C) 2007, 2015 Free Software Foundation, Inc. <http://fsf.org/>
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
 
-                            Preamble
-
-  The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
-  The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works.  By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.  We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors.  You can apply it to
-your programs, too.
-
-  When we speak of free software, we are referring to freedom, not
-price.  Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
-  To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights.  Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
-  For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received.  You must make sure that they, too, receive
-or can get the source code.  And you must show them these terms so they
-know their rights.
-
-  Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
-  For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software.  For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
-  Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so.  This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software.  The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable.  Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products.  If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
-  Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary.  To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-  The precise terms and conditions for copying, distribution and
-modification follow.
-
-                       TERMS AND CONDITIONS
-
-  0. Definitions.
-
-  "This License" refers to version 3 of the GNU General Public License.
-
-  "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
-  "The Program" refers to any copyrightable work licensed under this
-License.  Each licensee is addressed as "you".  "Licensees" and
-"recipients" may be individuals or organizations.
-
-  To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy.  The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
-  A "covered work" means either the unmodified Program or a work based
-on the Program.
-
-  To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy.  Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
-  To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies.  Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
-  An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License.  If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
-  1. Source Code.
-
-  The "source code" for a work means the preferred form of the work
-for making modifications to it.  "Object code" means any non-source
-form of a work.
-
-  A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
-  The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form.  A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
-  The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities.  However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work.  For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
-  The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
-  The Corresponding Source for a work in source code form is that
-same work.
-
-  2. Basic Permissions.
-
-  All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met.  This License explicitly affirms your unlimited
-permission to run the unmodified Program.  The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work.  This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
-  You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force.  You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright.  Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-  Conveying under any other circumstances is permitted solely under
-the conditions stated below.  Sublicensing is not allowed; section 10
-makes it unnecessary.
-
-  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-  No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
-  When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
-  4. Conveying Verbatim Copies.
-
-  You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
-  You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
-  5. Conveying Modified Source Versions.
-
-  You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
-    a) The work must carry prominent notices stating that you modified
-    it, and giving a relevant date.
-
-    b) The work must carry prominent notices stating that it is
-    released under this License and any conditions added under section
-    7.  This requirement modifies the requirement in section 4 to
-    "keep intact all notices".
-
-    c) You must license the entire work, as a whole, under this
-    License to anyone who comes into possession of a copy.  This
-    License will therefore apply, along with any applicable section 7
-    additional terms, to the whole of the work, and all its parts,
-    regardless of how they are packaged.  This License gives no
-    permission to license the work in any other way, but it does not
-    invalidate such permission if you have separately received it.
-
-    d) If the work has interactive user interfaces, each must display
-    Appropriate Legal Notices; however, if the Program has interactive
-    interfaces that do not display Appropriate Legal Notices, your
-    work need not make them do so.
-
-  A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit.  Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-  6. Conveying Non-Source Forms.
-
-  You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
-    a) Convey the object code in, or embodied in, a physical product
-    (including a physical distribution medium), accompanied by the
-    Corresponding Source fixed on a durable physical medium
-    customarily used for software interchange.
-
-    b) Convey the object code in, or embodied in, a physical product
-    (including a physical distribution medium), accompanied by a
-    written offer, valid for at least three years and valid for as
-    long as you offer spare parts or customer support for that product
-    model, to give anyone who possesses the object code either (1) a
-    copy of the Corresponding Source for all the software in the
-    product that is covered by this License, on a durable physical
-    medium customarily used for software interchange, for a price no
-    more than your reasonable cost of physically performing this
-    conveying of source, or (2) access to copy the
-    Corresponding Source from a network server at no charge.
-
-    c) Convey individual copies of the object code with a copy of the
-    written offer to provide the Corresponding Source.  This
-    alternative is allowed only occasionally and noncommercially, and
-    only if you received the object code with such an offer, in accord
-    with subsection 6b.
-
-    d) Convey the object code by offering access from a designated
-    place (gratis or for a charge), and offer equivalent access to the
-    Corresponding Source in the same way through the same place at no
-    further charge.  You need not require recipients to copy the
-    Corresponding Source along with the object code.  If the place to
-    copy the object code is a network server, the Corresponding Source
-    may be on a different server (operated by you or a third party)
-    that supports equivalent copying facilities, provided you maintain
-    clear directions next to the object code saying where to find the
-    Corresponding Source.  Regardless of what server hosts the
-    Corresponding Source, you remain obligated to ensure that it is
-    available for as long as needed to satisfy these requirements.
-
-    e) Convey the object code using peer-to-peer transmission, provided
-    you inform other peers where the object code and Corresponding
-    Source of the work are being offered to the general public at no
-    charge under subsection 6d.
-
-  A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
-  A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling.  In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage.  For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product.  A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
-  "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source.  The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
-  If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information.  But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
-  The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed.  Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
-  Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
-  7. Additional Terms.
-
-  "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law.  If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
-  When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it.  (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.)  You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
-  Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
-    a) Disclaiming warranty or limiting liability differently from the
-    terms of sections 15 and 16 of this License; or
-
-    b) Requiring preservation of specified reasonable legal notices or
-    author attributions in that material or in the Appropriate Legal
-    Notices displayed by works containing it; or
-
-    c) Prohibiting misrepresentation of the origin of that material, or
-    requiring that modified versions of such material be marked in
-    reasonable ways as different from the original version; or
-
-    d) Limiting the use for publicity purposes of names of licensors or
-    authors of the material; or
-
-    e) Declining to grant rights under trademark law for use of some
-    trade names, trademarks, or service marks; or
-
-    f) Requiring indemnification of licensors and authors of that
-    material by anyone who conveys the material (or modified versions of
-    it) with contractual assumptions of liability to the recipient, for
-    any liability that these contractual assumptions directly impose on
-    those licensors and authors.
-
-  All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10.  If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term.  If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
-  If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
-  Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
-  8. Termination.
-
-  You may not propagate or modify a covered work except as expressly
-provided under this License.  Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
-  However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
-  Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
-  Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License.  If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
-  9. Acceptance Not Required for Having Copies.
-
-  You are not required to accept this License in order to receive or
-run a copy of the Program.  Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance.  However,
-nothing other than this License grants you permission to propagate or
-modify any covered work.  These actions infringe copyright if you do
-not accept this License.  Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-  10. Automatic Licensing of Downstream Recipients.
-
-  Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License.  You are not responsible
-for enforcing compliance by third parties with this License.
-
-  An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations.  If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
-  You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License.  For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
-  11. Patents.
-
-  A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based.  The
-work thus licensed is called the contributor's "contributor version".
-
-  A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version.  For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
-  Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
-  In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement).  To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
-  If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients.  "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
-  If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
-  A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License.  You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
-  Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
-  12. No Surrender of Others' Freedom.
-
-  If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License.  If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all.  For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
-  13. Use with the GNU Affero General Public License.
-
-  Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work.  The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
-  14. Revised Versions of this License.
-
-  The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time.  Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-  Each version is given a distinguishing version number.  If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation.  If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-  If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
-  Later license versions may give you additional or different
-permissions.  However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
-  15. Disclaimer of Warranty.
-
-  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-  16. Limitation of Liability.
-
-  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
-  17. Interpretation of Sections 15 and 16.
-
-  If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
-                     END OF TERMS AND CONDITIONS
-
-            How to Apply These Terms to Your New Programs
-
-  If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
-  To do so, attach the following notices to the program.  It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-    <one line to give the program's name and a brief idea of what it does.>
-    Copyright (C) <year>  <name of author>
-
-    This program is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation, either version 3 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-    GNU General Public License for more details.
-
-    You should have received a copy of the GNU General Public License
-    along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-  If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
-    <program>  Copyright (C) <year>  <name of author>
-    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-    This is free software, and you are welcome to redistribute it
-    under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License.  Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
-  You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<http://www.gnu.org/licenses/>.
-
-  The GNU General Public License does not permit incorporating your program
-into proprietary programs.  If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library.  If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.  But first, please read
-<http://www.gnu.org/philosophy/why-not-lgpl.html>.
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/PKG-INFO b/PKG-INFO
deleted file mode 100644
index 9e9ceba..0000000
--- a/PKG-INFO
+++ /dev/null
@@ -1,16 +0,0 @@
-Metadata-Version: 1.1
-Name: pyresample
-Version: 1.1.0
-Summary: Resampling of remote sensing data in Python
-Home-page: UNKNOWN
-Author: Esben S. Nielsen
-Author-email: esn at dmi.dk
-License: UNKNOWN
-Description: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Programming Language :: Python
-Classifier: Operating System :: OS Independent
-Classifier: Intended Audience :: Science/Research
-Classifier: Topic :: Scientific/Engineering
diff --git a/README b/README
deleted file mode 100644
index 5c5095c..0000000
--- a/README
+++ /dev/null
@@ -1,3 +0,0 @@
-Python package for geospatial resampling
-
-Look at http://code.google.com/p/pyresample/ and http://pytroll.org/ for more information.
diff --git a/README b/README
new file mode 120000
index 0000000..42061c0
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+README.md
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..52ad6d1
--- /dev/null
+++ b/README.md
@@ -0,0 +1,39 @@
+[![Build Status](https://travis-ci.org/mraspaud/pyresample.svg?branch=master)](https://travis-ci.org/mraspaud/pyresample)
+
+Python package for geospatial resampling
+----------------------------------------
+
+Resampling (reprojection) of geospatial image data in Python.
+Pyresample uses a kd-tree approach for resampling. 
+Pyresample is designed for resampling of remote sensing data and supports resampling from both fixed grids and geolocated swath data. 
+Several types of resampling are supported, including nearest neighbour, Gaussian weighting and weighting with a user-defined radial function.
+Pyresample works with Numpy arrays, including support for masked arrays.
+Parallel resampling using multiple processor cores is supported.
+Plotting capability is provided via Basemap. As of v0.8.0 [pykdtree](https://github.com/storpipfugl/pykdtree) can be used to speed up processing.
+
+Pyresample is tested with Python 2.6, 2.7, 3.2, 3.3, and 3.4.
+
+Note: For numpy >= 1.6.2 use pyresample >= 0.7.13  
+
+[Documentation](https://pyresample.readthedocs.org/en/latest/)
+Look at [pytroll.org](http://pytroll.org/) for more information.
+
+
+## News
+  * *2015-02-03*: Pyresample-1.1.3 released. Switch to LGPLv3.
+
+  * *2014-12-17*: Pyresample-1.1.2 released. Fix to allow tests to run on travis.
+
+  * *2014-12-10*: Pyresample-1.1.1 released. Wrapping of longitudes and latitudes is now implemented.
+
+  * *2013-10-23*: Pyresample-1.1.0 released. Added option for calculating uncertainties for weighted kd-tree resampling. From now on pyresample will adhere to [semantic versioning](http://semver.org/).
+
+  * *2013-07-03*: Pyresample-1.0.0 released. Minor API change to the geometry.py module as the boundary variable is removed and replaced by proj_x_coords and proj_y_coords. Caching scheme removed from projection coordinate calculation in geometry.py as it introduced excessive complications. The numexpr package is now used for minor bottleneck optimization if available. Version number bumped to 1.0.0 as pyresample has been running stable in production environments for several years now.
+   
+  * *2013-03-20*: Pyresample-0.8.0 released. Enables use of pykdtree. Fixes projection handling for 'latlong' projection.
+
+  * *2013-01-21*: Pyresample-0.7.13 released. Fixes numpy incompatibility introduced with numpy v1.6.2.
+
+  * *2012-10-18*: Pyresample-0.7.12 released. Better integration with Basemap with support for plotting using globe projections (geos, ortho and nsper). Documentation updated with correct description of the epsilon parameter.
+
+  * *2012-07-03*: Pyresample-0.7.11 released. Support for plotting in Plate Carree projection and bugfixes for meridians and parallels in plots. Added utils.fwhm2sigma convenience function for use in Gauss resampling.   
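
The README above describes the kd-tree based API without showing it; the following is a minimal, hedged sketch (not part of this commit) of nearest-neighbour resampling from a geolocated swath onto a projected grid. The area parameters and radius_of_influence are illustrative values only:

    # Sketch only: resample a tiny synthetic swath with the kd-tree
    # nearest-neighbour resampler described in the README.
    import numpy as np
    from pyresample import geometry, kd_tree

    lons = np.array([[10.0, 10.1], [10.0, 10.1]])
    lats = np.array([[50.0, 50.0], [50.1, 50.1]])
    data = np.array([[1.0, 2.0], [3.0, 4.0]])

    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    area_def = geometry.AreaDefinition(
        'test_area', 'Illustrative stereographic grid', 'test_area',
        {'proj': 'stere', 'lat_0': '50.0', 'lon_0': '10.0', 'units': 'm'},
        100, 100, (-50000.0, -50000.0, 50000.0, 50000.0))

    # fill_value=None returns a masked array, matching the masked-array support
    # mentioned in the README.
    result = kd_tree.resample_nearest(swath_def, data, area_def,
                                      radius_of_influence=20000, fill_value=None)
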
diff --git a/docs/source/_static/images/time_vs_nproc_1-12.png b/docs/source/_static/images/time_vs_nproc_1-12.png
new file mode 100644
index 0000000..dd3c41d
Binary files /dev/null and b/docs/source/_static/images/time_vs_nproc_1-12.png differ
diff --git a/docs/source/geo_def.rst b/docs/source/geo_def.rst
index 68c6e92..3031b93 100644
--- a/docs/source/geo_def.rst
+++ b/docs/source/geo_def.rst
@@ -3,6 +3,18 @@ Geometry definitions
 The module **pyresample.geometry** contains classes for describing different types
 of remote sensing data geometries. The use of the different classes is described below.
 
+Remarks
+-------
+
+All longitudes and latitudes provided to **pyresample.geometry** must be in degrees.
+Longitudes must additionally be in the [-180;+180[ validity range.
+
+As of version 1.1.1, the **pyresample.geometry** constructors check the range of
+longitude values, issue a warning if any fall outside the validity range,
+and automatically wrap the invalid values into [-180;+180[.
+
+Use the function **utils.wrap_longitudes** to wrap longitudes yourself.
+
 AreaDefinition
 --------------
 
@@ -309,4 +321,4 @@ It can be tested if a (lon, lat) point is inside a GeometryDefinition
  ...                  			   x_size, y_size, area_extent)
  >>> print (0, -90) in area_def
  True
-     
\ No newline at end of file
+     
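
The new "Remarks" section refers readers to utils.wrap_longitudes for wrapping longitudes themselves. A small sketch (not part of this commit), assuming the helper simply maps values into [-180;+180[ as the documentation states:

    # Sketch only: wrap out-of-range longitudes by hand before building a geometry.
    import numpy as np
    from pyresample import utils

    lons = np.array([0.0, 90.0, 180.0, 270.0, 360.0])
    print(utils.wrap_longitudes(lons))  # e.g. 270 is expected to become -90
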
diff --git a/docs/source/multi.rst b/docs/source/multi.rst
index 564d141..f7e294d 100644
--- a/docs/source/multi.rst
+++ b/docs/source/multi.rst
@@ -34,3 +34,6 @@ Example of resampling in parallel using 4 processes:
 Note: Do not use more processes than available processor cores. As there is a process creation overhead 
 there might be negligible performance improvement using, say, 8 compared to 4 processor cores. 
 Test on the actual system to determine the most sensible number of processes to use. 
+
+Here is an example of the performance for a varying number of processes on 64-bit Ubuntu 14.04, 32 GB RAM, 2 x Intel Xeon E5-2630 with 6 physical cores each:
+  .. image:: _static/images/time_vs_nproc_1-12.png
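
The benchmark image added above illustrates the nprocs keyword discussed in multi.rst. A hedged sketch (not part of this commit) of parallel nearest-neighbour resampling; the geometry values and the choice of 4 processes are illustrative only:

    # Sketch only: spread the kd-tree query over several worker processes.
    import numpy as np
    from pyresample import geometry, kd_tree

    lons, lats = np.meshgrid(np.linspace(5.0, 15.0, 200), np.linspace(45.0, 55.0, 200))
    data = np.random.rand(200, 200)

    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    area_def = geometry.AreaDefinition(
        'bench_area', 'Illustrative grid', 'bench_area',
        {'proj': 'stere', 'lat_0': '50.0', 'lon_0': '10.0', 'units': 'm'},
        400, 400, (-400000.0, -400000.0, 400000.0, 400000.0))

    # Keep nprocs at or below the number of physical cores, as the note above advises.
    result = kd_tree.resample_nearest(swath_def, data, area_def,
                                      radius_of_influence=25000,
                                      nprocs=4, fill_value=None)
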
diff --git a/pyresample.egg-info/PKG-INFO b/pyresample.egg-info/PKG-INFO
deleted file mode 100644
index 9e9ceba..0000000
--- a/pyresample.egg-info/PKG-INFO
+++ /dev/null
@@ -1,16 +0,0 @@
-Metadata-Version: 1.1
-Name: pyresample
-Version: 1.1.0
-Summary: Resampling of remote sensing data in Python
-Home-page: UNKNOWN
-Author: Esben S. Nielsen
-Author-email: esn at dmi.dk
-License: UNKNOWN
-Description: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Programming Language :: Python
-Classifier: Operating System :: OS Independent
-Classifier: Intended Audience :: Science/Research
-Classifier: Topic :: Scientific/Engineering
diff --git a/pyresample.egg-info/SOURCES.txt b/pyresample.egg-info/SOURCES.txt
deleted file mode 100644
index 99a3d5a..0000000
--- a/pyresample.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,63 +0,0 @@
-LICENSE.txt
-MANIFEST.in
-README
-setup.py
-docs/Makefile
-docs/source/API.rst
-docs/source/conf.py
-docs/source/data_reduce.rst
-docs/source/geo_def.rst
-docs/source/geo_filter.rst
-docs/source/grid.rst
-docs/source/index.rst
-docs/source/installation.rst
-docs/source/multi.rst
-docs/source/plot.rst
-docs/source/preproc.rst
-docs/source/swath.rst
-docs/source/_static/images/tb37_multi.png
-docs/source/_static/images/tb37v_bmng.png
-docs/source/_static/images/tb37v_ortho.png
-docs/source/_static/images/tb37v_pc.png
-docs/source/_static/images/tb37v_quick.png
-docs/source/_static/images/uncert_conc_nh.png
-docs/source/_static/images/uncert_count_nh.png
-docs/source/_static/images/uncert_stddev_nh.png
-pyresample/__init__.py
-pyresample/_multi_proc.py
-pyresample/_spatial_mp.py
-pyresample/data_reduce.py
-pyresample/geo_filter.py
-pyresample/geometry.py
-pyresample/grid.py
-pyresample/image.py
-pyresample/kd_tree.py
-pyresample/plot.py
-pyresample/spherical_geometry.py
-pyresample/utils.py
-pyresample/version.py
-pyresample.egg-info/PKG-INFO
-pyresample.egg-info/SOURCES.txt
-pyresample.egg-info/dependency_links.txt
-pyresample.egg-info/not-zip-safe
-pyresample.egg-info/requires.txt
-pyresample.egg-info/top_level.txt
-test/test_geometry.py
-test/test_grid.py
-test/test_image.py
-test/test_kd_tree.py
-test/test_plot.py
-test/test_spherical_geometry.py
-test/test_swath.py
-test/test_utils.py
-test/test_files/areas.cfg
-test/test_files/mask_grid.dat
-test/test_files/mask_test_data.dat
-test/test_files/mask_test_fill_value.dat
-test/test_files/mask_test_full_fill.dat
-test/test_files/mask_test_full_fill_multi.dat
-test/test_files/mask_test_mask.dat
-test/test_files/mask_test_nearest_data.dat
-test/test_files/mask_test_nearest_mask.dat
-test/test_files/quick_mask_test.dat
-test/test_files/ssmis_swath.npz
\ No newline at end of file
diff --git a/pyresample.egg-info/dependency_links.txt b/pyresample.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/pyresample.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/pyresample.egg-info/not-zip-safe b/pyresample.egg-info/not-zip-safe
deleted file mode 100644
index 8b13789..0000000
--- a/pyresample.egg-info/not-zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/pyresample.egg-info/requires.txt b/pyresample.egg-info/requires.txt
deleted file mode 100644
index 37520a3..0000000
--- a/pyresample.egg-info/requires.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-pyproj
-numpy
-configobj
-
-[numexpr]
-numexpr
-
-[pykdtree]
-pykdtree
\ No newline at end of file
diff --git a/pyresample.egg-info/top_level.txt b/pyresample.egg-info/top_level.txt
deleted file mode 100644
index 2c78f3a..0000000
--- a/pyresample.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyresample
diff --git a/pyresample/__init__.py b/pyresample/__init__.py
index 375e914..7a79e45 100644
--- a/pyresample/__init__.py
+++ b/pyresample/__init__.py
@@ -1,42 +1,47 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2014, 2015  Esben S. Nielsen
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import absolute_import
+
+from pyresample.version import __version__
+from pyresample import geometry
+from pyresample import grid
+from pyresample import image
+from pyresample import kd_tree
+from pyresample import utils
+from pyresample import plot
 
-import grid
-import image
-import kd_tree
-import utils
-import version
-import plot
+__all__ = ['grid', 'image', 'kd_tree',
+           'utils', 'plot', 'geo_filter', 'geometry']
 
-__version__ = version.__version__
 
 def get_capabilities():
     cap = {}
 
     try:
         from pykdtree.kdtree import KDTree
-        cap['pykdtree'] = True 
+        cap['pykdtree'] = True
     except ImportError:
         cap['pykdtree'] = False
 
     try:
         import numexpr
-        cap['numexpr'] = True 
+        cap['numexpr'] = True
     except ImportError:
-        cap['numexpr'] = False 
+        cap['numexpr'] = False
 
     return cap
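
A short usage sketch of the get_capabilities() helper shown above; the returned booleans simply reflect whether the optional pykdtree and numexpr packages could be imported:

import pyresample

caps = pyresample.get_capabilities()
# e.g. {'pykdtree': False, 'numexpr': True} on a system without pykdtree
if not caps['pykdtree']:
    print('kd-tree queries will fall back to scipy.spatial.cKDTree')
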
diff --git a/pyresample/_multi_proc.py b/pyresample/_multi_proc.py
index b1d1957..e40c293 100644
--- a/pyresample/_multi_proc.py
+++ b/pyresample/_multi_proc.py
@@ -1,37 +1,40 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2015  Esben S. Nielsen
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import ctypes
+from __future__ import absolute_import
 
+import ctypes
 import multiprocessing as mp
+
 import numpy as np
 
+
 class Scheduler(object):
-    
+
     def __init__(self, ndata, nprocs, chunk=None, schedule='guided'):
-        if not schedule in ['guided','dynamic', 'static']:
-            raise ValueError, 'unknown scheduling strategy'
+        if not schedule in ['guided', 'dynamic', 'static']:
+            raise ValueError('unknown scheduling strategy')
         self._ndata = mp.RawValue(ctypes.c_int, ndata)
         self._start = mp.RawValue(ctypes.c_int, 0)
         self._lock = mp.Lock()
         self._schedule = schedule
         self._nprocs = nprocs
         if schedule == 'guided' or schedule == 'dynamic':
-            min_chunk = ndata // (10*nprocs)
+            min_chunk = ndata // (10 * nprocs)
             if chunk:
                 min_chunk = chunk
             min_chunk = max(min_chunk, 1)
@@ -42,63 +45,55 @@ class Scheduler(object):
                 min_chunk = max(chunk, min_chunk)
             min_chunk = max(min_chunk, 1)
             self._chunk = min_chunk
-            
-    def __iter__(self):
-        return self
 
-    def next(self):
-        self._lock.acquire()
-        ndata = self._ndata.value
-        nprocs = self._nprocs
-        start = self._start.value
-        if self._schedule == 'guided':
-            _chunk = ndata // nprocs
-            chunk = max(self._chunk, _chunk)
-        else:
-            chunk = self._chunk
-        if ndata:
-            if chunk > ndata:
-                s0 = start
-                s1 = start + ndata
-                self._ndata.value = 0
+    def __iter__(self):
+        while True:
+            self._lock.acquire()
+            ndata = self._ndata.value
+            nprocs = self._nprocs
+            start = self._start.value
+            if self._schedule == 'guided':
+                _chunk = ndata // nprocs
+                chunk = max(self._chunk, _chunk)
+            else:
+                chunk = self._chunk
+            if ndata:
+                if chunk > ndata:
+                    s0 = start
+                    s1 = start + ndata
+                    self._ndata.value = 0
+                else:
+                    s0 = start
+                    s1 = start + chunk
+                    self._ndata.value = ndata - chunk
+                    self._start.value = start + chunk
+                self._lock.release()
+                yield slice(s0, s1)
             else:
-                s0 = start
-                s1 = start + chunk
-                self._ndata.value = ndata - chunk
-                self._start.value = start + chunk
-            self._lock.release()
-            return slice(s0, s1)
-        else:
-            self._lock.release()
-            raise StopIteration
+                self._lock.release()
+                raise StopIteration
 
 
 def shmem_as_ndarray(raw_array):
     _ctypes_to_numpy = {
-                        ctypes.c_char : np.int8,
-                        ctypes.c_wchar : np.int16,
-                        ctypes.c_byte : np.int8,
-                        ctypes.c_ubyte : np.uint8,
-                        ctypes.c_short : np.int16,
-                        ctypes.c_ushort : np.uint16,
-                        ctypes.c_int : np.int32,
-                        ctypes.c_uint : np.int32,
-                        ctypes.c_long : np.int32,
-                        ctypes.c_ulong : np.int32,
-                        ctypes.c_float : np.float32,
-                        ctypes.c_double : np.float64
-                        }
-    address = raw_array._wrapper.get_address()
-    size = raw_array._wrapper.get_size()
+        ctypes.c_char: np.int8,
+        ctypes.c_wchar: np.int16,
+        ctypes.c_byte: np.int8,
+        ctypes.c_ubyte: np.uint8,
+        ctypes.c_short: np.int16,
+        ctypes.c_ushort: np.uint16,
+        ctypes.c_int: np.int32,
+        ctypes.c_uint: np.int32,
+        ctypes.c_long: np.int32,
+        ctypes.c_ulong: np.int32,
+        ctypes.c_float: np.float32,
+        ctypes.c_double: np.float64
+    }
     dtype = _ctypes_to_numpy[raw_array._type_]
-    class Dummy(object): pass
-    d = Dummy()
-    d.__array_interface__ = {
-                             'data' : (address, False),
-                             'typestr' : np.dtype(np.uint8).str,
-                             'descr' : np.dtype(np.uint8).descr,
-                             'shape' : (size,),
-                             'strides' : None,
-                             'version' : 3
-                             }                            
-    return np.asarray(d).view(dtype=dtype)
\ No newline at end of file
+
+    # The following works too, but occasionally raises
+    # RuntimeWarning: Item size computed from the PEP 3118 buffer format string does not match the actual item size.
+    # and appears to be slower.
+    # return np.ctypeslib.as_array(raw_array)
+
+    return np.frombuffer(raw_array, dtype=dtype)
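
A minimal sketch of the shared-memory pattern used by shmem_as_ndarray() above: a multiprocessing.RawArray is allocated once and then viewed as a NumPy array without copying (size, dtype and shape here are illustrative):

import ctypes
import multiprocessing as mp

import numpy as np

# Allocate 1000 doubles in shared memory (no locking, inherited by workers)
raw = mp.RawArray(ctypes.c_double, 1000)

# View the same buffer as an ndarray; writes through this view are visible
# to every process that holds a reference to the RawArray.
arr = np.frombuffer(raw, dtype=np.float64).reshape((100, 10))
arr[:] = np.arange(1000.).reshape((100, 10))
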
diff --git a/pyresample/_spatial_mp.py b/pyresample/_spatial_mp.py
index 78a0c0d..56f96f4 100644
--- a/pyresample/_spatial_mp.py
+++ b/pyresample/_spatial_mp.py
@@ -1,19 +1,21 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010, 2013  Esben S. Nielsen, Martin Raspaud
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2013, 2015  Esben S. Nielsen, Martin Raspaud
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import absolute_import
 
 import ctypes
 
@@ -27,16 +29,17 @@ try:
 except ImportError:
     ne = None
 
-from _multi_proc import shmem_as_ndarray, Scheduler
+from ._multi_proc import shmem_as_ndarray, Scheduler
 
-#Earth radius
+# Earth radius
 R = 6370997.0
 
 
 class cKDTree_MP(object):
+
     ''' Multiprocessing cKDTree subclass, shared memory '''
 
-    def __init__(self, data, leafsize=10, nprocs=2, chunk=None,\
+    def __init__(self, data, leafsize=10, nprocs=2, chunk=None,
                  schedule='guided'):
         '''
         Same as cKDTree.__init__ except that an internal copy
@@ -49,34 +52,34 @@ class cKDTree_MP(object):
 
         self.n, self.m = data.shape
         # Allocate shared memory for data
-        self.shmem_data = mp.RawArray(ctypes.c_double, self.n*self.m)
-        
+        self.shmem_data = mp.RawArray(ctypes.c_double, self.n * self.m)
+
         # View shared memory as ndarray, and copy over the data.
         # The RawArray objects have information about the dtype and
         # buffer size.
         _data = shmem_as_ndarray(self.shmem_data).reshape((self.n, self.m))
         _data[:,:] = data
-        
+
         # Initialize parent, we must do this last because
         # cKDTree stores a reference to the data array. We pass in
         # the copy in shared memory rather than the original data.
         self.leafsize = leafsize
         self._nprocs = nprocs
         self._chunk = chunk
-        self._schedule = schedule        
-        
+        self._schedule = schedule
+
     def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
         '''
         Same as cKDTree.query except parallelized with multiple
         processes and shared memory.        
         '''
-        
+
         # allocate shared memory for x and result
         nx = x.shape[0]
-        shmem_x = mp.RawArray(ctypes.c_double, nx*self.m)
-        shmem_d = mp.RawArray(ctypes.c_double, nx*k)
-        shmem_i = mp.RawArray(ctypes.c_int, nx*k)
-        
+        shmem_x = mp.RawArray(ctypes.c_double, nx * self.m)
+        shmem_d = mp.RawArray(ctypes.c_double, nx * k)
+        shmem_i = mp.RawArray(ctypes.c_int, nx * k)
+
         # view shared memory as ndarrays
         _x = shmem_as_ndarray(shmem_x).reshape((nx, self.m))
         if k == 1:
@@ -85,84 +88,84 @@ class cKDTree_MP(object):
         else:
             _d = shmem_as_ndarray(shmem_d).reshape((nx, k))
             _i = shmem_as_ndarray(shmem_i).reshape((nx, k))
-        
+
         # copy x to shared memory
         _x[:] = x
-        
-        # set up a scheduler to load balance the query        
-        scheduler = Scheduler(nx, self._nprocs, chunk=self._chunk,\
+
+        # set up a scheduler to load balance the query
+        scheduler = Scheduler(nx, self._nprocs, chunk=self._chunk,
                               schedule=self._schedule)
 
         # query with multiple processes
-        query_args = [scheduler, self.shmem_data, self.n, self.m,\
-                      self.leafsize, shmem_x, nx, shmem_d, shmem_i,\
+        query_args = [scheduler, self.shmem_data, self.n, self.m,
+                      self.leafsize, shmem_x, nx, shmem_d, shmem_i,
                       k, eps, p, distance_upper_bound]
-                
+
         _run_jobs(_parallel_query, query_args, self._nprocs)
         # return results (private memory)
         return _d.copy(), _i.copy()
-    
+
 
 class Proj(pyproj.Proj):
 
-    def __call__(self, data1, data2, inverse=False, radians=False,\
+    def __call__(self, data1, data2, inverse=False, radians=False,
                  errcheck=False, nprocs=1):
         if self.is_latlong():
             return data1, data2
-            
-        return super(Proj, self).__call__(data1, data2, inverse=inverse,\
+
+        return super(Proj, self).__call__(data1, data2, inverse=inverse,
                                           radians=radians, errcheck=errcheck)
 
 
 class Proj_MP(pyproj.Proj):
-    
+
     def __init__(self, *args, **kwargs):
         self._args = args
         self._kwargs = kwargs
-        
-    def __call__(self, data1, data2, inverse=False, radians=False,\
+
+    def __call__(self, data1, data2, inverse=False, radians=False,
                  errcheck=False, nprocs=2, chunk=None, schedule='guided'):
         if self.is_latlong():
             return data1, data2
-            
+
         grid_shape = data1.shape
         n = data1.size
-        
-        #Create shared memory
+
+        # Create shared memory
         shmem_data1 = mp.RawArray(ctypes.c_double, n)
         shmem_data2 = mp.RawArray(ctypes.c_double, n)
         shmem_res1 = mp.RawArray(ctypes.c_double, n)
         shmem_res2 = mp.RawArray(ctypes.c_double, n)
-        
+
         # view shared memory as ndarrays
         _data1 = shmem_as_ndarray(shmem_data1)
         _data2 = shmem_as_ndarray(shmem_data2)
         _res1 = shmem_as_ndarray(shmem_res1)
         _res2 = shmem_as_ndarray(shmem_res2)
-        
+
         # copy input data to shared memory
         _data1[:] = data1.ravel()
         _data2[:] = data2.ravel()
-        
-        # set up a scheduler to load balance the query        
+
+        # set up a scheduler to load balance the query
         scheduler = Scheduler(n, nprocs, chunk=chunk, schedule=schedule)
-                
+
         # Projection with multiple processes
-        proj_call_args = [scheduler, shmem_data1, shmem_data2, shmem_res1,\
-                          shmem_res2, self._args, self._kwargs, inverse,\
+        proj_call_args = [scheduler, shmem_data1, shmem_data2, shmem_res1,
+                          shmem_res2, self._args, self._kwargs, inverse,
                           radians, errcheck]
-        
+
         _run_jobs(_parallel_proj, proj_call_args, nprocs)
         return _res1.copy().reshape(grid_shape), _res2.copy().reshape(grid_shape)
 
 
 class Cartesian(object):
-    
+
     def __init__(self, *args, **kwargs):
         pass
-    
+
     def transform_lonlats(self, lons, lats):
-    
+
         coords = np.zeros((lons.size, 3), dtype=lons.dtype)
         deg2rad = lons.dtype.type(np.pi / 180)
         if ne:
@@ -170,13 +173,14 @@ class Cartesian(object):
             coords[:, 1] = ne.evaluate("R*cos(lats*deg2rad)*sin(lons*deg2rad)")
             coords[:, 2] = ne.evaluate("R*sin(lats*deg2rad)")
         else:
-            coords[:, 0] = R*np.cos(lats*deg2rad)*np.cos(lons*deg2rad)
-            coords[:, 1] = R*np.cos(lats*deg2rad)*np.sin(lons*deg2rad)
-            coords[:, 2] = R*np.sin(lats*deg2rad)
+            coords[:, 0] = R * np.cos(lats * deg2rad) * np.cos(lons * deg2rad)
+            coords[:, 1] = R * np.cos(lats * deg2rad) * np.sin(lons * deg2rad)
+            coords[:, 2] = R * np.sin(lats * deg2rad)
         return coords
-     
+
 Cartesian_MP = Cartesian
 
+
 def _run_jobs(target, args, nprocs):
     """Run process pool
     """
@@ -185,25 +189,29 @@ def _run_jobs(target, args, nprocs):
     # access to these values are serialized automatically
     ierr = mp.Value(ctypes.c_int, 0)
     err_msg = mp.Array(ctypes.c_char, 1024)
-    
+
     args.extend((ierr, err_msg))
-    
+
     pool = [mp.Process(target=target, args=args) for n in range(nprocs)]
-    for p in pool: p.start()
-    for p in pool: p.join()
+    for p in pool:
+        p.start()
+    for p in pool:
+        p.join()
     if ierr.value != 0:
-        raise RuntimeError,\
-                ('%d errors in worker processes. Last one reported:\n%s'%\
-                 (ierr.value, err_msg.value))
-                
+        raise RuntimeError('%d errors in worker processes. Last one reported:\n%s' %
+                           (ierr.value, err_msg.value.decode()))
+
 # This is executed in an external process:
-def _parallel_query(scheduler, # scheduler for load balancing
-                    data, ndata, ndim, leafsize, # data needed to reconstruct the kd-tree
-                    x, nx, d, i, # query data and results
-                    k, eps, p, dub, # auxillary query parameters
-                    ierr, err_msg): # return values (0 on success)
-    
-    try:     
+
+
+def _parallel_query(scheduler,  # scheduler for load balancing
+                    # data needed to reconstruct the kd-tree
+                    data, ndata, ndim, leafsize,
+                    x, nx, d, i,  # query data and results
+                    k, eps, p, dub,  # auxiliary query parameters
+                    ierr, err_msg):  # return values (0 on success)
+
+    try:
         # View shared memory as ndarrays.
         _data = shmem_as_ndarray(data).reshape((ndata, ndim))
         _x = shmem_as_ndarray(x).reshape((nx, ndim))
@@ -223,17 +231,18 @@ def _parallel_query(scheduler, # scheduler for load balancing
         for s in scheduler:
             if k == 1:
                 _d[s], _i[s] = kdtree.query(_x[s,:], k=1, eps=eps, p=p,\
-                                                distance_upper_bound=dub)
+                                            distance_upper_bound=dub)
             else:
                 _d[s,:], _i[s,:] = kdtree.query(_x[s,:], k=k, eps=eps, p=p,\
                                                 distance_upper_bound=dub)
     # An error occurred, increment the return value ierr.
     # Access to ierr is serialized by multiprocessing.
-    except Exception, e:
+    except Exception as e:
         ierr.value += 1
-        err_msg.value = e.message  
-        
-def _parallel_proj(scheduler, data1, data2, res1, res2, proj_args, proj_kwargs,\
+        err_msg.value = str(e).encode()
+
+
+def _parallel_proj(scheduler, data1, data2, res1, res2, proj_args, proj_kwargs,
                    inverse, radians, errcheck, ierr, err_msg):
     try:
         # View shared memory as ndarrays.
@@ -241,35 +250,38 @@ def _parallel_proj(scheduler, data1, data2, res1, res2, proj_args, proj_kwargs,\
         _data2 = shmem_as_ndarray(data2)
         _res1 = shmem_as_ndarray(res1)
         _res2 = shmem_as_ndarray(res2)
-        
-        #Initialise pyproj
+
+        # Initialise pyproj
         proj = pyproj.Proj(*proj_args, **proj_kwargs)
-        
-        #Reproject data segment
+
+        # Reproject data segment
         for s in scheduler:
-            _res1[s], _res2[s] = proj(_data1[s], _data2[s], inverse=inverse,\
-                                       radians=radians, errcheck=errcheck)
-    
+            _res1[s], _res2[s] = proj(_data1[s], _data2[s], inverse=inverse,
+                                      radians=radians, errcheck=errcheck)
+
     # An error occurred, increment the return value ierr.
     # Access to ierr is serialized by multiprocessing.
-    except Exception, e:
+    except Exception as e:
         ierr.value += 1
-        err_msg.value = e.message  
-        
+        err_msg.value = str(e).encode()
+
+
 def _parallel_transform(scheduler, lons, lats, n, coords, ierr, err_msg):
     try:
         # View shared memory as ndarrays.
         _lons = shmem_as_ndarray(lons)
         _lats = shmem_as_ndarray(lats)
         _coords = shmem_as_ndarray(coords).reshape((n, 3))
-        
-        #Transform to cartesian coordinates
+
+        # Transform to cartesian coordinates
         for s in scheduler:
-            _coords[s, 0] = R*np.cos(np.radians(_lats[s]))*np.cos(np.radians(_lons[s]))
-            _coords[s, 1] = R*np.cos(np.radians(_lats[s]))*np.sin(np.radians(_lons[s]))
-            _coords[s, 2] = R*np.sin(np.radians(_lats[s]))
+            _coords[s, 0] = R * \
+                np.cos(np.radians(_lats[s])) * np.cos(np.radians(_lons[s]))
+            _coords[s, 1] = R * \
+                np.cos(np.radians(_lats[s])) * np.sin(np.radians(_lons[s]))
+            _coords[s, 2] = R * np.sin(np.radians(_lats[s]))
     # An error occurred, increment the return value ierr.
     # Access to ierr is serialized by multiprocessing.
-    except Exception, e:
+    except Exception as e:
         ierr.value += 1
-        err_msg.value = e.message  
+        err_msg.value = str(e).encode()
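
The Cartesian.transform_lonlats() method above maps geographic coordinates onto a sphere of radius R; the same transform written out as a standalone NumPy sketch (input arrays are illustrative):

import numpy as np

R = 6370997.0  # Earth radius used throughout pyresample, in metres

lons = np.array([0.0, 90.0, -45.0])
lats = np.array([0.0, 0.0, 45.0])

lon_r = np.radians(lons)
lat_r = np.radians(lats)

# Same formulas as Cartesian.transform_lonlats and _parallel_transform
coords = np.empty((lons.size, 3))
coords[:, 0] = R * np.cos(lat_r) * np.cos(lon_r)
coords[:, 1] = R * np.cos(lat_r) * np.sin(lon_r)
coords[:, 2] = R * np.sin(lat_r)
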
diff --git a/pyresample/data_reduce.py b/pyresample/data_reduce.py
index fc5c5f8..693f410 100644
--- a/pyresample/data_reduce.py
+++ b/pyresample/data_reduce.py
@@ -1,33 +1,36 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2015  Esben S. Nielsen
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Reduce data sets based on geographical information"""
 
+from __future__ import absolute_import
+
 import numpy as np
 
 
-#Earth radius
+# Earth radius
 R = 6370997.0
 
-def swath_from_cartesian_grid(cart_grid, lons, lats, data, 
+
+def swath_from_cartesian_grid(cart_grid, lons, lats, data,
                               radius_of_influence):
     """Makes coarse data reduction of swath data by comparison with 
     cartesian grid
-    
+
     :Parameters:
     chart_grid : numpy array          
         Grid of area cartesian coordinates
@@ -39,26 +42,27 @@ def swath_from_cartesian_grid(cart_grid, lons, lats, data,
         Swath data
     radius_of_influence : float 
         Cut off distance in meters
-    
+
     :Returns: 
     (lons, lats, data) : list of numpy arrays
         Reduced swath data and coordinate set
     """
-    
-    valid_index = get_valid_index_from_cartesian_grid(cart_grid, lons, lats, 
+
+    valid_index = get_valid_index_from_cartesian_grid(cart_grid, lons, lats,
                                                       radius_of_influence)
 
     lons = lons[valid_index]
     lats = lats[valid_index]
     data = data[valid_index]
-    
+
     return lons, lats, data
 
-def get_valid_index_from_cartesian_grid(cart_grid, lons, lats, 
+
+def get_valid_index_from_cartesian_grid(cart_grid, lons, lats,
                                         radius_of_influence):
     """Calculates relevant data indices using coarse data reduction of swath 
     data by comparison with cartesian grid
-    
+
     :Parameters:
     chart_grid : numpy array          
         Grid of area cartesian coordinates
@@ -70,40 +74,41 @@ def get_valid_index_from_cartesian_grid(cart_grid, lons, lats,
         Swath data
     radius_of_influence : float 
         Cut off distance in meters
-    
+
     :Returns: 
     valid_index : numpy array
         Boolean array of same size as lons and lats indicating relevant indices
     """
-    
+
     def _get_lons(x, y):
-        return np.rad2deg(np.arccos(x/np.sqrt(x**2 + y**2)))*np.sign(y)
-    
+        return np.rad2deg(np.arccos(x / np.sqrt(x ** 2 + y ** 2))) * np.sign(y)
+
     def _get_lats(z):
-        return 90 - np.rad2deg(np.arccos(z/R))
-    
-    #Get sides of target grid and transform to lon lats
-    lons_side1 = _get_lons(cart_grid[0, :, 0], cart_grid[0, :, 1])  
+        return 90 - np.rad2deg(np.arccos(z / R))
+
+    # Get sides of target grid and transform to lon lats
+    lons_side1 = _get_lons(cart_grid[0, :, 0], cart_grid[0, :, 1])
     lons_side2 = _get_lons(cart_grid[:, -1, 0], cart_grid[:, -1, 1])
     lons_side3 = _get_lons(cart_grid[-1, ::-1, 0], cart_grid[-1, ::-1, 1])
     lons_side4 = _get_lons(cart_grid[::-1, 0, 0], cart_grid[::-1, 0, 1])
-    
+
     lats_side1 = _get_lats(cart_grid[0, :, 2])
     lats_side2 = _get_lats(cart_grid[:, -1, 2])
     lats_side3 = _get_lats(cart_grid[-1, ::-1, 2])
     lats_side4 = _get_lats(cart_grid[::-1, 0, 2])
-    
+
     valid_index = _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
                                    lats_side1, lats_side2, lats_side3, lats_side4,
                                    lons, lats, radius_of_influence)
-    
+
     return valid_index
 
-def swath_from_lonlat_grid(grid_lons, grid_lats, lons, lats, data,\
+
+def swath_from_lonlat_grid(grid_lons, grid_lats, lons, lats, data,
                            radius_of_influence):
     """Makes coarse data reduction of swath data by comparison with 
     lon lat grid
-    
+
     :Parameters:
     grid_lons : numpy array          
         Grid of area lons
@@ -117,25 +122,27 @@ def swath_from_lonlat_grid(grid_lons, grid_lats, lons, lats, data,\
         Swath data
     radius_of_influence : float 
         Cut off distance in meters
-    
+
     :Returns:
     (lons, lats, data) : list of numpy arrays
         Reduced swath data and coordinate set 
     """
-    
-    valid_index = get_valid_index_from_lonlat_grid(grid_lons, grid_lats, lons, lats, radius_of_influence)
+
+    valid_index = get_valid_index_from_lonlat_grid(
+        grid_lons, grid_lats, lons, lats, radius_of_influence)
 
     lons = lons[valid_index]
     lats = lats[valid_index]
     data = data[valid_index]
-    
+
     return lons, lats, data
 
-def swath_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, data,\
-                           radius_of_influence):
+
+def swath_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, data,
+                                 radius_of_influence):
     """Makes coarse data reduction of swath data by comparison with 
     lon lat boundary
-    
+
     :Parameters:
     boundary_lons : numpy array          
         Grid of area lons
@@ -149,26 +156,26 @@ def swath_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, data,
         Swath data
     radius_of_influence : float 
         Cut off distance in meters
-    
+
     :Returns:
     (lons, lats, data) : list of numpy arrays
         Reduced swath data and coordinate set 
     """
-    
-    valid_index = get_valid_index_from_lonlat_boundaries(boundary_lons, 
+
+    valid_index = get_valid_index_from_lonlat_boundaries(boundary_lons,
                                                          boundary_lats, lons, lats, radius_of_influence)
 
     lons = lons[valid_index]
     lats = lats[valid_index]
     data = data[valid_index]
-    
+
     return lons, lats, data
 
 
 def get_valid_index_from_lonlat_grid(grid_lons, grid_lats, lons, lats, radius_of_influence):
     """Calculates relevant data indices using coarse data reduction of swath 
     data by comparison with lon lat grid
-    
+
     :Parameters:
     chart_grid : numpy array          
         Grid of area cartesian coordinates
@@ -180,64 +187,66 @@ def get_valid_index_from_lonlat_grid(grid_lons, grid_lats, lons, lats, radius_of
         Swath data
     radius_of_influence : float 
         Cut off distance in meters
-    
+
     :Returns: 
     valid_index : numpy array
         Boolean array of same size as lon and lat indicating relevant indices
     """
-    
-    #Get sides of target grid
-    lons_side1 = grid_lons[0, :]    
+
+    # Get sides of target grid
+    lons_side1 = grid_lons[0, :]
     lons_side2 = grid_lons[:, -1]
     lons_side3 = grid_lons[-1, ::-1]
     lons_side4 = grid_lons[::-1, 0]
 
-    lats_side1 = grid_lats[0, :]    
+    lats_side1 = grid_lats[0, :]
     lats_side2 = grid_lats[:, -1]
     lats_side3 = grid_lats[-1, :]
     lats_side4 = grid_lats[:, 0]
-    
+
     valid_index = _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
                                    lats_side1, lats_side2, lats_side3, lats_side4,
                                    lons, lats, radius_of_influence)
-    
+
     return valid_index
 
+
 def get_valid_index_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, radius_of_influence):
     """Find relevant indices from grid boundaries using the 
     winding number theorem"""
-    
-    valid_index = _get_valid_index(boundary_lons.side1, boundary_lons.side2, 
+
+    valid_index = _get_valid_index(boundary_lons.side1, boundary_lons.side2,
                                    boundary_lons.side3, boundary_lons.side4,
-                                   boundary_lats.side1, boundary_lats.side2, 
+                                   boundary_lats.side1, boundary_lats.side2,
                                    boundary_lats.side3, boundary_lats.side4,
                                    lons, lats, radius_of_influence)
-    
+
     return valid_index
-    
+
+
 def _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
                      lats_side1, lats_side2, lats_side3, lats_side4,
                      lons, lats, radius_of_influence):
     """Find relevant indices from grid boundaries using the 
     winding number theorem"""
-    
-    #Coarse reduction of data based on extrema analysis of the boundary 
-    #lon lat values of the target grid
+
+    # Coarse reduction of data based on extrema analysis of the boundary
+    # lon lat values of the target grid
     illegal_lons = (((lons_side1 < -180) | (lons_side1 > 180)).any() or
                     ((lons_side2 < -180) | (lons_side2 > 180)).any() or
                     ((lons_side3 < -180) | (lons_side3 > 180)).any() or
                     ((lons_side4 < -180) | (lons_side4 > 180)).any())
-    
+
     illegal_lats = (((lats_side1 < -90) | (lats_side1 > 90)).any() or
                     ((lats_side2 < -90) | (lats_side2 > 90)).any() or
                     ((lats_side3 < -90) | (lats_side3 > 90)).any() or
                     ((lats_side4 < -90) | (lats_side4 > 90)).any())
-    
+
     if illegal_lons or illegal_lats:
-        #Grid boundaries are not safe to operate on
-        return np.ones(lons.size, dtype=np.bool)   
-    
-    #Find sum angle sum of grid boundary
+        # Grid boundaries are not safe to operate on
+        return np.ones(lons.size, dtype=np.bool)
+
+    # Find angle sum of grid boundary
     angle_sum = 0
     for side in (lons_side1, lons_side2, lons_side3, lons_side4):
         prev = None
@@ -246,12 +255,12 @@ def _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
             if prev:
                 delta = lon - prev
                 if abs(delta) > 180:
-                    delta = (abs(delta)-360) * (delta//abs(delta))
+                    delta = (abs(delta) - 360) * (delta // abs(delta))
                 angle_sum += delta
                 side_sum += delta
             prev = lon
-    
-    #Buffer min and max lon and lat of interest with radius of interest
+
+    # Buffer min and max lon and lat of interest with radius of interest
     lat_min = min(lats_side1.min(), lats_side2.min(), lats_side3.min(),
                   lats_side4.min())
     lat_min_buffered = lat_min - float(radius_of_influence) / R
@@ -261,42 +270,43 @@ def _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
 
     max_angle_s2 = max(abs(lats_side2.max()), abs(lats_side2.min()))
     max_angle_s4 = max(abs(lats_side4.max()), abs(lats_side4.min()))
-    lon_min_buffered = (lons_side4.min() - 
-                       float(radius_of_influence) / 
-                       (np.sin(np.radians(max_angle_s4)) * R))
-                    
-    lon_max_buffered = (lons_side2.max() + 
-                       float(radius_of_influence) / 
-                       (np.sin(np.radians(max_angle_s2)) * R))
-    
-    #From the winding number theorem follows:
-    #angle_sum possiblilities:
+    lon_min_buffered = (lons_side4.min() -
+                        float(radius_of_influence) /
+                        (np.sin(np.radians(max_angle_s4)) * R))
+
+    lon_max_buffered = (lons_side2.max() +
+                        float(radius_of_influence) /
+                        (np.sin(np.radians(max_angle_s2)) * R))
+
+    # From the winding number theorem follows:
+    # angle_sum possibilities:
     #-360: area covers north pole
     # 360: area covers south pole
     #   0: area covers no poles
-    #else: area covers both poles    
+    # else: area covers both poles
     if round(angle_sum) == -360:
-        #Covers NP
-        valid_index = (lats >= lat_min_buffered)        
+        # Covers NP
+        valid_index = (lats >= lat_min_buffered)
     elif round(angle_sum) == 360:
-        #Covers SP
-        valid_index = (lats <= lat_max_buffered)        
+        # Covers SP
+        valid_index = (lats <= lat_max_buffered)
     elif round(angle_sum) == 0:
-        #Covers no poles
+        # Covers no poles
         valid_lats = (lats >= lat_min_buffered) * (lats <= lat_max_buffered)
 
         if lons_side2.min() > lons_side4.max():
-            #No date line crossing                      
-            valid_lons = (lons >= lon_min_buffered) * (lons <= lon_max_buffered)
+            # No date line crossing
+            valid_lons = (lons >= lon_min_buffered) * \
+                (lons <= lon_max_buffered)
         else:
-            #Date line crossing
+            # Date line crossing
             seg1 = (lons >= lon_min_buffered) * (lons <= 180)
             seg2 = (lons <= lon_max_buffered) * (lons >= -180)
-            valid_lons = seg1 + seg2                        
-        
-        valid_index = valid_lats * valid_lons        
+            valid_lons = seg1 + seg2
+
+        valid_index = valid_lats * valid_lons
     else:
-        #Covers both poles don't reduce
+        # Covers both poles don't reduce
         valid_index = np.ones(lons.size, dtype=np.bool)
 
     return valid_index
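
A usage sketch of the coarse data reduction implemented above; the target grid and the randomly scattered swath samples are illustrative placeholders:

import numpy as np
from pyresample import data_reduce

# Illustrative 2-D lon/lat target grid and a scattered swath
grid_lons, grid_lats = np.meshgrid(np.linspace(-10., 10., 100),
                                   np.linspace(30., 50., 100))
swath_lons = np.random.uniform(-180., 180., 10000)
swath_lats = np.random.uniform(-90., 90., 10000)
swath_data = np.random.rand(10000)

# Keep only the swath samples that can possibly influence the grid,
# given a 50 km radius of influence.
lons_r, lats_r, data_r = data_reduce.swath_from_lonlat_grid(
    grid_lons, grid_lats, swath_lons, swath_lats, swath_data,
    radius_of_influence=50000)
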
diff --git a/pyresample/geo_filter.py b/pyresample/geo_filter.py
index e6fec3e..31317f6 100644
--- a/pyresample/geo_filter.py
+++ b/pyresample/geo_filter.py
@@ -1,11 +1,13 @@
 import numpy as np
 
-import _spatial_mp
-import geometry
+from . import _spatial_mp
+from . import geometry
+
 
 class GridFilter(object):
+
     """Geographic filter from a grid
-    
+
     :Parameters:
     grid_ll_x : float
         Projection x coordinate of lower left corner of lower left pixel
@@ -18,59 +20,59 @@ class GridFilter(object):
     proj4_string : string 
     mask : numpy array
         Mask as boolean numpy array
-        
+
     """
-    
+
     def __init__(self, area_def, filter, nprocs=1):
         self.area_def = area_def
         self._filter = filter.astype(np.bool)
         self.nprocs = nprocs
-        
+
     def get_valid_index(self, geometry_def):
         """Calculates valid_index array  based on lons and lats
-        
+
         :Parameters:
         lons : numpy array
         lats : numpy array
-        
+
         :Returns:
             Boolean numpy array of same shape as lons and lats
-             
+
         """
-        
+
         lons = geometry_def.lons[:]
         lats = geometry_def.lats[:]
-        
-        #Get projection coords
+
+        # Get projection coords
         if self.nprocs > 1:
             proj = _spatial_mp.Proj_MP(**self.area_def.proj_dict)
         else:
             proj = _spatial_mp.Proj(**self.area_def.proj_dict)
-            
+
         x_coord, y_coord = proj(lons, lats, nprocs=self.nprocs)
-                        
-        #Find array indices of coordinates   
-        target_x = ((x_coord / self.area_def.pixel_size_x) + 
+
+        # Find array indices of coordinates
+        target_x = ((x_coord / self.area_def.pixel_size_x) +
                     self.area_def.pixel_offset_x).astype(np.int32)
-        target_y = (self.area_def.pixel_offset_y - 
-                    (y_coord / self.area_def.pixel_size_y)).astype(np.int32)        
-        
-        #Create mask for pixels outside array (invalid pixels)
+        target_y = (self.area_def.pixel_offset_y -
+                    (y_coord / self.area_def.pixel_size_y)).astype(np.int32)
+
+        # Create mask for pixels outside array (invalid pixels)
         target_x_valid = (target_x >= 0) & (target_x < self.area_def.x_size)
         target_y_valid = (target_y >= 0) & (target_y < self.area_def.y_size)
-        
-        #Set index of invalid pixels to 0
-        target_x[np.invert(target_x_valid)] = 0 
+
+        # Set index of invalid pixels to 0
+        target_x[np.invert(target_x_valid)] = 0
         target_y[np.invert(target_y_valid)] = 0
-        
-        #Find mask
+
+        # Find mask
         filter = self._filter[target_y, target_x]
-        
-        #Remove invalid pixels
+
+        # Remove invalid pixels
         filter = (filter & target_x_valid & target_y_valid).astype(np.bool)
-    
+
         return filter
-    
+
     def filter(self, geometry_def, data):
         lons = geometry_def.lons[:]
         lats = geometry_def.lats[:]
@@ -79,8 +81,6 @@ class GridFilter(object):
         lats_f = lats[valid_index]
         data_f = data[valid_index]
         geometry_def_f = \
-            geometry.CoordinateDefinition(lons_f, lats_f, 
+            geometry.CoordinateDefinition(lons_f, lats_f,
                                           nprocs=geometry_def.nprocs)
         return geometry_def_f, data_f
-        
-        
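
A short sketch of how the GridFilter class above is used: a boolean mask defined on an area is applied to swath samples, keeping only the samples whose projected position falls on a True grid cell (area, mask and swath values are illustrative):

import numpy as np
from pyresample import geometry, geo_filter

# Equirectangular 8 x 8 grid covering the whole globe
area_def = geometry.AreaDefinition('test', 'Test area', 'test',
                                   {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0},
                                   8, 8,
                                   (-20037508.34, -10018754.17,
                                    20037508.34, 10018754.17))
mask = np.zeros((8, 8), dtype=bool)
mask[0:4, :] = True          # keep only the northern half of the grid

lons = np.array([-170., -30., 30., 170.])
lats = np.array([20., -40., 50., -80.])
data = np.array([1., 2., 3., 4.])
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

grid_filter = geo_filter.GridFilter(area_def, mask)
swath_def_f, data_f = grid_filter.filter(swath_def, data)
# data_f keeps only the samples north of the equator, i.e. 1. and 3.
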
diff --git a/pyresample/geometry.py b/pyresample/geometry.py
index 6593c94..fa4b977 100644
--- a/pyresample/geometry.py
+++ b/pyresample/geometry.py
@@ -1,26 +1,34 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010, 2013  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010-2015
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# Authors:
+#    Esben S. Nielsen
+#    Thomas Lavergne
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Classes for geometry operations"""
-import weakref
+
+from __future__ import absolute_import
+
+import warnings
 
 import numpy as np
 
-import _spatial_mp
+from . import utils
+from . import _spatial_mp
 
 
 class DimensionError(Exception):
@@ -28,9 +36,10 @@ class DimensionError(Exception):
 
 
 class Boundary(object):
+
     """Container for geometry boundary.
     Labelling starts in upper left corner and proceeds clockwise"""
-      
+
     def __init__(self, side1, side2, side3, side4):
         self.side1 = side1
         self.side2 = side2
@@ -39,22 +48,39 @@ class Boundary(object):
 
 
 class BaseDefinition(object):
+
     """Base class for geometry definitions"""
-           
+
     def __init__(self, lons=None, lats=None, nprocs=1):
         if type(lons) != type(lats):
             raise TypeError('lons and lats must be of same type')
         elif lons is not None:
             if lons.shape != lats.shape:
                 raise ValueError('lons and lats must have same shape')
-            
+
         self.nprocs = nprocs
 
-        self.lons = lons
-        self.lats = lats
-        
+        # check the latitudes
+        if lats is not None and ((lats.min() < -90. or lats.max() > +90.)):
+            # throw exception
+            raise ValueError(
+                'Some latitudes are outside the [-90.;+90] validity range')
+        else:
+            self.lats = lats
+
+        # check the longitudes
+        if lons is not None and ((lons.min() < -180. or lons.max() >= +180.)):
+            # issue warning
+            warnings.warn('All geometry objects expect longitudes in the [-180:+180[ range. ' +
+                          'We will now automatically wrap your longitudes into [-180:+180[, and continue. ' +
+                          'To avoid this warning next time, use routine utils.wrap_longitudes().')
+            # wrap longitudes to [-180;+180[
+            self.lons = utils.wrap_longitudes(lons)
+        else:
+            self.lons = lons
+
         self.cartesian_coords = None
-    
+
     def __eq__(self, other):
         """Test for approximate equality"""
 
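
The latitude/longitude validation added to BaseDefinition.__init__ above refers to utils.wrap_longitudes(); a tiny sketch of wrapping longitudes into the [-180, 180[ range before building a geometry object, assuming the usual ((lon + 180) % 360) - 180 convention:

import numpy as np
from pyresample import utils

lons = np.array([170., 190., 359.9, -200.])
wrapped = utils.wrap_longitudes(lons)
# -> approximately [170., -170., -0.1, 160.]
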
@@ -76,53 +102,53 @@ class BaseDefinition(object):
                     np.allclose(self_lats, other_lats, atol=1e-6,
                                 rtol=5e-9))
         except (AttributeError, ValueError):
-            return False  
+            return False
 
     def __ne__(self, other):
         """Test for approximate equality"""
-        
+
         return not self.__eq__(other)
-    
+
     def get_lonlat(self, row, col):
         """Retrieve lon and lat of single pixel
-        
+
         :Parameters:
         row : int
         col : int
-        
+
         :Returns:
         (lon, lat) : tuple of floats
         """
-        
+
         if self.ndim != 2:
             raise DimensionError(('operation undefined '
                                   'for %sD geometry ') % self.ndim)
         elif self.lons is None or self.lats is None:
             raise ValueError('lon/lat values are not defined')
         return self.lons[row, col], self.lats[row, col]
-    
+
     def get_lonlats(self, data_slice=None, **kwargs):
         """Base method for lon lat retrieval with slicing"""
-        
+
         if self.lons is None or self.lats is None:
             raise ValueError('lon/lat values are not defined')
         elif data_slice is None:
             return self.lons, self.lats
         else:
             return self.lons[data_slice], self.lats[data_slice]
-   
+
     def get_boundary_lonlats(self):
-            """Returns Boundary objects"""
-            
-            side1 = self.get_lonlats(data_slice=(0, slice(None)))
-            side2 = self.get_lonlats(data_slice=(slice(None), -1))
-            side3 = self.get_lonlats(data_slice=(-1, slice(None)))
-            side4 = self.get_lonlats(data_slice=(slice(None), 0))
-            return Boundary(side1[0], side2[0], side3[0][::-1], side4[0][::-1]), Boundary(side1[1], side2[1], side3[1][::-1], side4[1][::-1])
-         
+        """Returns Boundary objects"""
+
+        side1 = self.get_lonlats(data_slice=(0, slice(None)))
+        side2 = self.get_lonlats(data_slice=(slice(None), -1))
+        side3 = self.get_lonlats(data_slice=(-1, slice(None)))
+        side4 = self.get_lonlats(data_slice=(slice(None), 0))
+        return Boundary(side1[0], side2[0], side3[0][::-1], side4[0][::-1]), Boundary(side1[1], side2[1], side3[1][::-1], side4[1][::-1])
+
     def get_cartesian_coords(self, nprocs=None, data_slice=None, cache=False):
         """Retrieve cartesian coordinates of geometry definition
-        
+
         :Parameters:
         nprocs : int, optional
             Number of processor cores to be used.
@@ -131,45 +157,45 @@ class BaseDefinition(object):
             Calculate only cartesian coordinates for the defined slice
         cache : bool, optional
             Store result the result. Requires data_slice to be None
-            
+
         :Returns:
         cartesian_coords : numpy array
         """
 
         if self.cartesian_coords is None:
-            #Coordinates are not cached
+            # Coordinates are not cached
             if nprocs is None:
                 nprocs = self.nprocs
-            
+
             if data_slice is None:
-                #Use full slice
+                # Use full slice
                 data_slice = slice(None)
-                
+
             lons, lats = self.get_lonlats(nprocs=nprocs, data_slice=data_slice)
-                    
+
             if nprocs > 1:
                 cartesian = _spatial_mp.Cartesian_MP(nprocs)
             else:
                 cartesian = _spatial_mp.Cartesian()
-                            
-            cartesian_coords = cartesian.transform_lonlats(np.ravel(lons), 
+
+            cartesian_coords = cartesian.transform_lonlats(np.ravel(lons),
                                                            np.ravel(lats))
-            
+
             if isinstance(lons, np.ndarray) and lons.ndim > 1:
-                #Reshape to correct shape
-                cartesian_coords = cartesian_coords.reshape(lons.shape[0], 
+                # Reshape to correct shape
+                cartesian_coords = cartesian_coords.reshape(lons.shape[0],
                                                             lons.shape[1], 3)
-            
+
             if cache and data_slice is None:
-                self.cartesian_coords = cartesian_coords  
+                self.cartesian_coords = cartesian_coords
         else:
-            #Coordinates are cached
+            # Coordinates are cached
             if data_slice is None:
                 cartesian_coords = self.cartesian_coords
             else:
                 cartesian_coords = self.cartesian_coords[data_slice]
-                
-        return cartesian_coords    
+
+        return cartesian_coords
 
     @property
     def corners(self):
@@ -180,7 +206,7 @@ class BaseDefinition(object):
                 Coordinate(*self.get_lonlat(0, -1)),
                 Coordinate(*self.get_lonlat(-1, -1)),
                 Coordinate(*self.get_lonlat(-1, 0))]
-        
+
     def __contains__(self, point):
         """Is a point inside the 4 corners of the current area? This uses
         great circle arcs as area boundaries.
@@ -197,28 +223,28 @@ class BaseDefinition(object):
         """Tests if the current area overlaps the *other* area. This is based
         solely on the corners of areas, assuming the boundaries to be great
         circles.
-        
+
         :Parameters:
         other : object
             Instance of subclass of BaseDefinition
-            
+
         :Returns:
         overlaps : bool
         """
 
         from pyresample.spherical_geometry import Arc
-        
+
         self_corners = self.corners
 
         other_corners = other.corners
-        
+
         for i in self_corners:
             if i in other:
                 return True
         for i in other_corners:
             if i in self:
                 return True
-    
+
         self_arc1 = Arc(self_corners[0], self_corners[1])
         self_arc2 = Arc(self_corners[1], self_corners[2])
         self_arc3 = Arc(self_corners[2], self_corners[3])
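
The __contains__ and overlaps methods above work purely on the four corner points of each geometry, treating the boundaries as great-circle arcs; a small usage sketch (the area definition and the 2 x 2 grid are illustrative):

import numpy as np
from pyresample import geometry

area_def = geometry.AreaDefinition('ease_sh', 'Example area', 'ease_sh',
                                   {'proj': 'laea', 'lat_0': '-90',
                                    'lon_0': '0', 'a': '6371228.0',
                                    'units': 'm'},
                                   425, 425,
                                   [-5326849.0625, -5326849.0625,
                                    5326849.0625, 5326849.0625])

# Point-in-area test based on the 4 corners and great-circle boundaries
print((0, -90) in area_def)         # True: the south pole lies inside

# Corner-based overlap test between two geometry definitions
lons = np.array([[-10., 10.], [-10., 10.]])
lats = np.array([[-80., -80.], [-70., -70.]])
grid_def = geometry.GridDefinition(lons=lons, lats=lats)
print(area_def.overlaps(grid_def))  # expected True for this configuration
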
@@ -246,11 +272,11 @@ class BaseDefinition(object):
     def intersection(self, other):
         """Returns the corners of the intersection polygon of the current area
         with *other*.
-        
+
         :Parameters:
         other : object
             Instance of subclass of BaseDefinition
-            
+
         :Returns:
         (corner1, corner2, corner3, corner4) : tuple of points
         """
@@ -259,25 +285,25 @@ class BaseDefinition(object):
 
     def overlap_rate(self, other):
         """Get how much the current area overlaps an *other* area.
-        
+
         :Parameters:
         other : object
             Instance of subclass of BaseDefinition
-            
+
         :Returns:
         overlap_rate : float
         """
-        
+
         from pyresample.spherical_geometry import get_polygon_area
         other_area = other.get_area()
         inter_area = get_polygon_area(self.intersection(other))
         return inter_area / other_area
 
 
- 
 class CoordinateDefinition(BaseDefinition):
+
     """Base class for geometry definitions defined by lons and lats only"""
-     
+
     def __init__(self, lons, lats, nprocs=1):
         if lons.shape == lats.shape and lons.dtype == lats.dtype:
             self.shape = lons.shape
@@ -286,21 +312,21 @@ class CoordinateDefinition(BaseDefinition):
             self.dtype = lons.dtype
         else:
             raise ValueError(('%s must be created with either '
-                             'lon/lats of the same shape with same dtype') % 
+                              'lon/lats of the same shape with same dtype') %
                              self.__class__.__name__)
         super(CoordinateDefinition, self).__init__(lons, lats, nprocs)
-        
+
     def concatenate(self, other):
         if self.ndim != other.ndim:
             raise DimensionError(('Unable to concatenate %sD and %sD '
                                   'geometries') % (self.ndim, other.ndim))
-        klass = _get_highest_level_class(self, other)        
+        klass = _get_highest_level_class(self, other)
         lons = np.concatenate((self.lons, other.lons))
         lats = np.concatenate((self.lats, other.lats))
         nprocs = min(self.nprocs, other.nprocs)
         return klass(lons, lats, nprocs=nprocs)
-        
-    def append(self, other):    
+
+    def append(self, other):
         if self.ndim != other.ndim:
             raise DimensionError(('Unable to append %sD and %sD '
                                   'geometries') % (self.ndim, other.ndim))
@@ -310,27 +336,28 @@ class CoordinateDefinition(BaseDefinition):
         self.size = self.lons.size
 
     def __str__(self):
-        #Rely on numpy's object printing
-        return ('Shape: %s\nLons: %s\nLats: %s') % (str(self.shape), 
+        # Rely on numpy's object printing
+        return ('Shape: %s\nLons: %s\nLats: %s') % (str(self.shape),
                                                     str(self.lons),
                                                     str(self.lats))
-        
+
 
 class GridDefinition(CoordinateDefinition):
+
     """Grid defined by lons and lats
-    
+
     :Parameters:
     lons : numpy array
     lats : numpy array
     nprocs : int, optional
         Number of processor cores to be used for calculations.
-        
+
     :Attributes:
     shape : tuple
         Grid shape as (rows, cols)
     size : int
         Number of elements in grid
-        
+
     Properties:
     lons : object
         Grid lons
@@ -339,25 +366,26 @@ class GridDefinition(CoordinateDefinition):
     cartesian_coords : object
         Grid cartesian coordinates
     """
-    
+
     def __init__(self, lons, lats, nprocs=1):
         if lons.shape != lats.shape:
             raise ValueError('lon and lat grid must have same shape')
         elif lons.ndim != 2:
             raise ValueError('2 dimensional lon lat grid expected')
-        
+
         super(GridDefinition, self).__init__(lons, lats, nprocs)
 
 
 class SwathDefinition(CoordinateDefinition):
+
     """Swath defined by lons and lats
-    
+
     :Parameters:
     lons : numpy array
     lats : numpy array
     nprocs : int, optional
         Number of processor cores to be used for calculations.
-        
+
     :Attributes:
     shape : tuple
         Swath shape
@@ -365,7 +393,7 @@ class SwathDefinition(CoordinateDefinition):
         Number of elements in swath
     ndims : int
         Swath dimensions
-        
+
     Properties:
     lons : object
         Swath lons
@@ -374,7 +402,7 @@ class SwathDefinition(CoordinateDefinition):
     cartesian_coords : object
         Swath cartesian coordinates
     """
-    
+
     def __init__(self, lons, lats, nprocs=1):
         if lons.shape != lats.shape:
             raise ValueError('lon and lat arrays must have same shape')
@@ -383,7 +411,8 @@ class SwathDefinition(CoordinateDefinition):
         super(SwathDefinition, self).__init__(lons, lats, nprocs)
 
 
-class AreaDefinition(BaseDefinition):    
+class AreaDefinition(BaseDefinition):
+
     """Holds definition of an area.
 
     :Parameters:
@@ -407,7 +436,7 @@ class AreaDefinition(BaseDefinition):
         Grid lons
     lats : numpy array, optional
         Grid lats
-    
+
     :Attributes:
     area_id : str         
         ID of area
@@ -441,7 +470,7 @@ class AreaDefinition(BaseDefinition):
     pixel_offset_y : float 
         y offset between projection center and upper left corner of upper 
         left pixel in units of pixels.
-    
+
     Properties:
     proj4_string : str
         Projection defined as Proj.4 string
@@ -456,8 +485,7 @@ class AreaDefinition(BaseDefinition):
     projection_y_coords : object
         Grid projection y coordinate
     """
-                  
-            
+
     def __init__(self, area_id, name, proj_id, proj_dict, x_size, y_size,
                  area_extent, nprocs=1, lons=None, lats=None, dtype=np.float64):
         if not isinstance(proj_dict, dict):
@@ -481,69 +509,69 @@ class AreaDefinition(BaseDefinition):
         self.pixel_size_y = (area_extent[3] - area_extent[1]) / float(y_size)
         self.proj_dict = proj_dict
         self.area_extent = tuple(area_extent)
-        
+
         # Calculate area_extent in lon lat
         proj = _spatial_mp.Proj(**proj_dict)
-        corner_lons, corner_lats = proj((area_extent[0], area_extent[2]), 
-                                        (area_extent[1], area_extent[3]), 
+        corner_lons, corner_lats = proj((area_extent[0], area_extent[2]),
+                                        (area_extent[1], area_extent[3]),
                                         inverse=True)
-        self.area_extent_ll = (corner_lons[0], corner_lats[0], 
+        self.area_extent_ll = (corner_lons[0], corner_lats[0],
                                corner_lons[1], corner_lats[1])
-                
-        #Calculate projection coordinates of center of upper left pixel
+
+        # Calculate projection coordinates of center of upper left pixel
         self.pixel_upper_left = \
-                              (float(area_extent[0]) + 
-                               float(self.pixel_size_x) / 2,
-                               float(area_extent[3]) - 
-                               float(self.pixel_size_y) / 2)
-        
-        #Pixel_offset defines the distance to projection center from origen (UL)
-        #of image in units of pixels. 
+            (float(area_extent[0]) +
+             float(self.pixel_size_x) / 2,
+             float(area_extent[3]) -
+             float(self.pixel_size_y) / 2)
+
+        # Pixel_offset defines the distance to projection center from origin (UL)
+        # of image in units of pixels.
         self.pixel_offset_x = -self.area_extent[0] / self.pixel_size_x
         self.pixel_offset_y = self.area_extent[3] / self.pixel_size_y
-        
+
         self.projection_x_coords = None
         self.projection_y_coords = None
 
         self.dtype = dtype
-        
+
     def __str__(self):
-        #We need a sorted dictionary for a unique hash of str(self)
+        # We need a sorted dictionary for a unique hash of str(self)
         proj_dict = self.proj_dict
-        proj_str = ('{' + 
-                    ', '.join(["'%s': '%s'"%(str(k), str(proj_dict[k]))
+        proj_str = ('{' +
+                    ', '.join(["'%s': '%s'" % (str(k), str(proj_dict[k]))
                                for k in sorted(proj_dict.keys())]) +
                     '}')
         return ('Area ID: %s\nName: %s\nProjection ID: %s\n'
                 'Projection: %s\nNumber of columns: %s\nNumber of rows: %s\n'
-                'Area extent: %s') % (self.area_id, self.name, self.proj_id, 
-                                      proj_str, self.x_size, self.y_size, 
+                'Area extent: %s') % (self.area_id, self.name, self.proj_id,
+                                      proj_str, self.x_size, self.y_size,
                                       self.area_extent)
-               
+
     __repr__ = __str__
-    
+
     def __eq__(self, other):
         """Test for equality"""
-        
+
         try:
             return ((self.proj_dict == other.proj_dict) and
                     (self.shape == other.shape) and
                     (self.area_extent == other.area_extent))
         except AttributeError:
             return super(AreaDefinition, self).__eq__(other)
-        
+
     def __ne__(self, other):
         """Test for equality"""
-        
+
         return not self.__eq__(other)
-               
+
     def get_xy_from_lonlat(self, lon, lat):
         """Retrieve closest x and y coordinates (column, row indices) for the
         specified geolocation (lon, lat) if inside the area. If lon, lat is a
         single point, a ValueError is raised if the point is outside the area
         domain. If lon, lat are sequences of longitudes and latitudes, a tuple
         of masked arrays is returned.
-        
+
         :Input:
         lon : point or sequence (list or array) of longitudes
         lat : point or sequence (list or array) of latitudes
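For reference, a minimal sketch of constructing an AreaDefinition with the signature shown in this hunk (the projection and extent values below are made-up illustration values, not taken from this patch):

    from pyresample import geometry

    proj_dict = {'proj': 'laea', 'lat_0': '60', 'lon_0': '10',
                 'a': '6371228.0', 'units': 'm'}
    area_extent = (-1000000.0, -1000000.0, 1000000.0, 1000000.0)  # xmin, ymin, xmax, ymax

    area_def = geometry.AreaDefinition('test_area', 'Test area', 'test_area',
                                       proj_dict, 400, 400, area_extent)

    # Derived attributes computed in __init__ above
    print(area_def.pixel_size_x, area_def.pixel_size_y)  # 5000.0 5000.0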
@@ -557,13 +585,13 @@ class AreaDefinition(BaseDefinition):
         if isinstance(lat, list):
             lat = np.array(lat)
 
-        if ((isinstance(lon, np.ndarray) and 
-             not isinstance(lat, np.ndarray)) or 
-            (not isinstance(lon, np.ndarray) and 
+        if ((isinstance(lon, np.ndarray) and
+             not isinstance(lat, np.ndarray)) or
+            (not isinstance(lon, np.ndarray) and
              isinstance(lat, np.ndarray))):
-            raise ValueError("Both lon and lat needs to be of " + 
+            raise ValueError("Both lon and lat needs to be of " +
                              "the same type and have the same dimensions!")
-        
+
         if isinstance(lon, np.ndarray) and isinstance(lat, np.ndarray):
             if lon.shape != lat.shape:
                 raise ValueError("lon and lat is not of the same shape!")
@@ -571,44 +599,44 @@ class AreaDefinition(BaseDefinition):
         pobj = _spatial_mp.Proj(self.proj4_string)
         upl_x = self.area_extent[0]
         upl_y = self.area_extent[3]
-        xscale = abs(self.area_extent[2] - 
+        xscale = abs(self.area_extent[2] -
                      self.area_extent[0]) / float(self.x_size)
-        yscale = abs(self.area_extent[1] - 
+        yscale = abs(self.area_extent[1] -
                      self.area_extent[3]) / float(self.y_size)
-  
+
         xm_, ym_ = pobj(lon, lat)
         x__ = (xm_ - upl_x) / xscale
         y__ = (upl_y - ym_) / yscale
 
         if isinstance(x__, np.ndarray) and isinstance(y__, np.ndarray):
-            mask = (((x__ < 0 ) | (x__ > self.x_size)) | 
-                    ((y__ < 0)  | (y__ > self.y_size)))
-            return (np.ma.masked_array(x__.astype('int'), mask=mask, 
+            mask = (((x__ < 0) | (x__ > self.x_size)) |
+                    ((y__ < 0) | (y__ > self.y_size)))
+            return (np.ma.masked_array(x__.astype('int'), mask=mask,
                                        fill_value=-1),
                     np.ma.masked_array(y__.astype('int'), mask=mask,
                                        fill_value=-1))
         else:
             if ((x__ < 0 or x__ > self.x_size) or
-                (y__ < 0 or y__ > self.y_size)):
+                    (y__ < 0 or y__ > self.y_size)):
                 raise ValueError('Point outside area: (%f %f)' % (x__, y__))
             return int(x__), int(y__)
 
     def get_lonlat(self, row, col):
         """Retrieves lon and lat values of single point in area grid
-        
+
         :Parameters:
         row : int
         col : int
-        
+
         :Returns:
         (lon, lat) : tuple of floats
         """
-        
+
         return self.get_lonlats(nprocs=None, data_slice=(row, col))
-       
+
     def get_proj_coords(self, data_slice=None, cache=False, dtype=None):
         """Get projection coordinates of grid 
-    
+
         :Parameters:
         data_slice : slice object, optional
             Calculate only coordinates for specified slice
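A short sketch of the lookup methods just shown; single points outside the area raise ValueError, while array input yields masked arrays (same made-up area as in the previous example):

    import numpy as np
    from pyresample import geometry

    area_def = geometry.AreaDefinition(
        'test_area', 'Test area', 'test_area',
        {'proj': 'laea', 'lat_0': '60', 'lon_0': '10', 'a': '6371228.0', 'units': 'm'},
        400, 400, (-1000000.0, -1000000.0, 1000000.0, 1000000.0))

    # Single point: returns (col, row) indices inside the area
    col, row = area_def.get_xy_from_lonlat(10.0, 60.0)

    # Reverse lookup: lon/lat of that grid cell
    lon, lat = area_def.get_lonlat(row, col)

    # Sequence input: masked arrays, points outside the area are masked
    cols, rows = area_def.get_xy_from_lonlat(np.array([10.0, 120.0]),
                                             np.array([60.0, 60.0]))
    print(cols.mask)  # [False  True]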
@@ -619,18 +647,18 @@ class AreaDefinition(BaseDefinition):
         (target_x, target_y) : tuple of numpy arrays
             Grids of area x- and y-coordinates in projection units
         """
-        
+
         def get_val(val, sub_val, max):
-            #Get value with substitution and wrapping
+            # Get value with substitution and wrapping
             if val is None:
                 return sub_val
             else:
                 if val < 0:
-                    #Wrap index
+                    # Wrap index
                     return max + val
                 else:
                     return val
-        
+
         if self.projection_x_coords is not None and self.projection_y_coords is not None:
             # Projection coords are cached
             if data_slice is None:
@@ -644,79 +672,84 @@ class AreaDefinition(BaseDefinition):
         if dtype is None:
             dtype = self.dtype
 
-        #create coordinates of local area as ndarrays
+        # create coordinates of local area as ndarrays
         if data_slice is None or data_slice == slice(None):
-            #Full slice
+            # Full slice
             rows = self.y_size
             cols = self.x_size
             row_start = 0
             col_start = 0
-        else:            
+        else:
             if isinstance(data_slice, slice):
-                #Row slice
+                # Row slice
                 row_start = get_val(data_slice.start, 0, self.y_size)
                 col_start = 0
-                rows = get_val(data_slice.stop, self.y_size, self.y_size) - row_start                                 
+                rows = get_val(
+                    data_slice.stop, self.y_size, self.y_size) - row_start
                 cols = self.x_size
             elif isinstance(data_slice[0], slice) and isinstance(data_slice[1], slice):
-                #Block slice
+                # Block slice
                 row_start = get_val(data_slice[0].start, 0, self.y_size)
                 col_start = get_val(data_slice[1].start, 0, self.x_size)
-                rows = get_val(data_slice[0].stop, self.y_size, self.y_size) - row_start
-                cols = get_val(data_slice[1].stop, self.x_size, self.x_size) - col_start
+                rows = get_val(
+                    data_slice[0].stop, self.y_size, self.y_size) - row_start
+                cols = get_val(
+                    data_slice[1].stop, self.x_size, self.x_size) - col_start
             elif isinstance(data_slice[0], slice):
-                #Select from col
+                # Select from col
                 is_1d_select = True
                 row_start = get_val(data_slice[0].start, 0, self.y_size)
                 col_start = get_val(data_slice[1], 0, self.x_size)
-                rows = get_val(data_slice[0].stop, self.y_size, self.y_size) - row_start
+                rows = get_val(
+                    data_slice[0].stop, self.y_size, self.y_size) - row_start
                 cols = 1
             elif isinstance(data_slice[1], slice):
-                #Select from row
+                # Select from row
                 is_1d_select = True
                 row_start = get_val(data_slice[0], 0, self.y_size)
                 col_start = get_val(data_slice[1].start, 0, self.x_size)
                 rows = 1
-                cols = get_val(data_slice[1].stop, self.x_size, self.x_size) - col_start
+                cols = get_val(
+                    data_slice[1].stop, self.x_size, self.x_size) - col_start
             else:
-                #Single element select
+                # Single element select
                 is_single_value = True
-                
-                row_start = get_val(data_slice[0], 0, self.y_size)                
+
+                row_start = get_val(data_slice[0], 0, self.y_size)
                 col_start = get_val(data_slice[1], 0, self.x_size)
-                    
+
                 rows = 1
-                cols = 1    
-        
-        #Calculate coordinates
-        target_x = np.fromfunction(lambda i, j: (j + col_start) * 
-                                   self.pixel_size_x + 
+                cols = 1
+
+        # Calculate coordinates
+        target_x = np.fromfunction(lambda i, j: (j + col_start) *
+                                   self.pixel_size_x +
                                    self.pixel_upper_left[0],
-                                   (rows, 
+                                   (rows,
                                     cols), dtype=dtype)
-    
-        target_y = np.fromfunction(lambda i, j: 
-                                   self.pixel_upper_left[1] - 
+
+        target_y = np.fromfunction(lambda i, j:
+                                   self.pixel_upper_left[1] -
                                    (i + row_start) * self.pixel_size_y,
-                                   (rows, 
+                                   (rows,
                                     cols), dtype=dtype)
-        
+
         if is_single_value:
-            #Return single values
+            # Return single values
             target_x = float(target_x)
             target_y = float(target_y)
         elif is_1d_select:
-            #Reshape to 1D array
+            # Reshape to 1D array
             target_x = target_x.reshape((target_x.size,))
             target_y = target_y.reshape((target_y.size,))
-       
+
         if cache and data_slice is None:
             # Cache the result if requested
             self.projection_x_coords = target_x
             self.projection_y_coords = target_y
 
         return target_x, target_y
-        
+
     @property
     def proj_x_coords(self):
         return self.get_proj_coords(data_slice=(0, slice(None)))[0]
@@ -725,10 +758,26 @@ class AreaDefinition(BaseDefinition):
     def proj_y_coords(self):
         return self.get_proj_coords(data_slice=(slice(None), 0))[1]
 
+    @property
+    def outer_boundary_corners(self):
+        """Returns the lon,lat of the outer edges of the corner points
+        """
+        from pyresample.spherical_geometry import Coordinate
+        proj = _spatial_mp.Proj(**self.proj_dict)
+
+        corner_lons, corner_lats = proj((self.area_extent[0], self.area_extent[2],
+                                         self.area_extent[2], self.area_extent[0]),
+                                        (self.area_extent[3], self.area_extent[3],
+                                         self.area_extent[1], self.area_extent[1]),
+                                        inverse=True)
+        return [Coordinate(corner_lons[0], corner_lats[0]),
+                Coordinate(corner_lons[1], corner_lats[1]),
+                Coordinate(corner_lons[2], corner_lats[2]),
+                Coordinate(corner_lons[3], corner_lats[3])]
 
     def get_lonlats(self, nprocs=None, data_slice=None, cache=False, dtype=None):
         """Returns lon and lat arrays of area.
-    
+
         :Parameters:        
         nprocs : int, optional 
             Number of processor cores to be used.
@@ -741,7 +790,7 @@ class AreaDefinition(BaseDefinition):
         :Returns: 
         (lons, lats) : tuple of numpy arrays
             Grids of area lons and and lats
-        """ 
+        """
 
         if dtype is None:
             dtype = self.dtype
@@ -750,53 +799,54 @@ class AreaDefinition(BaseDefinition):
             #Data is not cached
             if nprocs is None:
                 nprocs = self.nprocs
-                
-            #Proj.4 definition of target area projection
+
+            # Proj.4 definition of target area projection
             if nprocs > 1:
                 target_proj = _spatial_mp.Proj_MP(**self.proj_dict)
             else:
                 target_proj = _spatial_mp.Proj(**self.proj_dict)
-        
-            #Get coordinates of local area as ndarrays
-            target_x, target_y = self.get_proj_coords(data_slice=data_slice, dtype=dtype)
-            
-            #Get corresponding longitude and latitude values
+
+            # Get coordinates of local area as ndarrays
+            target_x, target_y = self.get_proj_coords(
+                data_slice=data_slice, dtype=dtype)
+
+            # Get corresponding longitude and latitude values
             lons, lats = target_proj(target_x, target_y, inverse=True,
                                      nprocs=nprocs)
             lons = np.asanyarray(lons, dtype=dtype)
             lats = np.asanyarray(lats, dtype=dtype)
-            
+
             if cache and data_slice is None:
                 # Cache the result if requested
                 self.lons = lons
                 self.lats = lats
 
-            #Free memory
+            # Free memory
             del(target_x)
             del(target_y)
         else:
             #Data is cached
             if data_slice is None:
-                #Full slice
+                # Full slice
                 lons = self.lons
                 lats = self.lats
             else:
                 lons = self.lons[data_slice]
                 lats = self.lats[data_slice]
-            
+
         return lons, lats
 
     @property
     def proj4_string(self):
         """Returns projection definition as Proj.4 string"""
-        
+
         items = self.proj_dict.items()
-        return '+' + ' +'.join([ t[0] + '=' + t[1] for t in items])         
-    
+        return '+' + ' +'.join([t[0] + '=' + t[1] for t in items])
+
 
 def _get_slice(segments, shape):
     """Generator for segmenting a 1D or 2D array"""
-    
+
     if not (1 <= len(shape) <= 2):
         raise ValueError('Cannot segment array of shape: %s' % str(shape))
     else:
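To illustrate the coordinate accessors above, a minimal sketch (same made-up area as before; cache=True stores the full grids on the definition for reuse):

    from pyresample import geometry

    area_def = geometry.AreaDefinition(
        'test_area', 'Test area', 'test_area',
        {'proj': 'laea', 'lat_0': '60', 'lon_0': '10', 'a': '6371228.0', 'units': 'm'},
        400, 400, (-1000000.0, -1000000.0, 1000000.0, 1000000.0))

    # Full lon/lat grids of the area
    lons, lats = area_def.get_lonlats(cache=True)

    # Projection (metre) coordinates of every pixel centre, plus a single row
    proj_x, proj_y = area_def.get_proj_coords()
    first_row_x, _ = area_def.get_proj_coords(data_slice=(0, slice(None)))

    print(lons.shape, proj_x.shape, first_row_x.shape)  # (400, 400) (400, 400) (400,)
    print(area_def.proj4_string)  # e.g. '+proj=laea +lat_0=60 ...' (key order may vary)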
@@ -812,19 +862,21 @@ def _get_slice(segments, shape):
             start_idx = end_idx
             end_idx = min(start_idx + slice_length, size)
 
+
 def _flatten_cartesian_coords(cartesian_coords):
     """Flatten array to (n, 3) shape"""
-    
-    shape = cartesian_coords.shape 
+
+    shape = cartesian_coords.shape
     if len(shape) > 2:
-        cartesian_coords = cartesian_coords.reshape(shape[0] * 
+        cartesian_coords = cartesian_coords.reshape(shape[0] *
                                                     shape[1], 3)
     return cartesian_coords
 
+
 def _get_highest_level_class(obj1, obj2):
-    if (not issubclass(obj1.__class__, obj2.__class__) or 
-        not issubclass(obj2.__class__, obj1.__class__)):
-        raise TypeError('No common superclass for %s and %s' % 
+    if (not issubclass(obj1.__class__, obj2.__class__) or
+            not issubclass(obj2.__class__, obj1.__class__)):
+        raise TypeError('No common superclass for %s and %s' %
                         (obj1.__class__, obj2.__class__))
 
     if obj1.__class__ == obj2.__class__:
@@ -833,6 +885,4 @@ def _get_highest_level_class(obj1, obj2):
         klass = obj2.__class__
     else:
         klass = obj1.__class__
-    return klass    
-           
-        
+    return klass
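The outer_boundary_corners property added to AreaDefinition above returns spherical_geometry.Coordinate objects in the order upper-left, upper-right, lower-right, lower-left. A minimal usage sketch (same made-up area as in the earlier examples):

    from pyresample import geometry

    area_def = geometry.AreaDefinition(
        'test_area', 'Test area', 'test_area',
        {'proj': 'laea', 'lat_0': '60', 'lon_0': '10', 'a': '6371228.0', 'units': 'm'},
        400, 400, (-1000000.0, -1000000.0, 1000000.0, 1000000.0))

    # Each corner is a Coordinate holding the inverse-projected lon/lat
    for corner in area_def.outer_boundary_corners:
        print(corner.lon, corner.lat)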
diff --git a/pyresample/grid.py b/pyresample/grid.py
index d93494c..52d1033 100644
--- a/pyresample/grid.py
+++ b/pyresample/grid.py
@@ -1,27 +1,34 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
+# Copyright (C) 2010, 2014, 2015  Esben S. Nielsen
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or
 #(at your option) any later version.
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Resample image from one projection to another 
 using nearest neighbour method in cartesian projection coordinate systems"""
 
+from __future__ import absolute_import
+
 import numpy as np
 
-import geometry
-import _spatial_mp
+from . import geometry
+from . import _spatial_mp
+
+try:
+    range = xrange
+except NameError:
+    pass
 
 
 def get_image_from_linesample(row_indices, col_indices, source_image,
@@ -39,42 +46,42 @@ def get_image_from_linesample(row_indices, col_indices, source_image,
             Set undetermined pixels to this value.
             If fill_value is None a masked array is returned 
             with undetermined pixels masked
-    
+
     :Returns: 
     image_data : numpy array
         Resampled image 
     """
-    
-    #mask out non valid row and col indices
+
+    # mask out non valid row and col indices
     row_mask = (row_indices >= 0) * (row_indices < source_image.shape[0])
     col_mask = (col_indices >= 0) * (col_indices < source_image.shape[1])
     valid_rows = row_indices * row_mask
     valid_cols = col_indices * col_mask
 
-    #free memory
+    # free memory
     del(row_indices)
     del(col_indices)
-    
-    #get valid part of image
+
+    # get valid part of image
     target_image = source_image[valid_rows, valid_cols]
-    
-    #free memory
+
+    # free memory
     del(valid_rows)
     del(valid_cols)
 
-    #create mask for valid data points
+    # create mask for valid data points
     valid_data = row_mask * col_mask
     if valid_data.ndim != target_image.ndim:
         for i in range(target_image.ndim - valid_data.ndim):
             valid_data = np.expand_dims(valid_data, axis=valid_data.ndim)
-            
-    #free memory
+
+    # free memory
     del(row_mask)
     del(col_mask)
-    
-    #fill the non valid part of the image
+
+    # fill the non valid part of the image
     if fill_value is not None:
-        target_filled = (target_image * valid_data + 
+        target_filled = (target_image * valid_data +
                          (1 - valid_data) * fill_value)
     else:
         if np.ma.is_masked(target_image):
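A minimal sketch of the index-based sampling implemented above: arrays of row/col indices pick pixels out of a source image, and out-of-range indices receive the fill value (values below are illustrative):

    import numpy as np
    from pyresample import grid

    source_image = np.arange(16.0).reshape((4, 4))

    # Row/col indices into source_image for each target pixel; -1 is out of range
    row_indices = np.array([[0, 1], [3, -1]])
    col_indices = np.array([[0, 1], [3, -1]])

    result = grid.get_image_from_linesample(row_indices, col_indices,
                                            source_image, fill_value=-999.0)
    print(result)  # [[0., 5.], [15., -999.]]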
@@ -82,12 +89,13 @@ def get_image_from_linesample(row_indices, col_indices, source_image,
         else:
             mask = (1 - valid_data)
         target_filled = np.ma.array(target_image, mask=mask)
-    
+
     return target_filled.astype(target_image.dtype)
-    
+
+
 def get_linesample(lons, lats, source_area_def, nprocs=1):
     """Returns index row and col arrays for resampling
-    
+
     :Parameters:
     lons : numpy array 
         Lons. Dimensions must match lats
@@ -97,31 +105,32 @@ def get_linesample(lons, lats, source_area_def, nprocs=1):
         Source definition as AreaDefinition object
     nprocs : int, optional 
         Number of processor cores to be used
-    
+
     :Returns:
     (row_indices, col_indices) : tuple of numpy arrays
         Arrays for resampling area by array indexing
     """
-    
-    #Proj.4 definition of source area projection
+
+    # Proj.4 definition of source area projection
     if nprocs > 1:
         source_proj = _spatial_mp.Proj_MP(**source_area_def.proj_dict)
     else:
         source_proj = _spatial_mp.Proj(**source_area_def.proj_dict)
 
-    #get cartesian projection values from longitude and latitude 
+    # get cartesian projection values from longitude and latitude
     source_x, source_y = source_proj(lons, lats, nprocs=nprocs)
 
-    #Find corresponding pixels (element by element conversion of ndarrays)
-    source_pixel_x = (source_area_def.pixel_offset_x + \
+    # Find corresponding pixels (element by element conversion of ndarrays)
+    source_pixel_x = (source_area_def.pixel_offset_x +
                       source_x / source_area_def.pixel_size_x).astype(np.int32)
-    
-    source_pixel_y = (source_area_def.pixel_offset_y - \
+
+    source_pixel_y = (source_area_def.pixel_offset_y -
                       source_y / source_area_def.pixel_size_y).astype(np.int32)
-                    
+
     return source_pixel_y, source_pixel_x
-                          
-def get_image_from_lonlats(lons, lats, source_area_def, source_image_data, 
+
+
+def get_image_from_lonlats(lons, lats, source_area_def, source_image_data,
                            fill_value=0, nprocs=1):
     """Samples from image based on lon lat arrays 
     using nearest neighbour method in cartesian projection coordinate systems.
@@ -141,20 +150,21 @@ def get_image_from_lonlats(lons, lats, source_area_def, source_image_data,
             with undetermined pixels masked    
     nprocs : int, optional 
         Number of processor cores to be used
-    
+
     :Returns:
     image_data : numpy array 
         Resampled image data
     """
 
-    source_pixel_y, source_pixel_x = get_linesample(lons, lats, 
-                                                    source_area_def, 
+    source_pixel_y, source_pixel_x = get_linesample(lons, lats,
+                                                    source_area_def,
                                                     nprocs=nprocs)
 
-    #Return target image
+    # Return target image
     return get_image_from_linesample(source_pixel_y, source_pixel_x,
                                      source_image_data, fill_value)
 
+
 def get_resampled_image(target_area_def, source_area_def, source_image_data,
                         fill_value=0, nprocs=1, segments=None):
     """Resamples image using nearest neighbour method in cartesian 
@@ -176,12 +186,12 @@ def get_resampled_image(target_area_def, source_area_def, source_image_data,
     segments : {int, None} optional
         Number of segments to use when resampling.
         If set to None an estimate will be calculated. 
-        
+
     :Returns:
     image_data : numpy array 
         Resampled image data    
     """
-    
+
     if not isinstance(target_area_def, geometry.AreaDefinition):
         raise TypeError('target_area_def must be of type AreaDefinition')
     if not isinstance(source_area_def, geometry.AreaDefinition):
@@ -191,7 +201,7 @@ def get_resampled_image(target_area_def, source_area_def, source_image_data,
         raise TypeError('source_image must be of type ndarray'
                         ' or a masked array.')
 
-    #Calculate number of segments if needed 
+    # Calculate number of segments if needed
     if segments is None:
         rows = target_area_def.y_size
         cut_off = 500
@@ -199,37 +209,32 @@ def get_resampled_image(target_area_def, source_area_def, source_image_data,
             segments = int(rows / cut_off)
         else:
             segments = 1
-    
-    
+
     if segments > 1:
-        #Iterate through segments        
-        for i, target_slice in enumerate(geometry._get_slice(segments,  
-                                                  target_area_def.shape)):
-            
-            #Select data from segment with slice
-            lons, lats = target_area_def.get_lonlats(nprocs=nprocs, data_slice=target_slice)
-            
-            #Calculate partial result
-            next_result = get_image_from_lonlats(lons, lats, source_area_def, 
-                                                 source_image_data, 
+        # Iterate through segments
+        for i, target_slice in enumerate(geometry._get_slice(segments,
+                                                             target_area_def.shape)):
+
+            # Select data from segment with slice
+            lons, lats = target_area_def.get_lonlats(
+                nprocs=nprocs, data_slice=target_slice)
+
+            # Calculate partial result
+            next_result = get_image_from_lonlats(lons, lats, source_area_def,
+                                                 source_image_data,
                                                  fill_value, nprocs)
-                
-            #Build result iteratively 
+
+            # Build result iteratively
             if i == 0:
-                #First iteration
+                # First iteration
                 result = next_result
-            else:            
+            else:
                 result = np.row_stack((result, next_result))
-        
+
         return result
     else:
-        #Get lon lat arrays of target area
+        # Get lon lat arrays of target area
         lons, lats = target_area_def.get_lonlats(nprocs)
-        #Get target image
-        return get_image_from_lonlats(lons, lats, source_area_def, 
+        # Get target image
+        return get_image_from_lonlats(lons, lats, source_area_def,
                                       source_image_data, fill_value, nprocs)
-
-
-
-
-        
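As a usage sketch for get_resampled_image() above, resampling data from a fine to a coarse area in the same projection (both areas are made-up examples, not part of this patch):

    import numpy as np
    from pyresample import geometry, grid

    proj_dict = {'proj': 'laea', 'lat_0': '60', 'lon_0': '10',
                 'a': '6371228.0', 'units': 'm'}
    extent = (-1000000.0, -1000000.0, 1000000.0, 1000000.0)
    source_area = geometry.AreaDefinition('src', 'Source area', 'src',
                                          proj_dict, 200, 200, extent)
    target_area = geometry.AreaDefinition('dst', 'Target area', 'dst',
                                          proj_dict, 100, 100, extent)

    data = np.random.rand(200, 200)
    result = grid.get_resampled_image(target_area, source_area, data)
    print(result.shape)  # (100, 100)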
diff --git a/pyresample/image.py b/pyresample/image.py
index b813874..5622a3f 100644
--- a/pyresample/image.py
+++ b/pyresample/image.py
@@ -1,31 +1,34 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2015  Esben S. Nielsen
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Handles resampling of images with assigned geometry definitions"""
 
+from __future__ import absolute_import
+
 import numpy as np
 
-import geometry, grid, kd_tree
+from . import geometry, grid, kd_tree
 
 
 class ImageContainer(object):
+
     """Holds image with geometry definition. 
     Allows indexing with linesample arrays.
-    
+
     :Parameters:
     image_data : numpy array 
         Image data
@@ -37,7 +40,7 @@ class ImageContainer(object):
         with undetermined pixels masked
     nprocs : int, optional 
         Number of processor cores to be used
-        
+
     :Attributes:
     image_data : numpy array 
         Image data
@@ -48,21 +51,21 @@ class ImageContainer(object):
     nprocs : int
         Number of processor cores to be used for geometry operations
     """
-        
+
     def __init__(self, image_data, geo_def, fill_value=0, nprocs=1):
         if not isinstance(image_data, (np.ndarray, np.ma.core.MaskedArray)):
             raise TypeError('image_data must be either an ndarray'
                             ' or a masked array')
-        elif ((image_data.ndim > geo_def.ndim + 1) or 
+        elif ((image_data.ndim > geo_def.ndim + 1) or
               (image_data.ndim < geo_def.ndim)):
-                raise ValueError(('Unexpected number of dimensions for '
-                                 'image_data: ') % image_data.ndim)
+            raise ValueError(('Unexpected number of dimensions for '
+                              'image_data: %s') % image_data.ndim)
         for i, size in enumerate(geo_def.shape):
             if image_data.shape[i] != size:
                 raise ValueError(('Size mismatch for image_data. Expected '
                                   'size %s for dimension %s and got %s') %
-                                  (size, i, image_data.shape[i])) 
-        
+                                 (size, i, image_data.shape[i]))
+
         self.shape = geo_def.shape
         self.size = geo_def.size
         self.ndim = geo_def.ndim
@@ -73,18 +76,18 @@ class ImageContainer(object):
             self.channels = 1
         self.geo_def = geo_def
         self.fill_value = fill_value
-        self.nprocs = nprocs        
-        
+        self.nprocs = nprocs
+
     def __str__(self):
-        return 'Image:\n %s'%self.image_data.__str__()
+        return 'Image:\n %s' % self.image_data.__str__()
 
-    def __repr__(self): 
+    def __repr__(self):
         return self.image_data.__repr__()
-        
+
     def resample(self, target_geo_def):
         """Base method for resampling"""
-        
-        raise NotImplementedError('Method "resample" is not implemented ' 
+
+        raise NotImplementedError('Method "resample" is not implemented '
                                   'in class %s' % self.__class__.__name__)
 
     def get_array_from_linesample(self, row_indices, col_indices):
@@ -95,32 +98,33 @@ class ImageContainer(object):
             Row indices. Dimensions must match col_indices
         col_indices : numpy array 
             Col indices. Dimensions must match row_indices 
-        
+
         :Returns: 
         image_data : numpy_array
             Resampled image data
         """
-        
+
         if self.geo_def.ndim != 2:
-            raise TypeError('Resampling from linesamples only makes sense ' 
+            raise TypeError('Resampling from linesamples only makes sense '
                             'on 2D data')
-        
+
         return grid.get_image_from_linesample(row_indices, col_indices,
-                                              self.image_data, 
+                                              self.image_data,
                                               self.fill_value)
-        
+
     def get_array_from_neighbour_info(self, *args, **kwargs):
         """Base method for resampling from preprocessed data."""
-        
+
         raise NotImplementedError('Method "get_array_from_neighbour_info" is '
-                                  'not implemented in class %s' % 
+                                  'not implemented in class %s' %
                                   self.__class__.__name__)
 
 
 class ImageContainerQuick(ImageContainer):
+
     """Holds image with area definition. '
     Allows quick resampling within area.
-    
+
     :Parameters:
     image_data : numpy array 
         Image data
@@ -135,7 +139,7 @@ class ImageContainerQuick(ImageContainer):
     segments : {int, None}
         Number of segments to use when resampling.
         If set to None an estimate will be calculated
-        
+
     :Attributes:
     image_data : numpy array 
         Image data
@@ -151,29 +155,29 @@ class ImageContainerQuick(ImageContainer):
         Number of segments to use when resampling      
     """
 
-    def __init__(self, image_data, geo_def, fill_value=0, nprocs=1, 
+    def __init__(self, image_data, geo_def, fill_value=0, nprocs=1,
                  segments=None):
         if not isinstance(geo_def, geometry.AreaDefinition):
             raise TypeError('area_def must be of type '
-                            'geometry.AreaDefinition')    
-        super(ImageContainerQuick, self).__init__(image_data, geo_def, 
-                                                  fill_value=fill_value, 
+                            'geometry.AreaDefinition')
+        super(ImageContainerQuick, self).__init__(image_data, geo_def,
+                                                  fill_value=fill_value,
                                                   nprocs=nprocs)
         self.segments = segments
 
     def resample(self, target_area_def):
         """Resamples image to area definition using nearest neighbour 
         approach in projection coordinates.
-        
+
         :Parameters:
         target_area_def : object 
             Target area definition as AreaDefinition object
-        
+
         :Returns: 
         image_container : object
             ImageContainerQuick object of resampled area   
-        """        
-        
+        """
+
         resampled_image = grid.get_resampled_image(target_area_def,
                                                    self.geo_def,
                                                    self.image_data,
@@ -181,15 +185,16 @@ class ImageContainerQuick(ImageContainer):
                                                    nprocs=self.nprocs,
                                                    segments=self.segments)
 
-        return ImageContainerQuick(resampled_image, target_area_def, 
+        return ImageContainerQuick(resampled_image, target_area_def,
                                    fill_value=self.fill_value,
                                    nprocs=self.nprocs, segments=self.segments)
-    
+
 
 class ImageContainerNearest(ImageContainer):
+
     """Holds image with geometry definition. 
     Allows nearest neighbour resampling to new geometry definition.
-    
+
     :Parameters:
     image_data : numpy array 
         Image data
@@ -212,7 +217,7 @@ class ImageContainerNearest(ImageContainer):
     segments : {int, None}
         Number of segments to use when resampling.
         If set to None an estimate will be calculated
-    
+
     :Attributes:
     image_data : numpy array 
         Image data
@@ -232,50 +237,50 @@ class ImageContainerNearest(ImageContainer):
         Number of segments to use when resampling   
     """
 
-    def __init__(self, image_data, geo_def, radius_of_influence, epsilon=0, 
+    def __init__(self, image_data, geo_def, radius_of_influence, epsilon=0,
                  fill_value=0, reduce_data=True, nprocs=1, segments=None):
-        super(ImageContainerNearest, self).__init__(image_data, geo_def, 
-                                                    fill_value=fill_value, 
+        super(ImageContainerNearest, self).__init__(image_data, geo_def,
+                                                    fill_value=fill_value,
                                                     nprocs=nprocs)
         self.radius_of_influence = radius_of_influence
         self.epsilon = epsilon
         self.reduce_data = reduce_data
         self.segments = segments
-        
+
     def resample(self, target_geo_def):
         """Resamples image to area definition using nearest neighbour 
         approach
-        
+
         :Parameters:
         target_geo_def : object 
             Target geometry definition         
-          
+
         :Returns: 
         image_container : object
             ImageContainerNearest object of resampled geometry   
         """
-        
+
         if self.image_data.ndim > 2 and self.ndim > 1:
-            image_data = self.image_data.reshape(self.image_data.shape[0] * 
-                                                 self.image_data.shape[1], 
+            image_data = self.image_data.reshape(self.image_data.shape[0] *
+                                                 self.image_data.shape[1],
                                                  self.image_data.shape[2])
         else:
             image_data = self.image_data.ravel()
-                   
+
         resampled_image = \
-                kd_tree.resample_nearest(self.geo_def, 
-                                         image_data, 
-                                         target_geo_def,
-                                         self.radius_of_influence, 
-                                         epsilon=self.epsilon,
-                                         fill_value=self.fill_value, 
-                                         nprocs=self.nprocs,
-                                         reduce_data=self.reduce_data,
-                                         segments=self.segments)
-        return ImageContainerNearest(resampled_image, target_geo_def, 
-                                     self.radius_of_influence, 
+            kd_tree.resample_nearest(self.geo_def,
+                                     image_data,
+                                     target_geo_def,
+                                     self.radius_of_influence,
+                                     epsilon=self.epsilon,
+                                     fill_value=self.fill_value,
+                                     nprocs=self.nprocs,
+                                     reduce_data=self.reduce_data,
+                                     segments=self.segments)
+        return ImageContainerNearest(resampled_image, target_geo_def,
+                                     self.radius_of_influence,
                                      epsilon=self.epsilon,
-                                     fill_value=self.fill_value, 
-                                     reduce_data=self.reduce_data, 
+                                     fill_value=self.fill_value,
+                                     reduce_data=self.reduce_data,
                                      nprocs=self.nprocs,
-                                     segments=self.segments)
\ No newline at end of file
+                                     segments=self.segments)
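To round off the image module, a minimal sketch of swath-to-area resampling with ImageContainerNearest (the swath geometry and area below are illustrative assumptions, not part of this patch):

    import numpy as np
    from pyresample import geometry, image

    # A small geolocated swath and its data
    lons = np.linspace(5.0, 15.0, 500).reshape((50, 10))
    lats = np.linspace(55.0, 65.0, 500).reshape((50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    data = np.random.rand(50, 10)

    area_def = geometry.AreaDefinition(
        'test_area', 'Test area', 'test_area',
        {'proj': 'laea', 'lat_0': '60', 'lon_0': '10', 'a': '6371228.0', 'units': 'm'},
        100, 100, (-500000.0, -500000.0, 500000.0, 500000.0))

    container = image.ImageContainerNearest(data, swath_def,
                                            radius_of_influence=50000)
    resampled = container.resample(area_def)
    print(resampled.image_data.shape)  # (100, 100)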
diff --git a/pyresample/kd_tree.py b/pyresample/kd_tree.py
index 4302a52..ded8587 100644
--- a/pyresample/kd_tree.py
+++ b/pyresample/kd_tree.py
@@ -1,30 +1,40 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
+# Copyright (C) 2010, 2014, 2015  Esben S. Nielsen
+#                           Adam.Dybbroe
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or
 #(at your option) any later version.
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Handles reprojection of geolocated data. Several types of resampling are
+supported"""
 
-"""Handles reprojection of geolocated data. Several types of resampling are supported"""
+from __future__ import absolute_import
 
 import types
 import warnings
+import sys
 
 import numpy as np
 
-import geometry
-import data_reduce
-import _spatial_mp
+from . import geometry
+from . import data_reduce
+from . import _spatial_mp
+
+if sys.version < '3':
+    range = xrange
+else:
+    long = int
 
 kd_tree_name = None
 try:
@@ -33,19 +43,22 @@ try:
 except ImportError:
     try:
         import scipy.spatial as sp
-        kd_tree_name = 'scipy.spatial'        
+        kd_tree_name = 'scipy.spatial'
     except ImportError:
         raise ImportError('Either pykdtree or scipy must be available')
-        
+
+
 class EmptyResult(Exception):
     pass
 
+
 def which_kdtree():
     """Returns the name of the kdtree used for resampling
     """
-    
+
     return kd_tree_name
 
+
 def resample_nearest(source_geo_def, data, target_geo_def,
                      radius_of_influence, epsilon=0,
                      fill_value=0, reduce_data=True, nprocs=1, segments=None):
@@ -76,17 +89,18 @@ def resample_nearest(source_geo_def, data, target_geo_def,
     segments : {int, None}
         Number of segments to use when resampling.
         If set to None an estimate will be calculated
-               
+
     :Returns: 
     data : numpy array 
         Source data resampled to target geometry
     """
-    
+
     return _resample(source_geo_def, data, target_geo_def, 'nn',
                      radius_of_influence, neighbours=1,
                      epsilon=epsilon, fill_value=fill_value,
                      reduce_data=reduce_data, nprocs=nprocs, segments=segments)
 
+
 def resample_gauss(source_geo_def, data, target_geo_def,
                    radius_of_influence, sigmas, neighbours=8, epsilon=0,
                    fill_value=0, reduce_data=True, nprocs=1, segments=None, with_uncert=False):
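As a usage sketch for the resampling entry points above (swath and area values are illustrative assumptions; radius_of_influence and sigmas are in metres):

    import numpy as np
    from pyresample import geometry, kd_tree

    lons = np.linspace(5.0, 15.0, 1000).reshape((100, 10))
    lats = np.linspace(55.0, 65.0, 1000).reshape((100, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    data = np.random.rand(100, 10)

    area_def = geometry.AreaDefinition(
        'test_area', 'Test area', 'test_area',
        {'proj': 'laea', 'lat_0': '60', 'lon_0': '10', 'a': '6371228.0', 'units': 'm'},
        100, 100, (-500000.0, -500000.0, 500000.0, 500000.0))

    # Nearest neighbour: one source value per target pixel within 50 km
    res_nn = kd_tree.resample_nearest(swath_def, data, area_def,
                                      radius_of_influence=50000, fill_value=None)

    # Gaussian weighting of the nearest neighbours
    res_gauss = kd_tree.resample_gauss(swath_def, data, area_def,
                                       radius_of_influence=50000, sigmas=25000)
    print(res_nn.shape, res_gauss.shape)  # (100, 100) (100, 100)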
@@ -125,7 +139,7 @@ def resample_gauss(source_geo_def, data, target_geo_def,
         If set to None an estimate will be calculated
     with_uncert : bool, optional
         Calculate uncertainty estimates
-    
+
     :Returns:
     data : numpy array (default)
         Source data resampled to target geometry
@@ -137,37 +151,37 @@ def resample_gauss(source_geo_def, data, target_geo_def,
     """
 
     def gauss(sigma):
-        #Return gauss function object
-        return lambda r: np.exp(-r**2 / float(sigma)**2)
-    
-    #Build correct sigma argument
+        # Return gauss function object
+        return lambda r: np.exp(-r ** 2 / float(sigma) ** 2)
+
+    # Build correct sigma argument
     is_multi_channel = False
     try:
         sigmas.__iter__()
         sigma_list = sigmas
         is_multi_channel = True
     except:
-        sigma_list = [sigmas] 
-        
-        
+        sigma_list = [sigmas]
+
     for sigma in sigma_list:
         if not isinstance(sigma, (long, int, float)):
-            raise TypeError('sigma must be number')    
-    
-    #Get gauss function objects
+            raise TypeError('sigma must be number')
+
+    # Get gauss function objects
     if is_multi_channel:
-        weight_funcs = map(gauss, sigma_list) 
+        weight_funcs = list(map(gauss, sigma_list))
     else:
         weight_funcs = gauss(sigmas)
-        
+
     return _resample(source_geo_def, data, target_geo_def, 'custom',
                      radius_of_influence, neighbours=neighbours,
                      epsilon=epsilon, weight_funcs=weight_funcs, fill_value=fill_value,
                      reduce_data=reduce_data, nprocs=nprocs, segments=segments, with_uncert=with_uncert)
 
+
 def resample_custom(source_geo_def, data, target_geo_def,
                     radius_of_influence, weight_funcs, neighbours=8,
-                    epsilon=0, fill_value=0, reduce_data=True, nprocs=1, 
+                    epsilon=0, fill_value=0, reduce_data=True, nprocs=1,
                     segments=None, with_uncert=False):
     """Resamples data using kd-tree custom radial weighting neighbour approach
 
@@ -203,7 +217,7 @@ def resample_custom(source_geo_def, data, target_geo_def,
     segments : {int, None}
         Number of segments to use when resampling.
         If set to None an estimate will be calculated
-    
+
     :Returns:
     data : numpy array (default)
         Source data resampled to target geometry
@@ -217,47 +231,49 @@ def resample_custom(source_geo_def, data, target_geo_def,
     try:
         for weight_func in weight_funcs:
             if not isinstance(weight_func, types.FunctionType):
-                raise TypeError('weight_func must be function object')        
+                raise TypeError('weight_func must be function object')
     except:
         if not isinstance(weight_funcs, types.FunctionType):
             raise TypeError('weight_func must be function object')
-    
+
     return _resample(source_geo_def, data, target_geo_def, 'custom',
                      radius_of_influence, neighbours=neighbours,
                      epsilon=epsilon, weight_funcs=weight_funcs,
                      fill_value=fill_value, reduce_data=reduce_data,
                      nprocs=nprocs, segments=segments, with_uncert=with_uncert)
 
+
 def _resample(source_geo_def, data, target_geo_def, resample_type,
-             radius_of_influence, neighbours=8, epsilon=0, weight_funcs=None,
-             fill_value=0, reduce_data=True, nprocs=1, segments=None, with_uncert=False):
-    """Resamples swath using kd-tree approach"""    
-                
+              radius_of_influence, neighbours=8, epsilon=0, weight_funcs=None,
+              fill_value=0, reduce_data=True, nprocs=1, segments=None, with_uncert=False):
+    """Resamples swath using kd-tree approach"""
+
     valid_input_index, valid_output_index, index_array, distance_array = \
-                                 get_neighbour_info(source_geo_def, 
-                                                    target_geo_def, 
-                                                    radius_of_influence, 
-                                                    neighbours=neighbours, 
-                                                    epsilon=epsilon, 
-                                                    reduce_data=reduce_data, 
-                                                    nprocs=nprocs,
-                                                    segments=segments)
-    
-    
-    return get_sample_from_neighbour_info(resample_type, 
-                                         target_geo_def.shape, 
-                                         data, valid_input_index, 
-                                         valid_output_index, 
-                                         index_array, 
-                                         distance_array=distance_array, 
-                                         weight_funcs=weight_funcs, 
-                                         fill_value=fill_value, 
-                                         with_uncert=with_uncert)
-    
-def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence, 
-                       neighbours=8, epsilon=0, reduce_data=True, nprocs=1, segments=None):
+        get_neighbour_info(source_geo_def,
+                           target_geo_def,
+                           radius_of_influence,
+                           neighbours=neighbours,
+                           epsilon=epsilon,
+                           reduce_data=reduce_data,
+                           nprocs=nprocs,
+                           segments=segments)
+
+    return get_sample_from_neighbour_info(resample_type,
+                                          target_geo_def.shape,
+                                          data, valid_input_index,
+                                          valid_output_index,
+                                          index_array,
+                                          distance_array=distance_array,
+                                          weight_funcs=weight_funcs,
+                                          fill_value=fill_value,
+                                          with_uncert=with_uncert)
+
+
+def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence,
+                       neighbours=8, epsilon=0, reduce_data=True,
+                       nprocs=1, segments=None):
     """Returns neighbour info
-    
+
     :Parameters:
     source_geo_def : object
         Geometry definition of source
@@ -282,7 +298,7 @@ def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence,
     segments : {int, None}
         Number of segments to use when resampling.
         If set to None an estimate will be calculated
-            
+
     :Returns:
     (valid_input_index, valid_output_index, 
     index_array, distance_array) : tuple of numpy arrays
@@ -290,7 +306,7 @@ def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence,
     """
 
     if source_geo_def.size < neighbours:
-        warnings.warn('Searching for %s neighbours in %s data points' % 
+        warnings.warn('Searching for %s neighbours in %s data points' %
                       (neighbours, source_geo_def.size))
 
     if segments is None:
@@ -299,155 +315,157 @@ def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence,
             segments = int(target_geo_def.size / cut_off)
         else:
             segments = 1
-    
-    #Find reduced input coordinate set
-    valid_input_index, source_lons, source_lats = _get_valid_input_index(source_geo_def, target_geo_def, 
-                                               reduce_data, 
-                                               radius_of_influence, 
-                                               nprocs=nprocs)    
-    
-    #Create kd-tree
+
+    # Find reduced input coordinate set
+    valid_input_index, source_lons, source_lats = _get_valid_input_index(source_geo_def, target_geo_def,
+                                                                         reduce_data,
+                                                                         radius_of_influence,
+                                                                         nprocs=nprocs)
+
+    # Create kd-tree
     try:
-        resample_kdtree = _create_resample_kdtree(source_lons, source_lats, 
-                                                  valid_input_index, 
+        resample_kdtree = _create_resample_kdtree(source_lons, source_lats,
+                                                  valid_input_index,
                                                   nprocs=nprocs)
     except EmptyResult:
-        #Handle if all input data is reduced away
-         valid_output_index, index_array, distance_array = \
-             _create_empty_info(source_geo_def, target_geo_def, neighbours)
-         return (valid_input_index, valid_output_index, index_array, 
-                 distance_array)
-     
+        # Handle if all input data is reduced away
+        valid_output_index, index_array, distance_array = \
+            _create_empty_info(source_geo_def, target_geo_def, neighbours)
+        return (valid_input_index, valid_output_index, index_array,
+                distance_array)
+
     if segments > 1:
-        #Iterate through segments     
-        for i, target_slice in enumerate(geometry._get_slice(segments, 
-                                                   target_geo_def.shape)):
+        # Iterate through segments
+        for i, target_slice in enumerate(geometry._get_slice(segments,
+                                                             target_geo_def.shape)):
 
-            #Query on slice of target coordinates
+            # Query on slice of target coordinates
             next_voi, next_ia, next_da = \
-                    _query_resample_kdtree(resample_kdtree, source_geo_def, 
-                                           target_geo_def, 
-                                           radius_of_influence, target_slice,
-                                           neighbours=neighbours, 
-                                           epsilon=epsilon, 
-                                           reduce_data=reduce_data, 
-                                           nprocs=nprocs)
-
-            #Build result iteratively
+                _query_resample_kdtree(resample_kdtree, source_geo_def,
+                                       target_geo_def,
+                                       radius_of_influence, target_slice,
+                                       neighbours=neighbours,
+                                       epsilon=epsilon,
+                                       reduce_data=reduce_data,
+                                       nprocs=nprocs)
+
+            # Build result iteratively
             if i == 0:
-                #First iteration
+                # First iteration
                 valid_output_index = next_voi
                 index_array = next_ia
                 distance_array = next_da
-            else:    
+            else:
                 valid_output_index = np.append(valid_output_index, next_voi)
                 if neighbours > 1:
                     index_array = np.row_stack((index_array, next_ia))
                     distance_array = np.row_stack((distance_array, next_da))
                 else:
                     index_array = np.append(index_array, next_ia)
-                    distance_array = np.append(distance_array, next_da)        
+                    distance_array = np.append(distance_array, next_da)
     else:
-        #Query kd-tree with full target coordinate set        
+        # Query kd-tree with full target coordinate set
         full_slice = slice(None)
         valid_output_index, index_array, distance_array = \
-                    _query_resample_kdtree(resample_kdtree, source_geo_def, 
-                                           target_geo_def, 
-                                           radius_of_influence, full_slice,
-                                           neighbours=neighbours, 
-                                           epsilon=epsilon, 
-                                           reduce_data=reduce_data, 
-                                           nprocs=nprocs)
-    
+            _query_resample_kdtree(resample_kdtree, source_geo_def,
+                                   target_geo_def,
+                                   radius_of_influence, full_slice,
+                                   neighbours=neighbours,
+                                   epsilon=epsilon,
+                                   reduce_data=reduce_data,
+                                   nprocs=nprocs)
+
     # Check if number of neighbours is potentially too low
     if neighbours > 1:
         if not np.all(np.isinf(distance_array[:, -1])):
             warnings.warn(('Possibly more than %s neighbours '
-                           'within %s m for some data points') % 
+                           'within %s m for some data points') %
                           (neighbours, radius_of_influence))
-         
-    return valid_input_index, valid_output_index, index_array, distance_array           
 
-def _get_valid_input_index(source_geo_def, target_geo_def, reduce_data, 
+    return valid_input_index, valid_output_index, index_array, distance_array
+
+
+def _get_valid_input_index(source_geo_def, target_geo_def, reduce_data,
                            radius_of_influence, nprocs=1):
     """Find indices of reduced inputput data"""
-    
+
     source_lons, source_lats = source_geo_def.get_lonlats(nprocs=nprocs)
     source_lons = source_lons.ravel()
     source_lats = source_lats.ravel()
-    
+
     if source_lons.size == 0 or source_lats.size == 0:
         raise ValueError('Cannot resample empty data set')
     elif source_lons.size != source_lats.size or \
             source_lons.shape != source_lats.shape:
         raise ValueError('Mismatch between lons and lats')
-    
-    #Remove illegal values
-    valid_data = ((source_lons >= -180) & (source_lons <= 180) & 
+
+    # Remove illegal values
+    valid_data = ((source_lons >= -180) & (source_lons <= 180) &
                   (source_lats <= 90) & (source_lats >= -90))
     valid_input_index = np.ones(source_geo_def.size, dtype=np.bool)
-    
+
     if reduce_data:
-        #Reduce dataset 
-        if (isinstance(source_geo_def, geometry.CoordinateDefinition) and 
-            isinstance(target_geo_def, (geometry.GridDefinition, 
-                                       geometry.AreaDefinition))) or \
-           (isinstance(source_geo_def, (geometry.GridDefinition, 
+        # Reduce dataset
+        if (isinstance(source_geo_def, geometry.CoordinateDefinition) and
+            isinstance(target_geo_def, (geometry.GridDefinition,
+                                        geometry.AreaDefinition))) or \
+           (isinstance(source_geo_def, (geometry.GridDefinition,
                                         geometry.AreaDefinition)) and
-            isinstance(target_geo_def, (geometry.GridDefinition, 
+            isinstance(target_geo_def, (geometry.GridDefinition,
                                         geometry.AreaDefinition))):
-            #Resampling from swath to grid or from grid to grid
+            # Resampling from swath to grid or from grid to grid
             lonlat_boundary = target_geo_def.get_boundary_lonlats()
             valid_input_index = \
                 data_reduce.get_valid_index_from_lonlat_boundaries(
-                                            lonlat_boundary[0],
-                                            lonlat_boundary[1], 
-                                            source_lons, source_lats, 
-                                            radius_of_influence)
-    
-    #Combine reduced and legal values
+                    lonlat_boundary[0],
+                    lonlat_boundary[1],
+                    source_lons, source_lats,
+                    radius_of_influence)
+
+    # Combine reduced and legal values
     valid_input_index = (valid_data & valid_input_index)
-    
-    
+
     if(isinstance(valid_input_index, np.ma.core.MaskedArray)):
-        #Make sure valid_input_index is not a masked array
+        # Make sure valid_input_index is not a masked array
         valid_input_index = valid_input_index.filled(False)
-    
+
     return valid_input_index, source_lons, source_lats
 
-def _get_valid_output_index(source_geo_def, target_geo_def, target_lons, 
+
+def _get_valid_output_index(source_geo_def, target_geo_def, target_lons,
                             target_lats, reduce_data, radius_of_influence):
     """Find indices of reduced output data"""
-    
+
     valid_output_index = np.ones(target_lons.size, dtype=np.bool)
-    
+
     if reduce_data:
-        if isinstance(source_geo_def, (geometry.GridDefinition, 
-                                         geometry.AreaDefinition)) and \
-             isinstance(target_geo_def, geometry.CoordinateDefinition):
-            #Resampling from grid to swath
+        if isinstance(source_geo_def, (geometry.GridDefinition,
+                                       geometry.AreaDefinition)) and \
+                isinstance(target_geo_def, geometry.CoordinateDefinition):
+            # Resampling from grid to swath
             lonlat_boundary = source_geo_def.get_boundary_lonlats()
             valid_output_index = \
                 data_reduce.get_valid_index_from_lonlat_boundaries(
-                                            lonlat_boundary[0],
-                                            lonlat_boundary[1], 
-                                            target_lons, 
-                                            target_lats, 
-                                            radius_of_influence)
+                    lonlat_boundary[0],
+                    lonlat_boundary[1],
+                    target_lons,
+                    target_lats,
+                    radius_of_influence)
             valid_output_index = valid_output_index.astype(np.bool)
-            
-    #Remove illegal values
-    valid_out = ((target_lons >= -180) & (target_lons <= 180) & 
-                  (target_lats <= 90) & (target_lats >= -90))
-    
-    #Combine reduced and legal values
+
+    # Remove illegal values
+    valid_out = ((target_lons >= -180) & (target_lons <= 180) &
+                 (target_lats <= 90) & (target_lats >= -90))
+
+    # Combine reduced and legal values
     valid_output_index = (valid_output_index & valid_out)
-    
+
     return valid_output_index
-        
+
+
 def _create_resample_kdtree(source_lons, source_lats, valid_input_index, nprocs=1):
     """Set up kd tree on input"""
-    
+
     """
     if not isinstance(source_geo_def, geometry.BaseDefinition):
         raise TypeError('source_geo_def must be of geometry type')
@@ -457,100 +475,104 @@ def _create_resample_kdtree(source_lons, source_lats, valid_input_index, nprocs=
     input_coords = geometry._flatten_cartesian_coords(source_cartesian_coords)
     input_coords = input_coords[valid_input_index]
     """
-    
+
     source_lons_valid = source_lons[valid_input_index]
     source_lats_valid = source_lats[valid_input_index]
-    
+
     if nprocs > 1:
         cartesian = _spatial_mp.Cartesian_MP(nprocs)
     else:
         cartesian = _spatial_mp.Cartesian()
 
-    input_coords = cartesian.transform_lonlats(source_lons_valid, source_lats_valid)
-    
+    input_coords = cartesian.transform_lonlats(
+        source_lons_valid, source_lats_valid)
+
     if input_coords.size == 0:
         raise EmptyResult('No valid data points in input data')
 
-    #Build kd-tree on input
+    # Build kd-tree on input
     if kd_tree_name == 'pykdtree':
         resample_kdtree = KDTree(input_coords)
-    elif nprocs > 1:        
+    elif nprocs > 1:
         resample_kdtree = _spatial_mp.cKDTree_MP(input_coords,
                                                  nprocs=nprocs)
     else:
         resample_kdtree = sp.cKDTree(input_coords)
-        
+
     return resample_kdtree
 
-def _query_resample_kdtree(resample_kdtree, source_geo_def, target_geo_def, 
-                        radius_of_influence, data_slice,
-                       neighbours=8, epsilon=0, reduce_data=True, nprocs=1):    
+
+def _query_resample_kdtree(resample_kdtree, source_geo_def, target_geo_def,
+                           radius_of_influence, data_slice,
+                           neighbours=8, epsilon=0, reduce_data=True, nprocs=1):
     """Query kd-tree on slice of target coordinates"""
 
-    #Check validity of input    
+    # Check validity of input
     if not isinstance(target_geo_def, geometry.BaseDefinition):
-        raise TypeError('target_geo_def must be of geometry type')    
+        raise TypeError('target_geo_def must be of geometry type')
     elif not isinstance(radius_of_influence, (long, int, float)):
         raise TypeError('radius_of_influence must be number')
     elif not isinstance(neighbours, int):
         raise TypeError('neighbours must be integer')
     elif not isinstance(epsilon, (long, int, float)):
         raise TypeError('epsilon must be number')
-    
-    #Get sliced target coordinates
-    target_lons, target_lats = target_geo_def.get_lonlats(nprocs=nprocs, 
-                                                           data_slice=data_slice, dtype=source_geo_def.dtype)
-    
-    #Find indiced of reduced target coordinates
-    valid_output_index = _get_valid_output_index(source_geo_def, 
-                                                 target_geo_def, 
-                                                 target_lons.ravel(), 
-                                                 target_lats.ravel(), 
-                                                 reduce_data, 
+
+    # Get sliced target coordinates
+    target_lons, target_lats = target_geo_def.get_lonlats(nprocs=nprocs,
+                                                          data_slice=data_slice, dtype=source_geo_def.dtype)
+
+    # Find indices of reduced target coordinates
+    valid_output_index = _get_valid_output_index(source_geo_def,
+                                                 target_geo_def,
+                                                 target_lons.ravel(),
+                                                 target_lats.ravel(),
+                                                 reduce_data,
                                                  radius_of_influence)
 
-    #Get cartesian target coordinates and select reduced set
+    # Get cartesian target coordinates and select reduced set
     if nprocs > 1:
         cartesian = _spatial_mp.Cartesian_MP(nprocs)
     else:
         cartesian = _spatial_mp.Cartesian()
-        
-    target_lons_valid = target_lons.ravel()[valid_output_index] 
+
+    target_lons_valid = target_lons.ravel()[valid_output_index]
     target_lats_valid = target_lats.ravel()[valid_output_index]
-    
-    output_coords = cartesian.transform_lonlats(target_lons_valid, target_lats_valid) 
-    
-    #Query kd-tree        
-    distance_array, index_array = resample_kdtree.query(output_coords, 
+
+    output_coords = cartesian.transform_lonlats(
+        target_lons_valid, target_lats_valid)
+
+    # Query kd-tree
+    distance_array, index_array = resample_kdtree.query(output_coords,
                                                         k=neighbours,
                                                         eps=epsilon,
-                                                        distance_upper_bound=
-                                                        radius_of_influence)
-       
+                                                        distance_upper_bound=radius_of_influence)
+
     return valid_output_index, index_array, distance_array
 
+
 def _create_empty_info(source_geo_def, target_geo_def, neighbours):
     """Creates dummy info for empty result set"""
-    
+
     valid_output_index = np.ones(target_geo_def.size, dtype=np.bool)
     if neighbours > 1:
-        index_array = (np.ones((target_geo_def.size, neighbours), 
+        index_array = (np.ones((target_geo_def.size, neighbours),
                                dtype=np.int32) * source_geo_def.size)
         distance_array = np.ones((target_geo_def.size, neighbours))
     else:
-        index_array = (np.ones(target_geo_def.size, dtype=np.int32) * 
+        index_array = (np.ones(target_geo_def.size, dtype=np.int32) *
                        source_geo_def.size)
         distance_array = np.ones(target_geo_def.size)
-        
-    return valid_output_index, index_array, distance_array 
-    
-def get_sample_from_neighbour_info(resample_type, output_shape, data, 
-                                   valid_input_index, valid_output_index, 
-                                   index_array, distance_array=None, 
-                                   weight_funcs=None, fill_value=0, 
+
+    return valid_output_index, index_array, distance_array
+
+
+def get_sample_from_neighbour_info(resample_type, output_shape, data,
+                                   valid_input_index, valid_output_index,
+                                   index_array, distance_array=None,
+                                   weight_funcs=None, fill_value=0,
                                    with_uncert=False):
     """Resamples swath based on neighbour info
-    
+
     :Parameters:
     resample_type : {'nn', 'custom'}
         'nn': Use nearest neighbour resampling
@@ -578,7 +600,7 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
         Set undetermined pixels to this value.
         If fill_value is None a masked array is returned 
         with undetermined pixels masked
-        
+
     :Returns: 
     result : numpy array 
         Source data resampled to target geometry
@@ -588,36 +610,36 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
         data = data.reshape(data.shape[0] * data.shape[1], data.shape[2])
     elif data.shape[0] != valid_input_index.size:
         data = data.ravel()
-    
+
     if valid_input_index.size != data.shape[0]:
         raise ValueError('Mismatch between geometry and dataset')
-    
+
     is_multi_channel = (data.ndim > 1)
     valid_input_size = valid_input_index.sum()
     valid_output_size = valid_output_index.sum()
-    
-    #Handle empty result set
-    if valid_input_size == 0 or valid_output_size == 0: 
+
+    # Handle empty result set
+    if valid_input_size == 0 or valid_output_size == 0:
         if is_multi_channel:
             output_shape = list(output_shape)
             output_shape.append(data.shape[1])
-            
+
         if fill_value is None:
-            #Use masked array for fill values
-            return np.ma.array(np.zeros(output_shape, data.dtype), 
+            # Use masked array for fill values
+            return np.ma.array(np.zeros(output_shape, data.dtype),
                                mask=np.ones(output_shape, dtype=np.bool))
         else:
-            #Return fill vaues for all pixels
-            return np.ones(output_shape, dtype=data.dtype) * fill_value  
+            # Return fill values for all pixels
+            return np.ones(output_shape, dtype=data.dtype) * fill_value
 
-    #Get size of output and reduced input
+    # Get size of output and reduced input
     input_size = valid_input_size
     if len(output_shape) > 1:
         output_size = output_shape[0] * output_shape[1]
     else:
         output_size = output_shape[0]
-        
-    #Check validity of input
+
+    # Check validity of input
     if not isinstance(data, np.ndarray):
         raise TypeError('data must be numpy array')
     elif valid_input_index.ndim != 1:
@@ -625,64 +647,63 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
     elif data.shape[0] != valid_input_index.size:
         raise TypeError('Not the same number of datapoints in '
                         'valid_input_index and data')
-    
+
     valid_types = ('nn', 'custom')
     if not resample_type in valid_types:
         raise TypeError('Invalid resampling type: %s' % resample_type)
-    
+
     if resample_type == 'custom' and weight_funcs is None:
         raise ValueError('weight_funcs must be supplied when using '
-                          'custom resampling')
-    
+                         'custom resampling')
+
     if not isinstance(fill_value, (long, int, float)) and fill_value is not None:
         raise TypeError('fill_value must be number or None')
-    
+
     if index_array.ndim == 1:
         neighbours = 1
     else:
         neighbours = index_array.shape[1]
         if resample_type == 'nn':
-            raise ValueError('index_array contains more neighbours than ' 
+            raise ValueError('index_array contains more neighbours than '
                              'just the nearest')
-    
-    #Reduce data    
-    new_data = data[valid_input_index]    
-    
-    #Nearest neighbour resampling should conserve data type
-    #Get data type
+
+    # Reduce data
+    new_data = data[valid_input_index]
+
+    # Nearest neighbour resampling should conserve data type
+    # Get data type
     conserve_input_data_type = False
     if resample_type == 'nn':
         conserve_input_data_type = True
         input_data_type = new_data.dtype
-    
-    #Handle masked array input
+
+    # Handle masked array input
     is_masked_data = False
     if np.ma.is_masked(new_data):
-        #Add the mask as channels to the dataset
+        # Add the mask as channels to the dataset
         is_masked_data = True
         new_data = np.column_stack((new_data.data, new_data.mask))
 
-    if new_data.ndim > 1: # Multiple channels or masked input
+    if new_data.ndim > 1:  # Multiple channels or masked input
         output_shape = list(output_shape)
         output_shape.append(new_data.shape[1])
 
-    
-    #Prepare weight_funcs argument for handeling mask data
+    # Prepare weight_funcs argument for handling masked data
     if weight_funcs is not None and is_masked_data:
         if is_multi_channel:
             weight_funcs = weight_funcs * 2
         else:
             weight_funcs = (weight_funcs,) * 2
-    
-    #Handle request for masking intead of using fill values        
+
+    # Handle request for masking instead of using fill values
     use_masked_fill_value = False
     if fill_value is None:
         use_masked_fill_value = True
         fill_value = _get_fill_mask_value(new_data.dtype)
-    
-    #Resample based on kd-tree query result
+
+    # Resample based on kd-tree query result
     if resample_type == 'nn' or neighbours == 1:
-        #Get nearest neighbour using array indexing
+        # Get nearest neighbour using array indexing
         index_mask = (index_array == input_size)
         new_index_array = np.where(index_mask, 0, index_array)
         result = new_data[new_index_array]
@@ -691,12 +712,12 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
         # Calculate result using weighting.
         # Note: the code below has low readability in order
         #       to avoid looping over numpy arrays
-                
-        #Get neighbours and masks of valid indices
+
+        # Get neighbours and masks of valid indices
         ch_neighbour_list = []
         index_mask_list = []
-        for i in range(neighbours): # Iterate over number of neighbours
-            # Make working copy neighbour index and 
+        for i in range(neighbours):  # Iterate over number of neighbours
+            # Make a working copy of the neighbour index and
             # set out of bounds indices to zero
             index_ni = index_array[:, i].copy()
             index_mask_ni = (index_ni == input_size)
@@ -704,65 +725,67 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
 
             # Get channel data for the corresponding indices
             ch_ni = new_data[index_ni]
-            ch_neighbour_list.append(ch_ni) 
+            ch_neighbour_list.append(ch_ni)
             index_mask_list.append(index_mask_ni)
-        
-        #Calculate weights 
+
+        # Calculate weights
         weight_list = []
-        for i in range(neighbours): # Iterate over number of neighbours
+        for i in range(neighbours):  # Iterate over number of neighbours
             # Make working copy of neighbour distances and
             # set out of bounds distance to 1 in order to avoid numerical Inf
             distance = distance_array[:, i].copy()
             distance[index_mask_list[i]] = 1
-            
-            if new_data.ndim > 1: # More than one channel in data set.
+
+            if new_data.ndim > 1:  # More than one channel in data set.
                 # Calculate weights for each channel
                 weights = []
                 num_weights = valid_output_index.sum()
                 num_channels = new_data.shape[1]
-                for j in range(num_channels):                    
+                for j in range(num_channels):
                     calc_weight = weight_funcs[j](distance)
-                    # Turn a scalar weight into a numpy array 
+                    # Turn a scalar weight into a numpy array
                     # (no effect if calc_weight already is an array)
                     expanded_calc_weight = np.ones(num_weights) * calc_weight
                     weights.append(expanded_calc_weight)
 
-                # Collect weights for all channels for neighbour number    
+                # Collect weights for all channels for neighbour number
                 weight_list.append(np.column_stack(weights))
-            else: # Only one channel
+            else:  # Only one channel
                 weights = weight_funcs(distance)
                 weight_list.append(weights)
-                        
+
         result = 0
         norm = 0
         count = 0
         norm_sqr = 0
         stddev = 0
 
-        # Calculate result       
-        for i in range(neighbours): # Iterate over number of neighbours   
+        # Calculate result
+        for i in range(neighbours):  # Iterate over number of neighbours
             # Find invalid indices to be masked out of the calculation
-            if new_data.ndim > 1: # More than one channel in data set.
-                inv_index_mask = np.expand_dims(np.invert(index_mask_list[i]), axis=1)
-            else: # Only one channel
+            if new_data.ndim > 1:  # More than one channel in data set.
+                inv_index_mask = np.expand_dims(
+                    np.invert(index_mask_list[i]), axis=1)
+            else:  # Only one channel
                 inv_index_mask = np.invert(index_mask_list[i])
-            
-            #Aggregate result and norm
+
+            # Aggregate result and norm
             weights_tmp = inv_index_mask * weight_list[i]
             result += weights_tmp * ch_neighbour_list[i]
             norm += weights_tmp
 
-        #Normalize result and set fillvalue
+        # Normalize result and set fillvalue
         result_valid_index = (norm > 0)
         result[result_valid_index] /= norm[result_valid_index]
 
         if with_uncert:  # Calculate uncertainties
             # Second pass to calculate the standard deviation
-            for i in range(neighbours): # Iterate over number of neighbours   
+            for i in range(neighbours):  # Iterate over number of neighbours
                 # Find invalid indices to be masked out of the calculation
-                if new_data.ndim > 1: # More than one channel in data set.
-                    inv_index_mask = np.expand_dims(np.invert(index_mask_list[i]), axis=1)
-                else: # Only one channel
+                if new_data.ndim > 1:  # More than one channel in data set.
+                    inv_index_mask = np.expand_dims(
+                        np.invert(index_mask_list[i]), axis=1)
+                else:  # Only one channel
                     inv_index_mask = np.invert(index_mask_list[i])
 
                 # Aggregate stddev information
@@ -776,20 +799,21 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
             new_valid_index = (count > 1)
             v1 = norm[new_valid_index]
             v2 = norm_sqr[new_valid_index]
-            stddev[new_valid_index] = np.sqrt((v1 / (v1 ** 2 - v2)) * stddev[new_valid_index])
+            stddev[new_valid_index] = np.sqrt(
+                (v1 / (v1 ** 2 - v2)) * stddev[new_valid_index])
             stddev[~new_valid_index] = np.NaN
 
-        #Add fill values
-        result[np.invert(result_valid_index)] = fill_value 
+        # Add fill values
+        result[np.invert(result_valid_index)] = fill_value
 
     # Create full result
-    if new_data.ndim > 1: # More than one channel
+    if new_data.ndim > 1:  # More than one channel
         output_raw_shape = ((output_size, new_data.shape[1]))
-    else: # One channel
+    else:  # One channel
         output_raw_shape = output_size
 
     full_result = np.ones(output_raw_shape) * fill_value
-    full_result[valid_output_index] = result 
+    full_result[valid_output_index] = result
     result = full_result
 
     if with_uncert:  # Add fill values for uncertainty
@@ -799,29 +823,29 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
         full_count[valid_output_index] = count
         stddev = full_stddev
         count = full_count
-        
+
         stddev = stddev.reshape(output_shape)
         count = count.reshape(output_shape)
 
-        if is_masked_data: # Ignore uncert computation of masks
+        if is_masked_data:  # Ignore uncert computation of masks
             stddev = _remask_data(stddev, is_to_be_masked=False)
             count = _remask_data(count, is_to_be_masked=False)
 
         # Set masks for invalid stddev
         stddev = np.ma.array(stddev, mask=np.isnan(stddev))
 
-    #Reshape resampled data to correct shape
+    # Reshape resampled data to correct shape
     result = result.reshape(output_shape)
 
-    #Remap mask channels to create masked output
+    # Remap mask channels to create masked output
     if is_masked_data:
         result = _remask_data(result)
-        
-    #Create masking of fill values
+
+    # Create masking of fill values
     if use_masked_fill_value:
         result = np.ma.masked_equal(result, fill_value)
-        
-    #Set output data type to input data type if relevant
+
+    # Set output data type to input data type if relevant
     if conserve_input_data_type:
         result = result.astype(input_data_type)
 
@@ -833,9 +857,10 @@ def get_sample_from_neighbour_info(resample_type, output_shape, data,
     else:
         return result
 
+
 def _get_fill_mask_value(data_dtype):
     """Returns the maximum value of dtype"""
-    
+
     if issubclass(data_dtype.type, np.floating):
         fill_value = np.finfo(data_dtype.type).max
     elif issubclass(data_dtype.type, np.integer):
@@ -845,13 +870,14 @@ def _get_fill_mask_value(data_dtype):
                         data_dtype.type)
     return fill_value
 
+
 def _remask_data(data, is_to_be_masked=True):
     """Interprets half the array as mask for the other half"""
-    
+
     channels = data.shape[-1]
     if is_to_be_masked:
-        mask = data[..., (channels // 2):]            
-        #All pixels affected by masked pixels are masked out
+        mask = data[..., (channels // 2):]
+        # All pixels affected by masked pixels are masked out
         mask = (mask != 0)
         data = np.ma.array(data[..., :(channels // 2)], mask=mask)
     else:
@@ -860,4 +886,3 @@ def _remask_data(data, is_to_be_masked=True):
     if data.shape[-1] == 1:
         data = data.reshape(data.shape[:-1])
     return data
-
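
The two-step interface reworked above (get_neighbour_info() followed by get_sample_from_neighbour_info()) exists so that the expensive kd-tree query is computed once per geolocation and then reused for every channel that shares it. A minimal sketch of that pattern; the swath and area parameters below are illustrative placeholders:

    import numpy as np
    from pyresample import geometry, kd_tree

    # Toy swath geolocation and data (placeholders)
    lons, lats = np.meshgrid(np.arange(-20., 20., 1.), np.arange(50., 70., 1.))
    data = np.random.rand(*lons.shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

    # Illustrative polar target area (the parameters here are placeholders)
    area_def = geometry.AreaDefinition('ease_nh', 'EASE North', 'ease_nh',
                                       {'a': '6371228.0', 'units': 'm',
                                        'lon_0': '0', 'proj': 'laea', 'lat_0': '90'},
                                       425, 425,
                                       [-5326849.0625, -5326849.0625,
                                        5326849.0625, 5326849.0625])

    # Expensive kd-tree query, done once for this swath/area pair
    valid_in, valid_out, index_arr, dist_arr = kd_tree.get_neighbour_info(
        swath_def, area_def, radius_of_influence=50000, neighbours=1)

    # Cheap reuse of the neighbour info for each channel with this geolocation
    result = kd_tree.get_sample_from_neighbour_info(
        'nn', area_def.shape, data, valid_in, valid_out, index_arr, fill_value=None)
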
diff --git a/pyresample/plot.py b/pyresample/plot.py
index 86e1eed..72ca5d6 100644
--- a/pyresample/plot.py
+++ b/pyresample/plot.py
@@ -1,81 +1,104 @@
+# pyresample, Resampling of remote sensing image data in python
+#
+# Copyright (C) 2010-2015
+#
+# Authors:
+#    Esben S. Nielsen
+#    Thomas Lavergne
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import absolute_import
 import numpy as np
 
 
 def ellps2axis(ellps_name):
     """Get semi-major and semi-minor axis from ellipsis definition
-    
+
     :Parameters:
     ellps_name : str
         Standard name of ellipsoid
-        
+
     :Returns:
     (a, b) : semi-major and semi-minor axes
     """
-    
-    ellps = {'helmert': {'a': 6378200.0, 'b': 6356818.1696278909}, 
-             'intl': {'a': 6378388.0, 'b': 6356911.9461279465}, 
-             'merit': {'a': 6378137.0, 'b': 6356752.2982159676}, 
-             'wgs72': {'a': 6378135.0, 'b': 6356750.5200160937}, 
-             'sphere': {'a': 6370997.0, 'b': 6370997.0}, 
-             'clrk66': {'a': 6378206.4000000004, 'b': 6356583.7999999998}, 
-             'nwl9d': {'a': 6378145.0, 'b': 6356759.7694886839}, 
-             'lerch': {'a': 6378139.0, 'b': 6356754.2915103417}, 
-             'evrstss': {'a': 6377298.5559999999, 'b': 6356097.5503008962}, 
-             'evrst30': {'a': 6377276.3449999997, 'b': 6356075.4131402401}, 
-             'mprts': {'a': 6397300.0, 'b': 6363806.2827225132}, 
-             'krass': {'a': 6378245.0, 'b': 6356863.0187730473}, 
-             'walbeck': {'a': 6376896.0, 'b': 6355834.8466999996}, 
-             'kaula': {'a': 6378163.0, 'b': 6356776.9920869097}, 
-             'wgs66': {'a': 6378145.0, 'b': 6356759.7694886839}, 
-             'evrst56': {'a': 6377301.2429999998, 'b': 6356100.2283681016}, 
-             'new_intl': {'a': 6378157.5, 'b': 6356772.2000000002}, 
-             'airy': {'a': 6377563.3959999997, 'b': 6356256.9100000001}, 
-             'bessel': {'a': 6377397.1550000003, 'b': 6356078.9628181886}, 
-             'seasia': {'a': 6378155.0, 'b': 6356773.3205000004}, 
-             'aust_sa': {'a': 6378160.0, 'b': 6356774.7191953054}, 
-             'wgs84': {'a': 6378137.0, 'b': 6356752.3142451793}, 
-             'hough': {'a': 6378270.0, 'b': 6356794.3434343431}, 
-             'wgs60': {'a': 6378165.0, 'b': 6356783.2869594367}, 
-             'engelis': {'a': 6378136.0499999998, 'b': 6356751.3227215428}, 
-             'apl4.9': {'a': 6378137.0, 'b': 6356751.796311819}, 
-             'andrae': {'a': 6377104.4299999997, 'b': 6355847.4152333336}, 
-             'sgs85': {'a': 6378136.0, 'b': 6356751.301568781}, 
-             'delmbr': {'a': 6376428.0, 'b': 6355957.9261637237}, 
-             'fschr60m': {'a': 6378155.0, 'b': 6356773.3204827355}, 
-             'iau76': {'a': 6378140.0, 'b': 6356755.2881575283}, 
-             'plessis': {'a': 6376523.0, 'b': 6355863.0}, 
-             'cpm': {'a': 6375738.7000000002, 'b': 6356666.221912113}, 
-             'fschr68': {'a': 6378150.0, 'b': 6356768.3372443849}, 
-             'mod_airy': {'a': 6377340.1890000002, 'b': 6356034.4460000005}, 
-             'grs80': {'a': 6378137.0, 'b': 6356752.3141403561}, 
-             'bess_nam': {'a': 6377483.8650000002, 'b': 6356165.3829663256}, 
-             'fschr60': {'a': 6378166.0, 'b': 6356784.2836071067}, 
-             'clrk80': {'a': 6378249.1449999996, 'b': 6356514.9658284895}, 
-             'evrst69': {'a': 6377295.6639999999, 'b': 6356094.6679152036}, 
-             'grs67': {'a': 6378160.0, 'b': 6356774.5160907144}, 
+
+    ellps = {'helmert': {'a': 6378200.0, 'b': 6356818.1696278909},
+             'intl': {'a': 6378388.0, 'b': 6356911.9461279465},
+             'merit': {'a': 6378137.0, 'b': 6356752.2982159676},
+             'wgs72': {'a': 6378135.0, 'b': 6356750.5200160937},
+             'sphere': {'a': 6370997.0, 'b': 6370997.0},
+             'clrk66': {'a': 6378206.4000000004, 'b': 6356583.7999999998},
+             'nwl9d': {'a': 6378145.0, 'b': 6356759.7694886839},
+             'lerch': {'a': 6378139.0, 'b': 6356754.2915103417},
+             'evrstss': {'a': 6377298.5559999999, 'b': 6356097.5503008962},
+             'evrst30': {'a': 6377276.3449999997, 'b': 6356075.4131402401},
+             'mprts': {'a': 6397300.0, 'b': 6363806.2827225132},
+             'krass': {'a': 6378245.0, 'b': 6356863.0187730473},
+             'walbeck': {'a': 6376896.0, 'b': 6355834.8466999996},
+             'kaula': {'a': 6378163.0, 'b': 6356776.9920869097},
+             'wgs66': {'a': 6378145.0, 'b': 6356759.7694886839},
+             'evrst56': {'a': 6377301.2429999998, 'b': 6356100.2283681016},
+             'new_intl': {'a': 6378157.5, 'b': 6356772.2000000002},
+             'airy': {'a': 6377563.3959999997, 'b': 6356256.9100000001},
+             'bessel': {'a': 6377397.1550000003, 'b': 6356078.9628181886},
+             'seasia': {'a': 6378155.0, 'b': 6356773.3205000004},
+             'aust_sa': {'a': 6378160.0, 'b': 6356774.7191953054},
+             'wgs84': {'a': 6378137.0, 'b': 6356752.3142451793},
+             'hough': {'a': 6378270.0, 'b': 6356794.3434343431},
+             'wgs60': {'a': 6378165.0, 'b': 6356783.2869594367},
+             'engelis': {'a': 6378136.0499999998, 'b': 6356751.3227215428},
+             'apl4.9': {'a': 6378137.0, 'b': 6356751.796311819},
+             'andrae': {'a': 6377104.4299999997, 'b': 6355847.4152333336},
+             'sgs85': {'a': 6378136.0, 'b': 6356751.301568781},
+             'delmbr': {'a': 6376428.0, 'b': 6355957.9261637237},
+             'fschr60m': {'a': 6378155.0, 'b': 6356773.3204827355},
+             'iau76': {'a': 6378140.0, 'b': 6356755.2881575283},
+             'plessis': {'a': 6376523.0, 'b': 6355863.0},
+             'cpm': {'a': 6375738.7000000002, 'b': 6356666.221912113},
+             'fschr68': {'a': 6378150.0, 'b': 6356768.3372443849},
+             'mod_airy': {'a': 6377340.1890000002, 'b': 6356034.4460000005},
+             'grs80': {'a': 6378137.0, 'b': 6356752.3141403561},
+             'bess_nam': {'a': 6377483.8650000002, 'b': 6356165.3829663256},
+             'fschr60': {'a': 6378166.0, 'b': 6356784.2836071067},
+             'clrk80': {'a': 6378249.1449999996, 'b': 6356514.9658284895},
+             'evrst69': {'a': 6377295.6639999999, 'b': 6356094.6679152036},
+             'grs67': {'a': 6378160.0, 'b': 6356774.5160907144},
              'evrst48': {'a': 6377304.0630000001, 'b': 6356103.0389931547}}
     try:
         ellps_axis = ellps[ellps_name.lower()]
         a = ellps_axis['a']
         b = ellps_axis['b']
-    except KeyError, e:
+    except KeyError as e:
         raise ValueError(('Could not determine semi-major and semi-minor axes '
-                         'of specified ellipsis %s') % ellps_name)
+                          'of specified ellipsoid %s') % ellps_name)
     return a, b
 
+
 def area_def2basemap(area_def, **kwargs):
     """Get Basemap object from AreaDefinition
-    
+
     :Parameters:
     area_def : object
         geometry.AreaDefinition object
     **kwargs: Keyword arguments
         Additional initialization arguments for Basemap
-        
+
     :Returns:
     bmap : Basemap object
     """
-    
+
     from mpl_toolkits.basemap import Basemap
     try:
         a, b = ellps2axis(area_def.proj_dict['ellps'])
@@ -92,8 +115,8 @@ def area_def2basemap(area_def, **kwargs):
             # Default to WGS84 ellipsoid
             a, b = ellps2axis('wgs84')
             rsphere = (a, b)
-            
-    # Add projection specific basemap args to args passed to function    
+
+    # Add projection specific basemap args to args passed to function
     basemap_args = kwargs
     basemap_args['rsphere'] = rsphere
 
@@ -114,25 +137,26 @@ def area_def2basemap(area_def, **kwargs):
         basemap_args['projection'] = 'cyl'
     else:
         basemap_args['projection'] = area_def.proj_dict['proj']
-    
+
     # Try adding potentially remaining args
-    for key in ('lon_0', 'lat_0', 'lon_1', 'lat_1', 'lon_2', 'lat_2', 
+    for key in ('lon_0', 'lat_0', 'lon_1', 'lat_1', 'lon_2', 'lat_2',
                 'lat_ts'):
         try:
             basemap_args[key] = float(area_def.proj_dict[key])
         except KeyError:
             pass
 
-    return Basemap(**basemap_args) 
-            
-def _get_quicklook(area_def, data, vmin=None, vmax=None, 
-                   label='Variable (units)', num_meridians=45, 
+    return Basemap(**basemap_args)
+
+
+def _get_quicklook(area_def, data, vmin=None, vmax=None,
+                   label='Variable (units)', num_meridians=45,
                    num_parallels=10, coast_res='c'):
     """Get default Basemap matplotlib plot
     """
-    
+
     if area_def.shape != data.shape:
-        raise ValueError('area_def shape %s does not match data shape %s' % 
+        raise ValueError('area_def shape %s does not match data shape %s' %
                          (list(area_def.shape), list(data.shape)))
     import matplotlib.pyplot as plt
     bmap = area_def2basemap(area_def, resolution=coast_res)
@@ -144,14 +168,15 @@ def _get_quicklook(area_def, data, vmin=None, vmax=None,
     if not (np.ma.isMaskedArray(data) and data.mask.all()):
         col = bmap.imshow(data, origin='upper', vmin=vmin, vmax=vmax)
         plt.colorbar(col, shrink=0.5, pad=0.05).set_label(label)
-        
+
     return plt
-    
-def show_quicklook(area_def, data, vmin=None, vmax=None, 
-                   label='Variable (units)', num_meridians=45, 
+
+
+def show_quicklook(area_def, data, vmin=None, vmax=None,
+                   label='Variable (units)', num_meridians=45,
                    num_parallels=10, coast_res='c'):
     """Display default quicklook plot
-    
+
     :Parameters:
     area_def : object
         geometry.AreaDefinition object
@@ -169,22 +194,23 @@ def show_quicklook(area_def, data, vmin=None, vmax=None,
         Number of parallels to plot on the globe
     coast_res : {'c', 'l', 'i', 'h', 'f'}, optional
         Resolution of coastlines
-        
+
     :Returns:
     bmap : Basemap object
     """
-        
-    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax, 
-                         label=label, num_meridians=num_meridians, 
+
+    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax,
+                         label=label, num_meridians=num_meridians,
                          num_parallels=num_parallels, coast_res=coast_res)
     plt.show()
     plt.close()
-    
-def save_quicklook(filename, area_def, data, vmin=None, vmax=None, 
-                   label='Variable (units)', num_meridians=45, 
+
+
+def save_quicklook(filename, area_def, data, vmin=None, vmax=None,
+                   label='Variable (units)', num_meridians=45,
                    num_parallels=10, coast_res='c', backend='AGG'):
     """Display default quicklook plot
-    
+
     :Parameters:
     filename : str
         path to output file
@@ -207,12 +233,11 @@ def save_quicklook(filename, area_def, data, vmin=None, vmax=None,
     backend : str, optional
         matplotlib backend to use
     """
-    
+
     import matplotlib
     matplotlib.use(backend, warn=False)
-    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax, 
-                         label=label, num_meridians=num_meridians, 
+    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax,
+                         label=label, num_meridians=num_meridians,
                          num_parallels=num_parallels, coast_res=coast_res)
     plt.savefig(filename, bbox_inches='tight')
     plt.close()
-    
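
The plot helpers above wrap the usual Basemap setup for an AreaDefinition. A short usage sketch, assuming an area_def and a resampled result such as those in the kd_tree sketch earlier; the file names are placeholders:

    from pyresample import plot

    # ellps2axis maps a standard ellipsoid name to its semi-major/semi-minor axes
    a, b = plot.ellps2axis('wgs84')

    # One-call quicklook written to disk (requires matplotlib and basemap)
    plot.save_quicklook('quicklook.png', area_def, result,
                        label='Variable (units)', coast_res='l')

    # Or build a Basemap object for custom plotting on the same area
    import matplotlib.pyplot as plt
    bmap = plot.area_def2basemap(area_def, resolution='l')
    col = bmap.imshow(result, origin='upper')
    plt.colorbar(col, shrink=0.5)
    plt.savefig('custom_quicklook.png', bbox_inches='tight')
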
diff --git a/pyresample/spherical_geometry.py b/pyresample/spherical_geometry.py
index 637c9d0..e332fb2 100644
--- a/pyresample/spherical_geometry.py
+++ b/pyresample/spherical_geometry.py
@@ -1,30 +1,39 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Martin Raspaud
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2015  Martin Raspaud
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Classes for spherical geometry operations"""
 
+from __future__ import absolute_import
+
 import math
 import numpy as np
 
+try:
+    range = xrange
+except NameError:
+    pass
+
 EPSILON = 0.0000001
 
 # FIXME: this has not been tested with R != 1
 
+
 class Coordinate(object):
+
     """Point on earth in terms of lat and lon.
     """
     lat = None
@@ -32,7 +41,7 @@ class Coordinate(object):
     x__ = None
     y__ = None
     z__ = None
-    
+
     def __init__(self, lon=None, lat=None,
                  x__=None, y__=None, z__=None, R__=1):
 
@@ -40,7 +49,7 @@ class Coordinate(object):
         if lat is not None and lon is not None:
             if not(-180 <= lon <= 180 and -90 <= lat <= 90):
                 raise ValueError('Illegal (lon, lat) coordinates: (%s, %s)'
-                                  % (lon, lat))
+                                 % (lon, lat))
             self.lat = math.radians(lat)
             self.lon = math.radians(lon)
             self._update_cart()
@@ -57,15 +66,14 @@ class Coordinate(object):
         self.x__ = math.cos(self.lat) * math.cos(self.lon)
         self.y__ = math.cos(self.lat) * math.sin(self.lon)
         self.z__ = math.sin(self.lat)
-        
 
     def _update_lonlat(self):
         """Convert cartesian to lon/lat.
         """
-        
+
         self.lat = math.degrees(math.asin(self.z__ / self.R__))
         self.lon = math.degrees(math.atan2(self.y__, self.x__))
-        
+
     def __ne__(self, other):
         if(abs(self.lat - other.lat) < EPSILON and
            abs(self.lon - other.lon) < EPSILON):
@@ -78,7 +86,7 @@ class Coordinate(object):
 
     def __str__(self):
         return str((math.degrees(self.lon), math.degrees(self.lat)))
-    
+
     def __repr__(self):
         return str((math.degrees(self.lon), math.degrees(self.lat)))
 
@@ -138,7 +146,7 @@ class Coordinate(object):
         x__ = self.y__ * point.z__ - self.z__ * point.y__
         y__ = self.z__ * point.x__ - self.x__ * point.z__
         z__ = self.x__ * point.y__ - self.y__ * point.x__
-        
+
         return Coordinate(x__=x__, y__=y__, z__=z__)
 
     def dot(self, point):
@@ -148,7 +156,9 @@ class Coordinate(object):
                 self.y__ * point.y__ +
                 self.z__ * point.z__)
 
+
 class Arc(object):
+
     """An arc of the great circle between two points.
     """
     start = None
@@ -167,9 +177,9 @@ class Arc(object):
             val = 1
         elif val < -1:
             val = -1
-        
+
         return math.acos(val)
-                           
+
     def __eq__(self, other):
         if(self.start == other.start and self.end == other.end):
             return 1
@@ -206,26 +216,25 @@ class Arc(object):
         ua_ = a__.cross(b__)
         ub_ = a__.cross(c__)
 
-        val =  ua_.dot(ub_) / (ua_.norm() * ub_.norm())
+        val = ua_.dot(ub_) / (ua_.norm() * ub_.norm())
         if abs(val - 1) < EPSILON:
             angle = 0
         elif abs(val + 1) < EPSILON:
             angle = math.pi
         else:
-            angle = math.acos(val)    
+            angle = math.acos(val)
 
         n__ = ua_.normalize()
         if n__.dot(c__) > 0:
             return -angle
         else:
             return angle
-        
+
     def intersections(self, other_arc):
         """Gives the two intersections of the greats circles defined by the 
        current arc and *other_arc*.
         """
-        
-        
+
         if self.end.lon - self.start.lon > math.pi:
             self.end.lon -= 2 * math.pi
         if other_arc.end.lon - other_arc.start.lon > math.pi:
@@ -234,7 +243,7 @@ class Arc(object):
             self.end.lon += 2 * math.pi
         if other_arc.end.lon - other_arc.start.lon < -math.pi:
             other_arc.end.lon += 2 * math.pi
-            
+
         ea_ = self.start.cross2cart(self.end).normalize()
         eb_ = other_arc.start.cross2cart(other_arc.end).normalize()
 
@@ -259,14 +268,12 @@ class Arc(object):
         two points.
         """
 
-
         for i in self.intersections(other_arc):
             a__ = self.start
             b__ = self.end
             c__ = other_arc.start
             d__ = other_arc.end
 
-
             ab_ = a__.distance(b__)
             cd_ = c__.distance(d__)
 
@@ -275,36 +282,39 @@ class Arc(object):
                 return i
         return None
 
+
 def modpi(val):
     """Puts *val* between -pi and pi.
     """
     return (val + math.pi) % (2 * math.pi) - math.pi
 
+
 def get_polygon_area(corners):
     """Get the area of the convex area defined by *corners*.
     """
     # We assume the earth is spherical !!!
     # Should be the radius of the earth at the observed position
     R = 1
-    
+
     c1_ = corners[0]
     area = 0
-    
+
     for idx in range(1, len(corners) - 1):
         b1_ = Arc(c1_, corners[idx])
         b2_ = Arc(c1_, corners[idx + 1])
         b3_ = Arc(corners[idx], corners[idx + 1])
         e__ = (abs(b1_.angle(b2_)) +
-            abs(b2_.angle(b3_)) + 
-                   abs(b3_.angle(b1_)))
+               abs(b2_.angle(b3_)) +
+               abs(b3_.angle(b1_)))
         area += R ** 2 * e__ - math.pi
     return area
 
+
 def get_intersections(b__, boundaries):
     """Get the intersections of *b__* with *boundaries*.
     Returns both the intersection coordinates and the concerned boundaries.
     """
-    
+
     intersections = []
     bounds = []
     for other_b in boundaries:
@@ -313,7 +323,8 @@ def get_intersections(b__, boundaries):
             intersections.append(inter)
             bounds.append(other_b)
     return intersections, bounds
-    
+
+
 def get_first_intersection(b__, boundaries):
     """Get the first intersection on *b__* with *boundaries*.
     """
@@ -325,6 +336,7 @@ def get_first_intersection(b__, boundaries):
         return intersections[indices[0]]
     return None
 
+
 def get_next_intersection(p__, b__, boundaries):
     """Get the next intersection from the intersection of arcs *p__* and *b__*
     along segment *b__* with *boundaries*.
@@ -339,6 +351,7 @@ def get_next_intersection(p__, b__, boundaries):
         return intersections[indices[1]], bounds[indices[1]]
     return None, None
 
+
 def point_inside(point, corners):
     """Is a point inside the 4 corners ? This uses great circle arcs as area
     boundaries.
@@ -347,7 +360,7 @@ def point_inside(point, corners):
     arc2 = Arc(corners[1], corners[2])
     arc3 = Arc(corners[2], corners[3])
     arc4 = Arc(corners[3], corners[0])
-    
+
     arc5 = Arc(corners[1], point)
     arc6 = Arc(corners[3], point)
 
@@ -358,10 +371,11 @@ def point_inside(point, corners):
     angle2bis = modpi(arc3.angle(arc6))
 
     return (np.sign(angle1) == np.sign(angle1bis) and
-            abs(angle1) > abs(angle1bis) and 
+            abs(angle1) > abs(angle1bis) and
             np.sign(angle2) == np.sign(angle2bis) and
             abs(angle2) > abs(angle2bis))
 
+
 def intersection_polygon(area_corners, segment_corners):
     """Get the intersection polygon between two areas.
     """
@@ -411,5 +425,3 @@ def intersection_polygon(area_corners, segment_corners):
             b__ = b2_
             boundaries, other_boundaries = other_boundaries, boundaries
     return poly[:-1]
-
-
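
The spherical primitives above can also be used directly, for example to test whether a point lies inside a lon/lat quadrilateral. A small sketch with made-up coordinates (degrees) on the unit sphere:

    from pyresample.spherical_geometry import Arc, Coordinate, point_inside, get_polygon_area

    # Corners of a convex quadrilateral, given in order around the polygon
    corners = [Coordinate(lon=-10, lat=40), Coordinate(lon=10, lat=40),
               Coordinate(lon=10, lat=60), Coordinate(lon=-10, lat=60)]

    # Containment test using great-circle arcs as the polygon boundaries
    print(point_inside(Coordinate(lon=0, lat=50), corners))  # expected: True

    # Area of the convex polygon on the unit sphere (R = 1)
    print(get_polygon_area(corners))

    # Crossing point of two great-circle arcs (None if they do not cross)
    arc1 = Arc(Coordinate(lon=-5, lat=45), Coordinate(lon=5, lat=55))
    arc2 = Arc(Coordinate(lon=-5, lat=55), Coordinate(lon=5, lat=45))
    print(arc1.intersection(arc2) is not None)  # expected: True
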
diff --git a/pyresample/test/__init__.py b/pyresample/test/__init__.py
new file mode 100644
index 0000000..1fe0950
--- /dev/null
+++ b/pyresample/test/__init__.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2014, 2015 Martin Raspaud
+
+# Author(s):
+
+#   Martin Raspaud <martin.raspaud at smhi.se>
+
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""The test base.
+"""
+
+
+from pyresample.test import test_geometry, test_grid, test_image, test_kd_tree, test_plot, test_spherical_geometry, test_swath, test_utils
+
+import unittest
+
+
+def suite():
+    """The global test suite.
+    """
+    mysuite = unittest.TestSuite()
+    # Use the unittests also
+    mysuite.addTests(test_geometry.suite())
+    mysuite.addTests(test_grid.suite())
+    mysuite.addTests(test_image.suite())
+    mysuite.addTests(test_kd_tree.suite())
+    mysuite.addTests(test_plot.suite())
+    mysuite.addTests(test_spherical_geometry.suite())
+    mysuite.addTests(test_swath.suite())
+    mysuite.addTests(test_utils.suite())
+
+    return mysuite
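
With the tests now shipped as the pyresample.test package, the aggregated suite defined above can be run with the standard unittest runner, for example:

    import unittest
    from pyresample import test

    unittest.TextTestRunner(verbosity=2).run(test.suite())
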
diff --git a/test/test_files/areas.cfg b/pyresample/test/test_files/areas.cfg
similarity index 100%
rename from test/test_files/areas.cfg
rename to pyresample/test/test_files/areas.cfg
diff --git a/test/test_files/mask_grid.dat b/pyresample/test/test_files/mask_grid.dat
similarity index 100%
rename from test/test_files/mask_grid.dat
rename to pyresample/test/test_files/mask_grid.dat
diff --git a/test/test_files/mask_test_data.dat b/pyresample/test/test_files/mask_test_data.dat
similarity index 100%
rename from test/test_files/mask_test_data.dat
rename to pyresample/test/test_files/mask_test_data.dat
diff --git a/test/test_files/mask_test_fill_value.dat b/pyresample/test/test_files/mask_test_fill_value.dat
similarity index 100%
rename from test/test_files/mask_test_fill_value.dat
rename to pyresample/test/test_files/mask_test_fill_value.dat
diff --git a/test/test_files/mask_test_full_fill.dat b/pyresample/test/test_files/mask_test_full_fill.dat
similarity index 100%
rename from test/test_files/mask_test_full_fill.dat
rename to pyresample/test/test_files/mask_test_full_fill.dat
diff --git a/test/test_files/mask_test_full_fill_multi.dat b/pyresample/test/test_files/mask_test_full_fill_multi.dat
similarity index 100%
rename from test/test_files/mask_test_full_fill_multi.dat
rename to pyresample/test/test_files/mask_test_full_fill_multi.dat
diff --git a/test/test_files/mask_test_mask.dat b/pyresample/test/test_files/mask_test_mask.dat
similarity index 100%
rename from test/test_files/mask_test_mask.dat
rename to pyresample/test/test_files/mask_test_mask.dat
diff --git a/test/test_files/mask_test_nearest_data.dat b/pyresample/test/test_files/mask_test_nearest_data.dat
similarity index 100%
rename from test/test_files/mask_test_nearest_data.dat
rename to pyresample/test/test_files/mask_test_nearest_data.dat
diff --git a/test/test_files/mask_test_nearest_mask.dat b/pyresample/test/test_files/mask_test_nearest_mask.dat
similarity index 100%
rename from test/test_files/mask_test_nearest_mask.dat
rename to pyresample/test/test_files/mask_test_nearest_mask.dat
diff --git a/test/test_files/quick_mask_test.dat b/pyresample/test/test_files/quick_mask_test.dat
similarity index 100%
rename from test/test_files/quick_mask_test.dat
rename to pyresample/test/test_files/quick_mask_test.dat
diff --git a/test/test_files/ssmis_swath.npz b/pyresample/test/test_files/ssmis_swath.npz
similarity index 100%
rename from test/test_files/ssmis_swath.npz
rename to pyresample/test/test_files/ssmis_swath.npz
diff --git a/pyresample/test/test_geometry.py b/pyresample/test/test_geometry.py
new file mode 100644
index 0000000..bba33ee
--- /dev/null
+++ b/pyresample/test/test_geometry.py
@@ -0,0 +1,646 @@
+from __future__ import with_statement
+
+import sys
+import unittest
+
+import numpy as np
+
+import warnings
+if sys.version_info < (2, 6):
+    warnings.simplefilter("ignore")
+else:
+    warnings.simplefilter("always")
+
+from pyresample import geometry, geo_filter
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+
+class Test(unittest.TestCase):
+
+    """Unit testing the geometry and geo_filter modules"""
+
+    def assert_raises(self, exception, call_able, *args):
+        """assertRaises() has changed from py2.6 to 2.7! Here is an attempt to
+        cover both"""
+        import sys
+        if sys.version_info < (2, 7):
+            self.assertRaises(exception, call_able, *args)
+        else:
+            with self.assertRaises(exception):
+                call_able(*args)
+
+    def test_lonlat_precomp(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+        lons, lats = area_def.get_lonlats()
+        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                            {'a': '6378144.0',
+                                             'b': '6356759.0',
+                                             'lat_0': '50.00',
+                                             'lat_ts': '50.00',
+                                             'lon_0': '8.00',
+                                             'proj': 'stere'},
+                                            800,
+                                            800,
+                                            [-1370912.72,
+                                                -909968.64000000001,
+                                                1029087.28,
+                                                1490031.3600000001],
+                                            lons=lons, lats=lats)
+        lon, lat = area_def.get_lonlat(400, 400)
+        self.assertAlmostEqual(lon, 5.5028467120975835,
+                               msg='lon retrieval from precomputed grid failed')
+        self.assertAlmostEqual(lat, 52.566998432390619,
+                               msg='lat retrieval from precomputed grid failed')
+
+    @tmp
+    def test_cartesian(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+        cart_coords = area_def.get_cartesian_coords()
+        exp = 5872039989466.8457031
+        self.assertTrue(abs(cart_coords.sum() - exp) < 1e-7 * exp,
+                        msg='Calculation of cartesian coordinates failed')
+
+    def test_base_lat_invalid(self):
+
+        lons = np.arange(-135., +135, 20.)
+        lats = np.ones_like(lons) * 70.
+        lats[0] = -95
+        lats[1] = +95
+        self.assertRaises(
+            ValueError, geometry.BaseDefinition, lons=lons, lats=lats)
+
+    def test_base_lon_wrapping(self):
+
+        lons1 = np.arange(-135., +135, 50.)
+        lats = np.ones_like(lons1) * 70.
+
+        with warnings.catch_warnings(record=True) as w1:
+            base_def1 = geometry.BaseDefinition(lons1, lats)
+            self.assertFalse(
+                len(w1) != 0, 'Got warning <%s>, but was not expecting one' % w1)
+
+        lons2 = np.where(lons1 < 0, lons1 + 360, lons1)
+        with warnings.catch_warnings(record=True) as w2:
+            base_def2 = geometry.BaseDefinition(lons2, lats)
+            self.assertFalse(
+                len(w2) != 1, 'Failed to trigger a warning on longitude wrapping')
+            self.assertFalse(('-180:+180' not in str(w2[0].message)),
+                             'Failed to trigger correct warning about longitude wrapping')
+
+        self.assertFalse(
+            base_def1 != base_def2, 'longitude wrapping to [-180:+180] did not work')
+
+        with warnings.catch_warnings(record=True) as w3:
+            base_def3 = geometry.BaseDefinition(None, None)
+            self.assertFalse(
+                len(w3) != 0, 'Got warning <%s>, but was not expecting one' % w3)
+
+        self.assert_raises(ValueError, base_def3.get_lonlats)
+
+    def test_base_type(self):
+        lons1 = np.arange(-135., +135, 50.)
+        lats = np.ones_like(lons1) * 70.
+
+        # Test dtype is preserved without longitude wrapping
+        basedef = geometry.BaseDefinition(lons1, lats)
+        lons, _ = basedef.get_lonlats()
+        self.assertEqual(lons.dtype, lons1.dtype,
+                         "BaseDefinition did not maintain dtype of longitudes (in:%s out:%s)" %
+                         (lons1.dtype, lons.dtype,))
+
+        lons1_ints = lons1.astype('int')
+        basedef = geometry.BaseDefinition(lons1_ints, lats)
+        lons, _ = basedef.get_lonlats()
+        self.assertEqual(lons.dtype, lons1_ints.dtype,
+                         "BaseDefinition did not maintain dtype of longitudes (in:%s out:%s)" %
+                         (lons1_ints.dtype, lons.dtype,))
+
+        # Test dtype is preserved with automatic longitude wrapping
+        lons2 = np.where(lons1 < 0, lons1 + 360, lons1)
+        with warnings.catch_warnings(record=True) as w:
+            basedef = geometry.BaseDefinition(lons2, lats)
+
+        lons, _ = basedef.get_lonlats()
+        self.assertEqual(lons.dtype, lons2.dtype,
+                         "BaseDefinition did not maintain dtype of longitudes (in:%s out:%s)" %
+                         (lons2.dtype, lons.dtype,))
+
+        lons2_ints = lons2.astype('int')
+        with warnings.catch_warnings(record=True) as w:
+            basedef = geometry.BaseDefinition(lons2_ints, lats)
+
+        lons, _ = basedef.get_lonlats()
+        self.assertEqual(lons.dtype, lons2_ints.dtype,
+                         "BaseDefinition did not maintain dtype of longitudes (in:%s out:%s)" %
+                         (lons2_ints.dtype, lons.dtype,))
+
+    def test_swath(self):
+        lons1 = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats1 = np.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+
+        swath_def = geometry.SwathDefinition(lons1, lats1)
+
+        lons2, lats2 = swath_def.get_lonlats()
+
+        self.assertFalse(id(lons1) != id(lons2) or id(lats1) != id(lats2),
+                         msg='Caching of swath coordinates failed')
+
+    def test_swath_wrap(self):
+        lons1 = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats1 = np.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+
+        lons1 += 180.
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            swath_def = geometry.BaseDefinition(lons1, lats1)
+        else:
+            with warnings.catch_warnings(record=True) as w1:
+                swath_def = geometry.BaseDefinition(lons1, lats1)
+                self.assertFalse(
+                    len(w1) != 1, 'Failed to trigger a warning on longitude wrapping')
+                self.assertFalse(('-180:+180' not in str(w1[0].message)),
+                                 'Failed to trigger correct warning about longitude wrapping')
+
+        lons2, lats2 = swath_def.get_lonlats()
+
+        self.assertTrue(id(lons1) != id(lons2),
+                        msg='Caching of swath coordinates failed with longitude wrapping')
+
+        self.assertTrue(lons2.min() > -180 and lons2.max() < 180,
+                        'Wrapping of longitudes failed for SwathDefinition')
+
+    def test_area_equal(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                            {'a': '6378144.0',
+                                             'b': '6356759.0',
+                                             'lat_0': '50.00',
+                                             'lat_ts': '50.00',
+                                             'lon_0': '8.00',
+                                             'proj': 'stere'},
+                                            800,
+                                            800,
+                                            [-1370912.72,
+                                                -909968.64000000001,
+                                                1029087.28,
+                                                1490031.3600000001])
+        self.assertFalse(
+            area_def != area_def2, 'area_defs are not equal as expected')
+
+    def test_not_area_equal(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+
+        msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+                                           'msg_full',
+                                           {'a': '6378169.0',
+                                            'b': '6356584.0',
+                                            'h': '35785831.0',
+                                            'lon_0': '0',
+                                            'proj': 'geos'},
+                                           3712,
+                                           3712,
+                                           [-5568742.4000000004,
+                                               -5568742.4000000004,
+                                               5568742.4000000004,
+                                               5568742.4000000004]
+                                           )
+        self.assertFalse(
+            area_def == msg_area, 'area_defs are not expected to be equal')
+
+    def test_swath_equal(self):
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats = np.array([65.9, 65.86, 65.82, 65.78])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        swath_def2 = geometry.SwathDefinition(lons, lats)
+        self.assertFalse(
+            swath_def != swath_def2, 'swath_defs are not equal as expected')
+
+    def test_swath_not_equal(self):
+        lats1 = np.array([65.9, 65.86, 65.82, 65.78])
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats2 = np.array([65.91, 65.85, 65.80, 65.75])
+        swath_def = geometry.SwathDefinition(lons, lats1)
+        swath_def2 = geometry.SwathDefinition(lons, lats2)
+        self.assertFalse(
+            swath_def == swath_def2, 'swath_defs are not expected to be equal')
+
+    def test_swath_equal_area(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+
+        swath_def = geometry.SwathDefinition(*area_def.get_lonlats())
+
+        self.assertFalse(
+            swath_def != area_def, "swath_def and area_def should be equal")
+
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+
+        self.assertFalse(
+            area_def != swath_def, "swath_def and area_def should be equal")
+
+    def test_swath_not_equal_area(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats = np.array([65.9, 65.86, 65.82, 65.78])
+        swath_def = geometry.SwathDefinition(lons, lats)
+
+        self.assertFalse(
+            swath_def == area_def, "swath_def and area_def should be different")
+
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           800,
+                                           800,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+
+        self.assertFalse(
+            area_def == swath_def, "swath_def and area_def should be different")
+
+    def test_concat_1d(self):
+        lons1 = np.array([1, 2, 3])
+        lats1 = np.array([1, 2, 3])
+        lons2 = np.array([4, 5, 6])
+        lats2 = np.array([4, 5, 6])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def_concat = swath_def1.concatenate(swath_def2)
+        expected = np.array([1, 2, 3, 4, 5, 6])
+        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and
+                        np.array_equal(swath_def_concat.lats, expected),
+                        'Failed to concatenate 1D swaths')
+
+    def test_concat_2d(self):
+        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
+        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def_concat = swath_def1.concatenate(swath_def2)
+        expected = np.array(
+            [[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
+        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and
+                        np.array_equal(swath_def_concat.lats, expected),
+                        'Failed to concatenate 2D swaths')
+
+    def test_append_1d(self):
+        lons1 = np.array([1, 2, 3])
+        lats1 = np.array([1, 2, 3])
+        lons2 = np.array([4, 5, 6])
+        lats2 = np.array([4, 5, 6])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def1.append(swath_def2)
+        expected = np.array([1, 2, 3, 4, 5, 6])
+        self.assertTrue(np.array_equal(swath_def1.lons, expected) and
+                        np.array_equal(swath_def1.lats, expected),
+                        'Failed to append 1D swaths')
+
+    def test_append_2d(self):
+        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
+        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def1.append(swath_def2)
+        expected = np.array(
+            [[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
+        self.assertTrue(np.array_equal(swath_def1.lons, expected) and
+                        np.array_equal(swath_def1.lats, expected),
+                        'Failed to append 2D swaths')
+
+    def test_grid_filter_valid(self):
+        lons = np.array([-170, -30, 30, 170])
+        lats = np.array([20, -40, 50, -80])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        filter_area = geometry.AreaDefinition('test', 'test', 'test',
+                                              {'proj': 'eqc', 'lon_0': 0.0,
+                                                  'lat_0': 0.0},
+                                              8, 8,
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
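+        # 1 marks grid cells accepted by the filter: the western half of the
+        # 8x8 eqc grid in the northern rows and the eastern half in the
+        # southern rows.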
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter)
+        valid_index = grid_filter.get_valid_index(swath_def)
+        expected = np.array([1, 0, 0, 1])
+        self.assertTrue(
+            np.array_equal(valid_index, expected), 'Failed to find grid filter')
+
+    def test_grid_filter(self):
+        lons = np.array([-170, -30, 30, 170])
+        lats = np.array([20, -40, 50, -80])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        data = np.array([1, 2, 3, 4])
+        filter_area = geometry.AreaDefinition('test', 'test', 'test',
+                                              {'proj': 'eqc', 'lon_0': 0.0,
+                                                  'lat_0': 0.0},
+                                              8, 8,
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter)
+        swath_def_f, data_f = grid_filter.filter(swath_def, data)
+        expected = np.array([1, 4])
+        self.assertTrue(
+            np.array_equal(data_f, expected), 'Failed grid filtering data')
+        expected_lons = np.array([-170, 170])
+        expected_lats = np.array([20, -80])
+        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons)
+                        and np.array_equal(swath_def_f.lats[:], expected_lats),
+                        'Failed finding grid filtering lon lats')
+
+    def test_grid_filter2D(self):
+        lons = np.array([[-170, -30, 30, 170],
+                         [-170, -30, 30, 170]])
+        lats = np.array([[20, -40, 50, -80],
+                         [25, -35, 55, -75]])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        data1 = np.ones((2, 4))
+        data2 = np.ones((2, 4)) * 2
+        data3 = np.ones((2, 4)) * 3
+        data = np.dstack((data1, data2, data3))
+        filter_area = geometry.AreaDefinition('test', 'test', 'test',
+                                              {'proj': 'eqc', 'lon_0': 0.0,
+                                                  'lat_0': 0.0},
+                                              8, 8,
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter, nprocs=2)
+        swath_def_f, data_f = grid_filter.filter(swath_def, data)
+        expected = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])
+        self.assertTrue(
+            np.array_equal(data_f, expected), 'Failed 2D grid filtering data')
+        expected_lons = np.array([-170, 170, -170, 170])
+        expected_lats = np.array([20, -80, 25, -75])
+        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons)
+                        and np.array_equal(swath_def_f.lats[:], expected_lats),
+                        'Failed finding 2D grid filtering lon lats')
+
+    def test_boundary(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           10,
+                                           10,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+        proj_x_boundary, proj_y_boundary = area_def.proj_x_coords, area_def.proj_y_coords
+        expected_x = np.array([-1250912.72, -1010912.72, -770912.72,
+                               -530912.72, -290912.72, -50912.72, 189087.28,
+                               429087.28, 669087.28, 909087.28])
+        expected_y = np.array([1370031.36, 1130031.36, 890031.36, 650031.36,
+                               410031.36, 170031.36, -69968.64, -309968.64,
+                               -549968.64, -789968.64])
+        self.assertTrue(np.allclose(proj_x_boundary, expected_x),
+                        'Failed to find projection x coords')
+        self.assertTrue(np.allclose(proj_y_boundary, expected_y),
+                        'Failed to find projection y coords')
+
+    def test_area_extent_ll(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                           {'a': '6378144.0',
+                                            'b': '6356759.0',
+                                            'lat_0': '50.00',
+                                            'lat_ts': '50.00',
+                                            'lon_0': '8.00',
+                                            'proj': 'stere'},
+                                           10,
+                                           10,
+                                           [-1370912.72,
+                                               -909968.64000000001,
+                                               1029087.28,
+                                               1490031.3600000001])
+        self.assertAlmostEqual(sum(area_def.area_extent_ll),
+                               122.06448093539757, 5,
+                               'Failed to get lons and lats of area extent')
+
+    @tmp
+    def test_latlong_area(self):
+        area_def = geometry.AreaDefinition('', '', '',
+                                           {'proj': 'latlong'},
+                                           360, 180,
+                                           [-180, -90, 180, 90])
+        lons, lats = area_def.get_lonlats()
+        self.assertEqual(lons[0, 0], -179.5)
+        self.assertEqual(lats[0, 0], 89.5)
+
+    def test_get_xy_from_lonlat(self):
+        """Test the function get_xy_from_lonlat"""
+        from pyresample import utils
+        area_id = 'test'
+        area_name = 'Test area with 2x2 pixels'
+        proj_id = 'test'
+        x_size = 2
+        y_size = 2
+        area_extent = [1000000, 0, 1050000, 50000]
+        proj_dict = {"proj": 'laea',
+                     'lat_0': '60',
+                     'lon_0': '0',
+                     'a': '6371228.0', 'units': 'm'}
+        area_def = utils.get_area_def(area_id,
+                                      area_name,
+                                      proj_id,
+                                      proj_dict,
+                                      x_size, y_size,
+                                      area_extent)
+        import pyproj
+        p__ = pyproj.Proj(proj_dict)
+        lon_ul, lat_ul = p__(1000000, 50000, inverse=True)
+        lon_ur, lat_ur = p__(1050000, 50000, inverse=True)
+        lon_ll, lat_ll = p__(1000000, 0, inverse=True)
+        lon_lr, lat_lr = p__(1050000, 0, inverse=True)
+
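+        # Nudge each corner slightly inwards so the query point falls inside
+        # the expected pixel of the 2x2 grid.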
+        eps_lonlat = 0.01
+        eps_meters = 100
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ul + eps_lonlat,
+                                               lat_ul - eps_lonlat)
+        x_expect, y_expect = 0, 0
+        self.assertEqual(x__, x_expect)
+        self.assertEqual(y__, y_expect)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ur - eps_lonlat,
+                                               lat_ur - eps_lonlat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 0)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ll + eps_lonlat,
+                                               lat_ll + eps_lonlat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 1)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_lr - eps_lonlat,
+                                               lat_lr + eps_lonlat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 - eps_meters, 25000 - eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 + eps_meters, 25000 - eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 - eps_meters, 25000 + eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 0)
+
+        lon, lat = p__(1025000 + eps_meters, 25000 + eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 0)
+
+        lon, lat = p__(999000, -10, inverse=True)
+        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, lon, lat)
+        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, 0., 0.)
+
+        # Test getting arrays back:
+        lons = [lon_ll + eps_lonlat, lon_ur - eps_lonlat]
+        lats = [lat_ll + eps_lonlat, lat_ur - eps_lonlat]
+        x__, y__ = area_def.get_xy_from_lonlat(lons, lats)
+
+        x_expects = np.array([0, 1])
+        y_expects = np.array([1, 0])
+        self.assertTrue((x__.data == x_expects).all())
+        self.assertTrue((y__.data == y_expects).all())
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pyresample/test/test_grid.py b/pyresample/test/test_grid.py
new file mode 100644
index 0000000..9a7f53d
--- /dev/null
+++ b/pyresample/test/test_grid.py
@@ -0,0 +1,201 @@
+import copy
+import unittest
+
+import numpy as np
+
+from pyresample import grid, geometry, utils
+
+
+def mp(f):
+    f.mp = True
+    return f
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+
+class Test(unittest.TestCase):
+
+    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                       {'a': '6378144.0',
+                                        'b': '6356759.0',
+                                        'lat_0': '50.00',
+                                        'lat_ts': '50.00',
+                                        'lon_0': '8.00',
+                                        'proj': 'stere'},
+                                       800,
+                                       800,
+                                       [-1370912.72,
+                                           -909968.64000000001,
+                                           1029087.28,
+                                           1490031.3600000001])
+
+    area_def2 = geometry.AreaDefinition('areaD2', 'Europe (3km, HRV, VTC)', 'areaD2',
+                                        {'a': '6378144.0',
+                                         'b': '6356759.0',
+                                         'lat_0': '50.00',
+                                         'lat_ts': '50.00',
+                                         'lon_0': '8.00',
+                                         'proj': 'stere'},
+                                        5,
+                                        5,
+                                        [-1370912.72,
+                                            -909968.64000000001,
+                                            1029087.28,
+                                            1490031.3600000001])
+
+    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+                                       'msg_full',
+                                       {'a': '6378169.0',
+                                        'b': '6356584.0',
+                                        'h': '35785831.0',
+                                        'lon_0': '0',
+                                        'proj': 'geos'},
+                                       3712,
+                                       3712,
+                                       [-5568742.4000000004,
+                                           -5568742.4000000004,
+                                           5568742.4000000004,
+                                           5568742.4000000004]
+                                       )
+
+    def test_linesample(self):
+        data = np.fromfunction(lambda y, x: y * x, (40, 40))
+        rows = np.array([[1, 2], [3, 4]])
+        cols = np.array([[25, 26], [27, 28]])
+        res = grid.get_image_from_linesample(rows, cols, data)
+        expected = np.array([[25., 52.], [81., 112.]])
+        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
+
+    def test_linesample_multi(self):
+        data1 = np.fromfunction(lambda y, x: y * x, (40, 40))
+        data2 = np.fromfunction(lambda y, x: 2 * y * x, (40, 40))
+        data3 = np.fromfunction(lambda y, x: 3 * y * x, (40, 40))
+        data = np.zeros((40, 40, 3))
+        data[:, :, 0] = data1
+        data[:, :, 1] = data2
+        data[:, :, 2] = data3
+        rows = np.array([[1, 2], [3, 4]])
+        cols = np.array([[25, 26], [27, 28]])
+        res = grid.get_image_from_linesample(rows, cols, data)
+        expected = np.array([[[25., 50., 75.],
+                              [52., 104., 156.]],
+                             [[81., 162., 243.],
+                              [112.,  224.,  336.]]])
+        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
+
+    def test_from_latlon(self):
+        data = np.fromfunction(lambda y, x: y * x, (800, 800))
+        lons = np.fromfunction(lambda y, x: x, (10, 10))
+        lats = np.fromfunction(lambda y, x: 50 - (5.0 / 10) * y, (10, 10))
+        #source_def = grid.AreaDefinition.get_from_area_def(self.area_def)
+        source_def = self.area_def
+        res = grid.get_image_from_lonlats(lons, lats, source_def, data)
+        expected = np.array([[129276.,  141032.,  153370.,  165804.,  178334.,  190575.,
+                              202864.,  214768.,  226176.,  238080.],
+                             [133056.,  146016.,  158808.,  171696.,  184320.,  196992.,
+                              209712.,  222480.,  234840.,  247715.],
+                             [137026.,  150150.,  163370.,  177215.,  190629.,  203756.,
+                              217464.,  230256.,  243048.,  256373.],
+                             [140660.,  154496.,  168714.,  182484.,  196542.,  210650.,
+                              224257.,  238464.,  251712.,  265512.],
+                             [144480.,  158484.,  173148.,  187912.,  202776.,  217358.,
+                              231990.,  246240.,  259920.,  274170.],
+                             [147968.,  163261.,  178398.,  193635.,  208616.,  223647.,
+                              238728.,  253859.,  268584.,  283898.],
+                             [151638.,  167121.,  182704.,  198990.,  214775.,  230280.,
+                              246442.,  261617.,  276792.,  292574.],
+                             [154980.,  171186.,  187860.,  204016.,  220542.,  237120.,
+                              253125.,  269806.,  285456.,  301732.],
+                             [158500.,  175536.,  192038.,  209280.,  226626.,  243697.,
+                              260820.,  277564.,  293664.,  310408.],
+                             [161696.,  179470.,  197100.,  214834.,  232320.,  250236.,
+                              267448.,  285090.,  302328.,  320229.]])
+        self.assertTrue(
+            np.array_equal(res, expected), 'Sampling from lat lon failed')
+
+    def test_proj_coords(self):
+        #res = grid.get_proj_coords(self.area_def2)
+        res = self.area_def2.get_proj_coords()
+        cross_sum = res[0].sum() + res[1].sum()
+        expected = 2977965.9999999963
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Calculation of proj coords failed')
+
+    def test_latlons(self):
+        #res = grid.get_lonlats(self.area_def2)
+        res = self.area_def2.get_lonlats()
+        cross_sum = res[0].sum() + res[1].sum()
+        expected = 1440.8280578215431
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Calculation of lat lons failed')
+
+    @mp
+    def test_latlons_mp(self):
+        #res = grid.get_lonlats(self.area_def2, nprocs=2)
+        res = self.area_def2.get_lonlats(nprocs=2)
+        cross_sum = res[0].sum() + res[1].sum()
+        expected = 1440.8280578215431
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Calculation of lat lons failed')
+
+    def test_resampled_image(self):
+        data = np.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        target_def = self.area_def
+        source_def = self.msg_area
+        res = grid.get_resampled_image(
+            target_def, source_def, data, segments=1)
+        cross_sum = res.sum()
+        expected = 399936.39392500359
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Resampling of image failed')
+
+    @tmp
+    def test_generate_linesample(self):
+        data = np.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        row_indices, col_indices = utils.generate_quick_linesample_arrays(self.msg_area,
+                                                                          self.area_def)
+        res = data[row_indices, col_indices]
+        cross_sum = res.sum()
+        expected = 399936.39392500359
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Generate linesample failed')
+        self.assertFalse(row_indices.dtype != np.uint16 or col_indices.dtype != np.uint16,
+                         'Generate linesample failed. Downcast to uint16 expected')
+
+    @mp
+    def test_resampled_image_mp(self):
+        data = np.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        target_def = self.area_def
+        source_def = self.msg_area
+        res = grid.get_resampled_image(
+            target_def, source_def, data, nprocs=2, segments=1)
+        cross_sum = res.sum()
+        expected = 399936.39392500359
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Resampling of image mp failed')
+
+    def test_single_lonlat(self):
+        lon, lat = self.area_def.get_lonlat(400, 400)
+        self.assertAlmostEqual(
+            lon, 5.5028467120975835, msg='Resampling of single lon failed')
+        self.assertAlmostEqual(
+            lat, 52.566998432390619, msg='Resampling of single lat failed')
+
+    def test_proj4_string(self):
+        proj4_string = self.area_def.proj4_string
+        expected_string = '+a=6378144.0 +b=6356759.0 +lat_ts=50.00 +lon_0=8.00 +proj=stere +lat_0=50.00'
+        self.assertEqual(
+            frozenset(proj4_string.split()), frozenset(expected_string.split()))
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
diff --git a/pyresample/test/test_image.py b/pyresample/test/test_image.py
new file mode 100644
index 0000000..8342583
--- /dev/null
+++ b/pyresample/test/test_image.py
@@ -0,0 +1,225 @@
+import os
+import unittest
+
+import numpy
+
+from pyresample import image, geometry, utils
+
+
+def mask(f):
+    f.mask = True
+    return f
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+
+class Test(unittest.TestCase):
+
+    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                       {'a': '6378144.0',
+                                        'b': '6356759.0',
+                                        'lat_0': '50.00',
+                                        'lat_ts': '50.00',
+                                        'lon_0': '8.00',
+                                        'proj': 'stere'},
+                                       800,
+                                       800,
+                                       [-1370912.72,
+                                           -909968.64000000001,
+                                           1029087.28,
+                                           1490031.3600000001])
+
+    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+                                       'msg_full',
+                                       {'a': '6378169.0',
+                                        'b': '6356584.0',
+                                        'h': '35785831.0',
+                                        'lon_0': '0',
+                                        'proj': 'geos'},
+                                       3712,
+                                       3712,
+                                       [-5568742.4000000004,
+                                           -5568742.4000000004,
+                                           5568742.4000000004,
+                                           5568742.4000000004]
+                                       )
+
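+    # The same full-disc MSG geometry at a quarter of the resolution
+    # (928 = 3712 / 4), used by the resize test below.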
+    msg_area_resize = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+                                              'msg_full',
+                                              {'a': '6378169.0',
+                                               'b': '6356584.0',
+                                               'h': '35785831.0',
+                                               'lon_0': '0',
+                                               'proj': 'geos'},
+                                              928,
+                                              928,
+                                              [-5568742.4000000004,
+                                                  -5568742.4000000004,
+                                                  5568742.4000000004,
+                                                  5568742.4000000004]
+                                              )
+
+    @tmp
+    def test_image(self):
+        data = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 399936.39392500359
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='ImageContainer resampling quick failed')
+
+    @tmp
+    def test_image_segments(self):
+        data = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=8)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 399936.39392500359
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='ImageContainer resampling quick segments failed')
+
+    def test_return_type(self):
+        data = numpy.ones((3712, 3712)).astype('int')
+        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        self.assertTrue(
+            data.dtype is res.dtype, msg='Failed to maintain input data type')
+
+    @mask
+    def test_masked_image(self):
+        data = numpy.zeros((3712, 3712))
+        mask = numpy.zeros((3712, 3712))
+        mask[:, 1865:] = 1
+        data_masked = numpy.ma.array(data, mask=mask)
+        msg_con = image.ImageContainerQuick(
+            data_masked, self.msg_area, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        resampled_mask = res.mask.astype('int')
+        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'),
+                                  sep=' ').reshape((800, 800))
+        self.assertTrue(numpy.array_equal(
+            resampled_mask, expected), msg='Failed to resample masked array')
+
+    @mask
+    def test_masked_image_fill(self):
+        data = numpy.zeros((3712, 3712))
+        mask = numpy.zeros((3712, 3712))
+        mask[:, 1865:] = 1
+        data_masked = numpy.ma.array(data, mask=mask)
+        msg_con = image.ImageContainerQuick(data_masked, self.msg_area,
+                                            fill_value=None, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        resampled_mask = res.mask.astype('int')
+        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'),
+                                  sep=' ').reshape((800, 800))
+        self.assertTrue(numpy.array_equal(
+            resampled_mask, expected), msg='Failed to resample masked array')
+
+    def test_nearest_neighbour(self):
+        data = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        msg_con = image.ImageContainerNearest(
+            data, self.msg_area, 50000, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 399936.783062
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='ImageContainer resampling nearest neighbour failed')
+
+    def test_nearest_resize(self):
+        data = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        msg_con = image.ImageContainerNearest(
+            data, self.msg_area, 50000, segments=1)
+        area_con = msg_con.resample(self.msg_area_resize)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 2212023.0175830
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='ImageContainer resampling nearest neighbour failed')
+
+    def test_nearest_neighbour_multi(self):
+        data1 = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        data2 = numpy.fromfunction(
+            lambda y, x: y * x * 10 ** -6, (3712, 3712)) * 2
+        data = numpy.dstack((data1, data2))
+        msg_con = image.ImageContainerNearest(
+            data, self.msg_area, 50000, segments=1)
+        area_con = msg_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum1 = res[:, :, 0].sum()
+        expected1 = 399936.783062
+        self.assertAlmostEqual(cross_sum1, expected1,
+                               msg='ImageContainer resampling nearest neighbour multi failed')
+        cross_sum2 = res[:, :, 1].sum()
+        expected2 = 399936.783062 * 2
+        self.assertAlmostEqual(cross_sum2, expected2,
+                               msg='ImageContainer resampling nearest neighbour multi failed')
+
+    def test_nearest_neighbour_multi_preproc(self):
+        data1 = numpy.fromfunction(lambda y, x: y * x * 10 ** -6, (3712, 3712))
+        data2 = numpy.fromfunction(
+            lambda y, x: y * x * 10 ** -6, (3712, 3712)) * 2
+        data = numpy.dstack((data1, data2))
+        msg_con = image.ImageContainer(data, self.msg_area)
+        #area_con = msg_con.resample_area_nearest_neighbour(self.area_def, 50000)
+        row_indices, col_indices = \
+            utils.generate_nearest_neighbour_linesample_arrays(self.msg_area,
+                                                               self.area_def,
+                                                               50000)
+        res = msg_con.get_array_from_linesample(row_indices, col_indices)
+        cross_sum1 = res[:, :, 0].sum()
+        expected1 = 399936.783062
+        self.assertAlmostEqual(cross_sum1, expected1,
+                               msg='ImageContainer resampling nearest neighbour multi preproc failed')
+        cross_sum2 = res[:, :, 1].sum()
+        expected2 = 399936.783062 * 2
+        self.assertAlmostEqual(cross_sum2, expected2,
+                               msg='ImageContainer resampling nearest neighbour multi preproc failed')
+
+    def test_nearest_swath(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        swath_con = image.ImageContainerNearest(
+            data, swath_def, 50000, segments=1)
+        area_con = swath_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='ImageContainer swath resampling nearest failed')
+
+    def test_nearest_swath_segments(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        data = numpy.dstack(3 * (data,))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        swath_con = image.ImageContainerNearest(
+            data, swath_def, 50000, segments=2)
+        area_con = swath_con.resample(self.area_def)
+        res = area_con.image_data
+        cross_sum = res.sum()
+        expected = 3 * 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='ImageContainer swath segments resampling nearest failed')
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
diff --git a/pyresample/test/test_kd_tree.py b/pyresample/test/test_kd_tree.py
new file mode 100644
index 0000000..6859bf8
--- /dev/null
+++ b/pyresample/test/test_kd_tree.py
@@ -0,0 +1,907 @@
+from __future__ import with_statement
+
+import os
+import sys
+import unittest
+
+import warnings
+if sys.version_info < (2, 6):
+    warnings.simplefilter("ignore")
+else:
+    warnings.simplefilter("always")
+
+import numpy
+
+from pyresample import kd_tree, utils, geometry, data_reduce
+
+
+def mp(f):
+    f.mp = True
+    return f
+
+
+def quick(f):
+    f.quick = True
+    return f
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+
+class Test(unittest.TestCase):
+
+    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+                                       {'a': '6378144.0',
+                                        'b': '6356759.0',
+                                        'lat_0': '50.00',
+                                        'lat_ts': '50.00',
+                                        'lon_0': '8.00',
+                                        'proj': 'stere'},
+                                       800,
+                                       800,
+                                       [-1370912.72,
+                                           -909968.64000000001,
+                                           1029087.28,
+                                           1490031.3600000001])
+
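+    # A tiny three-point swath and a single-point target grid shared by the
+    # basic kd_tree tests below.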
+    tdata = numpy.array([1, 2, 3])
+    tlons = numpy.array([11.280789, 12.649354, 12.080402])
+    tlats = numpy.array([56.011037, 55.629675, 55.641535])
+    tswath = geometry.SwathDefinition(lons=tlons, lats=tlats)
+    tgrid = geometry.CoordinateDefinition(lons=numpy.array([12.562036]),
+                                          lats=numpy.array([55.715613]))
+
+    def test_nearest_base(self):
+        res = kd_tree.resample_nearest(self.tswath,
+                                       self.tdata.ravel(), self.tgrid,
+                                       100000, reduce_data=False, segments=1)
+        self.assertTrue(res[0] == 2, 'Failed to calculate nearest neighbour')
+
+    @tmp
+    def test_gauss_base(self):
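+        # Only check for the neighbour warning on Python versions where
+        # catch_warnings records it reliably.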
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+
+            res = kd_tree.resample_gauss(self.tswath,
+                                         self.tdata.ravel(), self.tgrid,
+                                         50000, 25000, reduce_data=False, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(self.tswath,
+                                             self.tdata.ravel(), self.tgrid,
+                                             50000, 25000, reduce_data=False, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour warning')
+                self.assertFalse(('Searching' not in str(
+                    w[0].message)), 'Failed to create correct neighbour warning')
+        self.assertAlmostEqual(res[0], 2.2020729, 5,
+                               'Failed to calculate gaussian weighting')
+
+    def test_custom_base(self):
+        def wf(dist):
+            return 1 - dist / 100000.0
+
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_custom(self.tswath,
+                                          self.tdata.ravel(), self.tgrid,
+                                          50000, wf, reduce_data=False, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_custom(self.tswath,
+                                              self.tdata.ravel(), self.tgrid,
+                                              50000, wf, reduce_data=False, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour warning')
+                self.assertFalse(('Searching' not in str(
+                    w[0].message)), 'Failed to create correct neighbour warning')
+        self.assertAlmostEqual(res[0], 2.4356757, 5,
+                               'Failed to calculate custom weighting')
+
+    @tmp
+    def test_gauss_uncert(self):
+        sigma = utils.fwhm2sigma(41627.730557884883)
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res, stddev, count = kd_tree.resample_gauss(self.tswath, self.tdata,
+                                                        self.tgrid, 100000, sigma,
+                                                        with_uncert=True)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res, stddev, count = kd_tree.resample_gauss(self.tswath, self.tdata,
+                                                            self.tgrid, 100000, sigma,
+                                                            with_uncert=True)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour warning')
+                self.assertFalse(('Searching' not in str(
+                    w[0].message)), 'Failed to create correct neighbour warning')
+
+        expected_res = 2.20206560694
+        expected_stddev = 0.707115076173
+        expected_count = 3
+        self.assertAlmostEqual(res[0], expected_res, 5,
+                               'Failed to calculate gaussian weighting with uncertainty')
+        self.assertAlmostEqual(stddev[0], expected_stddev, 5,
+                               'Failed to calculate uncertainty for gaussian weighting')
+        self.assertEqual(
+            count[0], expected_count, 'Wrong data point count for gaussian weighting with uncertainty')
+
+    @tmp
+    def test_custom_uncert(self):
+        def wf(dist):
+            return 1 - dist / 100000.0
+
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res, stddev, counts = kd_tree.resample_custom(self.tswath,
+                                                          self.tdata, self.tgrid,
+                                                          100000, wf, with_uncert=True)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res, stddev, counts = kd_tree.resample_custom(self.tswath,
+                                                              self.tdata, self.tgrid,
+                                                              100000, wf, with_uncert=True)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour warning')
+                self.assertFalse(('Searching' not in str(
+                    w[0].message)), 'Failed to create correct neighbour warning')
+
+        self.assertAlmostEqual(res[0], 2.32193149, 5,
+                               'Failed to calculate custom weighting with uncertainty')
+        self.assertAlmostEqual(stddev[0], 0.81817972, 5,
+                               'Failed to calculate uncertainty for custom weighting')
+        self.assertEqual(
+            counts[0], 3, 'Wrong data point count for custom weighting with uncertainty')
+
+    def test_nearest(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, segments=1)
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling nearest failed')
+
+    def test_nearest_1d(self):
+        data = numpy.fromfunction(lambda x, y: x * y, (800, 800))
+        lons = numpy.fromfunction(lambda x: 3 + x / 100., (500,))
+        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(self.area_def, data.ravel(),
+                                       swath_def, 50000, segments=1)
+        cross_sum = res.sum()
+        expected = 35821299.0
+        self.assertEqual(res.shape, (500,),
+                         msg='Swath resampling nearest 1d failed')
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling nearest 1d failed')
+
+    def test_nearest_empty(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, segments=1)
+        cross_sum = res.sum()
+        expected = 0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling nearest empty failed')
+
+    def test_nearest_empty_multi(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data_multi,
+                                       self.area_def, 50000, segments=1)
+        self.assertEqual(res.shape, (800, 800, 3),
+                         msg='Swath resampling nearest empty multi failed')
+
+    def test_nearest_empty_multi_masked(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data_multi,
+                                       self.area_def, 50000, segments=1,
+                                       fill_value=None)
+        self.assertEqual(res.shape, (800, 800, 3),
+                         msg='Swath resampling nearest empty multi masked failed')
+
+    def test_nearest_empty_masked(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, segments=1,
+                                       fill_value=None)
+        cross_sum = res.mask.sum()
+        expected = res.size
+        self.assertTrue(cross_sum == expected,
+                        msg='Swath resampling nearest empty masked failed')
+
+    def test_nearest_segments(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, segments=2)
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling nearest segments failed')
+
+    def test_nearest_remap(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, segments=1)
+        remap = kd_tree.resample_nearest(self.area_def, res.ravel(),
+                                         swath_def, 5000, segments=1)
+        cross_sum = remap.sum()
+        expected = 22275.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Grid remapping nearest failed')
+
+    def test_nearest_mp(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, nprocs=2, segments=1)
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling mp nearest failed')
+
+    def test_nearest_multi(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        res = kd_tree.resample_nearest(swath_def, data_multi,
+                                       self.area_def, 50000, segments=1)
+        cross_sum = res.sum()
+        expected = 3 * 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath multi channel resampling nearest failed')
+
+    def test_nearest_multi_unraveled(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.dstack((data, data, data))
+        res = kd_tree.resample_nearest(swath_def, data_multi,
+                                       self.area_def, 50000, segments=1)
+        cross_sum = res.sum()
+        expected = 3 * 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath multi channel resampling nearest failed')
+
+    def test_gauss_sparse(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_gauss(swath_def, data.ravel(),
+                                     self.area_def, 50000, 25000, fill_value=-1, segments=1)
+        cross_sum = res.sum()
+        expected = 15387753.9852
+        self.assertAlmostEqual(cross_sum, expected, places=3,
+                               msg='Swath gauss sparse nearest failed')
+
+    def test_gauss(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -5, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data.ravel(),
+                                         self.area_def, 50000, 25000, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data.ravel(),
+                                             self.area_def, 50000, 25000, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 4872.81050892
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath resampling gauss failed')
+
+    def test_gauss_fwhm(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -5, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data.ravel(),
+                                         self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data.ravel(),
+                                             self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 4872.81050892
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath resampling gauss failed')
+
+    def test_gauss_multi(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data_multi,
+                                         self.area_def, 50000, [25000, 15000, 10000], segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,
+                                             self.area_def, 50000, [25000, 15000, 10000], segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel resampling gauss failed')
+
+    @tmp
+    def test_gauss_multi_uncert(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res, stddev, counts = kd_tree.resample_gauss(swath_def, data_multi,
+                                                         self.area_def, 50000, [
+                                                             25000, 15000, 10000],
+                                                         segments=1, with_uncert=True)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res, stddev, counts = kd_tree.resample_gauss(swath_def, data_multi,
+                                                             self.area_def, 50000, [
+                                                                 25000, 15000, 10000],
+                                                             segments=1, with_uncert=True)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        cross_sum_stddev = stddev.sum()
+        cross_sum_counts = counts.sum()
+        expected = 1461.84313918
+        expected_stddev = 0.446204424799
+        expected_counts = 4934802.0
+        self.assertTrue(res.shape == stddev.shape and stddev.shape ==
+                        counts.shape and counts.shape == (800, 800, 3))
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel resampling gauss failed on data')
+        self.assertAlmostEqual(cross_sum_stddev, expected_stddev,
+                               msg='Swath multi channel resampling gauss failed on stddev')
+        self.assertAlmostEqual(cross_sum_counts, expected_counts,
+                               msg='Swath multi channel resampling gauss failed on counts')
+
+    def test_gauss_multi_mp(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data_multi,
+                                         self.area_def, 50000, [
+                                             25000, 15000, 10000],
+                                         nprocs=2, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,
+                                             self.area_def, 50000, [
+                                                 25000, 15000, 10000],
+                                             nprocs=2, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel resampling gauss failed')
+
+    def test_gauss_multi_mp_segments(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data_multi,
+                                         self.area_def, 50000, [
+                                             25000, 15000, 10000],
+                                         nprocs=2, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,
+                                             self.area_def, 50000, [
+                                                 25000, 15000, 10000],
+                                             nprocs=2, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel segments resampling gauss failed')
+
+    def test_gauss_multi_mp_segments_empty(self):
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 165 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        res = kd_tree.resample_gauss(swath_def, data_multi,
+                                     self.area_def, 50000, [
+                                         25000, 15000, 10000],
+                                     nprocs=2, segments=1)
+        cross_sum = res.sum()
+        self.assertTrue(cross_sum == 0,
+                        msg=('Swath multi channel segments empty '
+                             'resampling gauss failed'))
+
+    def test_custom(self):
+        def wf(dist):
+            return 1 - dist / 100000.0
+
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -5, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_custom(swath_def, data.ravel(),
+                                          self.area_def, 50000, wf, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_custom(swath_def, data.ravel(),
+                                              self.area_def, 50000, wf, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 4872.81050729
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath custom resampling failed')
+
+    def test_custom_multi(self):
+        def wf1(dist):
+            return 1 - dist / 100000.0
+
+        def wf2(dist):
+            return 1
+
+        def wf3(dist):
+            return numpy.cos(dist) ** 2
+
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_custom(swath_def, data_multi,
+                                          self.area_def, 50000, [wf1, wf2, wf3], segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_custom(swath_def, data_multi,
+                                              self.area_def, 50000, [wf1, wf2, wf3], segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel custom resampling failed')
+
+    def test_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(
+            lambda y, x: -180 + (360.0 / 1000) * x, (1000, 1000))
+        lats = numpy.fromfunction(
+            lambda y, x: -90 + (180.0 / 1000) * y, (1000, 1000))
+        grid_lons, grid_lats = self.area_def.get_lonlats()
+        lons, lats, data = data_reduce.swath_from_lonlat_grid(grid_lons, grid_lats,
+                                                              lons, lats, data,
+                                                              7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+
+    def test_reduce_boundary(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(
+            lambda y, x: -180 + (360.0 / 1000) * x, (1000, 1000))
+        lats = numpy.fromfunction(
+            lambda y, x: -90 + (180.0 / 1000) * y, (1000, 1000))
+        boundary_lonlats = self.area_def.get_boundary_lonlats()
+        lons, lats, data = data_reduce.swath_from_lonlat_boundaries(boundary_lonlats[0],
+                                                                    boundary_lonlats[
+                                                                        1],
+                                                                    lons, lats, data,
+                                                                    7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+
+    def test_cartesian_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(
+            lambda y, x: -180 + (360.0 / 1000) * x, (1000, 1000))
+        lats = numpy.fromfunction(
+            lambda y, x: -90 + (180.0 / 1000) * y, (1000, 1000))
+        #grid = utils.generate_cartesian_grid(self.area_def)
+        grid = self.area_def.get_cartesian_coords()
+        lons, lats, data = data_reduce.swath_from_cartesian_grid(grid, lons, lats, data,
+                                                                 7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Cartesian reduce data failed')
+
+    def test_area_con_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(
+            lambda y, x: -180 + (360.0 / 1000) * x, (1000, 1000))
+        lats = numpy.fromfunction(
+            lambda y, x: -90 + (180.0 / 1000) * y, (1000, 1000))
+        grid_lons, grid_lats = self.area_def.get_lonlats()
+        valid_index = data_reduce.get_valid_index_from_lonlat_grid(grid_lons, grid_lats,
+                                                                   lons, lats, 7000)
+        data = data[valid_index]
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+
+    def test_area_con_cartesian_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(
+            lambda y, x: -180 + (360.0 / 1000) * x, (1000, 1000))
+        lats = numpy.fromfunction(
+            lambda y, x: -90 + (180.0 / 1000) * y, (1000, 1000))
+        cart_grid = self.area_def.get_cartesian_coords()
+        valid_index = data_reduce.get_valid_index_from_cartesian_grid(cart_grid,
+                                                                      lons, lats, 7000)
+        data = data[valid_index]
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(
+            cross_sum, expected, msg='Cartesian reduce data failed')
+
+    def test_masked_nearest(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_nearest(swath_def, masked_data.ravel(),
+                                       self.area_def, 50000, segments=1)
+        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                    'test_files',
+                                                    'mask_test_nearest_mask.dat'),
+                                       sep=' ').reshape((800, 800))
+        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                    'test_files',
+                                                    'mask_test_nearest_data.dat'),
+                                       sep=' ').reshape((800, 800))
+        self.assertTrue(numpy.array_equal(expected_mask, res.mask),
+                        msg='Resampling of swath mask failed')
+        self.assertTrue(numpy.array_equal(expected_data, res.data),
+                        msg='Resampling of swath masked data failed')
+
+    def test_masked_nearest_1d(self):
+        data = numpy.ones((800, 800))
+        data[:400, :] = 2
+        lons = numpy.fromfunction(lambda x: 3 + x / 100., (500,))
+        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((800, 800))
+        mask[400:, :] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_nearest(self.area_def, masked_data.ravel(),
+                                       swath_def, 50000, segments=1)
+        self.assertEqual(res.mask.sum(), 108,
+                         msg='Swath resampling masked nearest 1d failed')
+
+    def test_masked_gauss(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_gauss(swath_def, masked_data.ravel(),
+                                     self.area_def, 50000, 25000, segments=1)
+        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                    'test_files',
+                                                    'mask_test_mask.dat'),
+                                       sep=' ').reshape((800, 800))
+        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                    'test_files',
+                                                    'mask_test_data.dat'),
+                                       sep=' ').reshape((800, 800))
+        expected = expected_data.sum()
+        cross_sum = res.data.sum()
+
+        self.assertTrue(numpy.array_equal(expected_mask, res.mask),
+                        msg='Gauss resampling of swath mask failed')
+        self.assertAlmostEqual(cross_sum, expected, places=3,
+                               msg='Gauss resampling of swath masked data failed')
+
+    def test_masked_fill_float(self):
+        data = numpy.ones((50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                         'test_files',
+                                                         'mask_test_fill_value.dat'),
+                                            sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
+                        msg='Failed to create fill mask on float data')
+
+    def test_masked_fill_int(self):
+        data = numpy.ones((50, 10)).astype('int')
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),
+                                       self.area_def, 50000, fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                         'test_files',
+                                                         'mask_test_fill_value.dat'),
+                                            sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
+                        msg='Failed to create fill mask on integer data')
+
+    def test_masked_full(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def,
+                                       masked_data.ravel(
+                                       ), self.area_def, 50000,
+                                       fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                         'test_files',
+                                                         'mask_test_full_fill.dat'),
+                                            sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
+                        msg='Failed to create fill mask on masked data')
+
+    def test_masked_full_multi(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask1 = numpy.ones((50, 10))
+        mask1[:, :5] = 0
+        mask2 = numpy.ones((50, 10))
+        mask2[:, 5:] = 0
+        mask3 = numpy.ones((50, 10))
+        mask3[:25, :] = 0
+        data_multi = numpy.column_stack(
+            (data.ravel(), data.ravel(), data.ravel()))
+        mask_multi = numpy.column_stack(
+            (mask1.ravel(), mask2.ravel(), mask3.ravel()))
+        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def,
+                                       masked_data, self.area_def, 50000,
+                                       fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                         'test_files',
+                                                         'mask_test_full_fill_multi.dat'),
+                                            sep=' ').reshape((800, 800, 3))
+        fill_mask = res.mask
+        cross_sum = res.sum()
+        expected = 357140.0
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Failed to resample masked data')
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
+                        msg='Failed to create fill mask on masked data')
+
+    def test_nearest_from_sample(self):
+        data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        valid_input_index, valid_output_index, index_array, distance_array = \
+            kd_tree.get_neighbour_info(swath_def,
+                                       self.area_def,
+                                       50000, neighbours=1, segments=1)
+        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), data.ravel(),
+                                                     valid_input_index, valid_output_index,
+                                                     index_array)
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,
+                         msg='Swath resampling from neighbour info nearest failed')
+
+    def test_custom_multi_from_sample(self):
+        def wf1(dist):
+            return 1 - dist / 100000.0
+
+        def wf2(dist):
+            return 1
+
+        def wf3(dist):
+            return numpy.cos(dist) ** 2
+
+        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
+        lons = numpy.fromfunction(
+            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
+        lats = numpy.fromfunction(
+            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
+                                         data.ravel()))
+
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            valid_input_index, valid_output_index, index_array, distance_array = \
+                kd_tree.get_neighbour_info(swath_def,
+                                           self.area_def,
+                                           50000, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                valid_input_index, valid_output_index, index_array, distance_array = \
+                    kd_tree.get_neighbour_info(swath_def,
+                                               self.area_def,
+                                               50000, segments=1)
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+
+        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
+                                                     data_multi,
+                                                     valid_input_index, valid_output_index,
+                                                     index_array, distance_array,
+                                                     weight_funcs=[wf1, wf2, wf3])
+
+        cross_sum = res.sum()
+
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel custom resampling from neighbour info failed 1')
+        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
+                                                     data_multi,
+                                                     valid_input_index, valid_output_index,
+                                                     index_array, distance_array,
+                                                     weight_funcs=[wf1, wf2, wf3])
+
+        # Look for error where input data has been manipulated
+        cross_sum = res.sum()
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,
+                               msg='Swath multi channel custom resampling from neighbour info failed 2')
+
+    def test_masked_multi_from_sample(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask1 = numpy.ones((50, 10))
+        mask1[:, :5] = 0
+        mask2 = numpy.ones((50, 10))
+        mask2[:, 5:] = 0
+        mask3 = numpy.ones((50, 10))
+        mask3[:25, :] = 0
+        data_multi = numpy.column_stack(
+            (data.ravel(), data.ravel(), data.ravel()))
+        mask_multi = numpy.column_stack(
+            (mask1.ravel(), mask2.ravel(), mask3.ravel()))
+        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        valid_input_index, valid_output_index, index_array, distance_array = \
+            kd_tree.get_neighbour_info(swath_def,
+                                       self.area_def,
+                                       50000, neighbours=1, segments=1)
+        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800),
+                                                     masked_data,
+                                                     valid_input_index,
+                                                     valid_output_index, index_array,
+                                                     fill_value=None)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
+                                                         'test_files',
+                                                         'mask_test_full_fill_multi.dat'),
+                                            sep=' ').reshape((800, 800, 3))
+        fill_mask = res.mask
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
+                        msg='Failed to create fill mask on masked data')
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
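The kd_tree tests above exercise two entry points: the one-shot resample_* functions and the
get_neighbour_info / get_sample_from_neighbour_info pair used by test_nearest_from_sample and
test_custom_multi_from_sample. A minimal sketch of that two-step workflow, assuming a synthetic
swath and an areas.cfg file containing the 'ease_sh' area (both are illustrative assumptions):

    import numpy as np
    from pyresample import geometry, kd_tree, utils

    # Synthetic swath; any lon/lat/data arrays of matching shape would do.
    lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
    lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
    data = np.fromfunction(lambda y, x: y * x, (50, 10))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    area_def = utils.load_area('areas.cfg', 'ease_sh')  # assumed path

    # Precompute the neighbour lookup once ...
    valid_input_index, valid_output_index, index_array, distance_array = \
        kd_tree.get_neighbour_info(swath_def, area_def, 50000, neighbours=1)

    # ... then reuse it for every dataset defined on the same swath geometry.
    result = kd_tree.get_sample_from_neighbour_info(
        'nn', area_def.shape, data.ravel(),
        valid_input_index, valid_output_index, index_array)

Reusing the precomputed neighbour information is what keeps multi-channel resampling cheap,
which is essentially what the *_from_sample tests verify.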
diff --git a/pyresample/test/test_plot.py b/pyresample/test/test_plot.py
new file mode 100644
index 0000000..f2cd261
--- /dev/null
+++ b/pyresample/test/test_plot.py
@@ -0,0 +1,87 @@
+import unittest
+import os
+
+import numpy as np
+
+from pyresample import plot, geometry, utils, kd_tree
+
+try:
+    import matplotlib
+    matplotlib.use('Agg')
+except ImportError:
+    pass  # Postpone fail to individual tests
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+
+class Test(unittest.TestCase):
+
+    filename = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                            'test_files', 'ssmis_swath.npz'))
+    data = np.load(filename)['data']
+    lons = data[:, 0].astype(np.float64)
+    lats = data[:, 1].astype(np.float64)
+    tb37v = data[:, 2].astype(np.float64)
+
+    # screen out the fill values
+    fvalue = -10000000000.0
+    valid_fov = (lons != fvalue) * (lats != fvalue) * (tb37v != fvalue)
+    lons = lons[valid_fov]
+    lats = lats[valid_fov]
+    tb37v = tb37v[valid_fov]
+
+    def test_ellps2axis(self):
+        a, b = plot.ellps2axis('WGS84')
+        self.assertAlmostEqual(a, 6378137.0,
+                               msg='Failed to get semi-major axis of ellipsoid')
+        self.assertAlmostEqual(b, 6356752.3142451793,
+                               msg='Failed to get semi-minor axis of ellipsoid')
+
+    def test_area_def2basemap(self):
+        area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
+                                                      'test_files', 'areas.cfg'), 'ease_sh')[0]
+        bmap = plot.area_def2basemap(area_def)
+        self.assertTrue(bmap.rmajor == bmap.rminor and
+                        bmap.rmajor == 6371228.0,
+                        'Failed to create Basemap object')
+
+    def test_plate_carreeplot(self):
+        area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
+                                                      'test_files', 'areas.cfg'), 'pc_world')[0]
+        swath_def = geometry.SwathDefinition(self.lons, self.lats)
+        result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
+                                          radius_of_influence=20000,
+                                          fill_value=None)
+        plt = plot._get_quicklook(area_def, result, num_meridians=0,
+                                  num_parallels=0)
+
+    def test_easeplot(self):
+        area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
+                                                      'test_files', 'areas.cfg'), 'ease_sh')[0]
+        swath_def = geometry.SwathDefinition(self.lons, self.lats)
+        result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
+                                          radius_of_influence=20000,
+                                          fill_value=None)
+        plt = plot._get_quicklook(area_def, result)
+
+    def test_orthoplot(self):
+        area_def = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
+                                                      'test_files', 'areas.cfg'), 'ortho')[0]
+        swath_def = geometry.SwathDefinition(self.lons, self.lats)
+        result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
+                                          radius_of_influence=20000,
+                                          fill_value=None)
+        plt = plot._get_quicklook(area_def, result)
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
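The plot tests call the private _get_quicklook helper so that nothing is written to disk; the
public entry points are plot.save_quicklook and plot.show_quicklook. A rough sketch along the
same lines, again with a synthetic swath and an assumed areas.cfg path:

    import numpy as np
    from pyresample import geometry, kd_tree, plot, utils

    lons = np.fromfunction(lambda y, x: 3 + x, (50, 10)).ravel()
    lats = np.fromfunction(lambda y, x: 75 - y, (50, 10)).ravel()
    tb37v = np.ones(lons.shape) * 250.0  # stand-in for the SSMIS 37V channel

    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    area_def = utils.load_area('areas.cfg', 'ease_sh')  # assumed path

    result = kd_tree.resample_nearest(swath_def, tb37v, area_def,
                                      radius_of_influence=20000,
                                      fill_value=None)
    # Renders the resampled grid with Basemap coastlines and a colorbar.
    plot.save_quicklook('tb37v_ease_sh.png', area_def, result,
                        label='Tb 37v (K)')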
diff --git a/test/test_spherical_geometry.py b/pyresample/test/test_spherical_geometry.py
similarity index 97%
rename from test/test_spherical_geometry.py
rename to pyresample/test/test_spherical_geometry.py
index 098b16a..169df45 100644
--- a/test/test_spherical_geometry.py
+++ b/pyresample/test/test_spherical_geometry.py
@@ -9,8 +9,10 @@ from pyresample import geometry
 
 
 class TestOverlap(unittest.TestCase):
+
     """Testing overlapping functions in pyresample.
     """
+
     def assert_raises(self, exception, call_able, *args):
         """assertRaises() has changed from py2.6 to 2.7! Here is an attempt to
         cover both"""
@@ -27,7 +29,7 @@ class TestOverlap(unittest.TestCase):
         lons = np.array([[-11, 11], [-11, 11]])
         lats = np.array([[11, 11], [-11, -11]])
         area = geometry.SwathDefinition(lons, lats)
-        
+
         point = Coordinate(0, 0)
 
         self.assertTrue(point in area)
@@ -35,7 +37,6 @@ class TestOverlap(unittest.TestCase):
         point = Coordinate(0, 12)
         self.assertFalse(point in area)
 
-
         lons = np.array([[-179, 179], [-179, 179]])
         lats = np.array([[1, 1], [-1, -1]])
         area = geometry.SwathDefinition(lons, lats)
@@ -67,7 +68,7 @@ class TestOverlap(unittest.TestCase):
         lons1 = np.array([[0, 90], [-90, 180]])
         lats1 = np.array([[89, 89], [89, 89]])
         area1 = geometry.SwathDefinition(lons1, lats1)
-        
+
         lons2 = np.array([[45, 135], [-45, -135]])
         lats2 = np.array([[89, 89], [89, 89]])
         area2 = geometry.SwathDefinition(lons2, lats2)
@@ -78,14 +79,14 @@ class TestOverlap(unittest.TestCase):
         lons1 = np.array([[0, 45], [135, 90]])
         lats1 = np.array([[89, 89], [89, 89]])
         area1 = geometry.SwathDefinition(lons1, lats1)
-        
+
         lons2 = np.array([[180, -135], [-45, -90]])
         lats2 = np.array([[89, 89], [89, 89]])
         area2 = geometry.SwathDefinition(lons2, lats2)
 
         self.assertFalse(area1.overlaps(area2))
         self.assertFalse(area2.overlaps(area1))
-        
+
         lons1 = np.array([[-1, 1], [-1, 1]])
         lats1 = np.array([[1, 1], [-1, -1]])
         area1 = geometry.SwathDefinition(lons1, lats1)
@@ -96,7 +97,6 @@ class TestOverlap(unittest.TestCase):
 
         self.assertTrue(area1.overlaps(area2))
         self.assertTrue(area2.overlaps(area1))
-        
 
         lons1 = np.array([[-1, 0], [-1, 0]])
         lats1 = np.array([[1, 2], [-1, 0]])
@@ -105,11 +105,10 @@ class TestOverlap(unittest.TestCase):
         lons2 = np.array([[1, 2], [1, 2]])
         lats2 = np.array([[1, 2], [-1, 0]])
         area2 = geometry.SwathDefinition(lons2, lats2)
-        
+
         self.assertFalse(area1.overlaps(area2))
         self.assertFalse(area2.overlaps(area1))
 
-
     def test_overlap_rate(self):
         """Test how much two areas overlap.
         """
@@ -124,23 +123,22 @@ class TestOverlap(unittest.TestCase):
 
         self.assertAlmostEqual(area1.overlap_rate(area2), 0.25, 3)
         self.assertAlmostEqual(area2.overlap_rate(area1), 0.25, 3)
-        
+
         lons1 = np.array([[82.829699999999974, 36.888300000000001],
                           [98.145499999999984, 2.8773]])
         lats1 = np.array([[60.5944, 52.859999999999999],
                           [80.395899999999997, 66.7547]])
         area1 = geometry.SwathDefinition(lons1, lats1)
-        
+
         lons2 = np.array([[7.8098183315148422, 26.189349044600252],
                           [7.8098183315148422, 26.189349044600252]])
         lats2 = np.array([[62.953206630716465, 62.953206630716465],
                           [53.301561187195546, 53.301561187195546]])
         area2 = geometry.SwathDefinition(lons2, lats2)
 
-
         self.assertAlmostEqual(area1.overlap_rate(area2), 0.07, 2)
         self.assertAlmostEqual(area2.overlap_rate(area1), 0.012, 3)
-        
+
         lons1 = np.array([[82.829699999999974, 36.888300000000001],
                           [98.145499999999984, 2.8773]])
         lats1 = np.array([[60.5944, 52.859999999999999],
@@ -153,13 +151,12 @@ class TestOverlap(unittest.TestCase):
                           [57.304862819933433, 57.304862819933433]])
         area2 = geometry.SwathDefinition(lons2, lats2)
 
-        
         self.assertAlmostEqual(area1.overlap_rate(area2), 0.5, 2)
         self.assertAlmostEqual(area2.overlap_rate(area1), 0.068, 3)
 
 
-
 class TestSphereGeometry(unittest.TestCase):
+
     """Testing sphere geometry from this module.
     """
 
@@ -207,7 +204,6 @@ class TestSphereGeometry(unittest.TestCase):
         self.assertAlmostEqual(arc4.angle(arc2), math.pi,
                                msg="this should be pi")
 
-
         p5_ = Coordinate(base + 1, base + 1)
         p6_ = Coordinate(base + 1, base - 1)
         p7_ = Coordinate(base - 1, base - 1)
@@ -238,14 +234,12 @@ class TestSphereGeometry(unittest.TestCase):
         self.assertAlmostEqual(arc1.angle(arc6), 3 * math.pi / 4, 3,
                                msg="this should be 3pi/4")
 
-
         c0_ = Coordinate(180, 0)
         c1_ = Coordinate(180, 1)
         c2_ = Coordinate(-179, 0)
         c3_ = Coordinate(-180, -1)
         c4_ = Coordinate(179, 0)
 
-
         arc1 = Arc(c0_, c1_)
         arc2 = Arc(c0_, c2_)
         arc3 = Arc(c0_, c3_)
@@ -300,15 +294,14 @@ class TestSphereGeometry(unittest.TestCase):
         self.assertAlmostEqual(arc2.angle(arc1), -math.pi / 2,
                                msg="this should be -pi/2")
 
-        self.assertAlmostEqual(Arc(c1_, c2_).angle(arc1), math.pi/4, 3,
+        self.assertAlmostEqual(Arc(c1_, c2_).angle(arc1), math.pi / 4, 3,
                                msg="this should be pi/4")
-                               
-        self.assertAlmostEqual(Arc(c4_, c3_).angle(arc4), -math.pi/4, 3,
-                               msg="this should be -pi/4")
 
-        self.assertAlmostEqual(Arc(c1_, c4_).angle(arc1), -math.pi/4, 3,
+        self.assertAlmostEqual(Arc(c4_, c3_).angle(arc4), -math.pi / 4, 3,
                                msg="this should be -pi/4")
 
+        self.assertAlmostEqual(Arc(c1_, c4_).angle(arc1), -math.pi / 4, 3,
+                               msg="this should be -pi/4")
 
     def test_intersects(self):
         """Test if two arcs intersect.
@@ -347,8 +340,6 @@ class TestSphereGeometry(unittest.TestCase):
 
         self.assertTrue(arc35.intersects(arc24))
 
-
-
         p0_ = Coordinate(180, 0)
         p1_ = Coordinate(180, 1)
         p2_ = Coordinate(-179, 0)
@@ -389,7 +380,7 @@ class TestSphereGeometry(unittest.TestCase):
         p1_ = Coordinate(0, 89)
         p2_ = Coordinate(90, 89)
         p3_ = Coordinate(180, 89)
-        p4_ = Coordinate(-90, 89)    
+        p4_ = Coordinate(-90, 89)
         p5_ = Coordinate(45, 89)
         p6_ = Coordinate(135, 89)
 
@@ -420,8 +411,16 @@ class TestSphereGeometry(unittest.TestCase):
         self.assertTrue(arc35.intersects(arc24))
 
 
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(TestOverlap))
+    mysuite.addTest(loader.loadTestsFromTestCase(TestSphereGeometry))
 
-if __name__ == '__main__':
-    unittest.main()
+    return mysuite
 
 
+if __name__ == '__main__':
+    unittest.main()
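The overlap tests above work directly on 2x2 corner grids passed to SwathDefinition. A small
sketch of the same overlaps/overlap_rate calls on two hand-built boxes, with coordinates chosen
only so that roughly one quadrant is shared (illustrative values, not taken from the test data):

    import numpy as np
    from pyresample import geometry

    # A box spanning lon/lat -1..1 ...
    area1 = geometry.SwathDefinition(np.array([[-1, 1], [-1, 1]]),
                                     np.array([[1, 1], [-1, -1]]))
    # ... and a box spanning lon/lat 0..2, so one quadrant overlaps.
    area2 = geometry.SwathDefinition(np.array([[0, 2], [0, 2]]),
                                     np.array([[2, 2], [0, 0]]))

    print(area1.overlaps(area2))      # True
    print(area1.overlap_rate(area2))  # roughly 0.25 of area1 is covered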
diff --git a/test/test_swath.py b/pyresample/test/test_swath.py
similarity index 54%
rename from test/test_swath.py
rename to pyresample/test/test_swath.py
index bc19e59..ab0c4c1 100644
--- a/test/test_swath.py
+++ b/pyresample/test/test_swath.py
@@ -15,48 +15,71 @@ def tmp(f):
     f.tmp = True
     return f
 
+
 class Test(unittest.TestCase):
-    
-    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), 
-                               'test_files', 'ssmis_swath.npz'))
+
+    filename = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                            'test_files', 'ssmis_swath.npz'))
     data = np.load(filename)['data']
     lons = data[:, 0].astype(np.float64)
     lats = data[:, 1].astype(np.float64)
     tb37v = data[:, 2].astype(np.float64)
-    
-    @tmp           
+
+    # screen out the fill values
+    fvalue = -10000000000.0
+    valid_fov = (lons != fvalue) * (lats != fvalue) * (tb37v != fvalue)
+    lons = lons[valid_fov]
+    lats = lats[valid_fov]
+    tb37v = tb37v[valid_fov]
+
+    @tmp
     def test_self_map(self):
         swath_def = geometry.SwathDefinition(lons=self.lons, lats=self.lats)
         if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def, 
+            res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def,
                                          radius_of_influence=70000, sigmas=56500)
         else:
             with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def, 
+                res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def,
                                              radius_of_influence=70000, sigmas=56500)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-       
-        self.assertAlmostEqual(res.sum() / 100., 668848.082208, 1, 
-                                msg='Failed self mapping swath for 1 channel')
-                           
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+
+        self.assertAlmostEqual(res.sum() / 100., 668848.082208, 1,
+                               msg='Failed self mapping swath for 1 channel')
+
     def test_self_map_multi(self):
         data = np.column_stack((self.tb37v, self.tb37v, self.tb37v))
         swath_def = geometry.SwathDefinition(lons=self.lons, lats=self.lats)
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data, swath_def, 
+
+        if (sys.version_info < (2, 6) or
+                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
+            res = kd_tree.resample_gauss(swath_def, data, swath_def,
                                          radius_of_influence=70000, sigmas=[56500, 56500, 56500])
         else:
             with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data, swath_def, 
+                res = kd_tree.resample_gauss(swath_def, data, swath_def,
                                              radius_of_influence=70000, sigmas=[56500, 56500, 56500])
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-                
-        self.assertAlmostEqual(res[:, 0].sum() / 100., 668848.082208, 1, 
-                                   msg='Failed self mapping swath multi for channel 1')
-        self.assertAlmostEqual(res[:, 1].sum() / 100., 668848.082208, 1, 
-                                   msg='Failed self mapping swath multi for channel 2')
-        self.assertAlmostEqual(res[:, 2].sum() / 100., 668848.082208, 1, 
-                                   msg='Failed self mapping swath multi for channel 3')            
-    
+                self.assertFalse(
+                    len(w) != 1, 'Failed to create neighbour radius warning')
+                self.assertFalse(('Possible more' not in str(
+                    w[0].message)), 'Failed to create correct neighbour radius warning')
+
+        self.assertAlmostEqual(res[:, 0].sum() / 100., 668848.082208, 1,
+                               msg='Failed self mapping swath multi for channel 1')
+        self.assertAlmostEqual(res[:, 1].sum() / 100., 668848.082208, 1,
+                               msg='Failed self mapping swath multi for channel 2')
+        self.assertAlmostEqual(res[:, 2].sum() / 100., 668848.082208, 1,
+                               msg='Failed self mapping swath multi for channel 3')
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
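
As a usage sketch of what the self-map tests above exercise (not part of the patch; the swath coordinates, data and sigma below are illustrative), resampling a swath onto itself with resample_gauss while recording the neighbour-radius warning looks roughly like this, assuming pyresample 1.1.3 and NumPy are installed:

import warnings

import numpy as np

from pyresample import geometry, kd_tree

# Small synthetic swath; coordinates, data and sigma are illustrative only.
lons = np.linspace(-20.0, 20.0, 500)
lats = np.linspace(55.0, 75.0, 500)
data = np.cos(np.deg2rad(lats))

swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

# Resample the swath onto itself, as the self-map tests do, while recording
# the "neighbour radius" warning those tests assert on.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    result = kd_tree.resample_gauss(swath_def, data, swath_def,
                                    radius_of_influence=70000, sigmas=56500)

print(result.shape, [str(w.message) for w in caught])
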
diff --git a/test/test_utils.py b/pyresample/test/test_utils.py
similarity index 55%
rename from test/test_utils.py
rename to pyresample/test/test_utils.py
index 40bd699..6274667 100644
--- a/test/test_utils.py
+++ b/pyresample/test/test_utils.py
@@ -3,51 +3,73 @@ import unittest
 
 from pyresample import utils
 
+import numpy as np
 
 def tmp(f):
     f.tmp = True
     return f
 
+
 class Test(unittest.TestCase):
 
     def test_area_parser(self):
-        ease_nh, ease_sh = utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
-                                                              'test_files', 
+        ease_nh, ease_sh = utils.parse_area_file(os.path.join(os.path.dirname(__file__),
+                                                              'test_files',
                                                               'areas.cfg'), 'ease_nh', 'ease_sh')
-        
-        nh_found = (ease_nh.__str__() =="""Area ID: ease_nh
+
+        nh_found = (ease_nh.__str__() == """Area ID: ease_nh
 Name: Arctic EASE grid
 Projection ID: ease_nh
 Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
 Number of columns: 425
 Number of rows: 425
 Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
-        
-        sh_found = (ease_sh.__str__() =="""Area ID: ease_sh
+
+        sh_found = (ease_sh.__str__() == """Area ID: ease_sh
 Name: Antarctic EASE grid
 Projection ID: ease_sh
 Projection: {'a': '6371228.0', 'lat_0': '-90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
 Number of columns: 425
 Number of rows: 425
 Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
-        
-        self.assertTrue(nh_found and sh_found, msg='Failed to parse areas correctly')
-    
+
+        self.assertTrue(
+            nh_found and sh_found, msg='Failed to parse areas correctly')
+
     def test_load_area(self):
-        ease_nh = utils.load_area(os.path.join(os.path.dirname(__file__), 
-                                                              'test_files', 
-                                                              'areas.cfg'), 'ease_nh')
-        nh_found = (ease_nh.__str__() =="""Area ID: ease_nh
+        ease_nh = utils.load_area(os.path.join(os.path.dirname(__file__),
+                                               'test_files',
+                                               'areas.cfg'), 'ease_nh')
+        nh_found = (ease_nh.__str__() == """Area ID: ease_nh
 Name: Arctic EASE grid
 Projection ID: ease_nh
 Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
 Number of columns: 425
 Number of rows: 425
 Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
-        self.assertTrue(nh_found, msg='Failed to load area correctly') 
-        
+        self.assertTrue(nh_found, msg='Failed to load area correctly')
+
     def test_not_found_exception(self):
-        self.assertRaises(utils.AreaNotFound, utils.parse_area_file, 
-                          os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg'), 
+        self.assertRaises(utils.AreaNotFound, utils.parse_area_file,
+                          os.path.join(
+                              os.path.dirname(__file__), 'test_files', 'areas.cfg'),
                           'no_area')
         
+    def test_wrap_longitudes(self):
+        # test that we indeed wrap to [-180:+180[
+        step = 60
+        lons = np.arange(-360, 360 + step, step)
+        self.assertTrue((lons.min() < -180) and (lons.max() >= 180) and (+180 in lons))
+        wlons = utils.wrap_longitudes(lons)
+        self.assertFalse((wlons.min() < -180) or (wlons.max() >= 180) or (+180 in wlons))
+
+
+
+def suite():
+    """The test suite.
+    """
+    loader = unittest.TestLoader()
+    mysuite = unittest.TestSuite()
+    mysuite.addTest(loader.loadTestsFromTestCase(Test))
+
+    return mysuite
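
For reference, the legacy area-file format these tests parse can be exercised directly; the sketch below is illustrative only (the temporary file and its contents are not part of the patch, though the values mirror the 'ease_nh' area above) and assumes pyresample 1.1.3 with configobj installed:

import os
import tempfile

from pyresample import utils

# Minimal area file in the legacy format read by parse_area_file()/load_area().
AREA_CFG = """REGION: ease_nh {
    NAME:        Arctic EASE grid
    PCS_ID:      ease_nh
    PCS_DEF:     proj=laea, lat_0=90, lon_0=0, a=6371228.0, units=m
    XSIZE:       425
    YSIZE:       425
    AREA_EXTENT: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
};
"""

with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as tmp_cfg:
    tmp_cfg.write(AREA_CFG)

# One requested region returns a single AreaDefinition object.
ease_nh = utils.load_area(tmp_cfg.name, 'ease_nh')
print(ease_nh)
os.remove(tmp_cfg.name)
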
diff --git a/pyresample/utils.py b/pyresample/utils.py
index 336848c..647f5c2 100644
--- a/pyresample/utils.py
+++ b/pyresample/utils.py
@@ -1,77 +1,86 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010-2015
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# Authors:
+#    Esben S. Nielsen
+#    Thomas Lavergne
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """Utility functions for pyresample"""
 
+from __future__ import absolute_import
+
 import numpy as np
 from configobj import ConfigObj
 
-import geometry, grid, kd_tree
-import _spatial_mp
+import pyresample as pr
+
 
 class AreaNotFound(Exception):
+
     """Exception raised when specified are is no found in file"""
     pass
 
+
 def load_area(area_file_name, *regions):
     """Load area(s) from area file
-    
+
     :Parameters:
     area_file_name : str
         Path to area definition file
     regions : str argument list 
         Regions to parse. If no regions are specified all 
         regions in the file are returned
-             
+
     :Returns:
     area_defs : object or list
         If one area name is specified a single AreaDefinition object is returned
         If several area names are specified a list of AreaDefinition objects is returned
-        
+
     :Raises:
     AreaNotFound
         If a specified area name is not found
     """
-    
+
     area_list = parse_area_file(area_file_name, *regions)
     if len(area_list) == 1:
         return area_list[0]
     else:
         return area_list
 
+
 def parse_area_file(area_file_name, *regions):
     """Parse area information from area file
-    
+
     :Parameters:
     area_file_name : str
         Path to area definition file
     regions : str argument list 
         Regions to parse. If no regions are specified all 
         regions in the file are returned
-             
+
     :Returns:
     area_defs : list
         List of AreaDefinition objects
-        
+
     :Raises:
     AreaNotFound
         If a specified area is not found
     """
-            
+
     area_file = open(area_file_name, 'r')
     area_list = list(regions)
     if len(area_list) == 0:
@@ -80,19 +89,19 @@ def parse_area_file(area_file_name, *regions):
     else:
         select_all_areas = False
         area_defs = [None for i in area_list]
-        
-    #Extract area from file
+
+    # Extract area from file
     in_area = False
     for line in area_file.readlines():
         if not in_area:
             if 'REGION' in line:
                 area_id = line.replace('REGION:', ''). \
-                              replace('{', '').strip()
+                    replace('{', '').strip()
                 if area_id in area_list or select_all_areas:
                     in_area = True
                     area_content = ''
         elif '};' in line:
-            in_area = False            
+            in_area = False
             if select_all_areas:
                 area_defs.append(_create_area(area_id, area_content))
             else:
@@ -102,15 +111,16 @@ def parse_area_file(area_file_name, *regions):
             area_content += line
 
     area_file.close()
-    
-    #Check if all specified areas were found
+
+    # Check if all specified areas were found
     if not select_all_areas:
         for i, area in enumerate(area_defs):
             if area is None:
-                raise AreaNotFound('Area "%s" not found in file "%s"'%
-                                   (area_list[i], area_file_name))    
+                raise AreaNotFound('Area "%s" not found in file "%s"' %
+                                   (area_list[i], area_file_name))
     return area_defs
 
+
 def _create_area(area_id, area_content):
     """Parse area configuration"""
 
@@ -119,30 +129,34 @@ def _create_area(area_id, area_content):
                             for line in config_obj.splitlines()])
     config = config_obj.dict()
     config['REGION'] = area_id
+
     try:
-        config['NAME'].__iter__()
+        string_types = basestring
+    except NameError:
+        string_types = str
+    if not isinstance(config['NAME'], string_types):
         config['NAME'] = ', '.join(config['NAME'])
-    except:
-        config['NAME'] = ''.join(config['NAME'])
+
     config['XSIZE'] = int(config['XSIZE'])
     config['YSIZE'] = int(config['YSIZE'])
     config['AREA_EXTENT'][0] = config['AREA_EXTENT'][0].replace('(', '')
     config['AREA_EXTENT'][3] = config['AREA_EXTENT'][3].replace(')', '')
-    
+
     for i, val in enumerate(config['AREA_EXTENT']):
         config['AREA_EXTENT'][i] = float(val)
-        
+
     config['PCS_DEF'] = _get_proj4_args(config['PCS_DEF'])
-    
-    return geometry.AreaDefinition(config['REGION'], config['NAME'], 
-                                   config['PCS_ID'], config['PCS_DEF'], 
-                                   config['XSIZE'], config['YSIZE'], 
-                                   config['AREA_EXTENT'])
+
+    return pr.geometry.AreaDefinition(config['REGION'], config['NAME'],
+                                      config['PCS_ID'], config['PCS_DEF'],
+                                      config['XSIZE'], config['YSIZE'],
+                                      config['AREA_EXTENT'])
+
 
 def get_area_def(area_id, area_name, proj_id, proj4_args, x_size, y_size,
                  area_extent):
     """Construct AreaDefinition object from arguments
-    
+
     :Parameters:
     area_id : str
         ID of area
@@ -158,19 +172,20 @@ def get_area_def(area_id, area_name, proj_id, proj4_args, x_size, y_size,
         Number of pixel in y dimension
     area_extent : list 
         Area extent as a list of ints (LL_x, LL_y, UR_x, UR_y)
-    
+
     :Returns: 
     area_def : object
         AreaDefinition object
     """
-    
+
     proj_dict = _get_proj4_args(proj4_args)
-    return geometry.AreaDefinition(area_id, area_name, proj_id, proj_dict, x_size,
-                                   y_size, area_extent)    
+    return pr.geometry.AreaDefinition(area_id, area_name, proj_id, proj_dict, x_size,
+                                      y_size, area_extent)
+
 
 def generate_quick_linesample_arrays(source_area_def, target_area_def, nprocs=1):
     """Generate linesample arrays for quick grid resampling
-    
+
     :Parameters:
     source_area_def : object 
         Source area definition as AreaDefinition object
@@ -182,28 +197,29 @@ def generate_quick_linesample_arrays(source_area_def, target_area_def, nprocs=1)
     :Returns: 
     (row_indices, col_indices) : tuple of numpy arrays
     """
-    if not (isinstance(source_area_def, geometry.AreaDefinition) and
-            isinstance(target_area_def, geometry.AreaDefinition)):
+    if not (isinstance(source_area_def, pr.geometry.AreaDefinition) and
+            isinstance(target_area_def, pr.geometry.AreaDefinition)):
         raise TypeError('source_area_def and target_area_def must be of type '
                         'geometry.AreaDefinition')
-            
+
     lons, lats = target_area_def.get_lonlats(nprocs)
-    
-    source_pixel_y, source_pixel_x = grid.get_linesample(lons, lats, 
-                                                         source_area_def, 
-                                                         nprocs=nprocs)
-    
-    source_pixel_x = _downcast_index_array(source_pixel_x, 
-                                           source_area_def.shape[1]) 
-    source_pixel_y = _downcast_index_array(source_pixel_y, 
+
+    source_pixel_y, source_pixel_x = pr.grid.get_linesample(lons, lats,
+                                                            source_area_def,
+                                                            nprocs=nprocs)
+
+    source_pixel_x = _downcast_index_array(source_pixel_x,
+                                           source_area_def.shape[1])
+    source_pixel_y = _downcast_index_array(source_pixel_y,
                                            source_area_def.shape[0])
-                     
+
     return source_pixel_y, source_pixel_x
 
-def generate_nearest_neighbour_linesample_arrays(source_area_def, target_area_def, 
+
+def generate_nearest_neighbour_linesample_arrays(source_area_def, target_area_def,
                                                  radius_of_influence, nprocs=1):
     """Generate linesample arrays for nearest neighbour grid resampling
-    
+
     :Parameters:
     source_area_def : object 
         Source area definition as AreaDefinition object
@@ -217,29 +233,29 @@ def generate_nearest_neighbour_linesample_arrays(source_area_def, target_area_de
     :Returns: 
     (row_indices, col_indices) : tuple of numpy arrays
     """
-    
-    if not (isinstance(source_area_def, geometry.AreaDefinition) and
-            isinstance(target_area_def, geometry.AreaDefinition)):
+
+    if not (isinstance(source_area_def, pr.geometry.AreaDefinition) and
+            isinstance(target_area_def, pr.geometry.AreaDefinition)):
         raise TypeError('source_area_def and target_area_def must be of type '
                         'geometry.AreaDefinition')
-    
+
     valid_input_index, valid_output_index, index_array, distance_array = \
-                            kd_tree.get_neighbour_info(source_area_def, 
-                                                       target_area_def, 
-                                                       radius_of_influence, 
-                                                       neighbours=1,
-                                                       nprocs=nprocs)
-    #Enumerate rows and cols
-    rows = np.fromfunction(lambda i, j: i, source_area_def.shape, 
+        pr.kd_tree.get_neighbour_info(source_area_def,
+                                      target_area_def,
+                                      radius_of_influence,
+                                      neighbours=1,
+                                      nprocs=nprocs)
+    # Enumerate rows and cols
+    rows = np.fromfunction(lambda i, j: i, source_area_def.shape,
                            dtype=np.int32).ravel()
-    cols = np.fromfunction(lambda i, j: j, source_area_def.shape, 
+    cols = np.fromfunction(lambda i, j: j, source_area_def.shape,
                            dtype=np.int32).ravel()
-    
-    #Reduce to match resampling data set
+
+    # Reduce to match resampling data set
     rows_valid = rows[valid_input_index]
     cols_valid = cols[valid_input_index]
-    
-    #Get result using array indexing
+
+    # Get result using array indexing
     number_of_valid_points = valid_input_index.sum()
     index_mask = (index_array == number_of_valid_points)
     index_array[index_mask] = 0
@@ -247,21 +263,22 @@ def generate_nearest_neighbour_linesample_arrays(source_area_def, target_area_de
     col_sample = cols_valid[index_array]
     row_sample[index_mask] = -1
     col_sample[index_mask] = -1
-    
-    #Reshape to correct shape
+
+    # Reshape to correct shape
     row_indices = row_sample.reshape(target_area_def.shape)
     col_indices = col_sample.reshape(target_area_def.shape)
-    
-    row_indices = _downcast_index_array(row_indices, 
+
+    row_indices = _downcast_index_array(row_indices,
                                         source_area_def.shape[0])
-    col_indices = _downcast_index_array(col_indices, 
+    col_indices = _downcast_index_array(col_indices,
                                         source_area_def.shape[1])
-    
+
     return row_indices, col_indices
 
+
 def fwhm2sigma(fwhm):
     """Calculate sigma for gauss function from FWHM (3 dB level)
-    
+
     :Parameters:
     fwhm : float 
         FWHM of gauss function (3 dB level of beam footprint)
@@ -269,15 +286,16 @@ def fwhm2sigma(fwhm):
     :Returns: 
     sigma : float
         sigma for use in resampling gauss function
-        
+
     """
-    
+
     return fwhm / (2 * np.sqrt(np.log(2)))
-    
+
+
 def _get_proj4_args(proj4_args):
     """Create dict from proj4 args
     """
-    
+
     if isinstance(proj4_args, str):
         proj_config = ConfigObj(proj4_args.replace('+', '').split())
     else:
@@ -288,10 +306,25 @@ def _get_proj4_args(proj4_args):
 def _downcast_index_array(index_array, size):
     """Try to downcast array to uint16
     """
-    
+
     if size <= np.iinfo(np.uint16).max:
         mask = (index_array < 0) | (index_array >= size)
         index_array[mask] = size
         index_array = index_array.astype(np.uint16)
     return index_array
-        
+
+
+def wrap_longitudes(lons):
+    """Wrap longitudes to the [-180:+180[ validity range (preserves dtype)
+
+    :Parameters:
+    lons : numpy array
+        Longitudes in degrees
+
+    :Returns: 
+    lons : numpy array
+        Longitudes wrapped into [-180:+180[ validity range
+
+    """
+    lons_wrap = (lons + 180) % (360) - 180
+    return lons_wrap.astype(lons.dtype)
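
The two helpers touched above can be used on their own. The example values below are illustrative; the comment on fwhm2sigma() assumes a Gaussian weighting of the form exp(-dist**2 / sigma**2), which is what the sigma = FWHM / (2 * sqrt(ln 2)) relation in the code implies:

import numpy as np

from pyresample import utils

# Longitudes outside [-180, 180), e.g. from a 0-360 degree product.
lons = np.array([0.0, 90.0, 180.0, 270.0, 359.5])
print(utils.wrap_longitudes(lons))   # -> [   0.   90. -180.  -90.   -0.5]

# Convert a 70 km beam FWHM to the sigma passed to resample_gauss();
# consistent with a Gaussian weight of the form exp(-dist**2 / sigma**2).
sigma = utils.fwhm2sigma(70000.0)
print(sigma)                         # about 42039 m
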
diff --git a/pyresample/version.py b/pyresample/version.py
index a278cb1..d891594 100644
--- a/pyresample/version.py
+++ b/pyresample/version.py
@@ -1,18 +1,18 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2010  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
-#(at your option) any later version.
+# Copyright (C) 2010, 2014, 2015  Esben S. Nielsen
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-__version__ = '1.1.0'
+__version__ = '1.1.3'
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..4498b6b
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+git+https://github.com/matplotlib/matplotlib.git
+git+https://github.com/matplotlib/basemap.git
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 861a9f5..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,5 +0,0 @@
-[egg_info]
-tag_build = 
-tag_date = 0
-tag_svn_revision = 0
-
diff --git a/setup.py b/setup.py
index 6b27c76..18ea8cd 100644
--- a/setup.py
+++ b/setup.py
@@ -1,30 +1,33 @@
-#pyresample, Resampling of remote sensing image data in python
-# 
-#Copyright (C) 2012  Esben S. Nielsen
+# pyresample, Resampling of remote sensing image data in python
 #
-#This program is free software: you can redistribute it and/or modify
-#it under the terms of the GNU General Public License as published by
-#the Free Software Foundation, either version 3 of the License, or
+# Copyright (C) 2012, 2014, 2015  Esben S. Nielsen
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or
 #(at your option) any later version.
 #
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#GNU General Public License for more details.
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
+# details.
 #
-#You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+# workaround python bug: http://bugs.python.org/issue15881#msg170215
+import multiprocessing
 from setuptools import setup
 import sys
-import os
 
 import imp
 
 version = imp.load_source('pyresample.version', 'pyresample/version.py')
 
 requirements = ['pyproj', 'numpy', 'configobj']
-extras_require = {'pykdtree': ['pykdtree'], 'numexpr': ['numexpr']}
+extras_require = {'pykdtree': ['pykdtree'],
+                  'numexpr': ['numexpr'],
+                  'quicklook': ['matplotlib', 'basemap']}
 
 if sys.version_info < (2, 6):
     # multiprocessing is not in the standard library
@@ -33,22 +36,20 @@ if sys.version_info < (2, 6):
 setup(name='pyresample',
       version=version.__version__,
       description='Resampling of remote sensing data in Python',
-      author='Esben S. Nielsen',
-      author_email='esn at dmi.dk',
-      package_dir = {'pyresample': 'pyresample'},
-      packages = ['pyresample'],      
+      author='Thomas Lavergne',
+      author_email='t.lavergne at met.no',
+      package_dir={'pyresample': 'pyresample'},
+      packages=['pyresample'],
       install_requires=requirements,
-      extras_require = extras_require,
-      zip_safe = False,
+      extras_require=extras_require,
+      test_suite='pyresample.test.suite',
+      zip_safe=False,
       classifiers=[
-      'Development Status :: 5 - Production/Stable',
-      'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
-      'Programming Language :: Python',
-      'Operating System :: OS Independent',
-      'Intended Audience :: Science/Research',
-      'Topic :: Scientific/Engineering'
+          'Development Status :: 5 - Production/Stable',
+          'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
+          'Programming Language :: Python',
+          'Operating System :: OS Independent',
+          'Intended Audience :: Science/Research',
+          'Topic :: Scientific/Engineering'
       ]
       )
-
-
-
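
With the new test_suite entry point and the per-module suite() helpers, the tests can also be driven directly from unittest. This is a sketch, assuming pyresample 1.1.3 is installed together with its bundled test data:

import unittest

from pyresample.test import test_swath, test_utils

# Collect the per-module suite() helpers added in this release, mirroring
# what the new test_suite='pyresample.test.suite' entry point does.
all_tests = unittest.TestSuite()
all_tests.addTests([test_swath.suite(), test_utils.suite()])
unittest.TextTestRunner(verbosity=2).run(all_tests)
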
diff --git a/test/test_geometry.py b/test/test_geometry.py
deleted file mode 100644
index e806a32..0000000
--- a/test/test_geometry.py
+++ /dev/null
@@ -1,513 +0,0 @@
-from __future__ import with_statement
-
-import unittest
-
-import numpy as np
-
-from pyresample import geometry, geo_filter
-
-
-def tmp(f):
-    f.tmp = True
-    return f
-
-class Test(unittest.TestCase):
-    """Unit testing the geometry and geo_filter modules"""
-    def assert_raises(self, exception, call_able, *args):
-        """assertRaises() has changed from py2.6 to 2.7! Here is an attempt to
-        cover both"""
-        import sys
-        if sys.version_info < (2, 7):
-            self.assertRaises(exception, call_able, *args)
-        else:
-            with self.assertRaises(exception):
-                call_able(*args)
-
-           
-    def test_lonlat_precomp(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        lons, lats = area_def.get_lonlats()
-        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001],
-                                     lons=lons, lats=lats)
-        lon, lat = area_def.get_lonlat(400, 400)
-        self.assertAlmostEqual(lon, 5.5028467120975835, 
-                                   msg='lon retrieval from precomputated grid failed')
-        self.assertAlmostEqual(lat, 52.566998432390619, 
-                                   msg='lat retrieval from precomputated grid failed')
-        
-    @tmp
-    def test_cartesian(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        cart_coords = area_def.get_cartesian_coords()
-        exp = 5872039989466.8457031
-        self.assertTrue((cart_coords.sum() - exp) < 1e-7 * exp, 
-                        msg='Calculation of cartesian coordinates failed')   
-    
-    def test_swath(self):
-        lons1 = np.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats1 = np.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        
-        swath_def = geometry.SwathDefinition(lons1, lats1)
-        
-        lons2, lats2 = swath_def.get_lonlats()
-        
-        self.failIf(id(lons1) != id(lons2) or id(lats1) != id(lats2), 
-                    msg='Caching of swath coordinates failed')
-               
-    def test_area_equal(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        self.failIf(area_def != area_def2, 'area_defs are not equal as expected')
-         
-    def test_not_area_equal(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-       
-        msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
-                                   'msg_full',
-                                   {'a': '6378169.0',
-                                    'b': '6356584.0',
-                                    'h': '35785831.0',
-                                    'lon_0': '0',
-                                    'proj': 'geos'},
-                                    3712,
-                                    3712,
-                                    [-5568742.4000000004,
-                                    -5568742.4000000004,
-                                    5568742.4000000004,
-                                    5568742.4000000004]
-                                    )
-        self.failIf(area_def == msg_area, 'area_defs are not expected to be equal')
-       
-    def test_swath_equal(self):
-        lons = np.array([1.2, 1.3, 1.4, 1.5])
-        lats = np.array([65.9, 65.86, 65.82, 65.78])
-        swath_def = geometry.SwathDefinition(lons, lats)
-        swath_def2 = geometry.SwathDefinition(lons, lats)
-        self.failIf(swath_def != swath_def2, 'swath_defs are not equal as expected')
-        
-    def test_swath_not_equal(self):
-        lats1 = np.array([65.9, 65.86, 65.82, 65.78])
-        lons = np.array([1.2, 1.3, 1.4, 1.5])
-        lats2 = np.array([65.91, 65.85, 65.80, 65.75])
-        swath_def = geometry.SwathDefinition(lons, lats1)
-        swath_def2 = geometry.SwathDefinition(lons, lats2)
-        self.failIf(swath_def == swath_def2, 'swath_defs are not expected to be equal')
-
-    def test_swath_equal_area(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        
-        swath_def = geometry.SwathDefinition(*area_def.get_lonlats())
-
-        self.failIf(swath_def != area_def, "swath_def and area_def should be equal")
-
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-
-        self.failIf(area_def != swath_def, "swath_def and area_def should be equal")
-
-    def test_swath_not_equal_area(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        
-        lons = np.array([1.2, 1.3, 1.4, 1.5])
-        lats = np.array([65.9, 65.86, 65.82, 65.78])
-        swath_def = geometry.SwathDefinition(lons, lats)
-
-        self.failIf(swath_def == area_def, "swath_def and area_def should be different")
-
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-
-        self.failIf(area_def == swath_def, "swath_def and area_def should be different")
-        
-    def test_concat_1d(self):
-        lons1 = np.array([1, 2, 3])
-        lats1 = np.array([1, 2, 3])
-        lons2 = np.array([4, 5, 6])
-        lats2 = np.array([4, 5, 6])
-        swath_def1 = geometry.SwathDefinition(lons1, lats1)
-        swath_def2 = geometry.SwathDefinition(lons2, lats2)
-        swath_def_concat = swath_def1.concatenate(swath_def2) 
-        expected = np.array([1, 2, 3, 4, 5, 6])
-        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and 
-                        np.array_equal(swath_def_concat.lons, expected), 
-                        'Failed to concatenate 1D swaths')
-
-    def test_concat_2d(self):
-        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
-        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
-        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
-        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
-        swath_def1 = geometry.SwathDefinition(lons1, lats1)
-        swath_def2 = geometry.SwathDefinition(lons2, lats2)
-        swath_def_concat = swath_def1.concatenate(swath_def2) 
-        expected = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
-        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and 
-                        np.array_equal(swath_def_concat.lons, expected), 
-                        'Failed to concatenate 2D swaths')
-        
-    def test_append_1d(self):
-        lons1 = np.array([1, 2, 3])
-        lats1 = np.array([1, 2, 3])
-        lons2 = np.array([4, 5, 6])
-        lats2 = np.array([4, 5, 6])
-        swath_def1 = geometry.SwathDefinition(lons1, lats1)
-        swath_def2 = geometry.SwathDefinition(lons2, lats2)
-        swath_def1.append(swath_def2) 
-        expected = np.array([1, 2, 3, 4, 5, 6])
-        self.assertTrue(np.array_equal(swath_def1.lons, expected) and 
-                        np.array_equal(swath_def1.lons, expected), 
-                        'Failed to append 1D swaths')
-
-    def test_append_2d(self):
-        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
-        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
-        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
-        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
-        swath_def1 = geometry.SwathDefinition(lons1, lats1)
-        swath_def2 = geometry.SwathDefinition(lons2, lats2)
-        swath_def1.append(swath_def2) 
-        expected = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
-        self.assertTrue(np.array_equal(swath_def1.lons, expected) and 
-                        np.array_equal(swath_def1.lons, expected), 
-                        'Failed to append 2D swaths')
-
-    def test_grid_filter_valid(self):
-        lons = np.array([-170, -30, 30, 170])
-        lats = np.array([20, -40, 50, -80])
-        swath_def = geometry.SwathDefinition(lons, lats)
-        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
-                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
-                                              8, 8,
-                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
-        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           ])
-        grid_filter = geo_filter.GridFilter(filter_area, filter)
-        valid_index = grid_filter.get_valid_index(swath_def)        
-        expected = np.array([1, 0, 0, 1])
-        self.assertTrue(np.array_equal(valid_index, expected), 'Failed to find grid filter')
-    
-    def test_grid_filter(self):
-        lons = np.array([-170, -30, 30, 170])
-        lats = np.array([20, -40, 50, -80])
-        swath_def = geometry.SwathDefinition(lons, lats)
-        data = np.array([1, 2, 3, 4])
-        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
-                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
-                                              8, 8,                                               
-                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
-        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           ])
-        grid_filter = geo_filter.GridFilter(filter_area, filter)
-        swath_def_f, data_f = grid_filter.filter(swath_def, data)
-        expected = np.array([1, 4])
-        self.assertTrue(np.array_equal(data_f, expected), 'Failed grid filtering data')
-        expected_lons = np.array([-170, 170])
-        expected_lats = np.array([20, -80])
-        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons) 
-                        and np.array_equal(swath_def_f.lats[:], expected_lats), 
-                        'Failed finding grid filtering lon lats')
-        
-    def test_grid_filter2D(self):
-        lons = np.array([[-170, -30, 30, 170],
-                         [-170, -30, 30, 170]])
-        lats = np.array([[20, -40, 50, -80],
-                         [25, -35, 55, -75]])
-        swath_def = geometry.SwathDefinition(lons, lats)
-        data1 = np.ones((2, 4))
-        data2 = np.ones((2, 4)) * 2
-        data3 = np.ones((2, 4)) * 3
-        data = np.dstack((data1, data2, data3))
-        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
-                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
-                                              8, 8,                                               
-                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
-        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [1, 1, 1, 1, 0, 0, 0, 0],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           [0, 0, 0, 0, 1, 1, 1, 1],
-                           ])
-        grid_filter = geo_filter.GridFilter(filter_area, filter, nprocs=2)
-        swath_def_f, data_f = grid_filter.filter(swath_def, data)
-        expected = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])        
-        self.assertTrue(np.array_equal(data_f, expected), 'Failed 2D grid filtering data')
-        expected_lons = np.array([-170, 170, -170, 170])
-        expected_lats = np.array([20, -80, 25, -75])
-        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons) 
-                        and np.array_equal(swath_def_f.lats[:], expected_lats), 
-                        'Failed finding 2D grid filtering lon lats')
-    
-    def test_boundary(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    10,
-                                    10,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        proj_x_boundary, proj_y_boundary = area_def.proj_x_coords, area_def.proj_y_coords
-        expected_x = np.array([-1250912.72, -1010912.72, -770912.72, 
-                             -530912.72, -290912.72, -50912.72, 189087.28, 
-                             429087.28, 669087.28, 909087.28])
-        expected_y = np.array([1370031.36, 1130031.36, 890031.36, 650031.36, 
-                               410031.36, 170031.36, -69968.64, -309968.64,  
-                               -549968.64, -789968.64])
-        self.assertTrue(np.allclose(proj_x_boundary, expected_x), 
-                        'Failed to find projection x coords')
-        self.assertTrue(np.allclose(proj_y_boundary, expected_y), 
-                        'Failed to find projection y coords')
-
-   
-    def test_area_extent_ll(self):
-        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    10,
-                                    10,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-        self.assertAlmostEqual(sum(area_def.area_extent_ll), 
-                                   122.06448093539757, 5, 
-                                   'Failed to get lon and lats of area extent')
-    
-    @tmp                               
-    def test_latlong_area(self):
-        area_def = geometry.AreaDefinition('', '', '', 
-                                   {'proj': 'latlong'}, 
-                                    360, 180,
-                                    [-180, -90, 180, 90])
-        lons, lats = area_def.get_lonlats()
-        self.assertEqual(lons[0, 0], -179.5)
-        self.assertEqual(lats[0, 0], 89.5)
-        
-
-    def test_get_xy_from_lonlat(self):
-        """Test the function get_xy_from_lonlat"""
-        from pyresample import utils
-        area_id = 'test'
-        area_name = 'Test area with 2x2 pixels'
-        proj_id = 'test'
-        x_size = 2
-        y_size = 2
-        area_extent = [1000000, 0, 1050000, 50000] 
-        proj_dict = {"proj": 'laea', 
-                     'lat_0': '60', 
-                     'lon_0': '0', 
-                     'a': '6371228.0', 'units': 'm'}
-        area_def = utils.get_area_def(area_id, 
-                                      area_name, 
-                                      proj_id, 
-                                      proj_dict, 
-                                      x_size, y_size, 
-                                      area_extent)
-        import pyproj
-        p__ = pyproj.Proj(proj_dict)
-        lon_ul, lat_ul = p__(1000000, 50000, inverse=True)
-        lon_ur, lat_ur = p__(1050000, 50000, inverse=True)
-        lon_ll, lat_ll = p__(1000000, 0, inverse=True)
-        lon_lr, lat_lr = p__(1050000, 0, inverse=True)
-        
-        eps_lonlat = 0.01
-        eps_meters = 100
-        x__, y__ = area_def.get_xy_from_lonlat(lon_ul + eps_lonlat, 
-                                               lat_ul - eps_lonlat)
-        x_expect, y_expect = 0, 0
-        self.assertEqual(x__, x_expect)
-        self.assertEqual(y__, y_expect)
-        x__, y__ = area_def.get_xy_from_lonlat(lon_ur - eps_lonlat, 
-                                               lat_ur - eps_lonlat)
-        self.assertEqual(x__, 1)
-        self.assertEqual(y__, 0)
-        x__, y__ = area_def.get_xy_from_lonlat(lon_ll + eps_lonlat, 
-                                               lat_ll + eps_lonlat)
-        self.assertEqual(x__, 0)
-        self.assertEqual(y__, 1)
-        x__, y__ = area_def.get_xy_from_lonlat(lon_lr - eps_lonlat, 
-                                               lat_lr + eps_lonlat)
-        self.assertEqual(x__, 1)
-        self.assertEqual(y__, 1)
-
-        lon, lat = p__(1025000 - eps_meters, 25000 - eps_meters, inverse=True)
-        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
-        self.assertEqual(x__, 0)
-        self.assertEqual(y__, 1)
-
-        lon, lat = p__(1025000 + eps_meters, 25000 - eps_meters, inverse=True)
-        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
-        self.assertEqual(x__, 1)
-        self.assertEqual(y__, 1)
-
-        lon, lat = p__(1025000 - eps_meters, 25000 + eps_meters, inverse=True)
-        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
-        self.assertEqual(x__, 0)
-        self.assertEqual(y__, 0)
-
-        lon, lat = p__(1025000 + eps_meters, 25000 + eps_meters, inverse=True)
-        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
-        self.assertEqual(x__, 1)
-        self.assertEqual(y__, 0)
-
-        lon, lat = p__(999000, -10, inverse=True)
-        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, lon, lat)
-        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, 0., 0.)
-
-        # Test getting arrays back:
-        lons = [lon_ll + eps_lonlat, lon_ur - eps_lonlat]
-        lats = [lat_ll + eps_lonlat, lat_ur - eps_lonlat]
-        x__, y__ = area_def.get_xy_from_lonlat(lons, lats)
-
-        x_expects = np.array([0, 1])
-        y_expects = np.array([1, 0])
-        self.assertTrue((x__.data == x_expects).all())
-        self.assertTrue((y__.data == y_expects).all())
-
-
-if __name__ == '__main__':
-    unittest.main()
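
The geometry tests deleted here live on under pyresample/test/test_geometry.py (see the new files listed above). The AreaDefinition pattern they exercise, with values taken from those tests, looks roughly like this (a sketch, assuming pyresample 1.1.3 and pyproj):

import numpy as np

from pyresample import geometry

# The stereographic test area used throughout the deleted tests.
area_def = geometry.AreaDefinition(
    'areaD', 'Europe (3km, HRV, VTC)', 'areaD',
    {'a': '6378144.0', 'b': '6356759.0', 'lat_0': '50.00',
     'lat_ts': '50.00', 'lon_0': '8.00', 'proj': 'stere'},
    800, 800,
    [-1370912.72, -909968.64, 1029087.28, 1490031.36])

# Geographic coordinates of every pixel, and of a single pixel.
lons, lats = area_def.get_lonlats()
lon, lat = area_def.get_lonlat(400, 400)
print(lon, lat)   # roughly (5.50, 52.57), as in the deleted test_lonlat_precomp
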
diff --git a/test/test_grid.py b/test/test_grid.py
deleted file mode 100644
index 8238b99..0000000
--- a/test/test_grid.py
+++ /dev/null
@@ -1,177 +0,0 @@
-import copy
-import unittest
-
-import numpy as np
-
-from pyresample import grid, geometry, utils
-
-
-def mp(f):
-    f.mp = True
-    return f
-
-def tmp(f):
-    f.tmp = True
-    return f
-
-class Test(unittest.TestCase):
-    
-    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-    
-    area_def2 = geometry.AreaDefinition('areaD2', 'Europe (3km, HRV, VTC)', 'areaD2', 
-                                    {'a': '6378144.0',
-                                     'b': '6356759.0',
-                                     'lat_0': '50.00',
-                                     'lat_ts': '50.00',
-                                     'lon_0': '8.00',
-                                     'proj': 'stere'}, 
-                                     5,
-                                     5,
-                                     [-1370912.72,
-                                      -909968.64000000001,
-                                      1029087.28,
-                                      1490031.3600000001])
-        
-    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
-                                   'msg_full',
-                                   {'a': '6378169.0',
-                                    'b': '6356584.0',
-                                    'h': '35785831.0',
-                                    'lon_0': '0',
-                                    'proj': 'geos'},
-                                    3712,
-                                    3712,
-                                    [-5568742.4000000004,
-                                    -5568742.4000000004,
-                                    5568742.4000000004,
-                                    5568742.4000000004]
-                                    )
-
-    def test_linesample(self):
-        data = np.fromfunction(lambda y, x: y*x, (40, 40))
-        rows = np.array([[1, 2], [3, 4]])
-        cols = np.array([[25, 26], [27, 28]])
-        res = grid.get_image_from_linesample(rows, cols, data)
-        expected = np.array([[25., 52.], [81., 112.]])
-        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
-        
-    def test_linesample_multi(self):
-        data1 = np.fromfunction(lambda y, x: y*x, (40, 40))
-        data2 = np.fromfunction(lambda y, x: 2*y*x, (40, 40))
-        data3 = np.fromfunction(lambda y, x: 3*y*x, (40, 40))
-        data = np.zeros((40, 40, 3))
-        data[:, :, 0] = data1
-        data[:, :, 1] = data2
-        data[:, :, 2] = data3
-        rows = np.array([[1, 2], [3, 4]])
-        cols = np.array([[25, 26], [27, 28]])
-        res = grid.get_image_from_linesample(rows, cols, data)
-        expected = np.array([[[25., 50., 75.],
-                                 [52., 104., 156.]],
-                               [[81., 162., 243.],
-                                [ 112.,  224.,  336.]]])
-        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
-        
-    def test_from_latlon(self):
-        data = np.fromfunction(lambda y, x: y*x, (800, 800))
-        lons = np.fromfunction(lambda y, x: x, (10, 10))
-        lats = np.fromfunction(lambda y, x: 50 - (5.0/10)*y, (10, 10))
-        #source_def = grid.AreaDefinition.get_from_area_def(self.area_def)
-        source_def = self.area_def
-        res = grid.get_image_from_lonlats(lons, lats, source_def, data)        
-        expected = np.array([[ 129276.,  141032.,  153370.,  165804.,  178334.,  190575.,
-                            202864.,  214768.,  226176.,  238080.],
-                            [ 133056.,  146016.,  158808.,  171696.,  184320.,  196992.,
-                             209712.,  222480.,  234840.,  247715.],
-                            [ 137026.,  150150.,  163370.,  177215.,  190629.,  203756.,
-                             217464.,  230256.,  243048.,  256373.],
-                            [ 140660.,  154496.,  168714.,  182484.,  196542.,  210650.,
-                             224257.,  238464.,  251712.,  265512.],
-                            [ 144480.,  158484.,  173148.,  187912.,  202776.,  217358.,
-                             231990.,  246240.,  259920.,  274170.],
-                            [ 147968.,  163261.,  178398.,  193635.,  208616.,  223647.,
-                             238728.,  253859.,  268584.,  283898.],
-                            [ 151638.,  167121.,  182704.,  198990.,  214775.,  230280.,
-                             246442.,  261617.,  276792.,  292574.],
-                            [ 154980.,  171186.,  187860.,  204016.,  220542.,  237120.,
-                             253125.,  269806.,  285456.,  301732.],
-                            [ 158500.,  175536.,  192038.,  209280.,  226626.,  243697.,
-                             260820.,  277564.,  293664.,  310408.],
-                            [ 161696.,  179470.,  197100.,  214834.,  232320.,  250236.,
-                             267448.,  285090.,  302328.,  320229.]])
-        self.assertTrue(np.array_equal(res, expected), 'Sampling from lat lon failed')
-        
-    def test_proj_coords(self):
-        #res = grid.get_proj_coords(self.area_def2)
-        res = self.area_def2.get_proj_coords()
-        cross_sum = res[0].sum() + res[1].sum() 
-        expected = 2977965.9999999963
-        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of proj coords failed')
-        
-    def test_latlons(self):
-        #res = grid.get_lonlats(self.area_def2)
-        res = self.area_def2.get_lonlats()
-        cross_sum = res[0].sum() + res[1].sum() 
-        expected = 1440.8280578215431
-        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of lat lons failed')
-        
-    @mp
-    def test_latlons_mp(self):
-        #res = grid.get_lonlats(self.area_def2, nprocs=2)
-        res = self.area_def2.get_lonlats(nprocs=2)
-        cross_sum = res[0].sum() + res[1].sum() 
-        expected = 1440.8280578215431
-        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of lat lons failed')
-        
-    def test_resampled_image(self):
-        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        target_def = self.area_def
-        source_def = self.msg_area
-        res = grid.get_resampled_image(target_def, source_def, data, segments=1)
-        cross_sum = res.sum()
-        expected = 399936.39392500359
-        self.assertAlmostEqual(cross_sum, expected, msg='Resampling of image failed')
-
-    @tmp
-    def test_generate_linesample(self):
-        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        row_indices, col_indices = utils.generate_quick_linesample_arrays(self.msg_area,
-                                                                    self.area_def)
-        res = data[row_indices, col_indices]
-        cross_sum = res.sum()
-        expected = 399936.39392500359
-        self.assertAlmostEqual(cross_sum, expected, msg='Generate linesample failed')
-        self.failIf(row_indices.dtype != np.uint16 or col_indices.dtype != np.uint16, 
-                    'Generate linesample failed. Downcast to uint16 expected')
-    
-    @mp
-    def test_resampled_image_mp(self):
-        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        target_def = self.area_def
-        source_def = self.msg_area
-        res = grid.get_resampled_image(target_def, source_def, data, nprocs=2, segments=1)
-        cross_sum = res.sum()
-        expected = 399936.39392500359
-        self.assertAlmostEqual(cross_sum, expected, msg='Resampling of image mp failed')
-        
-    def test_single_lonlat(self):
-        lon, lat = self.area_def.get_lonlat(400, 400)
-        self.assertAlmostEqual(lon, 5.5028467120975835, msg='Resampling of single lon failed')
-        self.assertAlmostEqual(lat, 52.566998432390619, msg='Resampling of single lat failed')
-        
-    def test_proj4_string(self):
-        proj4_string = self.area_def.proj4_string
-        self.assertEqual(proj4_string, '+a=6378144.0 +b=6356759.0 +lat_ts=50.00 +lon_0=8.00 +proj=stere +lat_0=50.00')
-    
diff --git a/test/test_image.py b/test/test_image.py
deleted file mode 100644
index a8471a1..0000000
--- a/test/test_image.py
+++ /dev/null
@@ -1,202 +0,0 @@
-import os
-import unittest
-
-import numpy
-
-from pyresample import image, geometry, grid, utils
-
-def mask(f):
-    f.mask = True
-    return f
-
-def tmp(f):
-    f.tmp = True
-    return f
-
-
-class Test(unittest.TestCase):
-
-    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-
-    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
-                                   'msg_full',
-                                   {'a': '6378169.0',
-                                    'b': '6356584.0',
-                                    'h': '35785831.0',
-                                    'lon_0': '0',
-                                    'proj': 'geos'},
-                                    3712,
-                                    3712,
-                                    [-5568742.4000000004,
-                                    -5568742.4000000004,
-                                    5568742.4000000004,
-                                    5568742.4000000004]
-                                    )
-    
-    msg_area_resize = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
-                                   'msg_full',
-                                   {'a': '6378169.0',
-                                    'b': '6356584.0',
-                                    'h': '35785831.0',
-                                    'lon_0': '0',
-                                    'proj': 'geos'},
-                                    928,
-                                    928,
-                                    [-5568742.4000000004,
-                                    -5568742.4000000004,
-                                    5568742.4000000004,
-                                    5568742.4000000004]
-                                    )
-
-    @tmp
-    def test_image(self):
-        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum = res.sum()
-        expected = 399936.39392500359
-        self.assertAlmostEqual(cross_sum, expected, msg='ImageContainer resampling quick failed')
-    
-    @tmp
-    def test_image_segments(self):
-        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=8)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum = res.sum()
-        expected = 399936.39392500359
-        self.assertAlmostEqual(cross_sum, expected, msg='ImageContainer resampling quick segments failed')
-        
-    def test_return_type(self):
-        data = numpy.ones((3712, 3712)).astype('int')
-        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        self.assertTrue(data.dtype is res.dtype, msg='Failed to maintain input data type')
-    
-    @mask
-    def test_masked_image(self):
-        data = numpy.zeros((3712, 3712))
-        mask = numpy.zeros((3712, 3712))
-        mask[:, 1865:] = 1
-        data_masked = numpy.ma.array(data, mask=mask)
-        msg_con = image.ImageContainerQuick(data_masked, self.msg_area, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        resampled_mask = res.mask.astype('int')
-        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'), 
-                                  sep=' ').reshape((800, 800))
-        self.assertTrue(numpy.array_equal(resampled_mask, expected), msg='Failed to resample masked array')
-
-    @mask
-    def test_masked_image_fill(self):
-        data = numpy.zeros((3712, 3712))
-        mask = numpy.zeros((3712, 3712))
-        mask[:, 1865:] = 1
-        data_masked = numpy.ma.array(data, mask=mask)
-        msg_con = image.ImageContainerQuick(data_masked, self.msg_area, 
-                                            fill_value=None, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        resampled_mask = res.mask.astype('int')
-        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'), 
-                                  sep=' ').reshape((800, 800))
-        self.assertTrue(numpy.array_equal(resampled_mask, expected), msg='Failed to resample masked array')
-        
-    def test_nearest_neighbour(self):        
-        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum = res.sum()
-        expected = 399936.783062
-        self.assertAlmostEqual(cross_sum, expected, 
-                                   msg='ImageContainer resampling nearest neighbour failed')
-    
-    def test_nearest_resize(self):        
-        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
-        area_con = msg_con.resample(self.msg_area_resize)
-        res = area_con.image_data
-        cross_sum = res.sum()
-        expected = 2212023.0175830
-        self.assertAlmostEqual(cross_sum, expected, 
-                                   msg='ImageContainer resampling nearest neighbour failed')
-        
-    def test_nearest_neighbour_multi(self):        
-        data1 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        data2 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712)) * 2
-        data = numpy.dstack((data1, data2))
-        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
-        area_con = msg_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum1 = res[:, :, 0].sum()
-        expected1 = 399936.783062
-        self.assertAlmostEqual(cross_sum1, expected1, 
-                                   msg='ImageContainer resampling nearest neighbour multi failed')        
-        cross_sum2 = res[:, :, 1].sum()
-        expected2 = 399936.783062 * 2
-        self.assertAlmostEqual(cross_sum2, expected2, 
-                                   msg='ImageContainer resampling nearest neighbour multi failed')
-        
-    def test_nearest_neighbour_multi_preproc(self):
-        data1 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
-        data2 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712)) * 2
-        data = numpy.dstack((data1, data2))
-        msg_con = image.ImageContainer(data, self.msg_area)
-        #area_con = msg_con.resample_area_nearest_neighbour(self.area_def, 50000)
-        row_indices, col_indices = \
-            utils.generate_nearest_neighbour_linesample_arrays(self.msg_area, 
-                                                               self.area_def, 
-                                                               50000)
-        res = msg_con.get_array_from_linesample(row_indices, col_indices)
-        cross_sum1 = res[:, :, 0].sum()
-        expected1 = 399936.783062
-        self.assertAlmostEqual(cross_sum1, expected1, 
-                                   msg='ImageContainer resampling nearest neighbour multi preproc failed')        
-        cross_sum2 = res[:, :, 1].sum()
-        expected2 = 399936.783062 * 2
-        self.assertAlmostEqual(cross_sum2, expected2, 
-                                   msg='ImageContainer resampling nearest neighbour multi preproc failed')
-            
-    def test_nearest_swath(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=1)
-        area_con = swath_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum = res.sum()        
-        expected = 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='ImageContainer swath resampling nearest failed')
-
-    def test_nearest_swath_segments(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))
-        data = numpy.dstack(3 * (data,))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=2)
-        area_con = swath_con.resample(self.area_def)
-        res = area_con.image_data
-        cross_sum = res.sum()        
-        expected = 3 * 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='ImageContainer swath segments resampling nearest failed')
-
-
diff --git a/test/test_kd_tree.py b/test/test_kd_tree.py
deleted file mode 100644
index cdd5ef4..0000000
--- a/test/test_kd_tree.py
+++ /dev/null
@@ -1,836 +0,0 @@
-from __future__ import with_statement
-
-import os
-import sys
-import unittest
-
-import warnings
-if sys.version_info < (2, 6):
-    warnings.simplefilter("ignore")
-else:
-    warnings.simplefilter("always")
-
-import numpy
-
-from pyresample import kd_tree, utils, geometry, data_reduce
-
-
-def mp(f):
-    f.mp = True
-    return f
-
-def quick(f):
-    f.quick = True
-    return f
-
-def tmp(f):
-    f.tmp = True
-    return f
-
-class Test(unittest.TestCase):
-
-    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
-                                   {'a': '6378144.0',
-                                    'b': '6356759.0',
-                                    'lat_0': '50.00',
-                                    'lat_ts': '50.00',
-                                    'lon_0': '8.00',
-                                    'proj': 'stere'}, 
-                                    800,
-                                    800,
-                                    [-1370912.72,
-                                     -909968.64000000001,
-                                     1029087.28,
-                                     1490031.3600000001])
-
-    tdata = numpy.array([1, 2, 3])
-    tlons = numpy.array([11.280789, 12.649354, 12.080402])
-    tlats = numpy.array([56.011037, 55.629675, 55.641535])
-    tswath = geometry.SwathDefinition(lons=tlons, lats=tlats)
-    tgrid = geometry.CoordinateDefinition(lons=numpy.array([12.562036]), 
-                                          lats=numpy.array([55.715613])) 
-
-                   
-    def test_nearest_base(self):     
-        res = kd_tree.resample_nearest(self.tswath,\
-                                     self.tdata.ravel(), self.tgrid,\
-                                     100000, reduce_data=False, segments=1)
-        self.assertTrue(res[0] == 2, 'Failed to calculate nearest neighbour')
-    
-    @tmp
-    def test_gauss_base(self):
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(self.tswath, \
-                                             self.tdata.ravel(), self.tgrid,\
-                                             50000, 25000, reduce_data=False, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(self.tswath, \
-                                             self.tdata.ravel(), self.tgrid,\
-                                             50000, 25000, reduce_data=False, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
-                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')    
-        self.assertAlmostEqual(res[0], 2.2020729, 5, \
-                                   'Failed to calculate gaussian weighting')
-    
-       
-    def test_custom_base(self):
-        def wf(dist):
-            return 1 - dist/100000.0
-        
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_custom(self.tswath,\
-                                         self.tdata.ravel(), self.tgrid,\
-                                         50000, wf, reduce_data=False, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_custom(self.tswath,\
-                                             self.tdata.ravel(), self.tgrid,\
-                                             50000, wf, reduce_data=False, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
-                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')        
-        self.assertAlmostEqual(res[0], 2.4356757, 5,\
-                                   'Failed to calculate custom weighting')
-
-    @tmp
-    def test_gauss_uncert(self):
-        sigma = utils.fwhm2sigma(41627.730557884883)
-        if sys.version_info < (2, 6):
-            res, stddev, count = kd_tree.resample_gauss(self.tswath, self.tdata, 
-                                                         self.tgrid, 100000, sigma, 
-                                                         with_uncert=True)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res, stddev, count = kd_tree.resample_gauss(self.tswath, self.tdata, 
-                                                            self.tgrid, 100000, sigma, 
-                                                            with_uncert=True)
-                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
-                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')
-
-        expected_res = 2.20206560694
-        expected_stddev = 0.707115076173
-        expected_count = 3
-        self.assertAlmostEqual(res[0], expected_res, 5, \
-                                   'Failed to calculate gaussian weighting with uncertainty')
-        self.assertAlmostEqual(stddev[0], expected_stddev, 5, \
-                                   'Failed to calculate uncertainty for gaussian weighting')
-        self.assertEqual(count[0], expected_count, 'Wrong data point count for gaussian weighting with uncertainty')
-
-    @tmp
-    def test_custom_uncert(self):
-        def wf(dist):
-            return 1 - dist/100000.0
-
-        if sys.version_info < (2, 6):
-            res, stddev, counts = kd_tree.resample_custom(self.tswath,
-                                                         self.tdata, self.tgrid,
-                                                         100000, wf, with_uncert=True)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res, stddev, counts = kd_tree.resample_custom(self.tswath,
-                                                         self.tdata, self.tgrid,
-                                                         100000, wf, with_uncert=True)   
-                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
-                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')
-
-        self.assertAlmostEqual(res[0], 2.32193149, 5, \
-                                   'Failed to calculate custom weighting with uncertainty')
-        self.assertAlmostEqual(stddev[0], 0.81817972, 5,
-                               'Failed to calculate uncertainty for custom weighting')
-        self.assertEqual(counts[0], 3, 'Wrong data point count for custom weighting with uncertainty')
-
-
-    
-    def test_nearest(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, segments=1)        
-        cross_sum = res.sum()        
-        expected = 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath resampling nearest failed')
-    
-       
-    def test_nearest_1d(self):
-        data = numpy.fromfunction(lambda x, y: x * y, (800, 800))        
-        lons = numpy.fromfunction(lambda x: 3 + x / 100. , (500,))
-        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(self.area_def, data.ravel(),
-                                       swath_def, 50000, segments=1)
-        cross_sum = res.sum()        
-        expected = 35821299.0
-        self.assertEqual(res.shape, (500,),
-                             msg='Swath resampling nearest 1d failed')
-        self.assertEqual(cross_sum, expected,
-                             msg='Swath resampling nearest 1d failed')
-    
-    
-    def test_nearest_empty(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, segments=1)        
-        cross_sum = res.sum()        
-        expected = 0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath resampling nearest empty failed')
-    
-    
-    def test_nearest_empty_multi(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data_multi,\
-                                     self.area_def, 50000, segments=1)                
-        self.assertEqual(res.shape, (800, 800, 3),\
-                             msg='Swath resampling nearest empty multi failed')
-    
-    
-    def test_nearest_empty_multi_masked(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data_multi,\
-                                     self.area_def, 50000, segments=1,
-                                     fill_value=None)                
-        self.assertEqual(res.shape, (800, 800, 3),
-                             msg='Swath resampling nearest empty multi masked failed')
-    
-    
-    def test_nearest_empty_masked(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, segments=1, 
-                                     fill_value=None)        
-        cross_sum = res.mask.sum()        
-        expected = res.size
-        self.assertTrue(cross_sum == expected,
-                        msg='Swath resampling nearest empty masked failed')
-    
-    
-    def test_nearest_segments(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, segments=2)        
-        cross_sum = res.sum()        
-        expected = 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath resampling nearest segments failed')
-    
-    
-    def test_nearest_remap(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, segments=1)
-        remap = kd_tree.resample_nearest(self.area_def, res.ravel(),\
-                                       swath_def, 5000, segments=1)        
-        cross_sum = remap.sum()
-        expected = 22275.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Grid remapping nearest failed')
-    
-    
-    def test_nearest_mp(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
-                                     self.area_def, 50000, nprocs=2, segments=1)
-        cross_sum = res.sum()
-        expected = 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath resampling mp nearest failed')
-    
-    
-    def test_nearest_multi(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        res = kd_tree.resample_nearest(swath_def, data_multi,\
-                                     self.area_def, 50000, segments=1)        
-        cross_sum = res.sum()
-        expected = 3 * 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath multi channel resampling nearest failed')
-    
-    
-    def test_nearest_multi_unraveled(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.dstack((data, data, data))
-        res = kd_tree.resample_nearest(swath_def, data_multi,\
-                                     self.area_def, 50000, segments=1)        
-        cross_sum = res.sum()
-        expected = 3 * 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath multi channel resampling nearest failed')
-    
-       
-    def test_gauss_sparse(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_gauss(swath_def, data.ravel(),\
-                                     self.area_def, 50000, 25000, fill_value=-1, segments=1)        
-        cross_sum = res.sum()        
-        expected = 15387753.9852
-        self.assertAlmostEqual(cross_sum, expected, places=3,
-                               msg='Swath gauss sparse resampling failed')
-    
-         
-    def test_gauss(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data.ravel(),\
-                                         self.area_def, 50000, 25000, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data.ravel(),\
-                                             self.area_def, 50000, 25000, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')        
-        cross_sum = res.sum()        
-        expected = 4872.81050892
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath resampling gauss failed')
-
-    
-    def test_gauss_fwhm(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data.ravel(),\
-                                         self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data.ravel(),\
-                                             self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')        
-        cross_sum = res.sum()        
-        expected = 4872.81050892
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath resampling gauss failed')
-    
-    
-    def test_gauss_multi(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                         self.area_def, 50000, [25000, 15000, 10000], segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                             self.area_def, 50000, [25000, 15000, 10000], segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning') 
-        cross_sum = res.sum()        
-        expected = 1461.84313918
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel resampling gauss failed')
-
-    @tmp
-    def test_gauss_multi_uncert(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        if sys.version_info < (2, 6):
-            res, stddev, counts = kd_tree.resample_gauss(swath_def, data_multi,\
-                                                self.area_def, 50000, [25000, 15000, 10000], 
-                                                segments=1, with_uncert=True)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res, stddev, counts = kd_tree.resample_gauss(swath_def, data_multi,\
-                                                    self.area_def, 50000, [25000, 15000, 10000], 
-                                                    segments=1, with_uncert=True)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning') 
-        cross_sum = res.sum()
-        cross_sum_stddev = stddev.sum()
-        cross_sum_counts = counts.sum()
-        expected = 1461.84313918
-        expected_stddev = 0.446204424799
-        expected_counts = 4934802.0
-        self.assertTrue(res.shape == stddev.shape and stddev.shape == counts.shape and counts.shape == (800, 800, 3))
-        self.assertAlmostEqual(cross_sum, expected,
-                                msg='Swath multi channel resampling gauss failed on data')
-        self.assertAlmostEqual(cross_sum_stddev, expected_stddev,
-                                msg='Swath multi channel resampling gauss failed on stddev')
-        self.assertAlmostEqual(cross_sum_counts, expected_counts,
-                                msg='Swath multi channel resampling gauss failed on counts')
-    
-    
-    def test_gauss_multi_mp(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                         self.area_def, 50000, [25000, 15000, 10000],\
-                                         nprocs=2, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                             self.area_def, 50000, [25000, 15000, 10000],\
-                                             nprocs=2, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning') 
-        cross_sum = res.sum()
-        expected = 1461.84313918
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel resampling gauss failed') 
-    
-    
-    def test_gauss_multi_mp_segments(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                         self.area_def, 50000, [25000, 15000, 10000],\
-                                         nprocs=2, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                             self.area_def, 50000, [25000, 15000, 10000],\
-                                             nprocs=2, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-        cross_sum = res.sum()
-        expected = 1461.84313918
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel segments resampling gauss failed')
-    
-    
-    def test_gauss_multi_mp_segments_empty(self):
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 165 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        res = kd_tree.resample_gauss(swath_def, data_multi,\
-                                     self.area_def, 50000, [25000, 15000, 10000],\
-                                     nprocs=2, segments=1)
-        cross_sum = res.sum()
-        self.assertTrue(cross_sum == 0,
-                        msg=('Swath multi channel segments empty ' 
-                             'resampling gauss failed')) 
-    
-    
-    def test_custom(self):
-        def wf(dist):
-            return 1 - dist/100000.0
-                    
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_custom(swath_def, data.ravel(),\
-                                          self.area_def, 50000, wf, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_custom(swath_def, data.ravel(),\
-                                              self.area_def, 50000, wf, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-        cross_sum = res.sum()
-        expected = 4872.81050729
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath custom resampling failed')
-    
-    
-    def test_custom_multi(self):
-        def wf1(dist):
-            return 1 - dist/100000.0
-        
-        def wf2(dist):
-            return 1
-        
-        def wf3(dist):
-            return numpy.cos(dist)**2
-        
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        if sys.version_info < (2, 6):
-            res = kd_tree.resample_custom(swath_def, data_multi,\
-                                          self.area_def, 50000, [wf1, wf2, wf3], segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                res = kd_tree.resample_custom(swath_def, data_multi,\
-                                              self.area_def, 50000, [wf1, wf2, wf3], segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-        cross_sum = res.sum()
-        expected = 1461.842980746
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel custom resampling failed')
-        
-    def test_reduce(self):
-        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
-        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
-        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
-        grid_lons, grid_lats = self.area_def.get_lonlats()
-        lons, lats, data = data_reduce.swath_from_lonlat_grid(grid_lons, grid_lats, 
-                                                              lons, lats, data, 
-                                                              7000)
-        cross_sum = data.sum()
-        expected = 20514375.0
-        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
-    
-    def test_reduce_boundary(self):
-        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
-        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
-        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
-        boundary_lonlats = self.area_def.get_boundary_lonlats()
-        lons, lats, data = data_reduce.swath_from_lonlat_boundaries(boundary_lonlats[0],
-                                                              boundary_lonlats[1], 
-                                                              lons, lats, data, 
-                                                              7000)
-        cross_sum = data.sum()
-        expected = 20514375.0
-        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
-        
-    def test_cartesian_reduce(self):
-        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
-        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
-        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
-        #grid = utils.generate_cartesian_grid(self.area_def)
-        grid = self.area_def.get_cartesian_coords()       
-        lons, lats, data = data_reduce.swath_from_cartesian_grid(grid, lons, lats, data, 
-                                                                 7000)
-        cross_sum = data.sum()
-        expected = 20514375.0
-        self.assertAlmostEqual(cross_sum, expected, msg='Cartesian reduce data failed')
-    
-    def test_area_con_reduce(self):
-        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
-        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
-        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
-        grid_lons, grid_lats = self.area_def.get_lonlats()
-        valid_index = data_reduce.get_valid_index_from_lonlat_grid(grid_lons, grid_lats, 
-                                                                   lons, lats, 7000) 
-        data = data[valid_index]
-        cross_sum = data.sum()
-        expected = 20514375.0
-        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
-       
-    def test_area_con_cartesian_reduce(self):
-        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
-        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
-        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
-        cart_grid = self.area_def.get_cartesian_coords()
-        valid_index = data_reduce.get_valid_index_from_cartesian_grid(cart_grid, 
-                                                                      lons, lats, 7000)
-        data = data[valid_index]
-        cross_sum = data.sum()
-        expected = 20514375.0
-        self.assertAlmostEqual(cross_sum, expected, msg='Cartesian reduce data failed')
-               
-    def test_masked_nearest(self):
-        data = numpy.ones((50, 10))
-        data[:, 5:] = 2
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        mask = numpy.ones((50, 10))
-        mask[:, :5] = 0
-        masked_data = numpy.ma.array(data, mask=mask)
-        res = kd_tree.resample_nearest(swath_def, masked_data.ravel(), 
-                                     self.area_def, 50000, segments=1)
-        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                    'test_files', 
-                                                    'mask_test_nearest_mask.dat'), 
-                                                    sep=' ').reshape((800, 800))
-        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                    'test_files', 
-                                                    'mask_test_nearest_data.dat'), 
-                                                    sep=' ').reshape((800, 800))        
-        self.assertTrue(numpy.array_equal(expected_mask, res.mask), 
-                        msg='Resampling of swath mask failed')
-        self.assertTrue(numpy.array_equal(expected_data, res.data), 
-                        msg='Resampling of swath masked data failed')
-           
-    def test_masked_nearest_1d(self):
-        data = numpy.ones((800, 800))
-        data[:400, :] = 2
-        lons = numpy.fromfunction(lambda x: 3 + x / 100. , (500,))
-        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        mask = numpy.ones((800, 800))
-        mask[400:, :] = 0
-        masked_data = numpy.ma.array(data, mask=mask)
-        res = kd_tree.resample_nearest(self.area_def, masked_data.ravel(),
-                                       swath_def, 50000, segments=1)
-        self.assertEqual(res.mask.sum(), 108,
-                             msg='Swath resampling masked nearest 1d failed')
-        
-    
-    def test_masked_gauss(self):
-        data = numpy.ones((50, 10))
-        data[:, 5:] = 2
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        mask = numpy.ones((50, 10))
-        mask[:, :5] = 0
-        masked_data = numpy.ma.array(data, mask=mask)
-        res = kd_tree.resample_gauss(swath_def, masked_data.ravel(),\
-                                   self.area_def, 50000, 25000, segments=1)
-        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                    'test_files', 
-                                                    'mask_test_mask.dat'), 
-                                                    sep=' ').reshape((800, 800))
-        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                    'test_files', 
-                                                    'mask_test_data.dat'), 
-                                                    sep=' ').reshape((800, 800))
-        expected = expected_data.sum()
-        cross_sum = res.data.sum()
-        
-        self.assertTrue(numpy.array_equal(expected_mask, res.mask), 
-                        msg='Gauss resampling of swath mask failed')
-        self.assertAlmostEqual(cross_sum, expected, places=3,\
-                                   msg='Gauss resampling of swath masked data failed')
-        
-     
-    def test_masked_fill_float(self):
-        data = numpy.ones((50, 10))
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(), 
-                                     self.area_def, 50000, fill_value=None, segments=1)
-        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                         'test_files', 
-                                                         'mask_test_fill_value.dat'), 
-                                                         sep=' ').reshape((800, 800))
-        fill_mask = res.mask
-        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
-                         msg='Failed to create fill mask on float data')
-        
-    def test_masked_fill_int(self):
-        data = numpy.ones((50, 10)).astype('int')
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, data.ravel(), 
-                                     self.area_def, 50000, fill_value=None, segments=1)
-        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                         'test_files', 
-                                                         'mask_test_fill_value.dat'), 
-                                                         sep=' ').reshape((800, 800))
-        fill_mask = res.mask
-        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
-                        msg='Failed to create fill mask on integer data')
-        
-    def test_masked_full(self):
-        data = numpy.ones((50, 10))
-        data[:, 5:] = 2
-        mask = numpy.ones((50, 10))
-        mask[:, :5] = 0
-        masked_data = numpy.ma.array(data, mask=mask)
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, 
-                                    masked_data.ravel(), self.area_def, 50000,
-                                    fill_value=None, segments=1)
-        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                         'test_files', 
-                                                         'mask_test_full_fill.dat'), 
-                                                         sep=' ').reshape((800, 800))
-        fill_mask = res.mask
-
-        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
-                         msg='Failed to create fill mask on masked data')
-        
-    def test_masked_full_multi(self):
-        data = numpy.ones((50, 10))
-        data[:, 5:] = 2
-        mask1 = numpy.ones((50, 10))
-        mask1[:, :5] = 0
-        mask2 = numpy.ones((50, 10))
-        mask2[:, 5:] = 0
-        mask3 = numpy.ones((50, 10))
-        mask3[:25, :] = 0
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(), data.ravel()))
-        mask_multi = numpy.column_stack((mask1.ravel(), mask2.ravel(), mask3.ravel()))
-        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        res = kd_tree.resample_nearest(swath_def, 
-                                    masked_data, self.area_def, 50000,
-                                    fill_value=None, segments=1)
-        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                         'test_files', 
-                                                         'mask_test_full_fill_multi.dat'), 
-                                                         sep=' ').reshape((800, 800, 3))
-        fill_mask = res.mask
-        cross_sum = res.sum()
-        expected = 357140.0
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Failed to resample masked data')        
-        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
-                         msg='Failed to create fill mask on masked data')
-        
-    def test_nearest_from_sample(self):
-        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        valid_input_index, valid_output_index, index_array, distance_array = \
-                                    kd_tree.get_neighbour_info(swath_def, 
-                                                             self.area_def, 
-                                                             50000, neighbours=1, segments=1)
-        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), data.ravel(), 
-                                                   valid_input_index, valid_output_index, 
-                                                   index_array)        
-        cross_sum = res.sum()        
-        expected = 15874591.0
-        self.assertEqual(cross_sum, expected,\
-                             msg='Swath resampling from neighbour info nearest failed')
-    
-    
-    def test_custom_multi_from_sample(self):
-        def wf1(dist):
-            return 1 - dist/100000.0
-        
-        def wf2(dist):
-            return 1
-        
-        def wf3(dist):
-            return numpy.cos(dist)**2
-        
-        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
-        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
-        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
-                                         data.ravel()))
-        
-        if sys.version_info < (2, 6):
-            valid_input_index, valid_output_index, index_array, distance_array = \
-                                        kd_tree.get_neighbour_info(swath_def, 
-                                                                   self.area_def, 
-                                                                   50000, segments=1)
-        else:
-            with warnings.catch_warnings(record=True) as w:
-                valid_input_index, valid_output_index, index_array, distance_array = \
-                                            kd_tree.get_neighbour_info(swath_def, 
-                                                                       self.area_def, 
-                                                                       50000, segments=1)
-                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
-                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
-            
-        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800), 
-                                                     data_multi, 
-                                                     valid_input_index, valid_output_index, 
-                                                     index_array, distance_array, 
-                                                     weight_funcs=[wf1, wf2, wf3])
-                        
-        cross_sum = res.sum()
-        
-        expected = 1461.842980746
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel custom resampling from neighbour info failed 1')
-        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800), 
-                                                   data_multi, 
-                                                   valid_input_index, valid_output_index, 
-                                                   index_array, distance_array, 
-                                                   weight_funcs=[wf1, wf2, wf3])
-        
-        # Look for error where input data has been manipulated    
-        cross_sum = res.sum()
-        expected = 1461.842980746
-        self.assertAlmostEqual(cross_sum, expected,\
-                                   msg='Swath multi channel custom resampling from neighbour info failed 2')
-
-
-    def test_masked_multi_from_sample(self):
-        data = numpy.ones((50, 10))
-        data[:, 5:] = 2
-        mask1 = numpy.ones((50, 10))
-        mask1[:, :5] = 0
-        mask2 = numpy.ones((50, 10))
-        mask2[:, 5:] = 0
-        mask3 = numpy.ones((50, 10))
-        mask3[:25, :] = 0
-        data_multi = numpy.column_stack((data.ravel(), data.ravel(), data.ravel()))
-        mask_multi = numpy.column_stack((mask1.ravel(), mask2.ravel(), mask3.ravel()))
-        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
-        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
-        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
-        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
-        valid_input_index, valid_output_index, index_array, distance_array = \
-                                    kd_tree.get_neighbour_info(swath_def, 
-                                                             self.area_def, 
-                                                             50000, neighbours=1, segments=1)
-        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), 
-                                                   masked_data, 
-                                                   valid_input_index, 
-                                                   valid_output_index, index_array,
-                                                   fill_value=None)
-        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
-                                                         'test_files', 
-                                                         'mask_test_full_fill_multi.dat'), 
-                                                         sep=' ').reshape((800, 800, 3))
-        fill_mask = res.mask        
-        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
-                         msg='Failed to create fill mask on masked data')
-        
-        
-
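
The tests removed above exercise pyresample's two-step kd-tree interface: kd_tree.get_neighbour_info() performs the neighbour search once, and kd_tree.get_sample_from_neighbour_info() reuses the cached arrays for any number of channels, with optional per-channel weight functions for 'custom' resampling. A minimal sketch of that workflow follows; the swath geometry and the 800x800 stereographic target area are illustrative stand-ins for the self.area_def the test class defines earlier in the file.

import numpy as np
from pyresample import geometry, kd_tree

# Illustrative swath: 500 x 100 points over roughly 3-13E, 25-75N.
lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, (500, 100))
lats = np.fromfunction(lambda y, x: 75 - (50.0 / 500) * y, (500, 100))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

# Three identical channels stacked column-wise, as in the removed test.
data = np.fromfunction(lambda y, x: (y + x) * 1e-6, (500, 100))
data_multi = np.column_stack((data.ravel(),) * 3)

# Illustrative 800x800 target grid over Europe (stereographic projection).
area_def = geometry.AreaDefinition(
    'areaD', 'Europe 3km', 'areaD',
    {'proj': 'stere', 'lat_0': '50.0', 'lat_ts': '50.0', 'lon_0': '8.0',
     'a': '6378144.0', 'b': '6356759.0'},
    800, 800,
    (-1370912.72, -909968.64, 1029087.28, 1490031.36))

# Step 1: the expensive neighbour search, done once per swath/area pair.
valid_in, valid_out, index_array, distance_array = \
    kd_tree.get_neighbour_info(swath_def, area_def, 50000, segments=1)

# Step 2: cheap per-channel resampling from the cached neighbour arrays.
res = kd_tree.get_sample_from_neighbour_info(
    'custom', (800, 800), data_multi,
    valid_in, valid_out, index_array, distance_array,
    weight_funcs=[lambda dist: 1 - dist / 100000.0] * 3)

Only the second call runs per channel, which is why the removed test also checks that repeated get_sample_from_neighbour_info() calls leave the input data untouched.
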
diff --git a/test/test_plot.py b/test/test_plot.py
deleted file mode 100644
index d513f6e..0000000
--- a/test/test_plot.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import unittest
-import os
-
-import numpy as np
-
-import pyresample as pr
-
-try:
-    import matplotlib
-    matplotlib.use('Agg')
-except ImportError:
-    pass # Postpone fail to individual tests
-
-def tmp(f):
-    f.tmp = True
-    return f	
-
-class Test(unittest.TestCase):
-    
-    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), 
-                               'test_files', 'ssmis_swath.npz'))
-    data = np.load(filename)['data']
-    lons = data[:, 0].astype(np.float64)
-    lats = data[:, 1].astype(np.float64)
-    tb37v = data[:, 2].astype(np.float64)
- 
-    def test_ellps2axis(self):
-        a, b = pr.plot.ellps2axis('WGS84')
-        self.assertAlmostEqual(a, 6378137.0, 
-                                   msg='Failed to get semi-major axis of ellipsis')
-        self.assertAlmostEqual(b, 6356752.3142451793, 
-                                   msg='Failed to get semi-minor axis of ellipsis')
-    
-    def test_area_def2basemap(self):
-        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
-                                         'test_files', 'areas.cfg'), 'ease_sh')[0]
-        bmap = pr.plot.area_def2basemap(area_def)
-        self.assertTrue(bmap.rmajor == bmap.rminor and 
-                        bmap.rmajor == 6371228.0, 
-                        'Failed to create Basemap object')
-
-    def test_plate_carreeplot(self):
-        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
-                                            'test_files', 'areas.cfg'), 'pc_world')[0]
-        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
-        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
-                                             radius_of_influence=20000, 
-                                             fill_value=None)		
-        plt = pr.plot._get_quicklook(area_def, result, num_meridians=0, 
-                                     num_parallels=0)
-    def test_easeplot(self):
-        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
-                                            'test_files', 'areas.cfg'), 'ease_sh')[0]
-        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
-        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
-                                             radius_of_influence=20000, 
-                                             fill_value=None)		
-        plt = pr.plot._get_quicklook(area_def, result)
-
-    def test_orthoplot(self):
-        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
-                                            'test_files', 'areas.cfg'), 'ortho')[0]
-        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
-        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
-                                             radius_of_influence=20000, 
-                                             fill_value=None)		
-        plt = pr.plot._get_quicklook(area_def, result)
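
The removed test/test_plot.py drives the Basemap-backed quicklook code through the private helper pr.plot._get_quicklook(); the public entry points are pr.plot.show_quicklook() and pr.plot.save_quicklook(). A rough equivalent using the same SSMIS swath and the 'ease_sh' area from the test data is sketched below; the test_dir lookup and the output filename are assumptions, and the example needs matplotlib and basemap installed as well as the test_files shipped with the package.

import os

import numpy as np
import pyresample as pr
import pyresample.test

# Locate the test data under pyresample/test/test_files (assumes the installed
# package ships its test data; point test_dir at a source checkout otherwise).
test_dir = os.path.join(os.path.dirname(pyresample.test.__file__), 'test_files')
data = np.load(os.path.join(test_dir, 'ssmis_swath.npz'))['data']
lons = data[:, 0].astype(np.float64)
lats = data[:, 1].astype(np.float64)
tb37v = data[:, 2].astype(np.float64)

area_def = pr.utils.parse_area_file(os.path.join(test_dir, 'areas.cfg'),
                                    'ease_sh')[0]
swath_def = pr.geometry.SwathDefinition(lons=lons, lats=lats)
result = pr.kd_tree.resample_nearest(swath_def, tb37v, area_def,
                                     radius_of_influence=20000,
                                     fill_value=None)

# Writes a PNG instead of opening a window; needs matplotlib + basemap.
pr.plot.save_quicklook('tb37v_ease_sh.png', area_def, result,
                       label='Tb 37v (K)')
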

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/pyresample.git


