[pyresample] 01/11: Imported Upstream version 1.0.0

Antonio Valentino a_valentino-guest at moszumanska.debian.org
Sat Jun 21 09:18:18 UTC 2014


This is an automated email from the git hooks/post-receive script.

a_valentino-guest pushed a commit to branch master
in repository pyresample.

commit 723a883bd04bb648235fd07ea7e7bc05a282c988
Author: Antonio Valentino <antonio.valentino at tiscali.it>
Date:   Sat Aug 10 10:06:25 2013 +0000

    Imported Upstream version 1.0.0
---
 LICENSE.txt                                   | 674 +++++++++++++++++++++
 MANIFEST.in                                   |   5 +
 PKG-INFO                                      |  16 +
 README                                        |   3 +
 docs/Makefile                                 |  89 +++
 docs/source/API.rst                           |  39 ++
 docs/source/_static/images/tb37_multi.png     | Bin 0 -> 244656 bytes
 docs/source/_static/images/tb37v_bmng.png     | Bin 0 -> 345622 bytes
 docs/source/_static/images/tb37v_ortho.png    | Bin 0 -> 69084 bytes
 docs/source/_static/images/tb37v_pc.png       | Bin 0 -> 110843 bytes
 docs/source/_static/images/tb37v_quick.png    | Bin 0 -> 174265 bytes
 docs/source/conf.py                           | 200 ++++++
 docs/source/data_reduce.rst                   |  54 ++
 docs/source/geo_def.rst                       | 312 ++++++++++
 docs/source/geo_filter.rst                    |  41 ++
 docs/source/grid.rst                          | 162 +++++
 docs/source/index.rst                         |  28 +
 docs/source/installation.rst                  |  78 +++
 docs/source/multi.rst                         |  36 ++
 docs/source/plot.rst                          | 139 +++++
 docs/source/preproc.rst                       |  53 ++
 docs/source/swath.rst                         | 212 +++++++
 pyresample.egg-info/PKG-INFO                  |  16 +
 pyresample.egg-info/SOURCES.txt               |  60 ++
 pyresample.egg-info/dependency_links.txt      |   1 +
 pyresample.egg-info/not-zip-safe              |   1 +
 pyresample.egg-info/requires.txt              |   9 +
 pyresample.egg-info/top_level.txt             |   1 +
 pyresample/__init__.py                        |  42 ++
 pyresample/_multi_proc.py                     | 104 ++++
 pyresample/_spatial_mp.py                     | 275 +++++++++
 pyresample/data_reduce.py                     | 302 ++++++++++
 pyresample/geo_filter.py                      |  86 +++
 pyresample/geometry.py                        | 838 ++++++++++++++++++++++++++
 pyresample/grid.py                            | 235 ++++++++
 pyresample/image.py                           | 281 +++++++++
 pyresample/kd_tree.py                         | 775 ++++++++++++++++++++++++
 pyresample/plot.py                            | 244 ++++++++
 pyresample/spherical_geometry.py              | 415 +++++++++++++
 pyresample/utils.py                           | 297 +++++++++
 pyresample/version.py                         |  18 +
 setup.cfg                                     |   5 +
 setup.py                                      |  54 ++
 test/test_files/areas.cfg                     |  35 ++
 test/test_files/mask_grid.dat                 |   1 +
 test/test_files/mask_test_data.dat            |   1 +
 test/test_files/mask_test_fill_value.dat      |   1 +
 test/test_files/mask_test_full_fill.dat       |   1 +
 test/test_files/mask_test_full_fill_multi.dat |   1 +
 test/test_files/mask_test_mask.dat            |   1 +
 test/test_files/mask_test_nearest_data.dat    |   1 +
 test/test_files/mask_test_nearest_mask.dat    |   1 +
 test/test_files/quick_mask_test.dat           |   1 +
 test/test_files/ssmis_swath.npz               | Bin 0 -> 3603074 bytes
 test/test_geometry.py                         | 513 ++++++++++++++++
 test/test_grid.py                             | 177 ++++++
 test/test_image.py                            | 202 +++++++
 test/test_kd_tree.py                          | 736 ++++++++++++++++++++++
 test/test_plot.py                             |  71 +++
 test/test_spherical_geometry.py               | 427 +++++++++++++
 test/test_swath.py                            |  62 ++
 test/test_utils.py                            |  53 ++
 62 files changed, 8485 insertions(+)

diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..55773da
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,5 @@
+include docs/Makefile
+recursive-include docs/source *
+include test/test_files/*
+include LICENSE.txt
+include MANIFEST.in
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..a8e3315
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.1
+Name: pyresample
+Version: 1.0.0
+Summary: Resampling of remote sensing data in Python
+Home-page: UNKNOWN
+Author: Esben S. Nielsen
+Author-email: esn at dmi.dk
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+Classifier: Programming Language :: Python
+Classifier: Operating System :: OS Independent
+Classifier: Intended Audience :: Science/Research
+Classifier: Topic :: Scientific/Engineering
diff --git a/README b/README
new file mode 100644
index 0000000..5c5095c
--- /dev/null
+++ b/README
@@ -0,0 +1,3 @@
+Python package for geospatial resampling
+
+Look at http://code.google.com/p/pyresample/ and http://pytroll.org/ for more information.
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..5edde6a
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,89 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    = -a
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html      to make standalone HTML files"
+	@echo "  dirhtml   to make HTML files named index.html in directories"
+	@echo "  pickle    to make pickle files"
+	@echo "  json      to make JSON files"
+	@echo "  htmlhelp  to make HTML files and a HTML help project"
+	@echo "  qthelp    to make HTML files and a qthelp project"
+	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  changes   to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck to check all external links for integrity"
+	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyresample.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyresample.qhc"
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+	      "run these through (pdf)latex."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/docs/source/API.rst b/docs/source/API.rst
new file mode 100644
index 0000000..c9dc533
--- /dev/null
+++ b/docs/source/API.rst
@@ -0,0 +1,39 @@
+pyresample API
+======================
+
+pyresample.geometry
+---------------------------------
+.. automodule:: geometry
+	:members:
+
+pyresample.image
+---------------------------------
+.. automodule:: image
+	:members:
+
+pyresample.grid
+---------------------------------
+.. automodule:: grid
+	:members:
+
+pyresample.kd_tree
+---------------------------------
+.. automodule:: kd_tree
+	:members:
+	
+pyresample.utils
+---------------------------------
+.. automodule:: utils
+	:members:
+
+pyresample.data_reduce
+---------------------------------
+.. automodule:: data_reduce
+	:members:
+
+pyresample.plot
+---------------------------------
+.. automodule:: plot
+	:members:
+
+	
\ No newline at end of file
diff --git a/docs/source/_static/images/tb37_multi.png b/docs/source/_static/images/tb37_multi.png
new file mode 100644
index 0000000..74c72e2
Binary files /dev/null and b/docs/source/_static/images/tb37_multi.png differ
diff --git a/docs/source/_static/images/tb37v_bmng.png b/docs/source/_static/images/tb37v_bmng.png
new file mode 100644
index 0000000..cc806a1
Binary files /dev/null and b/docs/source/_static/images/tb37v_bmng.png differ
diff --git a/docs/source/_static/images/tb37v_ortho.png b/docs/source/_static/images/tb37v_ortho.png
new file mode 100644
index 0000000..8b20bfa
Binary files /dev/null and b/docs/source/_static/images/tb37v_ortho.png differ
diff --git a/docs/source/_static/images/tb37v_pc.png b/docs/source/_static/images/tb37v_pc.png
new file mode 100644
index 0000000..eff75eb
Binary files /dev/null and b/docs/source/_static/images/tb37v_pc.png differ
diff --git a/docs/source/_static/images/tb37v_quick.png b/docs/source/_static/images/tb37v_quick.png
new file mode 100644
index 0000000..40deda1
Binary files /dev/null and b/docs/source/_static/images/tb37v_quick.png differ
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..d258b7f
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,200 @@
+# -*- coding: utf-8 -*-
+#
+# pyresample documentation build configuration file, created by
+# sphinx-quickstart on Tue Jan  5 13:01:32 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, '/opt/lib/python2.5/site-packages')
+sys.path.insert(0, os.path.abspath('../../'))
+sys.path.insert(0, os.path.abspath('../../pyresample'))
+#sys.path.append('')
+#print sys.path
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.doctest', 'sphinx.ext.autodoc']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'pyresample'
+copyright = u'2013, Esben S. Nielsen'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+
+import version as current_version
+# The short X.Y version.
+version = current_version.__version__
+# The full version, including alpha/beta/rc tags.
+release = current_version.__version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'pyresampledoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'pyresample.tex', u'pyresample Documentation',
+   u'Esben S. Nielsen', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/docs/source/data_reduce.rst b/docs/source/data_reduce.rst
new file mode 100644
index 0000000..7178038
--- /dev/null
+++ b/docs/source/data_reduce.rst
@@ -0,0 +1,54 @@
+Reduction of swath data
+=======================
+Given a swath and a cartesian grid or grid lons and lats pyresample can reduce the swath data
+to only the relevant part covering the grid area. The reduction is coarse in order not to risk removing 
+relevant data.
+
+From **data_reduce** the function **swath_from_lonlat_grid** can be used to reduce the swath data set to the 
+area covering the lon lat grid
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry, data_reduce
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> grid_lons, grid_lats = area_def.get_lonlats()
+ >>> reduced_lons, reduced_lats, reduced_data = \
+ ... 				data_reduce.swath_from_lonlat_grid(grid_lons, grid_lats, 
+ ...				lons, lats, data, 
+ ...				radius_of_influence=3000)
+ 
+**radius_of_influence** is used to calculate a buffer zone around the grid where swath data points
+are not reduced.
+ 
+The function **get_valid_index_from_lonlat_grid** returns a boolean array of same size as the swath
+indicating the relevant swath data points compared to the grid
+ 
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry, data_reduce
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> grid_lons, grid_lats = area_def.get_lonlats()
+ >>> valid_index = data_reduce.get_valid_index_from_lonlat_grid(grid_lons, grid_lats, 
+ ...						lons, lats, 
+ ...						radius_of_influence=3000)
+ 
\ No newline at end of file
diff --git a/docs/source/geo_def.rst b/docs/source/geo_def.rst
new file mode 100644
index 0000000..68c6e92
--- /dev/null
+++ b/docs/source/geo_def.rst
@@ -0,0 +1,312 @@
+Geometry definitions
+====================
+The module **pyresample.geometry** contains classes for describing different types
+of remote sensing data geometries. The use of the different classes is described below.
+
+AreaDefinition
+--------------
+
+The cartographic definition of grid areas used by Pyresample is contained in an object of type AreaDefinition. 
+The following arguments are needed to initialize an area:
+
+* **area_id** ID of area  
+* **name**: Description
+* **proj_id**: ID of projection 
+* **proj_dict**: Proj4 parameters as dict
+* **x_size**: Number of grid columns
+* **y_size**: Number of grid rows
+* **area_extent**: (x_ll, y_ll, x_ur, y_ur)
+
+where
+
+* **x_ll**: projection x coordinate of lower left corner of lower left pixel
+* **y_ll**: projection y coordinate of lower left corner of lower left pixel
+* **x_ur**: projection x coordinate of upper right corner of upper right pixel
+* **y_ur**: projection y coordinate of upper right corner of upper right pixel
+
+Creating an area definition:
+
+.. doctest::
+	
+ >>> from pyresample import geometry
+ >>> area_id = 'ease_sh'
+ >>> name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = 'proj=laea, lat_0=-90, lon_0=0, a=6371228.0, units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> proj_dict = {'a': '6371228.0', 'units': 'm', 'lon_0': '0',
+ ...              'proj': 'laea', 'lat_0': '-90'}
+ >>> area_def = geometry.AreaDefinition(area_id, name, proj_id, proj_dict, x_size,
+ ...                                    y_size, area_extent)
+ >>> print area_def
+ Area ID: ease_sh
+ Name: Antarctic EASE grid
+ Projection ID: ease_sh
+ Projection: {'a': '6371228.0', 'units': 'm', 'lon_0': '0', 'proj': 'laea', 'lat_0': '-90'}
+ Number of columns: 425
+ Number of rows: 425
+ Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
+
+pyresample.utils
+****************
+The utils module of pyresample has convenience functions for constructing
+area definitions. The function **get_area_def** can construct an area definition
+based on area extent and a proj4-string or a list of proj4 arguments.
+
+.. doctest::
+	
+ >>> from pyresample import utils
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> print area_def
+ Area ID: ease_sh
+ Name: Antarctic EASE grid
+ Projection ID: ease_sh
+ Projection: {'a': '6371228.0', 'units': 'm', 'lon_0': '0', 'proj': 'laea', 'lat_0': '-90'}
+ Number of columns: 425
+ Number of rows: 425
+ Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
+
+
+The **load_area** function can be used to parse area definitions from a configuration file. 
+Assuming the file **/tmp/areas.cfg** exists with the following content
+
+.. code-block:: bash
+
+ REGION: ease_sh {
+	NAME:           Antarctic EASE grid
+	PCS_ID:         ease_sh
+        PCS_DEF:        proj=laea, lat_0=-90, lon_0=0, a=6371228.0, units=m
+        XSIZE:          425
+        YSIZE:          425
+        AREA_EXTENT:    (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ };
+
+ REGION: ease_nh {
+        NAME:           Arctic EASE grid
+        PCS_ID:         ease_nh
+        PCS_DEF:        proj=laea, lat_0=90, lon_0=0, a=6371228.0, units=m
+        XSIZE:          425
+        YSIZE:          425
+        AREA_EXTENT:    (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ };
+
+An area definition dict can be read using
+
+.. doctest::
+
+ >>> from pyresample import utils
+ >>> area = utils.load_area('/tmp/areas.cfg', 'ease_nh')
+ >>> print area
+ Area ID: ease_nh
+ Name: Arctic EASE grid
+ Projection ID: ease_nh
+ Projection: {'a': '6371228.0', 'units': 'm', 'lon_0': '0', 'proj': 'laea', 'lat_0': '90'}
+ Number of columns: 425
+ Number of rows: 425
+ Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
+
+Note: In the configuration file **REGION** maps to **area_id** and **PCS_ID** maps to **proj_id**.
+
+Several area definitions can be read at once using the region names in an argument list
+
+.. doctest::
+
+ >>> from pyresample import utils
+ >>> nh_def, sh_def = utils.load_area('/tmp/areas.cfg', 'ease_nh', 'ease_sh')
+ >>> print sh_def
+ Area ID: ease_sh
+ Name: Antarctic EASE grid
+ Projection ID: ease_sh
+ Projection: {'a': '6371228.0', 'units': 'm', 'lon_0': '0', 'proj': 'laea', 'lat_0': '-90'}
+ Number of columns: 425
+ Number of rows: 425
+ Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)
+
+GridDefinition
+--------------
+If the lons and lats grid values are known the area definition information can be skipped for some types
+of resampling by using a GridDefinition object instead of an AreaDefinition object.
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry
+ >>> lons = np.ones((100, 100))
+ >>> lats = np.ones((100, 100))
+ >>> grid_def = geometry.GridDefinition(lons=lons, lats=lats)
+ 
+SwathDefinition
+---------------
+A swath is defined by the lon and lat values of the data points
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry
+ >>> lons = np.ones((500, 20))
+ >>> lats = np.ones((500, 20))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ 
+Two swaths can be concatenated if their column count matches
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry
+ >>> lons1 = np.ones((500, 20))
+ >>> lats1 = np.ones((500, 20))
+ >>> swath_def1 = geometry.SwathDefinition(lons=lons1, lats=lats1)
+ >>> lons2 = np.ones((300, 20))
+ >>> lats2 = np.ones((300, 20))
+ >>> swath_def2 = geometry.SwathDefinition(lons=lons2, lats=lats2)
+ >>> swath_def3 = swath_def1.concatenate(swath_def2) 
+ 
+Geographic coordinates and boundaries
+-------------------------------------
+A ***definition** object allows for retrieval of geographic coordinates using array slicing (slice stepping is currently not supported).
+
+All ***definition** objects exposes the coordinates **lons**, **lats** and **cartesian_coords**. 
+AreaDefinition exposes the full set of projection coordinates as **projection_x_coords** and **projection_y_coords** 
+
+Get full coordinate set:
+
+.. doctest::
+	
+ >>> from pyresample import utils
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> lons = area_def.lons[:]
+
+Get slice of coordinate set:
+
+.. doctest::
+	
+ >>> from pyresample import utils
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> cart_subset = area_def.cartesian_coords[100:200, 350:]
+ 
+If only the 1D range of a projection coordinate is required it can be extracted using the **proj_x_coord** or **proj_y_coords** property of a geographic coordinate
+
+.. doctest::
+	
+ >>> from pyresample import utils
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> proj_x_range = area_def.proj_x_coord
+ 
+Spherical geometry operations
+-----------------------------
+Some basic spherical operations are available for **\*definition** objects. The spherical geometry operations
+are calculated based on the corners of a GeometryDefinition (2D SwathDefinition or Grid/AreaDefinition) and assuming the edges are great circle arcs.
+
+It can be tested if geometries overlap
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> from pyresample import utils, geometry
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> lons = np.array([[-40, -11.1], [9.5, 19.4], [65.5, 47.5], [90.3, 72.3]])
+ >>> lats = np.array([[-70.1, -58.3], [-78.8, -63.4], [-73, -57.6], [-59.5, -50]])
+ >>> swath_def = geometry.SwathDefinition(lons, lats)
+ >>> print swath_def.overlaps(area_def)
+ True
+ 
+The fraction of overlap can be calculated
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> from pyresample import utils, geometry
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> lons = np.array([[-40, -11.1], [9.5, 19.4], [65.5, 47.5], [90.3, 72.3]])
+ >>> lats = np.array([[-70.1, -58.3], [-78.8, -63.4], [-73, -57.6], [-59.5, -50]])
+ >>> swath_def = geometry.SwathDefinition(lons, lats)
+ >>> overlap_fraction = swath_def.overlap_rate(area_def)
+ 
+And the polygon defining the (great circle) boundaries over the overlapping area can be calculated
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> from pyresample import utils, geometry
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> lons = np.array([[-40, -11.1], [9.5, 19.4], [65.5, 47.5], [90.3, 72.3]])
+ >>> lats = np.array([[-70.1, -58.3], [-78.8, -63.4], [-73, -57.6], [-59.5, -50]])
+ >>> swath_def = geometry.SwathDefinition(lons, lats)
+ >>> overlap_polygon = swath_def.intersection(area_def)
+ 
+It can be tested if a (lon, lat) point is inside a GeometryDefinition
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> from pyresample import utils
+ >>> area_id = 'ease_sh'
+ >>> area_name = 'Antarctic EASE grid'
+ >>> proj_id = 'ease_sh'
+ >>> proj4_args = '+proj=laea +lat_0=-90 +lon_0=0 +a=6371228.0 +units=m'
+ >>> x_size = 425
+ >>> y_size = 425
+ >>> area_extent = (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+ >>> area_def = utils.get_area_def(area_id, area_name, proj_id, proj4_args, 
+ ...                  			   x_size, y_size, area_extent)
+ >>> print (0, -90) in area_def
+ True
+     
\ No newline at end of file
diff --git a/docs/source/geo_filter.rst b/docs/source/geo_filter.rst
new file mode 100644
index 0000000..bd7db53
--- /dev/null
+++ b/docs/source/geo_filter.rst
@@ -0,0 +1,41 @@
+Geographic filtering
+====================
+The module **pyresample.geo_filter** contains classes to filter geo data
+
+
+GridFilter
+----------
+Allows for filtering of data based on a geographic mask. The filtering uses a bucket sampling approach.
+
+The following example shows how to select data falling in the upper left and lower right quadrant of
+a full globe Plate Carrée projection using an 8x8 filter mask
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import geometry, geo_filter
+ >>> lons = np.array([-170, -30, 30, 170])
+ >>> lats = np.array([20, -40, 50, -80]) 
+ >>> swath_def = geometry.SwathDefinition(lons, lats)
+ >>> data = np.array([1, 2, 3, 4])
+ >>> filter_area = geometry.AreaDefinition('test', 'test', 'test', 
+ ...         {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
+ ...           8, 8,                                               
+ ...          (-20037508.34, -10018754.17, 20037508.34, 10018754.17)
+ ...		 )
+ >>> filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+ ...         [1, 1, 1, 1, 0, 0, 0, 0],
+ ...         [1, 1, 1, 1, 0, 0, 0, 0],
+ ...         [1, 1, 1, 1, 0, 0, 0, 0],
+ ...         [0, 0, 0, 0, 1, 1, 1, 1],
+ ...         [0, 0, 0, 0, 1, 1, 1, 1],
+ ...         [0, 0, 0, 0, 1, 1, 1, 1],
+ ...         [0, 0, 0, 0, 1, 1, 1, 1],
+ ...         ])
+ >>> grid_filter = geo_filter.GridFilter(filter_area, filter)
+ >>> swath_def_filtered, data_filtered = grid_filter.filter(swath_def, data)
+
+Input swath_def and data must match as described in :ref:`swath`.
+
+The returned data will always have a 1D geometry_def and if multiple channels are present the filtered
+data will have the shape (number_of_points, channels).
\ No newline at end of file
diff --git a/docs/source/grid.rst b/docs/source/grid.rst
new file mode 100644
index 0000000..e1f73cf
--- /dev/null
+++ b/docs/source/grid.rst
@@ -0,0 +1,162 @@
+Resampling of gridded data
+==========================
+
+Pyresample can be used to resample from an existing grid to another. Nearest neighbour resampling is used.
+
+pyresample.image
+----------------
+
+A grid can be stored in an object of type **ImageContainer** along with its area definition.
+An object of type **ImageContainer** allows for calculating resampling using preprocessed arrays
+using the method **get_array_from_linesample**
+
+Resampling can be done using descendants of **ImageContainer** and calling their **resample** method.
+
+An **ImageContainerQuick** object allows for the grid to be resampled to a new area definition
+using an approximate (but fast) nearest neighbour method. 
+Resampling an object of type **ImageContainerQuick** returns a new object of type **ImageContainerQuick**. 
+
+An **ImageContainerNearest** object allows for the grid to be resampled to a new area definition (or swath definition)
+using an accurate kd-tree method.
+Resampling an object of type **ImageContainerNearest** returns a new object of 
+type **ImageContainerNearest**. 
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import image, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+ ...                                'msg_full',
+ ...                                {'a': '6378169.0', 'b': '6356584.0',
+ ...                                 'h': '35785831.0', 'lon_0': '0',
+ ...                                 'proj': 'geos'},
+ ...                                3712, 3712,
+ ...                                [-5568742.4, -5568742.4,
+ ...                                 5568742.4, 5568742.4])
+ >>> data = np.ones((3712, 3712))
+ >>> msg_con_quick = image.ImageContainerQuick(data, msg_area)
+ >>> area_con_quick = msg_con_quick.resample(area_def)
+ >>> result_data_quick = area_con_quick.image_data
+ >>> msg_con_nn = image.ImageContainerNearest(data, msg_area, radius_of_influence=50000)
+ >>> area_con_nn = msg_con_nn.resample(area_def)
+ >>> result_data_nn = area_con_nn.image_data
+
+Data is assumed to be a numpy array of shape (rows, cols) or (rows, cols, channels).
+
+Masked arrays can be used as data input. In order to have undefined pixels masked out instead of 
+assigned a fill value set **fill_value=None** when calling **resample_area_***.
+
+Using **ImageContainerQuick** the risk of image artifacts increases as the distance
+from source projection center increases.
+
+The constructor argument **radius_of_influence** to **ImageContainerNearest** specifies the maximum
+distance to search for a neighbour for each point in the target grid. The unit is meters.
+
+The constructor arguments of an ImageContainer object can be changed as attributes later
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import image, geometry
+ >>> msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+ ...                                'msg_full',
+ ...                                {'a': '6378169.0', 'b': '6356584.0',
+ ...                                 'h': '35785831.0', 'lon_0': '0',
+ ...                                 'proj': 'geos'},
+ ...                                3712, 3712,
+ ...                                [-5568742.4, -5568742.4,
+ ...                                 5568742.4, 5568742.4])
+ >>> data = np.ones((3712, 3712))
+ >>> msg_con_nn = image.ImageContainerNearest(data, msg_area, radius_of_influence=50000)
+ >>> msg_con_nn.radius_of_influence = 45000
+ >>> msg_con_nn.fill_value = -99
+ 
+Multi channel images
+********************
+
+If the dataset has several channels the last index of the data array specifies the channels
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import image, geometry
+ >>> msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+ ...                                'msg_full',
+ ...                                {'a': '6378169.0', 'b': '6356584.0',
+ ...                                 'h': '35785831.0', 'lon_0': '0',
+ ...                                 'proj': 'geos'},
+ ...                                3712, 3712,
+ ...                                [-5568742.4, -5568742.4,
+ ...                                 5568742.4, 5568742.4])
+ >>> channel1 = np.ones((3712, 3712))
+ >>> channel2 = np.ones((3712, 3712)) * 2
+ >>> channel3 = np.ones((3712, 3712)) * 3
+ >>> data = np.dstack((channel1, channel2, channel3))
+ >>> msg_con_nn = image.ImageContainerNearest(data, msg_area, radius_of_influence=50000)
+ 
+
+Segmented resampling
+********************
+
+Pyresample calculates the result in segments in order to reduce memory footprint. This is controlled by the **segments** constructor keyword argument. If no **segments** argument is given pyresample will estimate the number of segments to use.
+ 
+Forcing quick resampling to use 4 resampling segments:
+ 
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import image, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+ ...                                'msg_full',
+ ...                                {'a': '6378169.0', 'b': '6356584.0',
+ ...                                 'h': '35785831.0', 'lon_0': '0',
+ ...                                 'proj': 'geos'},
+ ...                                3712, 3712,
+ ...                                [-5568742.4, -5568742.4,
+ ...                                 5568742.4, 5568742.4])
+ >>> data = np.ones((3712, 3712))
+ >>> msg_con_quick = image.ImageContainerQuick(data, msg_area, segments=4)
+ >>> area_con_quick = msg_con_quick.resample(area_def)
+
+Constructor arguments
+*********************
+The full list of constructor arguments:
+
+ **ImageContainerQuick**:
+ 
+* image_data : Dataset. Masked arrays can be used.
+* geometry_def : Geometry definition.
+* fill_value (optional) : Fill value for undefined pixels. Defaults to 0. If set to **None** they will be masked out.
+* nprocs (optional) : Number of processor cores to use. Defaults to 1.
+* segments (optional) : Number of segments to split resampling in. Defaults to auto estimation.
+
+ **ImageContainerNearest**:
+
+* image_data : Dataset. Masked arrays can be used.
+* geometry_def : Geometry definition.
+* radius_of_influence : Cut off radius in meters when considering neighbour pixels.
+* epsilon (optional) : The distance to a found value is guaranteed to be no further than (1 + eps) times the distance to the correct neighbour.
+* fill_value (optional) : Fill value for undefined pixels. Defaults to 0. If set to **None** they will be masked out.
+* reduce_data (optional) : Apply geographic reduction of dataset before resampling. Defaults to True
+* nprocs (optional) : Number of processor cores to use. Defaults to 1.
+* segments (optional) : Number of segments to split resampling in. Defaults to auto estimation.
+
+Preprocessing of grid resampling
+*********************************
+For preprocessing of grid resampling see :ref:`preproc`
+ 
+ 
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 0000000..8116f4d
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,28 @@
+.. pyresample documentation master file, created by
+   sphinx-quickstart on Tue Jan  5 13:01:32 2010.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Pyresample
+======================================
+Pyresample is a Python package for resampling (reprojection) of earth observing satellite data.
+Pyresample handles both resampling of gridded data (e.g. geostationary satellites) and swath data (polar orbiting satellites). 
+Pyresample can use multiple processor cores for resampling. Pyresample supports masked arrays.
+
+Documentation
+-------------
+.. toctree::
+   :maxdepth: 2
+
+   installation
+   geo_def
+   geo_filter
+   grid
+   swath
+   multi
+   preproc
+   plot
+   data_reduce
+   API
+
+
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
new file mode 100644
index 0000000..641b628
--- /dev/null
+++ b/docs/source/installation.rst
@@ -0,0 +1,78 @@
+Installing Pyresample
+=====================
+Pyresample depends on pyproj, numpy(>= 1.3), scipy(>= 0.7), multiprocessing 
+(builtin package for Python > 2.5) and configobj. Optionally pykdtree can be used instead of scipy from v0.8.0.
+
+The correct version of the packages should be installed on your system 
+(refer to numpy and scipy installation instructions) or use easy_install to handle dependencies automatically.
+
+In order to use the pyresample plotting functionality Basemap and matplotlib (>= 0.98) must be installed. 
+These packages are not a prerequisite for using any other pyresample functionality. 
+
+Package test
+************
+Test the package (requires nose):
+
+.. code-block:: bash
+
+	$ tar -zxvf pyresample-<version>.tar.gz
+	$ cd pyresample-<version>
+	$ nosetests
+	
+If all the tests pass the functionality of all pyresample functions on the system has been verified.
+
+Package installation
+********************
+A sandbox environment can be created for pyresample using `Virtualenv <http://pypi.python.org/pypi/virtualenv>`_
+
+Pyresample is available from pypi.
+  
+Install Pyresample using pip:
+
+.. code-block:: bash
+
+	$ pip install pyresample
+
+Alternatively install from tarball:
+
+.. code-block:: bash
+
+	$ tar -zxvf pyresample-<version>.tar.gz
+	$ cd pyresample-<version>
+	$ python setup.py install
+
+Using pykdtree
+**************
+
+As of pyresample v0.8.0 pykdtree can be used as backend instead of scipy. 
+This enables significant speedups for large datasets.
+
+pykdtree is used as a drop-in replacement for scipy. If it's available it will be used otherwise scipy will be used.
+To check which backend is active for your pyresample installation do:
+
+ >>> import pyresample as pr
+ >>> pr.kd_tree.which_kdtree()
+
+which returns either 'pykdtree' or 'scipy.spatial'.
+
+Please refer to pykdtree_ for installation instructions.
+
+If pykdtree is built with OpenMP support the number of threads is controlled with the standard OpenMP environment variable OMP_NUM_THREADS.
+The *nprocs* argument has no effect on pykdtree.
+
+Using numexpr
+*************
+
+As of pyresample v1.0.0 numexpr_ will be used for minor bottleneck optimization if available
+
+Show active plugins
+*******************
+The active drop-in plugins can be shown using:
+
+ >>> import pyresample as pr
+ >>> pr.get_capabilities()
+
+.. _pykdtree: https://github.com/storpipfugl/pykdtree
+.. _numexpr: https://code.google.com/p/numexpr/
+ 
+ 
\ No newline at end of file
diff --git a/docs/source/multi.rst b/docs/source/multi.rst
new file mode 100644
index 0000000..564d141
--- /dev/null
+++ b/docs/source/multi.rst
@@ -0,0 +1,36 @@
+.. _multi:
+
+Using multiple processor cores
+==============================
+
+Multi core processing
+*********************
+
+Bottlenecks of pyresample can be executed in parallel. Parallel computing can be executed if the 
+pyresample function has the **nprocs** keyword argument. **nprocs** specifies the number of processes 
+to be used for calculation. If a class takes the constructor argument **nprocs** this sets **nprocs** for
+all methods of this class
+
+Example of resampling in parallel using 4 processes:
+
+.. doctest::
+
+ >>> import numpy
+ >>> from pyresample import kd_tree, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = numpy.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> result = kd_tree.resample_nearest(swath_def, data.ravel(),
+ ... area_def, radius_of_influence=50000, nprocs=4)
+
+Note: Do not use more processes than available processor cores. As there is a process creation overhead 
+there might be negligible performance improvement using, say, 8 compared to 4 processor cores. 
+Test on the actual system to determine the most sensible number of processes to use. 
diff --git a/docs/source/plot.rst b/docs/source/plot.rst
new file mode 100644
index 0000000..e808d2d
--- /dev/null
+++ b/docs/source/plot.rst
@@ -0,0 +1,139 @@
+Plotting with pyresample and Basemap
+====================================
+Pyresample supports basic integration with Basemap (http://matplotlib.sourceforge.net/basemap).
+
+Displaying data quickly
+-----------------------
+Pyresample has some convenience functions for displaying data from a single channel. 
+The function **plot.show_quicklook** shows a Basemap image of a dataset for a specified AreaDefinition.
+The function **plot.save_quicklook** saves the Basemap image directly to file.
+
+**Example usage:**
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> import pyresample as pr
+ >>> lons = np.zeros(1000)
+ >>> lats = np.arange(-80, -90, -0.01)
+ >>> tb37v = np.arange(1000)
+ >>> area_def = pr.utils.load_area('/tmp/areas.cfg', 'ease_sh')
+ >>> swath_def = pr.geometry.SwathDefinition(lons, lats)
+ >>> result = pr.kd_tree.resample_nearest(swath_def, tb37v, area_def,
+ ...                                      radius_of_influence=20000, fill_value=None)
+ >>> pr.plot.save_quicklook('/tmp/tb37v_quick.png', area_def, result, label='Tb 37v (K)')
+
+Assuming **lons**, **lats** and **tb37v** are initialized with real data the result might look something like this:
+  .. image:: _static/images/tb37v_quick.png
+  
+The data passed to the functions is a 2D array matching the AreaDefinition.
+
+The Plate Carree projection
++++++++++++++++++++++++++++
+The Plate Carree projection (regular lon/lat grid) is named **eqc** in Proj.4 and **cyl** in Basemap. pyresample uses the Proj.4 name.
+Assuming the file **/tmp/areas.cfg** has the following area definition:
+
+.. code-block:: bash
+
+ REGION: pc_world {
+    NAME:    Plate Carree world map
+    PCS_ID:  pc_world
+    PCS_DEF: proj=eqc
+    XSIZE: 640
+    YSIZE: 480
+    AREA_EXTENT:  (-20037508.34, -10018754.17, 20037508.34, 10018754.17)
+ };
+
+**Example usage:**
+
+ >>> import numpy as np 
+ >>> import pyresample as pr
+ >>> lons = np.zeros(1000)
+ >>> lats = np.arange(-80, -90, -0.01)
+ >>> tb37v = np.arange(1000)
+ >>> area_def = pr.utils.load_area('/tmp/areas.cfg', 'pc_world')
+ >>> swath_def = pr.geometry.SwathDefinition(lons, lats)
+ >>> result = pr.kd_tree.resample_nearest(swath_def, tb37v, area_def, radius_of_influence=20000, fill_value=None)
+ >>> pr.plot.save_quicklook('/tmp/tb37v_pc.png', area_def, result, num_meridians=0, num_parallels=0, label='Tb 37v (K)')
+
+Assuming **lons**, **lats** and **tb37v** are initialized with real data the result might look something like this:
+  .. image:: _static/images/tb37v_pc.png
+
+
+The Globe projections
++++++++++++++++++++++
+From v0.7.12 pyresample can use the geos, ortho and nsper projections with Basemap.
+Assuming the file **/tmp/areas.cfg** has the following area definition for an ortho projection area:
+
+.. code-block:: bash
+
+ REGION: ortho {
+   NAME:    Ortho globe
+   PCS_ID:  ortho_globe
+   PCS_DEF: proj=ortho, a=6370997.0, lon_0=40, lat_0=-40
+   XSIZE: 640
+   YSIZE: 480
+   AREA_EXTENT:  (-10000000, -10000000, 10000000, 10000000) 
+ };
+
+**Example usage:**
+
+ >>> import numpy as np 
+ >>> import pyresample as pr
+ >>> lons = np.zeros(1000)
+ >>> lats = np.arange(-80, -90, -0.01)
+ >>> tb37v = np.arange(1000)
+ >>> area_def = pr.utils.load_area('/tmp/areas.cfg', 'ortho')
+ >>> swath_def = pr.geometry.SwathDefinition(lons, lats)
+ >>> result = pr.kd_tree.resample_nearest(swath_def, tb37v, area_def, radius_of_influence=20000, fill_value=None)
+ >>> pr.plot.save_quicklook('tb37v_ortho.png', area_def, result, num_meridians=0, num_parallels=0, label='Tb 37v (K)')
+
+Assuming **lons**, **lats** and **tb37v** are initialized with real data the result might look something like this:
+  .. image:: _static/images/tb37v_ortho.png
+
+
+Getting a Basemap object
+------------------------
+In order to make more advanced plots than the preconfigured quicklooks a Basemap object can be generated from an
+AreaDefinition using the **plot.area_def2basemap(area_def, **kwargs)** function.
+
+**Example usage:**
+
+.. doctest::
+
+ >>> import numpy as np	
+ >>> import matplotlib.pyplot as plt
+ >>> import pyresample as pr
+ >>> lons = np.zeros(1000)
+ >>> lats = np.arange(-80, -90, -0.01)
+ >>> tb37v = np.arange(1000)
+ >>> area_def = pr.utils.load_area('/tmp/areas.cfg', 'ease_sh')
+ >>> swath_def = pr.geometry.SwathDefinition(lons, lats)
+ >>> result = pr.kd_tree.resample_nearest(swath_def, tb37v, area_def,
+ ...                                      radius_of_influence=20000, fill_value=None)
+ >>> bmap = pr.plot.area_def2basemap(area_def)
+ >>> bmng = bmap.bluemarble()
+ >>> col = bmap.imshow(result, origin='upper')
+ >>> plt.savefig('/tmp/tb37v_bmng.png', bbox_inches='tight')
+
+Assuming **lons**, **lats** and **tb37v** are initialized with real data the result might look something like this:
+  .. image:: _static/images/tb37v_bmng.png
+  
+Any keyword arguments (not concerning the projection) passed to **plot.area_def2basemap** will be passed
+directly to the Basemap initialization.
+
+For more information on how to plot with Basemap please refer to the Basemap and matplotlib documentation.
+
+Limitations
+-----------
+The pyresample use of Basemap is basically a conversion from a pyresample AreaDefinition to a Basemap object
+which allows for correct plotting of a resampled dataset using the **basemap.imshow** function.
+
+Currently only the following set of Proj.4 arguments can be interpreted in the conversion: 
+{'proj', 'a', 'b', 'ellps', 'lon_0', 'lat_0', 'lon_1', 'lat_1', 'lon_2', 'lat_2', 'lat_ts'}
+
+Any other Proj.4 parameters will be ignored. 
+If the ellipsoid is not defined in terms of 'ellps', 'a' or ('a', 'b') an exception will be raised.
+
+The xsize and ysize in an AreaDefinition will only be used during resampling when the image data for use in
+**basemap.imshow** is created. The actual size and shape of the final plot is handled by matplotlib.
diff --git a/docs/source/preproc.rst b/docs/source/preproc.rst
new file mode 100644
index 0000000..6756d5d
--- /dev/null
+++ b/docs/source/preproc.rst
@@ -0,0 +1,53 @@
+.. _preproc:
+
+Preprocessing of grids
+======================
+
+When resampling is performed repeatedly to the same grid significant execution time can be saved by 
+preprocessing grid information.
+
+Preprocessing for grid resampling
+---------------------------------
+
+Using the function **generate_quick_linesample_arrays** or 
+**generate_nearest_neighbour_linesample_arrays** from **pyresample.utils** arrays containing 
+the rows and cols indices used to calculate the result in **image.resample_area_quick** or
+**resample_area_nearest_neighbour** can be obtained. These can be fed to the method 
+**get_array_from_linesample** of an **ImageContainer** object to obtain the resample result.
+
+.. doctest::
+
+ >>> import numpy
+ >>> from pyresample import utils, image, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees',
+ ...                                'msg_full',
+ ...                                {'a': '6378169.0', 'b': '6356584.0',
+ ...                                 'h': '35785831.0', 'lon_0': '0',
+ ...                                 'proj': 'geos'},
+ ...                                3712, 3712,
+ ...                                [-5568742.4, -5568742.4,
+ ...                                 5568742.4, 5568742.4])
+ >>> data = numpy.ones((3712, 3712))
+ >>> msg_con = image.ImageContainer(data, msg_area) 
+ >>> row_indices, col_indices = \
+ ...		utils.generate_nearest_neighbour_linesample_arrays(msg_area, area_def, 50000)
+ >>> result = msg_con.get_array_from_linesample(row_indices, col_indices) 
+
+The numpy arrays returned by **generate_*_linesample_arrays** can be used with the 
+**ImageContainer.get_array_from_linesample** method when the same resampling is to be performed 
+again thus eliminating the need for calculating the reprojection.
+
+Numpy arrays can be saved and loaded using  **numpy.save** and **numpy.load**.
+
+
+
+
+
+
diff --git a/docs/source/swath.rst b/docs/source/swath.rst
new file mode 100644
index 0000000..cfa4bca
--- /dev/null
+++ b/docs/source/swath.rst
@@ -0,0 +1,212 @@
+.. _swath:
+
+Resampling of swath data
+========================
+
+Pyresample can be used to resample a swath dataset to a grid, a grid to a swath or a swath to another swath. 
+Resampling can be done using the nearest neighbour method, Gaussian weighting or weighting with an arbitrary radial function.
+
+pyresample.image
+----------------
+The ImageContainerNearest class can be used for nearest neighbour resampling of swaths as well as grids.
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import image, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> swath_con = image.ImageContainerNearest(data, swath_def, radius_of_influence=5000)
+ >>> area_con = swath_con.resample(area_def)
+ >>> result = area_con.image_data
+
+For other resampling types or splitting the process in two steps use the functions in **pyresample.swath** described below. 
+
+pyresample.swath
+----------------
+
+This module contains several functions for resampling swath data.
+
+Note distance calculation is approximated with cartesian distance.
+
+Masked arrays can be used as data input. In order to have undefined pixels masked out instead of 
+assigned a fill value set **fill_value=None** when calling the **resample_*** function.
+
+resample_nearest
+****************
+
+Function for resampling using nearest neighbour method.
+
+Example showing how to resample a generated swath dataset to a grid using nearest neighbour method:
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import kd_tree, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> result = kd_tree.resample_nearest(swath_def, data,
+ ... area_def, radius_of_influence=50000, epsilon=0.5)
+
+If the arguments **swath_def** and **area_def** were switched (and **data** matched the dimensions of **area_def**) the grid of **area_def**
+would be resampled to the swath defined by **swath_def**.  
+
+Note the keyword arguments:
+
+* **radius_of_influence**: The radius around each grid pixel in meters to search for neighbours in the swath.
+* **epsilon**: The distance to a found value is guaranteed to be no further than (1 + eps) times the distance to the correct neighbour. Allowing for uncertainty decreases execution time.
+
+If **data** is a masked array the mask will follow the neighbour pixel assignment.
+
+If there are multiple channels in the dataset the **data** argument should be of the shape of the lons and lats arrays 
+with the channels along the last axis e.g. (rows, cols, channels). Note: the convention of pyresample < 0.7.4 of passing
+**data** in the form of (number_of_data_points, channels) is still accepted.
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import kd_tree, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> channel1 = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> channel2 = np.fromfunction(lambda y, x: y*x, (50, 10)) * 2
+ >>> channel3 = np.fromfunction(lambda y, x: y*x, (50, 10)) * 3
+ >>> data = np.dstack((channel1, channel2, channel3))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> result = kd_tree.resample_nearest(swath_def, data,
+ ... area_def, radius_of_influence=50000) 
+
+For nearest neighbour resampling the class **image.ImageContainerNearest** can be used as well as **kd_tree.resample_nearest**
+
+resample_gauss
+**************
+
+Function for resampling using Gaussian weighting. The Gauss weight function is defined as exp(-dist^2/sigma^2).
+Note the pyresample sigma is **not** the standard deviation of the Gaussian.
+Example showing how to resample a generated swath dataset to a grid using Gaussian weighting:
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import kd_tree, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> result = kd_tree.resample_gauss(swath_def, data, 
+ ... area_def, radius_of_influence=50000, sigmas=25000)
+
+If more channels are present in **data** the keyword argument **sigmas** must be a list containing a sigma for each channel.
+
+If **data** is a masked array any pixel in the result data that has been "contaminated" by weighting of a masked pixel is masked.
+
+Using the function **utils.fwhm2sigma** the sigma argument to the gauss resampling can be calculated from 3 dB FOV levels.
+
+resample_custom
+***************
+
+Function for resampling using arbitrary radial weight functions.
+
+Example showing how to resample a generated swath dataset to a grid using an arbitrary radial weight function:
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import kd_tree, geometry 
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> wf = lambda r: 1 - r/100000.0
+ >>> result  = kd_tree.resample_custom(swath_def, data,
+ ...  area_def, radius_of_influence=50000, weight_funcs=wf)
+
+If more channels are present in **data** the keyword argument **weight_funcs** must be a list containing a radial function for each channel.
+
+If **data** is a masked array any pixel in the result data that has been "contaminated" by weighting of a masked pixel is masked.
+
+Resampling from neighbour info
+******************************
+The resampling can be split in two steps: 
+
+First get arrays containing information about the nearest neighbours to each grid point. 
+Then use these arrays to retrieve the resampling result.
+
+This approach can be useful if several datasets based on the same swath are to be resampled. The computationally 
+heavy task of calculating the neighbour information can be done once and the result can be used to 
+retrieve the resampled data from each of the datasets quickly.
+
+.. doctest::
+
+ >>> import numpy as np
+ >>> from pyresample import kd_tree, geometry
+ >>> area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD',
+ ...                                {'a': '6378144.0', 'b': '6356759.0',
+ ...                                 'lat_0': '50.00', 'lat_ts': '50.00',
+ ...                                 'lon_0': '8.00', 'proj': 'stere'}, 
+ ...                                800, 800,
+ ...                                [-1370912.72, -909968.64,
+ ...                                 1029087.28, 1490031.36])
+ >>> data = np.fromfunction(lambda y, x: y*x, (50, 10))
+ >>> lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
+ >>> lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
+ >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+ >>> valid_input_index, valid_output_index, index_array, distance_array = \
+ ...                        kd_tree.get_neighbour_info(swath_def, 
+ ...                               	                   area_def, 50000,  
+ ...                                                   neighbours=1)
+ >>> res = kd_tree.get_sample_from_neighbour_info('nn', area_def.shape, data, 
+ ...                                              valid_input_index, valid_output_index,
+ ...                                              index_array)
+ 
+Note the keyword argument **neighbours=1**. This specifies only to consider one neighbour for each 
+grid point (the nearest neighbour). Also note **distance_array** is not a required argument for
+**get_sample_from_neighbour_info** when using nearest neighbour resampling
+
+Segmented resampling
+********************
+Whenever a resampling function takes the keyword argument **segments** the number of segments to split the resampling process in can be specified. This affects the memory footprint of pyresample. If the value of **segments** is left to default pyresample will estimate the number of segments to use. 
+    
+Speedup using pykdtree
+**********************
+
+pykdtree can be used instead of scipy to gain significant speedup for large datasets. See :ref:`multi`. 
diff --git a/pyresample.egg-info/PKG-INFO b/pyresample.egg-info/PKG-INFO
new file mode 100644
index 0000000..a8e3315
--- /dev/null
+++ b/pyresample.egg-info/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 1.1
+Name: pyresample
+Version: 1.0.0
+Summary: Resampling of remote sensing data in Python
+Home-page: UNKNOWN
+Author: Esben S. Nielsen
+Author-email: esn at dmi.dk
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+Classifier: Programming Language :: Python
+Classifier: Operating System :: OS Independent
+Classifier: Intended Audience :: Science/Research
+Classifier: Topic :: Scientific/Engineering
diff --git a/pyresample.egg-info/SOURCES.txt b/pyresample.egg-info/SOURCES.txt
new file mode 100644
index 0000000..8f240ad
--- /dev/null
+++ b/pyresample.egg-info/SOURCES.txt
@@ -0,0 +1,60 @@
+LICENSE.txt
+MANIFEST.in
+README
+setup.py
+docs/Makefile
+docs/source/API.rst
+docs/source/conf.py
+docs/source/data_reduce.rst
+docs/source/geo_def.rst
+docs/source/geo_filter.rst
+docs/source/grid.rst
+docs/source/index.rst
+docs/source/installation.rst
+docs/source/multi.rst
+docs/source/plot.rst
+docs/source/preproc.rst
+docs/source/swath.rst
+docs/source/_static/images/tb37_multi.png
+docs/source/_static/images/tb37v_bmng.png
+docs/source/_static/images/tb37v_ortho.png
+docs/source/_static/images/tb37v_pc.png
+docs/source/_static/images/tb37v_quick.png
+pyresample/__init__.py
+pyresample/_multi_proc.py
+pyresample/_spatial_mp.py
+pyresample/data_reduce.py
+pyresample/geo_filter.py
+pyresample/geometry.py
+pyresample/grid.py
+pyresample/image.py
+pyresample/kd_tree.py
+pyresample/plot.py
+pyresample/spherical_geometry.py
+pyresample/utils.py
+pyresample/version.py
+pyresample.egg-info/PKG-INFO
+pyresample.egg-info/SOURCES.txt
+pyresample.egg-info/dependency_links.txt
+pyresample.egg-info/not-zip-safe
+pyresample.egg-info/requires.txt
+pyresample.egg-info/top_level.txt
+test/test_geometry.py
+test/test_grid.py
+test/test_image.py
+test/test_kd_tree.py
+test/test_plot.py
+test/test_spherical_geometry.py
+test/test_swath.py
+test/test_utils.py
+test/test_files/areas.cfg
+test/test_files/mask_grid.dat
+test/test_files/mask_test_data.dat
+test/test_files/mask_test_fill_value.dat
+test/test_files/mask_test_full_fill.dat
+test/test_files/mask_test_full_fill_multi.dat
+test/test_files/mask_test_mask.dat
+test/test_files/mask_test_nearest_data.dat
+test/test_files/mask_test_nearest_mask.dat
+test/test_files/quick_mask_test.dat
+test/test_files/ssmis_swath.npz
\ No newline at end of file
diff --git a/pyresample.egg-info/dependency_links.txt b/pyresample.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pyresample.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/pyresample.egg-info/not-zip-safe b/pyresample.egg-info/not-zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/pyresample.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/pyresample.egg-info/requires.txt b/pyresample.egg-info/requires.txt
new file mode 100644
index 0000000..37520a3
--- /dev/null
+++ b/pyresample.egg-info/requires.txt
@@ -0,0 +1,9 @@
+pyproj
+numpy
+configobj
+
+[numexpr]
+numexpr
+
+[pykdtree]
+pykdtree
\ No newline at end of file
diff --git a/pyresample.egg-info/top_level.txt b/pyresample.egg-info/top_level.txt
new file mode 100644
index 0000000..2c78f3a
--- /dev/null
+++ b/pyresample.egg-info/top_level.txt
@@ -0,0 +1 @@
+pyresample
diff --git a/pyresample/__init__.py b/pyresample/__init__.py
new file mode 100644
index 0000000..375e914
--- /dev/null
+++ b/pyresample/__init__.py
@@ -0,0 +1,42 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import grid
+import image
+import kd_tree
+import utils
+import version
+import plot
+
+__version__ = version.__version__
+
+def get_capabilities():
+    """Probe for optional acceleration packages.
+
+    :Returns:
+    cap : dict
+        Mapping with boolean entries 'pykdtree' and 'numexpr' that are
+        True when the corresponding optional package is importable.
+    """
+    cap = {}
+
+    try:
+        # Import only to test availability; the imported name is unused.
+        from pykdtree.kdtree import KDTree
+        cap['pykdtree'] = True 
+    except ImportError:
+        cap['pykdtree'] = False
+
+    try:
+        import numexpr
+        cap['numexpr'] = True 
+    except ImportError:
+        cap['numexpr'] = False 
+
+    return cap
diff --git a/pyresample/_multi_proc.py b/pyresample/_multi_proc.py
new file mode 100644
index 0000000..b1d1957
--- /dev/null
+++ b/pyresample/_multi_proc.py
@@ -0,0 +1,104 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import ctypes
+
+import multiprocessing as mp
+import numpy as np
+
+class Scheduler(object):
+    """Process-shared load balancer handing out slices of [0, ndata).
+
+    Worker processes iterate over a shared Scheduler instance and each
+    receives slice objects covering disjoint chunks of the data.  The
+    remaining count and next start index live in shared memory so all
+    workers see one queue; access is serialized with a multiprocessing
+    lock.
+    """
+    
+    def __init__(self, ndata, nprocs, chunk=None, schedule='guided'):
+        # schedule selects the chunking strategy:
+        #   'static'  - fixed chunks of roughly ndata / nprocs
+        #   'dynamic' - fixed chunks of ndata / (10 * nprocs) (or 'chunk')
+        #   'guided'  - chunk size shrinks as remaining data decreases
+        if not schedule in ['guided','dynamic', 'static']:
+            raise ValueError, 'unknown scheduling strategy'
+        # Shared-memory state: remaining item count and next start index.
+        self._ndata = mp.RawValue(ctypes.c_int, ndata)
+        self._start = mp.RawValue(ctypes.c_int, 0)
+        self._lock = mp.Lock()
+        self._schedule = schedule
+        self._nprocs = nprocs
+        if schedule == 'guided' or schedule == 'dynamic':
+            # An explicit 'chunk' overrides the computed minimum.
+            min_chunk = ndata // (10*nprocs)
+            if chunk:
+                min_chunk = chunk
+            min_chunk = max(min_chunk, 1)
+            self._chunk = min_chunk
+        elif schedule == 'static':
+            min_chunk = ndata // nprocs
+            if chunk:
+                min_chunk = max(chunk, min_chunk)
+            min_chunk = max(min_chunk, 1)
+            self._chunk = min_chunk
+            
+    def __iter__(self):
+        return self
+
+    def next(self):
+        # Python 2 iterator protocol: atomically claim the next chunk.
+        self._lock.acquire()
+        ndata = self._ndata.value
+        nprocs = self._nprocs
+        start = self._start.value
+        if self._schedule == 'guided':
+            # Guided: chunk shrinks with the remaining workload but never
+            # drops below the minimum computed in __init__.
+            _chunk = ndata // nprocs
+            chunk = max(self._chunk, _chunk)
+        else:
+            chunk = self._chunk
+        if ndata:
+            if chunk > ndata:
+                # Final (partial) chunk: hand out everything that is left.
+                s0 = start
+                s1 = start + ndata
+                self._ndata.value = 0
+            else:
+                s0 = start
+                s1 = start + chunk
+                self._ndata.value = ndata - chunk
+                self._start.value = start + chunk
+            self._lock.release()
+            return slice(s0, s1)
+        else:
+            # Queue exhausted; release the lock before ending iteration.
+            self._lock.release()
+            raise StopIteration
+
+
+def shmem_as_ndarray(raw_array):
+    """Expose a multiprocessing.RawArray as a flat numpy ndarray.
+
+    No data is copied: the returned array is a view on the shared-memory
+    buffer, so writes through it are visible to every process holding the
+    same RawArray.
+    """
+    # Map ctypes element types to numpy dtypes of matching size.
+    # NOTE(review): c_uint maps to the *signed* np.int32, and c_long /
+    # c_ulong assume a 32-bit long -- confirm intended on platforms where
+    # long is 64 bits.
+    _ctypes_to_numpy = {
+                        ctypes.c_char : np.int8,
+                        ctypes.c_wchar : np.int16,
+                        ctypes.c_byte : np.int8,
+                        ctypes.c_ubyte : np.uint8,
+                        ctypes.c_short : np.int16,
+                        ctypes.c_ushort : np.uint16,
+                        ctypes.c_int : np.int32,
+                        ctypes.c_uint : np.int32,
+                        ctypes.c_long : np.int32,
+                        ctypes.c_ulong : np.int32,
+                        ctypes.c_float : np.float32,
+                        ctypes.c_double : np.float64
+                        }
+    # Address and byte size of the underlying shared buffer (uses the
+    # RawArray's private _wrapper -- implementation detail of CPython 2).
+    address = raw_array._wrapper.get_address()
+    size = raw_array._wrapper.get_size()
+    dtype = _ctypes_to_numpy[raw_array._type_]
+    class Dummy(object): pass
+    d = Dummy()
+    # Describe the buffer as raw bytes through the numpy array interface,
+    # then reinterpret ('view') it with the real element dtype.
+    d.__array_interface__ = {
+                             'data' : (address, False),
+                             'typestr' : np.dtype(np.uint8).str,
+                             'descr' : np.dtype(np.uint8).descr,
+                             'shape' : (size,),
+                             'strides' : None,
+                             'version' : 3
+                             }                            
+    return np.asarray(d).view(dtype=dtype)
\ No newline at end of file
diff --git a/pyresample/_spatial_mp.py b/pyresample/_spatial_mp.py
new file mode 100644
index 0000000..78a0c0d
--- /dev/null
+++ b/pyresample/_spatial_mp.py
@@ -0,0 +1,275 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010, 2013  Esben S. Nielsen, Martin Raspaud
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import ctypes
+
+import numpy as np
+import pyproj
+#import scipy.spatial as sp
+import multiprocessing as mp
+
+try:
+    import numexpr as ne
+except ImportError:
+    ne = None
+
+from _multi_proc import shmem_as_ndarray, Scheduler
+
+#Earth radius
+R = 6370997.0
+
+
+class cKDTree_MP(object):
+    ''' Multiprocessing cKDTree work-alike using shared memory.
+
+    Mirrors the scipy.spatial.cKDTree interface, but the input data is
+    copied to shared memory and the actual tree is rebuilt inside each
+    worker process (see _parallel_query).
+    '''
+
+    def __init__(self, data, leafsize=10, nprocs=2, chunk=None,\
+                 schedule='guided'):
+        '''
+        Same signature as cKDTree.__init__ except that an internal copy
+        of data to shared memory is made.
+        Extra keyword arguments:
+        nprocs : Number of worker processes used by query().
+        chunk : Minimum chunk size for the load balancer.
+        schedule: Strategy for balancing work load
+        ('static', 'dynamic' or 'guided').
+        '''
+
+        self.n, self.m = data.shape
+        # Allocate shared memory for data
+        self.shmem_data = mp.RawArray(ctypes.c_double, self.n*self.m)
+        
+        # View shared memory as ndarray, and copy over the data.
+        # The RawArray objects have information about the dtype and
+        # buffer size.
+        _data = shmem_as_ndarray(self.shmem_data).reshape((self.n, self.m))
+        _data[:,:] = data
+        
+        # No cKDTree is built here: each worker reconstructs its own tree
+        # from the shared-memory copy, so only the parameters are stored.
+        self.leafsize = leafsize
+        self._nprocs = nprocs
+        self._chunk = chunk
+        self._schedule = schedule        
+        
+    def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
+        '''
+        Same as cKDTree.query except parallelized with multiple
+        processes and shared memory.        
+        '''
+        
+        # allocate shared memory for x and result
+        nx = x.shape[0]
+        shmem_x = mp.RawArray(ctypes.c_double, nx*self.m)
+        shmem_d = mp.RawArray(ctypes.c_double, nx*k)
+        shmem_i = mp.RawArray(ctypes.c_int, nx*k)
+        
+        # view shared memory as ndarrays
+        _x = shmem_as_ndarray(shmem_x).reshape((nx, self.m))
+        if k == 1:
+            # Single-neighbour queries return flat (nx,) result arrays,
+            # matching cKDTree's behaviour.
+            _d = shmem_as_ndarray(shmem_d)
+            _i = shmem_as_ndarray(shmem_i)
+        else:
+            _d = shmem_as_ndarray(shmem_d).reshape((nx, k))
+            _i = shmem_as_ndarray(shmem_i).reshape((nx, k))
+        
+        # copy x to shared memory
+        _x[:] = x
+        
+        # set up a scheduler to load balance the query        
+        scheduler = Scheduler(nx, self._nprocs, chunk=self._chunk,\
+                              schedule=self._schedule)
+
+        # query with multiple processes
+        query_args = [scheduler, self.shmem_data, self.n, self.m,\
+                      self.leafsize, shmem_x, nx, shmem_d, shmem_i,\
+                      k, eps, p, distance_upper_bound]
+                
+        _run_jobs(_parallel_query, query_args, self._nprocs)
+        # return results (private memory)
+        return _d.copy(), _i.copy()
+    
+    
+
+class Proj(pyproj.Proj):
+    """Single-process pyproj.Proj wrapper.
+
+    For lat/lon projections the coordinates are returned unchanged.  The
+    extra 'nprocs' keyword is accepted for interface compatibility with
+    Proj_MP but is ignored here.
+    """
+
+    def __call__(self, data1, data2, inverse=False, radians=False,\
+                 errcheck=False, nprocs=1):
+        if self.is_latlong():
+            # Geographic coordinates need no projection.
+            return data1, data2
+            
+        return super(Proj, self).__call__(data1, data2, inverse=inverse,\
+                                          radians=radians, errcheck=errcheck)
+
+
+class Proj_MP(pyproj.Proj):
+    """Multiprocessing version of the pyproj.Proj wrapper.
+
+    The projection arguments are only stored; each worker process builds
+    its own pyproj.Proj from them (see _parallel_proj), so the parent
+    __init__ is deliberately not called.
+    NOTE(review): is_latlong() is invoked below on this half-initialised
+    object -- confirm pyproj tolerates that for the projections used.
+    """
+    
+    def __init__(self, *args, **kwargs):
+        self._args = args
+        self._kwargs = kwargs
+        
+    def __call__(self, data1, data2, inverse=False, radians=False,\
+                 errcheck=False, nprocs=2, chunk=None, schedule='guided'):
+        if self.is_latlong():
+            return data1, data2
+            
+        # Remember the input shape; work is done on flattened arrays.
+        grid_shape = data1.shape
+        n = data1.size
+        
+        #Create shared memory
+        shmem_data1 = mp.RawArray(ctypes.c_double, n)
+        shmem_data2 = mp.RawArray(ctypes.c_double, n)
+        shmem_res1 = mp.RawArray(ctypes.c_double, n)
+        shmem_res2 = mp.RawArray(ctypes.c_double, n)
+        
+        # view shared memory as ndarrays
+        _data1 = shmem_as_ndarray(shmem_data1)
+        _data2 = shmem_as_ndarray(shmem_data2)
+        _res1 = shmem_as_ndarray(shmem_res1)
+        _res2 = shmem_as_ndarray(shmem_res2)
+        
+        # copy input data to shared memory
+        _data1[:] = data1.ravel()
+        _data2[:] = data2.ravel()
+        
+        # set up a scheduler to load balance the query        
+        scheduler = Scheduler(n, nprocs, chunk=chunk, schedule=schedule)
+                
+        # Projection with multiple processes
+        proj_call_args = [scheduler, shmem_data1, shmem_data2, shmem_res1,\
+                          shmem_res2, self._args, self._kwargs, inverse,\
+                          radians, errcheck]
+        
+        _run_jobs(_parallel_proj, proj_call_args, nprocs)
+        return _res1.copy().reshape(grid_shape), _res2.copy().reshape(grid_shape)
+
+
+class Cartesian(object):
+    """Transform lon/lat coordinates to 3-D cartesian coordinates on a
+    sphere of radius R.  Uses numexpr for the evaluation when available.
+    """
+    
+    def __init__(self, *args, **kwargs):
+        # Accepts (and ignores) arguments for interface compatibility
+        # with the multiprocessing call sites.
+        pass
+    
+    def transform_lonlats(self, lons, lats):
+        # One row per point; columns are x, y, z on the sphere.  The
+        # output dtype follows the input lons array.
+        coords = np.zeros((lons.size, 3), dtype=lons.dtype)
+        deg2rad = lons.dtype.type(np.pi / 180)
+        if ne:
+            # numexpr evaluates each expression in a single C loop.
+            coords[:, 0] = ne.evaluate("R*cos(lats*deg2rad)*cos(lons*deg2rad)")
+            coords[:, 1] = ne.evaluate("R*cos(lats*deg2rad)*sin(lons*deg2rad)")
+            coords[:, 2] = ne.evaluate("R*sin(lats*deg2rad)")
+        else:
+            coords[:, 0] = R*np.cos(lats*deg2rad)*np.cos(lons*deg2rad)
+            coords[:, 1] = R*np.cos(lats*deg2rad)*np.sin(lons*deg2rad)
+            coords[:, 2] = R*np.sin(lats*deg2rad)
+        return coords
+     
+# No separate multiprocessing implementation; the same class is used.
+Cartesian_MP = Cartesian
+
+def _run_jobs(target, args, nprocs):
+    """Run 'target' in 'nprocs' worker processes sharing 'args'.
+
+    Appends a shared error counter and error-message buffer to 'args'
+    (mutating the caller's list), waits for all workers to finish, and
+    raises RuntimeError if any worker reported a failure.
+    """
+
+    # return status in shared memory
+    # access to these values are serialized automatically
+    ierr = mp.Value(ctypes.c_int, 0)
+    err_msg = mp.Array(ctypes.c_char, 1024)
+    
+    args.extend((ierr, err_msg))
+    
+    # All workers run the same target over the same shared arguments;
+    # the Scheduler inside 'args' partitions the work between them.
+    pool = [mp.Process(target=target, args=args) for n in range(nprocs)]
+    for p in pool: p.start()
+    for p in pool: p.join()
+    if ierr.value != 0:
+        raise RuntimeError,\
+                ('%d errors in worker processes. Last one reported:\n%s'%\
+                 (ierr.value, err_msg.value))
+                
+# This is executed in an external process:
+def _parallel_query(scheduler, # scheduler for load balancing
+                    data, ndata, ndim, leafsize, # data needed to reconstruct the kd-tree
+                    x, nx, d, i, # query data and results
+                    k, eps, p, dub, # auxiliary query parameters
+                    ierr, err_msg): # return values (0 on success)
+    """Worker: rebuild the kd-tree from shared memory and run queries.
+
+    Distances and indices are written in place into the shared arrays
+    'd' and 'i'; work chunks are claimed from the shared scheduler.
+    """
+    try:     
+        # View shared memory as ndarrays.
+        _data = shmem_as_ndarray(data).reshape((ndata, ndim))
+        _x = shmem_as_ndarray(x).reshape((nx, ndim))
+        if k == 1:
+            _d = shmem_as_ndarray(d)
+            _i = shmem_as_ndarray(i)
+        else:
+            _d = shmem_as_ndarray(d).reshape((nx, k))
+            _i = shmem_as_ndarray(i).reshape((nx, k))
+
+        # Reconstruct the kd-tree from the data.
+        import scipy.spatial as sp
+        kdtree = sp.cKDTree(_data, leafsize=leafsize)
+
+        # Query for nearest neighbours, using slice ranges,
+        # from the load balancer.
+        for s in scheduler:
+            if k == 1:
+                _d[s], _i[s] = kdtree.query(_x[s,:], k=1, eps=eps, p=p,\
+                                                distance_upper_bound=dub)
+            else:
+                _d[s,:], _i[s,:] = kdtree.query(_x[s,:], k=k, eps=eps, p=p,\
+                                                distance_upper_bound=dub)
+    # An error occurred, increment the return value ierr.
+    # Access to ierr is serialized by multiprocessing.
+    except Exception, e:
+        ierr.value += 1
+        err_msg.value = e.message  
+        
+def _parallel_proj(scheduler, data1, data2, res1, res2, proj_args, proj_kwargs,\
+                   inverse, radians, errcheck, ierr, err_msg):
+    """Worker: project coordinate chunks from shared memory in place.
+
+    Builds its own pyproj.Proj from the stored constructor arguments and
+    writes projected values into the shared 'res1'/'res2' arrays.
+    """
+    try:
+        # View shared memory as ndarrays.
+        _data1 = shmem_as_ndarray(data1)
+        _data2 = shmem_as_ndarray(data2)
+        _res1 = shmem_as_ndarray(res1)
+        _res2 = shmem_as_ndarray(res2)
+        
+        #Initialise pyproj
+        proj = pyproj.Proj(*proj_args, **proj_kwargs)
+        
+        #Reproject data segment
+        for s in scheduler:
+            _res1[s], _res2[s] = proj(_data1[s], _data2[s], inverse=inverse,\
+                                       radians=radians, errcheck=errcheck)
+    
+    # An error occurred, increment the return value ierr.
+    # Access to ierr is serialized by multiprocessing.
+    except Exception, e:
+        ierr.value += 1
+        err_msg.value = e.message  
+        
+def _parallel_transform(scheduler, lons, lats, n, coords, ierr, err_msg):
+    """Worker: lon/lat to 3-D cartesian transform over shared memory.
+
+    Writes x, y, z (sphere of radius R) into the shared (n, 3) 'coords'
+    array; the numpy fallback path of Cartesian.transform_lonlats done
+    chunk-wise per the shared scheduler.
+    """
+    try:
+        # View shared memory as ndarrays.
+        _lons = shmem_as_ndarray(lons)
+        _lats = shmem_as_ndarray(lats)
+        _coords = shmem_as_ndarray(coords).reshape((n, 3))
+        
+        #Transform to cartesian coordinates
+        for s in scheduler:
+            _coords[s, 0] = R*np.cos(np.radians(_lats[s]))*np.cos(np.radians(_lons[s]))
+            _coords[s, 1] = R*np.cos(np.radians(_lats[s]))*np.sin(np.radians(_lons[s]))
+            _coords[s, 2] = R*np.sin(np.radians(_lats[s]))
+    # An error occurred, increment the return value ierr.
+    # Access to ierr is serialized by multiprocessing.
+    except Exception, e:
+        ierr.value += 1
+        err_msg.value = e.message  
diff --git a/pyresample/data_reduce.py b/pyresample/data_reduce.py
new file mode 100644
index 0000000..fc5c5f8
--- /dev/null
+++ b/pyresample/data_reduce.py
@@ -0,0 +1,302 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Reduce data sets based on geographical information"""
+
+import numpy as np
+
+
+#Earth radius
+R = 6370997.0
+
+def swath_from_cartesian_grid(cart_grid, lons, lats, data, 
+                              radius_of_influence):
+    """Makes coarse data reduction of swath data by comparison with 
+    cartesian grid
+    
+    :Parameters:
+    cart_grid : numpy array          
+        Grid of area cartesian coordinates
+    lons : numpy array                
+        Swath lons
+    lats : numpy array                
+        Swath lats
+    data : numpy array                
+        Swath data
+    radius_of_influence : float 
+        Cut off distance in meters
+    
+    :Returns: 
+    (lons, lats, data) : tuple of numpy arrays
+        Reduced swath data and coordinate set
+    """
+    
+    valid_index = get_valid_index_from_cartesian_grid(cart_grid, lons, lats, 
+                                                      radius_of_influence)
+
+    # Keep only the swath points flagged as relevant by the boolean mask.
+    lons = lons[valid_index]
+    lats = lats[valid_index]
+    data = data[valid_index]
+    
+    return lons, lats, data
+
+def get_valid_index_from_cartesian_grid(cart_grid, lons, lats, 
+                                        radius_of_influence):
+    """Calculates relevant data indices using coarse data reduction of swath 
+    data by comparison with cartesian grid
+    
+    :Parameters:
+    cart_grid : numpy array          
+        Grid of area cartesian coordinates
+    lons : numpy array                
+        Swath lons
+    lats : numpy array                
+        Swath lats
+    radius_of_influence : float 
+        Cut off distance in meters
+    
+    :Returns: 
+    valid_index : numpy array
+        Boolean array of same size as lons and lats indicating relevant indices
+    """
+    
+    # Recover lon from (x, y): arccos gives [0, 180], sign(y) restores the
+    # hemisphere. NOTE(review): when y == 0 and x < 0 this yields 0 instead
+    # of 180 because sign(0) == 0 -- presumably negligible for boundary
+    # points; verify.
+    def _get_lons(x, y):
+        return np.rad2deg(np.arccos(x/np.sqrt(x**2 + y**2)))*np.sign(y)
+    
+    # Recover lat from z, assuming points lie on a sphere of radius R.
+    def _get_lats(z):
+        return 90 - np.rad2deg(np.arccos(z/R))
+    
+    #Get sides of target grid (clockwise) and transform to lon lats
+    lons_side1 = _get_lons(cart_grid[0, :, 0], cart_grid[0, :, 1])  
+    lons_side2 = _get_lons(cart_grid[:, -1, 0], cart_grid[:, -1, 1])
+    lons_side3 = _get_lons(cart_grid[-1, ::-1, 0], cart_grid[-1, ::-1, 1])
+    lons_side4 = _get_lons(cart_grid[::-1, 0, 0], cart_grid[::-1, 0, 1])
+    
+    lats_side1 = _get_lats(cart_grid[0, :, 2])
+    lats_side2 = _get_lats(cart_grid[:, -1, 2])
+    lats_side3 = _get_lats(cart_grid[-1, ::-1, 2])
+    lats_side4 = _get_lats(cart_grid[::-1, 0, 2])
+    
+    valid_index = _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
+                                   lats_side1, lats_side2, lats_side3, lats_side4,
+                                   lons, lats, radius_of_influence)
+    
+    return valid_index
+
+def swath_from_lonlat_grid(grid_lons, grid_lats, lons, lats, data,\
+                           radius_of_influence):
+    """Makes coarse data reduction of swath data by comparison with 
+    lon lat grid
+    
+    :Parameters:
+    grid_lons : numpy array          
+        Grid of area lons
+    grid_lats : numpy array           
+        Grid of area lats
+    lons : numpy array                
+        Swath lons
+    lats : numpy array                
+        Swath lats
+    data : numpy array                
+        Swath data
+    radius_of_influence : float 
+        Cut off distance in meters
+    
+    :Returns:
+    (lons, lats, data) : tuple of numpy arrays
+        Reduced swath data and coordinate set 
+    """
+    
+    valid_index = get_valid_index_from_lonlat_grid(grid_lons, grid_lats, lons, lats, radius_of_influence)
+
+    # Keep only the swath points flagged as relevant by the boolean mask.
+    lons = lons[valid_index]
+    lats = lats[valid_index]
+    data = data[valid_index]
+    
+    return lons, lats, data
+
+def swath_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, data,\
+                           radius_of_influence):
+    """Makes coarse data reduction of swath data by comparison with 
+    lon lat boundary
+    
+    :Parameters:
+    boundary_lons : object
+        Boundary object of area lons, exposing side1..side4 arrays
+    boundary_lats : object
+        Boundary object of area lats, exposing side1..side4 arrays
+    lons : numpy array                
+        Swath lons
+    lats : numpy array                
+        Swath lats
+    data : numpy array                
+        Swath data
+    radius_of_influence : float 
+        Cut off distance in meters
+    
+    :Returns:
+    (lons, lats, data) : tuple of numpy arrays
+        Reduced swath data and coordinate set 
+    """
+    
+    valid_index = get_valid_index_from_lonlat_boundaries(boundary_lons, 
+                                                         boundary_lats, lons, lats, radius_of_influence)
+
+    # Keep only the swath points flagged as relevant by the boolean mask.
+    lons = lons[valid_index]
+    lats = lats[valid_index]
+    data = data[valid_index]
+    
+    return lons, lats, data
+
+
+def get_valid_index_from_lonlat_grid(grid_lons, grid_lats, lons, lats, radius_of_influence):
+    """Calculates relevant data indices using coarse data reduction of swath 
+    data by comparison with lon lat grid
+    
+    :Parameters:
+    grid_lons : numpy array          
+        Grid of area lons
+    grid_lats : numpy array          
+        Grid of area lats
+    lons : numpy array                
+        Swath lons
+    lats : numpy array                
+        Swath lats
+    radius_of_influence : float 
+        Cut off distance in meters
+    
+    :Returns: 
+    valid_index : numpy array
+        Boolean array of same size as lon and lat indicating relevant indices
+    """
+    
+    #Get sides of target grid (clockwise from the top edge)
+    lons_side1 = grid_lons[0, :]    
+    lons_side2 = grid_lons[:, -1]
+    lons_side3 = grid_lons[-1, ::-1]
+    lons_side4 = grid_lons[::-1, 0]
+
+    # NOTE(review): lat sides 3 and 4 are not reversed like the matching
+    # lon sides (and unlike the cartesian variant above). Looks harmless
+    # because _get_valid_index only uses min/max of the lats -- verify.
+    lats_side1 = grid_lats[0, :]    
+    lats_side2 = grid_lats[:, -1]
+    lats_side3 = grid_lats[-1, :]
+    lats_side4 = grid_lats[:, 0]
+    
+    valid_index = _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
+                                   lats_side1, lats_side2, lats_side3, lats_side4,
+                                   lons, lats, radius_of_influence)
+    
+    return valid_index
+
+def get_valid_index_from_lonlat_boundaries(boundary_lons, boundary_lats, lons, lats, radius_of_influence):
+    """Find relevant indices from grid boundaries using the 
+    winding number theorem
+    
+    boundary_lons and boundary_lats are Boundary-like objects exposing
+    side1..side4 arrays; returns a boolean index array into lons/lats.
+    """
+    
+    valid_index = _get_valid_index(boundary_lons.side1, boundary_lons.side2, 
+                                   boundary_lons.side3, boundary_lons.side4,
+                                   boundary_lats.side1, boundary_lats.side2, 
+                                   boundary_lats.side3, boundary_lats.side4,
+                                   lons, lats, radius_of_influence)
+    
+    return valid_index
+    
+def _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,
+                     lats_side1, lats_side2, lats_side3, lats_side4,
+                     lons, lats, radius_of_influence):
+    """Find relevant indices from grid boundaries using the 
+    winding number theorem
+    
+    Returns a boolean array of the same size as lons/lats that is True for
+    swath points that may fall within radius_of_influence of the target
+    grid described by the four boundary sides (clockwise labelling).
+    """
+    
+    #Coarse reduction of data based on extrema analysis of the boundary 
+    #lon lat values of the target grid
+    illegal_lons = (((lons_side1 < -180) | (lons_side1 > 180)).any() or
+                    ((lons_side2 < -180) | (lons_side2 > 180)).any() or
+                    ((lons_side3 < -180) | (lons_side3 > 180)).any() or
+                    ((lons_side4 < -180) | (lons_side4 > 180)).any())
+    
+    illegal_lats = (((lats_side1 < -90) | (lats_side1 > 90)).any() or
+                    ((lats_side2 < -90) | (lats_side2 > 90)).any() or
+                    ((lats_side3 < -90) | (lats_side3 > 90)).any() or
+                    ((lats_side4 < -90) | (lats_side4 > 90)).any())
+    
+    if illegal_lons or illegal_lats:
+        #Grid boundaries are not safe to operate on: skip reduction and
+        #mark every swath point as relevant.
+        #NOTE(review): np.bool is deprecated/removed in modern NumPy.
+        return np.ones(lons.size, dtype=np.bool)   
+    
+    #Find the angle sum of the grid boundary (winding number theorem)
+    angle_sum = 0
+    for side in (lons_side1, lons_side2, lons_side3, lons_side4):
+        prev = None
+        #NOTE(review): side_sum is accumulated but never used.
+        side_sum = 0
+        for lon in side:
+            #NOTE(review): "if prev:" is falsy when prev == 0.0, so a step
+            #starting exactly at lon 0 is skipped -- presumably should be
+            #"if prev is not None"; verify.
+            if prev:
+                delta = lon - prev
+                if abs(delta) > 180:
+                    #Wrap date-line crossing steps to the short way around
+                    delta = (abs(delta)-360) * (delta//abs(delta))
+                angle_sum += delta
+                side_sum += delta
+            prev = lon
+    
+    #Buffer min and max lon and lat of interest with radius of interest.
+    #NOTE(review): radius_of_influence / R is an angle in radians, but it is
+    #added to values in degrees without conversion, so the buffer is ~57x
+    #smaller than intended -- verify against upstream behavior.
+    lat_min = min(lats_side1.min(), lats_side2.min(), lats_side3.min(),
+                  lats_side4.min())
+    lat_min_buffered = lat_min - float(radius_of_influence) / R
+    lat_max = max(lats_side1.max(), lats_side2.max(), lats_side3.max(),
+                  lats_side4.max())
+    lat_max_buffered = lat_max + float(radius_of_influence) / R
+
+    max_angle_s2 = max(abs(lats_side2.max()), abs(lats_side2.min()))
+    max_angle_s4 = max(abs(lats_side4.max()), abs(lats_side4.min()))
+    #NOTE(review): division by zero if a side lies entirely on the equator
+    #(max angle 0) -- verify callers cannot hit this.
+    lon_min_buffered = (lons_side4.min() - 
+                       float(radius_of_influence) / 
+                       (np.sin(np.radians(max_angle_s4)) * R))
+                    
+    lon_max_buffered = (lons_side2.max() + 
+                       float(radius_of_influence) / 
+                       (np.sin(np.radians(max_angle_s2)) * R))
+    
+    #From the winding number theorem follows:
+    #angle_sum possibilities:
+    #-360: area covers north pole
+    # 360: area covers south pole
+    #   0: area covers no poles
+    #else: area covers both poles    
+    if round(angle_sum) == -360:
+        #Covers NP
+        valid_index = (lats >= lat_min_buffered)        
+    elif round(angle_sum) == 360:
+        #Covers SP
+        valid_index = (lats <= lat_max_buffered)        
+    elif round(angle_sum) == 0:
+        #Covers no poles; * acts as elementwise boolean AND
+        valid_lats = (lats >= lat_min_buffered) * (lats <= lat_max_buffered)
+
+        if lons_side2.min() > lons_side4.max():
+            #No date line crossing                      
+            valid_lons = (lons >= lon_min_buffered) * (lons <= lon_max_buffered)
+        else:
+            #Date line crossing: accept both segments (+ acts as boolean OR)
+            seg1 = (lons >= lon_min_buffered) * (lons <= 180)
+            seg2 = (lons <= lon_max_buffered) * (lons >= -180)
+            valid_lons = seg1 + seg2                        
+        
+        valid_index = valid_lats * valid_lons        
+    else:
+        #Covers both poles don't reduce
+        #NOTE(review): np.bool is deprecated/removed in modern NumPy.
+        valid_index = np.ones(lons.size, dtype=np.bool)
+
+    return valid_index
diff --git a/pyresample/geo_filter.py b/pyresample/geo_filter.py
new file mode 100644
index 0000000..e6fec3e
--- /dev/null
+++ b/pyresample/geo_filter.py
@@ -0,0 +1,86 @@
+import numpy as np
+
+import _spatial_mp
+import geometry
+
+class GridFilter(object):
+    """Geographic filter from a grid mask
+    
+    :Parameters:
+    area_def : object
+        AreaDefinition describing the grid the mask is defined on
+    filter : numpy array
+        Mask as boolean numpy array with the same shape as the area
+    nprocs : int, optional
+        Number of processor cores to be used
+        
+    """
+    
+    # NOTE(review): the 'filter' parameter shadows the builtin and shares
+    # its name with the filter() method below.
+    def __init__(self, area_def, filter, nprocs=1):
+        self.area_def = area_def
+        # NOTE(review): np.bool is deprecated/removed in modern NumPy.
+        self._filter = filter.astype(np.bool)
+        self.nprocs = nprocs
+        
+    def get_valid_index(self, geometry_def):
+        """Calculates valid_index array based on the lons and lats of
+        geometry_def
+        
+        :Parameters:
+        geometry_def : object
+            Geometry definition exposing lons and lats
+        
+        :Returns:
+            Boolean numpy array of same shape as the geometry lons and lats
+             
+        """
+        
+        lons = geometry_def.lons[:]
+        lats = geometry_def.lats[:]
+        
+        #Get projection coords
+        if self.nprocs > 1:
+            proj = _spatial_mp.Proj_MP(**self.area_def.proj_dict)
+        else:
+            proj = _spatial_mp.Proj(**self.area_def.proj_dict)
+            
+        x_coord, y_coord = proj(lons, lats, nprocs=self.nprocs)
+                        
+        #Find array indices of coordinates   
+        target_x = ((x_coord / self.area_def.pixel_size_x) + 
+                    self.area_def.pixel_offset_x).astype(np.int32)
+        target_y = (self.area_def.pixel_offset_y - 
+                    (y_coord / self.area_def.pixel_size_y)).astype(np.int32)        
+        
+        #Create mask for pixels outside array (invalid pixels)
+        target_x_valid = (target_x >= 0) & (target_x < self.area_def.x_size)
+        target_y_valid = (target_y >= 0) & (target_y < self.area_def.y_size)
+        
+        #Set index of invalid pixels to 0 so the fancy indexing below stays
+        #in bounds; those pixels are masked out again afterwards
+        target_x[np.invert(target_x_valid)] = 0 
+        target_y[np.invert(target_y_valid)] = 0
+        
+        #Find mask
+        filter = self._filter[target_y, target_x]
+        
+        #Remove invalid pixels
+        filter = (filter & target_x_valid & target_y_valid).astype(np.bool)
+    
+        return filter
+    
+    def filter(self, geometry_def, data):
+        """Filter geometry_def and data, keeping only points where the
+        grid mask is True.
+        
+        :Returns:
+        (geometry_def_f, data_f) : filtered CoordinateDefinition and data
+        """
+        lons = geometry_def.lons[:]
+        lats = geometry_def.lats[:]
+        valid_index = self.get_valid_index(geometry_def)
+        lons_f = lons[valid_index]
+        lats_f = lats[valid_index]
+        data_f = data[valid_index]
+        geometry_def_f = \
+            geometry.CoordinateDefinition(lons_f, lats_f, 
+                                          nprocs=geometry_def.nprocs)
+        return geometry_def_f, data_f
+        
+        
diff --git a/pyresample/geometry.py b/pyresample/geometry.py
new file mode 100644
index 0000000..6593c94
--- /dev/null
+++ b/pyresample/geometry.py
@@ -0,0 +1,838 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010, 2013  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Classes for geometry operations"""
+import weakref
+
+import numpy as np
+
+import _spatial_mp
+
+
+class DimensionError(Exception):
+    """Raised when an operation is applied to a geometry with an
+    unsupported number of dimensions."""
+    pass
+
+
+class Boundary(object):
+    """Container for geometry boundary.
+    Labelling starts in upper left corner and proceeds clockwise"""
+      
+    def __init__(self, side1, side2, side3, side4):
+        # Clockwise labelling (see class docstring): side1 = top edge,
+        # side2 = right edge, side3 = bottom edge, side4 = left edge.
+        self.side1 = side1
+        self.side2 = side2
+        self.side3 = side3
+        self.side4 = side4
+
+
+class BaseDefinition(object):
+    """Base class for geometry definitions"""
+           
+    def __init__(self, lons=None, lats=None, nprocs=1):
+        # lons and lats may both be None (subclasses such as AreaDefinition
+        # can compute them later); when given they must match in shape.
+        if type(lons) != type(lats):
+            raise TypeError('lons and lats must be of same type')
+        elif lons is not None:
+            if lons.shape != lats.shape:
+                raise ValueError('lons and lats must have same shape')
+            
+        self.nprocs = nprocs
+
+        self.lons = lons
+        self.lats = lats
+        
+        # Cache slot used by get_cartesian_coords(cache=True)
+        self.cartesian_coords = None
+    
+    def __eq__(self, other):
+        """Test for approximate equality"""
+
+        # Compute lon/lats on demand when they are not stored.
+        if other.lons is None or other.lats is None:
+            other_lons, other_lats = other.get_lonlats()
+        else:
+            other_lons = other.lons
+            other_lats = other.lats
+
+        if self.lons is None or self.lats is None:
+            self_lons, self_lats = self.get_lonlats()
+        else:
+            self_lons = self.lons
+            self_lats = self.lats
+
+        # NOTE(review): __eq__ is defined without __hash__; under Python 2
+        # instances keep the default identity hash.
+        try:
+            return (np.allclose(self_lons, other_lons, atol=1e-6,
+                                rtol=5e-9) and
+                    np.allclose(self_lats, other_lats, atol=1e-6,
+                                rtol=5e-9))
+        except (AttributeError, ValueError):
+            return False  
+
+    def __ne__(self, other):
+        """Test for approximate inequality (negation of __eq__)"""
+        
+        return not self.__eq__(other)
+    
+    def get_lonlat(self, row, col):
+        """Retrieve lon and lat of single pixel
+        
+        :Parameters:
+        row : int
+        col : int
+        
+        :Returns:
+        (lon, lat) : tuple of floats
+        """
+        
+        # self.ndim is set by subclasses; (row, col) addressing only makes
+        # sense for 2D geometries.
+        if self.ndim != 2:
+            raise DimensionError(('operation undefined '
+                                  'for %sD geometry ') % self.ndim)
+        elif self.lons is None or self.lats is None:
+            raise ValueError('lon/lat values are not defined')
+        return self.lons[row, col], self.lats[row, col]
+    
+    def get_lonlats(self, data_slice=None, **kwargs):
+        """Base method for lon lat retrieval with slicing"""
+        
+        if self.lons is None or self.lats is None:
+            raise ValueError('lon/lat values are not defined')
+        elif data_slice is None:
+            return self.lons, self.lats
+        else:
+            return self.lons[data_slice], self.lats[data_slice]
+   
+    def get_boundary_lonlats(self):
+            """Returns a (lons, lats) pair of Boundary objects, labelled
+            clockwise (sides 3 and 4 are reversed accordingly)"""
+            
+            side1 = self.get_lonlats(data_slice=(0, slice(None)))
+            side2 = self.get_lonlats(data_slice=(slice(None), -1))
+            side3 = self.get_lonlats(data_slice=(-1, slice(None)))
+            side4 = self.get_lonlats(data_slice=(slice(None), 0))
+            return Boundary(side1[0], side2[0], side3[0][::-1], side4[0][::-1]), Boundary(side1[1], side2[1], side3[1][::-1], side4[1][::-1])
+         
+    def get_cartesian_coords(self, nprocs=None, data_slice=None, cache=False):
+        """Retrieve cartesian coordinates of geometry definition
+        
+        :Parameters:
+        nprocs : int, optional
+            Number of processor cores to be used.
+            Defaults to the nprocs set when instantiating object
+        data_slice : slice object, optional
+            Calculate only cartesian coordinates for the defined slice
+        cache : bool, optional
+            Store the result. Requires data_slice to be None
+            
+        :Returns:
+        cartesian_coords : numpy array
+        """
+
+        if self.cartesian_coords is None:
+            #Coordinates are not cached
+            if nprocs is None:
+                nprocs = self.nprocs
+            
+            if data_slice is None:
+                #Use full slice
+                data_slice = slice(None)
+                
+            lons, lats = self.get_lonlats(nprocs=nprocs, data_slice=data_slice)
+                    
+            if nprocs > 1:
+                cartesian = _spatial_mp.Cartesian_MP(nprocs)
+            else:
+                cartesian = _spatial_mp.Cartesian()
+                            
+            cartesian_coords = cartesian.transform_lonlats(np.ravel(lons), 
+                                                           np.ravel(lats))
+            
+            if isinstance(lons, np.ndarray) and lons.ndim > 1:
+                #Reshape to correct shape
+                cartesian_coords = cartesian_coords.reshape(lons.shape[0], 
+                                                            lons.shape[1], 3)
+            
+            # NOTE(review): data_slice was reassigned to slice(None) above,
+            # so "data_slice is None" can never be True here and the result
+            # is never cached -- presumably a bug; verify intended caching.
+            if cache and data_slice is None:
+                self.cartesian_coords = cartesian_coords  
+        else:
+            #Coordinates are cached
+            if data_slice is None:
+                cartesian_coords = self.cartesian_coords
+            else:
+                cartesian_coords = self.cartesian_coords[data_slice]
+                
+        return cartesian_coords    
+
+    @property
+    def corners(self):
+        """Returns the corners of the current area.
+        """
+        from pyresample.spherical_geometry import Coordinate
+        return [Coordinate(*self.get_lonlat(0, 0)),
+                Coordinate(*self.get_lonlat(0, -1)),
+                Coordinate(*self.get_lonlat(-1, -1)),
+                Coordinate(*self.get_lonlat(-1, 0))]
+        
+    def __contains__(self, point):
+        """Is a point inside the 4 corners of the current area? This uses
+        great circle arcs as area boundaries.
+        """
+        from pyresample.spherical_geometry import point_inside, Coordinate
+        corners = self.corners
+
+        if isinstance(point, tuple):
+            return point_inside(Coordinate(*point), corners)
+        else:
+            return point_inside(point, corners)
+
+    def overlaps(self, other):
+        """Tests if the current area overlaps the *other* area. This is based
+        solely on the corners of areas, assuming the boundaries to be great
+        circles.
+        
+        :Parameters:
+        other : object
+            Instance of subclass of BaseDefinition
+            
+        :Returns:
+        overlaps : bool
+        """
+
+        from pyresample.spherical_geometry import Arc
+        
+        self_corners = self.corners
+
+        other_corners = other.corners
+        
+        # Quick acceptance: any corner of one area inside the other.
+        for i in self_corners:
+            if i in other:
+                return True
+        for i in other_corners:
+            if i in self:
+                return True
+    
+        # Otherwise test every pair of boundary arcs for intersection.
+        self_arc1 = Arc(self_corners[0], self_corners[1])
+        self_arc2 = Arc(self_corners[1], self_corners[2])
+        self_arc3 = Arc(self_corners[2], self_corners[3])
+        self_arc4 = Arc(self_corners[3], self_corners[0])
+
+        other_arc1 = Arc(other_corners[0], other_corners[1])
+        other_arc2 = Arc(other_corners[1], other_corners[2])
+        other_arc3 = Arc(other_corners[2], other_corners[3])
+        other_arc4 = Arc(other_corners[3], other_corners[0])
+
+        for i in (self_arc1, self_arc2, self_arc3, self_arc4):
+            for j in (other_arc1, other_arc2, other_arc3, other_arc4):
+                if i.intersects(j):
+                    return True
+        return False
+
+    def get_area(self):
+        """Get the area of the convex area defined by the corners of the current
+        area.
+        """
+        from pyresample.spherical_geometry import get_polygon_area
+
+        return get_polygon_area(self.corners)
+
+    def intersection(self, other):
+        """Returns the corners of the intersection polygon of the current area
+        with *other*.
+        
+        :Parameters:
+        other : object
+            Instance of subclass of BaseDefinition
+            
+        :Returns:
+        (corner1, corner2, corner3, corner4) : tuple of points
+        """
+        from pyresample.spherical_geometry import intersection_polygon
+        return intersection_polygon(self.corners, other.corners)
+
+    def overlap_rate(self, other):
+        """Get how much the current area overlaps an *other* area.
+        
+        :Parameters:
+        other : object
+            Instance of subclass of BaseDefinition
+            
+        :Returns:
+        overlap_rate : float
+        """
+        
+        from pyresample.spherical_geometry import get_polygon_area
+        other_area = other.get_area()
+        inter_area = get_polygon_area(self.intersection(other))
+        return inter_area / other_area
+
+
+ 
+class CoordinateDefinition(BaseDefinition):
+    """Base class for geometry definitions defined by lons and lats only"""
+     
+    def __init__(self, lons, lats, nprocs=1):
+        if lons.shape == lats.shape and lons.dtype == lats.dtype:
+            self.shape = lons.shape
+            self.size = lons.size
+            self.ndim = lons.ndim
+            self.dtype = lons.dtype
+        else:
+            raise ValueError(('%s must be created with either '
+                             'lon/lats of the same shape with same dtype') % 
+                             self.__class__.__name__)
+        super(CoordinateDefinition, self).__init__(lons, lats, nprocs)
+        
+    def concatenate(self, other):
+        """Return a new definition with the lons/lats of *other* appended.
+        The class of the result is determined by _get_highest_level_class
+        (a module-level helper defined elsewhere in this file)."""
+        if self.ndim != other.ndim:
+            raise DimensionError(('Unable to concatenate %sD and %sD '
+                                  'geometries') % (self.ndim, other.ndim))
+        klass = _get_highest_level_class(self, other)        
+        lons = np.concatenate((self.lons, other.lons))
+        lats = np.concatenate((self.lats, other.lats))
+        nprocs = min(self.nprocs, other.nprocs)
+        return klass(lons, lats, nprocs=nprocs)
+        
+    def append(self, other):    
+        """Append the lons/lats of *other* to this definition in place,
+        updating shape and size."""
+        if self.ndim != other.ndim:
+            raise DimensionError(('Unable to append %sD and %sD '
+                                  'geometries') % (self.ndim, other.ndim))
+        self.lons = np.concatenate((self.lons, other.lons))
+        self.lats = np.concatenate((self.lats, other.lats))
+        self.shape = self.lons.shape
+        self.size = self.lons.size
+
+    def __str__(self):
+        #Rely on numpy's object printing
+        return ('Shape: %s\nLons: %s\nLats: %s') % (str(self.shape), 
+                                                    str(self.lons),
+                                                    str(self.lats))
+        
+
+class GridDefinition(CoordinateDefinition):
+    """Grid defined by lons and lats
+    
+    :Parameters:
+    lons : numpy array
+    lats : numpy array
+    nprocs : int, optional
+        Number of processor cores to be used for calculations.
+        
+    :Attributes:
+    shape : tuple
+        Grid shape as (rows, cols)
+    size : int
+        Number of elements in grid
+        
+    Properties:
+    lons : object
+        Grid lons
+    lats : object
+        Grid lats
+    cartesian_coords : object
+        Grid cartesian coordinates
+    """
+    
+    def __init__(self, lons, lats, nprocs=1):
+        # Grids must be 2D arrays of identical shape.
+        if lons.shape != lats.shape:
+            raise ValueError('lon and lat grid must have same shape')
+        elif lons.ndim != 2:
+            raise ValueError('2 dimensional lon lat grid expected')
+        
+        super(GridDefinition, self).__init__(lons, lats, nprocs)
+
+
+class SwathDefinition(CoordinateDefinition):
+    """Swath defined by lons and lats
+    
+    :Parameters:
+    lons : numpy array
+    lats : numpy array
+    nprocs : int, optional
+        Number of processor cores to be used for calculations.
+        
+    :Attributes:
+    shape : tuple
+        Swath shape
+    size : int
+        Number of elements in swath
+    ndim : int
+        Swath dimensions
+        
+    Properties:
+    lons : object
+        Swath lons
+    lats : object
+        Swath lats
+    cartesian_coords : object
+        Swath cartesian coordinates
+    """
+    
+    def __init__(self, lons, lats, nprocs=1):
+        # Swaths may be 1D or 2D; lons and lats must be congruent.
+        if lons.shape != lats.shape:
+            raise ValueError('lon and lat arrays must have same shape')
+        elif lons.ndim > 2:
+            raise ValueError('Only 1 and 2 dimensional swaths are allowed')
+        super(SwathDefinition, self).__init__(lons, lats, nprocs)
+
+
+class AreaDefinition(BaseDefinition):    
+    """Holds definition of an area.
+
+    :Parameters:
+    area_id : str 
+        ID of area
+    name : str
+        Name of area
+    proj_id : str 
+        ID of projection
+    proj_dict : dict 
+        Dictionary with Proj.4 parameters
+    x_size : int 
+        x dimension in number of pixels
+    y_size : int     
+        y dimension in number of pixels    
+    area_extent : list 
+        Area extent as a list (LL_x, LL_y, UR_x, UR_y)
+    nprocs : int, optional 
+        Number of processor cores to be used
+    lons : numpy array, optional
+        Grid lons
+    lats : numpy array, optional
+        Grid lats
+    
+    :Attributes:
+    area_id : str         
+        ID of area
+    name : str
+        Name of area
+    proj_id : str         
+        ID of projection
+    proj_dict : dict        
+        Dictionary with Proj.4 parameters
+    x_size : int          
+        x dimension in number of pixels
+    y_size : int          
+        y dimension in number of pixels
+    shape : tuple
+        Corresponding array shape as (rows, cols)
+    size : int
+        Number of points in grid
+    area_extent : tuple     
+        Area extent as a tuple (LL_x, LL_y, UR_x, UR_y)
+    area_extent_ll : tuple     
+        Area extent in lons lats as a tuple (LL_lon, LL_lat, UR_lon, UR_lat)
+    pixel_size_x : float    
+        Pixel width in projection units
+    pixel_size_y : float    
+        Pixel height in projection units
+    pixel_upper_left : list 
+        Coordinates (x, y) of center of upper left pixel in projection units
+    pixel_offset_x : float 
+        x offset between projection center and upper left corner of upper 
+        left pixel in units of pixels.
+    pixel_offset_y : float 
+        y offset between projection center and upper left corner of upper 
+        left pixel in units of pixels..
+    
+    Properties:
+    proj4_string : str
+        Projection defined as Proj.4 string
+    lons : object
+        Grid lons
+    lats : object
+        Grid lats
+    cartesian_coords : object
+        Grid cartesian coordinates
+    projection_x_coords : object
+        Grid projection x coordinate
+    projection_y_coords : object
+        Grid projection y coordinate
+    """
+                  
+            
+    def __init__(self, area_id, name, proj_id, proj_dict, x_size, y_size,
+                 area_extent, nprocs=1, lons=None, lats=None, dtype=np.float64):
+        if not isinstance(proj_dict, dict):
+            raise TypeError('Wrong type for proj_dict: %s. Expected dict.'
+                            % type(proj_dict))
+
+        super(AreaDefinition, self).__init__(lons, lats, nprocs)
+        self.area_id = area_id
+        self.name = name
+        self.proj_id = proj_id
+        self.x_size = x_size
+        self.y_size = y_size
+        self.shape = (y_size, x_size)
+        if lons is not None:
+            if lons.shape != self.shape:
+                raise ValueError('Shape of lon lat grid must match '
+                                 'area definition')
+        self.size = y_size * x_size
+        self.ndim = 2
+        # Pixel sizes in projection units, derived from extent and grid size
+        self.pixel_size_x = (area_extent[2] - area_extent[0]) / float(x_size)
+        self.pixel_size_y = (area_extent[3] - area_extent[1]) / float(y_size)
+        self.proj_dict = proj_dict
+        self.area_extent = tuple(area_extent)
+        
+        # Calculate area_extent in lon lat by inverse-projecting the
+        # lower left and upper right extent corners
+        proj = _spatial_mp.Proj(**proj_dict)
+        corner_lons, corner_lats = proj((area_extent[0], area_extent[2]), 
+                                        (area_extent[1], area_extent[3]), 
+                                        inverse=True)
+        self.area_extent_ll = (corner_lons[0], corner_lats[0], 
+                               corner_lons[1], corner_lats[1])
+                
+        #Calculate projection coordinates of center of upper left pixel
+        self.pixel_upper_left = \
+                              (float(area_extent[0]) + 
+                               float(self.pixel_size_x) / 2,
+                               float(area_extent[3]) - 
+                               float(self.pixel_size_y) / 2)
+        
+        #Pixel_offset defines the distance to projection center from origin (UL)
+        #of image in units of pixels. 
+        self.pixel_offset_x = -self.area_extent[0] / self.pixel_size_x
+        self.pixel_offset_y = self.area_extent[3] / self.pixel_size_y
+        
+        # Lazily computed projection coordinate caches
+        self.projection_x_coords = None
+        self.projection_y_coords = None
+
+        self.dtype = dtype
+        
+    def __str__(self):
+        #We need a sorted dictionary for a unique hash of str(self)
+        proj_dict = self.proj_dict
+        proj_str = ('{' + 
+                    ', '.join(["'%s': '%s'"%(str(k), str(proj_dict[k]))
+                               for k in sorted(proj_dict.keys())]) +
+                    '}')
+        return ('Area ID: %s\nName: %s\nProjection ID: %s\n'
+                'Projection: %s\nNumber of columns: %s\nNumber of rows: %s\n'
+                'Area extent: %s') % (self.area_id, self.name, self.proj_id, 
+                                      proj_str, self.x_size, self.y_size, 
+                                      self.area_extent)
+               
+    __repr__ = __str__
+    
    def __eq__(self, other):
        """Test for equality

        Two definitions compare equal when their projection dict, shape
        and area extent all match. If *other* lacks any of these
        attributes the comparison is delegated to the parent class.
        """

        try:
            # Short-circuit order matters: a mismatch in an earlier field
            # returns False before later attributes of other are touched
            return ((self.proj_dict == other.proj_dict) and
                    (self.shape == other.shape) and
                    (self.area_extent == other.area_extent))
        except AttributeError:
            # other is not an area-style object; use the base comparison
            return super(AreaDefinition, self).__eq__(other)
+        
    def __ne__(self, other):
        """Test for inequality (negation of __eq__)"""

        return not self.__eq__(other)
+               
+    def get_xy_from_lonlat(self, lon, lat):
+        """Retrieve closest x and y coordinates (column, row indices) for the
+        specified geolocation (lon,lat) if inside area. If lon,lat is a point a
+        ValueError is raised if the return point is outside the area domain. If
+        lon,lat is a tuple of sequences of longitudes and latitudes, a tuple of
+        masked arrays are returned.
+        
+        :Input:
+        lon : point or sequence (list or array) of longitudes
+        lat : point or sequence (list or array) of latitudes
+
+        :Returns:
+        (x, y) : tuple of integer points/arrays
+        """
+
+        if isinstance(lon, list):
+            lon = np.array(lon)
+        if isinstance(lat, list):
+            lat = np.array(lat)
+
+        if ((isinstance(lon, np.ndarray) and 
+             not isinstance(lat, np.ndarray)) or 
+            (not isinstance(lon, np.ndarray) and 
+             isinstance(lat, np.ndarray))):
+            raise ValueError("Both lon and lat needs to be of " + 
+                             "the same type and have the same dimensions!")
+        
+        if isinstance(lon, np.ndarray) and isinstance(lat, np.ndarray):
+            if lon.shape != lat.shape:
+                raise ValueError("lon and lat is not of the same shape!")
+
+        pobj = _spatial_mp.Proj(self.proj4_string)
+        upl_x = self.area_extent[0]
+        upl_y = self.area_extent[3]
+        xscale = abs(self.area_extent[2] - 
+                     self.area_extent[0]) / float(self.x_size)
+        yscale = abs(self.area_extent[1] - 
+                     self.area_extent[3]) / float(self.y_size)
+  
+        xm_, ym_ = pobj(lon, lat)
+        x__ = (xm_ - upl_x) / xscale
+        y__ = (upl_y - ym_) / yscale
+
+        if isinstance(x__, np.ndarray) and isinstance(y__, np.ndarray):
+            mask = (((x__ < 0 ) | (x__ > self.x_size)) | 
+                    ((y__ < 0)  | (y__ > self.y_size)))
+            return (np.ma.masked_array(x__.astype('int'), mask=mask, 
+                                       fill_value=-1),
+                    np.ma.masked_array(y__.astype('int'), mask=mask,
+                                       fill_value=-1))
+        else:
+            if ((x__ < 0 or x__ > self.x_size) or
+                (y__ < 0 or y__ > self.y_size)):
+                raise ValueError('Point outside area:( %f %f)' % (x__, y__))
+            return int(x__), int(y__)
+
    def get_lonlat(self, row, col):
        """Retrieves lon and lat values of single point in area grid

        :Parameters:
        row : int
            Row (y) index of the pixel
        col : int
            Column (x) index of the pixel

        :Returns:
        (lon, lat) : tuple of floats
        """

        # A (row, col) data_slice triggers the single-element branch of
        # get_proj_coords, so scalar lon/lat values are returned
        return self.get_lonlats(nprocs=None, data_slice=(row, col))
+       
    def get_proj_coords(self, data_slice=None, cache=False, dtype=None):
        """Get projection coordinates of grid

        :Parameters:
        data_slice : slice object, optional
            Calculate only coordinates for specified slice. Accepts a
            single row slice, a (row, col) pair of slices and/or ints,
            or None for the full grid
        cache : bool, optional
            Store the result on the instance. Requires data_slice to be None
        dtype : numpy dtype, optional
            dtype of the returned arrays; defaults to self.dtype

        :Returns:
        (target_x, target_y) : tuple of numpy arrays
            Grids of area x- and y-coordinates in projection units
        """

        def get_val(val, sub_val, max):
            #Get value with substitution and wrapping
            if val is None:
                return sub_val
            else:
                if val < 0:
                    #Wrap index (negative indices count from the end)
                    return max + val
                else:
                    return val

        if self.projection_x_coords is not None and self.projection_y_coords is not None:
            # Projection coords are cached
            if data_slice is None:
                return self.projection_x_coords, self.projection_y_coords
            else:
                return self.projection_x_coords[data_slice], self.projection_y_coords[data_slice]

        # Flags deciding the shape of the returned values (scalar / 1D / 2D)
        is_single_value = False
        is_1d_select = False

        if dtype is None:
            dtype = self.dtype

        #create coordinates of local area as ndarrays
        # Decode data_slice into (row_start, col_start) offsets and the
        # (rows, cols) extent of the requested window
        if data_slice is None or data_slice == slice(None):
            #Full slice
            rows = self.y_size
            cols = self.x_size
            row_start = 0
            col_start = 0
        else:            
            if isinstance(data_slice, slice):
                #Row slice
                row_start = get_val(data_slice.start, 0, self.y_size)
                col_start = 0
                rows = get_val(data_slice.stop, self.y_size, self.y_size) - row_start                                 
                cols = self.x_size
            elif isinstance(data_slice[0], slice) and isinstance(data_slice[1], slice):
                #Block slice
                row_start = get_val(data_slice[0].start, 0, self.y_size)
                col_start = get_val(data_slice[1].start, 0, self.x_size)
                rows = get_val(data_slice[0].stop, self.y_size, self.y_size) - row_start
                cols = get_val(data_slice[1].stop, self.x_size, self.x_size) - col_start
            elif isinstance(data_slice[0], slice):
                #Select from col (row slice, single column)
                is_1d_select = True
                row_start = get_val(data_slice[0].start, 0, self.y_size)
                col_start = get_val(data_slice[1], 0, self.x_size)
                rows = get_val(data_slice[0].stop, self.y_size, self.y_size) - row_start
                cols = 1
            elif isinstance(data_slice[1], slice):
                #Select from row (single row, col slice)
                is_1d_select = True
                row_start = get_val(data_slice[0], 0, self.y_size)
                col_start = get_val(data_slice[1].start, 0, self.x_size)
                rows = 1
                cols = get_val(data_slice[1].stop, self.x_size, self.x_size) - col_start
            else:
                #Single element select
                is_single_value = True
                
                row_start = get_val(data_slice[0], 0, self.y_size)                
                col_start = get_val(data_slice[1], 0, self.x_size)
                    
                rows = 1
                cols = 1    
        
        #Calculate coordinates
        # x grows to the right from the upper left pixel center; y
        # decreases downwards from it (rows count from the top)
        target_x = np.fromfunction(lambda i, j: (j + col_start) * 
                                   self.pixel_size_x + 
                                   self.pixel_upper_left[0],
                                   (rows, 
                                    cols), dtype=dtype)
    
        target_y = np.fromfunction(lambda i, j: 
                                   self.pixel_upper_left[1] - 
                                   (i + row_start) * self.pixel_size_y,
                                   (rows, 
                                    cols), dtype=dtype)
        
        if is_single_value:
            #Return single values
            target_x = float(target_x)
            target_y = float(target_y)
        elif is_1d_select:
            #Reshape to 1D array
            target_x = target_x.reshape((target_x.size,))
            target_y = target_y.reshape((target_y.size,))
       
        if cache and data_slice is None:
            # Cache the result if requested
            self.projection_x_coords = target_x
            self.projection_y_coords = target_y

        return target_x, target_y
+        
    @property
    def proj_x_coords(self):
        """Projection x coordinates as a 1D array (taken from the first
        row; the x coordinate depends only on the column index)"""
        return self.get_proj_coords(data_slice=(0, slice(None)))[0]
+
    @property
    def proj_y_coords(self):
        """Projection y coordinates as a 1D array (taken from the first
        column; the y coordinate depends only on the row index)"""
        return self.get_proj_coords(data_slice=(slice(None), 0))[1]
+
+
+    def get_lonlats(self, nprocs=None, data_slice=None, cache=False, dtype=None):
+        """Returns lon and lat arrays of area.
+    
+        :Parameters:        
+        nprocs : int, optional 
+            Number of processor cores to be used.
+            Defaults to the nprocs set when instantiating object
+        data_slice : slice object, optional
+            Calculate only coordinates for specified slice
+        cache : bool, optional
+            Store result the result. Requires data_slice to be None
+
+        :Returns: 
+        (lons, lats) : tuple of numpy arrays
+            Grids of area lons and and lats
+        """ 
+
+        if dtype is None:
+            dtype = self.dtype
+
+        if self.lons is None or self.lats is None:
+            #Data is not cached
+            if nprocs is None:
+                nprocs = self.nprocs
+                
+            #Proj.4 definition of target area projection
+            if nprocs > 1:
+                target_proj = _spatial_mp.Proj_MP(**self.proj_dict)
+            else:
+                target_proj = _spatial_mp.Proj(**self.proj_dict)
+        
+            #Get coordinates of local area as ndarrays
+            target_x, target_y = self.get_proj_coords(data_slice=data_slice, dtype=dtype)
+            
+            #Get corresponding longitude and latitude values
+            lons, lats = target_proj(target_x, target_y, inverse=True,
+                                     nprocs=nprocs)
+            lons = np.asanyarray(lons, dtype=dtype)
+            lats = np.asanyarray(lats, dtype=dtype)
+            
+            if cache and data_slice is None:
+                # Cache the result if requested
+                self.lons = lons
+                self.lats = lats
+
+            #Free memory
+            del(target_x)
+            del(target_y)
+        else:
+            #Data is cached
+            if data_slice is None:
+                #Full slice
+                lons = self.lons
+                lats = self.lats
+            else:
+                lons = self.lons[data_slice]
+                lats = self.lats[data_slice]
+            
+        return lons, lats
+
+    @property
+    def proj4_string(self):
+        """Returns projection definition as Proj.4 string"""
+        
+        items = self.proj_dict.items()
+        return '+' + ' +'.join([ t[0] + '=' + t[1] for t in items])         
+    
+
def _get_slice(segments, shape):
    """Generator for segmenting a 1D or 2D array.

    :Parameters:
    segments : int
        Number of segments to split the first dimension into
    shape : tuple
        Shape of the array to be segmented (1D or 2D)

    :Yields:
    slice for 1D shapes, or (row_slice, slice(None)) for 2D shapes,
    covering successive row ranges of roughly equal length
    """

    if not (1 <= len(shape) <= 2):
        raise ValueError('Cannot segment array of shape: %s' % str(shape))
    else:
        size = shape[0]
        # int() cast: np.ceil returns a float, and float-valued slice
        # bounds are rejected as indices by modern numpy/Python
        slice_length = int(np.ceil(float(size) / segments))
        start_idx = 0
        end_idx = slice_length
        while start_idx < size:
            if len(shape) == 1:
                yield slice(start_idx, end_idx)
            else:
                yield (slice(start_idx, end_idx), slice(None))
            start_idx = end_idx
            end_idx = min(start_idx + slice_length, size)
+
def _flatten_cartesian_coords(cartesian_coords):
    """Return *cartesian_coords* reshaped to (n, 3).

    Arrays that already have at most two dimensions are returned
    unchanged.
    """

    shape = cartesian_coords.shape
    if len(shape) <= 2:
        return cartesian_coords
    return cartesian_coords.reshape(shape[0] * shape[1], 3)
+
def _get_highest_level_class(obj1, obj2):
    """Return the most general (base) class shared by obj1 and obj2.

    :Raises:
    TypeError
        If neither object's class is a subclass of the other's
    """

    # A usable common superclass exists when at least ONE of the two
    # classes is a subclass of the other. The original combined the two
    # checks with 'or', which raised for any two distinct classes --
    # even a proper subclass pair -- and made the elif branch below
    # unreachable.
    if (not issubclass(obj1.__class__, obj2.__class__) and
            not issubclass(obj2.__class__, obj1.__class__)):
        raise TypeError('No common superclass for %s and %s' %
                        (obj1.__class__, obj2.__class__))

    if obj1.__class__ == obj2.__class__:
        klass = obj1.__class__
    elif issubclass(obj1.__class__, obj2.__class__):
        klass = obj2.__class__
    else:
        klass = obj1.__class__
    return klass
+           
+        
diff --git a/pyresample/grid.py b/pyresample/grid.py
new file mode 100644
index 0000000..d93494c
--- /dev/null
+++ b/pyresample/grid.py
@@ -0,0 +1,235 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Resample image from one projection to another 
+using nearest neighbour method in cartesian projection coordinate systems"""
+
+import numpy as np
+
+import geometry
+import _spatial_mp
+
+
def get_image_from_linesample(row_indices, col_indices, source_image,
                              fill_value=0):
    """Samples from image based on index arrays.

    :Parameters:
    row_indices : numpy array
        Row indices. Dimensions must match col_indices
    col_indices : numpy array
        Col indices. Dimensions must match row_indices
    source_image : numpy array
        Source image
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked

    :Returns:
    image_data : numpy array
        Resampled image
    """

    # Flag indices that fall inside the source image
    row_mask = (row_indices >= 0) * (row_indices < source_image.shape[0])
    col_mask = (col_indices >= 0) * (col_indices < source_image.shape[1])

    # Zero out invalid indices so the fancy indexing below cannot fail;
    # the bogus values sampled at index 0 are replaced afterwards
    target_image = source_image[row_indices * row_mask,
                                col_indices * col_mask]

    # Combined validity mask, expanded to the sampled image's rank so it
    # broadcasts over any trailing channel dimensions
    valid_data = row_mask * col_mask
    while valid_data.ndim < target_image.ndim:
        valid_data = np.expand_dims(valid_data, axis=valid_data.ndim)

    if fill_value is not None:
        # Overwrite invalid pixels with the fill value
        target_filled = (target_image * valid_data +
                         (1 - valid_data) * fill_value)
    else:
        # No fill value: mask out the invalid pixels instead
        if np.ma.is_masked(target_image):
            mask = ((1 - valid_data) | target_image.mask)
        else:
            mask = (1 - valid_data)
        target_filled = np.ma.array(target_image, mask=mask)

    return target_filled.astype(target_image.dtype)
+    
def get_linesample(lons, lats, source_area_def, nprocs=1):
    """Returns index row and col arrays for resampling

    :Parameters:
    lons : numpy array
        Lons. Dimensions must match lats
    lats : numpy array
        Lats. Dimensions must match lons
    source_area_def : object
        Source definition as AreaDefinition object
    nprocs : int, optional
        Number of processor cores to be used

    :Returns:
    (row_indices, col_indices) : tuple of numpy arrays
        Arrays for resampling area by array indexing
    """

    # Proj.4 projection of the source area (multiprocessing variant
    # when more than one core is requested)
    if nprocs > 1:
        proj_cls = _spatial_mp.Proj_MP
    else:
        proj_cls = _spatial_mp.Proj
    source_proj = proj_cls(**source_area_def.proj_dict)

    # Cartesian projection coordinates of the lon/lat points
    source_x, source_y = source_proj(lons, lats, nprocs=nprocs)

    # Convert projection coordinates to integer pixel positions relative
    # to the source area's projection center offset
    source_pixel_x = (source_area_def.pixel_offset_x +
                      source_x / source_area_def.pixel_size_x).astype(np.int32)
    source_pixel_y = (source_area_def.pixel_offset_y -
                      source_y / source_area_def.pixel_size_y).astype(np.int32)

    return source_pixel_y, source_pixel_x
+                          
def get_image_from_lonlats(lons, lats, source_area_def, source_image_data,
                           fill_value=0, nprocs=1):
    """Samples from image based on lon lat arrays
    using nearest neighbour method in cartesian projection coordinate systems.

    :Parameters:
    lons : numpy array
        Lons. Dimensions must match lats
    lats : numpy array
        Lats. Dimensions must match lons
    source_area_def : object
        Source definition as AreaDefinition object
    source_image_data : numpy array
        Source image data
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked
    nprocs : int, optional
        Number of processor cores to be used

    :Returns:
    image_data : numpy array
        Resampled image data
    """

    # Map lon/lat points to source pixel indices, then sample the image
    rows, cols = get_linesample(lons, lats, source_area_def, nprocs=nprocs)
    return get_image_from_linesample(rows, cols, source_image_data,
                                     fill_value)
+
def get_resampled_image(target_area_def, source_area_def, source_image_data,
                        fill_value=0, nprocs=1, segments=None):
    """Resamples image using nearest neighbour method in cartesian
    projection coordinate systems.

    :Parameters:
    target_area_def : object
        Target definition as AreaDefinition object
    source_area_def : object
        Source definition as AreaDefinition object
    source_image_data : numpy array
        Source image data
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked
    nprocs : int, optional
        Number of processor cores to be used
    segments : {int, None} optional
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Returns:
    image_data : numpy array
        Resampled image data
    """

    if not isinstance(target_area_def, geometry.AreaDefinition):
        raise TypeError('target_area_def must be of type AreaDefinition')
    if not isinstance(source_area_def, geometry.AreaDefinition):
        raise TypeError('source_area_def must be of type AreaDefinition')
    if not isinstance(source_image_data, (np.ndarray,
                                          np.ma.core.MaskedArray)):
        raise TypeError('source_image must be of type ndarray'
                        ' or a masked array.')

    # Calculate number of segments if needed: target segments of at most
    # ~500 rows to bound the memory used per iteration
    if segments is None:
        rows = target_area_def.y_size
        cut_off = 500
        if rows > cut_off:
            segments = int(rows / cut_off)
        else:
            segments = 1

    if segments <= 1:
        # Resample the full area in one go
        lons, lats = target_area_def.get_lonlats(nprocs)
        return get_image_from_lonlats(lons, lats, source_area_def,
                                      source_image_data, fill_value, nprocs)

    # Resample segment by segment and assemble the output with a single
    # vstack at the end. (The original re-stacked the whole accumulated
    # result on every iteration, copying it O(segments) times.)
    partial_results = []
    for target_slice in geometry._get_slice(segments, target_area_def.shape):
        # Lon/lat grids for this segment only
        lons, lats = target_area_def.get_lonlats(nprocs=nprocs,
                                                 data_slice=target_slice)
        partial_results.append(get_image_from_lonlats(lons, lats,
                                                      source_area_def,
                                                      source_image_data,
                                                      fill_value, nprocs))
    return np.vstack(partial_results)
+
+
+
+
+        
diff --git a/pyresample/image.py b/pyresample/image.py
new file mode 100644
index 0000000..b813874
--- /dev/null
+++ b/pyresample/image.py
@@ -0,0 +1,281 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Handles resampling of images with assigned geometry definitions"""
+
+import numpy as np
+
+import geometry, grid, kd_tree
+
+
class ImageContainer(object):
    """Holds image with geometry definition.
    Allows indexing with linesample arrays.

    :Parameters:
    image_data : numpy array
        Image data
    geo_def : object
        Geometry definition
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked
    nprocs : int, optional
        Number of processor cores to be used

    :Attributes:
    image_data : numpy array
        Image data
    geo_def : object
        Geometry definition
    fill_value : {int, None}
        Resample result fill value
    nprocs : int
        Number of processor cores to be used for geometry operations
    """

    def __init__(self, image_data, geo_def, fill_value=0, nprocs=1):
        if not isinstance(image_data, (np.ndarray, np.ma.core.MaskedArray)):
            raise TypeError('image_data must be either an ndarray'
                            ' or a masked array')
        elif ((image_data.ndim > geo_def.ndim + 1) or
              (image_data.ndim < geo_def.ndim)):
            # The original format string had no conversion specifier,
            # which made this line raise TypeError ('not all arguments
            # converted') instead of the intended ValueError
            raise ValueError(('Unexpected number of dimensions for '
                              'image_data: %s') % image_data.ndim)
        for i, size in enumerate(geo_def.shape):
            if image_data.shape[i] != size:
                raise ValueError(('Size mismatch for image_data. Expected '
                                  'size %s for dimension %s and got %s') %
                                 (size, i, image_data.shape[i]))

        self.shape = geo_def.shape
        self.size = geo_def.size
        self.ndim = geo_def.ndim
        self.image_data = image_data
        # A trailing dimension beyond the geometry's rank holds channels
        if image_data.ndim > geo_def.ndim:
            self.channels = image_data.shape[-1]
        else:
            self.channels = 1
        self.geo_def = geo_def
        self.fill_value = fill_value
        self.nprocs = nprocs

    def __str__(self):
        return 'Image:\n %s' % self.image_data.__str__()

    def __repr__(self):
        return self.image_data.__repr__()

    def resample(self, target_geo_def):
        """Base method for resampling"""

        raise NotImplementedError('Method "resample" is not implemented '
                                  'in class %s' % self.__class__.__name__)

    def get_array_from_linesample(self, row_indices, col_indices):
        """Samples from image based on index arrays.

        :Parameters:
        row_indices : numpy array
            Row indices. Dimensions must match col_indices
        col_indices : numpy array
            Col indices. Dimensions must match row_indices

        :Returns:
        image_data : numpy_array
            Resampled image data
        """

        if self.geo_def.ndim != 2:
            raise TypeError('Resampling from linesamples only makes sense '
                            'on 2D data')

        return grid.get_image_from_linesample(row_indices, col_indices,
                                              self.image_data,
                                              self.fill_value)

    def get_array_from_neighbour_info(self, *args, **kwargs):
        """Base method for resampling from preprocessed data."""

        raise NotImplementedError('Method "get_array_from_neighbour_info" is '
                                  'not implemented in class %s' %
                                  self.__class__.__name__)
+
+
class ImageContainerQuick(ImageContainer):
    """Holds image with area definition.
    Allows quick resampling within area.

    :Parameters:
    image_data : numpy array
        Image data
    geo_def : object
        Area definition as AreaDefinition object
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked
    nprocs : int, optional
        Number of processor cores to be used for geometry operations
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Attributes:
    image_data : numpy array
        Image data
    geo_def : object
        Area definition as AreaDefinition object
    fill_value : {int, None}
        Resample result fill value
    nprocs : int
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling
    """

    def __init__(self, image_data, geo_def, fill_value=0, nprocs=1,
                 segments=None):
        # Quick (projection based) resampling requires a full area
        # definition, not an arbitrary geometry definition
        if not isinstance(geo_def, geometry.AreaDefinition):
            raise TypeError('area_def must be of type '
                            'geometry.AreaDefinition')
        super(ImageContainerQuick, self).__init__(image_data, geo_def,
                                                  fill_value=fill_value,
                                                  nprocs=nprocs)
        self.segments = segments

    def resample(self, target_area_def):
        """Resamples image to area definition using nearest neighbour
        approach in projection coordinates.

        :Parameters:
        target_area_def : object
            Target area definition as AreaDefinition object

        :Returns:
        image_container : object
            ImageContainerQuick object of resampled area
        """

        resampled = grid.get_resampled_image(target_area_def,
                                             self.geo_def,
                                             self.image_data,
                                             fill_value=self.fill_value,
                                             nprocs=self.nprocs,
                                             segments=self.segments)
        return ImageContainerQuick(resampled, target_area_def,
                                   fill_value=self.fill_value,
                                   nprocs=self.nprocs,
                                   segments=self.segments)
+    
+
class ImageContainerNearest(ImageContainer):
    """Holds image with geometry definition.
    Allows nearest neighbour resampling to new geometry definition.

    :Parameters:
    image_data : numpy array
        Image data
    geo_def : object
        Geometry definition
    radius_of_influence : float
        Cut off distance in meters
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time
    fill_value : {int, None} optional
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned
        with undetermined pixels masked
    reduce_data : bool, optional
        Perform coarse data reduction before resampling in order
        to reduce execution time
    nprocs : int, optional
        Number of processor cores to be used for geometry operations
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Attributes:
    image_data : numpy array
        Image data
    geo_def : object
        Geometry definition
    radius_of_influence : float
        Cut off distance in meters
    epsilon : float
        Allowed uncertainty in meters
    fill_value : {int, None}
        Resample result fill value
    reduce_data : bool
        Perform coarse data reduction before resampling
    nprocs : int
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling
    """

    def __init__(self, image_data, geo_def, radius_of_influence, epsilon=0,
                 fill_value=0, reduce_data=True, nprocs=1, segments=None):
        super(ImageContainerNearest, self).__init__(image_data, geo_def,
                                                    fill_value=fill_value,
                                                    nprocs=nprocs)
        # kd-tree search parameters, forwarded to kd_tree.resample_nearest
        self.radius_of_influence = radius_of_influence
        self.epsilon = epsilon
        self.reduce_data = reduce_data
        self.segments = segments

    def resample(self, target_geo_def):
        """Resamples image to area definition using nearest neighbour
        approach

        :Parameters:
        target_geo_def : object
            Target geometry definition

        :Returns:
        image_container : object
            ImageContainerNearest object of resampled geometry
        """

        # kd_tree operates on flattened data: collapse the two geometry
        # dimensions, keeping a trailing channel dimension if present
        if self.image_data.ndim > 2 and self.ndim > 1:
            flat_shape = (self.image_data.shape[0] *
                          self.image_data.shape[1],
                          self.image_data.shape[2])
            flat_data = self.image_data.reshape(flat_shape)
        else:
            flat_data = self.image_data.ravel()

        resampled = kd_tree.resample_nearest(self.geo_def,
                                             flat_data,
                                             target_geo_def,
                                             self.radius_of_influence,
                                             epsilon=self.epsilon,
                                             fill_value=self.fill_value,
                                             nprocs=self.nprocs,
                                             reduce_data=self.reduce_data,
                                             segments=self.segments)
        return ImageContainerNearest(resampled, target_geo_def,
                                     self.radius_of_influence,
                                     epsilon=self.epsilon,
                                     fill_value=self.fill_value,
                                     reduce_data=self.reduce_data,
                                     nprocs=self.nprocs,
                                     segments=self.segments)
\ No newline at end of file
diff --git a/pyresample/kd_tree.py b/pyresample/kd_tree.py
new file mode 100644
index 0000000..fc1027d
--- /dev/null
+++ b/pyresample/kd_tree.py
@@ -0,0 +1,775 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Handles reprojection of geolocated data. Several types of resampling are supported"""
+
+import types
+import warnings
+
+import numpy as np
+
+import geometry
+import data_reduce
+import _spatial_mp
+
#Select the kd-tree backend at import time: prefer pykdtree, fall back
#to scipy.spatial; fail loudly if neither is installed.
kd_tree_name = None
try:
    from pykdtree.kdtree import KDTree
    kd_tree_name = 'pykdtree'
except ImportError:
    try:
        import scipy.spatial as sp
        kd_tree_name = 'scipy.spatial'        
    except ImportError:
        raise ImportError('Either pykdtree or scipy must be available')
+        
class EmptyResult(Exception):
    """Raised when data reduction leaves no valid input points to resample."""
    pass
+
def which_kdtree():
    """Return the name of the kd-tree backend selected at import time
    ('pykdtree' or 'scipy.spatial').
    """

    return kd_tree_name
+
def resample_nearest(source_geo_def, data, target_geo_def,
                     radius_of_influence, epsilon=0,
                     fill_value=0, reduce_data=True, nprocs=1, segments=None):
    """Resamples data using kd-tree nearest neighbour approach

    :Parameters:
    source_geo_def : object
        Geometry definition of source
    data : numpy array               
        1d array of single channel data points or
        (source_size, k) array of k channels of datapoints
    target_geo_def : object
        Geometry definition of target
    radius_of_influence : float 
        Cut off distance in meters
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time
    fill_value : {int, None}, optional 
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned 
        with undetermined pixels masked    
    reduce_data : bool, optional
        Perform initial coarse reduction of source dataset in order
        to reduce execution time
    nprocs : int, optional
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Returns: 
    data : numpy array 
        Source data resampled to target geometry
    """

    #Nearest neighbour is the generic resampler restricted to one neighbour
    options = dict(neighbours=1, epsilon=epsilon, fill_value=fill_value,
                   reduce_data=reduce_data, nprocs=nprocs, segments=segments)
    return _resample(source_geo_def, data, target_geo_def, 'nn',
                     radius_of_influence, **options)
+
def resample_gauss(source_geo_def, data, target_geo_def,
                   radius_of_influence, sigmas, neighbours=8, epsilon=0,
                   fill_value=0, reduce_data=True, nprocs=1, segments=None):
    """Resamples data using kd-tree gaussian weighting neighbour approach

    :Parameters:
    source_geo_def : object
        Geometry definition of source
    data : numpy array               
        1d array of single channel data points or
        (source_size, k) array of k channels of datapoints
    target_geo_def : object
        Geometry definition of target
    radius_of_influence : float 
        Cut off distance in meters
    sigmas : list of floats or float            
        List of sigmas to use for the gauss weighting of each 
        channel 1 to k, w_k = exp(-dist^2/sigma_k^2).
        If only one channel is resampled sigmas is a single float value.
    neighbours : int, optional 
        The number of neighbours to consider for each grid point
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time
    fill_value : {int, None}, optional 
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned 
        with undetermined pixels masked    
    reduce_data : bool, optional
        Perform initial coarse reduction of source dataset in order
        to reduce execution time
    nprocs : int, optional
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Returns: 
    data : numpy array 
        Source data resampled to target geometry

    :Raises:
    TypeError : if any sigma is not a number
    """

    def gauss(sigma):
        #Return gauss function object w(dist) = exp(-dist^2 / sigma^2)
        return lambda r: np.exp(-r**2 / float(sigma)**2)

    #Build correct sigma argument: accept a single number or a sequence
    is_multi_channel = False
    try:
        sigmas.__iter__()
        sigma_list = sigmas
        is_multi_channel = True
    except AttributeError:
        #Was a bare "except:"; only a missing __iter__ means scalar input,
        #anything else should propagate
        sigma_list = [sigmas]

    for sigma in sigma_list:
        if not isinstance(sigma, (long, int, float)):
            raise TypeError('sigma must be number')

    #Get gauss function objects (one weight function per channel)
    if is_multi_channel:
        weight_funcs = map(gauss, sigma_list)
    else:
        weight_funcs = gauss(sigmas)

    return _resample(source_geo_def, data, target_geo_def, 'custom',
                     radius_of_influence, neighbours=neighbours,
                     epsilon=epsilon, weight_funcs=weight_funcs, fill_value=fill_value,
                     reduce_data=reduce_data, nprocs=nprocs, segments=segments)
+
def resample_custom(source_geo_def, data, target_geo_def,
                    radius_of_influence, weight_funcs, neighbours=8,
                    epsilon=0, fill_value=0, reduce_data=True, nprocs=1, 
                    segments=None):
    """Resamples data using kd-tree custom radial weighting neighbour approach

    :Parameters:
    source_geo_def : object
        Geometry definition of source
    data : numpy array               
        1d array of single channel data points or
        (source_size, k) array of k channels of datapoints
    target_geo_def : object
        Geometry definition of target
    radius_of_influence : float 
        Cut off distance in meters
    weight_funcs : list of function objects or function object       
        List of weight functions f(dist) to use for the weighting 
        of each channel 1 to k.
        If only one channel is resampled weight_funcs is
        a single function object.
    neighbours : int, optional 
        The number of neighbours to consider for each grid point
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time
    fill_value : {int, None}, optional 
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned 
        with undetermined pixels masked    
    reduce_data : bool, optional
        Perform initial coarse reduction of source dataset in order
        to reduce execution time
    nprocs : int, optional
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated

    :Returns: 
    data : numpy array 
        Source data resampled to target geometry

    :Raises:
    TypeError : if weight_funcs is neither a function nor an
        iterable of functions
    """

    #Validate weight_funcs explicitly instead of the original bare
    #"except:" which caught and masked its own raised TypeError
    if hasattr(weight_funcs, '__iter__'):
        for weight_func in weight_funcs:
            if not isinstance(weight_func, types.FunctionType):
                raise TypeError('weight_func must be function object')
    elif not isinstance(weight_funcs, types.FunctionType):
        raise TypeError('weight_func must be function object')

    return _resample(source_geo_def, data, target_geo_def, 'custom',
                     radius_of_influence, neighbours=neighbours,
                     epsilon=epsilon, weight_funcs=weight_funcs,
                     fill_value=fill_value, reduce_data=reduce_data,
                     nprocs=nprocs, segments=segments)
+
def _resample(source_geo_def, data, target_geo_def, resample_type,
             radius_of_influence, neighbours=8, epsilon=0, weight_funcs=None,
             fill_value=0, reduce_data=True, nprocs=1, segments=None):
    """Resamples swath using kd-tree approach.

    Runs the two resampling phases back to back: neighbour lookup
    followed by sampling of the data.
    """

    #Phase 1: locate source neighbours for every target point
    neighbour_info = get_neighbour_info(source_geo_def,
                                        target_geo_def,
                                        radius_of_influence,
                                        neighbours=neighbours,
                                        epsilon=epsilon,
                                        reduce_data=reduce_data,
                                        nprocs=nprocs,
                                        segments=segments)
    valid_input_index, valid_output_index, index_array, distance_array = \
        neighbour_info

    #Phase 2: sample the data according to the neighbour info
    return get_sample_from_neighbour_info(resample_type,
                                          target_geo_def.shape,
                                          data, valid_input_index,
                                          valid_output_index, index_array,
                                          distance_array=distance_array,
                                          weight_funcs=weight_funcs,
                                          fill_value=fill_value)
+    
def get_neighbour_info(source_geo_def, target_geo_def, radius_of_influence, 
                       neighbours=8, epsilon=0, reduce_data=True, nprocs=1, segments=None):
    """Returns neighbour info for resampling source to target geometry
    
    :Parameters:
    source_geo_def : object
        Geometry definition of source
    target_geo_def : object
        Geometry definition of target
    radius_of_influence : float 
        Cut off distance in meters
    neighbours : int, optional 
        The number of neigbours to consider for each grid point
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time
    reduce_data : bool, optional
        Perform initial coarse reduction of source dataset in order
        to reduce execution time
    nprocs : int, optional
        Number of processor cores to be used
    segments : {int, None}
        Number of segments to use when resampling.
        If set to None an estimate will be calculated
            
    :Returns:
    (valid_input_index, valid_output_index, 
    index_array, distance_array) : tuple of numpy arrays
        Neighbour resampling info
    """

    if source_geo_def.size < neighbours:
        warnings.warn('Searching for %s neighbours in %s data points' % 
                      (neighbours, source_geo_def.size))

    if segments is None:
        #Estimate the number of segments: cap each kd-tree query at roughly
        #3 million target points to limit memory usage
        cut_off = 3000000
        if target_geo_def.size > cut_off:
            segments = int(target_geo_def.size / cut_off)
        else:
            segments = 1
    
    #Find reduced input coordinate set
    valid_input_index, source_lons, source_lats = _get_valid_input_index(source_geo_def, target_geo_def, 
                                               reduce_data, 
                                               radius_of_influence, 
                                               nprocs=nprocs)    
    
    #Create kd-tree
    try:
        resample_kdtree = _create_resample_kdtree(source_lons, source_lats, 
                                                  valid_input_index, 
                                                  nprocs=nprocs)
    except EmptyResult:
        #Handle if all input data is reduced away
         valid_output_index, index_array, distance_array = \
             _create_empty_info(source_geo_def, target_geo_def, neighbours)
         return (valid_input_index, valid_output_index, index_array, 
                 distance_array)
     
    if segments > 1:
        #Iterate through segments     
        for i, target_slice in enumerate(geometry._get_slice(segments, 
                                                   target_geo_def.shape)):

            #Query on slice of target coordinates
            next_voi, next_ia, next_da = \
                    _query_resample_kdtree(resample_kdtree, source_geo_def, 
                                           target_geo_def, 
                                           radius_of_influence, target_slice,
                                           neighbours=neighbours, 
                                           epsilon=epsilon, 
                                           reduce_data=reduce_data, 
                                           nprocs=nprocs)

            #Build result iteratively
            if i == 0:
                #First iteration
                valid_output_index = next_voi
                index_array = next_ia
                distance_array = next_da
            else:    
                #Concatenate along the target-point axis; 2d when
                #neighbours > 1, 1d otherwise
                valid_output_index = np.append(valid_output_index, next_voi)
                if neighbours > 1:
                    index_array = np.row_stack((index_array, next_ia))
                    distance_array = np.row_stack((distance_array, next_da))
                else:
                    index_array = np.append(index_array, next_ia)
                    distance_array = np.append(distance_array, next_da)        
    else:
        #Query kd-tree with full target coordinate set        
        full_slice = slice(None)
        valid_output_index, index_array, distance_array = \
                    _query_resample_kdtree(resample_kdtree, source_geo_def, 
                                           target_geo_def, 
                                           radius_of_influence, full_slice,
                                           neighbours=neighbours, 
                                           epsilon=epsilon, 
                                           reduce_data=reduce_data, 
                                           nprocs=nprocs)
    
    # Check if number of neighbours is potentially too low: if any point's
    # furthest returned neighbour is still inside the radius, more
    # neighbours than requested may exist there
    if neighbours > 1:
        if not np.all(np.isinf(distance_array[:, -1])):
            warnings.warn(('Possible more than %s neighbours '
                           'within %s m for some data points') % 
                          (neighbours, radius_of_influence))
         
    return valid_input_index, valid_output_index, index_array, distance_array           
+
+def _get_valid_input_index(source_geo_def, target_geo_def, reduce_data, 
+                           radius_of_influence, nprocs=1):
+    """Find indices of reduced inputput data"""
+    
+    source_lons, source_lats = source_geo_def.get_lonlats(nprocs=nprocs)
+    source_lons = source_lons.ravel()
+    source_lats = source_lats.ravel()
+    
+    if source_lons.size == 0 or source_lats.size == 0:
+        raise ValueError('Cannot resample empty data set')
+    elif source_lons.size != source_lats.size or \
+            source_lons.shape != source_lats.shape:
+        raise ValueError('Mismatch between lons and lats')
+    
+    #Remove illegal values
+    valid_data = ((source_lons >= -180) & (source_lons <= 180) & 
+                  (source_lats <= 90) & (source_lats >= -90))
+    valid_input_index = np.ones(source_geo_def.size, dtype=np.bool)
+    
+    if reduce_data:
+        #Reduce dataset 
+        if (isinstance(source_geo_def, geometry.CoordinateDefinition) and 
+            isinstance(target_geo_def, (geometry.GridDefinition, 
+                                       geometry.AreaDefinition))) or \
+           (isinstance(source_geo_def, (geometry.GridDefinition, 
+                                        geometry.AreaDefinition)) and
+            isinstance(target_geo_def, (geometry.GridDefinition, 
+                                        geometry.AreaDefinition))):
+            #Resampling from swath to grid or from grid to grid
+            lonlat_boundary = target_geo_def.get_boundary_lonlats()
+            valid_input_index = \
+                data_reduce.get_valid_index_from_lonlat_boundaries(
+                                            lonlat_boundary[0],
+                                            lonlat_boundary[1], 
+                                            source_lons, source_lats, 
+                                            radius_of_influence)
+    
+    #Combine reduced and legal values
+    valid_input_index = (valid_data & valid_input_index)
+    
+    
+    if(isinstance(valid_input_index, np.ma.core.MaskedArray)):
+        #Make sure valid_input_index is not a masked array
+        valid_input_index = valid_input_index.filled(False)
+    
+    return valid_input_index, source_lons, source_lats
+
+def _get_valid_output_index(source_geo_def, target_geo_def, target_lons, 
+                            target_lats, reduce_data, radius_of_influence):
+    """Find indices of reduced output data"""
+    
+    valid_output_index = np.ones(target_lons.size, dtype=np.bool)
+    
+    if reduce_data:
+        if isinstance(source_geo_def, (geometry.GridDefinition, 
+                                         geometry.AreaDefinition)) and \
+             isinstance(target_geo_def, geometry.CoordinateDefinition):
+            #Resampling from grid to swath
+            lonlat_boundary = source_geo_def.get_boundary_lonlats()
+            valid_output_index = \
+                data_reduce.get_valid_index_from_lonlat_boundaries(
+                                            lonlat_boundary[0],
+                                            lonlat_boundary[1], 
+                                            target_lons, 
+                                            target_lats, 
+                                            radius_of_influence)
+            valid_output_index = valid_output_index.astype(np.bool)
+            
+    #Remove illegal values
+    valid_out = ((target_lons >= -180) & (target_lons <= 180) & 
+                  (target_lats <= 90) & (target_lats >= -90))
+    
+    #Combine reduced and legal values
+    valid_output_index = (valid_output_index & valid_out)
+    
+    return valid_output_index
+        
def _create_resample_kdtree(source_lons, source_lats, valid_input_index, nprocs=1):
    """Set up kd tree on input

    :Parameters:
    source_lons : numpy array
        1d array of source longitudes
    source_lats : numpy array
        1d array of source latitudes
    valid_input_index : numpy array
        Boolean index selecting the points to include in the tree
    nprocs : int, optional
        Number of processor cores to be used

    :Returns:
    resample_kdtree : object
        kd-tree built on the cartesian coordinates of the valid points

    :Raises:
    EmptyResult : if no valid input points remain after reduction
    """
    #(A stale block of commented-out code from an earlier signature,
    #kept as a discarded string literal, has been removed here.)

    #Keep only the points flagged valid by _get_valid_input_index
    source_lons_valid = source_lons[valid_input_index]
    source_lats_valid = source_lats[valid_input_index]

    if nprocs > 1:
        cartesian = _spatial_mp.Cartesian_MP(nprocs)
    else:
        cartesian = _spatial_mp.Cartesian()

    input_coords = cartesian.transform_lonlats(source_lons_valid, source_lats_valid)

    if input_coords.size == 0:
        raise EmptyResult('No valid data points in input data')

    #Build kd-tree on input using the backend selected at import time
    if kd_tree_name == 'pykdtree':
        resample_kdtree = KDTree(input_coords)
    elif nprocs > 1:        
        resample_kdtree = _spatial_mp.cKDTree_MP(input_coords,
                                                 nprocs=nprocs)
    else:
        resample_kdtree = sp.cKDTree(input_coords)

    return resample_kdtree
+
def _query_resample_kdtree(resample_kdtree, source_geo_def, target_geo_def, 
                        radius_of_influence, data_slice,
                       neighbours=8, epsilon=0, reduce_data=True, nprocs=1):    
    """Query kd-tree on slice of target coordinates.

    Returns (valid_output_index, index_array, distance_array) for the
    target points selected by data_slice.
    """

    #Check validity of input    
    if not isinstance(target_geo_def, geometry.BaseDefinition):
        raise TypeError('target_geo_def must be of geometry type')    
    elif not isinstance(radius_of_influence, (long, int, float)):
        raise TypeError('radius_of_influence must be number')
    elif not isinstance(neighbours, int):
        raise TypeError('neighbours must be integer')
    elif not isinstance(epsilon, (long, int, float)):
        raise TypeError('epsilon must be number')
    
    #Get sliced target coordinates
    target_lons, target_lats = target_geo_def.get_lonlats(nprocs=nprocs, 
                                                           data_slice=data_slice, dtype=source_geo_def.dtype)
    
    #Find indices of reduced target coordinates
    valid_output_index = _get_valid_output_index(source_geo_def, 
                                                 target_geo_def, 
                                                 target_lons.ravel(), 
                                                 target_lats.ravel(), 
                                                 reduce_data, 
                                                 radius_of_influence)

    #Get cartesian target coordinates and select reduced set
    if nprocs > 1:
        cartesian = _spatial_mp.Cartesian_MP(nprocs)
    else:
        cartesian = _spatial_mp.Cartesian()
        
    target_lons_valid = target_lons.ravel()[valid_output_index] 
    target_lats_valid = target_lats.ravel()[valid_output_index]
    
    output_coords = cartesian.transform_lonlats(target_lons_valid, target_lats_valid) 
    
    #Query kd-tree; points beyond radius_of_influence get distance inf
    #and index equal to the tree size
    distance_array, index_array = resample_kdtree.query(output_coords, 
                                                        k=neighbours,
                                                        eps=epsilon,
                                                        distance_upper_bound=
                                                        radius_of_influence)
       
    return valid_output_index, index_array, distance_array
+
+def _create_empty_info(source_geo_def, target_geo_def, neighbours):
+    """Creates dummy info for empty result set"""
+    
+    valid_output_index = np.ones(target_geo_def.size, dtype=np.bool)
+    if neighbours > 1:
+        index_array = (np.ones((target_geo_def.size, neighbours), 
+                               dtype=np.int32) * source_geo_def.size)
+        distance_array = np.ones((target_geo_def.size, neighbours))
+    else:
+        index_array = (np.ones(target_geo_def.size, dtype=np.int32) * 
+                       source_geo_def.size)
+        distance_array = np.ones(target_geo_def.size)
+        
+    return valid_output_index, index_array, distance_array 
+    
+
def get_sample_from_neighbour_info(resample_type, output_shape, data, 
                                   valid_input_index, valid_output_index, 
                                   index_array, distance_array=None, 
                                   weight_funcs=None, fill_value=0):
    """Resamples swath based on neighbour info
    
    :Parameters:
    resample_type : {'nn', 'custom'}
        'nn': Use nearest neighbour resampling
        'custom': Resample based on weight_funcs
    output_shape : (int, int)
        Shape of output as (rows, cols)
    data : numpy array
        1d array of single channel data points or
        (source_size, k) array of k channels of datapoints
    valid_input_index : numpy array
        valid_input_index from get_neighbour_info
    valid_output_index : numpy array
        valid_output_index from get_neighbour_info
    index_array : numpy array
        index_array from get_neighbour_info
    distance_array : numpy array, optional
        distance_array from get_neighbour_info
        Not needed for 'nn' resample type
    weight_funcs : list of function objects or function object, optional       
        List of weight functions f(dist) to use for the weighting 
        of each channel 1 to k.
        If only one channel is resampled weight_funcs is
        a single function object.
        Must be supplied when using 'custom' resample type
    fill_value : {int, None}, optional 
        Set undetermined pixels to this value.
        If fill_value is None a masked array is returned 
        with undetermined pixels masked
        
    :Returns: 
    data : numpy array 
        Source data resampled to target geometry
    """
    
    #Flatten the geographic dimensions of the input so that axis 0
    #matches valid_input_index
    if data.ndim > 2 and data.shape[0] * data.shape[1] == valid_input_index.size:
        data = data.reshape(data.shape[0] * data.shape[1], data.shape[2])
    elif data.shape[0] != valid_input_index.size:
        data = data.ravel()
    
    if valid_input_index.size != data.shape[0]:
        raise ValueError('Mismatch between geometry and dataset')
    
    is_multi_channel = (data.ndim > 1)
    
    valid_input_size = valid_input_index.sum()
    valid_output_size = valid_output_index.sum()
    
    if valid_input_size == 0 or valid_output_size == 0:
        if is_multi_channel:
            output_shape = list(output_shape)
            output_shape.append(data.shape[1])
            
        #Handle empty result set
        if fill_value is None:
            #Use masked array for fill values
            return np.ma.array(np.zeros(output_shape, data.dtype), 
                               mask=np.ones(output_shape, dtype=np.bool))
        else:
            #Return fill values for all pixels
            return np.ones(output_shape, dtype=data.dtype) * fill_value  
    
    #Get size of output and reduced input
    input_size = valid_input_size
    if len(output_shape) > 1:
        output_size = output_shape[0] * output_shape[1]
    else:
        output_size = output_shape[0]
        
    #Check validity of input
    if not isinstance(data, np.ndarray):
        raise TypeError('data must be numpy array')
    elif valid_input_index.ndim != 1:
        raise TypeError('valid_index must be one dimensional array')
    elif data.shape[0] != valid_input_index.size:
        raise TypeError('Not the same number of datapoints in '
                        'valid_input_index and data')
    
    valid_types = ('nn', 'custom')
    if not resample_type in valid_types:
        raise TypeError('Invalid resampling type: %s' % resample_type)
    
    if resample_type == 'custom' and weight_funcs is None:
        raise ValueError('weight_funcs must be supplied when using '
                          'custom resampling')
    
    if not isinstance(fill_value, (long, int, float)) and fill_value is not None:
        raise TypeError('fill_value must be number or None')
    
    if index_array.ndim == 1:
        neighbours = 1
    else:
        neighbours = index_array.shape[1]
        if resample_type == 'nn':
            raise ValueError('index_array contains more neighbours than ' 
                             'just the nearest')
    
    #Reduce data    
    new_data = data[valid_input_index]    
    
    #Nearest neighbour resampling should conserve data type
    #Get data type
    conserve_input_data_type = False
    if resample_type == 'nn':
        conserve_input_data_type = True
        input_data_type = new_data.dtype
    
    #Handle masked array input
    is_masked_data = False
    if np.ma.is_masked(new_data):
        #Add the mask as channels to the dataset; _remask_data splits
        #them out again after resampling
        is_masked_data = True
        new_data = np.column_stack((new_data.data, new_data.mask))
    
    #Prepare weight_funcs argument for handling mask data: the appended
    #mask channels need weight functions too
    if weight_funcs is not None and is_masked_data:
        if is_multi_channel:
            weight_funcs = weight_funcs * 2
        else:
            weight_funcs = (weight_funcs,) * 2
    
    #Handle request for masking instead of using fill values        
    use_masked_fill_value = False
    if fill_value is None:
        use_masked_fill_value = True
        fill_value = _get_fill_mask_value(new_data.dtype)
    
    #Resample based on kd-tree query result
    if resample_type == 'nn' or neighbours == 1:
        #Get nearest neighbour using array indexing
        #index == input_size is the kd-tree marker for "no neighbour
        #within radius"; remap to 0 and overwrite with fill_value after
        index_mask = (index_array == input_size)
        new_index_array = np.where(index_mask, 0, index_array)
        result = new_data[new_index_array]
        result[index_mask] = fill_value
    else:
        #Calculate result using weighting
                
        #Get neighbours and masks of valid indices
        ch_neighbour_list = []
        index_mask_list = []
        for i in range(neighbours):
            index_ni = index_array[:, i].copy()
            index_mask_ni = (index_ni == input_size)
            index_ni[index_mask_ni] = 0
            ch_ni = new_data[index_ni]
            ch_neighbour_list.append(ch_ni) 
            index_mask_list.append(index_mask_ni)
        
        #Calculate weights 
        weight_list = []
        for i in range(neighbours):
            #Set out of bounds distance to 1 in order to avoid numerical Inf
            distance = distance_array[:, i].copy()
            distance[index_mask_list[i]] = 1
            
            if new_data.ndim > 1:
                #Calculate weights for each channel
                num_weights = valid_output_index.sum()
                weights = []
                for j in range(new_data.shape[1]):                    
                    calc_weight = weight_funcs[j](distance)
                    #Use broadcasting to account for constant weight
                    expanded_calc_weight = np.ones(num_weights) * calc_weight
                    weights.append(expanded_calc_weight)
                weight_list.append(np.column_stack(weights))
            else:
                weights = weight_funcs(distance)
                weight_list.append(weights)
                        
        result = 0
        norm = 0
        
        #Calculate result       
        for i in range(neighbours):   
            #Find invalid indices to be masked of from calculation
            if new_data.ndim > 1:
                inv_index_mask = np.expand_dims(np.invert(index_mask_list[i]), axis=1)
            else:
                inv_index_mask = np.invert(index_mask_list[i])
            
            #Aggregate result and norm
            result += inv_index_mask * ch_neighbour_list[i] * weight_list[i]
            norm += inv_index_mask * weight_list[i]
                                
        #Normalize result and set fillvalue; points with no valid
        #neighbour at all (norm == 0) get the fill value
        new_valid_index = (norm > 0)
        result[new_valid_index] /= norm[new_valid_index]
        result[np.invert(new_valid_index)] = fill_value 
    
    #Add fill values for target points that were reduced away
    if new_data.ndim > 1:
        full_result = np.ones((output_size, new_data.shape[1])) * fill_value
    else:
        full_result = np.ones(output_size) * fill_value
    full_result[valid_output_index] = result 
    result = full_result
    
    #Calculate correct output shape    
    if new_data.ndim > 1:
        output_shape = list(output_shape)
        output_shape.append(new_data.shape[1])
    
    #Reshape resampled data to correct shape
    result = result.reshape(output_shape)
    
    #Remap mask channels to create masked output
    if is_masked_data:
        result = _remask_data(result)
        
    #Create masking of fill values
    if use_masked_fill_value:
        result = np.ma.masked_equal(result, fill_value)
        
    #Set output data type to input data type if relevant
    if conserve_input_data_type:
        result = result.astype(input_data_type)        
    return result
+
+def _get_fill_mask_value(data_dtype):
+    """Returns the maximum value of dtype"""
+    
+    if issubclass(data_dtype.type, np.floating):
+        fill_value = np.finfo(data_dtype.type).max
+    elif issubclass(data_dtype.type, np.integer):
+        fill_value = np.iinfo(data_dtype.type).max
+    else:
+        raise TypeError('Type %s is unsupported for masked fill values' %
+                        data_dtype.type)
+    return fill_value
+
+def _remask_data(data):
+    """Interprets half the array as mask for the other half"""
+    
+    channels = data.shape[-1]
+    mask = data[..., (channels // 2):]            
+    #All pixels affected by masked pixels are masked out
+    mask = (mask != 0)
+    data = np.ma.array(data[..., :(channels // 2)], mask=mask)
+    if data.shape[-1] == 1:
+        data = data.reshape(data.shape[:-1])
+    return data
+
diff --git a/pyresample/plot.py b/pyresample/plot.py
new file mode 100644
index 0000000..f034015
--- /dev/null
+++ b/pyresample/plot.py
@@ -0,0 +1,244 @@
+import numpy as np
+
+
def ellps2axis(ellps_name):
    """Get semi-major and semi-minor axis from ellipsis definition

    :Parameters:
    ellps_name : str
        Standard name of ellipsis (case insensitive)

    :Returns:
    (a, b) : semi-major and semi-minor axis in meters

    :Raises:
    ValueError
        If the ellipsis name is not known
    """

    # Axis values taken from the proj.4 built-in ellipsoid table.
    ellps = {'helmert': {'a': 6378200.0, 'b': 6356818.1696278909},
             'intl': {'a': 6378388.0, 'b': 6356911.9461279465},
             'merit': {'a': 6378137.0, 'b': 6356752.2982159676},
             'wgs72': {'a': 6378135.0, 'b': 6356750.5200160937},
             'sphere': {'a': 6370997.0, 'b': 6370997.0},
             'clrk66': {'a': 6378206.4000000004, 'b': 6356583.7999999998},
             'nwl9d': {'a': 6378145.0, 'b': 6356759.7694886839},
             'lerch': {'a': 6378139.0, 'b': 6356754.2915103417},
             'evrstss': {'a': 6377298.5559999999, 'b': 6356097.5503008962},
             'evrst30': {'a': 6377276.3449999997, 'b': 6356075.4131402401},
             'mprts': {'a': 6397300.0, 'b': 6363806.2827225132},
             'krass': {'a': 6378245.0, 'b': 6356863.0187730473},
             'walbeck': {'a': 6376896.0, 'b': 6355834.8466999996},
             'kaula': {'a': 6378163.0, 'b': 6356776.9920869097},
             'wgs66': {'a': 6378145.0, 'b': 6356759.7694886839},
             'evrst56': {'a': 6377301.2429999998, 'b': 6356100.2283681016},
             'new_intl': {'a': 6378157.5, 'b': 6356772.2000000002},
             'airy': {'a': 6377563.3959999997, 'b': 6356256.9100000001},
             'bessel': {'a': 6377397.1550000003, 'b': 6356078.9628181886},
             'seasia': {'a': 6378155.0, 'b': 6356773.3205000004},
             'aust_sa': {'a': 6378160.0, 'b': 6356774.7191953054},
             'wgs84': {'a': 6378137.0, 'b': 6356752.3142451793},
             'hough': {'a': 6378270.0, 'b': 6356794.3434343431},
             'wgs60': {'a': 6378165.0, 'b': 6356783.2869594367},
             'engelis': {'a': 6378136.0499999998, 'b': 6356751.3227215428},
             'apl4.9': {'a': 6378137.0, 'b': 6356751.796311819},
             'andrae': {'a': 6377104.4299999997, 'b': 6355847.4152333336},
             'sgs85': {'a': 6378136.0, 'b': 6356751.301568781},
             'delmbr': {'a': 6376428.0, 'b': 6355957.9261637237},
             'fschr60m': {'a': 6378155.0, 'b': 6356773.3204827355},
             'iau76': {'a': 6378140.0, 'b': 6356755.2881575283},
             'plessis': {'a': 6376523.0, 'b': 6355863.0},
             'cpm': {'a': 6375738.7000000002, 'b': 6356666.221912113},
             'fschr68': {'a': 6378150.0, 'b': 6356768.3372443849},
             'mod_airy': {'a': 6377340.1890000002, 'b': 6356034.4460000005},
             'grs80': {'a': 6378137.0, 'b': 6356752.3141403561},
             'bess_nam': {'a': 6377483.8650000002, 'b': 6356165.3829663256},
             'fschr60': {'a': 6378166.0, 'b': 6356784.2836071067},
             'clrk80': {'a': 6378249.1449999996, 'b': 6356514.9658284895},
             'evrst69': {'a': 6377295.6639999999, 'b': 6356094.6679152036},
             'grs67': {'a': 6378160.0, 'b': 6356774.5160907144},
             'evrst48': {'a': 6377304.0630000001, 'b': 6356103.0389931547}}
    try:
        ellps_axis = ellps[ellps_name.lower()]
        a = ellps_axis['a']
        b = ellps_axis['b']
    # 'except KeyError, e' is Python-2-only syntax and bound an unused
    # name; the bare 'except KeyError' form works on Python 2 and 3.
    except KeyError:
        raise ValueError(('Could not determine semi-major and semi-minor axis '
                         'of specified ellipsis %s') % ellps_name)
    return a, b
+
def area_def2basemap(area_def, **kwargs):
    """Get Basemap object from AreaDefinition

    :Parameters:
    area_def : object
        geometry.AreaDefinition object
    **kwargs: Keyword arguments
        Additional initialization arguments for Basemap

    :Returns:
    bmap : Basemap object
    """

    from mpl_toolkits.basemap import Basemap

    # Determine the ellipsoid: prefer a named 'ellps', then explicit
    # 'a'/'b' axes, finally fall back to WGS84.
    try:
        a, b = ellps2axis(area_def.proj_dict['ellps'])
        rsphere = (a, b)
    except KeyError:
        try:
            a = float(area_def.proj_dict['a'])
            try:
                b = float(area_def.proj_dict['b'])
                rsphere = (a, b)
            except KeyError:
                # Only the semi-major axis given: spherical earth.
                rsphere = a
        except KeyError:
            # Default to WGS84 ellipsoid
            a, b = ellps2axis('wgs84')
            rsphere = (a, b)

    # Add projection specific basemap args to args passed to function
    basemap_args = kwargs
    basemap_args['rsphere'] = rsphere

    # Perspective-type projections take corners in projection
    # coordinates; all others take lon/lat corners.
    if area_def.proj_dict['proj'] in ('ortho', 'geos', 'nsper'):
        (basemap_args['llcrnrx'], basemap_args['llcrnry'],
         basemap_args['urcrnrx'], basemap_args['urcrnry']) = \
            area_def.area_extent
    else:
        (basemap_args['llcrnrlon'], basemap_args['llcrnrlat'],
         basemap_args['urcrnrlon'], basemap_args['urcrnrlat']) = \
            area_def.area_extent_ll

    # Basemap calls the plate carree (eqc) projection 'cyl'.
    if area_def.proj_dict['proj'] == 'eqc':
        basemap_args['projection'] = 'cyl'
    else:
        basemap_args['projection'] = area_def.proj_dict['proj']

    # Forward the optional proj4 angle parameters Basemap understands.
    # (Replaces nine copy-pasted try/except blocks with one loop.)
    for key in ('lon_0', 'lat_0', 'lon_1', 'lat_1', 'lon_2', 'lat_2',
                'lat_ts'):
        try:
            basemap_args[key] = float(area_def.proj_dict[key])
        except KeyError:
            pass

    return Basemap(**basemap_args)
+            
def _get_quicklook(area_def, data, vmin=None, vmax=None,
                   label='Variable (units)', num_meridians=45,
                   num_parallels=10, coast_res='c'):
    """Build the default Basemap quicklook and return the pyplot module.
    """

    if area_def.shape != data.shape:
        raise ValueError('area_def shape %s does not match data shape %s' %
                         (list(area_def.shape), list(data.shape)))
    import matplotlib.pyplot as plt

    bmap = area_def2basemap(area_def, resolution=coast_res)
    bmap.drawcoastlines()
    if num_meridians > 0:
        bmap.drawmeridians(np.arange(-180, 180, num_meridians))
    if num_parallels > 0:
        bmap.drawparallels(np.arange(-90, 90, num_parallels))

    # A fully masked array would render nothing; skip imshow in that case.
    fully_masked = np.ma.isMaskedArray(data) and data.mask.all()
    if not fully_masked:
        col = bmap.imshow(data, origin='upper', vmin=vmin, vmax=vmax)
        plt.colorbar(col, shrink=0.5, pad=0.05).set_label(label)

    return plt
+    
def show_quicklook(area_def, data, vmin=None, vmax=None,
                   label='Variable (units)', num_meridians=45,
                   num_parallels=10, coast_res='c'):
    """Display default quicklook plot on screen (blocks until the plot
    window is closed, then closes the figure).

    :Parameters:
    area_def : object
        geometry.AreaDefinition object
    data : numpy array | numpy masked array
        2D array matching area_def. Use masked array for transparent values
    vmin : float, optional
        Min value for luminescence scaling
    vmax : float, optional
        Max value for luminescence scaling
    label : str, optional
        Label for data
    num_meridians : int, optional
        Number of meridians to plot on the globe
    num_parallels : int, optional
        Number of parallels to plot on the globe
    coast_res : {'c', 'l', 'i', 'h', 'f'}, optional
        Resolution of coastlines

    :Returns:
    None
    """

    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax,
                         label=label, num_meridians=num_meridians,
                         num_parallels=num_parallels, coast_res=coast_res)
    plt.show()
    plt.close()
+    
def save_quicklook(filename, area_def, data, vmin=None, vmax=None,
                   label='Variable (units)', num_meridians=45,
                   num_parallels=10, coast_res='c', backend='AGG'):
    """Save default quicklook plot to file

    :Parameters:
    filename : str
        path to output file
    area_def : object
        geometry.AreaDefinition object
    data : numpy array | numpy masked array
        2D array matching area_def. Use masked array for transparent values
    vmin : float, optional
        Min value for luminescence scaling
    vmax : float, optional
        Max value for luminescence scaling
    label : str, optional
        Label for data
    num_meridians : int, optional
        Number of meridians to plot on the globe
    num_parallels : int, optional
        Number of parallels to plot on the globe
    coast_res : {'c', 'l', 'i', 'h', 'f'}, optional
        Resolution of coastlines
    backend : str, optional
        matplotlib backend to use
    """

    import matplotlib
    # Select a non-interactive backend (AGG by default) before pyplot
    # is imported inside _get_quicklook.
    matplotlib.use(backend, warn=False)
    plt = _get_quicklook(area_def, data, vmin=vmin, vmax=vmax,
                         label=label, num_meridians=num_meridians,
                         num_parallels=num_parallels, coast_res=coast_res)
    plt.savefig(filename, bbox_inches='tight')
    plt.close()
+    
diff --git a/pyresample/spherical_geometry.py b/pyresample/spherical_geometry.py
new file mode 100644
index 0000000..637c9d0
--- /dev/null
+++ b/pyresample/spherical_geometry.py
@@ -0,0 +1,415 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Martin Raspaud
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Classes for spherical geometry operations"""
+
+import math
+import numpy as np
+
+EPSILON = 0.0000001
+
+# FIXME: this has not been tested with R != 1
+
class Coordinate(object):
    """Point on earth in terms of lat and lon.

    Stored both as (lon, lat) and as the cartesian vector
    (x__, y__, z__) on a sphere of radius ``R__``.  Constructed either
    from lon/lat given in degrees, or from cartesian components.
    """
    # Class-level defaults, overwritten per instance in __init__.
    lat = None
    lon = None
    x__ = None
    y__ = None
    z__ = None

    def __init__(self, lon=None, lat=None,
                 x__=None, y__=None, z__=None, R__=1):

        # Sphere radius; only R__ == 1 has been exercised (see FIXME above).
        self.R__ = R__
        if lat is not None and lon is not None:
            if not(-180 <= lon <= 180 and -90 <= lat <= 90):
                raise ValueError('Illegal (lon, lat) coordinates: (%s, %s)'
                                  % (lon, lat))
            # Degrees in, radians stored.
            self.lat = math.radians(lat)
            self.lon = math.radians(lon)
            self._update_cart()
        else:
            self.x__ = x__
            self.y__ = y__
            self.z__ = z__
            self._update_lonlat()

    def _update_cart(self):
        """Convert lon/lat to cartesian coordinates.
        """

        # Unit-sphere conversion; R__ is not applied here.
        self.x__ = math.cos(self.lat) * math.cos(self.lon)
        self.y__ = math.cos(self.lat) * math.sin(self.lon)
        self.z__ = math.sin(self.lat)


    def _update_lonlat(self):
        """Convert cartesian to lon/lat.

        NOTE(review): this stores lat/lon in *degrees*, while __init__
        stores them in *radians*, and __str__/__repr__ apply
        math.degrees() again.  Points built from cartesian components
        therefore carry differently-scaled lat/lon than points built
        from lon/lat — confirm this is the intended upstream behavior.
        """

        self.lat = math.degrees(math.asin(self.z__ / self.R__))
        self.lon = math.degrees(math.atan2(self.y__, self.x__))

    def __ne__(self, other):
        # Returns 0/1 rather than bool (historical style); two points
        # are considered equal when both angles agree within EPSILON.
        if(abs(self.lat - other.lat) < EPSILON and
           abs(self.lon - other.lon) < EPSILON):
            return 0
        else:
            return 1

    def __eq__(self, other):
        return not self.__ne__(other)

    def __str__(self):
        # Assumes lon/lat are stored in radians (see _update_lonlat note).
        return str((math.degrees(self.lon), math.degrees(self.lat)))

    def __repr__(self):
        return str((math.degrees(self.lon), math.degrees(self.lat)))

    def cross2cart(self, point):
        """Compute the cross product, and convert to cartesian coordinates
        (assuming radius 1).

        The trigonometric expansion below is the cross product of the
        two position vectors written directly in terms of the angles.
        """

        lat1 = self.lat
        lon1 = self.lon
        lat2 = point.lat
        lon2 = point.lon

        res = Coordinate(
            x__=(math.sin(lat1 - lat2) * math.sin((lon1 + lon2) / 2) *
                 math.cos((lon1 - lon2) / 2) - math.sin(lat1 + lat2) *
                 math.cos((lon1 + lon2) / 2) * math.sin((lon1 - lon2) / 2)),
            y__=(math.sin(lat1 - lat2) * math.cos((lon1 + lon2) / 2) *
                 math.cos((lon1 - lon2) / 2) + math.sin(lat1 + lat2) *
                 math.sin((lon1 + lon2) / 2) * math.sin((lon1 - lon2) / 2)),
            z__=(math.cos(lat1) * math.cos(lat2) * math.sin(lon1 - lon2)))

        return res

    def distance(self, point):
        """Vincenty formula.

        Returns the central angle (angular distance in radians) between
        the two points, using the numerically stable atan2 form.
        """
        dlambda = self.lon - point.lon
        num = ((math.cos(point.lat) * math.sin(dlambda)) ** 2 +
               (math.cos(self.lat) * math.sin(point.lat) -
                math.sin(self.lat) * math.cos(point.lat) *
                math.cos(dlambda)) ** 2)
        den = (math.sin(self.lat) * math.sin(point.lat) +
               math.cos(self.lat) * math.cos(point.lat) * math.cos(dlambda))

        return math.atan2(math.sqrt(num), den)

    def norm(self):
        """Return the norm of the vector.
        """
        return math.sqrt(self.x__ ** 2 + self.y__ ** 2 + self.z__ ** 2)

    def normalize(self):
        """normalize the vector.

        Mutates the cartesian components in place and returns self
        (lat/lon are NOT recomputed).
        """

        norm = self.norm()
        self.x__ /= norm
        self.y__ /= norm
        self.z__ /= norm

        return self

    def cross(self, point):
        """cross product with another vector.

        The result is built from cartesian components only.
        """
        x__ = self.y__ * point.z__ - self.z__ * point.y__
        y__ = self.z__ * point.x__ - self.x__ * point.z__
        z__ = self.x__ * point.y__ - self.y__ * point.x__

        return Coordinate(x__=x__, y__=y__, z__=z__)

    def dot(self, point):
        """dot product with another vector.
        """
        return (self.x__ * point.x__ +
                self.y__ * point.y__ +
                self.z__ * point.z__)
+
class Arc(object):
    """An arc of the great circle between two points.

    *start* and *end* are Coordinate instances; the arc is understood as
    the shortest great-circle track between them.
    """
    # Endpoints, set in __init__.
    start = None
    end = None

    def __init__(self, start, end):
        self.start, self.end = start, end

    def center_angle(self):
        """Angle of an arc at the center of the sphere.

        NOTE(review): cos(dlat) + cos(dlon) - 1 is not the standard
        spherical law of cosines for the central angle — confirm whether
        this approximation is intentional.
        """
        val = (math.cos(self.start.lat - self.end.lat) +
               math.cos(self.start.lon - self.end.lon) - 1)

        # Clamp into the acos domain to guard against rounding error.
        if val > 1:
            val = 1
        elif val < -1:
            val = -1

        return math.acos(val)

    def __eq__(self, other):
        # Returns 1/0 rather than True/False (historical style).
        if(self.start == other.start and self.end == other.end):
            return 1
        return 0

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return str((str(self.start), str(self.end)))

    def angle(self, other_arc):
        """Oriented angle between two arcs.

        The arcs must share an endpoint; raises ValueError otherwise.
        The sign encodes on which side of this arc the other arc lies.
        """
        # Find the shared vertex a__; b__/c__ are the far ends of self
        # and other_arc respectively.
        if self.start == other_arc.start:
            a__ = self.start
            b__ = self.end
            c__ = other_arc.end
        elif self.start == other_arc.end:
            a__ = self.start
            b__ = self.end
            c__ = other_arc.start
        elif self.end == other_arc.end:
            a__ = self.end
            b__ = self.start
            c__ = other_arc.start
        elif self.end == other_arc.start:
            a__ = self.end
            b__ = self.start
            c__ = other_arc.end
        else:
            raise ValueError("No common point in angle computation.")

        # Normals of the great-circle planes spanned by each arc.
        ua_ = a__.cross(b__)
        ub_ = a__.cross(c__)

        # Clamp near-degenerate values instead of letting acos fail.
        val =  ua_.dot(ub_) / (ua_.norm() * ub_.norm())
        if abs(val - 1) < EPSILON:
            angle = 0
        elif abs(val + 1) < EPSILON:
            angle = math.pi
        else:
            angle = math.acos(val)    

        # Orientation: the sign depends on which side of self's plane
        # the far end of the other arc lies.
        n__ = ua_.normalize()
        if n__.dot(c__) > 0:
            return -angle
        else:
            return angle

    def intersections(self, other_arc):
        """Gives the two intersections of the great circles defined by the
        current arc and *other_arc*.

        NOTE: the longitude normalization below mutates the endpoint
        coordinates of both arcs in place.
        """

        # Normalize so each arc spans less than pi in longitude.
        if self.end.lon - self.start.lon > math.pi:
            self.end.lon -= 2 * math.pi
        if other_arc.end.lon - other_arc.start.lon > math.pi:
            other_arc.end.lon -= 2 * math.pi
        if self.end.lon - self.start.lon < -math.pi:
            self.end.lon += 2 * math.pi
        if other_arc.end.lon - other_arc.start.lon < -math.pi:
            other_arc.end.lon += 2 * math.pi

        # Normals of the two great-circle planes; their cross product
        # points at one of the two (antipodal) intersections.
        ea_ = self.start.cross2cart(self.end).normalize()
        eb_ = other_arc.start.cross2cart(other_arc.end).normalize()

        cross = ea_.cross(eb_)
        lat = math.atan2(cross.z__, math.sqrt(cross.x__ ** 2 + cross.y__ ** 2))
        lon = math.atan2(-cross.y__, cross.x__)

        # Return the intersection and its antipode.
        return (Coordinate(math.degrees(lon), math.degrees(lat)),
                Coordinate(math.degrees(modpi(lon + math.pi)),
                           math.degrees(-lat)))

    def intersects(self, other_arc):
        """Says if two arcs defined by the current arc and the *other_arc*
        intersect. An arc is defined as the shortest tracks between two points.
        """

        return bool(self.intersection(other_arc))

    def intersection(self, other_arc):
        """Says where, if two arcs defined by the current arc and the
        *other_arc* intersect. An arc is defined as the shortest tracks between
        two points.

        Returns the intersection Coordinate, or None if the great-circle
        crossings fall outside either arc segment.
        """

        # A candidate lies on an arc iff the distances to both endpoints
        # add up to the arc length (within EPSILON).
        for i in self.intersections(other_arc):
            a__ = self.start
            b__ = self.end
            c__ = other_arc.start
            d__ = other_arc.end


            ab_ = a__.distance(b__)
            cd_ = c__.distance(d__)

            if(abs(a__.distance(i) + b__.distance(i) - ab_) < EPSILON and
               abs(c__.distance(i) + d__.distance(i) - cd_) < EPSILON):
                return i
        return None
+
def modpi(val):
    """Wrap the angle *val* into the interval [-pi, pi).
    """
    two_pi = 2 * math.pi
    return (val + math.pi) % two_pi - math.pi
+
def get_polygon_area(corners):
    """Get the area of the convex area defined by *corners*.

    The polygon is fanned into triangles from the first corner and the
    spherical excess of each triangle is accumulated (Girard's theorem).
    """
    # We assume the earth is spherical !!!
    # Should be the radius of the earth at the observed position
    radius = 1

    anchor = corners[0]
    total = 0

    for idx in range(1, len(corners) - 1):
        side_a = Arc(anchor, corners[idx])
        side_b = Arc(anchor, corners[idx + 1])
        side_c = Arc(corners[idx], corners[idx + 1])
        # Sum of interior angles minus pi = spherical excess.
        excess = (abs(side_a.angle(side_b)) +
                  abs(side_b.angle(side_c)) +
                  abs(side_c.angle(side_a)))
        total += radius ** 2 * excess - math.pi
    return total
+
def get_intersections(b__, boundaries):
    """Get the intersections of *b__* with *boundaries*.
    Returns both the intersection coordinates and the concerned boundaries.
    """

    points = []
    crossed = []
    for candidate in boundaries:
        hit = b__.intersection(candidate)
        if hit is not None:
            points.append(hit)
            crossed.append(candidate)
    return points, crossed
+    
def get_first_intersection(b__, boundaries):
    """Get the first intersection on *b__* with *boundaries*.

    "First" means closest to the start point of *b__*; returns None when
    there is no intersection at all.
    """
    intersections, _ = get_intersections(b__, boundaries)
    if len(intersections) == 0:
        return None
    order = np.array([b__.start.distance(pt)
                      for pt in intersections]).argsort()
    return intersections[order[0]]
+
def get_next_intersection(p__, b__, boundaries):
    """Get the next intersection from the intersection of arcs *p__* and *b__*
    along segment *b__* with *boundaries*.

    Returns (point, boundary) or (None, None) when there is no further
    intersection beyond *p__* itself.
    """
    # Only consider the part of b__ that lies past p__.
    remainder = Arc(p__, b__.end)
    intersections, bounds = get_intersections(remainder, boundaries)
    order = np.array([b__.start.distance(pt)
                      for pt in intersections]).argsort()
    # Skip the trivial self-intersection at p__ if it comes first.
    if len(intersections) > 0 and intersections[order[0]] != p__:
        return intersections[order[0]], bounds[order[0]]
    elif len(intersections) > 1:
        return intersections[order[1]], bounds[order[1]]
    return None, None
+
def point_inside(point, corners):
    """Is a point inside the 4 corners ? This uses great circle arcs as area
    boundaries.

    The point is inside when, at two opposite corners, the arc towards
    the point lies angularly between the two adjacent boundary arcs.
    """
    side12 = Arc(corners[0], corners[1])
    side23 = Arc(corners[1], corners[2])
    side34 = Arc(corners[2], corners[3])
    side41 = Arc(corners[3], corners[0])

    to_point_at_1 = Arc(corners[1], point)
    to_point_at_3 = Arc(corners[3], point)

    corner1_angle = modpi(side12.angle(side23))
    corner1_to_point = modpi(side12.angle(to_point_at_1))

    corner3_angle = modpi(side34.angle(side41))
    corner3_to_point = modpi(side34.angle(to_point_at_3))

    return (np.sign(corner1_angle) == np.sign(corner1_to_point) and
            abs(corner1_angle) > abs(corner1_to_point) and
            np.sign(corner3_angle) == np.sign(corner3_to_point) and
            abs(corner3_angle) > abs(corner3_to_point))
+
def intersection_polygon(area_corners, segment_corners):
    """Get the intersection polygon between two areas.

    Both areas are quadrilaterals given as lists of four Coordinate
    corners.  Returns the list of intersection-polygon vertices, or None
    when the two areas do not overlap.  NOTE: may reverse
    *segment_corners* in place to align the winding order.
    """
    # Boundary arcs of both quadrilaterals.
    area_boundaries = [Arc(area_corners[0], area_corners[1]),
                       Arc(area_corners[1], area_corners[2]),
                       Arc(area_corners[2], area_corners[3]),
                       Arc(area_corners[3], area_corners[0])]
    segment_boundaries = [Arc(segment_corners[0], segment_corners[1]),
                          Arc(segment_corners[1], segment_corners[2]),
                          Arc(segment_corners[2], segment_corners[3]),
                          Arc(segment_corners[3], segment_corners[0])]

    # Make both polygons wind the same way, reversing the segment corner
    # order if necessary.
    angle1 = area_boundaries[0].angle(area_boundaries[1])
    angle2 = segment_boundaries[0].angle(segment_boundaries[1])
    if np.sign(angle1) != np.sign(angle2):
        segment_corners.reverse()
        segment_boundaries = [Arc(segment_corners[0], segment_corners[1]),
                              Arc(segment_corners[1], segment_corners[2]),
                              Arc(segment_corners[2], segment_corners[3]),
                              Arc(segment_corners[3], segment_corners[0])]
    poly = []

    boundaries = area_boundaries
    other_boundaries = segment_boundaries

    b__ = None

    # Find a starting vertex: either an area corner lying inside the
    # segment, or the first crossing of an area edge with the segment.
    for b__ in boundaries:
        if point_inside(b__.start, segment_corners):
            poly.append(b__.start)
            break
        else:
            inter = get_first_intersection(b__, other_boundaries)
            if inter is not None:
                poly.append(inter)
                break
    if len(poly) == 0:
        # No corner inside and no edge crossing: the areas are disjoint.
        return None
    # Walk along the current boundary; at every crossing switch to the
    # other polygon's boundary, until the walk closes on the start point.
    while len(poly) < 2 or poly[0] != poly[-1]:
        inter, b2_ = get_next_intersection(poly[-1], b__, other_boundaries)
        if inter is None:
            # No more crossings on this edge: take its end point and
            # continue with the next edge of the same polygon.
            poly.append(inter) if False else poly.append(b__.end)
            idx = (boundaries.index(b__) + 1) % len(boundaries)
            b__ = boundaries[idx]
        else:
            poly.append(inter)
            b__ = b2_
            boundaries, other_boundaries = other_boundaries, boundaries
    # The final point repeats the first; drop the duplicate.
    return poly[:-1]
+
+
diff --git a/pyresample/utils.py b/pyresample/utils.py
new file mode 100644
index 0000000..336848c
--- /dev/null
+++ b/pyresample/utils.py
@@ -0,0 +1,297 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Utility functions for pyresample"""
+
+import numpy as np
+from configobj import ConfigObj
+
+import geometry, grid, kd_tree
+import _spatial_mp
+
class AreaNotFound(Exception):
    """Exception raised when a specified area is not found in the file"""
    pass
+
def load_area(area_file_name, *regions):
    """Load area(s) from an area definition file

    :Parameters:
    area_file_name : str
        Path to area definition file
    regions : str argument list
        Regions to parse. If no regions are specified all
        regions in the file are returned

    :Returns:
    area_defs : object or list
        A single AreaDefinition when exactly one area matches,
        otherwise a list of AreaDefinition objects

    :Raises:
    AreaNotFound
        If a specified area name is not found
    """

    parsed = parse_area_file(area_file_name, *regions)
    if len(parsed) == 1:
        return parsed[0]
    return parsed
+
def parse_area_file(area_file_name, *regions):
    """Parse area information from area file

    :Parameters:
    area_file_name : str
        Path to area definition file
    regions : str argument list
        Regions to parse. If no regions are specified all
        regions in the file are returned

    :Returns:
    area_defs : list
        List of AreaDefinition objects

    :Raises:
    AreaNotFound
        If a specified area is not found
    """

    area_list = list(regions)
    select_all_areas = (len(area_list) == 0)
    if select_all_areas:
        area_defs = []
    else:
        area_defs = [None for i in area_list]

    #Extract area from file.  The 'with' block guarantees the file is
    #closed even if parsing raises part-way through (the original code
    #leaked the handle in that case).
    in_area = False
    with open(area_file_name, 'r') as area_file:
        for line in area_file:
            if not in_area:
                if 'REGION' in line:
                    area_id = line.replace('REGION:', ''). \
                                  replace('{', '').strip()
                    if area_id in area_list or select_all_areas:
                        in_area = True
                        area_content = ''
            elif '};' in line:
                #End of an area block: build its AreaDefinition
                in_area = False
                if select_all_areas:
                    area_defs.append(_create_area(area_id, area_content))
                else:
                    area_defs[area_list.index(area_id)] = \
                        _create_area(area_id, area_content)
            else:
                area_content += line

    #Check if all specified areas were found
    if not select_all_areas:
        for i, area in enumerate(area_defs):
            if area is None:
                raise AreaNotFound('Area "%s" not found in file "%s"' %
                                   (area_list[i], area_file_name))
    return area_defs
+
def _create_area(area_id, area_content):
    """Parse one area configuration block into an AreaDefinition.

    *area_content* is the text found between '{' and '};' of a REGION
    block in the area definition file.
    """

    config_obj = area_content.replace('{', '').replace('};', '')
    config_obj = ConfigObj([line.replace(':', '=', 1)
                            for line in config_obj.splitlines()])
    config = config_obj.dict()
    config['REGION'] = area_id

    # ConfigObj yields a list when NAME contained commas and a plain
    # string otherwise.  The __iter__ probe distinguishes the two on
    # Python 2; catch only AttributeError so that real errors (e.g. a
    # missing NAME key) are no longer silently swallowed by a bare
    # except.
    try:
        config['NAME'].__iter__()
        config['NAME'] = ', '.join(config['NAME'])
    except AttributeError:
        config['NAME'] = ''.join(config['NAME'])

    config['XSIZE'] = int(config['XSIZE'])
    config['YSIZE'] = int(config['YSIZE'])

    # Strip the surrounding parentheses of the '(x, y, x, y)' extent.
    config['AREA_EXTENT'][0] = config['AREA_EXTENT'][0].replace('(', '')
    config['AREA_EXTENT'][3] = config['AREA_EXTENT'][3].replace(')', '')
    for i, val in enumerate(config['AREA_EXTENT']):
        config['AREA_EXTENT'][i] = float(val)

    config['PCS_DEF'] = _get_proj4_args(config['PCS_DEF'])

    return geometry.AreaDefinition(config['REGION'], config['NAME'],
                                   config['PCS_ID'], config['PCS_DEF'],
                                   config['XSIZE'], config['YSIZE'],
                                   config['AREA_EXTENT'])
+
def get_area_def(area_id, area_name, proj_id, proj4_args, x_size, y_size,
                 area_extent):
    """Construct AreaDefinition object from arguments

    :Parameters:
    area_id : str
        ID of area
    proj_id : str
        ID of projection
    area_name :str
        Description of area
    proj4_args : list or str
        Proj4 arguments as list of arguments or string
    x_size : int
        Number of pixel in x dimension
    y_size : int
        Number of pixel in y dimension
    area_extent : list
        Area extent as a list of ints (LL_x, LL_y, UR_x, UR_y)

    :Returns:
    area_def : object
        AreaDefinition object
    """

    proj_dict = _get_proj4_args(proj4_args)
    return geometry.AreaDefinition(area_id, area_name, proj_id, proj_dict,
                                   x_size, y_size, area_extent)
+
def generate_quick_linesample_arrays(source_area_def, target_area_def, nprocs=1):
    """Generate linesample arrays for quick grid resampling

    :Parameters:
    source_area_def : object
        Source area definition as AreaDefinition object
    target_area_def : object
        Target area definition as AreaDefinition object
    nprocs : int, optional
        Number of processor cores to be used

    :Returns:
    (row_indices, col_indices) : tuple of numpy arrays
    """
    if not (isinstance(source_area_def, geometry.AreaDefinition) and
            isinstance(target_area_def, geometry.AreaDefinition)):
        raise TypeError('source_area_def and target_area_def must be of type '
                        'geometry.AreaDefinition')

    # Sample the source grid at every target grid point.
    lons, lats = target_area_def.get_lonlats(nprocs)
    row_idx, col_idx = grid.get_linesample(lons, lats, source_area_def,
                                           nprocs=nprocs)

    # Shrink index dtype when the source grid is small enough.
    col_idx = _downcast_index_array(col_idx, source_area_def.shape[1])
    row_idx = _downcast_index_array(row_idx, source_area_def.shape[0])

    return row_idx, col_idx
+
def generate_nearest_neighbour_linesample_arrays(source_area_def, target_area_def,
                                                 radius_of_influence, nprocs=1):
    """Generate linesample arrays for nearest neighbour grid resampling

    :Parameters:
    source_area_def : object
        Source area definition as AreaDefinition object
    target_area_def : object
        Target area definition as AreaDefinition object
    radius_of_influence : float
        Cut off distance in meters
    nprocs : int, optional
        Number of processor cores to be used

    :Returns:
    (row_indices, col_indices) : tuple of numpy arrays
    """

    if not (isinstance(source_area_def, geometry.AreaDefinition) and
            isinstance(target_area_def, geometry.AreaDefinition)):
        raise TypeError('source_area_def and target_area_def must be of type '
                        'geometry.AreaDefinition')

    (valid_input_index, valid_output_index,
     index_array, distance_array) = kd_tree.get_neighbour_info(
        source_area_def, target_area_def, radius_of_influence,
        neighbours=1, nprocs=nprocs)

    # Row and column number of every source pixel, flattened.
    rows = np.fromfunction(lambda i, j: i, source_area_def.shape,
                           dtype=np.int32).ravel()
    cols = np.fromfunction(lambda i, j: j, source_area_def.shape,
                           dtype=np.int32).ravel()

    # Keep only the pixels that took part in the resampling.
    rows_valid = rows[valid_input_index]
    cols_valid = cols[valid_input_index]

    # kd_tree flags 'no neighbour found' with an index one past the end.
    # Temporarily map those to 0 so the fancy indexing stays in bounds...
    no_neighbour = (index_array == valid_input_index.sum())
    index_array[no_neighbour] = 0
    row_sample = rows_valid[index_array]
    col_sample = cols_valid[index_array]
    # ...then mark the affected samples with -1.
    row_sample[no_neighbour] = -1
    col_sample[no_neighbour] = -1

    # Reshape to the target grid and downcast the dtype where possible.
    row_indices = _downcast_index_array(
        row_sample.reshape(target_area_def.shape), source_area_def.shape[0])
    col_indices = _downcast_index_array(
        col_sample.reshape(target_area_def.shape), source_area_def.shape[1])

    return row_indices, col_indices
+
def fwhm2sigma(fwhm):
    """Calculate sigma for gauss function from FWHM (3 dB level)

    :Parameters:
    fwhm : float
        FWHM of gauss function (3 dB level of beam footprint)

    :Returns:
    sigma : float
        sigma for use in resampling gauss function

    """

    # pyresample weights with w = exp(-dist**2 / sigma**2), so the
    # half-power width satisfies fwhm = 2 * sigma * sqrt(ln(2)).
    return 0.5 * fwhm / np.sqrt(np.log(2))
+    
def _get_proj4_args(proj4_args):
    """Create a proj4 argument dict from a '+key=value' string or a list
    of 'key=value' items.
    """

    if isinstance(proj4_args, str):
        # Drop the leading '+' markers and split into key=value tokens.
        tokens = proj4_args.replace('+', '').split()
        return ConfigObj(tokens).dict()
    return ConfigObj(proj4_args).dict()
+
+
+def _downcast_index_array(index_array, size):
+    """Try to downcast array to uint16
+    """
+    
+    if size <= np.iinfo(np.uint16).max:
+        mask = (index_array < 0) | (index_array >= size)
+        index_array[mask] = size
+        index_array = index_array.astype(np.uint16)
+    return index_array
+        
diff --git a/pyresample/version.py b/pyresample/version.py
new file mode 100644
index 0000000..c77622e
--- /dev/null
+++ b/pyresample/version.py
@@ -0,0 +1,18 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2010  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+__version__ = '1.0.0'
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..861a9f5
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build = 
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..6b27c76
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,54 @@
+#pyresample, Resampling of remote sensing image data in python
+# 
+#Copyright (C) 2012  Esben S. Nielsen
+#
+#This program is free software: you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation, either version 3 of the License, or
+#(at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+from setuptools import setup
+import sys
+import os
+
+import imp
+
+version = imp.load_source('pyresample.version', 'pyresample/version.py')
+
+requirements = ['pyproj', 'numpy', 'configobj']
+extras_require = {'pykdtree': ['pykdtree'], 'numexpr': ['numexpr']}
+
+if sys.version_info < (2, 6):
+    # multiprocessing is not in the standard library
+    requirements.append('multiprocessing')
+
+setup(name='pyresample',
+      version=version.__version__,
+      description='Resampling of remote sensing data in Python',
+      author='Esben S. Nielsen',
+      author_email='esn at dmi.dk',
+      package_dir = {'pyresample': 'pyresample'},
+      packages = ['pyresample'],      
+      install_requires=requirements,
+      extras_require = extras_require,
+      zip_safe = False,
+      classifiers=[
+      'Development Status :: 5 - Production/Stable',
+      'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
+      'Programming Language :: Python',
+      'Operating System :: OS Independent',
+      'Intended Audience :: Science/Research',
+      'Topic :: Scientific/Engineering'
+      ]
+      )
+
+
+
diff --git a/test/test_files/areas.cfg b/test/test_files/areas.cfg
new file mode 100644
index 0000000..3c6ef3c
--- /dev/null
+++ b/test/test_files/areas.cfg
@@ -0,0 +1,35 @@
+REGION: ease_sh {
+        NAME:           Antarctic EASE grid
+        PCS_ID:         ease_sh
+        PCS_DEF:        proj=laea, lat_0=-90, lon_0=0, a=6371228.0, units=m
+        XSIZE:          425
+        YSIZE:          425
+        AREA_EXTENT:    (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+};
+
+REGION: ease_nh {
+        NAME:           Arctic EASE grid
+        PCS_ID:         ease_nh
+        PCS_DEF:        proj=laea, lat_0=90, lon_0=0, a=6371228.0, units=m
+        XSIZE:          425
+        YSIZE:          425
+        AREA_EXTENT:    (-5326849.0625,-5326849.0625,5326849.0625,5326849.0625)
+};
+
+REGION: pc_world {
+  NAME:    Plate Carree world map
+  PCS_ID:  pc_world
+  PCS_DEF: proj=eqc
+  XSIZE: 640
+  YSIZE: 480
+  AREA_EXTENT:  (-20037508.342789244, -10018754.171394622, 20037508.342789244, 10018754.171394622)
+};
+
+REGION: ortho {
+  NAME:    Ortho globe
+  PCS_ID:  ortho_globe
+  PCS_DEF: proj=ortho, a=6370997.0, lon_0=40, lat_0=-40
+  XSIZE: 640
+  YSIZE: 480
+  AREA_EXTENT:  (-10000000, -10000000, 10000000, 10000000)
+};
diff --git a/test/test_files/mask_grid.dat b/test/test_files/mask_grid.dat
new file mode 100644
index 0000000..d390af5
--- /dev/null
+++ b/test/test_files/mask_grid.dat
@@ -0,0 +1 @@
+0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_data.dat b/test/test_files/mask_test_data.dat
new file mode 100644
index 0000000..1b6f0f7
--- /dev/null
+++ b/test/test_files/mask_test_data.dat
@@ -0,0 +1 @@
+0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_fill_value.dat b/test/test_files/mask_test_fill_value.dat
new file mode 100644
index 0000000..5f347c4
--- /dev/null
+++ b/test/test_files/mask_test_fill_value.dat
@@ -0,0 +1 @@
+1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_full_fill.dat b/test/test_files/mask_test_full_fill.dat
new file mode 100644
index 0000000..91d6c53
--- /dev/null
+++ b/test/test_files/mask_test_full_fill.dat
@@ -0,0 +1 @@
+1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_full_fill_multi.dat b/test/test_files/mask_test_full_fill_multi.dat
new file mode 100644
index 0000000..2ff7d90
--- /dev/null
+++ b/test/test_files/mask_test_full_fill_multi.dat
@@ -0,0 +1 @@
+1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_mask.dat b/test/test_files/mask_test_mask.dat
new file mode 100644
index 0000000..bf40e6e
--- /dev/null
+++ b/test/test_files/mask_test_mask.dat
@@ -0,0 +1 @@
+0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_nearest_data.dat b/test/test_files/mask_test_nearest_data.dat
new file mode 100644
index 0000000..7a8368c
--- /dev/null
+++ b/test/test_files/mask_test_nearest_data.dat
@@ -0,0 +1 @@
+0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0  [...]
\ No newline at end of file
diff --git a/test/test_files/mask_test_nearest_mask.dat b/test/test_files/mask_test_nearest_mask.dat
new file mode 100644
index 0000000..1e89e38
--- /dev/null
+++ b/test/test_files/mask_test_nearest_mask.dat
@@ -0,0 +1 @@
+0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0  [...]
\ No newline at end of file
diff --git a/test/test_files/quick_mask_test.dat b/test/test_files/quick_mask_test.dat
new file mode 100644
index 0000000..7d5952c
--- /dev/null
+++ b/test/test_files/quick_mask_test.dat
@@ -0,0 +1 @@
+1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1  [...]
\ No newline at end of file
diff --git a/test/test_files/ssmis_swath.npz b/test/test_files/ssmis_swath.npz
new file mode 100644
index 0000000..cb62620
Binary files /dev/null and b/test/test_files/ssmis_swath.npz differ
diff --git a/test/test_geometry.py b/test/test_geometry.py
new file mode 100644
index 0000000..e806a32
--- /dev/null
+++ b/test/test_geometry.py
@@ -0,0 +1,513 @@
+from __future__ import with_statement
+
+import unittest
+
+import numpy as np
+
+from pyresample import geometry, geo_filter
+
+
+def tmp(f):
+    f.tmp = True
+    return f
+
+class Test(unittest.TestCase):
+    """Unit testing the geometry and geo_filter modules"""
+    def assert_raises(self, exception, call_able, *args):
+        """assertRaises() has changed from py2.6 to 2.7! Here is an attempt to
+        cover both"""
+        import sys
+        if sys.version_info < (2, 7):
+            self.assertRaises(exception, call_able, *args)
+        else:
+            with self.assertRaises(exception):
+                call_able(*args)
+
+           
+    def test_lonlat_precomp(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        lons, lats = area_def.get_lonlats()
+        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001],
+                                     lons=lons, lats=lats)
+        lon, lat = area_def.get_lonlat(400, 400)
+        self.assertAlmostEqual(lon, 5.5028467120975835, 
+                                   msg='lon retrieval from precomputated grid failed')
+        self.assertAlmostEqual(lat, 52.566998432390619, 
+                                   msg='lat retrieval from precomputated grid failed')
+        
+    @tmp
+    def test_cartesian(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        cart_coords = area_def.get_cartesian_coords()
+        exp = 5872039989466.8457031
+        self.assertTrue((cart_coords.sum() - exp) < 1e-7 * exp, 
+                        msg='Calculation of cartesian coordinates failed')   
+    
+    def test_swath(self):
+        lons1 = np.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats1 = np.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        
+        swath_def = geometry.SwathDefinition(lons1, lats1)
+        
+        lons2, lats2 = swath_def.get_lonlats()
+        
+        self.failIf(id(lons1) != id(lons2) or id(lats1) != id(lats2), 
+                    msg='Caching of swath coordinates failed')
+               
+    def test_area_equal(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        area_def2 = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        self.failIf(area_def != area_def2, 'area_defs are not equal as expected')
+         
+    def test_not_area_equal(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+       
+        msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
+                                   'msg_full',
+                                   {'a': '6378169.0',
+                                    'b': '6356584.0',
+                                    'h': '35785831.0',
+                                    'lon_0': '0',
+                                    'proj': 'geos'},
+                                    3712,
+                                    3712,
+                                    [-5568742.4000000004,
+                                    -5568742.4000000004,
+                                    5568742.4000000004,
+                                    5568742.4000000004]
+                                    )
+        self.failIf(area_def == msg_area, 'area_defs are not expected to be equal')
+       
+    def test_swath_equal(self):
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats = np.array([65.9, 65.86, 65.82, 65.78])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        swath_def2 = geometry.SwathDefinition(lons, lats)
+        self.failIf(swath_def != swath_def2, 'swath_defs are not equal as expected')
+        
+    def test_swath_not_equal(self):
+        lats1 = np.array([65.9, 65.86, 65.82, 65.78])
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats2 = np.array([65.91, 65.85, 65.80, 65.75])
+        swath_def = geometry.SwathDefinition(lons, lats1)
+        swath_def2 = geometry.SwathDefinition(lons, lats2)
+        self.failIf(swath_def == swath_def2, 'swath_defs are not expected to be equal')
+
+    def test_swath_equal_area(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        
+        swath_def = geometry.SwathDefinition(*area_def.get_lonlats())
+
+        self.failIf(swath_def != area_def, "swath_def and area_def should be equal")
+
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+
+        self.failIf(area_def != swath_def, "swath_def and area_def should be equal")
+
+    def test_swath_not_equal_area(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        
+        lons = np.array([1.2, 1.3, 1.4, 1.5])
+        lats = np.array([65.9, 65.86, 65.82, 65.78])
+        swath_def = geometry.SwathDefinition(lons, lats)
+
+        self.failIf(swath_def == area_def, "swath_def and area_def should be different")
+
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    800,
+                                    800,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+
+        self.failIf(area_def == swath_def, "swath_def and area_def should be different")
+        
+    def test_concat_1d(self):
+        lons1 = np.array([1, 2, 3])
+        lats1 = np.array([1, 2, 3])
+        lons2 = np.array([4, 5, 6])
+        lats2 = np.array([4, 5, 6])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def_concat = swath_def1.concatenate(swath_def2) 
+        expected = np.array([1, 2, 3, 4, 5, 6])
+        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and 
+                        np.array_equal(swath_def_concat.lons, expected), 
+                        'Failed to concatenate 1D swaths')
+
+    def test_concat_2d(self):
+        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
+        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def_concat = swath_def1.concatenate(swath_def2) 
+        expected = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
+        self.assertTrue(np.array_equal(swath_def_concat.lons, expected) and 
+                        np.array_equal(swath_def_concat.lons, expected), 
+                        'Failed to concatenate 2D swaths')
+        
+    def test_append_1d(self):
+        lons1 = np.array([1, 2, 3])
+        lats1 = np.array([1, 2, 3])
+        lons2 = np.array([4, 5, 6])
+        lats2 = np.array([4, 5, 6])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def1.append(swath_def2) 
+        expected = np.array([1, 2, 3, 4, 5, 6])
+        self.assertTrue(np.array_equal(swath_def1.lons, expected) and 
+                        np.array_equal(swath_def1.lons, expected), 
+                        'Failed to append 1D swaths')
+
+    def test_append_2d(self):
+        lons1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lats1 = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7]])
+        lons2 = np.array([[4, 5, 6], [6, 7, 8]])
+        lats2 = np.array([[4, 5, 6], [6, 7, 8]])
+        swath_def1 = geometry.SwathDefinition(lons1, lats1)
+        swath_def2 = geometry.SwathDefinition(lons2, lats2)
+        swath_def1.append(swath_def2) 
+        expected = np.array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [4, 5, 6], [6, 7, 8]])
+        self.assertTrue(np.array_equal(swath_def1.lons, expected) and 
+                        np.array_equal(swath_def1.lons, expected), 
+                        'Failed to append 2D swaths')
+
+    def test_grid_filter_valid(self):
+        lons = np.array([-170, -30, 30, 170])
+        lats = np.array([20, -40, 50, -80])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
+                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
+                                              8, 8,
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter)
+        valid_index = grid_filter.get_valid_index(swath_def)        
+        expected = np.array([1, 0, 0, 1])
+        self.assertTrue(np.array_equal(valid_index, expected), 'Failed to find grid filter')
+    
+    def test_grid_filter(self):
+        lons = np.array([-170, -30, 30, 170])
+        lats = np.array([20, -40, 50, -80])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        data = np.array([1, 2, 3, 4])
+        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
+                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
+                                              8, 8,                                               
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter)
+        swath_def_f, data_f = grid_filter.filter(swath_def, data)
+        expected = np.array([1, 4])
+        self.assertTrue(np.array_equal(data_f, expected), 'Failed grid filtering data')
+        expected_lons = np.array([-170, 170])
+        expected_lats = np.array([20, -80])
+        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons) 
+                        and np.array_equal(swath_def_f.lats[:], expected_lats), 
+                        'Failed finding grid filtering lon lats')
+        
+    def test_grid_filter2D(self):
+        lons = np.array([[-170, -30, 30, 170],
+                         [-170, -30, 30, 170]])
+        lats = np.array([[20, -40, 50, -80],
+                         [25, -35, 55, -75]])
+        swath_def = geometry.SwathDefinition(lons, lats)
+        data1 = np.ones((2, 4))
+        data2 = np.ones((2, 4)) * 2
+        data3 = np.ones((2, 4)) * 3
+        data = np.dstack((data1, data2, data3))
+        filter_area = geometry.AreaDefinition('test', 'test', 'test', 
+                                              {'proj' : 'eqc', 'lon_0' : 0.0, 'lat_0' : 0.0},
+                                              8, 8,                                               
+                                              (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+        filter = np.array([[1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [1, 1, 1, 1, 0, 0, 0, 0],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           [0, 0, 0, 0, 1, 1, 1, 1],
+                           ])
+        grid_filter = geo_filter.GridFilter(filter_area, filter, nprocs=2)
+        swath_def_f, data_f = grid_filter.filter(swath_def, data)
+        expected = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])        
+        self.assertTrue(np.array_equal(data_f, expected), 'Failed 2D grid filtering data')
+        expected_lons = np.array([-170, 170, -170, 170])
+        expected_lats = np.array([20, -80, 25, -75])
+        self.assertTrue(np.array_equal(swath_def_f.lons[:], expected_lons) 
+                        and np.array_equal(swath_def_f.lats[:], expected_lats), 
+                        'Failed finding 2D grid filtering lon lats')
+    
+    def test_boundary(self):
+        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
+                                   {'a': '6378144.0',
+                                    'b': '6356759.0',
+                                    'lat_0': '50.00',
+                                    'lat_ts': '50.00',
+                                    'lon_0': '8.00',
+                                    'proj': 'stere'}, 
+                                    10,
+                                    10,
+                                    [-1370912.72,
+                                     -909968.64000000001,
+                                     1029087.28,
+                                     1490031.3600000001])
+        proj_x_boundary, proj_y_boundary = area_def.proj_x_coords, area_def.proj_y_coords
+        expected_x = np.array([-1250912.72, -1010912.72, -770912.72, 
+                             -530912.72, -290912.72, -50912.72, 189087.28, 
+                             429087.28, 669087.28, 909087.28])
+        expected_y = np.array([1370031.36, 1130031.36, 890031.36, 650031.36, 
+                               410031.36, 170031.36, -69968.64, -309968.64,  
+                               -549968.64, -789968.64])
+        self.assertTrue(np.allclose(proj_x_boundary, expected_x), 
+                        'Failed to find projection x coords')
+        self.assertTrue(np.allclose(proj_y_boundary, expected_y), 
+                        'Failed to find projection y coords')
+
+   
    def test_area_extent_ll(self):
        # area_extent_ll is the area extent expressed in lon/lat degrees.
        # The regression check uses the sum of the four corner values
        # (to 5 decimal places) rather than comparing corner by corner.
        area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
                                   {'a': '6378144.0',
                                    'b': '6356759.0',
                                    'lat_0': '50.00',
                                    'lat_ts': '50.00',
                                    'lon_0': '8.00',
                                    'proj': 'stere'}, 
                                    10,
                                    10,
                                    [-1370912.72,
                                     -909968.64000000001,
                                     1029087.28,
                                     1490031.3600000001])
        self.assertAlmostEqual(sum(area_def.area_extent_ll), 
                                   122.06448093539757, 5, 
                                   'Failed to get lon and lats of area extent')
+    
    @tmp
    def test_latlong_area(self):
        # For a 1-degree global latlong grid the first lon/lat returned by
        # get_lonlats() must be the centre of the upper-left pixel, i.e.
        # half a pixel in from the (-180, 90) corner.
        area_def = geometry.AreaDefinition('', '', '', 
                                   {'proj': 'latlong'}, 
                                    360, 180,
                                    [-180, -90, 180, 90])
        lons, lats = area_def.get_lonlats()
        self.assertEqual(lons[0, 0], -179.5)
        self.assertEqual(lats[0, 0], 89.5)
+        
+
+    def test_get_xy_from_lonlat(self):
+        """Test the function get_xy_from_lonlat"""
+        from pyresample import utils
+        area_id = 'test'
+        area_name = 'Test area with 2x2 pixels'
+        proj_id = 'test'
+        x_size = 2
+        y_size = 2
+        area_extent = [1000000, 0, 1050000, 50000] 
+        proj_dict = {"proj": 'laea', 
+                     'lat_0': '60', 
+                     'lon_0': '0', 
+                     'a': '6371228.0', 'units': 'm'}
+        area_def = utils.get_area_def(area_id, 
+                                      area_name, 
+                                      proj_id, 
+                                      proj_dict, 
+                                      x_size, y_size, 
+                                      area_extent)
+        import pyproj
+        p__ = pyproj.Proj(proj_dict)
+        lon_ul, lat_ul = p__(1000000, 50000, inverse=True)
+        lon_ur, lat_ur = p__(1050000, 50000, inverse=True)
+        lon_ll, lat_ll = p__(1000000, 0, inverse=True)
+        lon_lr, lat_lr = p__(1050000, 0, inverse=True)
+        
+        eps_lonlat = 0.01
+        eps_meters = 100
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ul + eps_lonlat, 
+                                               lat_ul - eps_lonlat)
+        x_expect, y_expect = 0, 0
+        self.assertEqual(x__, x_expect)
+        self.assertEqual(y__, y_expect)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ur - eps_lonlat, 
+                                               lat_ur - eps_lonlat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 0)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_ll + eps_lonlat, 
+                                               lat_ll + eps_lonlat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 1)
+        x__, y__ = area_def.get_xy_from_lonlat(lon_lr - eps_lonlat, 
+                                               lat_lr + eps_lonlat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 - eps_meters, 25000 - eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 + eps_meters, 25000 - eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 1)
+
+        lon, lat = p__(1025000 - eps_meters, 25000 + eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 0)
+        self.assertEqual(y__, 0)
+
+        lon, lat = p__(1025000 + eps_meters, 25000 + eps_meters, inverse=True)
+        x__, y__ = area_def.get_xy_from_lonlat(lon, lat)
+        self.assertEqual(x__, 1)
+        self.assertEqual(y__, 0)
+
+        lon, lat = p__(999000, -10, inverse=True)
+        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, lon, lat)
+        self.assert_raises(ValueError, area_def.get_xy_from_lonlat, 0., 0.)
+
+        # Test getting arrays back:
+        lons = [lon_ll + eps_lonlat, lon_ur - eps_lonlat]
+        lats = [lat_ll + eps_lonlat, lat_ur - eps_lonlat]
+        x__, y__ = area_def.get_xy_from_lonlat(lons, lats)
+
+        x_expects = np.array([0, 1])
+        y_expects = np.array([1, 0])
+        self.assertTrue((x__.data == x_expects).all())
+        self.assertTrue((y__.data == y_expects).all())
+
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
diff --git a/test/test_grid.py b/test/test_grid.py
new file mode 100644
index 0000000..8238b99
--- /dev/null
+++ b/test/test_grid.py
@@ -0,0 +1,177 @@
+import copy
+import unittest
+
+import numpy as np
+
+from pyresample import grid, geometry, utils
+
+
def mp(f):
    """Decorator tagging *f* as a multiprocessing test (``f.mp = True``)."""
    setattr(f, 'mp', True)
    return f
+
def tmp(f):
    """Decorator tagging *f* with the ``tmp`` marker (``f.tmp = True``)."""
    setattr(f, 'tmp', True)
    return f
+
class Test(unittest.TestCase):
    """Regression tests for pyresample.grid sampling and AreaDefinition
    coordinate helpers.

    The expected numbers are reference values from a known-good run; they
    pin current behaviour rather than independently derived truths.
    """

    # 800x800 stereographic target area over Europe.
    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
                                   {'a': '6378144.0',
                                    'b': '6356759.0',
                                    'lat_0': '50.00',
                                    'lat_ts': '50.00',
                                    'lon_0': '8.00',
                                    'proj': 'stere'}, 
                                    800,
                                    800,
                                    [-1370912.72,
                                     -909968.64000000001,
                                     1029087.28,
                                     1490031.3600000001])
    
    # Coarse 5x5 variant of the same area, used for cheap coordinate checks.
    area_def2 = geometry.AreaDefinition('areaD2', 'Europe (3km, HRV, VTC)', 'areaD2', 
                                    {'a': '6378144.0',
                                     'b': '6356759.0',
                                     'lat_0': '50.00',
                                     'lat_ts': '50.00',
                                     'lon_0': '8.00',
                                     'proj': 'stere'}, 
                                     5,
                                     5,
                                     [-1370912.72,
                                      -909968.64000000001,
                                      1029087.28,
                                      1490031.3600000001])
        
    # Full-disc MSG geostationary area, 3712x3712 pixels.
    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
                                   'msg_full',
                                   {'a': '6378169.0',
                                    'b': '6356584.0',
                                    'h': '35785831.0',
                                    'lon_0': '0',
                                    'proj': 'geos'},
                                    3712,
                                    3712,
                                    [-5568742.4000000004,
                                    -5568742.4000000004,
                                    5568742.4000000004,
                                    5568742.4000000004]
                                    )

    def test_linesample(self):
        """Sampling at explicit (row, col) index arrays picks those pixels."""
        data = np.fromfunction(lambda y, x: y*x, (40, 40))
        rows = np.array([[1, 2], [3, 4]])
        cols = np.array([[25, 26], [27, 28]])
        res = grid.get_image_from_linesample(rows, cols, data)
        expected = np.array([[25., 52.], [81., 112.]])
        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
        
    def test_linesample_multi(self):
        """Linesampling must work channel-wise on a 3-channel image."""
        data1 = np.fromfunction(lambda y, x: y*x, (40, 40))
        data2 = np.fromfunction(lambda y, x: 2*y*x, (40, 40))
        data3 = np.fromfunction(lambda y, x: 3*y*x, (40, 40))
        data = np.zeros((40, 40, 3))
        data[:, :, 0] = data1
        data[:, :, 1] = data2
        data[:, :, 2] = data3
        rows = np.array([[1, 2], [3, 4]])
        cols = np.array([[25, 26], [27, 28]])
        res = grid.get_image_from_linesample(rows, cols, data)
        expected = np.array([[[25., 50., 75.],
                                 [52., 104., 156.]],
                               [[81., 162., 243.],
                                [ 112.,  224.,  336.]]])
        self.assertTrue(np.array_equal(res, expected), 'Linesample failed')
        
    def test_from_latlon(self):
        """Sampling an area-based image at explicit lon/lat grids."""
        data = np.fromfunction(lambda y, x: y*x, (800, 800))
        lons = np.fromfunction(lambda y, x: x, (10, 10))
        lats = np.fromfunction(lambda y, x: 50 - (5.0/10)*y, (10, 10))
        #source_def = grid.AreaDefinition.get_from_area_def(self.area_def)
        source_def = self.area_def
        res = grid.get_image_from_lonlats(lons, lats, source_def, data)        
        expected = np.array([[ 129276.,  141032.,  153370.,  165804.,  178334.,  190575.,
                            202864.,  214768.,  226176.,  238080.],
                            [ 133056.,  146016.,  158808.,  171696.,  184320.,  196992.,
                             209712.,  222480.,  234840.,  247715.],
                            [ 137026.,  150150.,  163370.,  177215.,  190629.,  203756.,
                             217464.,  230256.,  243048.,  256373.],
                            [ 140660.,  154496.,  168714.,  182484.,  196542.,  210650.,
                             224257.,  238464.,  251712.,  265512.],
                            [ 144480.,  158484.,  173148.,  187912.,  202776.,  217358.,
                             231990.,  246240.,  259920.,  274170.],
                            [ 147968.,  163261.,  178398.,  193635.,  208616.,  223647.,
                             238728.,  253859.,  268584.,  283898.],
                            [ 151638.,  167121.,  182704.,  198990.,  214775.,  230280.,
                             246442.,  261617.,  276792.,  292574.],
                            [ 154980.,  171186.,  187860.,  204016.,  220542.,  237120.,
                             253125.,  269806.,  285456.,  301732.],
                            [ 158500.,  175536.,  192038.,  209280.,  226626.,  243697.,
                             260820.,  277564.,  293664.,  310408.],
                            [ 161696.,  179470.,  197100.,  214834.,  232320.,  250236.,
                             267448.,  285090.,  302328.,  320229.]])
        self.assertTrue(np.array_equal(res, expected), 'Sampling from lat lon failed')
        
    def test_proj_coords(self):
        """Projection coordinates of the 5x5 area, checked via cross sum."""
        #res = grid.get_proj_coords(self.area_def2)
        res = self.area_def2.get_proj_coords()
        cross_sum = res[0].sum() + res[1].sum() 
        expected = 2977965.9999999963
        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of proj coords failed')
        
    def test_latlons(self):
        """Lon/lats of the 5x5 area, checked via cross sum."""
        #res = grid.get_lonlats(self.area_def2)
        res = self.area_def2.get_lonlats()
        cross_sum = res[0].sum() + res[1].sum() 
        expected = 1440.8280578215431
        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of lat lons failed')
        
    @mp
    def test_latlons_mp(self):
        """Multiprocessing (nprocs=2) lon/lats must match the serial result."""
        #res = grid.get_lonlats(self.area_def2, nprocs=2)
        res = self.area_def2.get_lonlats(nprocs=2)
        cross_sum = res[0].sum() + res[1].sum() 
        expected = 1440.8280578215431
        self.assertAlmostEqual(cross_sum, expected, msg='Calculation of lat lons failed')
        
    def test_resampled_image(self):
        """MSG full disc resampled onto the Europe area (single segment)."""
        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        target_def = self.area_def
        source_def = self.msg_area
        res = grid.get_resampled_image(target_def, source_def, data, segments=1)
        cross_sum = res.sum()
        expected = 399936.39392500359
        self.assertAlmostEqual(cross_sum, expected, msg='Resampling of image failed')

    @tmp
    def test_generate_linesample(self):
        """Pre-generated linesample arrays must reproduce the resampled image
        and be downcast to uint16 (both dims are < 2**16)."""
        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        row_indices, col_indices = utils.generate_quick_linesample_arrays(self.msg_area,
                                                                    self.area_def)
        res = data[row_indices, col_indices]
        cross_sum = res.sum()
        expected = 399936.39392500359
        self.assertAlmostEqual(cross_sum, expected, msg='Generate linesample failed')
        # FIX: failIf is a deprecated unittest alias (removed in Python 3.12);
        # assertFalse is the supported spelling with identical semantics.
        self.assertFalse(row_indices.dtype != np.uint16 or col_indices.dtype != np.uint16, 
                    'Generate linesample failed. Downcast to uint16 expected')
    
    @mp
    def test_resampled_image_mp(self):
        """Multiprocessing resampling must match the serial reference sum."""
        data = np.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        target_def = self.area_def
        source_def = self.msg_area
        res = grid.get_resampled_image(target_def, source_def, data, nprocs=2, segments=1)
        cross_sum = res.sum()
        expected = 399936.39392500359
        self.assertAlmostEqual(cross_sum, expected, msg='Resampling of image mp failed')
        
    def test_single_lonlat(self):
        """get_lonlat for a single pixel of the 800x800 area."""
        lon, lat = self.area_def.get_lonlat(400, 400)
        self.assertAlmostEqual(lon, 5.5028467120975835, msg='Resampling of single lon failed')
        self.assertAlmostEqual(lat, 52.566998432390619, msg='Resampling of single lat failed')
        
    def test_proj4_string(self):
        """proj4_string must serialize the projection dict."""
        proj4_string = self.area_def.proj4_string
        self.assertEqual(proj4_string, '+a=6378144.0 +b=6356759.0 +lat_ts=50.00 +lon_0=8.00 +proj=stere +lat_0=50.00')
diff --git a/test/test_image.py b/test/test_image.py
new file mode 100644
index 0000000..a8471a1
--- /dev/null
+++ b/test/test_image.py
@@ -0,0 +1,202 @@
+import os
+import unittest
+
+import numpy
+
+from pyresample import image, geometry, grid, utils
+
def mask(f):
    """Decorator tagging *f* as a mask-handling test (``f.mask = True``)."""
    setattr(f, 'mask', True)
    return f
+
def tmp(f):
    """Decorator tagging *f* with the ``tmp`` marker (``f.tmp = True``)."""
    setattr(f, 'tmp', True)
    return f
+
+
class Test(unittest.TestCase):
    """Regression tests for the pyresample.image container classes.

    Expected sums are reference values from a known-good run; they pin
    current behaviour of quick and nearest-neighbour resampling.
    """

    # 800x800 stereographic target area over Europe.
    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
                                   {'a': '6378144.0',
                                    'b': '6356759.0',
                                    'lat_0': '50.00',
                                    'lat_ts': '50.00',
                                    'lon_0': '8.00',
                                    'proj': 'stere'}, 
                                    800,
                                    800,
                                    [-1370912.72,
                                     -909968.64000000001,
                                     1029087.28,
                                     1490031.3600000001])

    # Full-disc MSG geostationary area, 3712x3712 pixels.
    msg_area = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
                                   'msg_full',
                                   {'a': '6378169.0',
                                    'b': '6356584.0',
                                    'h': '35785831.0',
                                    'lon_0': '0',
                                    'proj': 'geos'},
                                    3712,
                                    3712,
                                    [-5568742.4000000004,
                                    -5568742.4000000004,
                                    5568742.4000000004,
                                    5568742.4000000004]
                                    )
    
    # Same MSG area downscaled to 928x928, used by the resize test.
    msg_area_resize = geometry.AreaDefinition('msg_full', 'Full globe MSG image 0 degrees', 
                                   'msg_full',
                                   {'a': '6378169.0',
                                    'b': '6356584.0',
                                    'h': '35785831.0',
                                    'lon_0': '0',
                                    'proj': 'geos'},
                                    928,
                                    928,
                                    [-5568742.4000000004,
                                    -5568742.4000000004,
                                    5568742.4000000004,
                                    5568742.4000000004]
                                    )

    @tmp
    def test_image(self):
        """Quick resampling of the MSG disc onto the Europe area."""
        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 399936.39392500359
        self.assertAlmostEqual(cross_sum, expected, msg='ImageContainer resampling quick failed')
    
    @tmp
    def test_image_segments(self):
        """Segmented (segments=8) quick resampling must match the 1-segment result."""
        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=8)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 399936.39392500359
        self.assertAlmostEqual(cross_sum, expected, msg='ImageContainer resampling quick segments failed')
        
    def test_return_type(self):
        """Resampling must preserve the input array's dtype."""
        data = numpy.ones((3712, 3712)).astype('int')
        msg_con = image.ImageContainerQuick(data, self.msg_area, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        # FIX: compare dtypes by equality, not identity -- ``is`` only works
        # because builtin dtype objects happen to be interned, which is an
        # implementation detail rather than a guarantee.
        self.assertEqual(data.dtype, res.dtype, msg='Failed to maintain input data type')
    
    @mask
    def test_masked_image(self):
        """The mask of a masked input must be resampled alongside the data."""
        data = numpy.zeros((3712, 3712))
        # Local renamed from ``mask`` to avoid shadowing the module-level
        # ``mask`` decorator.
        mask_arr = numpy.zeros((3712, 3712))
        mask_arr[:, 1865:] = 1
        data_masked = numpy.ma.array(data, mask=mask_arr)
        msg_con = image.ImageContainerQuick(data_masked, self.msg_area, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        resampled_mask = res.mask.astype('int')
        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'), 
                                  sep=' ').reshape((800, 800))
        self.assertTrue(numpy.array_equal(resampled_mask, expected), msg='Failed to resample masked array')

    @mask
    def test_masked_image_fill(self):
        """fill_value=None must give the same resampled mask as the default."""
        data = numpy.zeros((3712, 3712))
        mask_arr = numpy.zeros((3712, 3712))
        mask_arr[:, 1865:] = 1
        data_masked = numpy.ma.array(data, mask=mask_arr)
        msg_con = image.ImageContainerQuick(data_masked, self.msg_area, 
                                            fill_value=None, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        resampled_mask = res.mask.astype('int')
        expected = numpy.fromfile(os.path.join(os.path.dirname(__file__), 'test_files', 'mask_grid.dat'), 
                                  sep=' ').reshape((800, 800))
        self.assertTrue(numpy.array_equal(resampled_mask, expected), msg='Failed to resample masked array')
        
    def test_nearest_neighbour(self):
        """Nearest-neighbour resampling of the MSG disc onto the Europe area."""
        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 399936.783062
        self.assertAlmostEqual(cross_sum, expected, 
                                   msg='ImageContainer resampling nearest neighbour failed')
    
    def test_nearest_resize(self):
        """Nearest-neighbour downscaling of the MSG disc (3712 -> 928)."""
        data = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
        area_con = msg_con.resample(self.msg_area_resize)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 2212023.0175830
        self.assertAlmostEqual(cross_sum, expected, 
                                   msg='ImageContainer resampling nearest neighbour failed')
        
    def test_nearest_neighbour_multi(self):
        """Multi-channel nearest-neighbour resampling, channel 2 = 2 * channel 1."""
        data1 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        data2 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712)) * 2
        data = numpy.dstack((data1, data2))
        msg_con = image.ImageContainerNearest(data, self.msg_area, 50000, segments=1)
        area_con = msg_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum1 = res[:, :, 0].sum()
        expected1 = 399936.783062
        self.assertAlmostEqual(cross_sum1, expected1, 
                                   msg='ImageContainer resampling nearest neighbour multi failed')        
        cross_sum2 = res[:, :, 1].sum()
        expected2 = 399936.783062 * 2
        self.assertAlmostEqual(cross_sum2, expected2, 
                                   msg='ImageContainer resampling nearest neighbour multi failed')
        
    def test_nearest_neighbour_multi_preproc(self):
        """Precomputed linesample arrays must reproduce the nearest result."""
        data1 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712))
        data2 = numpy.fromfunction(lambda y, x: y*x*10**-6, (3712, 3712)) * 2
        data = numpy.dstack((data1, data2))
        msg_con = image.ImageContainer(data, self.msg_area)
        #area_con = msg_con.resample_area_nearest_neighbour(self.area_def, 50000)
        row_indices, col_indices = \
            utils.generate_nearest_neighbour_linesample_arrays(self.msg_area, 
                                                               self.area_def, 
                                                               50000)
        res = msg_con.get_array_from_linesample(row_indices, col_indices)
        cross_sum1 = res[:, :, 0].sum()
        expected1 = 399936.783062
        self.assertAlmostEqual(cross_sum1, expected1, 
                                   msg='ImageContainer resampling nearest neighbour multi preproc failed')        
        cross_sum2 = res[:, :, 1].sum()
        expected2 = 399936.783062 * 2
        self.assertAlmostEqual(cross_sum2, expected2, 
                                   msg='ImageContainer resampling nearest neighbour multi preproc failed')
            
    def test_nearest_swath(self):
        """Nearest-neighbour resampling of swath (lon/lat) input."""
        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))
        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=1)
        area_con = swath_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 15874591.0
        self.assertEqual(cross_sum, expected,
                             msg='ImageContainer swath resampling nearest failed')

    def test_nearest_swath_segments(self):
        """Segmented multi-channel swath resampling (3 identical channels)."""
        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))
        data = numpy.dstack(3 * (data,))
        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        swath_con = image.ImageContainerNearest(data, swath_def, 50000, segments=2)
        area_con = swath_con.resample(self.area_def)
        res = area_con.image_data
        cross_sum = res.sum()
        expected = 3 * 15874591.0
        self.assertEqual(cross_sum, expected,
                             msg='ImageContainer swath segments resampling nearest failed')
+
+
diff --git a/test/test_kd_tree.py b/test/test_kd_tree.py
new file mode 100644
index 0000000..247b5ed
--- /dev/null
+++ b/test/test_kd_tree.py
@@ -0,0 +1,736 @@
+from __future__ import with_statement
+
+import os
+import sys
+import unittest
+import warnings
+if sys.version_info < (2, 6):
+    warnings.simplefilter("ignore")
+else:    
+    warnings.simplefilter("always")
+import numpy
+
+from pyresample import kd_tree, utils, geometry, grid, data_reduce
+
+
def mp(f):
    """Decorator tagging *f* as a multiprocessing test (``f.mp = True``)."""
    setattr(f, 'mp', True)
    return f
+
def quick(f):
    """Decorator tagging *f* as a quick test (``f.quick = True``)."""
    setattr(f, 'quick', True)
    return f
+
def tmp(f):
    """Decorator tagging *f* with the ``tmp`` marker (``f.tmp = True``)."""
    setattr(f, 'tmp', True)
    return f
+
class Test(unittest.TestCase):
    """Tests for kd-tree based resampling (nearest, gauss, custom)."""

    # 800x800 stereographic target area over Europe, shared by most tests.
    area_def = geometry.AreaDefinition('areaD', 'Europe (3km, HRV, VTC)', 'areaD', 
                                   {'a': '6378144.0',
                                    'b': '6356759.0',
                                    'lat_0': '50.00',
                                    'lat_ts': '50.00',
                                    'lon_0': '8.00',
                                    'proj': 'stere'}, 
                                    800,
                                    800,
                                    [-1370912.72,
                                     -909968.64000000001,
                                     1029087.28,
                                     1490031.3600000001])

    # Tiny 3-sample swath and a single-point target coordinate, used by
    # the *_base tests below.
    tdata = numpy.array([1, 2, 3])
    tlons = numpy.array([11.280789, 12.649354, 12.080402])
    tlats = numpy.array([56.011037, 55.629675, 55.641535])
    tswath = geometry.SwathDefinition(lons=tlons, lats=tlats)
    #grid = numpy.ones((1, 1, 2))
    #grid[0, 0, 0] = 12.562036
    #grid[0, 0, 1] = 55.715613
    tgrid = geometry.CoordinateDefinition(lons=numpy.array([12.562036]), 
                                          lats=numpy.array([55.715613]))
+               
+    def test_nearest_base(self):     
+        res = kd_tree.resample_nearest(self.tswath,\
+                                     self.tdata.ravel(), self.tgrid,\
+                                     100000, reduce_data=False, segments=1)
+        self.assertTrue(res[0] == 2, 'Failed to calculate nearest neighbour')
+    
+    def test_gauss_base(self):
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(self.tswath, \
+                                             self.tdata.ravel(), self.tgrid,\
+                                             50000, 25000, reduce_data=False, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(self.tswath, \
+                                             self.tdata.ravel(), self.tgrid,\
+                                             50000, 25000, reduce_data=False, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
+                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')    
+        self.assertAlmostEqual(res[0], 2.2020729, 5, \
+                                   'Failed to calculate gaussian weighting')
+        
+    def test_custom_base(self):
+        def wf(dist):
+            return 1 - dist/100000.0
+        
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_custom(self.tswath,\
+                                         self.tdata.ravel(), self.tgrid,\
+                                         50000, wf, reduce_data=False, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:     
+                res = kd_tree.resample_custom(self.tswath,\
+                                             self.tdata.ravel(), self.tgrid,\
+                                             50000, wf, reduce_data=False, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour warning')
+                self.failIf(('Searching' not in str(w[0].message)), 'Failed to create correct neighbour warning')        
+        self.assertAlmostEqual(res[0], 2.4356757, 5,\
+                                   'Failed to calculate custom weighting')
+    def test_nearest(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, segments=1)        
+        cross_sum = res.sum()        
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath resampling nearest failed')
+       
+    def test_nearest_1d(self):
+        data = numpy.fromfunction(lambda x, y: x * y, (800, 800))        
+        lons = numpy.fromfunction(lambda x: 3 + x / 100. , (500,))
+        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(self.area_def, data.ravel(),
+                                       swath_def, 50000, segments=1)
+        cross_sum = res.sum()        
+        expected = 35821299.0
+        self.assertEqual(res.shape, (500,),
+                             msg='Swath resampling nearest 1d failed')
+        self.assertEqual(cross_sum, expected,
+                             msg='Swath resampling nearest 1d failed')
+    
+    def test_nearest_empty(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, segments=1)        
+        cross_sum = res.sum()        
+        expected = 0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath resampling nearest empty failed')
+    
+    def test_nearest_empty_multi(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data_multi,\
+                                     self.area_def, 50000, segments=1)                
+        self.assertEqual(res.shape, (800, 800, 3),\
+                             msg='Swath resampling nearest empty multi failed')
+    
+    def test_nearest_empty_multi_masked(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data_multi,\
+                                     self.area_def, 50000, segments=1,
+                                     fill_value=None)                
+        self.assertEqual(res.shape, (800, 800, 3),
+                             msg='Swath resampling nearest empty multi masked failed')
+            
+    def test_nearest_empty_masked(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 165 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, segments=1, 
+                                     fill_value=None)        
+        cross_sum = res.mask.sum()        
+        expected = res.size
+        self.assertTrue(cross_sum == expected,
+                        msg='Swath resampling nearest empty masked failed')
+    
+    def test_nearest_segments(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, segments=2)        
+        cross_sum = res.sum()        
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath resampling nearest segments failed')
+    
+    def test_nearest_remap(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, segments=1)
+        remap = kd_tree.resample_nearest(self.area_def, res.ravel(),\
+                                       swath_def, 5000, segments=1)        
+        cross_sum = remap.sum()
+        expected = 22275.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Grid remapping nearest failed')
+    
+    @mp
+    def test_nearest_mp(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(),\
+                                     self.area_def, 50000, nprocs=2, segments=1)
+        cross_sum = res.sum()
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath resampling mp nearest failed')
+       
+    def test_nearest_multi(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        res = kd_tree.resample_nearest(swath_def, data_multi,\
+                                     self.area_def, 50000, segments=1)        
+        cross_sum = res.sum()
+        expected = 3 * 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath multi channel resampling nearest failed')
+     
+    def test_nearest_multi_unraveled(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.dstack((data, data, data))
+        res = kd_tree.resample_nearest(swath_def, data_multi,\
+                                     self.area_def, 50000, segments=1)        
+        cross_sum = res.sum()
+        expected = 3 * 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath multi channel resampling nearest failed')
+        
+    def test_gauss_sparse(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_gauss(swath_def, data.ravel(),\
+                                     self.area_def, 50000, 25000, fill_value=-1, segments=1)        
+        cross_sum = res.sum()        
+        expected = 15387753.9852
+        self.assertAlmostEqual(cross_sum, expected, places=3,\
+                                   msg='Swath gauss sparse nearest failed')
+            
+    def test_gauss(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data.ravel(),\
+                                         self.area_def, 50000, 25000, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data.ravel(),\
+                                             self.area_def, 50000, 25000, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')        
+        cross_sum = res.sum()        
+        expected = 4872.81050892
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath resampling gauss failed')
+
+    @tmp
+    def test_gauss_fwhm(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data.ravel(),\
+                                         self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data.ravel(),\
+                                             self.area_def, 50000, utils.fwhm2sigma(41627.730557884883), segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')        
+        cross_sum = res.sum()        
+        expected = 4872.81050892
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath resampling gauss failed')
+        
+    def test_gauss_multi(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                         self.area_def, 50000, [25000, 15000, 10000], segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                             self.area_def, 50000, [25000, 15000, 10000], segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning') 
+        cross_sum = res.sum()        
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel resampling gauss failed')
+    
+    def test_gauss_multi_mp(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                         self.area_def, 50000, [25000, 15000, 10000],\
+                                         nprocs=2, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                             self.area_def, 50000, [25000, 15000, 10000],\
+                                             nprocs=2, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning') 
+        cross_sum = res.sum()
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel resampling gauss failed') 
+       
+    def test_gauss_multi_mp_segments(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                         self.area_def, 50000, [25000, 15000, 10000],\
+                                         nprocs=2, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                             self.area_def, 50000, [25000, 15000, 10000],\
+                                             nprocs=2, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.84313918
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel segments resampling gauss failed')
+        
+    def test_gauss_multi_mp_segments_empty(self):
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 165 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        res = kd_tree.resample_gauss(swath_def, data_multi,\
+                                     self.area_def, 50000, [25000, 15000, 10000],\
+                                     nprocs=2, segments=1)
+        cross_sum = res.sum()
+        self.assertTrue(cross_sum == 0,
+                        msg=('Swath multi channel segments empty ' 
+                             'resampling gauss failed')) 
+    
+    def test_custom(self):
+        def wf(dist):
+            return 1 - dist/100000.0
+                    
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-5, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_custom(swath_def, data.ravel(),\
+                                          self.area_def, 50000, wf, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_custom(swath_def, data.ravel(),\
+                                              self.area_def, 50000, wf, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 4872.81050729
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath custom resampling failed')
+     
+    def test_custom_multi(self):
+        def wf1(dist):
+            return 1 - dist/100000.0
+        
+        def wf2(dist):
+            return 1
+        
+        def wf3(dist):
+            return numpy.cos(dist)**2
+        
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_custom(swath_def, data_multi,\
+                                          self.area_def, 50000, [wf1, wf2, wf3], segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_custom(swath_def, data_multi,\
+                                              self.area_def, 50000, [wf1, wf2, wf3], segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+        cross_sum = res.sum()
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel custom resampling failed')
+        
+    def test_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
+        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
+        grid_lons, grid_lats = self.area_def.get_lonlats()
+        lons, lats, data = data_reduce.swath_from_lonlat_grid(grid_lons, grid_lats, 
+                                                              lons, lats, data, 
+                                                              7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+    
+    def test_reduce_boundary(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
+        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
+        boundary_lonlats = self.area_def.get_boundary_lonlats()
+        lons, lats, data = data_reduce.swath_from_lonlat_boundaries(boundary_lonlats[0],
+                                                              boundary_lonlats[1], 
+                                                              lons, lats, data, 
+                                                              7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+        
+    def test_cartesian_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
+        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
+        #grid = utils.generate_cartesian_grid(self.area_def)
+        grid = self.area_def.get_cartesian_coords()       
+        lons, lats, data = data_reduce.swath_from_cartesian_grid(grid, lons, lats, data, 
+                                                                 7000)
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Cartesian reduce data failed')
+    
+    def test_area_con_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
+        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
+        grid_lons, grid_lats = self.area_def.get_lonlats()
+        valid_index = data_reduce.get_valid_index_from_lonlat_grid(grid_lons, grid_lats, 
+                                                                   lons, lats, 7000) 
+        data = data[valid_index]
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Reduce data failed')
+       
+    def test_area_con_cartesian_reduce(self):
+        data = numpy.fromfunction(lambda y, x: (y + x), (1000, 1000))
+        lons = numpy.fromfunction(lambda y, x: -180 + (360.0/1000)*x, (1000, 1000))
+        lats = numpy.fromfunction(lambda y, x: -90 + (180.0/1000)*y, (1000, 1000))
+        cart_grid = self.area_def.get_cartesian_coords()
+        valid_index = data_reduce.get_valid_index_from_cartesian_grid(cart_grid, 
+                                                                      lons, lats, 7000)
+        data = data[valid_index]
+        cross_sum = data.sum()
+        expected = 20514375.0
+        self.assertAlmostEqual(cross_sum, expected, msg='Cartesian reduce data failed')
+               
+    def test_masked_nearest(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_nearest(swath_def, masked_data.ravel(), 
+                                     self.area_def, 50000, segments=1)
+        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                    'test_files', 
+                                                    'mask_test_nearest_mask.dat'), 
+                                                    sep=' ').reshape((800, 800))
+        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                    'test_files', 
+                                                    'mask_test_nearest_data.dat'), 
+                                                    sep=' ').reshape((800, 800))        
+        self.assertTrue(numpy.array_equal(expected_mask, res.mask), 
+                        msg='Resampling of swath mask failed')
+        self.assertTrue(numpy.array_equal(expected_data, res.data), 
+                        msg='Resampling of swath masked data failed')
+           
+    def test_masked_nearest_1d(self):
+        data = numpy.ones((800, 800))
+        data[:400, :] = 2
+        lons = numpy.fromfunction(lambda x: 3 + x / 100. , (500,))
+        lats = numpy.fromfunction(lambda x: 75 - x / 10., (500,))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((800, 800))
+        mask[400:, :] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_nearest(self.area_def, masked_data.ravel(),
+                                       swath_def, 50000, segments=1)
+        self.assertEqual(res.mask.sum(), 108,
+                             msg='Swath resampling masked nearest 1d failed')
+        
+    
+    def test_masked_gauss(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        res = kd_tree.resample_gauss(swath_def, masked_data.ravel(),\
+                                   self.area_def, 50000, 25000, segments=1)
+        expected_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                    'test_files', 
+                                                    'mask_test_mask.dat'), 
+                                                    sep=' ').reshape((800, 800))
+        expected_data = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                    'test_files', 
+                                                    'mask_test_data.dat'), 
+                                                    sep=' ').reshape((800, 800))
+        expected = expected_data.sum()
+        cross_sum = res.data.sum()
+        
+        self.assertTrue(numpy.array_equal(expected_mask, res.mask), 
+                        msg='Gauss resampling of swath mask failed')
+        self.assertAlmostEqual(cross_sum, expected, places=3,\
+                                   msg='Gauss resampling of swath masked data failed')
+        
+     
+    def test_masked_fill_float(self):
+        data = numpy.ones((50, 10))
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(), 
+                                     self.area_def, 50000, fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                         'test_files', 
+                                                         'mask_test_fill_value.dat'), 
+                                                         sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
+                         msg='Failed to create fill mask on float data')
+        
+    def test_masked_fill_int(self):
+        data = numpy.ones((50, 10)).astype('int')
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, data.ravel(), 
+                                     self.area_def, 50000, fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                         'test_files', 
+                                                         'mask_test_fill_value.dat'), 
+                                                         sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
+                        msg='Failed to create fill mask on integer data')
+        
+    def test_masked_full(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask = numpy.ones((50, 10))
+        mask[:, :5] = 0
+        masked_data = numpy.ma.array(data, mask=mask)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, 
+                                    masked_data.ravel(), self.area_def, 50000,
+                                    fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                         'test_files', 
+                                                         'mask_test_full_fill.dat'), 
+                                                         sep=' ').reshape((800, 800))
+        fill_mask = res.mask
+
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
+                         msg='Failed to create fill mask on masked data')
+        
+    def test_masked_full_multi(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask1 = numpy.ones((50, 10))
+        mask1[:, :5] = 0
+        mask2 = numpy.ones((50, 10))
+        mask2[:, 5:] = 0
+        mask3 = numpy.ones((50, 10))
+        mask3[:25, :] = 0
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(), data.ravel()))
+        mask_multi = numpy.column_stack((mask1.ravel(), mask2.ravel(), mask3.ravel()))
+        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        res = kd_tree.resample_nearest(swath_def, 
+                                    masked_data, self.area_def, 50000,
+                                    fill_value=None, segments=1)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                         'test_files', 
+                                                         'mask_test_full_fill_multi.dat'), 
+                                                         sep=' ').reshape((800, 800, 3))
+        fill_mask = res.mask
+        cross_sum = res.sum()
+        expected = 357140.0
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Failed to resample masked data')        
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
+                         msg='Failed to create fill mask on masked data')
+        
+    def test_nearest_from_sample(self):
+        data = numpy.fromfunction(lambda y, x: y*x, (50, 10))        
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        valid_input_index, valid_output_index, index_array, distance_array = \
+                                    kd_tree.get_neighbour_info(swath_def, 
+                                                             self.area_def, 
+                                                             50000, neighbours=1, segments=1)
+        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), data.ravel(), 
+                                                   valid_input_index, valid_output_index, 
+                                                   index_array)        
+        cross_sum = res.sum()        
+        expected = 15874591.0
+        self.assertEqual(cross_sum, expected,\
+                             msg='Swath resampling from neighbour info nearest failed')
+    
+    def test_custom_multi_from_sample(self):
+        def wf1(dist):
+            return 1 - dist/100000.0
+        
+        def wf2(dist):
+            return 1
+        
+        def wf3(dist):
+            return numpy.cos(dist)**2
+        
+        data = numpy.fromfunction(lambda y, x: (y + x)*10**-6, (5000, 100))        
+        lons = numpy.fromfunction(lambda y, x: 3 + (10.0/100)*x, (5000, 100))
+        lats = numpy.fromfunction(lambda y, x: 75 - (50.0/5000)*y, (5000, 100))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(),\
+                                         data.ravel()))
+        
+        if sys.version_info < (2, 6):
+            valid_input_index, valid_output_index, index_array, distance_array = \
+                                        kd_tree.get_neighbour_info(swath_def, 
+                                                                   self.area_def, 
+                                                                   50000, segments=1)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                valid_input_index, valid_output_index, index_array, distance_array = \
+                                            kd_tree.get_neighbour_info(swath_def, 
+                                                                       self.area_def, 
+                                                                       50000, segments=1)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+            
+        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800), 
+                                                     data_multi, 
+                                                     valid_input_index, valid_output_index, 
+                                                     index_array, distance_array, 
+                                                     weight_funcs=[wf1, wf2, wf3])
+                        
+        cross_sum = res.sum()
+        
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel custom resampling from neighbour info failed 1')
+        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800), 
+                                                   data_multi, 
+                                                   valid_input_index, valid_output_index, 
+                                                   index_array, distance_array, 
+                                                   weight_funcs=[wf1, wf2, wf3])
+        
+        # Look for error where input data has been manipulated    
+        cross_sum = res.sum()
+        expected = 1461.842980746
+        self.assertAlmostEqual(cross_sum, expected,\
+                                   msg='Swath multi channel custom resampling from neighbour info failed 2')
+
+
+    def test_masked_multi_from_sample(self):
+        data = numpy.ones((50, 10))
+        data[:, 5:] = 2
+        mask1 = numpy.ones((50, 10))
+        mask1[:, :5] = 0
+        mask2 = numpy.ones((50, 10))
+        mask2[:, 5:] = 0
+        mask3 = numpy.ones((50, 10))
+        mask3[:25, :] = 0
+        data_multi = numpy.column_stack((data.ravel(), data.ravel(), data.ravel()))
+        mask_multi = numpy.column_stack((mask1.ravel(), mask2.ravel(), mask3.ravel()))
+        masked_data = numpy.ma.array(data_multi, mask=mask_multi)
+        lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10)) 
+        lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
+        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
+#        res = swath.resample_nearest(lons.ravel(), lats.ravel(), 
+#                                    masked_data, self.area_def, 50000,
+#                                    fill_value=None)
+        valid_input_index, valid_output_index, index_array, distance_array = \
+                                    kd_tree.get_neighbour_info(swath_def, 
+                                                             self.area_def, 
+                                                             50000, neighbours=1, segments=1)
+        res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), 
+                                                   masked_data, 
+                                                   valid_input_index, 
+                                                   valid_output_index, index_array,
+                                                   fill_value=None)
+        expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__), 
+                                                         'test_files', 
+                                                         'mask_test_full_fill_multi.dat'), 
+                                                         sep=' ').reshape((800, 800, 3))
+        fill_mask = res.mask        
+        self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask), 
+                         msg='Failed to create fill mask on masked data')
+        
+        
+
diff --git a/test/test_plot.py b/test/test_plot.py
new file mode 100644
index 0000000..a1580a5
--- /dev/null
+++ b/test/test_plot.py
@@ -0,0 +1,71 @@
+import unittest
+import os
+
+import numpy as np
+	
+import pyresample as pr
+
+def tmp(f):
+    """Decorator that tags *f* with a ``tmp = True`` attribute and returns it
+    unchanged (used to mark tests for selective runs)."""
+    f.tmp = True
+    return f	
+
+class Test(unittest.TestCase):
+    """Plot-utility tests: ellipsoid axis lookup, Basemap construction, and
+    quicklook generation for plate carree, EASE and orthographic projections.
+
+    NOTE(review): these tests require matplotlib (forced to the 'Agg' backend)
+    and basemap via ``pr.plot`` -- confirm availability in CI.
+    """
+    
+    # Class-level fixture: SSMIS swath samples loaded once for all tests.
+    # Column layout of the .npz payload: 0 = lon, 1 = lat, 2 = 37v Tb.
+    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), 
+                               'test_files', 'ssmis_swath.npz'))
+    data = np.load(filename)['data']
+    lons = data[:, 0].astype(np.float64)
+    lats = data[:, 1].astype(np.float64)
+    tb37v = data[:, 2].astype(np.float64)
+
+
+    def test_ellps2axis(self):
+        """WGS84 must resolve to the standard semi-major/semi-minor axes (m)."""
+        a, b = pr.plot.ellps2axis('WGS84')
+        self.assertAlmostEqual(a, 6378137.0, 
+                                   msg='Failed to get semi-major axis of ellipsis')
+        self.assertAlmostEqual(b, 6356752.3142451793, 
+                                   msg='Failed to get semi-minor axis of ellipsis')
+    
+    @tmp   
+    def test_area_def2basemap(self):
+        """A Basemap built from the spherical ease_sh area must carry the
+        area's sphere radius (rmajor == rminor == 6371228.0 m)."""
+        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
+                                         'test_files', 'areas.cfg'), 'ease_sh')[0]
+        bmap = pr.plot.area_def2basemap(area_def)
+        self.assertTrue(bmap.rmajor == bmap.rminor and 
+                        bmap.rmajor == 6371228.0, 
+                        'Failed to create Basemap object')
+
+    	        
+    def test_plate_carreeplot(self):
+        """Smoke test: quicklook of resampled swath on the pc_world area
+        (no meridians/parallels drawn); passes if no exception is raised."""
+        import matplotlib
+        matplotlib.use('Agg')
+        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
+                                            'test_files', 'areas.cfg'), 'pc_world')[0]
+        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
+        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
+                                             radius_of_influence=20000, 
+                                             fill_value=None)		
+        plt = pr.plot._get_quicklook(area_def, result, num_meridians=0, 
+                                     num_parallels=0)
+            
+    def test_easeplot(self):
+        """Smoke test: quicklook on the ease_sh area; passes if no exception."""
+        import matplotlib
+        matplotlib.use('Agg')
+        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
+                                            'test_files', 'areas.cfg'), 'ease_sh')[0]
+        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
+        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
+                                             radius_of_influence=20000, 
+                                             fill_value=None)		
+        plt = pr.plot._get_quicklook(area_def, result)
+
+    def test_orthoplot(self):
+        """Smoke test: quicklook on the ortho area; passes if no exception."""
+        import matplotlib
+        matplotlib.use('Agg')
+        area_def = pr.utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
+                                            'test_files', 'areas.cfg'), 'ortho')[0]
+        swath_def = pr.geometry.SwathDefinition(self.lons, self.lats)
+        result = pr.kd_tree.resample_nearest(swath_def, self.tb37v, area_def, 
+                                             radius_of_influence=20000, 
+                                             fill_value=None)		
+        plt = pr.plot._get_quicklook(area_def, result)
diff --git a/test/test_spherical_geometry.py b/test/test_spherical_geometry.py
new file mode 100644
index 0000000..098b16a
--- /dev/null
+++ b/test/test_spherical_geometry.py
@@ -0,0 +1,427 @@
+from __future__ import with_statement
+
+import numpy as np
+import unittest
+import math
+
+from pyresample.spherical_geometry import Coordinate, Arc
+from pyresample import geometry
+
+
+class TestOverlap(unittest.TestCase):
+    """Testing overlapping functions in pyresample.
+
+    Each area is a SwathDefinition built from a 2x2 grid of corner
+    lon/lat values; membership and overlap are tested on the sphere,
+    including antimeridian and north-pole configurations.
+    """
+    def assert_raises(self, exception, call_able, *args):
+        """assertRaises() has changed from py2.6 to 2.7! Here is an attempt to
+        cover both"""
+        import sys
+        if sys.version_info < (2, 7):
+            self.assertRaises(exception, call_able, *args)
+        else:
+            with self.assertRaises(exception):
+                call_able(*args)
+
+    def test_inside(self):
+        """Testing if a point is inside an area.
+        """
+        # Box around (0, 0): corners at lon +/-11, lat +/-11.
+        lons = np.array([[-11, 11], [-11, 11]])
+        lats = np.array([[11, 11], [-11, -11]])
+        area = geometry.SwathDefinition(lons, lats)
+        
+        point = Coordinate(0, 0)
+
+        self.assertTrue(point in area)
+
+        point = Coordinate(0, 12)
+        self.assertFalse(point in area)
+
+
+        # Narrow box straddling the antimeridian (lon +/-179); lon 180 is
+        # inside it, regardless of which sign is used for the longitude.
+        lons = np.array([[-179, 179], [-179, 179]])
+        lats = np.array([[1, 1], [-1, -1]])
+        area = geometry.SwathDefinition(lons, lats)
+
+        point = Coordinate(180, 0)
+        self.assertTrue(point in area)
+
+        point = Coordinate(180, 12)
+        self.assertFalse(point in area)
+
+        point = Coordinate(-180, 12)
+        self.assertFalse(point in area)
+
+        # Out-of-range coordinates must be rejected at construction time.
+        self.assert_raises(ValueError, Coordinate, 0, 192)
+
+        self.assert_raises(ValueError, Coordinate, 15, -91)
+
+        # case of the north pole
+        lons = np.array([[0, 90], [-90, 180]])
+        lats = np.array([[89, 89], [89, 89]])
+        area = geometry.SwathDefinition(lons, lats)
+
+        point = Coordinate(90, 90)
+        self.assertTrue(point in area)
+
+    def test_overlaps(self):
+        """Test if two areas overlap.
+        """
+        # Two polar caps rotated 45 degrees from each other: overlap.
+        lons1 = np.array([[0, 90], [-90, 180]])
+        lats1 = np.array([[89, 89], [89, 89]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+        
+        lons2 = np.array([[45, 135], [-45, -135]])
+        lats2 = np.array([[89, 89], [89, 89]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+        self.assertTrue(area1.overlaps(area2))
+        self.assertTrue(area2.overlaps(area1))
+
+        # Opposite hemispheres of the polar ring: no overlap.
+        lons1 = np.array([[0, 45], [135, 90]])
+        lats1 = np.array([[89, 89], [89, 89]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+        
+        lons2 = np.array([[180, -135], [-45, -90]])
+        lats2 = np.array([[89, 89], [89, 89]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+        self.assertFalse(area1.overlaps(area2))
+        self.assertFalse(area2.overlaps(area1))
+        
+        # Two boxes near the origin sharing a corner region: overlap.
+        lons1 = np.array([[-1, 1], [-1, 1]])
+        lats1 = np.array([[1, 1], [-1, -1]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+
+        lons2 = np.array([[0, 2], [0, 2]])
+        lats2 = np.array([[0, 0], [2, 2]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+        self.assertTrue(area1.overlaps(area2))
+        self.assertTrue(area2.overlaps(area1))
+        
+
+        # Disjoint boxes separated in longitude: no overlap.
+        lons1 = np.array([[-1, 0], [-1, 0]])
+        lats1 = np.array([[1, 2], [-1, 0]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+
+        lons2 = np.array([[1, 2], [1, 2]])
+        lats2 = np.array([[1, 2], [-1, 0]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+        
+        self.assertFalse(area1.overlaps(area2))
+        self.assertFalse(area2.overlaps(area1))
+
+
+    def test_overlap_rate(self):
+        """Test how much two areas overlap.
+
+        The expected rates are regression values (fraction of each area
+        covered by the intersection), checked to 2-3 decimal places.
+        """
+
+        # Symmetric case: each 2x2-degree box shares a quarter of its area.
+        lons1 = np.array([[-1, 1], [-1, 1]])
+        lats1 = np.array([[1, 1], [-1, -1]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+
+        lons2 = np.array([[0, 2], [0, 2]])
+        lats2 = np.array([[0, 0], [2, 2]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+        self.assertAlmostEqual(area1.overlap_rate(area2), 0.25, 3)
+        self.assertAlmostEqual(area2.overlap_rate(area1), 0.25, 3)
+        
+        # Real-world-sized swath vs. a small target area: the rate is
+        # asymmetric because the two areas differ greatly in size.
+        lons1 = np.array([[82.829699999999974, 36.888300000000001],
+                          [98.145499999999984, 2.8773]])
+        lats1 = np.array([[60.5944, 52.859999999999999],
+                          [80.395899999999997, 66.7547]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+        
+        lons2 = np.array([[7.8098183315148422, 26.189349044600252],
+                          [7.8098183315148422, 26.189349044600252]])
+        lats2 = np.array([[62.953206630716465, 62.953206630716465],
+                          [53.301561187195546, 53.301561187195546]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+
+        self.assertAlmostEqual(area1.overlap_rate(area2), 0.07, 2)
+        self.assertAlmostEqual(area2.overlap_rate(area1), 0.012, 3)
+        
+        lons1 = np.array([[82.829699999999974, 36.888300000000001],
+                          [98.145499999999984, 2.8773]])
+        lats1 = np.array([[60.5944, 52.859999999999999],
+                          [80.395899999999997, 66.7547]])
+        area1 = geometry.SwathDefinition(lons1, lats1)
+
+        lons2 = np.array([[12.108984194981202, 30.490647126520301],
+                          [12.108984194981202, 30.490647126520301]])
+        lats2 = np.array([[65.98228561983025, 65.98228561983025],
+                          [57.304862819933433, 57.304862819933433]])
+        area2 = geometry.SwathDefinition(lons2, lats2)
+
+        
+        self.assertAlmostEqual(area1.overlap_rate(area2), 0.5, 2)
+        self.assertAlmostEqual(area2.overlap_rate(area1), 0.068, 3)
+
+
+
+class TestSphereGeometry(unittest.TestCase):
+    """Testing sphere geometry from this module.
+
+    Exercises Arc.angle (signed angle between great-circle arcs sharing an
+    endpoint) and Arc.intersects, at the origin, across the antimeridian,
+    and around the north pole.
+    """
+
+    def test_angle(self):
+        """Testing the angle value between two arcs.
+        """
+
+        base = 0
+
+        # Four arcs from the origin toward N, E, S, W: consecutive pairs
+        # are a quarter turn apart; the sign encodes orientation.
+        p0_ = Coordinate(base, base)
+        p1_ = Coordinate(base, base + 1)
+        p2_ = Coordinate(base + 1, base)
+        p3_ = Coordinate(base, base - 1)
+        p4_ = Coordinate(base - 1, base)
+
+        arc1 = Arc(p0_, p1_)
+        arc2 = Arc(p0_, p2_)
+        arc3 = Arc(p0_, p3_)
+        arc4 = Arc(p0_, p4_)
+
+        self.assertAlmostEqual(arc1.angle(arc2), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc2.angle(arc3), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc3.angle(arc4), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc4.angle(arc1), math.pi / 2,
+                               msg="this should be pi/2")
+
+        self.assertAlmostEqual(arc1.angle(arc4), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc4.angle(arc3), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc3.angle(arc2), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc2.angle(arc1), -math.pi / 2,
+                               msg="this should be -pi/2")
+
+        self.assertAlmostEqual(arc1.angle(arc3), math.pi,
+                               msg="this should be pi")
+        self.assertAlmostEqual(arc3.angle(arc1), math.pi,
+                               msg="this should be pi")
+        self.assertAlmostEqual(arc2.angle(arc4), math.pi,
+                               msg="this should be pi")
+        self.assertAlmostEqual(arc4.angle(arc2), math.pi,
+                               msg="this should be pi")
+
+
+        # Diagonal arcs (NE, SE, SW, NW): pi/4 steps, checked to 3 places
+        # since the diagonals are only approximately 45 degrees on the sphere.
+        p5_ = Coordinate(base + 1, base + 1)
+        p6_ = Coordinate(base + 1, base - 1)
+        p7_ = Coordinate(base - 1, base - 1)
+        p8_ = Coordinate(base - 1, base + 1)
+
+        arc5 = Arc(p0_, p5_)
+        arc6 = Arc(p0_, p6_)
+        arc7 = Arc(p0_, p7_)
+        arc8 = Arc(p0_, p8_)
+
+        self.assertAlmostEqual(arc1.angle(arc5), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc5.angle(arc2), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc2.angle(arc6), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc6.angle(arc3), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc3.angle(arc7), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc7.angle(arc4), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc4.angle(arc8), math.pi / 4, 3,
+                               msg="this should be pi/4")
+        self.assertAlmostEqual(arc8.angle(arc1), math.pi / 4, 3,
+                               msg="this should be pi/4")
+
+        self.assertAlmostEqual(arc1.angle(arc6), 3 * math.pi / 4, 3,
+                               msg="this should be 3pi/4")
+
+
+        # Same N/E/S/W configuration shifted onto the antimeridian.
+        c0_ = Coordinate(180, 0)
+        c1_ = Coordinate(180, 1)
+        c2_ = Coordinate(-179, 0)
+        c3_ = Coordinate(-180, -1)
+        c4_ = Coordinate(179, 0)
+
+
+        arc1 = Arc(c0_, c1_)
+        arc2 = Arc(c0_, c2_)
+        arc3 = Arc(c0_, c3_)
+        arc4 = Arc(c0_, c4_)
+
+        self.assertAlmostEqual(arc1.angle(arc2), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc2.angle(arc3), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc3.angle(arc4), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc4.angle(arc1), math.pi / 2,
+                               msg="this should be pi/2")
+
+        self.assertAlmostEqual(arc1.angle(arc4), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc4.angle(arc3), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc3.angle(arc2), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc2.angle(arc1), -math.pi / 2,
+                               msg="this should be -pi/2")
+
+        # case of the north pole
+
+        c0_ = Coordinate(0, 90)
+        c1_ = Coordinate(0, 89)
+        c2_ = Coordinate(-90, 89)
+        c3_ = Coordinate(180, 89)
+        c4_ = Coordinate(90, 89)
+
+        arc1 = Arc(c0_, c1_)
+        arc2 = Arc(c0_, c2_)
+        arc3 = Arc(c0_, c3_)
+        arc4 = Arc(c0_, c4_)
+
+        self.assertAlmostEqual(arc1.angle(arc2), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc2.angle(arc3), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc3.angle(arc4), math.pi / 2,
+                               msg="this should be pi/2")
+        self.assertAlmostEqual(arc4.angle(arc1), math.pi / 2,
+                               msg="this should be pi/2")
+
+        self.assertAlmostEqual(arc1.angle(arc4), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc4.angle(arc3), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc3.angle(arc2), -math.pi / 2,
+                               msg="this should be -pi/2")
+        self.assertAlmostEqual(arc2.angle(arc1), -math.pi / 2,
+                               msg="this should be -pi/2")
+
+        # Angles between arcs that do not start at the pole itself.
+        self.assertAlmostEqual(Arc(c1_, c2_).angle(arc1), math.pi/4, 3,
+                               msg="this should be pi/4")
+                               
+        self.assertAlmostEqual(Arc(c4_, c3_).angle(arc4), -math.pi/4, 3,
+                               msg="this should be -pi/4")
+
+        self.assertAlmostEqual(Arc(c1_, c4_).angle(arc1), -math.pi/4, 3,
+                               msg="this should be -pi/4")
+
+
+    def test_intersects(self):
+        """Test if two arcs intersect.
+
+        The same six-arc configuration is checked three times: around the
+        origin, across the antimeridian, and around the north pole.
+        """
+        p0_ = Coordinate(0, 0)
+        p1_ = Coordinate(0, 1)
+        p2_ = Coordinate(1, 0)
+        p3_ = Coordinate(0, -1)
+        p4_ = Coordinate(-1, 0)
+        p5_ = Coordinate(1, 1)
+        p6_ = Coordinate(1, -1)
+
+        arc13 = Arc(p1_, p3_)
+        arc24 = Arc(p2_, p4_)
+
+        arc32 = Arc(p3_, p2_)
+        arc41 = Arc(p4_, p1_)
+
+        arc40 = Arc(p4_, p0_)
+        arc56 = Arc(p5_, p6_)
+
+        arc45 = Arc(p4_, p5_)
+        arc02 = Arc(p0_, p2_)
+
+        arc35 = Arc(p3_, p5_)
+
+        self.assertTrue(arc13.intersects(arc24))
+
+        self.assertFalse(arc32.intersects(arc41))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc45.intersects(arc02))
+
+        self.assertTrue(arc35.intersects(arc24))
+
+
+
+        # Same configuration shifted onto the antimeridian.
+        p0_ = Coordinate(180, 0)
+        p1_ = Coordinate(180, 1)
+        p2_ = Coordinate(-179, 0)
+        p3_ = Coordinate(-180, -1)
+        p4_ = Coordinate(179, 0)
+        p5_ = Coordinate(-179, 1)
+        p6_ = Coordinate(-179, -1)
+
+        arc13 = Arc(p1_, p3_)
+        arc24 = Arc(p2_, p4_)
+
+        arc32 = Arc(p3_, p2_)
+        arc41 = Arc(p4_, p1_)
+
+        arc40 = Arc(p4_, p0_)
+        arc56 = Arc(p5_, p6_)
+
+        arc45 = Arc(p4_, p5_)
+        arc02 = Arc(p0_, p2_)
+
+        arc35 = Arc(p3_, p5_)
+
+        self.assertTrue(arc13.intersects(arc24))
+
+        self.assertFalse(arc32.intersects(arc41))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc45.intersects(arc02))
+
+        self.assertTrue(arc35.intersects(arc24))
+
+        # case of the north pole
+
+        p0_ = Coordinate(0, 90)
+        p1_ = Coordinate(0, 89)
+        p2_ = Coordinate(90, 89)
+        p3_ = Coordinate(180, 89)
+        p4_ = Coordinate(-90, 89)    
+        p5_ = Coordinate(45, 89)
+        p6_ = Coordinate(135, 89)
+
+        arc13 = Arc(p1_, p3_)
+        arc24 = Arc(p2_, p4_)
+
+        arc32 = Arc(p3_, p2_)
+        arc41 = Arc(p4_, p1_)
+
+        arc40 = Arc(p4_, p0_)
+        arc56 = Arc(p5_, p6_)
+
+        arc45 = Arc(p4_, p5_)
+        arc02 = Arc(p0_, p2_)
+
+        arc35 = Arc(p3_, p5_)
+
+        self.assertTrue(arc13.intersects(arc24))
+
+        self.assertFalse(arc32.intersects(arc41))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc56.intersects(arc40))
+
+        self.assertFalse(arc45.intersects(arc02))
+
+        self.assertTrue(arc35.intersects(arc24))
+
+
+
+# Allow running this test module directly (python test_spherical_geometry.py).
+if __name__ == '__main__':
+    unittest.main()
+
+
diff --git a/test/test_swath.py b/test/test_swath.py
new file mode 100644
index 0000000..bc19e59
--- /dev/null
+++ b/test/test_swath.py
@@ -0,0 +1,62 @@
+from __future__ import with_statement
+
+import os
+import sys
+import unittest
+import warnings
+warnings.simplefilter("always")
+
+import numpy as np
+
+from pyresample import kd_tree, geometry
+
+
+def tmp(f):
+    """Decorator that tags *f* with a ``tmp = True`` attribute and returns it
+    unchanged (used to mark tests for selective runs)."""
+    f.tmp = True
+    return f
+
+class Test(unittest.TestCase):
+    """Self-mapping swath tests: Gaussian resampling of a swath onto its own
+    geometry must reproduce known regression checksums and, on py >= 2.6,
+    emit exactly one neighbour-radius warning.
+
+    NOTE(review): ``failIf`` is a deprecated alias of ``assertFalse``
+    (removed in Python 3.12) -- update when the patch is refreshed.
+    """
+    
+    # Class-level fixture: SSMIS swath samples loaded once for all tests.
+    # Column layout of the .npz payload: 0 = lon, 1 = lat, 2 = 37v Tb.
+    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), 
+                               'test_files', 'ssmis_swath.npz'))
+    data = np.load(filename)['data']
+    lons = data[:, 0].astype(np.float64)
+    lats = data[:, 1].astype(np.float64)
+    tb37v = data[:, 2].astype(np.float64)
+    
+    @tmp           
+    def test_self_map(self):
+        """Single-channel self-mapping: checksum of the result must match
+        the stored regression value (sum/100 to 1 decimal place)."""
+        swath_def = geometry.SwathDefinition(lons=self.lons, lats=self.lats)
+        # warnings.catch_warnings(record=True) is only available from py2.6.
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def, 
+                                         radius_of_influence=70000, sigmas=56500)
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, self.tb37v.copy(), swath_def, 
+                                             radius_of_influence=70000, sigmas=56500)
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+       
+        self.assertAlmostEqual(res.sum() / 100., 668848.082208, 1, 
+                                msg='Failed self mapping swath for 1 channel')
+                           
+    def test_self_map_multi(self):
+        """Three identical channels stacked column-wise: every channel of the
+        result must match the single-channel regression checksum."""
+        data = np.column_stack((self.tb37v, self.tb37v, self.tb37v))
+        swath_def = geometry.SwathDefinition(lons=self.lons, lats=self.lats)
+        # warnings.catch_warnings(record=True) is only available from py2.6.
+        if sys.version_info < (2, 6):
+            res = kd_tree.resample_gauss(swath_def, data, swath_def, 
+                                         radius_of_influence=70000, sigmas=[56500, 56500, 56500])
+        else:
+            with warnings.catch_warnings(record=True) as w:
+                res = kd_tree.resample_gauss(swath_def, data, swath_def, 
+                                             radius_of_influence=70000, sigmas=[56500, 56500, 56500])
+                self.failIf(len(w) != 1, 'Failed to create neighbour radius warning')
+                self.failIf(('Possible more' not in str(w[0].message)), 'Failed to create correct neighbour radius warning')
+                
+        self.assertAlmostEqual(res[:, 0].sum() / 100., 668848.082208, 1, 
+                                   msg='Failed self mapping swath multi for channel 1')
+        self.assertAlmostEqual(res[:, 1].sum() / 100., 668848.082208, 1, 
+                                   msg='Failed self mapping swath multi for channel 2')
+        self.assertAlmostEqual(res[:, 2].sum() / 100., 668848.082208, 1, 
+                                   msg='Failed self mapping swath multi for channel 3')            
+    
diff --git a/test/test_utils.py b/test/test_utils.py
new file mode 100644
index 0000000..40bd699
--- /dev/null
+++ b/test/test_utils.py
@@ -0,0 +1,53 @@
+import os
+import unittest
+
+from pyresample import utils
+
+
+def tmp(f):
+    """Decorator that tags *f* with a ``tmp = True`` attribute and returns it
+    unchanged (used to mark tests for selective runs)."""
+    f.tmp = True
+    return f
+
+class Test(unittest.TestCase):
+    """Area-file parsing tests: parse_area_file / load_area must produce
+    areas whose __str__ matches the expected fixed text, and unknown area
+    names must raise AreaNotFound.
+    """
+
+    def test_area_parser(self):
+        """Parse two named areas in one call and compare their string forms."""
+        ease_nh, ease_sh = utils.parse_area_file(os.path.join(os.path.dirname(__file__), 
+                                                              'test_files', 
+                                                              'areas.cfg'), 'ease_nh', 'ease_sh')
+        
+        # Expected __str__ output pinned verbatim (regression check).
+        nh_found = (ease_nh.__str__() =="""Area ID: ease_nh
+Name: Arctic EASE grid
+Projection ID: ease_nh
+Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
+Number of columns: 425
+Number of rows: 425
+Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
+        
+        sh_found = (ease_sh.__str__() =="""Area ID: ease_sh
+Name: Antarctic EASE grid
+Projection ID: ease_sh
+Projection: {'a': '6371228.0', 'lat_0': '-90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
+Number of columns: 425
+Number of rows: 425
+Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
+        
+        self.assertTrue(nh_found and sh_found, msg='Failed to parse areas correctly')
+    
+    def test_load_area(self):
+        """load_area returns a single area directly (not a list)."""
+        ease_nh = utils.load_area(os.path.join(os.path.dirname(__file__), 
+                                                              'test_files', 
+                                                              'areas.cfg'), 'ease_nh')
+        nh_found = (ease_nh.__str__() =="""Area ID: ease_nh
+Name: Arctic EASE grid
+Projection ID: ease_nh
+Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
+Number of columns: 425
+Number of rows: 425
+Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")
+        self.assertTrue(nh_found, msg='Failed to load area correctly') 
+        
+    def test_not_found_exception(self):
+        """Requesting a non-existent area name must raise AreaNotFound."""
+        self.assertRaises(utils.AreaNotFound, utils.parse_area_file, 
+                          os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg'), 
+                          'no_area')
+        

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/pyresample.git



More information about the Pkg-grass-devel mailing list