[python-geopandas] 01/03: Imported Upstream version 0.1.1

Johan Van de Wauw johanvdw-guest at moszumanska.debian.org
Wed Dec 17 22:35:31 UTC 2014


This is an automated email from the git hooks/post-receive script.

johanvdw-guest pushed a commit to branch master
in repository python-geopandas.

commit 7992eb448237e8df8750d70b12489a03f5dba90c
Author: Johan Van de Wauw <johan.vandewauw at gmail.com>
Date:   Wed Dec 17 23:26:56 2014 +0100

    Imported Upstream version 0.1.1
---
 .coveragerc                                        |   7 +
 .gitignore                                         |   8 +
 .requirements-2.6.txt                              |   2 +
 .travis.yml                                        |  42 ++
 CONTRIBUTING.md                                    |  54 +++
 LICENSE.txt                                        |  25 ++
 README.md                                          | 100 +++++
 debian/watch                                       |   2 +
 doc/Makefile                                       | 153 +++++++
 doc/make.bat                                       | 190 ++++++++
 doc/source/_static/boros_with_holes.png            | Bin 0 -> 148576 bytes
 doc/source/_static/holes.png                       | Bin 0 -> 155266 bytes
 doc/source/_static/nyc.png                         | Bin 0 -> 86376 bytes
 doc/source/_static/nyc_hull.png                    | Bin 0 -> 57169 bytes
 doc/source/_static/test.png                        | Bin 0 -> 8199 bytes
 doc/source/_static/test_buffer.png                 | Bin 0 -> 20327 bytes
 doc/source/about.rst                               |  14 +
 doc/source/conf.py                                 | 247 +++++++++++
 doc/source/index.rst                               |  39 ++
 doc/source/install.rst                             |  69 +++
 doc/source/user.rst                                | 488 +++++++++++++++++++++
 examples/choropleths.ipynb                         | 270 ++++++++++++
 examples/nyc.png                                   | Bin 0 -> 327243 bytes
 examples/nyc_boros.py                              |  48 ++
 examples/nyc_hull.png                              | Bin 0 -> 212981 bytes
 examples/test.png                                  | Bin 0 -> 32540 bytes
 examples/test_buffer.png                           | Bin 0 -> 74008 bytes
 geopandas/__init__.py                              |  16 +
 geopandas/base.py                                  | 400 +++++++++++++++++
 geopandas/geocode.py                               | 122 ++++++
 geopandas/geodataframe.py                          | 396 +++++++++++++++++
 geopandas/geoseries.py                             | 257 +++++++++++
 geopandas/io/__init__.py                           |   0
 geopandas/io/file.py                               |  22 +
 geopandas/io/sql.py                                |  46 ++
 geopandas/plotting.py                              | 304 +++++++++++++
 requirements.test.txt                              |   7 +
 requirements.txt                                   |   5 +
 setup.py                                           |  90 ++++
 tests/__init__.py                                  |   0
 tests/baseline_images/test_plotting/lines_plot.png | Bin 0 -> 10708 bytes
 .../baseline_images/test_plotting/points_plot.png  | Bin 0 -> 10814 bytes
 tests/baseline_images/test_plotting/poly_plot.png  | Bin 0 -> 11281 bytes
 tests/test_geocode.py                              |  91 ++++
 tests/test_geodataframe.py                         | 429 ++++++++++++++++++
 tests/test_geom_methods.py                         | 415 ++++++++++++++++++
 tests/test_geoseries.py                            | 143 ++++++
 tests/test_io.py                                   |  57 +++
 tests/test_plotting.py                             |  78 ++++
 tests/test_types.py                                |  89 ++++
 tests/util.py                                      | 213 +++++++++
 51 files changed, 4938 insertions(+)

diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..d6b8b6a
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,7 @@
+[report]
+exclude_lines =
+    pragma: no cover
+    def __repr__
+    raise AssertionError
+    raise NotImplementedError
+    if __name__ == .__main__.:
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3d5a618
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+.coverage
+*.pyc
+build
+dist
+doc/_build
+geopandas.egg-info
+geopandas/version.py
+*.py~
diff --git a/.requirements-2.6.txt b/.requirements-2.6.txt
new file mode 100644
index 0000000..062b6c5
--- /dev/null
+++ b/.requirements-2.6.txt
@@ -0,0 +1,2 @@
+unittest2
+ordereddict
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..6fd2689
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,42 @@
+language: python
+
+python:
+  - 2.6
+  - 2.7
+  - 3.2
+  - 3.3
+  - 3.4
+
+env:
+  - PANDAS_VERSION=v0.13.1
+  - PANDAS_VERSION=v0.14.0
+  - PANDAS_VERSION=master
+
+matrix:
+  exclude:
+    - python: 2.6
+      env: PANDAS_VERSION=v0.14.0
+
+before_install:
+  - sudo add-apt-repository -y ppa:ubuntugis/ppa
+  - sudo apt-get update
+  - sudo apt-get install gdal-bin libgdal-dev
+#  - sudo -u postgres psql -c "drop database if exists test_geopandas"
+#  - sudo -u postgres psql -c "create database test_geopandas"
+#  - sudo -u postgres psql -c "create extension postgis" -d test_geopandas
+
+install:
+  - pip install -r requirements.txt --use-mirrors
+  - pip install -r requirements.test.txt --use-mirrors
+  - if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install -r .requirements-2.6.txt --use-mirrors; fi
+  - git clone git://github.com/pydata/pandas.git
+  - cd pandas
+  - git checkout $PANDAS_VERSION
+  - python setup.py install
+  - cd ..
+
+script:
+  - py.test tests --cov geopandas -v --cov-report term-missing
+
+after_success:
+  - coveralls
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..d4330ca
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,54 @@
+Guidelines
+==========
+
+Contributions to GeoPandas are very welcome.  They are likely to
+be accepted more quickly if they follow these guidelines.
+
+At this stage of GeoPandas development, the priorities are to define a
+simple, usable, and stable API and to have clean, maintainable,
+readable code.  Performance matters, but not at the expense of those
+goals.
+
+In general, GeoPandas follows the conventions of the pandas project
+where applicable.  Please read [pandas contributing
+guidelines](https://github.com/pydata/pandas/blob/master/CONTRIBUTING.md).
+
+In particular, when submitting a pull request:
+
+- All existing tests should pass.  Please make sure that the test
+  suite passes, both locally and on
+  [Travis CI](https://travis-ci.org/geopandas/geopandas).  Status on
+  Travis will be visible on a pull request.  If you want to enable
+  Travis CI on your own fork, please read the pandas guidelines link
+  above or the
+  [getting started docs](http://about.travis-ci.org/docs/user/getting-started/).
+
+- New functionality should include tests.  Please write reasonable
+  tests for your code and make sure that they pass on your pull request.
+
+- Classes, methods, functions, etc. should have docstrings.  The first
+  line of a docstring should be a standalone summary.  Parameters and
+  return values should be documented explicitly.
+
+Improving the documentation and testing for code already in GeoPandas
+is a great way to get started if you'd like to make a contribution.
+
+Style
+-----
+
+- GeoPandas supports python 2 (2.6+) and python 3 (3.2+) with a single
+  code base.  Use modern python idioms that are compatible with both
+  major versions when possible, and use the
+  [six](https://pythonhosted.org/six) library where helpful to smooth
+  over the differences.  Use `from __future__ import` statements where
+  appropriate.  Test code locally in both python 2 and python 3 when
+  possible (all supported versions will be automatically tested on
+  Travis CI).  See the sketch at the end of this document.
+
+- Follow PEP 8 when possible.
+
+- Imports should be grouped with standard library imports first,
+  3rd-party libraries next, and geopandas imports third.  Within each
+  grouping, imports should be alphabetized.  Always use absolute
+  imports when possible, and explicit relative imports for local
+  imports when necessary in tests.
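+
+A minimal sketch of the python 2/3 compatibility idioms mentioned above
+(the function and module names here are purely illustrative):
+
+    from __future__ import print_function
+
+    import six
+
+    def show(mapping):
+        # six.iteritems works on both python 2 and python 3 dicts
+        for key, value in six.iteritems(mapping):
+            print("{0}: {1}".format(key, value))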
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..15d5aed
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,25 @@
+Copyright (c) 2013, GeoPandas developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+ * Neither the name of Enthought, Inc. nor the names of its contributors may
+   be used to endorse or promote products derived from this software without
+   specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..6c2bc82
--- /dev/null
+++ b/README.md
@@ -0,0 +1,100 @@
+GeoPandas [![build status](https://secure.travis-ci.org/geopandas/geopandas.png?branch=master)](https://travis-ci.org/geopandas/geopandas) [![Coverage Status](https://coveralls.io/repos/geopandas/geopandas/badge.png)](https://coveralls.io/r/geopandas/geopandas)
+=========
+
+Python tools for geographic data
+
+Introduction
+------------
+
+GeoPandas is a project to add support for geographic data to
+[pandas](http://pandas.pydata.org) objects.  It currently implements
+`GeoSeries` and `GeoDataFrame` types which are subclasses of
+`pandas.Series` and `pandas.DataFrame` respectively.  GeoPandas
+objects can act on [shapely](http://toblerity.github.io/shapely)
+geometry objects and perform geometric operations.
+
+GeoPandas geometry operations are cartesian.  The coordinate reference
+system (crs) can be stored as an attribute on an object, and is
+automatically set when loading from a file.  Objects may be
+transformed to new coordinate systems with the `to_crs()` method.
+There is currently no enforcement of like coordinates for operations,
+but that may change in the future.
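+
+For example, reprojecting a series read from a file might look like this
+(a minimal sketch; the file name and EPSG code are only illustrative):
+
+    >>> boros = GeoSeries.from_file('nybb.shp')  # crs is set from the file
+    >>> boros_wgs84 = boros.to_crs(epsg=4326)    # reproject to WGS84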
+
+Examples
+--------
+
+    >>> p1 = Polygon([(0, 0), (1, 0), (1, 1)])
+    >>> p2 = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
+    >>> p3 = Polygon([(2, 0), (3, 0), (3, 1), (2, 1)])
+    >>> g = GeoSeries([p1, p2, p3])
+    >>> g
+    0    POLYGON ((0.0000000000000000 0.000000000000000...
+    1    POLYGON ((0.0000000000000000 0.000000000000000...
+    2    POLYGON ((2.0000000000000000 0.000000000000000...
+    dtype: object
+
+![Example 1](examples/test.png)
+
+Some geographic operations return normal pandas objects.  The `area` property of a `GeoSeries` will return a `pandas.Series` containing the area of each item in the `GeoSeries`:
+
+    >>> print g.area
+    0    0.5
+    1    1.0
+    2    1.0
+    dtype: float64
+
+Other operations return GeoPandas objects:
+
+    >>> g.buffer(0.5)
+    Out[15]:
+    0    POLYGON ((-0.3535533905932737 0.35355339059327...
+    1    POLYGON ((-0.5000000000000000 0.00000000000000...
+    2    POLYGON ((1.5000000000000000 0.000000000000000...
+    dtype: object
+
+![Example 2](examples/test_buffer.png)
+
+GeoPandas objects also know how to plot themselves.  GeoPandas uses [descartes](https://pypi.python.org/pypi/descartes) to generate a [matplotlib](http://matplotlib.org) plot. To generate a plot of our GeoSeries, use:
+
+    >>> g.plot()
+
+GeoPandas also implements alternate constructors that can read any data format recognized by [fiona](http://toblerity.github.io/fiona).  To read a [file containing the boroughs of New York City](http://www.nyc.gov/html/dcp/download/bytes/nybb_14aav.zip):
+
+    >>> boros = GeoDataFrame.from_file('nybb.shp')
+    >>> boros.set_index('BoroCode', inplace=True)
+    >>> boros.sort()
+    >>> boros
+                   BoroName    Shape_Area     Shape_Leng  \
+    BoroCode
+    1             Manhattan  6.364422e+08  358532.956418
+    2                 Bronx  1.186804e+09  464517.890553
+    3              Brooklyn  1.959432e+09  726568.946340
+    4                Queens  3.049947e+09  861038.479299
+    5         Staten Island  1.623853e+09  330385.036974
+    
+                                                       geometry
+    BoroCode
+    1         (POLYGON ((981219.0557861328125000 188655.3157...
+    2         (POLYGON ((1012821.8057861328125000 229228.264...
+    3         (POLYGON ((1021176.4790039062500000 151374.796...
+    4         (POLYGON ((1029606.0765991210937500 156073.814...
+    5         (POLYGON ((970217.0223999023437500 145643.3322...
+
+![New York City boroughs](examples/nyc.png)
+ 
+    >>> boros['geometry'].convex_hull
+    0    POLYGON ((915517.6877458114176989 120121.88125...
+    1    POLYGON ((1000721.5317993164062500 136681.7761...
+    2    POLYGON ((988872.8212280273437500 146772.03179...
+    3    POLYGON ((977855.4451904296875000 188082.32238...
+    4    POLYGON ((1017949.9776000976562500 225426.8845...
+    dtype: object
+
+![Convex hulls of New York City boroughs](examples/nyc_hull.png)
+
+TODO
+----
+
+- Finish implementing and testing pandas methods on GeoPandas objects
+- The current GeoDataFrame does not do very much.
+- spatial joins, grouping and more...
diff --git a/debian/watch b/debian/watch
new file mode 100644
index 0000000..60bd43d
--- /dev/null
+++ b/debian/watch
@@ -0,0 +1,2 @@
+version=3
+https://github.com/geopandas/geopandas/tags .*/v(\d.*)\.(?:tgz|tbz2|txz|tar\.(?:gz|bz2|xz))
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..f507382
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/GeoPandas.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/GeoPandas.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/GeoPandas"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/GeoPandas"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000..c1a44bd
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
+set I18NSPHINXOPTS=%SPHINXOPTS% source
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  texinfo    to make Texinfo files
+	echo.  gettext    to make PO message catalogs
+	echo.  changes    to make an overview over all changed/added/deprecated items
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\GeoPandas.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\GeoPandas.qhc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "texinfo" (
+	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+	goto end
+)
+
+if "%1" == "gettext" (
+	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+:end
diff --git a/doc/source/_static/boros_with_holes.png b/doc/source/_static/boros_with_holes.png
new file mode 100644
index 0000000..8dc2ad6
Binary files /dev/null and b/doc/source/_static/boros_with_holes.png differ
diff --git a/doc/source/_static/holes.png b/doc/source/_static/holes.png
new file mode 100644
index 0000000..f93b8ba
Binary files /dev/null and b/doc/source/_static/holes.png differ
diff --git a/doc/source/_static/nyc.png b/doc/source/_static/nyc.png
new file mode 100644
index 0000000..2ea881b
Binary files /dev/null and b/doc/source/_static/nyc.png differ
diff --git a/doc/source/_static/nyc_hull.png b/doc/source/_static/nyc_hull.png
new file mode 100644
index 0000000..668713c
Binary files /dev/null and b/doc/source/_static/nyc_hull.png differ
diff --git a/doc/source/_static/test.png b/doc/source/_static/test.png
new file mode 100644
index 0000000..818dbaa
Binary files /dev/null and b/doc/source/_static/test.png differ
diff --git a/doc/source/_static/test_buffer.png b/doc/source/_static/test_buffer.png
new file mode 100644
index 0000000..506563c
Binary files /dev/null and b/doc/source/_static/test_buffer.png differ
diff --git a/doc/source/about.rst b/doc/source/about.rst
new file mode 100644
index 0000000..1efef84
--- /dev/null
+++ b/doc/source/about.rst
@@ -0,0 +1,14 @@
+About
+=====
+
+Known issues
+------------
+
+- The ``geopy`` API has changed significantly over recent versions.
+  ``geopy 0.99`` is currently supported (though it is known to fail
+  with Python 3.2, it should work with other supported python
+  versions).
+
+.. toctree::
+   :maxdepth: 2
+
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000..da2f27c
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,247 @@
+# -*- coding: utf-8 -*-
+#
+# GeoPandas documentation build configuration file, created by
+# sphinx-quickstart on Tue Oct 15 08:08:14 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'GeoPandas'
+copyright = u'2013-2014, GeoPandas developers'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+d = {}
+try:
+    execfile(os.path.join('..', '..', 'geopandas', 'version.py'), d)
+    version = release = d['version']
+except:
+    # FIXME: This shouldn't be hardwired, but should be set one place only
+    version = release = '0.1.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+if os.environ.get('READTHEDOCS', None) == 'True':
+    html_theme = 'default'
+else:
+    html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'GeoPandasdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'GeoPandas.tex', u'GeoPandas Documentation',
+   u'Kelsey Jordahl', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'geopandas', u'GeoPandas Documentation',
+     [u'Kelsey Jordahl'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'GeoPandas', u'GeoPandas Documentation',
+   u'Kelsey Jordahl', 'GeoPandas', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000..6ea251b
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,39 @@
+GeoPandas |version|
+===================
+
+GeoPandas is an open source project to make working with geospatial
+data in python easier.  GeoPandas extends the datatypes used by
+`pandas`_ to allow spatial operations on geometric types.  Geometric
+operations are performed by `shapely`_.  Geopandas further depends on
+`fiona`_ for file access and `descartes`_ and `matplotlib`_ for plotting.
+
+.. _pandas: http://pandas.pydata.org
+.. _shapely: http://toblerity.github.io/shapely
+.. _fiona: http://toblerity.github.io/fiona
+.. _Descartes: https://pypi.python.org/pypi/descartes
+.. _matplotlib: http://matplotlib.org
+
+Description
+-----------
+
+The goal of GeoPandas is to make working with geospatial data in
+python easier.  It combines the capabilities of pandas and shapely,
+providing geospatial operations in pandas and a high-level interface
+to multiple geometries to shapely.  GeoPandas enables you to easily do
+operations in python that would otherwise require a spatial database
+such as PostGIS.
+
+.. toctree::
+   :maxdepth: 2
+
+   Installation <install>
+   User Guide <user>
+   About <about>
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/doc/source/install.rst b/doc/source/install.rst
new file mode 100644
index 0000000..9b3ddcc
--- /dev/null
+++ b/doc/source/install.rst
@@ -0,0 +1,69 @@
+Installation
+============
+
+The released version of GeoPandas is 0.1.  To install the released
+version, use ``pip install geopandas``.
+
+You may install the latest development version by cloning the
+`GitHub`_ repository and using the setup script::
+
+    git clone https://github.com/geopandas/geopandas.git
+    cd geopandas
+    python setup.py install
+
+It is also possible to install the latest development version
+available on PyPI with `pip` by adding the ``--pre`` flag for pip 1.4
+and later, or to use `pip` to install directly from the GitHub
+repository with::
+
+    pip install git+git://github.com/geopandas/geopandas.git
+
+
+Dependencies
+------------
+
+Supports Python versions 2.6, 2.7, and 3.2+.
+
+- `numpy`_
+- `pandas`_ (version 0.13 or later)
+- `shapely`_
+- `fiona`_
+- `six`_
+- `geopy`_ 0.99 (optional; for geocoding)
+- `psycopg2`_ (optional; for PostGIS connection)
+
+For plotting, these additional packages may be used:
+
+- `matplotlib`_
+- `descartes`_
+- `pysal`_
+
+Testing
+-------
+
+To run the current set of tests from the source directory, run::
+
+    nosetests -v
+
+from a command line.
+
+Tests are automatically run on all commits on the GitHub repository,
+including pull requests, on `Travis CI`_.
+
+.. _PyPI: https://pypi.python.org/pypi/geopandas
+.. _GitHub: https://github.com/geopandas/geopandas
+.. _numpy: http://www.numpy.org
+.. _pandas: http://pandas.pydata.org
+.. _shapely: http://toblerity.github.io/shapely
+.. _fiona: http://toblerity.github.io/fiona
+.. _Descartes: https://pypi.python.org/pypi/descartes
+.. _matplotlib: http://matplotlib.org
+.. _geopy: https://github.com/geopy/geopy
+.. _six: https://pythonhosted.org/six
+.. _psycopg2: https://pypi.python.org/pypi/psycopg2
+.. _pysal: http://pysal.org
+.. _Travis CI: https://travis-ci.org/geopandas/geopandas
+
+.. toctree::
+   :maxdepth: 2
+
diff --git a/doc/source/user.rst b/doc/source/user.rst
new file mode 100644
index 0000000..e5c27e2
--- /dev/null
+++ b/doc/source/user.rst
@@ -0,0 +1,488 @@
+GeoPandas User Guide
+====================
+
+GeoPandas implements two main data structures, a ``GeoSeries`` and a
+``GeoDataFrame``.  These are subclasses of pandas ``Series`` and
+``DataFrame``, respectively.
+
+GeoSeries
+---------
+
+A ``GeoSeries`` contains a sequence of geometries.
+
+The ``GeoSeries`` class implements nearly all of the attributes and
+methods of Shapely objects.  When applied to a ``GeoSeries``, they
+will apply elementwise to all geometries in the series.  Binary
+operations can be applied between two ``GeoSeries``, in which case the
+operation is carried out elementwise.  The two series will be aligned
+by matching indices.  Binary operations can also be applied to a
+single geometry, in which case the operation is carried out for each
+element of the series with that geometry.  In either case, a
+``Series`` or a ``GeoSeries`` will be returned, as appropriate.
+
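+For example, a binary operation between two ``GeoSeries`` aligns on
+index and applies elementwise (a minimal sketch; the points are only
+illustrative):
+
+.. sourcecode:: python
+
+    >>> from shapely.geometry import Point
+    >>> s1 = GeoSeries([Point(0, 0), Point(1, 1)])
+    >>> s2 = GeoSeries([Point(0, 1), Point(1, 0)])
+    >>> s1.distance(s2)           # elementwise between the two series
+    >>> s1.distance(Point(0, 0))  # one geometry against every element
+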
+The following Shapely methods and attributes are available on
+``GeoSeries`` objects:
+
+.. attribute:: GeoSeries.area
+
+  Returns a ``Series`` containing the area of each geometry in the ``GeoSeries``.
+
+.. attribute:: GeoSeries.bounds
+
+  Returns a ``DataFrame`` with columns ``minx``, ``miny``, ``maxx``,
+  ``maxy`` values containing the bounds for each geometry.
+  (see ``GeoSeries.total_bounds`` for the limits of the entire series).
+
+.. attribute:: GeoSeries.length
+
+  Returns a ``Series`` containing the length of each geometry.
+
+.. attribute:: GeoSeries.geom_type
+
+  Returns a ``Series`` of strings specifying the `Geometry Type` of
+  each object.
+
+.. method:: GeoSeries.distance(other)
+
+  Returns a ``Series`` containing the minimum distance to the `other`
+  ``GeoSeries`` (elementwise) or geometric object.
+
+.. method:: GeoSeries.representative_point()
+
+  Returns a ``GeoSeries`` of (cheaply computed) points that are
+  guaranteed to be within each geometry.
+
+.. attribute:: GeoSeries.exterior
+
+  Returns a ``GeoSeries`` of LinearRings representing the outer
+  boundary of each polygon in the GeoSeries.  (Applies to GeoSeries
+  containing only Polygons).
+
+.. attribute:: GeoSeries.interiors
+
+  Returns a ``GeoSeries`` of InteriorRingSequences representing the
+  inner rings of each polygon in the GeoSeries.  (Applies to GeoSeries
+  containing only Polygons).
+
+`Unary Predicates`
+
+.. attribute:: GeoSeries.is_empty
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` for
+  empty geometries.
+
+.. attribute:: GeoSeries.is_ring
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` for
+  features that are closed.
+
+.. attribute:: GeoSeries.is_simple
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` for
+  geometries that do not cross themselves (meaningful only for
+  `LineStrings` and `LinearRings`).
+
+.. attribute:: GeoSeries.is_valid
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` for
+  geometries that are valid.
+
+`Binary Predicates`
+
+.. method:: GeoSeries.almost_equals(other[, decimal=6])
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  each object is approximately equal to the `other` at all
+  points to specified `decimal` place precision.  (See also :meth:`equals`)
+
+.. method:: GeoSeries.contains(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  each object's `interior` contains the `boundary` and
+  `interior` of the other object and their boundaries do not touch at all.
+
+.. method:: GeoSeries.crosses(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  the `interior` of each object intersects the `interior` of
+  the other but does not contain it, and the dimension of the intersection is
+  less than the dimension of the one or the other.
+
+.. method:: GeoSeries.disjoint(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  the `boundary` and `interior` of each object do not
+  intersect at all with those of the other.
+
+.. method:: GeoSeries.equals(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  the set-theoretic `boundary`, `interior`, and `exterior`
+  of each object coincide with those of the other.
+
+.. method:: GeoSeries.intersects(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  the `boundary` and `interior` of each object intersect in
+  any way with those of the other.
+
+.. method:: GeoSeries.touches(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  the objects have at least one point in common and their
+  interiors do not intersect with any part of the other.
+
+.. method:: GeoSeries.within(other)
+
+  Returns a ``Series`` of ``dtype('bool')`` with value ``True`` if
+  each object's `boundary` and `interior` intersect only
+  with the `interior` of the other (not its `boundary` or `exterior`).
+  (Inverse of :meth:`contains`)
+
+`Set-theoretic Methods`
+
+.. attribute:: GeoSeries.boundary
+
+  Returns a ``GeoSeries`` of lower dimensional objects representing
+  each geometry's set-theoretic `boundary`.
+
+.. attribute:: GeoSeries.centroid
+
+  Returns a ``GeoSeries`` of points for each geometric centroid.
+
+.. method:: GeoSeries.difference(other)
+
+  Returns a ``GeoSeries`` of the points in each geometry that
+  are not in the *other* object.
+
+.. method:: GeoSeries.intersection(other)
+
+  Returns a ``GeoSeries`` of the intersection of each object with the `other`
+  geometric object.
+
+.. method:: GeoSeries.symmetric_difference(other)
+
+  Returns a ``GeoSeries`` of the points in each object not in the `other`
+  geometric object, and the points in the `other` not in this object.
+
+.. method:: GeoSeries.union(other)
+
+  Returns a ``GeoSeries`` of the union of points from each object and the
+  `other` geometric object.
+
+`Constructive Methods`
+
+.. method:: GeoSeries.buffer(distance, resolution=16)
+
+  Returns a ``GeoSeries`` of geometries representing all points within a given `distance`
+  of each geometric object.
+
+.. attribute:: GeoSeries.convex_hull
+
+  Returns a ``GeoSeries`` of geometries representing the smallest
+  convex `Polygon` containing all the points in each object unless the
+  number of points in the object is less than three. For two points,
+  the convex hull collapses to a `LineString`; for 1, a `Point`.
+
+.. attribute:: GeoSeries.envelope
+
+  Returns a ``GeoSeries`` of geometries representing the point or
+  smallest rectangular polygon (with sides parallel to the coordinate
+  axes) that contains each object.
+
+.. method:: GeoSeries.simplify(tolerance, preserve_topology=True)
+
+  Returns a ``GeoSeries`` containing a simplified representation of
+  each object.
+
+`Affine transformations`
+
+.. method:: GeoSeries.rotate(self, angle, origin='center', use_radians=False)
+
+  Rotate the coordinates of the GeoSeries.
+
+.. method:: GeoSeries.scale(self, xfact=1.0, yfact=1.0, zfact=1.0, origin='center')
+
+  Scale the geometries of the GeoSeries along each (x, y, z) dimension.
+
+.. method:: GeoSeries.skew(self, angle, origin='center', use_radians=False)
+
+  Shear/Skew the geometries of the GeoSeries by angles along x and y dimensions.
+
+.. method:: GeoSeries.translate(self, xoff=0.0, yoff=0.0, zoff=0.0)
+
+  Shift the coordinates of the GeoSeries.
+
+`Aggregating methods`
+
+.. attribute:: GeoSeries.unary_union
+
+  Return a geometry containing the union of all geometries in the ``GeoSeries``.
+
+Additionally, the following methods are implemented:
+
+.. method:: GeoSeries.from_file()
+
+  Load a ``GeoSeries`` from a file from any format recognized by
+  `fiona`_.
+
+.. method:: GeoSeries.to_crs(crs=None, epsg=None)
+
+  Transform all geometries in a GeoSeries to a different coordinate
+  reference system.  The ``crs`` attribute on the current GeoSeries
+  must be set.  Either ``crs`` in dictionary form or an EPSG code may
+  be specified for output.
+
+  This method will transform all points in all objects.  It has no
+  notion of projecting entire geometries.  All segments joining points
+  are assumed to be lines in the current projection, not geodesics.
+  Objects crossing the dateline (or other projection boundary) will
+  have undesirable behavior.
+
+.. method:: GeoSeries.plot(colormap='Set1', alpha=0.5, axes=None)
+
+  Generate a plot of the geometries in the ``GeoSeries``.
+  ``colormap`` can be any recognized by matplotlib, but discrete
+  colormaps such as ``Accent``, ``Dark2``, ``Paired``, ``Pastel1``,
+  ``Pastel2``, ``Set1``, ``Set2``, or ``Set3`` are recommended.
+  Wraps the ``plot_series()`` function.
+
+.. attribute:: GeoSeries.total_bounds
+
+  Returns a tuple containing ``minx``, ``miny``, ``maxx``,
+  ``maxy`` values for the bounds of the series as a whole.
+  See ``GeoSeries.bounds`` for the bounds of the geometries contained
+  in the series.
+
+
+Methods of pandas ``Series`` objects are also available, although not
+all are applicable to geometric objects and some may return a
+``Series`` rather than a ``GeoSeries`` result.  The methods
+``copy()``, ``align()``, ``isnull()`` and ``fillna()`` have been
+implemented specifically for ``GeoSeries`` and are expected to work
+correctly.
+
+GeoDataFrame
+------------
+
+A ``GeoDataFrame`` is a tabular data structure that contains a column
+called ``geometry`` which contains a ``GeoSeries``.
+
+Currently, the following methods are implemented for a ``GeoDataFrame``:
+
+.. classmethod:: GeoDataFrame.from_file(filename, **kwargs)
+
+  Load a ``GeoDataFrame`` from a file from any format recognized by
+  `fiona`_.  See ``read_file()``.
+
+.. classmethod:: GeoDataFrame.from_postgis(sql, con, geom_col='geom', crs=None, index_col=None, coerce_float=True, params=None)
+
+  Load a ``GeoDataFrame`` from a file from a PostGIS database.
+  See ``read_postgis()``.
+
+.. method:: GeoDataFrame.to_crs(crs=None, epsg=None, inplace=False)
+
+  Transform all geometries in the ``geometry`` column of a
+  GeoDataFrame to a different coordinate reference system.  The
+  ``crs`` attribute on the current GeoSeries must be set.  Either
+  ``crs`` in dictionary form or an EPSG code may be specified for
+  output.  If ``inplace=True`` the geometry column will be replaced in
+  the current dataframe, otherwise a new GeoDataFrame will be returned.
+
+  This method will transform all points in all objects.  It has no
+  notion of projecting entire geometries.  All segments joining points
+  are assumed to be lines in the current projection, not geodesics.
+  Objects crossing the dateline (or other projection boundary) will
+  have undesirable behavior.
+
+.. method:: GeoDataFrame.to_file(filename, driver="ESRI Shapefile", **kwargs)
+
+  Write the ``GeoDataFrame`` to a file.  By default, an ESRI shapefile
+  is written, but any OGR data source supported by Fiona can be
+  written.  ``**kwargs`` are passed to the Fiona driver.
+
+.. method:: GeoDataFrame.to_json(**kwargs)
+
+  Returns a GeoJSON representation of the ``GeoDataFrame`` as a string.
+
+.. method:: GeoDataFrame.plot(column=None, colormap=None, alpha=0.5, categorical=False, legend=False, axes=None)
+
+  Generate a plot of the geometries in the ``GeoDataFrame``.  If the
+  ``column`` parameter is given, colors plot according to values in
+  that column, otherwise calls ``GeoSeries.plot()`` on the
+  ``geometry`` column.  Wraps the ``plot_dataframe()`` function.
+
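+A short sketch of column-based coloring (``df`` and the column name are
+only illustrative):
+
+.. sourcecode:: python
+
+    >>> df.plot(column='values', colormap='OrRd', legend=True)
+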
+All pandas ``DataFrame`` methods are also available, although they may
+not operate in a meaningful way on the ``geometry`` column and may not
+return a ``GeoDataFrame`` result even when it would be appropriate to
+do so.
+
+Geopandas functions
+-------------------
+
+.. function:: geopandas.geocode.geocode(strings, provider='googlev3', **kwargs)
+
+  Geocode a list of strings and return a GeoDataFrame containing the
+  resulting points in its ``geometry`` column.  Available values for
+  ``provider`` include ``googlev3``, ``bing``, ``google``, ``yahoo``,
+  ``mapquest``, and ``openmapquest``.  ``**kwargs`` will be passed as
+  parameters to the appropriate geocoder.
+
+  Requires `geopy`_.  Please consult the Terms of Service for the
+  chosen provider.
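+
+  A minimal sketch (the addresses are only illustrative; requires
+  `geopy`_ and network access):
+
+  .. sourcecode:: python
+
+      >>> from geopandas.geocode import geocode
+      >>> geocode(['boston, ma', '1600 pennsylvania ave. washington, dc'])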
+
+Examples
+--------
+
+.. sourcecode:: python
+
+    >>> p1 = Polygon([(0, 0), (1, 0), (1, 1)])
+    >>> p2 = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
+    >>> p3 = Polygon([(2, 0), (3, 0), (3, 1), (2, 1)])
+    >>> g = GeoSeries([p1, p2, p3])
+    >>> g
+    0    POLYGON ((0.0000000000000000 0.000000000000000...
+    1    POLYGON ((0.0000000000000000 0.000000000000000...
+    2    POLYGON ((2.0000000000000000 0.000000000000000...
+    dtype: object
+
+.. image:: _static/test.png
+
+Some geographic operations return normal pandas objects.  The ``area`` property of a ``GeoSeries`` will return a ``pandas.Series`` containing the area of each item in the ``GeoSeries``:
+
+.. sourcecode:: python
+
+    >>> print g.area
+    0    0.5
+    1    1.0
+    2    1.0
+    dtype: float64
+
+Other operations return GeoPandas objects:
+
+.. sourcecode:: python
+
+    >>> g.buffer(0.5)
+    Out[15]:
+    0    POLYGON ((-0.3535533905932737 0.35355339059327...
+    1    POLYGON ((-0.5000000000000000 0.00000000000000...
+    2    POLYGON ((1.5000000000000000 0.000000000000000...
+    dtype: object
+
+.. image:: _static/test_buffer.png
+
+GeoPandas objects also know how to plot themselves.  GeoPandas uses `descartes`_ to generate a `matplotlib`_ plot. To generate a plot of our GeoSeries, use:
+
+.. sourcecode:: python
+
+    >>> g.plot()
+
+GeoPandas also implements alternate constructors that can read any data format recognized by `fiona`_.  To read a `file containing the boroughs of New York City`_:
+
+.. sourcecode:: python
+
+    >>> boros = GeoDataFrame.from_file('nybb.shp')
+    >>> boros.set_index('BoroCode', inplace=True)
+    >>> boros.sort()
+    >>> boros
+                   BoroName    Shape_Area     Shape_Leng  \
+    BoroCode
+    1             Manhattan  6.364422e+08  358532.956418
+    2                 Bronx  1.186804e+09  464517.890553
+    3              Brooklyn  1.959432e+09  726568.946340
+    4                Queens  3.049947e+09  861038.479299
+    5         Staten Island  1.623853e+09  330385.036974
+    
+                                                       geometry
+    BoroCode
+    1         (POLYGON ((981219.0557861328125000 188655.3157...
+    2         (POLYGON ((1012821.8057861328125000 229228.264...
+    3         (POLYGON ((1021176.4790039062500000 151374.796...
+    4         (POLYGON ((1029606.0765991210937500 156073.814...
+    5         (POLYGON ((970217.0223999023437500 145643.3322...
+
+.. image:: _static/nyc.png
+ 
+.. sourcecode:: python
+
+    >>> boros['geometry'].convex_hull
+    0    POLYGON ((915517.6877458114176989 120121.88125...
+    1    POLYGON ((1000721.5317993164062500 136681.7761...
+    2    POLYGON ((988872.8212280273437500 146772.03179...
+    3    POLYGON ((977855.4451904296875000 188082.32238...
+    4    POLYGON ((1017949.9776000976562500 225426.8845...
+    dtype: object
+
+.. image:: _static/nyc_hull.png
+
+To demonstrate a more complex operation, we'll generate a
+``GeoSeries`` containing 2000 random points:
+
+.. sourcecode:: python
+
+    >>> import numpy as np
+    >>> from shapely.geometry import Point
+    >>> xmin, xmax, ymin, ymax = 900000, 1080000, 120000, 280000
+    >>> xc = (xmax - xmin) * np.random.random(2000) + xmin
+    >>> yc = (ymax - ymin) * np.random.random(2000) + ymin
+    >>> pts = GeoSeries([Point(x, y) for x, y in zip(xc, yc)])
+
+Now draw a circle with fixed radius around each point:
+
+.. sourcecode:: python
+
+    >>> circles = pts.buffer(2000)
+
+We can collapse these circles into a single shapely MultiPolygon
+geometry with
+
+.. sourcecode:: python
+
+    >>> mp = circles.unary_union
+
+To extract the part of this geometry contained in each borough, we can
+just use:
+
+.. sourcecode:: python
+
+    >>> holes = boros['geometry'].intersection(mp)
+
+.. image:: _static/holes.png
+ 
+and to get the area outside of the holes:
+
+.. sourcecode:: python
+
+    >>> boros_with_holes = boros['geometry'].difference(mp)
+
+.. image:: _static/boros_with_holes.png
+ 
+Note that this can be simplified a bit, since ``geometry`` is
+available as an attribute on a ``GeoDataFrame``, and the
+``intersection`` and ``difference`` methods are implemented with the
+"&" and "-" operators, respectively.  For example, the latter could
+have been expressed simply as ``boros.geometry - mp``.
+
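+A minimal sketch of the operator form, using the objects defined above:
+
+.. sourcecode:: python
+
+    >>> holes = boros.geometry & mp
+    >>> boros_with_holes = boros.geometry - mp
+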
+It's easy to do things like calculate the fractional area in each
+borough that are in the holes:
+
+.. sourcecode:: python
+
+    >>> holes.area / boros.geometry.area
+    BoroCode
+    1           0.602015
+    2           0.523457
+    3           0.585901
+    4           0.577020
+    5           0.559507
+    dtype: float64
+
+.. _Descartes: https://pypi.python.org/pypi/descartes
+.. _matplotlib: http://matplotlib.org
+.. _fiona: http://toblerity.github.io/fiona
+.. _geopy: https://github.com/geopy/geopy
+.. _file containing the boroughs of New York City: http://www.nyc.gov/html/dcp/download/bytes/nybb_14aav.zip
+
+.. toctree::
+   :maxdepth: 2
+
+
diff --git a/examples/choropleths.ipynb b/examples/choropleths.ipynb
new file mode 100644
index 0000000..524c905
--- /dev/null
+++ b/examples/choropleths.ipynb
@@ -0,0 +1,270 @@
+{
+ "metadata": {
+  "name": ""
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Protoyping choropleth classification schemes from PySAL for use with GeoPandas\n",
+      "\n",
+      "\n",
+      "Under the hood, if PySAL is not available or if an unsupported classification scheme is specified, the choropleth classification would fall back to GeoPandas defaults."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import geopandas as gp"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 9
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# we use PySAL for loading a test shapefile\n",
+      "# replace this cell if you have a local shapefile and want to use GeoPandas readers\n",
+      "import pysal as ps \n",
+      "pth = ps.examples.get_path(\"columbus.shp\")\n",
+      "tracts = gp.GeoDataFrame.from_file(pth)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 10
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='QUANTILES', k=3, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 11,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10cedbed0>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XlclNX+wPHPrMzAsG+yCQoq7qm4ZSaZ+5ZpmVpaYmbd\ntDSz9VdpN9Oy3XZzazNbryZGLoWappi74IIKyiIKsg7DMOvvD5RUthmYEZfzfl0uzDPPWYbwO8+c\n55zvkVitViuCIAjCDU3a2B0QBEEQnE8Ee0EQhJuACPaCIAg3ARHsBUEQbgIi2AuCINwERLAXBEG4\nCdQa7OPi4ggMDKR9+/aVx+bMmUNoaCidOnWiU6dOJCQkVFs2ISGB6OhoWrRowRtvvOHYXguCIAh2\nkdQ2z37r1q1oNBomTpzIwYMHAZg7dy7u7u489dRTNVZqNptp1aoVGzduJCQkhK5du7Jy5Upat27t\n+FcgCIIg1 [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10d8ae3d0>"
+       ]
+      }
+     ],
+     "prompt_number": 11
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='Unrecognized', k=3, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Unrecognized scheme:  Unrecognized\n",
+        "Using Quantiles instead\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 12,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10f177290>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XlclNX+wPHPrMzAsG+yCQoq7qm4ZSaZ+5ZpmVpaYmbd\ntDSz9VdpN9Oy3XZzazNbryZGLoWappi74IIKyiIKsg7DMOvvD5RUthmYEZfzfl0uzDPPWYbwO8+c\n55zvkVitViuCIAjCDU3a2B0QBEEQnE8Ee0EQhJuACPaCIAg3ARHsBUEQbgIi2AuCINwERLAXBEG4\nCdQa7OPi4ggMDKR9+/aVx+bMmUNoaCidOnWiU6dOJCQkVFs2ISGB6OhoWrRowRtvvOHYXguCIAh2\nkdQ2z37r1q1oNBomTpzIwYMHAZg7dy7u7u489dRTNVZqNptp1aoVGzduJCQkhK5du7Jy5Upat27t\n+FcgCIIg1 [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10d41ae50>"
+       ]
+      }
+     ],
+     "prompt_number": 12
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 12
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='QUANTILES', k=1, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Invalid k:  1\n",
+        "2<=k<=9, setting k=5 (default)\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 13,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10f4bca90>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XlclNX+wPHPrMwMw77JJiiguG+omVFkLrlmVpZWlpjZ\n/aWmma23Um+m7Yvtm1am2XK7mhhpJmqWkjuKCyooiCjIzjD7/P5ASWSbgRlxOe9XvGCeec4yhN95\n5jznfI/EZrPZEARBEK5q0pbugCAIguB6ItgLgiBcA0SwFwRBuAaIYC8IgnANEMFeEAThGiCCvSAI\nwjWgwWCfmJhIUFAQXbp0qT42Z84cwsLC6NGjBz169CA5ObnOssnJycTGxhITE8Mrr7zi3F4LgiAI\nDpE0NM9+8+bNaLVaJkyYQFpaGgBz587Fw8ODxx9/vN5KLRYL7du357fffiM0NJTevXuzfPlyOnTo\n4PxXIAiCI [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10f4cb9d0>"
+       ]
+      }
+     ],
+     "prompt_number": 13
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='fisher_jenks', k=8, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 14,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10fb36250>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd8VMXawPHf1mw2m95IJSSU0HsTo1Epgog0EVBRQFCv\nIIiiKFcBXxEsKAgWRAUURSxXQYKRoqEpRqpAEEIJpBBIb5vt+/4RiIS03WSXQJjv5+aSPedM2QhP\nzs6ZeUZitVqtCIIgCI2atKE7IAiCIDifCPaCIAg3ARHsBUEQbgIi2AuCINwERLAXBEG4CYhgLwiC\ncBOoMdhPmDCBwMBA2rdvX35s7ty5hIaG0rlzZzp37kx8fHyVZePj44mOjqZFixa88cYbju21IAiC\nYBdJTfPsd+7ciUajYdy4cRw+fBiAefPm4e7uzowZM6qt1Gw206pVK7Zu3UpISAjdu3dn7dq1tG7d\n2vHvQBAEQ [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10f19a050>"
+       ]
+      }
+     ],
+     "prompt_number": 14
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='equal_interval', k=7, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 15,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10fd88790>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1dX/wPHXndx72VuWooDiJBU1M4rKkWmmDVNLS8ys\nX1Zq2R5amdou20vLymx+NTFyFDhKMSeCAxWUIcpel7vv7w+URBDuhXvFcZ6Pr1+4n8/njIv2vh/O\n55z3kVitViuCIAjCJU3a1h0QBEEQnE8Ee0EQhMuACPaCIAiXARHsBUEQLgMi2AuCIFwGRLAXBEG4\nDDQZ7BMSEggMDKRnz551x+bMmUNoaCi9e/emd+/eJCUlNVo2KSmJ6OhooqKiWLhwoWN7LQiCINhF\n0tQ8+40bN+Lm5sakSZNIS0sDYO7cubi7uzNr1qxzVmo2m+nSpQvr1q0jJCSEfv36sWzZMrp27er4\ndyAIgiA0q [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10fd92f50>"
+       ]
+      }
+     ],
+     "prompt_number": 15
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "tracts.plot(column='CRIME', scheme='equal_interval', k=12, colormap='OrRd')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Invalid k:  12\n",
+        "2<=k<=9, setting k=5 (default)\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 16,
+       "text": [
+        "<matplotlib.axes.AxesSubplot at 0x10fd9bc90>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAXsAAAD4CAYAAAANbUbJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1PUfwPHXTe6OvWUJCijuVFwpReZOzYamNtXMhppl\n2i7tl2mZv4bZTi0rs/ErTYwciZmWlBPBgQrKEGWv47j5+wMlkXUHd+L4PB/xgPve9zOO8H3f+3w/\nn/dHYrFYLAiCIAhXNWlLd0AQBEFwPBHsBUEQrgEi2AuCIFwDRLAXBEG4BohgLwiCcA0QwV4QBOEa\n0GCwnzx5Mv7+/nTp0qX62Lx58wgODqZ79+50796d+Pj4OsvGx8cTFRVFZGQkr7/+un17LQiCINhE\n0tA8++3bt+Pi4sJ9991HUlISAPPnz8fV1ZUnn3yy3kpNJhPt27dn8+bNBAUF0atXL1avXk2HDh3s\n/woEQRCER [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10fb62310>"
+       ]
+      }
+     ],
+     "prompt_number": 16
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Notes\n",
+      "\n",
+      "This is only using a subset of the classifiers in PySAL. specifically those that take only an attribute and a value of k as an argument. This simplifies the number of new default parameters that would be required in GeoPandas.DataFrame.plot().\n",
+      "\n",
+      "It is of course possible to add other classifiers with the cost of additional kw args."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/examples/nyc.png b/examples/nyc.png
new file mode 100644
index 0000000..bb2a450
Binary files /dev/null and b/examples/nyc.png differ
diff --git a/examples/nyc_boros.py b/examples/nyc_boros.py
new file mode 100644
index 0000000..6db6ec5
--- /dev/null
+++ b/examples/nyc_boros.py
@@ -0,0 +1,48 @@
+"""
+Generate example images for GeoPandas documentation.
+
+TODO: autogenerate these from docs themselves
+
+Kelsey Jordahl
+Time-stamp: <Tue May  6 12:17:29 EDT 2014>
+"""
+import numpy as np
+import matplotlib.pyplot as plt
+from shapely.geometry import Point
+from geopandas import GeoSeries, GeoDataFrame
+
+np.random.seed(1)
+DPI = 100
+
+# http://www.nyc.gov/html/dcp/download/bytes/nybb_14aav.zip
+boros = GeoDataFrame.from_file('nybb.shp')
+boros.set_index('BoroCode', inplace=True)
+boros = boros.sort()
+boros.plot()
+plt.xticks(rotation=90)
+plt.savefig('nyc.png', dpi=DPI, bbox_inches='tight')
+#plt.show()
+boros.geometry.convex_hull.plot()
+plt.xticks(rotation=90)
+plt.savefig('nyc_hull.png', dpi=DPI, bbox_inches='tight')
+#plt.show()
+
+N = 2000  # number of random points
+R = 2000  # radius of buffer in feet
+xmin, xmax = plt.gca().get_xlim()
+ymin, ymax = plt.gca().get_ylim()
+#xmin, xmax, ymin, ymax = 900000, 1080000, 120000, 280000
+xc = (xmax - xmin) * np.random.random(N) + xmin
+yc = (ymax - ymin) * np.random.random(N) + ymin
+pts = GeoSeries([Point(x, y) for x, y in zip(xc, yc)])
+mp = pts.buffer(R).unary_union
+boros_with_holes = boros.geometry - mp
+boros_with_holes.plot()
+plt.xticks(rotation=90)
+plt.savefig('boros_with_holes.png', dpi=DPI, bbox_inches='tight')
+plt.show()
+holes = boros.geometry & mp
+holes.plot()
+plt.xticks(rotation=90)
+plt.savefig('holes.png', dpi=DPI, bbox_inches='tight')
+plt.show()
diff --git a/examples/nyc_hull.png b/examples/nyc_hull.png
new file mode 100644
index 0000000..2f81594
Binary files /dev/null and b/examples/nyc_hull.png differ
diff --git a/examples/test.png b/examples/test.png
new file mode 100644
index 0000000..374737f
Binary files /dev/null and b/examples/test.png differ
diff --git a/examples/test_buffer.png b/examples/test_buffer.png
new file mode 100644
index 0000000..0e44af7
Binary files /dev/null and b/examples/test_buffer.png differ
diff --git a/geopandas/__init__.py b/geopandas/__init__.py
new file mode 100644
index 0000000..4c0d5f8
--- /dev/null
+++ b/geopandas/__init__.py
@@ -0,0 +1,16 @@
+try:
+    from geopandas.version import version as __version__
+except ImportError:
+    __version__ = '0.1.1'
+
+from geopandas.geoseries import GeoSeries
+from geopandas.geodataframe import GeoDataFrame
+
+from geopandas.io.file import read_file
+from geopandas.io.sql import read_postgis
+
+# make the interactive namespace easier to use
+# for `from geopandas import *` demos.
+import geopandas as gpd
+import pandas as pd
+import numpy as np
diff --git a/geopandas/base.py b/geopandas/base.py
new file mode 100644
index 0000000..cc27e3e
--- /dev/null
+++ b/geopandas/base.py
@@ -0,0 +1,400 @@
+from warnings import warn
+
+from shapely.geometry import MultiPoint, MultiLineString, MultiPolygon
+from shapely.geometry.base import BaseGeometry
+from shapely.ops import cascaded_union, unary_union
+import shapely.affinity as affinity
+
+import numpy as np
+from pandas import Series, DataFrame
+
+import geopandas as gpd
+
+
+def _geo_op(this, other, op):
+    """Operation that returns a GeoSeries"""
+    if isinstance(other, GeoPandasBase):
+        this = this.geometry
+        crs = this.crs
+        if crs != other.crs:
+            warn('GeoSeries crs mismatch: {0} and {1}'.format(this.crs,
+                                                              other.crs))
+        this, other = this.align(other.geometry)
+        return gpd.GeoSeries([getattr(this_elem, op)(other_elem)
+                             for this_elem, other_elem in zip(this, other)],
+                             index=this.index, crs=crs)
+    else:
+        return gpd.GeoSeries([getattr(s, op)(other)
+                             for s in this.geometry],
+                             index=this.index, crs=this.crs)
+
+
+# TODO: think about merging with _geo_op
+def _series_op(this, other, op, **kwargs):
+    """Geometric operation that returns a pandas Series"""
+    if isinstance(other, GeoPandasBase):
+        this = this.geometry
+        this, other = this.align(other.geometry)
+        return Series([getattr(this_elem, op)(other_elem, **kwargs)
+                      for this_elem, other_elem in zip(this, other)],
+                      index=this.index)
+    else:
+        return Series([getattr(s, op)(other, **kwargs)
+                      for s in this.geometry], index=this.index)
+
+def _geo_unary_op(this, op):
+    """Unary operation that returns a GeoSeries"""
+    return gpd.GeoSeries([getattr(geom, op) for geom in this.geometry],
+                     index=this.index, crs=this.crs)
+
+def _series_unary_op(this, op):
+    """Unary operation that returns a Series"""
+    return Series([getattr(geom, op) for geom in this.geometry],
+                     index=this.index)
+
+
+class GeoPandasBase(object):
+    @property
+    def area(self):
+        """Return the area of each geometry in the GeoSeries"""
+        return _series_unary_op(self, 'area')
+
+    @property
+    def geom_type(self):
+        """Return the geometry type of each geometry in the GeoSeries"""
+        return _series_unary_op(self, 'geom_type')
+
+    @property
+    def type(self):
+        """Return the geometry type of each geometry in the GeoSeries"""
+        return self.geom_type
+
+    @property
+    def length(self):
+        """Return the length of each geometry in the GeoSeries"""
+        return _series_unary_op(self, 'length')
+
+    @property
+    def is_valid(self):
+        """Return True for each valid geometry, else False"""
+        return _series_unary_op(self, 'is_valid')
+
+    @property
+    def is_empty(self):
+        """Return True for each empty geometry, False for non-empty"""
+        return _series_unary_op(self, 'is_empty')
+
+    @property
+    def is_simple(self):
+        """Return True for each simple geometry, else False"""
+        return _series_unary_op(self, 'is_simple')
+
+    @property
+    def is_ring(self):
+        """Return True for each geometry that is a closed ring, else False"""
+        # operates on the exterior, so can't use _series_unary_op()
+        return Series([geom.exterior.is_ring for geom in self.geometry],
+                      index=self.index)
+
+    #
+    # Unary operations that return a GeoSeries
+    #
+
+    @property
+    def boundary(self):
+        """Return the bounding geometry for each geometry"""
+        return _geo_unary_op(self, 'boundary')
+
+    @property
+    def centroid(self):
+        """Return the centroid of each geometry in the GeoSeries"""
+        return _geo_unary_op(self, 'centroid')
+
+    @property
+    def convex_hull(self):
+        """Return the convex hull of each geometry"""
+        return _geo_unary_op(self, 'convex_hull')
+
+    @property
+    def envelope(self):
+        """Return a bounding rectangle for each geometry"""
+        return _geo_unary_op(self, 'envelope')
+
+    @property
+    def exterior(self):
+        """Return the outer boundary of each polygon"""
+        # TODO: return empty geometry for non-polygons
+        return _geo_unary_op(self, 'exterior')
+
+    @property
+    def interiors(self):
+        """Return the interior rings of each polygon"""
+        # TODO: return empty list or None for non-polygons
+        return _geo_unary_op(self, 'interiors')
+
+    def representative_point(self):
+        """Return a GeoSeries of points guaranteed to be in each geometry"""
+        return gpd.GeoSeries([geom.representative_point()
+                             for geom in self.geometry],
+                         index=self.index)
+
+    #
+    # Reduction operations that return a Shapely geometry
+    #
+
+    @property
+    def cascaded_union(self):
+        """Deprecated: Return the unary_union of all geometries"""
+        return cascaded_union(self.values)
+
+    @property
+    def unary_union(self):
+        """Return the union of all geometries"""
+        return unary_union(self.values)
+
+    #
+    # Binary operations that return a pandas Series
+    #
+
+    def contains(self, other):
+        """Return True for all geometries that contain *other*, else False"""
+        return _series_op(self, other, 'contains')
+
+    def geom_equals(self, other):
+        """Return True for all geometries that equal *other*, else False"""
+        return _series_op(self, other, 'equals')
+
+    def geom_almost_equals(self, other, decimal=6):
+        """Return True for all geometries that is approximately equal to *other*, else False"""
+        # TODO: pass precision argument
+        return _series_op(self, other, 'almost_equals', decimal=decimal)
+
+    def geom_equals_exact(self, other, tolerance):
+        """Return True for all geometries that equal *other* to a given tolerance, else False"""
+        # TODO: pass tolerance argument.
+        return _series_op(self, other, 'equals_exact', tolerance=tolerance)
+
+    def crosses(self, other):
+        """Return True for all geometries that cross *other*, else False"""
+        return _series_op(self, other, 'crosses')
+
+    def disjoint(self, other):
+        """Return True for all geometries that are disjoint with *other*, else False"""
+        return _series_op(self, other, 'disjoint')
+
+    def intersects(self, other):
+        """Return True for all geometries that intersect *other*, else False"""
+        return _series_op(self, other, 'intersects')
+
+    def overlaps(self, other):
+        """Return True for all geometries that overlap *other*, else False"""
+        return _series_op(self, other, 'overlaps')
+
+    def touches(self, other):
+        """Return True for all geometries that touch *other*, else False"""
+        return _series_op(self, other, 'touches')
+
+    def within(self, other):
+        """Return True for all geometries that are within *other*, else False"""
+        return _series_op(self, other, 'within')
+
+    def distance(self, other):
+        """Return distance of each geometry to *other*"""
+        return _series_op(self, other, 'distance')
+
+    #
+    # Binary operations that return a GeoSeries
+    #
+
+    def difference(self, other):
+        """Return the set-theoretic difference of each geometry with *other*"""
+        return _geo_op(self, other, 'difference')
+
+    def symmetric_difference(self, other):
+        """Return the symmetric difference of each geometry with *other*"""
+        return _geo_op(self, other, 'symmetric_difference')
+
+    def union(self, other):
+        """Return the set-theoretic union of each geometry with *other*"""
+        return _geo_op(self, other, 'union')
+
+    def intersection(self, other):
+        """Return the set-theoretic intersection of each geometry with *other*"""
+        return _geo_op(self, other, 'intersection')
+
+    #
+    # Other operations
+    #
+
+    @property
+    def bounds(self):
+        """Return a DataFrame of minx, miny, maxx, maxy values of geometry objects"""
+        bounds = np.array([geom.bounds for geom in self.geometry])
+        return DataFrame(bounds,
+                         columns=['minx', 'miny', 'maxx', 'maxy'],
+                         index=self.index)
+                         
+    @property
+    def total_bounds(self):
+        """Return a single bounding box (minx, miny, maxx, maxy) for all geometries
+
+        This is a shortcut for calculating the min/max x and y bounds individually.
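+
+        A minimal sketch, assuming ``Point`` has been imported from
+        ``shapely.geometry``::
+
+            >>> gpd.GeoSeries([Point(0, 0), Point(1, 1)]).total_bounds
+            (0.0, 0.0, 1.0, 1.0)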
+        """
+
+        b = self.bounds
+        return (b['minx'].min(),
+                b['miny'].min(),
+                b['maxx'].max(),
+                b['maxy'].max())
+
+    def buffer(self, distance, resolution=16):
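+        """Return a GeoSeries of geometries containing all points within
+        *distance* of each object (shapely ``buffer``); *resolution* is the
+        number of segments used to approximate a quarter circle."""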
+        return gpd.GeoSeries([geom.buffer(distance, resolution) 
+                             for geom in self.geometry],
+                         index=self.index, crs=self.crs)
+
+    def simplify(self, *args, **kwargs):
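+        """Return a GeoSeries of simplified geometries; the arguments are
+        passed straight to shapely's ``object.simplify`` (a *tolerance*,
+        and optionally ``preserve_topology``)."""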
+        return gpd.GeoSeries([geom.simplify(*args, **kwargs)
+                             for geom in self.geometry],
+                      index=self.index, crs=self.crs)
+
+    def relate(self, other):
+        raise NotImplementedError
+
+    def project(self, other, normalized=False):
+        """
+        Return the distance along each geometry nearest to *other*
+        
+        Parameters
+        ----------
+        other : BaseGeometry or GeoSeries
+            The *other* geometry from which to compute the projected point.
+        normalized : boolean
+            If normalized is True, return the distance normalized to
+            the length of the object.
+        
+        The project method is the inverse of interpolate.
+        """
+        
+        return _series_op(self, other, 'project', normalized=normalized)
+
+    def interpolate(self, distance, normalized=False):
+        """
+        Return a point at the specified distance along each geometry
+        
+        Parameters
+        ----------
+        distance : float or Series of floats
+            Distance(s) along the geometries at which a point should be returned
+        normalized : boolean
+            If normalized is True, distance will be interpreted as a fraction 
+            of the geometric object's length.
+        """
+        
+        return gpd.GeoSeries([s.interpolate(distance, normalized) 
+                             for s in self.geometry],
+            index=self.index, crs=self.crs)
+        
+    def translate(self, xoff=0.0, yoff=0.0, zoff=0.0):
+        """
+        Shift the coordinates of the GeoSeries.
+
+        Parameters
+        ----------
+        xoff, yoff, zoff : float, float, float
+            Amount of offset along each dimension.
+            xoff, yoff, and zoff for translation along the x, y, and z 
+            dimensions respectively.
+
+        See shapely manual for more information:
+        http://toblerity.org/shapely/manual.html#affine-transformations
+        """
+
+        return gpd.GeoSeries([affinity.translate(s, xoff, yoff, zoff) 
+                             for s in self.geometry], 
+            index=self.index, crs=self.crs)
+
+    def rotate(self, angle, origin='center', use_radians=False):
+        """
+        Rotate the coordinates of the GeoSeries.
+        
+        Parameters
+        ----------
+        angle : float
+            The angle of rotation can be specified in either degrees (default) 
+            or radians by setting use_radians=True. Positive angles are 
+            counter-clockwise and negative are clockwise rotations.
+        origin : string, Point, or tuple (x, y)
+            The point of origin can be a keyword 'center' for the bounding box 
+            center (default), 'centroid' for the geometry's centroid, a Point 
+            object or a coordinate tuple (x, y).
+        use_radians : boolean
+            Whether to interpret the angle of rotation as degrees or radians
+            
+        See shapely manual for more information:
+        http://toblerity.org/shapely/manual.html#affine-transformations
+        """
+
+        return gpd.GeoSeries([affinity.rotate(s, angle, origin=origin, 
+            use_radians=use_radians) for s in self.geometry],
+            index=self.index, crs=self.crs)
+
+    def scale(self, xfact=1.0, yfact=1.0, zfact=1.0, origin='center'):
+        """
+        Scale the geometries of the GeoSeries along each (x, y, z) dimension.
+
+        Parameters
+        ----------
+        xfact, yfact, zfact : float, float, float
+            Scaling factors for the x, y, and z dimensions respectively.
+        origin : string, Point, or tuple
+            The point of origin can be a keyword 'center' for the 2D bounding 
+            box center (default), 'centroid' for the geometry's 2D centroid, a 
+            Point object or a coordinate tuple (x, y, z).
+
+        Note: Negative scale factors will mirror or reflect coordinates.
+
+        See shapely manual for more information:
+        http://toblerity.org/shapely/manual.html#affine-transformations
+        """
+
+        return gpd.GeoSeries([affinity.scale(s, xfact, yfact, zfact, 
+            origin=origin) for s in self.geometry], index=self.index, 
+            crs=self.crs)
+                           
+    def skew(self, xs=0.0, ys=0.0, origin='center', use_radians=False):
+        """
+        Shear/Skew the geometries of the GeoSeries by angles along x and y dimensions.
+        
+        Parameters
+        ----------
+        xs, ys : float, float
+            The shear angle(s) for the x and y axes respectively. These can be 
+            specified in either degrees (default) or radians by setting 
+            use_radians=True.
+        origin : string, Point, or tuple (x, y)
+            The point of origin can be a keyword 'center' for the bounding box 
+            center (default), 'centroid' for the geometry's centroid, a Point 
+            object or a coordinate tuple (x, y).
+        use_radians : boolean
+            Whether to interpret the shear angle(s) as degrees or radians
+            
+        See shapely manual for more information:
+        http://toblerity.org/shapely/manual.html#affine-transformations
+        """
+        
+        return gpd.GeoSeries([affinity.skew(s, xs, ys, origin=origin, 
+            use_radians=use_radians) for s in self.geometry],
+            index=self.index, crs=self.crs)
+
+
+def _array_input(arr):
+    if isinstance(arr, (MultiPoint, MultiLineString, MultiPolygon)):
+        # Prevent against improper length detection when input is a
+        # Multi*
+        geom = arr
+        arr = np.empty(1, dtype=object)
+        arr[0] = geom
+
+    return arr
+
+
diff --git a/geopandas/geocode.py b/geopandas/geocode.py
new file mode 100644
index 0000000..c154f3c
--- /dev/null
+++ b/geopandas/geocode.py
@@ -0,0 +1,122 @@
+from collections import defaultdict
+import time
+
+from fiona.crs import from_epsg
+import numpy as np
+import pandas as pd
+from shapely.geometry import Point
+from six import iteritems
+
+import geopandas as gpd
+
+
+def _throttle_time(provider):
+    """ Amount of time to wait between requests to a geocoding API.
+
+    Currently implemented for Nominatim, as their terms of service
+    require a maximum of 1 request per second.
+    https://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
+    """
+    if provider == 'nominatim':
+        return 1
+    else:
+        return 0
+
+
+def geocode(strings, provider='googlev3', **kwargs):
+    """
+    Geocode a set of strings and get a GeoDataFrame of the resulting points.
+
+    Parameters
+    ----------
+    strings : list or Series of addresses to geocode
+    provider : geopy geocoder to use, default 'googlev3'
+        Some providers require additional arguments such as access keys
+        See each geocoder's specific parameters in geopy.geocoders
+        * googlev3, default
+        * bing
+        * google
+        * yahoo
+        * mapquest
+        * openmapquest
+    
+    Ensure proper use of the results by consulting the Terms of Service for
+    your provider.
+
+    Geocoding requires geopy. Install it using 'pip install geopy'. See also
+    https://github.com/geopy/geopy
+
+    Example
+    -------
+    >>> df = geocode(['boston, ma', '1600 pennsylvania ave. washington, dc'])
+    address                                               geometry
+    0                                    Boston, MA, USA  POINT (-71.0597731999999951 42.3584308000000007)
+    1  1600 Pennsylvania Avenue Northwest, President'...  POINT (-77.0365122999999983 38.8978377999999978)
+
+    """
+    import geopy
+    from geopy.geocoders.base import GeocoderQueryError
+
+    if not isinstance(strings, pd.Series):
+        strings = pd.Series(strings)
+
+    # workaround changed name in 0.96
+    try:
+        Yahoo = geopy.geocoders.YahooPlaceFinder
+    except AttributeError:
+        Yahoo = geopy.geocoders.Yahoo
+
+    coders = {'googlev3': geopy.geocoders.GoogleV3,
+              'bing': geopy.geocoders.Bing,
+              'yahoo': Yahoo,
+              'mapquest': geopy.geocoders.MapQuest,
+              'openmapquest': geopy.geocoders.OpenMapQuest,
+              'nominatim' : geopy.geocoders.Nominatim}
+
+    if provider not in coders:
+        raise ValueError('Unknown geocoding provider: {0}'.format(provider))
+
+    coder = coders[provider](**kwargs)
+    results = {}
+    for i, s in iteritems(strings):
+        try:
+            results[i] = coder.geocode(s)
+        except (GeocoderQueryError, ValueError):
+            results[i] = (None, None)
+        time.sleep(_throttle_time(provider))
+
+    df = _prepare_geocode_result(results)
+    return df
+
+def _prepare_geocode_result(results):
+    """
+    Helper function for the geocode function
+
+    Takes a dict where keys are index entries, values are tuples containing:
+    (address, (lat, lon))
+
+    """
+    # Prepare the data for the DataFrame as a dict of lists
+    d = defaultdict(list)
+    index = []
+
+    for i, s in iteritems(results):
+        address, loc = s
+
+        # loc is lat, lon and we want lon, lat
+        if loc is None:
+            p = Point()
+        else:
+            p = Point(loc[1], loc[0])
+
+        if address is None:
+            address = np.nan
+
+        d['geometry'].append(p)
+        d['address'].append(address)
+        index.append(i)
+
+    df = gpd.GeoDataFrame(d, index=index)
+    df.crs = from_epsg(4326)
+
+    return df
diff --git a/geopandas/geodataframe.py b/geopandas/geodataframe.py
new file mode 100644
index 0000000..a3809ec
--- /dev/null
+++ b/geopandas/geodataframe.py
@@ -0,0 +1,396 @@
+try:
+    from collections import OrderedDict
+except ImportError:
+    # Python 2.6
+    from ordereddict import OrderedDict
+from collections import defaultdict
+import json
+import os
+import sys
+
+import numpy as np
+from pandas import DataFrame, Series
+from shapely.geometry import mapping, shape
+from shapely.geometry.base import BaseGeometry
+from six import string_types, iteritems
+
+from geopandas import GeoSeries
+from geopandas.base import GeoPandasBase
+from geopandas.plotting import plot_dataframe
+import geopandas.io
+
+
+DEFAULT_GEO_COLUMN_NAME = 'geometry'
+PY3 = sys.version_info[0] == 3
+
+
+class GeoDataFrame(GeoPandasBase, DataFrame):
+    """
+    A GeoDataFrame object is a pandas.DataFrame that has a column
+    with geometry. In addition to the standard DataFrame constructor arguments,
+    GeoDataFrame also accepts the following keyword arguments:
+
+    Keyword Arguments
+    -----------------
+    crs : str (optional)
+        Coordinate system
+    geometry : str or array (optional)
+        If str, column to use as geometry. If array, will be set as 'geometry'
+        column on GeoDataFrame.
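+
+    A minimal sketch of direct construction, assuming ``Point`` has been
+    imported from ``shapely.geometry``::
+
+        >>> gdf = GeoDataFrame({'name': ['a', 'b']},
+        ...                    geometry=[Point(0, 0), Point(1, 1)])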
+    """
+    _metadata = ['crs', '_geometry_column_name']
+    _geometry_column_name = DEFAULT_GEO_COLUMN_NAME
+
+    def __init__(self, *args, **kwargs):
+        crs = kwargs.pop('crs', None)
+        geometry = kwargs.pop('geometry', None)
+        super(GeoDataFrame, self).__init__(*args, **kwargs)
+        self.crs = crs
+        if geometry is not None:
+            self.set_geometry(geometry, inplace=True)
+
+    def __setattr__(self, attr, val):
+        # have to special case geometry b/c pandas tries to use as column...
+        if attr == 'geometry':
+            object.__setattr__(self, attr, val)
+        else:
+            super(GeoDataFrame, self).__setattr__(attr, val)
+
+    def _get_geometry(self):
+        if self._geometry_column_name not in self:
+            raise AttributeError("No geometry data set yet (expected in"
+                                 " column '%s'." % self._geometry_column_name)
+        return self[self._geometry_column_name]
+
+    def _set_geometry(self, col):
+        # TODO: Use pandas' core.common.is_list_like() here.
+        if not isinstance(col, (list, np.ndarray, Series)):
+            raise ValueError("Must use a list-like to set the geometry"
+                             " property")
+
+        self.set_geometry(col, inplace=True)
+
+    geometry = property(fget=_get_geometry, fset=_set_geometry,
+                        doc="Geometry data for GeoDataFrame")
+
+    def set_geometry(self, col, drop=False, inplace=False, crs=None):
+        """
+        Set the GeoDataFrame geometry using either an existing column or
+        the specified input. By default yields a new object.
+
+        The original geometry column is replaced with the input.
+
+        Parameters
+        ----------
+        col : column label or array
+        drop : boolean, default False
+            Delete the column to be used as the new geometry
+        inplace : boolean, default False
+            Modify the GeoDataFrame in place (do not create a new object)
+        crs : str/result of fiona.get_crs (optional)
+            Coordinate system to use. If passed, overrides both DataFrame and
+            col's crs. Otherwise, tries to get crs from passed col values or
+            DataFrame.
+
+        Examples
+        --------
+        >>> df1 = df.set_geometry([Point(0,0), Point(1,1), Point(2,2)])
+        >>> df2 = df.set_geometry('geom1')
+
+        Returns
+        -------
+        geodataframe : GeoDataFrame
+        """
+        # Most of the code here is taken from DataFrame.set_index()
+        if inplace:
+            frame = self
+        else:
+            frame = self.copy()
+
+        if not crs:
+            crs = getattr(col, 'crs', self.crs)
+
+        to_remove = None
+        geo_column_name = DEFAULT_GEO_COLUMN_NAME
+        if isinstance(col, (Series, list, np.ndarray)):
+            level = col
+        elif hasattr(col, 'ndim') and col.ndim != 1:
+            raise ValueError("Must pass array with one dimension only.")
+        else:
+            try:
+                level = frame[col].values
+            except KeyError:
+                raise ValueError("Unknown column %s" % col)
+            except:
+                raise
+            if drop:
+                to_remove = col
+                geo_column_name = DEFAULT_GEO_COLUMN_NAME
+            else:
+                geo_column_name = col
+
+        if to_remove:
+            del frame[to_remove]
+
+        if isinstance(level, GeoSeries) and level.crs != crs:
+            # Avoids caching issues/crs sharing issues
+            level = level.copy()
+            level.crs = crs
+
+        # Check that we are using a listlike of geometries
+        if not all(isinstance(item, BaseGeometry) for item in level):
+            raise TypeError("Input geometry column must contain valid geometry objects.")
+        frame[geo_column_name] = level
+        frame._geometry_column_name = geo_column_name
+        frame.crs = crs
+
+        if not inplace:
+            return frame
+
+    @classmethod
+    def from_file(cls, filename, **kwargs):
+        """
+        Alternate constructor to create a GeoDataFrame from a file.
+        
+        Example:
+            df = geopandas.GeoDataFrame.from_file('nybb.shp')
+
+        Wraps geopandas.read_file(). For additional help, see read_file()
+
+        """
+        return geopandas.io.file.read_file(filename, **kwargs)
+
+    @classmethod
+    def from_features(cls, features, crs=None):
+        """
+        Alternate constructor to create GeoDataFrame from an iterable of
+        features. Each element must be a feature dictionary or implement
+        the __geo_interface__.
+        See: https://gist.github.com/sgillies/2217756
+
+        """
+        rows = []
+        for f in features:
+            if hasattr(f, "__geo_interface__"):
+                f = f.__geo_interface__
+
+            d = {'geometry': shape(f['geometry'])}
+            d.update(f['properties'])
+            rows.append(d)
+        df = GeoDataFrame.from_dict(rows)
+        df.crs = crs
+        return df
+
+    @classmethod
+    def from_postgis(cls, sql, con, geom_col='geom', crs=None, index_col=None,
+                     coerce_float=True, params=None):
+        """
+        Alternate constructor to create a GeoDataFrame from a sql query
+        containing a geometry column.
+
+        Example:
+            df = geopandas.GeoDataFrame.from_postgis(
+                "SELECT geom, highway FROM roads;", con)
+
+        Wraps geopandas.read_postgis(). For additional help, see read_postgis()
+
+        """
+        return geopandas.io.sql.read_postgis(sql, con, geom_col, crs, index_col, 
+                     coerce_float, params)
+
+
+    def to_json(self, na='null', **kwargs):
+        """Returns a GeoJSON representation of the GeoDataFrame.
+
+        Parameters
+        ----------
+        na : {'null', 'drop', 'keep'}, default 'null'
+            Indicates how to output missing (NaN) values in the GeoDataFrame
+            * null: output the missing entries as JSON null
+            * drop: remove the property from the feature. This applies to
+                    each feature individually so that features may have
+                    different properties
+            * keep: output the missing entries as NaN
+        
+        The remaining *kwargs* are passed to json.dumps().
+        """
+        def fill_none(row):
+            """
+            Takes in a Series, converts to a dictionary with null values
+            set to None
+
+            """
+            na_keys = row.index[row.isnull()]
+            d = row.to_dict()
+            for k in na_keys:
+                d[k] = None
+            return d
+
+        # na_methods must take in a Series and return dict-like
+        na_methods = {'null': fill_none,
+                      'drop': lambda row: row.dropna(),
+                      'keep': lambda row: row}
+
+        if na not in na_methods:
+            raise ValueError('Unknown na method {0}'.format(na))
+        f = na_methods[na]
+
+        def feature(i, row):
+            row = f(row)
+            return {
+                'id': str(i),
+                'type': 'Feature',
+                'properties':
+                    dict((k, v) for k, v in iteritems(row) if k != self._geometry_column_name),
+                'geometry': mapping(row[self._geometry_column_name]) }
+
+        return json.dumps(
+            {'type': 'FeatureCollection',
+             'features': [feature(i, row) for i, row in self.iterrows()]},
+            **kwargs )
+            
+    def to_file(self, filename, driver="ESRI Shapefile", **kwargs):
+        """
+        Write this GeoDataFrame to an OGR data source
+        
+        A dictionary of supported OGR providers is available via:
+        >>> import fiona
+        >>> fiona.supported_drivers
+
+        Parameters
+        ----------
+        filename : string 
+            File path or file handle to write to.
+        driver : string, default 'ESRI Shapefile'
+            The OGR format driver used to write the vector file.
+
+        The *kwargs* are passed to fiona.open and can be used to write 
+        to multi-layer data, store data within archives (zip files), etc.
+        """
+        import fiona
+        def convert_type(in_type):
+            if in_type == object:
+                return 'str'
+            out_type = type(np.asscalar(np.zeros(1, in_type))).__name__
+            if out_type == 'long':
+                out_type = 'int'
+            return out_type
+            
+        def feature(i, row):
+            return {
+                'id': str(i),
+                'type': 'Feature',
+                'properties':
+                    dict((k, v) for k, v in iteritems(row) if k != 'geometry'),
+                'geometry': mapping(row['geometry']) }
+        
+        properties = OrderedDict([(col, convert_type(_type)) for col, _type 
+            in zip(self.columns, self.dtypes) if col!='geometry'])
+        # Need to check geom_types before we write to file... 
+        # Some (most?) providers expect a single geometry type: 
+        # Point, LineString, or Polygon
+        geom_types = self['geometry'].geom_type.unique()
+        from os.path import commonprefix # To find longest common prefix
+        geom_type = commonprefix([g[::-1] for g in geom_types])[::-1]  # Reverse
+        if geom_type == '': # No common suffix = mixed geometry types
+            raise ValueError("Geometry column cannot contains mutiple "
+                             "geometry types when writing to file.")
+        schema = {'geometry': geom_type, 'properties': properties}
+        filename = os.path.abspath(os.path.expanduser(filename))
+        with fiona.open(filename, 'w', driver=driver, crs=self.crs, 
+                        schema=schema, **kwargs) as c:
+            for i, row in self.iterrows():
+                c.write(feature(i, row))
+
+    def to_crs(self, crs=None, epsg=None, inplace=False):
+        """Transform geometries to a new coordinate reference system
+
+        This method will transform all points in all objects.  It has
+        no notion of projecting entire geometries.  All segments
+        joining points are assumed to be lines in the current
+        projection, not geodesics.  Objects crossing the dateline (or
+        other projection boundary) will have undesirable behavior.
+        """
+        if inplace:
+            df = self
+        else:
+            df = self.copy()
+        geom = df.geometry.to_crs(crs=crs, epsg=epsg)
+        df.geometry = geom
+        df.crs = geom.crs
+        if not inplace:
+            return df
+
+    def __getitem__(self, key):
+        """
+        If the result is a column containing only 'geometry', return a
+        GeoSeries. If it's a DataFrame with a 'geometry' column, return a
+        GeoDataFrame.
+        """
+        result = super(GeoDataFrame, self).__getitem__(key)
+        geo_col = self._geometry_column_name
+        if isinstance(key, string_types) and key == geo_col:
+            result.__class__ = GeoSeries
+            result.crs = self.crs
+        elif isinstance(result, DataFrame) and geo_col in result:
+            result.__class__ = GeoDataFrame
+            result.crs = self.crs
+            result._geometry_column_name = geo_col
+        elif isinstance(result, DataFrame) and geo_col not in result:
+            result.__class__ = DataFrame
+            result.crs = self.crs
+        return result
+
+    #
+    # Implement pandas methods
+    #
+
+    @property
+    def _constructor(self):
+        return GeoDataFrame
+
+    def __finalize__(self, other, method=None, **kwargs):
+        """ propagate metadata from other to self """
+        # NOTE: backported from pandas master (upcoming v0.13)
+        for name in self._metadata:
+            object.__setattr__(self, name, getattr(other, name, None))
+        return self
+
+    def copy(self, deep=True):
+        """
+        Make a copy of this GeoDataFrame object
+
+        Parameters
+        ----------
+        deep : boolean, default True
+            Make a deep copy, i.e. also copy data
+
+        Returns
+        -------
+        copy : GeoDataFrame
+        """
+        # FIXME: this will likely be unnecessary in pandas >= 0.13
+        data = self._data
+        if deep:
+            data = data.copy()
+        return GeoDataFrame(data).__finalize__(self)
+
+    def plot(self, *args, **kwargs):
+        return plot_dataframe(self, *args, **kwargs)
+
+def _dataframe_set_geometry(self, col, drop=False, inplace=False, crs=None):
+    if inplace:
+        raise ValueError("Can't do inplace setting when converting from"
+                         " DataFrame to GeoDataFrame")
+    gf = GeoDataFrame(self)
+    # this will copy so that BlockManager gets copied
+    return gf.set_geometry(col, drop=drop, inplace=False, crs=crs)
+
+if PY3:
+    DataFrame.set_geometry = _dataframe_set_geometry
+else:
+    import types
+    DataFrame.set_geometry = types.MethodType(_dataframe_set_geometry, None,
+                                              DataFrame)
diff --git a/geopandas/geoseries.py b/geopandas/geoseries.py
new file mode 100644
index 0000000..9ac72b7
--- /dev/null
+++ b/geopandas/geoseries.py
@@ -0,0 +1,257 @@
+from functools import partial
+from warnings import warn
+
+import numpy as np
+from pandas import Series, DataFrame
+import pyproj
+from shapely.geometry import shape, Polygon, Point
+from shapely.geometry.collection import GeometryCollection
+from shapely.geometry.base import BaseGeometry
+from shapely.ops import transform
+
+from geopandas.plotting import plot_series
+from geopandas.base import GeoPandasBase
+
+OLD_PANDAS = issubclass(Series, np.ndarray)
+
+def _is_empty(x):
+    try:
+        return x.is_empty
+    except:
+        return False
+
+def _convert_array_args(args):
+    if len(args) == 1 and isinstance(args[0], BaseGeometry):
+        args = ([args[0]],)
+    return args
+
+class GeoSeries(GeoPandasBase, Series):
+    """A Series object designed to store shapely geometry objects."""
+    _metadata = ['name', 'crs']
+
+    def __new__(cls, *args, **kwargs):
+        kwargs.pop('crs', None)
+        if OLD_PANDAS:
+            args = _convert_array_args(args)
+            arr = Series.__new__(cls, *args, **kwargs)
+        else:
+            arr = Series.__new__(cls)
+        if type(arr) is GeoSeries:
+            return arr
+        else:
+            return arr.view(GeoSeries)
+
+    def __init__(self, *args, **kwargs):
+        if not OLD_PANDAS:
+            args = _convert_array_args(args)
+        crs = kwargs.pop('crs', None)
+
+        super(GeoSeries, self).__init__(*args, **kwargs)
+        self.crs = crs
+
+    @property
+    def geometry(self):
+        return self
+
+    @classmethod
+    def from_file(cls, filename, **kwargs):
+        """
+        Alternate constructor to create a GeoSeries from a file
+        
+        Parameters
+        ----------
+        
+        filename : str
+            File path or file handle to read from. Depending on which kwargs
+            are included, the content of filename may vary, see:
+            http://toblerity.github.io/fiona/README.html#usage
+            for usage details.
+        kwargs : key-word arguments
+            These arguments are passed to fiona.open, and can be used to 
+            access multi-layer data, data stored within archives (zip files),
+            etc.
+        
+        """
+        import fiona
+        geoms = []
+        with fiona.open(filename, **kwargs) as f:
+            crs = f.crs
+            for rec in f:
+                geoms.append(shape(rec['geometry']))
+        g = GeoSeries(geoms)
+        g.crs = crs
+        return g
+
+    def to_file(self, filename, driver="ESRI Shapefile", **kwargs):
+        from geopandas import GeoDataFrame
+        data = GeoDataFrame({"geometry": self,
+                          "id":self.index.values},
+                          index=self.index)
+        data.crs = self.crs
+        data.to_file(filename, driver, **kwargs)
+        
+    #
+    # Implement pandas methods
+    #
+
+    @property
+    def _constructor(self):
+        return GeoSeries
+
+    def _wrapped_pandas_method(self, mtd, *args, **kwargs):
+        """Wrap a generic pandas method to ensure it returns a GeoSeries"""
+        val = getattr(super(GeoSeries, self), mtd)(*args, **kwargs)
+        if type(val) == Series:
+            val.__class__ = GeoSeries
+            val.crs = self.crs
+        return val
+
+    def __getitem__(self, key):
+        return self._wrapped_pandas_method('__getitem__', key)
+
+    def sort_index(self, *args, **kwargs):
+        return self._wrapped_pandas_method('sort_index', *args, **kwargs)
+
+    def take(self, *args, **kwargs):
+        return self._wrapped_pandas_method('take', *args, **kwargs)
+
+    def select(self, *args, **kwargs):
+        return self._wrapped_pandas_method('select', *args, **kwargs)
+
+    @property
+    def _can_hold_na(self):
+        return False
+
+    def __finalize__(self, other, method=None, **kwargs):
+        """ propagate metadata from other to self """
+        # NOTE: backported from pandas master (upcoming v0.13)
+        for name in self._metadata:
+            object.__setattr__(self, name, getattr(other, name, None))
+        return self
+
+    def copy(self, order='C'):
+        """
+        Make a copy of this GeoSeries object
+
+        Parameters
+        ----------
+        order : {'C', 'F', 'A', 'K'}, default 'C'
+            Memory layout for the copy of the underlying values
+
+        Returns
+        -------
+        copy : GeoSeries
+        """
+        # FIXME: this will likely be unnecessary in pandas >= 0.13
+        return GeoSeries(self.values.copy(order), index=self.index,
+                      name=self.name).__finalize__(self)
+
+    def isnull(self):
+        """Null values in a GeoSeries are represented by empty geometric objects"""
+        non_geo_null = super(GeoSeries, self).isnull()
+        val = self.apply(_is_empty)
+        return np.logical_or(non_geo_null, val)
+
+    def fillna(self, value=None, method=None, inplace=False,
+               **kwargs):
+        """Fill NA/NaN values with a geometry (empty polygon by default).
+
+        "method" is currently not implemented for pandas <= 0.12.
+        """
+        if value is None:
+            value = Point()
+        if not OLD_PANDAS:
+            return super(GeoSeries, self).fillna(value=value, method=method,
+                                                 inplace=inplace, **kwargs)
+        else:
+            # FIXME: this is an ugly way to support pandas <= 0.12
+            if method is not None:
+                raise NotImplementedError('Fill method is currently not implemented for GeoSeries')
+            if isinstance(value, BaseGeometry):
+                result = self.copy() if not inplace else self
+                mask = self.isnull()
+                result[mask] = value
+                if not inplace:
+                    return GeoSeries(result)
+            else:
+                raise ValueError('Non-geometric fill values not allowed for GeoSeries')
+
+    def align(self, other, join='outer', level=None, copy=True,
+              fill_value=None, **kwargs):
+        if fill_value is None:
+            fill_value = Point()
+        left, right = super(GeoSeries, self).align(other, join=join,
+                                                   level=level, copy=copy,
+                                                   fill_value=fill_value,
+                                                   **kwargs)
+        if isinstance(other, GeoSeries):
+            return GeoSeries(left), GeoSeries(right)
+        else: # It is probably a Series, let's keep it that way
+            return GeoSeries(left), right
+
+
+    def __contains__(self, other):
+        """Allow tests of the form "geom in s"
+
+        Tests whether a GeoSeries contains a geometry.
+
+        Note: This is not the same as the geometric method "contains".
+        """
+        if isinstance(other, BaseGeometry):
+            return np.any(self.geom_equals(other))
+        else:
+            return False
+
+
+    def plot(self, *args, **kwargs):
+        return plot_series(self, *args, **kwargs)
+
+    #
+    # Additional methods
+    #
+
+    def to_crs(self, crs=None, epsg=None):
+        """Transform geometries to a new coordinate reference system
+
+        This method will transform all points in all objects.  It has
+        no notion of projecting entire geometries.  All segments
+        joining points are assumed to be lines in the current
+        projection, not geodesics.  Objects crossing the dateline (or
+        other projection boundary) will have undesirable behavior.
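+
+        A minimal sketch, assuming ``s`` is a GeoSeries with ``crs``
+        already set::
+
+            >>> s_wgs84 = s.to_crs(epsg=4326)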
+        """
+        from fiona.crs import from_epsg
+        if self.crs is None:
+            raise ValueError('Cannot transform naive geometries.  '
+                             'Please set a crs on the object first.')
+        if crs is None:
+            try:
+                crs = from_epsg(epsg)
+            except TypeError:
+                raise TypeError('Must set either crs or epsg for output.')
+        proj_in = pyproj.Proj(preserve_units=True, **self.crs)
+        proj_out = pyproj.Proj(preserve_units=True, **crs)
+        project = partial(pyproj.transform, proj_in, proj_out)
+        result = self.apply(lambda geom: transform(project, geom))
+        result.__class__ = GeoSeries
+        result.crs = crs
+        return result
+
+    #
+    # Implement standard operators for GeoSeries
+    #
+
+    def __xor__(self, other):
+        """Implement ^ operator as for builtin set type"""
+        return self.symmetric_difference(other)
+
+    def __or__(self, other):
+        """Implement | operator as for builtin set type"""
+        return self.union(other)
+
+    def __and__(self, other):
+        """Implement & operator as for builtin set type"""
+        return self.intersection(other)
+
+    def __sub__(self, other):
+        """Implement - operator as for builtin set type"""
+        return self.difference(other)
diff --git a/geopandas/io/__init__.py b/geopandas/io/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/geopandas/io/file.py b/geopandas/io/file.py
new file mode 100644
index 0000000..05f35b5
--- /dev/null
+++ b/geopandas/io/file.py
@@ -0,0 +1,22 @@
+from geopandas import GeoDataFrame
+
+def read_file(filename, **kwargs):
+    """
+    Returns a GeoDataFrame from a file.
+
+    *filename* is either the absolute or relative path to the file to be
+    opened and *kwargs* are keyword args to be passed to the method when
+    opening the file.
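+
+    A minimal sketch, assuming a local ``nybb.shp`` and a bbox given in
+    that file's coordinate units::
+
+        >>> df = read_file('nybb.shp')
+        >>> df_sub = read_file('nybb.shp', bbox=(900000, 120000, 1080000, 280000))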
+    """
+    import fiona
+    bbox = kwargs.pop('bbox', None)
+    with fiona.open(filename, **kwargs) as f:
+        crs = f.crs
+        if bbox is not None:
+            assert len(bbox) == 4
+            f_filt = f.filter(bbox=bbox)
+        else:
+            f_filt = f
+        gdf = GeoDataFrame.from_features(f_filt, crs=crs)
+
+    return gdf
diff --git a/geopandas/io/sql.py b/geopandas/io/sql.py
new file mode 100644
index 0000000..b88f412
--- /dev/null
+++ b/geopandas/io/sql.py
@@ -0,0 +1,46 @@
+import binascii
+
+from pandas import read_sql
+import shapely.wkb
+
+from geopandas import GeoSeries, GeoDataFrame
+
+
+def read_postgis(sql, con, geom_col='geom', crs=None, index_col=None,
+                 coerce_float=True, params=None):
+    """
+    Returns a GeoDataFrame corresponding to the result of the query 
+    string, which must contain a geometry column.
+
+    Examples:
+    sql = "SELECT geom, kind FROM polygons;"
+    df = geopandas.read_postgis(sql, con)
+
+    Parameters
+    ----------
+    sql: string
+    con: DB connection object or SQLAlchemy engine
+    geom_col: string, default 'geom'
+        column name to convert to shapely geometries
+    crs: optional
+        CRS to use for the returned GeoDataFrame
+
+    See the documentation for pandas.read_sql for further explanation 
+    of the following parameters:
+    index_col, coerce_float, params
+
+    """
+    df = read_sql(sql, con, index_col=index_col, coerce_float=coerce_float,
+                  params=params)
+
+    if geom_col not in df:
+        raise ValueError("Query missing geometry column '{0}'".format(
+            geom_col))
+
+    wkb_geoms = df[geom_col]
+
+    s = wkb_geoms.apply(lambda x: shapely.wkb.loads(binascii.unhexlify(x.encode())))
+
+    df[geom_col] = GeoSeries(s)
+
+    return GeoDataFrame(df, crs=crs, geometry=geom_col)
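+
+
+# Illustrative usage sketch (the connection URL and table are placeholders,
+# not from the upstream source):
+#   >>> from sqlalchemy import create_engine
+#   >>> engine = create_engine('postgresql://user:pass@localhost/mydb')
+#   >>> df = read_postgis("SELECT geom, kind FROM polygons;", engine,
+#   ...                    crs={'init': 'epsg:4326'})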
diff --git a/geopandas/plotting.py b/geopandas/plotting.py
new file mode 100644
index 0000000..69557de
--- /dev/null
+++ b/geopandas/plotting.py
@@ -0,0 +1,304 @@
+from __future__ import print_function
+
+import numpy as np
+from six import next
+from six.moves import xrange
+
+
+def plot_polygon(ax, poly, facecolor='red', edgecolor='black', alpha=0.5):
+    """ Plot a single Polygon geometry """
+    from descartes.patch import PolygonPatch
+    a = np.asarray(poly.exterior)
+    # without Descartes, we could make a Patch of exterior
+    ax.add_patch(PolygonPatch(poly, facecolor=facecolor, alpha=alpha))
+    ax.plot(a[:, 0], a[:, 1], color=edgecolor)
+    for p in poly.interiors:
+        x, y = zip(*p.coords)
+        ax.plot(x, y, color=edgecolor)
+
+
+def plot_multipolygon(ax, geom, facecolor='red', alpha=0.5):
+    """ Can safely call with either Polygon or Multipolygon geometry
+    """
+    if geom.type == 'Polygon':
+        plot_polygon(ax, geom, facecolor=facecolor, alpha=alpha)
+    elif geom.type == 'MultiPolygon':
+        for poly in geom.geoms:
+            plot_polygon(ax, poly, facecolor=facecolor, alpha=alpha)
+
+
+def plot_linestring(ax, geom, color='black', linewidth=1):
+    """ Plot a single LineString geometry """
+    a = np.array(geom)
+    ax.plot(a[:,0], a[:,1], color=color, linewidth=linewidth)
+
+
+def plot_multilinestring(ax, geom, color='red', linewidth=1):
+    """ Can safely call with either LineString or MultiLineString geometry
+    """
+    if geom.type == 'LineString':
+        plot_linestring(ax, geom, color=color, linewidth=linewidth)
+    elif geom.type == 'MultiLineString':
+        for line in geom.geoms:
+            plot_linestring(ax, line, color=color, linewidth=linewidth)
+
+
+def plot_point(ax, pt, marker='o', markersize=2):
+    """ Plot a single Point geometry """
+    ax.plot(pt.x, pt.y, marker=marker, markersize=markersize, linewidth=0)
+
+
+def gencolor(N, colormap='Set1'):
+    """
+    Color generator intended to work with one of the ColorBrewer
+    qualitative color scales.
+
+    Suggested values of colormap are the following:
+
+        Accent, Dark2, Paired, Pastel1, Pastel2, Set1, Set2, Set3
+
+    (although any matplotlib colormap will work).
+    """
+    from matplotlib import cm
+    # don't use more than 9 discrete colors
+    n_colors = min(N, 9)
+    cmap = cm.get_cmap(colormap, n_colors)
+    colors = cmap(range(n_colors))
+    for i in xrange(N):
+        yield colors[i % n_colors]
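+
+# Illustrative usage sketch (not from the upstream source): draw three
+# qualitative colors from the 'Set1' colormap.
+#   >>> color = gencolor(3, colormap='Set1')
+#   >>> rgba = [next(color) for _ in range(3)]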
+
+def plot_series(s, colormap='Set1', alpha=0.5, axes=None):
+    """ Plot a GeoSeries
+
+        Generate a plot of a GeoSeries geometry with matplotlib.
+
+        Parameters
+        ----------
+
+        Series
+            The GeoSeries to be plotted.  Currently Polygon,
+            MultiPolygon, LineString, MultiLineString and Point
+            geometries can be plotted.
+
+        colormap : str (default 'Set1')
+            The name of a colormap recognized by matplotlib.  Any
+            colormap will work, but categorical colormaps are
+            generally recommended.  Examples of useful discrete
+            colormaps include:
+
+                Accent, Dark2, Paired, Pastel1, Pastel2, Set1, Set2, Set3
+
+        alpha : float (default 0.5)
+            Alpha value for polygon fill regions.  Has no effect for
+            lines or points.
+
+        axes : matplotlib.pyplot.Artist (default None)
+            axes on which to draw the plot
+
+        Returns
+        -------
+
+        matplotlib axes instance
+    """
+    import matplotlib.pyplot as plt
+    if axes is None:
+        fig = plt.gcf()
+        fig.add_subplot(111, aspect='equal')
+        ax = plt.gca()
+    else:
+        ax = axes
+    color = gencolor(len(s), colormap=colormap)
+    for geom in s:
+        if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
+            plot_multipolygon(ax, geom, facecolor=next(color), alpha=alpha)
+        elif geom.type == 'LineString' or geom.type == 'MultiLineString':
+            plot_multilinestring(ax, geom, color=next(color))
+        elif geom.type == 'Point':
+            plot_point(ax, geom)
+    plt.draw()
+    return ax
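+
+# Illustrative usage sketch (not from the upstream source); requires
+# matplotlib and, for polygons, descartes:
+#   >>> from shapely.geometry import Point
+#   >>> from geopandas import GeoSeries
+#   >>> ax = plot_series(GeoSeries([Point(0, 0).buffer(1),
+#   ...                             Point(2, 0).buffer(1)]))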
+
+
+def plot_dataframe(s, column=None, colormap=None, alpha=0.5,
+                   categorical=False, legend=False, axes=None, scheme=None,
+                   k=5):
+    """ Plot a GeoDataFrame
+
+        Generate a plot of a GeoDataFrame with matplotlib.  If a
+        column is specified, the plot coloring will be based on values
+        in that column.  Otherwise, a categorical plot of the
+        geometries in the `geometry` column will be generated.
+
+        Parameters
+        ----------
+
+        GeoDataFrame
+            The GeoDataFrame to be plotted.  Currently Polygon,
+            MultiPolygon, LineString, MultiLineString and Point
+            geometries can be plotted.
+
+        column : str (default None)
+            The name of the column to be plotted.
+
+        categorical : bool (default False)
+            If False, colormap will reflect numerical values of the
+            column being plotted.  For non-numerical columns (or if
+            column=None), this will be set to True.
+
+        colormap : str (default 'Set1')
+            The name of a colormap recognized by matplotlib.
+
+        alpha : float (default 0.5)
+            Alpha value for polygon fill regions.  Has no effect for
+            lines or points.
+
+        legend : bool (default False)
+            Plot a legend (Experimental; currently for categorical
+            plots only)
+
+        axes : matplotlib.pyplot.Artist (default None)
+            axes on which to draw the plot
+
+        scheme : pysal.esda.mapclassify.Map_Classifier
+            Choropleth classification schemes
+
+        k   : int (default 5)
+            Number of classes (ignored if scheme is None)
+
+
+        Returns
+        -------
+
+        matplotlib axes instance
+    """
+    import matplotlib.pyplot as plt
+    from matplotlib.lines import Line2D
+    from matplotlib.colors import Normalize
+    from matplotlib import cm
+
+    if column is None:
+        return plot_series(s.geometry, colormap=colormap, alpha=alpha, axes=axes)
+    else:
+        if s[column].dtype == np.dtype('O'):
+            categorical = True
+        if categorical:
+            if colormap is None:
+                colormap = 'Set1'
+            categories = list(set(s[column].values))
+            categories.sort()
+            valuemap = dict([(k, v) for (v, k) in enumerate(categories)])
+            values = [valuemap[k] for k in s[column]]
+        else:
+            values = s[column]
+        if scheme is not None:
+            values = __pysal_choro(values, scheme, k=k)
+        cmap = norm_cmap(values, colormap, Normalize, cm)
+        if axes is None:
+            fig = plt.gcf()
+            fig.add_subplot(111, aspect='equal')
+            ax = plt.gca()
+        else:
+            ax = axes
+        for geom, value in zip(s.geometry, values):
+            if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
+                plot_multipolygon(ax, geom, facecolor=cmap.to_rgba(value), alpha=alpha)
+            elif geom.type == 'LineString' or geom.type == 'MultiLineString':
+                plot_multilinestring(ax, geom, color=cmap.to_rgba(value))
+            # TODO: color point geometries
+            elif geom.type == 'Point':
+                plot_point(ax, geom)
+        if legend:
+            if categorical:
+                patches = []
+                for value, cat in enumerate(categories):
+                    patches.append(Line2D([0], [0], linestyle="none",
+                                          marker="o", alpha=alpha,
+                                          markersize=10, markerfacecolor=cmap.to_rgba(value)))
+                ax.legend(patches, categories, numpoints=1, loc='best')
+            else:
+                # TODO: show a colorbar
+                raise NotImplementedError
+    plt.draw()
+    return ax
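+
+# Illustrative usage sketch (not from the upstream source); the shapefile and
+# column name are placeholders, and scheme-based coloring needs PySAL:
+#   >>> from geopandas import read_file
+#   >>> df = read_file('boros.shp')
+#   >>> ax = plot_dataframe(df, column='Shape_Area', scheme='quantiles', k=5)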
+
+
+def __pysal_choro(values, scheme, k=5):
+    """ Wrapper for choropleth schemes from PySAL for use with plot_dataframe
+
+        Parameters
+        ----------
+
+        values
+            Series to be plotted
+
+        scheme
+            pysal.esda.mapclassify classification scheme
+            ['Equal_Interval'|'Quantiles'|'Fisher_Jenks']
+
+        k
+            number of classes (2 <= k <=9)
+
+        Returns
+        -------
+
+        values
+            Series with values replaced with class identifiers if PySAL is
+            available; otherwise the original values are returned unchanged
+    """
+
+    try: 
+        from pysal.esda.mapclassify import Quantiles, Equal_Interval, Fisher_Jenks
+        schemes = {}
+        schemes['equal_interval'] = Equal_Interval
+        schemes['quantiles'] = Quantiles
+        schemes['fisher_jenks'] = Fisher_Jenks
+        s0 = scheme
+        scheme = scheme.lower()
+        if scheme not in schemes:
+            scheme = 'quantiles'
+            print('Unrecognized scheme: ', s0)
+            print('Using Quantiles instead')
+        if k < 2 or k > 9:
+            print('Invalid k: ', k)
+            print('2<=k<=9, setting k=5 (default)')
+            k = 5
+        binning = schemes[scheme](values, k)
+        values = binning.yb
+    except ImportError: 
+        print('PySAL not installed, setting map to default')
+
+    return values
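+
+# Illustrative note (not from the upstream source): with PySAL installed,
+# __pysal_choro(df['Shape_Area'], 'quantiles', k=5) replaces each value with
+# its class label (0..k-1); without PySAL the values pass through unchanged.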
+
+def norm_cmap(values, cmap, normalize, cm):
+
+    """ Normalize and set colormap
+
+        Parameters
+        ----------
+
+        values
+            Series or array to be normalized
+
+        cmap
+            matplotlib Colormap
+
+        normalize
+            matplotlib.colors.Normalize
+
+        cm
+            matplotlib.cm
+
+        Returns
+        -------
+        n_cmap
+            mapping of normalized values to colormap (cmap)
+            
+    """
+
+    mn, mx = min(values), max(values)
+    norm = normalize(vmin=mn, vmax=mx)
+    n_cmap  = cm.ScalarMappable(norm=norm, cmap=cmap)
+    return n_cmap
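+
+# Illustrative usage sketch (not from the upstream source):
+#   >>> from matplotlib.colors import Normalize
+#   >>> from matplotlib import cm
+#   >>> mapper = norm_cmap([0, 5, 10], 'Reds', Normalize, cm)
+#   >>> rgba = mapper.to_rgba(5)   # mid-range value mapped into 'Reds'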
+
+
+
+
+
diff --git a/requirements.test.txt b/requirements.test.txt
new file mode 100644
index 0000000..69b4255
--- /dev/null
+++ b/requirements.test.txt
@@ -0,0 +1,7 @@
+psycopg2>=2.5.1
+SQLAlchemy>=0.8.3
+geopy==0.99
+matplotlib>=1.2.1
+descartes>=1.0
+pytest-cov
+coveralls
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..3f2efce
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+Cython>=0.16
+shapely>=1.2.18
+fiona>=1.0.1
+pyproj>=1.9.3
+six>=1.3.0
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..1eaf90e
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+"""Installation script
+
+Version handling borrowed from pandas project.
+"""
+
+import sys
+import os
+import warnings
+
+try:
+    from setuptools import setup
+except ImportError:
+    from distutils.core import setup
+
+LONG_DESCRIPTION = """GeoPandas is a project to add support for geographic data to
+`pandas`_ objects.
+
+The goal of GeoPandas is to make working with geospatial data in
+python easier. It combines the capabilities of `pandas`_ and `shapely`_,
+providing geospatial operations in pandas and a high-level interface
+to multiple geometries to shapely. GeoPandas enables you to easily do
+operations in python that would otherwise require a spatial database
+such as PostGIS.
+
+.. _pandas: http://pandas.pydata.org
+.. _shapely: http://toblerity.github.io/shapely
+"""
+
+MAJOR = 0
+MINOR = 1
+MICRO = 1
+ISRELEASED = True
+VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+QUALIFIER = ''
+
+FULLVERSION = VERSION
+if not ISRELEASED:
+    FULLVERSION += '.dev'
+    try:
+        import subprocess
+        try:
+            pipe = subprocess.Popen(["git", "rev-parse", "--short", "HEAD"],
+                                    stdout=subprocess.PIPE).stdout
+        except OSError:
+            # msysgit compatibility
+            pipe = subprocess.Popen(
+                ["git.cmd", "describe", "HEAD"],
+                stdout=subprocess.PIPE).stdout
+        rev = pipe.read().strip()
+        # makes distutils blow up on Python 2.7
+        if sys.version_info[0] >= 3:
+            rev = rev.decode('ascii')
+
+        FULLVERSION = '%d.%d.%d.dev-%s' % (MAJOR, MINOR, MICRO, rev)
+
+    except:
+        warnings.warn("WARNING: Couldn't get git revision")
+else:
+    FULLVERSION += QUALIFIER
+
+
+def write_version_py(filename=None):
+    cnt = """\
+version = '%s'
+short_version = '%s'
+"""
+    if not filename:
+        filename = os.path.join(
+            os.path.dirname(__file__), 'geopandas', 'version.py')
+
+    a = open(filename, 'w')
+    try:
+        a.write(cnt % (FULLVERSION, VERSION))
+    finally:
+        a.close()
+
+write_version_py()
+
+setup(name='geopandas',
+      version=FULLVERSION,
+      description='Geographic pandas extensions',
+      license='BSD',
+      author='Kelsey Jordahl',
+      author_email='kjordahl at enthought.com',
+      url='http://geopandas.org',
+      long_description=LONG_DESCRIPTION,
+      packages=['geopandas', 'geopandas.io'],
+      install_requires=['pandas', 'shapely', 'fiona', 'descartes', 'pyproj'],
+)
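+
+# Illustrative note (not from the upstream source): a typical local install is
+#   python setup.py install
+# or, equivalently, `pip install .` from the repository root.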
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/baseline_images/test_plotting/lines_plot.png b/tests/baseline_images/test_plotting/lines_plot.png
new file mode 100644
index 0000000..e048aed
Binary files /dev/null and b/tests/baseline_images/test_plotting/lines_plot.png differ
diff --git a/tests/baseline_images/test_plotting/points_plot.png b/tests/baseline_images/test_plotting/points_plot.png
new file mode 100644
index 0000000..74a6ce7
Binary files /dev/null and b/tests/baseline_images/test_plotting/points_plot.png differ
diff --git a/tests/baseline_images/test_plotting/poly_plot.png b/tests/baseline_images/test_plotting/poly_plot.png
new file mode 100644
index 0000000..a204e7c
Binary files /dev/null and b/tests/baseline_images/test_plotting/poly_plot.png differ
diff --git a/tests/test_geocode.py b/tests/test_geocode.py
new file mode 100644
index 0000000..f108454
--- /dev/null
+++ b/tests/test_geocode.py
@@ -0,0 +1,91 @@
+from __future__ import absolute_import
+
+import sys
+
+from fiona.crs import from_epsg
+import pandas as pd
+from shapely.geometry import Point
+import geopandas as gpd
+import nose
+
+from geopandas.geocode import geocode, _prepare_geocode_result
+from .util import unittest
+
+
+def _skip_if_no_geopy():
+    try:
+        import geopy
+    except ImportError:
+        raise nose.SkipTest("Geopy not installed. Skipping tests.")
+    except SyntaxError:
+        raise nose.SkipTest("Geopy is known to be broken on Python 3.2. "
+                            "Skipping tests.")
+
+class TestGeocode(unittest.TestCase):
+    def setUp(self):
+        _skip_if_no_geopy()
+        self.locations = ['260 Broadway, New York, NY',
+                          '77 Massachusetts Ave, Cambridge, MA']
+
+    def test_prepare_result(self):
+        # Calls _prepare_result with sample results from the geocoder call
+        # loop
+        from geopandas.geocode import _prepare_geocode_result
+        p0 = Point(12.3, -45.6) # Treat these as lat/lon
+        p1 = Point(-23.4, 56.7)
+        d = {'a': ('address0', p0.coords[0]),
+             'b': ('address1', p1.coords[0])}
+
+        df = _prepare_geocode_result(d)
+        assert type(df) is gpd.GeoDataFrame
+        self.assertEqual(from_epsg(4326), df.crs)
+        self.assertEqual(len(df), 2)
+        self.assert_('address' in df)
+
+        coords = df.loc['a']['geometry'].coords[0]
+        test = p0.coords[0]
+        # Output from the df should be lon/lat
+        self.assertAlmostEqual(coords[0], test[1])
+        self.assertAlmostEqual(coords[1], test[0])
+
+        coords = df.loc['b']['geometry'].coords[0]
+        test = p1.coords[0]
+        self.assertAlmostEqual(coords[0], test[1])
+        self.assertAlmostEqual(coords[1], test[0])
+
+    def test_prepare_result_none(self):
+        from geopandas.geocode import _prepare_geocode_result
+        p0 = Point(12.3, -45.6) # Treat these as lat/lon
+        d = {'a': ('address0', p0.coords[0]),
+             'b': (None, None)}
+
+        df = _prepare_geocode_result(d)
+        assert type(df) is gpd.GeoDataFrame
+        self.assertEqual(from_epsg(4326), df.crs)
+        self.assertEqual(len(df), 2)
+        self.assert_('address' in df)
+
+        row = df.loc['b']
+        self.assertEqual(len(row['geometry'].coords), 0)
+        self.assert_(pd.np.isnan(row['address']))
+    
+    def test_bad_provider(self):
+        from geopandas.geocode import geocode
+        with self.assertRaises(ValueError):
+            geocode(['cambridge, ma'], 'badprovider')
+
+    def test_googlev3(self):
+        from geopandas.geocode import geocode
+        g = geocode(self.locations, provider='googlev3', timeout=2)
+        self.assertIsInstance(g, gpd.GeoDataFrame)
+
+    def test_openmapquest(self):
+        from geopandas.geocode import geocode
+        g = geocode(self.locations, provider='openmapquest', timeout=2)
+        self.assertIsInstance(g, gpd.GeoDataFrame)
+
+    @unittest.skip('Nominatim server is unreliable for tests.')
+    def test_nominatim(self):
+        from geopandas.geocode import geocode
+        g = geocode(self.locations, provider='nominatim', timeout=2)
+        self.assertIsInstance(g, gpd.GeoDataFrame)
diff --git a/tests/test_geodataframe.py b/tests/test_geodataframe.py
new file mode 100644
index 0000000..e595b49
--- /dev/null
+++ b/tests/test_geodataframe.py
@@ -0,0 +1,429 @@
+from __future__ import absolute_import
+
+import json
+import os
+import tempfile
+import shutil
+
+import numpy as np
+import pandas as pd
+from pandas.util.testing import assert_frame_equal
+from shapely.geometry import Point, Polygon
+
+import fiona
+from geopandas import GeoDataFrame, read_file, GeoSeries
+from .util import unittest, download_nybb, assert_geoseries_equal, connect, \
+                  create_db, validate_boro_df, PANDAS_NEW_SQL_API
+
+
+class TestDataFrame(unittest.TestCase):
+
+    def setUp(self):
+        N = 10
+
+        nybb_filename = download_nybb()
+
+        self.df = read_file('/nybb_14a_av/nybb.shp', vfs='zip://' + nybb_filename)
+        self.tempdir = tempfile.mkdtemp()
+        self.boros = self.df['BoroName']
+        self.crs = {'init': 'epsg:4326'}
+        self.df2 = GeoDataFrame([
+            {'geometry' : Point(x, y), 'value1': x + y, 'value2': x * y}
+            for x, y in zip(range(N), range(N))], crs=self.crs)
+
+    def tearDown(self):
+        shutil.rmtree(self.tempdir)
+
+    def test_df_init(self):
+        self.assertTrue(type(self.df2) is GeoDataFrame)
+        self.assertTrue(self.df2.crs == self.crs)
+
+    def test_different_geo_colname(self):
+        data = {"A": range(5), "B": range(-5, 0),
+                "location": [Point(x, y) for x, y in zip(range(5), range(5))]}
+        df = GeoDataFrame(data, crs=self.crs, geometry='location')
+        locs = GeoSeries(data['location'], crs=self.crs)
+        assert_geoseries_equal(df.geometry, locs)
+        self.assert_('geometry' not in df)
+        self.assertEqual(df.geometry.name, 'location')
+        # internal implementation detail
+        self.assertEqual(df._geometry_column_name, 'location')
+
+        geom2 = [Point(x, y) for x, y in zip(range(5, 10), range(5))]
+        df2 = df.set_geometry(geom2, crs='dummy_crs')
+        self.assert_('geometry' in df2)
+        self.assert_('location' in df2)
+        self.assertEqual(df2.crs, 'dummy_crs')
+        self.assertEqual(df2.geometry.crs, 'dummy_crs')
+        # reset so it outputs okay
+        df2.crs = df.crs
+        assert_geoseries_equal(df2.geometry, GeoSeries(geom2, crs=df2.crs))
+        # for right now, non-geometry comes back as series
+        assert_geoseries_equal(df2['location'], df['location'],
+                                  check_series_type=False, check_dtype=False)
+
+    def test_geo_getitem(self):
+        data = {"A": range(5), "B": range(-5, 0),
+                "location": [Point(x, y) for x, y in zip(range(5), range(5))]}
+        df = GeoDataFrame(data, crs=self.crs, geometry='location')
+        self.assert_(isinstance(df.geometry, GeoSeries))
+        df['geometry'] = df["A"]
+        self.assert_(isinstance(df.geometry, GeoSeries))
+        self.assertEqual(df.geometry[0], data['location'][0])
+        # good if this changed in the future
+        self.assert_(not isinstance(df['geometry'], GeoSeries))
+        self.assert_(isinstance(df['location'], GeoSeries))
+
+        data["geometry"] = [Point(x + 1, y - 1) for x, y in zip(range(5), range(5))]
+        df = GeoDataFrame(data, crs=self.crs)
+        self.assert_(isinstance(df.geometry, GeoSeries))
+        self.assert_(isinstance(df['geometry'], GeoSeries))
+        # good if this changed in the future
+        self.assert_(not isinstance(df['location'], GeoSeries))
+
+    def test_geometry_property(self):
+        assert_geoseries_equal(self.df.geometry, self.df['geometry'],
+                                  check_dtype=True, check_index_type=True)
+
+        df = self.df.copy()
+        new_geom = [Point(x,y) for x, y in zip(range(len(self.df)),
+                                               range(len(self.df)))]
+        df.geometry = new_geom
+
+        new_geom = GeoSeries(new_geom, index=df.index, crs=df.crs)
+        assert_geoseries_equal(df.geometry, new_geom)
+        assert_geoseries_equal(df['geometry'], new_geom)
+
+        # new crs
+        gs = GeoSeries(new_geom, crs="epsg:26018")
+        df.geometry = gs
+        self.assertEqual(df.crs, "epsg:26018")
+
+    def test_geometry_property_errors(self):
+        with self.assertRaises(AttributeError):
+            df = self.df.copy()
+            del df['geometry']
+            df.geometry
+
+        # list-like error
+        with self.assertRaises(ValueError):
+            df = self.df2.copy()
+            df.geometry = 'value1'
+
+        # list-like error
+        with self.assertRaises(ValueError):
+            df = self.df.copy()
+            df.geometry = 'apple'
+
+        # non-geometry error
+        with self.assertRaises(TypeError):
+            df = self.df.copy()
+            df.geometry = list(range(df.shape[0]))
+
+        with self.assertRaises(KeyError):
+            df = self.df.copy()
+            del df['geometry']
+            df['geometry']
+
+        # ndim error
+        with self.assertRaises(ValueError):
+            df = self.df.copy()
+            df.geometry = df
+
+    def test_set_geometry(self):
+        geom = GeoSeries([Point(x,y) for x,y in zip(range(5), range(5))])
+        original_geom = self.df.geometry
+
+        df2 = self.df.set_geometry(geom)
+        self.assert_(self.df is not df2)
+        assert_geoseries_equal(df2.geometry, geom)
+        assert_geoseries_equal(self.df.geometry, original_geom)
+        assert_geoseries_equal(self.df['geometry'], self.df.geometry)
+        # unknown column
+        with self.assertRaises(ValueError):
+            self.df.set_geometry('nonexistent-column')
+
+        # ndim error
+        with self.assertRaises(ValueError):
+            self.df.set_geometry(self.df)
+
+        # new crs - setting should default to GeoSeries' crs
+        gs = GeoSeries(geom, crs="epsg:26018")
+        new_df = self.df.set_geometry(gs)
+        self.assertEqual(new_df.crs, "epsg:26018")
+
+        # explicit crs overrides self and dataframe
+        new_df = self.df.set_geometry(gs, crs="epsg:27159")
+        self.assertEqual(new_df.crs, "epsg:27159")
+        self.assertEqual(new_df.geometry.crs, "epsg:27159")
+
+        # Series should use dataframe's
+        new_df = self.df.set_geometry(geom.values)
+        self.assertEqual(new_df.crs, self.df.crs)
+        self.assertEqual(new_df.geometry.crs, self.df.crs)
+
+    def test_set_geometry_col(self):
+        g = self.df.geometry
+        g_simplified = g.simplify(100)
+        self.df['simplified_geometry'] = g_simplified
+        df2 = self.df.set_geometry('simplified_geometry')
+
+        # Drop is false by default
+        self.assert_('simplified_geometry' in df2)
+        assert_geoseries_equal(df2.geometry, g_simplified)
+
+        # If True, drops column and renames to geometry
+        df3 = self.df.set_geometry('simplified_geometry', drop=True)
+        self.assert_('simplified_geometry' not in df3)
+        assert_geoseries_equal(df3.geometry, g_simplified)
+
+    def test_set_geometry_inplace(self):
+        geom = [Point(x,y) for x,y in zip(range(5), range(5))]
+        ret = self.df.set_geometry(geom, inplace=True)
+        self.assert_(ret is None)
+        geom = GeoSeries(geom, index=self.df.index, crs=self.df.crs)
+        assert_geoseries_equal(self.df.geometry, geom)
+
+    def test_set_geometry_series(self):
+        # Test when setting geometry with a Series that
+        # alignment will occur
+        #
+        # Reverse the index order
+        # Set the Series to be Point(i,i) where i is the index
+        self.df.index = range(len(self.df)-1, -1, -1)
+
+        d = {}
+        for i in range(len(self.df)):
+            d[i] = Point(i, i)
+        g = GeoSeries(d)
+        # At this point, the DataFrame index is [4,3,2,1,0] and the
+        # GeoSeries index is [0,1,2,3,4]. Make sure set_geometry aligns
+        # them to match indexes
+        df = self.df.set_geometry(g)
+
+        for i, r in df.iterrows():
+            self.assertAlmostEqual(i, r['geometry'].x)
+            self.assertAlmostEqual(i, r['geometry'].y)
+
+    def test_to_json(self):
+        text = self.df.to_json()
+        data = json.loads(text)
+        self.assertTrue(data['type'] == 'FeatureCollection')
+        self.assertTrue(len(data['features']) == 5)
+
+    def test_to_json_geom_col(self):
+        df = self.df.copy()
+        df['geom'] = df['geometry']
+        df['geometry'] = np.arange(len(df))
+        df.set_geometry('geom', inplace=True)
+
+        text = df.to_json()
+        data = json.loads(text)
+        self.assertTrue(data['type'] == 'FeatureCollection')
+        self.assertTrue(len(data['features']) == 5)
+
+    def test_to_json_na(self):
+        # Set a value as nan and make sure it's written
+        self.df['Shape_Area'][self.df['BoroName']=='Queens'] = np.nan
+
+        text = self.df.to_json()
+        data = json.loads(text)
+        self.assertTrue(len(data['features']) == 5)
+        for f in data['features']:
+            props = f['properties']
+            self.assertEqual(len(props), 4)
+            if props['BoroName'] == 'Queens':
+                self.assertTrue(props['Shape_Area'] is None)
+
+    def test_to_json_bad_na(self):
+        # Check that a bad na argument raises error
+        with self.assertRaises(ValueError):
+            text = self.df.to_json(na='garbage')
+
+    def test_to_json_dropna(self):
+        self.df['Shape_Area'][self.df['BoroName']=='Queens'] = np.nan
+        self.df['Shape_Leng'][self.df['BoroName']=='Bronx'] = np.nan
+
+        text = self.df.to_json(na='drop')
+        data = json.loads(text)
+        self.assertEqual(len(data['features']), 5)
+        for f in data['features']:
+            props = f['properties']
+            if props['BoroName'] == 'Queens':
+                self.assertEqual(len(props), 3)
+                self.assertTrue('Shape_Area' not in props)
+                # Just make sure setting it to nan in a different row
+                # doesn't affect this one
+                self.assertTrue('Shape_Leng' in props)
+            elif props['BoroName'] == 'Bronx':
+                self.assertEqual(len(props), 3)
+                self.assertTrue('Shape_Leng' not in props)
+                self.assertTrue('Shape_Area' in props)
+            else:
+                self.assertEqual(len(props), 4)
+
+    def test_to_json_keepna(self):
+        self.df['Shape_Area'][self.df['BoroName']=='Queens'] = np.nan
+        self.df['Shape_Leng'][self.df['BoroName']=='Bronx'] = np.nan
+
+        text = self.df.to_json(na='keep')
+        data = json.loads(text)
+        self.assertEqual(len(data['features']), 5)
+        for f in data['features']:
+            props = f['properties']
+            self.assertEqual(len(props), 4)
+            if props['BoroName'] == 'Queens':
+                self.assertTrue(np.isnan(props['Shape_Area']))
+                # Just make sure setting it to nan in a different row
+                # doesn't affect this one
+                self.assertTrue('Shape_Leng' in props)
+            elif props['BoroName'] == 'Bronx':
+                self.assertTrue(np.isnan(props['Shape_Leng']))
+                self.assertTrue('Shape_Area' in props)
+
+    def test_copy(self):
+        df2 = self.df.copy()
+        self.assertTrue(type(df2) is GeoDataFrame)
+        self.assertEqual(self.df.crs, df2.crs)
+
+    def test_to_file(self):
+        """ Test to_file and from_file """
+        tempfilename = os.path.join(self.tempdir, 'boros.shp')
+        self.df.to_file(tempfilename)
+        # Read layer back in?
+        df = GeoDataFrame.from_file(tempfilename)
+        self.assertTrue('geometry' in df)
+        self.assertTrue(len(df) == 5)
+        self.assertTrue(np.alltrue(df['BoroName'].values == self.boros))
+
+    def test_to_file_types(self):
+        """ Test various integer type columns (GH#93) """
+        tempfilename = os.path.join(self.tempdir, 'int.shp')
+        int_types = [np.int, np.int8, np.int16, np.int32, np.int64, np.intp,
+                     np.uint8, np.uint16, np.uint32, np.uint64, np.long]
+        geometry = self.df2.geometry
+        data = dict((str(i), np.arange(len(geometry), dtype=dtype))
+                     for i, dtype in enumerate(int_types))
+        df = GeoDataFrame(data, geometry=geometry)
+        df.to_file(tempfilename)
+
+    def test_mixed_types_to_file(self):
+        """ Test that mixed geometry types raise error when writing to file """
+        tempfilename = os.path.join(self.tempdir, 'test.shp')
+        s = GeoDataFrame({'geometry' : [Point(0, 0),
+                                        Polygon([(0, 0), (1, 0), (1, 1)])]})
+        with self.assertRaises(ValueError):
+            s.to_file(tempfilename)
+
+    def test_bool_index(self):
+        # Find boros with 'B' in their name
+        df = self.df[self.df['BoroName'].str.contains('B')]
+        self.assertTrue(len(df) == 2)
+        boros = df['BoroName'].values
+        self.assertTrue('Brooklyn' in boros)
+        self.assertTrue('Bronx' in boros)
+        self.assertTrue(type(df) is GeoDataFrame)
+
+    def test_transform(self):
+        df2 = self.df2.copy()
+        df2.crs = {'init': 'epsg:26918', 'no_defs': True}
+        lonlat = df2.to_crs(epsg=4326)
+        utm = lonlat.to_crs(epsg=26918)
+        self.assertTrue(all(df2['geometry'].geom_almost_equals(utm['geometry'], decimal=2)))
+
+    def test_from_features(self):
+        nybb_filename = download_nybb()
+        with fiona.open('/nybb_14a_av/nybb.shp',
+                        vfs='zip://' + nybb_filename) as f:
+            features = list(f)
+            crs = f.crs
+
+        df = GeoDataFrame.from_features(features, crs=crs)
+        df.rename(columns=lambda x: x.lower(), inplace=True)
+        validate_boro_df(self, df)
+        self.assert_(df.crs == crs)
+
+    def test_from_features_unaligned_properties(self):
+        p1 = Point(1,1)
+        f1 = {'type': 'Feature', 
+                'properties': {'a': 0}, 
+                'geometry': p1.__geo_interface__}
+
+        p2 = Point(2,2)
+        f2 = {'type': 'Feature',
+                'properties': {'b': 1},
+                'geometry': p2.__geo_interface__}
+
+        p3 = Point(3,3)
+        f3 = {'type': 'Feature',
+                'properties': {'a': 2},
+                'geometry': p3.__geo_interface__}
+
+        df = GeoDataFrame.from_features([f1, f2, f3])
+
+        result = df[['a', 'b']]
+        expected = pd.DataFrame.from_dict([{'a': 0, 'b': np.nan},
+                                           {'a': np.nan, 'b': 1},
+                                           {'a': 2, 'b': np.nan}])
+        assert_frame_equal(expected, result)
+
+    def test_from_postgis_default(self):
+        con = connect('test_geopandas')
+        if con is None or not create_db(self.df):
+            raise unittest.case.SkipTest()
+
+        try:
+            sql = "SELECT * FROM nybb;"
+            df = GeoDataFrame.from_postgis(sql, con)
+        finally:
+            if PANDAS_NEW_SQL_API:
+                # It's not really a connection, it's an engine
+                con = con.connect()
+            con.close()
+
+        validate_boro_df(self, df)
+
+    def test_from_postgis_custom_geom_col(self):
+        con = connect('test_geopandas')
+        if con is None or not create_db(self.df):
+            raise unittest.case.SkipTest()
+
+        try:
+            sql = """SELECT
+                     borocode, boroname, shape_leng, shape_area,
+                     geom AS __geometry__
+                     FROM nybb;"""
+            df = GeoDataFrame.from_postgis(sql, con, geom_col='__geometry__')
+        finally:
+            if PANDAS_NEW_SQL_API:
+                # It's not really a connection, it's an engine
+                con = con.connect()
+            con.close()
+
+        validate_boro_df(self, df)
+
+    def test_dataframe_to_geodataframe(self):
+        df = pd.DataFrame({"A": range(len(self.df)), "location":
+                           list(self.df.geometry)}, index=self.df.index)
+        gf = df.set_geometry('location', crs=self.df.crs)
+        self.assertIsInstance(df, pd.DataFrame)
+        self.assertIsInstance(gf, GeoDataFrame)
+        assert_geoseries_equal(gf.geometry, self.df.geometry)
+        self.assertEqual(gf.geometry.name, 'location')
+        self.assert_('geometry' not in gf)
+
+        gf2 = df.set_geometry('location', crs=self.df.crs, drop=True)
+        self.assertIsInstance(df, pd.DataFrame)
+        self.assertIsInstance(gf2, GeoDataFrame)
+        self.assertEqual(gf2.geometry.name, 'geometry')
+        self.assert_('geometry' in gf2)
+        self.assert_('location' not in gf2)
+        self.assert_('location' in df)
+
+        # should be a copy
+        df.ix[0, "A"] = 100
+        self.assertEqual(gf.ix[0, "A"], 0)
+        self.assertEqual(gf2.ix[0, "A"], 0)
+
+        with self.assertRaises(ValueError):
+            df.set_geometry('location', inplace=True)
diff --git a/tests/test_geom_methods.py b/tests/test_geom_methods.py
new file mode 100644
index 0000000..9e87965
--- /dev/null
+++ b/tests/test_geom_methods.py
@@ -0,0 +1,415 @@
+from __future__ import absolute_import
+
+import string
+
+import numpy as np
+from numpy.testing import assert_array_equal
+from pandas.util.testing import assert_series_equal, assert_frame_equal
+from pandas import Series, DataFrame
+from shapely.geometry import Point, LinearRing, LineString, Polygon
+from shapely.geometry.collection import GeometryCollection
+
+from geopandas import GeoSeries, GeoDataFrame
+from geopandas.base import GeoPandasBase
+from .util import (
+    unittest, geom_equals, geom_almost_equals, assert_geoseries_equal
+)
+
+class TestGeomMethods(unittest.TestCase):
+
+    def setUp(self):
+        self.t1 = Polygon([(0, 0), (1, 0), (1, 1)])
+        self.t2 = Polygon([(0, 0), (1, 1), (0, 1)])
+        self.sq = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
+        self.inner_sq = Polygon([(0.25, 0.25), (0.75, 0.25), (0.75, 0.75),
+                            (0.25, 0.75)])
+        self.nested_squares = Polygon(self.sq.boundary,
+                                      [self.inner_sq.boundary])
+        self.p0 = Point(5, 5)
+        self.g0 = GeoSeries([self.t1, self.t2, self.sq, self.inner_sq,
+                             self.nested_squares, self.p0])
+        self.g1 = GeoSeries([self.t1, self.sq])
+        self.g2 = GeoSeries([self.sq, self.t1])
+        self.g3 = GeoSeries([self.t1, self.t2])
+        self.g3.crs = {'init': 'epsg:4326', 'no_defs': True}
+        self.g4 = GeoSeries([self.t2, self.t1])
+        self.na = GeoSeries([self.t1, self.t2, Polygon()])
+        self.na_none = GeoSeries([self.t1, self.t2, None])
+        self.a1 = self.g1.copy()
+        self.a1.index = ['A', 'B']
+        self.a2 = self.g2.copy()
+        self.a2.index = ['B', 'C']
+        self.esb = Point(-73.9847, 40.7484)
+        self.sol = Point(-74.0446, 40.6893)
+        self.landmarks = GeoSeries([self.esb, self.sol],
+                                   crs={'init': 'epsg:4326', 'no_defs': True})
+        self.l1 = LineString([(0, 0), (0, 1), (1, 1)])
+        self.l2 = LineString([(0, 0), (1, 0), (1, 1), (0, 1)])
+        self.g5 = GeoSeries([self.l1, self.l2])
+
+        # Crossed lines
+        self.l3 = LineString([(0, 0), (1, 1)])
+        self.l4 = LineString([(0, 1), (1, 0)])
+        self.crossed_lines = GeoSeries([self.l3, self.l4])
+
+        # Placeholder for testing, will just drop in different geometries
+        # when needed
+        self.gdf1 = GeoDataFrame({'geometry' : self.g1,
+                                  'col0' : [1.0, 2.0],
+                                  'col1' : ['geo', 'pandas']})
+        self.gdf2 = GeoDataFrame({'geometry' : self.g1,
+                                  'col3' : [4, 5],
+                                  'col4' : ['rand', 'string']})
+
+
+    def _test_unary_real(self, op, expected, a):
+        """ Tests for 'area', 'length', 'is_valid', etc. """
+        fcmp = assert_series_equal
+        self._test_unary(op, expected, a, fcmp)
+
+    def _test_unary_topological(self, op, expected, a):
+        if isinstance(expected, GeoPandasBase):
+            fcmp = assert_geoseries_equal
+        else:
+            fcmp = lambda a, b: self.assert_(geom_equals(a, b))
+        self._test_unary(op, expected, a, fcmp)
+
+    def _test_binary_topological(self, op, expected, a, b, *args, **kwargs):
+        """ Tests for 'intersection', 'union', 'symmetric_difference', etc. """
+        if isinstance(expected, GeoPandasBase):
+            fcmp = assert_geoseries_equal
+        else:
+            fcmp = lambda a, b: self.assert_(geom_equals(a, b))
+
+        if isinstance(b, GeoPandasBase):
+            right_df = True
+        else:
+            right_df = False
+
+        self._binary_op_test(op, expected, a, b, fcmp, True, right_df, 
+                        *args, **kwargs)
+
+    def _test_binary_real(self, op, expected, a, b, *args, **kwargs):
+        fcmp = assert_series_equal
+        self._binary_op_test(op, expected, a, b, fcmp, True, False, *args, **kwargs)
+
+    def _test_binary_operator(self, op, expected, a, b):
+        """
+        The operators only have GeoSeries on the left, but can have
+        GeoSeries or GeoDataFrame on the right.
+
+        """
+        if isinstance(expected, GeoPandasBase):
+            fcmp = assert_geoseries_equal
+        else:
+            fcmp = lambda a, b: self.assert_(geom_equals(a, b))
+
+        if isinstance(b, GeoPandasBase):
+            right_df = True
+        else:
+            right_df = False
+
+        self._binary_op_test(op, expected, a, b, fcmp, False, right_df)
+
+    def _binary_op_test(self, op, expected, left, right, fcmp, left_df,
+                        right_df, 
+                        *args, **kwargs):
+        """
+        This is a helper to call a function on GeoSeries and GeoDataFrame
+        arguments. For example, 'intersection' is a member of both GeoSeries
+        and GeoDataFrame and can take either GeoSeries or GeoDataFrame inputs.
+        This function has the ability to test all four combinations of input
+        types.
+
+        Parameters
+        ----------
+        
+        expected : str
+            The operation to be tested. e.g., 'intersection'
+        left: GeoSeries
+        right: GeoSeries
+        fcmp: function 
+            Called with the result of the operation and expected. It should
+            assert if the result is incorrect
+        left_df: bool
+            If the left input should also be called with a GeoDataFrame
+        right_df: bool
+            Indicates whether the right input should be called with a
+            GeoDataFrame
+
+        """
+        def _make_gdf(s):
+            n = len(s)
+            col1 = string.ascii_lowercase[:n]
+            col2 = range(n)
+            
+            return GeoDataFrame({'geometry': s.values, 
+                                 'col1' : col1, 
+                                 'col2' : col2},
+                                 index=s.index, crs=s.crs)
+
+        # Test GeoSeries.op(GeoSeries)
+        result = getattr(left, op)(right, *args, **kwargs)
+        fcmp(result, expected)
+        
+        if left_df:
+            # Test GeoDataFrame.op(GeoSeries)
+            gdf_left = _make_gdf(left)
+            result = getattr(gdf_left, op)(right, *args, **kwargs)
+            fcmp(result, expected)
+
+        if right_df:
+            # Test GeoSeries.op(GeoDataFrame)
+            gdf_right = _make_gdf(right)
+            result = getattr(left, op)(gdf_right, *args, **kwargs)
+            fcmp(result, expected)
+
+            if left_df:
+                # Test GeoDataFrame.op(GeoDataFrame)
+                result = getattr(gdf_left, op)(gdf_right, *args, **kwargs)
+                fcmp(result, expected)
+
+    def _test_unary(self, op, expected, a, fcmp):
+        # GeoSeries, (GeoSeries or geometry)
+        result = getattr(a, op)
+        fcmp(result, expected)
+
+        # GeoDataFrame, (GeoSeries or geometry)
+        gdf = self.gdf1.set_geometry(a)
+        result = getattr(gdf, op)
+        fcmp(result, expected)
+
+    def test_intersection(self):
+        self._test_binary_topological('intersection', self.t1, 
+                                      self.g1, self.g2)
+
+    def test_union_series(self):
+        self._test_binary_topological('union', self.sq, self.g1, self.g2)
+
+    def test_union_polygon(self):
+        self._test_binary_topological('union', self.sq, self.g1, self.t2)
+
+    def test_symmetric_difference_series(self):
+        self._test_binary_topological('symmetric_difference', self.sq,
+                                      self.g3, self.g4)
+
+    def test_symmetric_difference_poly(self):
+        expected = GeoSeries([GeometryCollection(), self.sq], crs=self.g3.crs)
+        self._test_binary_topological('symmetric_difference', expected,
+                                      self.g3, self.t1)
+
+    def test_difference_series(self):
+        expected = GeoSeries([GeometryCollection(), self.t2])
+        self._test_binary_topological('difference', expected,
+                                      self.g1, self.g2)
+
+    def test_difference_poly(self):
+        expected = GeoSeries([self.t1, self.t1])
+        self._test_binary_topological('difference', expected,
+                                      self.g1, self.t2)
+
+    def test_boundary(self):
+        l1 = LineString([(0, 0), (1, 0), (1, 1), (0, 0)])
+        l2 = LineString([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)])
+        expected = GeoSeries([l1, l2], index=self.g1.index, crs=self.g1.crs)
+
+        self._test_unary_topological('boundary', expected, self.g1)
+
+    def test_area(self):
+        expected = Series(np.array([0.5, 1.0]), index=self.g1.index)
+        self._test_unary_real('area', expected, self.g1)
+
+    def test_bounds(self):
+        # Set columns to get the order right
+        expected = DataFrame({'minx': [0.0, 0.0], 'miny': [0.0, 0.0],
+                              'maxx': [1.0, 1.0], 'maxy': [1.0, 1.0]},
+                              index=self.g1.index,
+                              columns=['minx', 'miny', 'maxx', 'maxy'])
+
+        result = self.g1.bounds
+        assert_frame_equal(expected, result)
+
+        gdf = self.gdf1.set_geometry(self.g1)
+        result = gdf.bounds
+        assert_frame_equal(expected, result)
+
+    def test_contains(self):
+        expected = [True, False, True, False, False, False]
+        assert_array_equal(expected, self.g0.contains(self.t1))
+
+    def test_length(self):
+        expected = Series(np.array([2 + np.sqrt(2), 4]), index=self.g1.index)
+        self._test_unary_real('length', expected, self.g1)
+
+    def test_crosses(self):
+        expected = [False, False, False, False, False, False]
+        assert_array_equal(expected, self.g0.crosses(self.t1))
+
+        expected = [False, True]
+        assert_array_equal(expected, self.crossed_lines.crosses(self.l3))
+
+    def test_disjoint(self):
+        expected = [False, False, False, False, False, True]
+        assert_array_equal(expected, self.g0.disjoint(self.t1))
+
+    def test_intersects(self):
+        expected = [True, True, True, True, True, False]
+        assert_array_equal(expected, self.g0.intersects(self.t1))
+
+    def test_overlaps(self):
+        expected = [True, True, False, False, False, False]
+        assert_array_equal(expected, self.g0.overlaps(self.inner_sq))
+
+        expected = [False, False]
+        assert_array_equal(expected, self.g4.overlaps(self.t1))
+
+    def test_touches(self):
+        expected = [False, True, False, False, False, False]
+        assert_array_equal(expected, self.g0.touches(self.t1))
+
+    def test_within(self):
+        expected = [True, False, False, False, False, False]
+        assert_array_equal(expected, self.g0.within(self.t1))
+
+        expected = [True, True, True, True, True, False]
+        assert_array_equal(expected, self.g0.within(self.sq))
+
+    def test_is_valid(self):
+        expected = Series(np.array([True] * len(self.g1)), self.g1.index)
+        self._test_unary_real('is_valid', expected, self.g1)
+
+    def test_is_empty(self):
+        expected = Series(np.array([False] * len(self.g1)), self.g1.index)
+        self._test_unary_real('is_empty', expected, self.g1)
+
+    def test_is_ring(self):
+        expected = Series(np.array([True] * len(self.g1)), self.g1.index)
+        self._test_unary_real('is_ring', expected, self.g1)
+
+    def test_is_simple(self):
+        expected = Series(np.array([True] * len(self.g1)), self.g1.index)
+        self._test_unary_real('is_simple', expected, self.g1)
+
+    def test_exterior(self):
+        exp_exterior = GeoSeries([LinearRing(p.boundary) for p in self.g3])
+        for expected, computed in zip(exp_exterior, self.g3.exterior):
+            assert computed.equals(expected)
+
+    def test_interiors(self):
+        square_series = GeoSeries(self.nested_squares)
+        exp_interiors = GeoSeries([LinearRing(self.inner_sq.boundary)])
+        for expected, computed in zip(exp_interiors, square_series.interiors):
+            assert computed[0].equals(expected)
+
+
+    def test_interpolate(self):
+        expected = GeoSeries([Point(0.5, 1.0), Point(0.75, 1.0)])
+        self._test_binary_topological('interpolate', expected, self.g5,
+                                      0.75, normalized=True)
+
+        expected = GeoSeries([Point(0.5, 1.0), Point(1.0, 0.5)])
+        self._test_binary_topological('interpolate', expected, self.g5,
+                                      1.5)
+
+    def test_project(self):
+        expected = Series([2.0, 1.5], index=self.g5.index)
+        p = Point(1.0, 0.5)
+        self._test_binary_real('project', expected, self.g5, p)
+
+        expected = Series([1.0, 0.5], index=self.g5.index)
+        self._test_binary_real('project', expected, self.g5, p,
+                               normalized=True)
+
+    def test_translate_tuple(self):
+        trans = self.sol.x - self.esb.x, self.sol.y - self.esb.y
+        self.assert_(self.landmarks.translate(*trans)[0].equals(self.sol))
+
+        res = self.gdf1.set_geometry(self.landmarks).translate(*trans)[0]
+        self.assert_(res.equals(self.sol))
+
+    def test_rotate(self):
+        angle = 98
+        expected = self.g4
+
+        o = Point(0,0)
+        res = self.g4.rotate(angle, origin=o).rotate(-angle, origin=o)
+        self.assert_(geom_almost_equals(self.g4, res))
+
+        res = self.gdf1.set_geometry(self.g4).rotate(angle, origin=Point(0,0))
+        self.assert_(geom_almost_equals(expected,
+                                        res.rotate(-angle, origin=o)))
+
+    def test_scale(self):
+        expected = self.g4
+
+        scale = 2., 1.
+        inv = tuple(1./i for i in scale)
+
+        o = Point(0,0)
+        res = self.g4.scale(*scale, origin=o).scale(*inv, origin=o)
+        self.assertTrue(geom_almost_equals(expected, res))
+
+        res = self.gdf1.set_geometry(self.g4).scale(*scale, origin=o)
+        res = res.scale(*inv, origin=o)
+        self.assert_(geom_almost_equals(expected, res))
+
+    def test_skew(self):
+        expected = self.g4
+
+        skew = 45.
+        o = Point(0,0)
+
+        # Test xs
+        res = self.g4.skew(xs=skew, origin=o).skew(xs=-skew, origin=o)
+        self.assert_(geom_almost_equals(expected, res))
+
+        res = self.gdf1.set_geometry(self.g4).skew(xs=skew, origin=o)
+        res = res.skew(xs=-skew, origin=o)
+        self.assert_(geom_almost_equals(expected, res))
+
+        # Test ys
+        res = self.g4.skew(ys=skew, origin=o).skew(ys=-skew, origin=o)
+        self.assert_(geom_almost_equals(expected, res))
+
+        res = self.gdf1.set_geometry(self.g4).skew(ys=skew, origin=o)
+        res = res.skew(ys=-skew, origin=o)
+        self.assert_(geom_almost_equals(expected, res))
+
+    def test_envelope(self):
+        e = self.g3.envelope
+        self.assertTrue(np.alltrue(e.geom_equals(self.sq)))
+        self.assertIsInstance(e, GeoSeries)
+        self.assertEqual(self.g3.crs, e.crs)
+
+    def test_total_bounds(self):
+        bbox = self.sol.x, self.sol.y, self.esb.x, self.esb.y
+        self.assertEqual(tuple(self.landmarks.total_bounds), bbox)
+
+        df = GeoDataFrame({'geometry': self.landmarks,
+                           'col1': range(len(self.landmarks))})
+        self.assertEqual(tuple(df.total_bounds), bbox)
+
+    #
+    # Test '&', '|', '^', and '-'
+    # The left can only be a GeoSeries. The right hand side can be a
+    # GeoSeries, GeoDataFrame or Shapely geometry
+    #
+    def test_intersection_operator(self):
+        self._test_binary_operator('__and__', self.t1, self.g1, self.g2)
+
+    def test_union_operator(self):
+        self._test_binary_operator('__or__', self.sq, self.g1, self.g2)
+
+    def test_union_operator_polygon(self):
+        self._test_binary_operator('__or__', self.sq, self.g1, self.t2)
+
+    def test_symmetric_difference_operator(self):
+        self._test_binary_operator('__xor__', self.sq, self.g3, self.g4)
+
+    def test_difference_operator_series(self):
+        expected = GeoSeries([GeometryCollection(), self.t2])
+        self._test_binary_operator('__sub__', expected, self.g1, self.g2)
+
+    def test_difference_operator_poly(self):
+        expected = GeoSeries([self.t1, self.t1])
+        self._test_binary_operator('__sub__', expected, self.g1, self.t2)
diff --git a/tests/test_geoseries.py b/tests/test_geoseries.py
new file mode 100644
index 0000000..daa21d4
--- /dev/null
+++ b/tests/test_geoseries.py
@@ -0,0 +1,143 @@
+from __future__ import absolute_import
+
+import os
+import shutil
+import tempfile
+import numpy as np
+from numpy.testing import assert_array_equal
+from pandas import Series
+from shapely.geometry import (Polygon, Point, LineString,
+                              MultiPoint, MultiLineString, MultiPolygon)
+from shapely.geometry.base import BaseGeometry
+from geopandas import GeoSeries
+from .util import unittest, geom_equals, geom_almost_equals
+
+
+class TestSeries(unittest.TestCase):
+
+    def setUp(self):
+        self.tempdir = tempfile.mkdtemp()
+        self.t1 = Polygon([(0, 0), (1, 0), (1, 1)])
+        self.t2 = Polygon([(0, 0), (1, 1), (0, 1)])
+        self.sq = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
+        self.g1 = GeoSeries([self.t1, self.sq])
+        self.g2 = GeoSeries([self.sq, self.t1])
+        self.g3 = GeoSeries([self.t1, self.t2])
+        self.g3.crs = {'init': 'epsg:4326', 'no_defs': True}
+        self.g4 = GeoSeries([self.t2, self.t1])
+        self.na = GeoSeries([self.t1, self.t2, Polygon()])
+        self.na_none = GeoSeries([self.t1, self.t2, None])
+        self.a1 = self.g1.copy()
+        self.a1.index = ['A', 'B']
+        self.a2 = self.g2.copy()
+        self.a2.index = ['B', 'C']
+        self.esb = Point(-73.9847, 40.7484)
+        self.sol = Point(-74.0446, 40.6893)
+        self.landmarks = GeoSeries([self.esb, self.sol],
+                                   crs={'init': 'epsg:4326', 'no_defs': True})
+        self.l1 = LineString([(0, 0), (0, 1), (1, 1)])
+        self.l2 = LineString([(0, 0), (1, 0), (1, 1), (0, 1)])
+        self.g5 = GeoSeries([self.l1, self.l2])
+
+    def tearDown(self):
+        shutil.rmtree(self.tempdir)
+
+    def test_single_geom_constructor(self):
+        p = Point(1,2)
+        line = LineString([(2, 3), (4, 5), (5, 6)])
+        poly = Polygon([(0, 0), (1, 0), (1, 1)],
+                          [[(.1, .1), (.9, .1), (.9, .9)]])
+        mp = MultiPoint([(1, 2), (3, 4), (5, 6)])
+        mline = MultiLineString([[(1, 2), (3, 4), (5, 6)], [(7, 8), (9, 10)]])
+
+        poly2 = Polygon([(1, 1), (1, -1), (-1, -1), (-1, 1)],
+                        [[(.5, .5), (.5, -.5), (-.5, -.5), (-.5, .5)]])
+        mpoly = MultiPolygon([poly, poly2])
+
+        geoms = [p, line, poly, mp, mline, mpoly]
+        index = ['a', 'b', 'c', 'd']
+
+        for g in geoms:
+            gs = GeoSeries(g)
+            self.assert_(len(gs) == 1)
+            self.assert_(gs.iloc[0] is g)
+
+            gs = GeoSeries(g, index=index)
+            self.assert_(len(gs) == len(index))
+            for x in gs:
+                self.assert_(x is g)
+
+    def test_copy(self):
+        gc = self.g3.copy()
+        self.assertTrue(type(gc) is GeoSeries)
+        self.assertEqual(self.g3.name, gc.name)
+        self.assertEqual(self.g3.crs, gc.crs)
+
+    def test_in(self):
+        self.assertTrue(self.t1 in self.g1)
+        self.assertTrue(self.sq in self.g1)
+        self.assertTrue(self.t1 in self.a1)
+        self.assertTrue(self.t2 in self.g3)
+        self.assertTrue(self.sq not in self.g3)
+        self.assertTrue(5 not in self.g3)
+
+    def test_geom_equals(self):
+        self.assertTrue(np.alltrue(self.g1.geom_equals(self.g1)))
+        assert_array_equal(self.g1.geom_equals(self.sq), [False, True])
+
+    def test_geom_equals_align(self):
+        a = self.a1.geom_equals(self.a2)
+        self.assertFalse(a['A'])
+        self.assertTrue(a['B'])
+        self.assertFalse(a['C'])
+
+    def test_align(self):
+        a1, a2 = self.a1.align(self.a2)
+        self.assertTrue(a2['A'].is_empty)
+        self.assertTrue(a1['B'].equals(a2['B']))
+        self.assertTrue(a1['C'].is_empty)
+
+    def test_geom_almost_equals(self):
+        # TODO: test decimal parameter
+        self.assertTrue(np.alltrue(self.g1.geom_almost_equals(self.g1)))
+        assert_array_equal(self.g1.geom_almost_equals(self.sq), [False, True])
+
+    def test_geom_equals_exact(self):
+        # TODO: test tolerance parameter
+        self.assertTrue(np.alltrue(self.g1.geom_equals_exact(self.g1, 0.001)))
+        assert_array_equal(self.g1.geom_equals_exact(self.sq, 0.001), [False, True])
+
+    def test_to_file(self):
+        """ Test to_file and from_file """
+        tempfilename = os.path.join(self.tempdir, 'test.shp')
+        self.g3.to_file(tempfilename)
+        # Read layer back in?
+        s = GeoSeries.from_file(tempfilename)
+        self.assertTrue(all(self.g3.geom_equals(s)))
+        # TODO: compare crs
+
+    def test_representative_point(self):
+        self.assertTrue(np.alltrue(self.g1.contains(self.g1.representative_point())))
+        self.assertTrue(np.alltrue(self.g2.contains(self.g2.representative_point())))
+        self.assertTrue(np.alltrue(self.g3.contains(self.g3.representative_point())))
+        self.assertTrue(np.alltrue(self.g4.contains(self.g4.representative_point())))
+
+    def test_transform(self):
+        utm18n = self.landmarks.to_crs(epsg=26918)
+        lonlat = utm18n.to_crs(epsg=4326)
+        self.assertTrue(np.alltrue(self.landmarks.geom_almost_equals(lonlat)))
+        with self.assertRaises(ValueError):
+            self.g1.to_crs(epsg=4326)
+        with self.assertRaises(TypeError):
+            self.landmarks.to_crs(crs=None, epsg=None)
+
+    def test_fillna(self):
+        na = self.na_none.fillna(Point())
+        self.assertTrue(isinstance(na[2], BaseGeometry))
+        self.assertTrue(na[2].is_empty)
+        self.assertTrue(geom_equals(self.na_none[:2], na[:2]))
+        # XXX: method works inconsistently for different pandas versions
+        #self.na_none.fillna(method='backfill')
+
+if __name__ == '__main__':
+    unittest.main()
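
The to_crs round trip exercised by test_transform can be reproduced on a
standalone GeoSeries; a minimal sketch, assuming pyproj is available and using
two illustrative lon/lat points (the coordinates and CRS dict are placeholders,
not the landmarks fixture):

    from shapely.geometry import Point
    from geopandas import GeoSeries

    # A WGS84 series; geopandas 0.1.x takes a proj4-style dict for the CRS.
    landmarks = GeoSeries([Point(-73.9857, 40.7484), Point(-74.0064, 40.7142)],
                          crs={'init': 'epsg:4326'})
    utm18n = landmarks.to_crs(epsg=26918)   # project to UTM zone 18N (metres)
    lonlat = utm18n.to_crs(epsg=4326)       # and back to lon/lat
    assert lonlat.geom_almost_equals(landmarks).all()
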
diff --git a/tests/test_io.py b/tests/test_io.py
new file mode 100644
index 0000000..ec04bd7
--- /dev/null
+++ b/tests/test_io.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import
+
+import fiona
+
+from geopandas import GeoDataFrame, read_postgis, read_file
+import tests.util
+from .util import PANDAS_NEW_SQL_API, unittest
+
+
+class TestIO(unittest.TestCase):
+    def setUp(self):
+        nybb_filename = tests.util.download_nybb()
+        path = '/nybb_14a_av/nybb.shp'
+        vfs = 'zip://' + nybb_filename
+        self.df = read_file(path, vfs=vfs)
+        with fiona.open(path, vfs=vfs) as f:
+            self.crs = f.crs
+
+    def test_read_postgis_default(self):
+        con = tests.util.connect('test_geopandas')
+        if con is None or not tests.util.create_db(self.df):
+            raise unittest.SkipTest('test_geopandas database is not available')
+
+        try:
+            sql = "SELECT * FROM nybb;"
+            df = read_postgis(sql, con)
+        finally:
+            if PANDAS_NEW_SQL_API:
+                # It's not really a connection, it's an engine
+                con = con.connect()
+            con.close()
+
+        tests.util.validate_boro_df(self, df)
+
+    def test_read_postgis_custom_geom_col(self):
+        con = tests.util.connect('test_geopandas')
+        if con is None or not tests.util.create_db(self.df):
+            raise unittest.SkipTest('test_geopandas database is not available')
+
+        try:
+            sql = """SELECT
+                     borocode, boroname, shape_leng, shape_area,
+                     geom AS __geometry__
+                     FROM nybb;"""
+            df = read_postgis(sql, con, geom_col='__geometry__')
+        finally:
+            if PANDAS_NEW_SQL_API:
+                # It's not really a connection, it's an engine
+                con = con.connect()
+            con.close()
+
+        tests.util.validate_boro_df(self, df)
+
+    def test_read_file(self):
+        df = self.df.rename(columns=lambda x: x.lower())
+        tests.util.validate_boro_df(self, df)
+        self.assertEqual(df.crs, self.crs)
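
Outside the test harness, read_postgis is driven the same way; a rough sketch,
assuming a local test_geopandas database that already holds the nybb table
created by tests.util.create_db:

    import psycopg2
    from geopandas import read_postgis

    con = psycopg2.connect(dbname='test_geopandas')
    try:
        # 'geom' is the geometry column name used by tests.util.create_db.
        boros = read_postgis("SELECT * FROM nybb;", con, geom_col='geom')
    finally:
        con.close()
    print(boros[['boroname', 'shape_area']].head())
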
diff --git a/tests/test_plotting.py b/tests/test_plotting.py
new file mode 100644
index 0000000..5bc3ef6
--- /dev/null
+++ b/tests/test_plotting.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import
+
+import os
+import shutil
+import tempfile
+import unittest
+
+import matplotlib
+matplotlib.use('Agg', warn=False)
+from matplotlib.pyplot import Artist, savefig, clf
+from matplotlib.testing.noseclasses import ImageComparisonFailure
+from matplotlib.testing.compare import compare_images
+from shapely.geometry import Polygon, LineString, Point
+from six.moves import xrange
+
+from geopandas import GeoSeries
+
+
+# If set to True, generate the baseline images instead of comparing against them (all tests will then pass!)
+GENERATE_BASELINE = False
+
+BASELINE_DIR = os.path.join(os.path.dirname(__file__), 'baseline_images', 'test_plotting')
+
+
+class PlotTests(unittest.TestCase):
+
+    def setUp(self):
+        self.tempdir = tempfile.mkdtemp()
+        return
+
+    def tearDown(self):
+        shutil.rmtree(self.tempdir)
+        return
+
+    def _compare_images(self, ax, filename, tol=8):
+        """ Helper method to do the comparisons """
+        assert isinstance(ax, Artist)
+        if GENERATE_BASELINE:
+            savefig(os.path.join(BASELINE_DIR, filename))
+        savefig(os.path.join(self.tempdir, filename))
+        err = compare_images(os.path.join(BASELINE_DIR, filename),
+                             os.path.join(self.tempdir, filename),
+                             tol, in_decorator=True)
+        if err:
+            raise ImageComparisonFailure('images not close: %(actual)s '
+                                         'vs. %(expected)s '
+                                         '(RMS %(rms).3f)' % err)
+
+    def test_poly_plot(self):
+        """ Test plotting a simple series of polygons """
+        clf()
+        filename = 'poly_plot.png'
+        t1 = Polygon([(0, 0), (1, 0), (1, 1)])
+        t2 = Polygon([(1, 0), (2, 0), (2, 1)])
+        polys = GeoSeries([t1, t2])
+        ax = polys.plot()
+        self._compare_images(ax=ax, filename=filename)
+
+    def test_point_plot(self):
+        """ Test plotting a simple series of points """
+        clf()
+        filename = 'points_plot.png'
+        N = 10
+        points = GeoSeries(Point(i, i) for i in xrange(N))
+        ax = points.plot()
+        self._compare_images(ax=ax, filename=filename)
+
+    def test_line_plot(self):
+        """ Test plotting a simple series of lines """
+        clf()
+        filename = 'lines_plot.png'
+        N = 10
+        lines = GeoSeries([LineString([(0, i), (9, i)]) for i in xrange(N)])
+        ax = lines.plot()
+        self._compare_images(ax=ax, filename=filename)
+
+if __name__ == '__main__':
+    unittest.main()
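
The comparison performed by _compare_images is a thin wrapper around
matplotlib's own image tester; a small sketch of the underlying call, with
hypothetical file paths:

    from matplotlib.testing.compare import compare_images

    # compare_images returns None when the two PNGs agree within `tol`; with
    # in_decorator=True a mismatch is reported as a dict carrying the RMS error.
    err = compare_images('tests/baseline_images/test_plotting/poly_plot.png',
                         '/tmp/poly_plot.png', tol=8, in_decorator=True)
    print('images match' if err is None else 'RMS %(rms).3f' % err)
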
diff --git a/tests/test_types.py b/tests/test_types.py
new file mode 100644
index 0000000..fdd14f5
--- /dev/null
+++ b/tests/test_types.py
@@ -0,0 +1,89 @@
+from __future__ import absolute_import
+
+import numpy as np
+from shapely.geometry import Point
+from pandas import Series, DataFrame
+
+from geopandas import GeoSeries, GeoDataFrame
+from .util import unittest
+
+OLD_PANDAS = issubclass(Series, np.ndarray)
+
+
+class TestSeries(unittest.TestCase):
+
+    def setUp(self):
+        N = self.N = 10
+        r = 0.5
+        self.pts = GeoSeries([Point(x, y) for x, y in zip(range(N), range(N))])
+        self.polys = self.pts.buffer(r)
+
+    def test_slice(self):
+        assert type(self.pts[:2]) is GeoSeries
+        assert type(self.pts[::2]) is GeoSeries
+        assert type(self.polys[:2]) is GeoSeries
+
+    def test_head(self):
+        assert type(self.pts.head()) is GeoSeries
+
+    def test_tail(self):
+        assert type(self.pts.tail()) is GeoSeries
+
+    def test_sort_index(self):
+        assert type(self.pts.sort_index()) is GeoSeries
+
+    def test_loc(self):
+        assert type(self.pts.loc[5:]) is GeoSeries
+
+    def test_iloc(self):
+        assert type(self.pts.iloc[5:]) is GeoSeries
+
+    def test_fancy(self):
+        idx = (self.pts.index % 2).astype(bool)
+        assert type(self.pts[idx]) is GeoSeries
+
+    def test_take(self):
+        assert type(self.pts.take(list(range(0, self.N, 2)))) is GeoSeries
+
+    def test_select(self):
+        assert type(self.pts.select(lambda x: x % 2 == 0)) is GeoSeries
+
+    @unittest.skipIf(OLD_PANDAS, 'Groupby not supported on pandas <= 0.12')
+    def test_groupby(self):
+        for f, s in self.pts.groupby(lambda x: x % 2):
+            assert type(s) is GeoSeries
+
+
+class TestDataFrame(unittest.TestCase):
+
+    def setUp(self):
+        N = 10
+        self.df = GeoDataFrame([
+            {'geometry' : Point(x, y), 'value1': x + y, 'value2': x*y}
+            for x, y in zip(range(N), range(N))])
+
+    def test_geometry(self):
+        assert type(self.df.geometry) is GeoSeries
+        # still GeoSeries if different name
+        df2 = GeoDataFrame({"coords": [Point(x,y) for x, y in zip(range(5),
+                                                                  range(5))],
+                            "nums": range(5)}, geometry="coords")
+        assert type(df2.geometry) is GeoSeries
+        assert type(df2['coords']) is GeoSeries
+
+    def test_nongeometry(self):
+        assert type(self.df['value1']) is Series
+
+    def test_geometry_multiple(self):
+        assert type(self.df[['geometry', 'value1']]) is GeoDataFrame
+
+    def test_nongeometry_multiple(self):
+        assert type(self.df[['value1', 'value2']]) is DataFrame
+
+    def test_slice(self):
+        assert type(self.df[:2]) is GeoDataFrame
+        assert type(self.df[::2]) is GeoDataFrame
+
+    def test_fancy(self):
+        idx = (self.df.index % 2).astype(bool)
+        assert type(self.df[idx]) is GeoDataFrame
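
The type preservation checked above also holds when the geometry column has a
non-default name; a brief sketch mirroring test_geometry and test_slice (the
'coords'/'nums' frame is illustrative):

    from shapely.geometry import Point
    from geopandas import GeoDataFrame, GeoSeries

    df = GeoDataFrame({'coords': [Point(i, i) for i in range(5)],
                       'nums': list(range(5))},
                      geometry='coords')
    assert isinstance(df.geometry, GeoSeries)   # accessor resolves the named column
    assert isinstance(df['coords'], GeoSeries)  # plain indexing keeps the geo type
    assert isinstance(df[:2], GeoDataFrame)     # slicing keeps the frame geo-aware
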
diff --git a/tests/util.py b/tests/util.py
new file mode 100644
index 0000000..9751c31
--- /dev/null
+++ b/tests/util.py
@@ -0,0 +1,213 @@
+import io
+import os.path
+from six.moves.urllib.request import urlopen
+
+from geopandas import GeoDataFrame, GeoSeries
+
+# Compatibility layer for Python 2.6: try loading unittest2
+import sys
+if sys.version_info[:2] == (2, 6):
+    try:
+        import unittest2 as unittest
+    except ImportError:
+        import unittest
+
+else:
+    import unittest
+
+try:
+    import psycopg2
+    from psycopg2 import OperationalError
+except ImportError:
+    class OperationalError(Exception):
+        pass
+
+try:
+    from pandas import read_sql_table
+except ImportError:
+    PANDAS_NEW_SQL_API = False
+else:
+    PANDAS_NEW_SQL_API = True
+
+
+def download_nybb():
+    """ Returns the path to the NYC boroughs file. Downloads if necessary. """
+    # Data from http://www.nyc.gov/html/dcp/download/bytes/nybb_14aav.zip
+    # saved as geopandas/examples/nybb_14aav.zip.
+    filename = 'nybb_14aav.zip'
+    full_path_name = os.path.join('examples', filename)
+    if not os.path.exists(full_path_name):
+        # Download first so that a failed request does not leave an empty zip behind.
+        response = urlopen('http://www.nyc.gov/html/dcp/download/bytes/{0}'.format(filename))
+        with io.open(full_path_name, 'wb') as f:
+            f.write(response.read())
+    return full_path_name
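
download_nybb only fetches the zip; reading it still goes through fiona's
virtual filesystem support, as in TestIO.setUp above. A short sketch:

    from geopandas import read_file
    from tests.util import download_nybb

    zip_path = download_nybb()
    boros = read_file('/nybb_14a_av/nybb.shp', vfs='zip://' + zip_path)
    print(len(boros))  # the five NYC boroughs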
+
+
+def validate_boro_df(test, df):
+    """ Tests a GeoDataFrame that has been read in from the nybb dataset."""
+    test.assertTrue(isinstance(df, GeoDataFrame))
+    # Make sure all the columns are there and the geometries
+    # were properly loaded as MultiPolygons
+    test.assertEqual(len(df), 5)
+    columns = ('borocode', 'boroname', 'shape_leng', 'shape_area')
+    for col in columns:
+        test.assertTrue(col in df.columns, 'Column {0} missing'.format(col))
+    test.assertTrue(all(df.geometry.type == 'MultiPolygon'))
+
+
+def connect(dbname):
+    try:
+        con = psycopg2.connect(dbname=dbname)
+    except (NameError, OperationalError):
+        return None
+
+    return con
+
+
+def create_db(df):
+    """
+    Create a nybb table in the test_geopandas PostGIS database.
+    Returns a boolean indicating whether the database table was successfully
+    created.
+
+    """
+    # Try to create the nybb table; skip the DB tests if something goes
+    # wrong.
+    # If you'd like these tests to run, create a database called
+    # 'test_geopandas' and enable PostGIS in it:
+    # > createdb test_geopandas
+    # > psql -c "CREATE EXTENSION postgis" -d test_geopandas
+    con = connect('test_geopandas')
+    if con is None:
+        return False
+
+    try:
+        cursor = con.cursor()
+        cursor.execute("DROP TABLE IF EXISTS nybb;")
+
+        sql = """CREATE TABLE nybb (
+            geom        geometry,
+            borocode    integer,
+            boroname    varchar(40),
+            shape_leng  float,
+            shape_area  float
+        );"""
+        cursor.execute(sql)
+
+        for i, row in df.iterrows():
+            sql = """INSERT INTO nybb VALUES (
+                ST_GeometryFromText(%s), %s, %s, %s, %s
+            );"""
+            cursor.execute(sql, (row['geometry'].wkt,
+                                 row['BoroCode'],
+                                 row['BoroName'],
+                                 row['Shape_Leng'],
+                                 row['Shape_Area']))
+    finally:
+        cursor.close()
+        con.commit()
+        con.close()
+
+    return True
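
connect() and create_db() are written so that the PostGIS tests degrade
gracefully; a sketch of the skip pattern the I/O tests build on top of them,
using a hypothetical require_postgis helper:

    import unittest

    from tests.util import connect, create_db

    def require_postgis(df):
        """Skip the calling test when the PostGIS fixture cannot be built."""
        con = connect('test_geopandas')
        if con is None or not create_db(df):
            raise unittest.SkipTest('test_geopandas database is not available')
        return con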
+
+
+def assert_seq_equal(left, right):
+    """Poor man's version of assert_almost_equal which isn't working with Shapely
+    objects right now"""
+    assert len(left) == len(right), "Mismatched lengths: %d != %d" % (len(left), len(right))
+
+    for elem_left, elem_right in zip(left, right):
+        assert elem_left == elem_right, "%r != %r" % (elem_left, elem_right)
+
+
+def geom_equals(this, that):
+    """Test for geometric equality. Empty geometries are considered equal.
+
+    Parameters
+    ----------
+    this, that : arrays of geometry objects (or anything that has `geom_equals`
+                 and `is_empty` attributes)
+    """
+
+    return (this.geom_equals(that) | (this.is_empty & that.is_empty)).all()
+
+
+def geom_almost_equals(this, that):
+    """Test for 'almost' geometric equality. Empty geometries considered equal.
+
+    Parameters
+    ----------
+    this, that : arrays of geometry objects (or anything that has
+                 `geom_almost_equals` and `is_empty` attributes)
+    """
+
+    return (this.geom_almost_equals(that) |
+            (this.is_empty & that.is_empty)).all()
+
+# TODO: Remove me when standardizing on pandas 0.13, which already includes
+#       this test util.
+def assert_isinstance(obj, klass_or_tuple):
+    assert isinstance(obj, klass_or_tuple), "type: %r != %r" % (
+                                           type(obj).__name__,
+                                           getattr(klass_or_tuple, '__name__',
+                                                   klass_or_tuple))
+
+def assert_geoseries_equal(left, right, check_dtype=False,
+                           check_index_type=False,
+                           check_series_type=True,
+                           check_less_precise=False,
+                           check_geom_type=False,
+                           check_crs=True):
+    """Test util for checking that two GeoSeries are equal.
+
+    Parameters
+    ----------
+    left, right : two GeoSeries
+    check_dtype : bool, default False
+        if True, check geo dtype [only included so it's a drop-in replacement
+        for assert_series_equal]
+    check_index_type : bool, default False
+        check that index types are equal
+    check_series_type : bool, default True
+        check that both are the same type (*and* are GeoSeries). If False,
+        will attempt to convert both into GeoSeries.
+    check_less_precise : bool, default False
+        if True, use geom_almost_equals. if False, use geom_equals.
+    check_geom_type : bool, default False
+        if True, check that all the geom types are equal.
+    check_crs : bool, default True
+        if check_series_type is True, then also check that the
+        crs matches
+    """
+    assert len(left) == len(right), "%d != %d" % (len(left), len(right))
+
+    if check_index_type:
+        assert_isinstance(left.index, type(right.index))
+
+    if check_dtype:
+        assert left.dtype == right.dtype, "dtype: %s != %s" % (left.dtype,
+                                                               right.dtype)
+
+    if check_series_type:
+        assert isinstance(left, GeoSeries)
+        assert_isinstance(left, type(right))
+
+        if check_crs:
+            assert left.crs == right.crs, "crs: %s != %s" % (left.crs, right.crs)
+    else:
+        if not isinstance(left, GeoSeries):
+            left = GeoSeries(left)
+        if not isinstance(right, GeoSeries):
+            right = GeoSeries(right, index=left.index)
+
+    assert left.index.equals(right.index), "index: %s != %s" % (left.index,
+                                                                right.index)
+
+    if check_geom_type:
+        assert (left.type == right.type).all(), "type: %s != %s" % (left.type,
+                                                                    right.type)
+
+    if check_less_precise:
+        assert geom_almost_equals(left, right)
+    else:
+        assert geom_equals(left, right)
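
As a usage note, assert_geoseries_equal is intended as a drop-in for pandas'
assert_series_equal in geometry tests; a small sketch with made-up data:

    from shapely.geometry import Point
    from geopandas import GeoSeries
    from tests.util import assert_geoseries_equal

    left = GeoSeries([Point(0, 0), Point(1, 1)])
    right = GeoSeries([Point(0, 0), Point(1, 1 + 1e-9)])

    # The exact check would fail on the perturbed point; the loose check passes.
    assert_geoseries_equal(left, right, check_less_precise=True)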

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/python-geopandas.git


