[pysal] 01/02: Imported Upstream version 1.9.1

Johan Van de Wauw johanvdw-guest at moszumanska.debian.org
Fri Jun 12 20:58:01 UTC 2015


This is an automated email from the git hooks/post-receive script.

johanvdw-guest pushed a commit to branch master
in repository pysal.

commit 919fbbd032459f1a112d3bd2a2894f14389644cf
Author: Johan Van de Wauw <johan.vandewauw at gmail.com>
Date:   Thu Feb 5 21:45:06 2015 +0100

    Imported Upstream version 1.9.1
---
 .gitignore                                         |   79 +
 .travis.yml                                        |   46 +
 CHANGELOG.txt                                      | 1993 +++++++
 INSTALL.txt                                        |  242 +
 LICENSE.txt                                        |   30 +
 MANIFEST.in                                        |    2 +
 Makefile                                           |   25 +
 README.md                                          |  101 +
 README.txt                                         |  101 +
 THANKS.txt                                         |   43 +
 Vagrantfile                                        |   72 +
 authors.txt                                        |   22 +
 distribute_setup.py                                |  485 ++
 doc/Makefile                                       |   99 +
 doc/source/_static/favicon.png                     |  Bin 0 -> 4808 bytes
 doc/source/_static/images/bugs.png                 |  Bin 0 -> 6442 bytes
 doc/source/_static/images/documentation.png        |  Bin 0 -> 1868 bytes
 doc/source/_static/images/download.png             |  Bin 0 -> 3585 bytes
 doc/source/_static/images/download2.png            |  Bin 0 -> 7449 bytes
 doc/source/_static/images/feed-icon.jpg            |  Bin 0 -> 2285 bytes
 doc/source/_static/images/feed-icon.png            |  Bin 0 -> 2776 bytes
 doc/source/_static/images/socal_1.jpg              |  Bin 0 -> 13410 bytes
 doc/source/_static/images/socal_2.jpg              |  Bin 0 -> 20994 bytes
 doc/source/_static/images/socal_3.jpg              |  Bin 0 -> 18934 bytes
 doc/source/_static/images/tutorial.png             |  Bin 0 -> 7119 bytes
 doc/source/_static/images/yi_jing_01_chien.jpg     |  Bin 0 -> 9498 bytes
 doc/source/_static/mydoc.css                       |    3 +
 doc/source/_static/pysalgraph.png                  |  Bin 0 -> 46397 bytes
 doc/source/_static/pysalsmall.png                  |  Bin 0 -> 15755 bytes
 doc/source/_templates/ganalytics_layout.html       |  213 +
 doc/source/_templates/layout.html                  |   24 +
 doc/source/conf.py                                 |  230 +
 doc/source/contents.txt                            |   16 +
 doc/source/developers/docs/index.rst               |  252 +
 doc/source/developers/guidelines.rst               |  130 +
 doc/source/developers/index.rst                    |   18 +
 doc/source/developers/known-issues.rst             |   42 +
 doc/source/developers/pep/index.rst                |   18 +
 doc/source/developers/pep/pep-0001.rst             |   74 +
 doc/source/developers/pep/pep-0002.rst             |   56 +
 doc/source/developers/pep/pep-0003.rst             |   55 +
 doc/source/developers/pep/pep-0004.rst             |   56 +
 doc/source/developers/pep/pep-0005.rst             |   56 +
 doc/source/developers/pep/pep-0006.rst             |   60 +
 doc/source/developers/pep/pep-0007.rst             |   76 +
 doc/source/developers/pep/pep-0008.rst             |   55 +
 doc/source/developers/pep/pep-0009.rst             |   57 +
 doc/source/developers/pep/pep-0010.rst             |   46 +
 doc/source/developers/pep/pep-0011.rst             |  120 +
 doc/source/developers/projects.rst                 |   25 +
 doc/source/developers/py3k.rst                     |  111 +
 doc/source/developers/release.rst                  |   93 +
 doc/source/developers/testing.rst                  |  127 +
 doc/source/funding.rst                             |   16 +
 doc/source/index.rst                               |   87 +
 doc/source/index.txt                               |  110 +
 doc/source/library/cg/index.rst                    |   16 +
 doc/source/library/cg/kdtree.rst                   |   10 +
 doc/source/library/cg/locators.rst                 |   14 +
 doc/source/library/cg/rtree.rst                    |   10 +
 doc/source/library/cg/shapes.rst                   |   10 +
 doc/source/library/cg/sphere.rst                   |   10 +
 doc/source/library/cg/standalone.rst               |   11 +
 doc/source/library/contrib/index.rst               |   73 +
 doc/source/library/core/FileIO.rst                 |   15 +
 doc/source/library/core/IOHandlers/arcgis_dbf.rst  |   13 +
 doc/source/library/core/IOHandlers/arcgis_swm.rst  |   13 +
 doc/source/library/core/IOHandlers/arcgis_txt.rst  |   10 +
 doc/source/library/core/IOHandlers/csvWrapper.rst  |   13 +
 doc/source/library/core/IOHandlers/dat.rst         |   10 +
 doc/source/library/core/IOHandlers/gal.rst         |   13 +
 doc/source/library/core/IOHandlers/geobugs_txt.rst |   13 +
 doc/source/library/core/IOHandlers/geoda_txt.rst   |   10 +
 doc/source/library/core/IOHandlers/gwt.rst         |   10 +
 doc/source/library/core/IOHandlers/index.rst       |    8 +
 doc/source/library/core/IOHandlers/mat.rst         |   10 +
 doc/source/library/core/IOHandlers/mtx.rst         |   10 +
 doc/source/library/core/IOHandlers/pyDbfIO.rst     |   11 +
 doc/source/library/core/IOHandlers/pyShpIO.rst     |   12 +
 doc/source/library/core/IOHandlers/stata_txt.rst   |   12 +
 doc/source/library/core/IOHandlers/wk1.rst         |   13 +
 doc/source/library/core/IOHandlers/wkt.rst         |   15 +
 doc/source/library/core/Tables.rst                 |   12 +
 doc/source/library/core/index.rst                  |    8 +
 doc/source/library/esda/gamma.rst                  |   10 +
 doc/source/library/esda/geary.rst                  |   10 +
 doc/source/library/esda/getis-ord.rst              |   11 +
 doc/source/library/esda/index.rst                  |   13 +
 doc/source/library/esda/join_counts.rst            |    9 +
 doc/source/library/esda/mapclassify.rst            |   10 +
 doc/source/library/esda/moran.rst                  |   11 +
 doc/source/library/esda/smoothing.rst              |    9 +
 doc/source/library/index.rst                       |   34 +
 doc/source/library/inequality/gini.rst             |   12 +
 doc/source/library/inequality/index.rst            |    8 +
 doc/source/library/inequality/theil.rst            |   12 +
 doc/source/library/network/index.rst               |    7 +
 doc/source/library/network/network.rst             |   13 +
 doc/source/library/region/index.rst                |    9 +
 doc/source/library/region/maxp.rst                 |    8 +
 doc/source/library/region/randomregion.rst         |    8 +
 .../library/spatial_dynamics/directional.rst       |    8 +
 doc/source/library/spatial_dynamics/ergodic.rst    |    8 +
 doc/source/library/spatial_dynamics/index.rst      |   13 +
 .../library/spatial_dynamics/interaction.rst       |    8 +
 doc/source/library/spatial_dynamics/markov.rst     |    8 +
 doc/source/library/spatial_dynamics/rank.rst       |    8 +
 doc/source/library/spreg/diagnostics.rst           |   12 +
 doc/source/library/spreg/diagnostics_sp.rst        |   12 +
 doc/source/library/spreg/diagnostics_tsls.rst      |   12 +
 doc/source/library/spreg/error_sp.rst              |   12 +
 doc/source/library/spreg/error_sp_het.rst          |   12 +
 doc/source/library/spreg/error_sp_het_regimes.rst  |   12 +
 doc/source/library/spreg/error_sp_hom.rst          |   12 +
 doc/source/library/spreg/error_sp_hom_regimes.rst  |   12 +
 doc/source/library/spreg/error_sp_regimes.rst      |   12 +
 doc/source/library/spreg/index.rst                 |   29 +
 doc/source/library/spreg/ml_error.rst              |   12 +
 doc/source/library/spreg/ml_error_regimes.rst      |   12 +
 doc/source/library/spreg/ml_lag.rst                |   12 +
 doc/source/library/spreg/ml_lag_regimes.rst        |   12 +
 doc/source/library/spreg/ols.rst                   |   12 +
 doc/source/library/spreg/ols_regimes.rst           |   12 +
 doc/source/library/spreg/probit.rst                |   12 +
 doc/source/library/spreg/regimes.rst               |   12 +
 doc/source/library/spreg/twosls.rst                |   12 +
 doc/source/library/spreg/twosls_regimes.rst        |   12 +
 doc/source/library/spreg/twosls_sp.rst             |   12 +
 doc/source/library/spreg/twosls_sp_regimes.rst     |   12 +
 doc/source/library/weights/Contiguity.rst          |   11 +
 doc/source/library/weights/Distance.rst            |   11 +
 doc/source/library/weights/Wsets.rst               |    9 +
 doc/source/library/weights/index.rst               |   12 +
 doc/source/library/weights/spatial_lag.rst         |   14 +
 doc/source/library/weights/user.rst                |   10 +
 doc/source/library/weights/util.rst                |   10 +
 doc/source/library/weights/weights.rst             |   13 +
 doc/source/license.rst                             |   33 +
 doc/source/news.rst                                |   55 +
 doc/source/users/index.rst                         |   11 +
 doc/source/users/installation.rst                  |  160 +
 doc/source/users/introduction.rst                  |   68 +
 doc/source/users/tutorials/autocorrelation.rst     |  830 +++
 doc/source/users/tutorials/dynamics.rst            |  805 +++
 doc/source/users/tutorials/econometrics.rst        |   23 +
 doc/source/users/tutorials/examples.rst            |  142 +
 doc/source/users/tutorials/fileio.rst              |  429 ++
 doc/source/users/tutorials/index.rst               |   20 +
 doc/source/users/tutorials/intro.rst               |  108 +
 doc/source/users/tutorials/next.rst                |   17 +
 doc/source/users/tutorials/region.rst              |  211 +
 doc/source/users/tutorials/shapely.rst             |   94 +
 doc/source/users/tutorials/smoothing.rst           |  405 ++
 doc/source/users/tutorials/weights.rst             |  944 +++
 pysal/COPYING                                      |   25 +
 pysal/__init__.py                                  |   94 +
 pysal/cg/__init__.py                               |   10 +
 pysal/cg/kdtree.py                                 |  246 +
 pysal/cg/locators.py                               |  990 ++++
 pysal/cg/rtree.py                                  |  639 ++
 pysal/cg/segmentLocator.py                         |  403 ++
 pysal/cg/shapes.py                                 | 1913 ++++++
 pysal/cg/sphere.py                                 |  502 ++
 pysal/cg/standalone.py                             |  913 +++
 pysal/cg/tests/__init__.py                         |    0
 pysal/cg/tests/test_geoJSON.py                     |   26 +
 pysal/cg/tests/test_locators.py                    |   65 +
 pysal/cg/tests/test_rtree.py                       |   68 +
 pysal/cg/tests/test_segmentLocator.py              |   48 +
 pysal/cg/tests/test_shapes.py                      |  758 +++
 pysal/cg/tests/test_standalone.py                  |  601 ++
 pysal/common.py                                    |   25 +
 pysal/contrib/README                               |   57 +
 pysal/contrib/__init__.py                          |    0
 pysal/contrib/clusterpy/README.md                  |   15 +
 pysal/contrib/clusterpy/__init__.py                |   12 +
 pysal/contrib/clusterpy/clusterpy.ipynb            | 3320 +++++++++++
 pysal/contrib/clusterpy/clusterpy_ext.py           |  284 +
 pysal/contrib/db/PGDump.py                         |  104 +
 pysal/contrib/db/README                            |  125 +
 pysal/contrib/db/README_PLPYTHON                   |   10 +
 pysal/contrib/network/__init__.py                  |    0
 pysal/contrib/network/access.py                    |   56 +
 pysal/contrib/network/crimes.dbf                   |  Bin 0 -> 5551 bytes
 pysal/contrib/network/crimes.shp                   |  Bin 0 -> 8136 bytes
 pysal/contrib/network/crimes.shx                   |  Bin 0 -> 2396 bytes
 pysal/contrib/network/distances.csv                |   65 +
 pysal/contrib/network/kernel.py                    |  184 +
 pysal/contrib/network/kfuncs.py                    |   75 +
 pysal/contrib/network/klincs.py                    |  233 +
 pysal/contrib/network/lincs.py                     |  349 ++
 pysal/contrib/network/network.py                   |  599 ++
 pysal/contrib/network/priordict.py                 |   80 +
 pysal/contrib/network/simulator.py                 |  235 +
 pysal/contrib/network/streets.dbf                  |  Bin 0 -> 11525 bytes
 pysal/contrib/network/streets.shp                  |  Bin 0 -> 26044 bytes
 pysal/contrib/network/streets.shx                  |  Bin 0 -> 2444 bytes
 pysal/contrib/network/test_access.py               |   53 +
 pysal/contrib/network/test_kernel.py               |   44 +
 pysal/contrib/network/test_kfuncs.py               |   40 +
 pysal/contrib/network/test_klincs.py               |  136 +
 pysal/contrib/network/test_lincs.py                |   89 +
 pysal/contrib/network/test_network.py              |  186 +
 pysal/contrib/network/test_weights.py              |   22 +
 pysal/contrib/network/weights.py                   |   80 +
 pysal/contrib/opendata/__init__.py                 |    0
 pysal/contrib/opendata/google.py                   |  287 +
 pysal/contrib/shapely_ext.py                       |  301 +
 pysal/contrib/shared_perimeter_weights.py          |   33 +
 pysal/contrib/spatialnet/__init__.py               |    0
 pysal/contrib/spatialnet/beth_roads.shp            |  Bin 0 -> 77660 bytes
 pysal/contrib/spatialnet/beth_roads.shx            |  Bin 0 -> 3164 bytes
 pysal/contrib/spatialnet/cleanNetShp.py            |  141 +
 pysal/contrib/spatialnet/eberly.shp                |  Bin 0 -> 2652 bytes
 pysal/contrib/spatialnet/eberly.shx                |  Bin 0 -> 332 bytes
 pysal/contrib/spatialnet/spatialnet.py             |  107 +
 pysal/contrib/spatialnet/util.py                   |  221 +
 pysal/contrib/viz/__init__.py                      |    0
 pysal/contrib/viz/mapping.py                       |  598 ++
 pysal/contrib/viz/mapping_guide.ipynb              |  335 ++
 pysal/contrib/viz/taz_example.ipynb                |  454 ++
 pysal/contrib/weights_viewer/__init__.py           |    0
 pysal/contrib/weights_viewer/transforms.py         |  208 +
 pysal/contrib/weights_viewer/weights_viewer.py     |  179 +
 pysal/core/FileIO.py                               |  346 ++
 pysal/core/IOHandlers/__init__.py                  |   20 +
 pysal/core/IOHandlers/arcgis_dbf.py                |  231 +
 pysal/core/IOHandlers/arcgis_swm.py                |  212 +
 pysal/core/IOHandlers/arcgis_txt.py                |  208 +
 pysal/core/IOHandlers/csvWrapper.py                |   97 +
 pysal/core/IOHandlers/dat.py                       |  132 +
 pysal/core/IOHandlers/gal.py                       |  218 +
 pysal/core/IOHandlers/geobugs_txt.py               |  254 +
 pysal/core/IOHandlers/geoda_txt.py                 |   91 +
 pysal/core/IOHandlers/gwt.py                       |  286 +
 pysal/core/IOHandlers/mat.py                       |  175 +
 pysal/core/IOHandlers/mtx.py                       |  241 +
 pysal/core/IOHandlers/pyDbfIO.py                   |  306 +
 pysal/core/IOHandlers/pyShpIO.py                   |  189 +
 pysal/core/IOHandlers/stata_txt.py                 |  236 +
 pysal/core/IOHandlers/template.py                  |  145 +
 pysal/core/IOHandlers/tests/__init__.py            |    0
 pysal/core/IOHandlers/tests/test_arcgis_dbf.py     |   57 +
 pysal/core/IOHandlers/tests/test_arcgis_swm.py     |   44 +
 pysal/core/IOHandlers/tests/test_arcgis_txt.py     |   60 +
 pysal/core/IOHandlers/tests/test_csvWrapper.py     |   61 +
 pysal/core/IOHandlers/tests/test_dat.py            |   44 +
 pysal/core/IOHandlers/tests/test_gal.py            |   49 +
 pysal/core/IOHandlers/tests/test_geobugs_txt.py    |   57 +
 pysal/core/IOHandlers/tests/test_geoda_txt.py      |   26 +
 pysal/core/IOHandlers/tests/test_gwt.py            |   54 +
 pysal/core/IOHandlers/tests/test_mat.py            |   49 +
 pysal/core/IOHandlers/tests/test_mtx.py            |   57 +
 pysal/core/IOHandlers/tests/test_pyDbfIO.py        |  111 +
 pysal/core/IOHandlers/tests/test_pyShpIO.py        |   85 +
 pysal/core/IOHandlers/tests/test_stata_txt.py      |   61 +
 pysal/core/IOHandlers/tests/test_wk1.py            |   44 +
 pysal/core/IOHandlers/tests/test_wkt.py            |   31 +
 pysal/core/IOHandlers/wk1.py                       |  330 ++
 pysal/core/IOHandlers/wkt.py                       |   98 +
 pysal/core/Tables.py                               |  167 +
 pysal/core/__init__.py                             |    1 +
 pysal/core/tests/__init__.py                       |    0
 pysal/core/tests/test_FileIO.py                    |    0
 pysal/core/util/__init__.py                        |    2 +
 pysal/core/util/shapefile.py                       |  758 +++
 pysal/core/util/tests/test_shapefile.py            |  362 ++
 pysal/core/util/tests/test_weight_converter.py     |  137 +
 pysal/core/util/tests/test_wkt.py                  |   52 +
 pysal/core/util/weight_converter.py                |  243 +
 pysal/core/util/wkt.py                             |  125 +
 pysal/esda/__init__.py                             |   12 +
 pysal/esda/gamma.py                                |  204 +
 pysal/esda/geary.py                                |  161 +
 pysal/esda/getisord.py                             |  394 ++
 pysal/esda/join_counts.py                          |  144 +
 pysal/esda/mapclassify.py                          | 1867 ++++++
 pysal/esda/mixture_smoothing.py                    |  311 +
 pysal/esda/moran.py                                |  793 +++
 pysal/esda/smoothing.py                            | 1573 +++++
 pysal/esda/tests/__init__.py                       |    0
 pysal/esda/tests/test_gamma.py                     |   68 +
 pysal/esda/tests/test_geary.py                     |   61 +
 pysal/esda/tests/test_getisord.py                  |   59 +
 pysal/esda/tests/test_join_counts.py               |   41 +
 pysal/esda/tests/test_mapclassify.py               |  373 ++
 pysal/esda/tests/test_mixture_smoothing.py         |   43 +
 pysal/esda/tests/test_moran.py                     |   94 +
 pysal/esda/tests/test_smoothing.py                 |  248 +
 pysal/examples/10740.dbf                           |  Bin 0 -> 8188 bytes
 pysal/examples/10740.shp                           |  Bin 0 -> 521068 bytes
 pysal/examples/10740.shx                           |  Bin 0 -> 1660 bytes
 pysal/examples/10740_queen.gal                     |  391 ++
 pysal/examples/10740_rook.gal                      |  391 ++
 pysal/examples/Chicago77.dbf                       |  Bin 0 -> 16787 bytes
 pysal/examples/Chicago77.shp                       |  Bin 0 -> 687568 bytes
 pysal/examples/Chicago77.shx                       |  Bin 0 -> 716 bytes
 pysal/examples/Line.dbf                            |  Bin 0 -> 621 bytes
 pysal/examples/Line.prj                            |    1 +
 pysal/examples/Line.shp                            |  Bin 0 -> 564 bytes
 pysal/examples/Line.shx                            |  Bin 0 -> 132 bytes
 pysal/examples/NAT.dbf                             |  Bin 0 -> 2501092 bytes
 pysal/examples/NAT.shp                             |  Bin 0 -> 1462216 bytes
 pysal/examples/NAT.shx                             |  Bin 0 -> 24780 bytes
 pysal/examples/Point.dbf                           |  Bin 0 -> 1236 bytes
 pysal/examples/Point.prj                           |    1 +
 pysal/examples/Point.shp                           |  Bin 0 -> 352 bytes
 pysal/examples/Point.shx                           |  Bin 0 -> 172 bytes
 pysal/examples/Polygon.dbf                         |  Bin 0 -> 498 bytes
 pysal/examples/Polygon.prj                         |    1 +
 pysal/examples/Polygon.shp                         |  Bin 0 -> 992 bytes
 pysal/examples/Polygon.shx                         |  Bin 0 -> 124 bytes
 pysal/examples/README.txt                          |    5 +
 pysal/examples/__init__.py                         |    9 +
 pysal/examples/arcgis_ohio.dbf                     |  Bin 0 -> 21876 bytes
 pysal/examples/arcgis_txt.txt                      |    9 +
 pysal/examples/baltim.dbf                          |  Bin 0 -> 31595 bytes
 pysal/examples/baltim.shp                          |  Bin 0 -> 6008 bytes
 pysal/examples/baltim.shx                          |  Bin 0 -> 1788 bytes
 pysal/examples/baltim_k4.gwt                       |  845 +++
 pysal/examples/baltim_q.gal                        |  423 ++
 pysal/examples/book.gal                            |   33 +
 pysal/examples/book.txt                            |   18 +
 pysal/examples/burkitt.dbf                         |  Bin 0 -> 7558 bytes
 pysal/examples/burkitt.shp                         |  Bin 0 -> 5364 bytes
 pysal/examples/burkitt.shx                         |  Bin 0 -> 1604 bytes
 pysal/examples/calempdensity.csv                   |   59 +
 pysal/examples/columbus.dbf                        |  Bin 0 -> 10082 bytes
 pysal/examples/columbus.gal                        |   99 +
 pysal/examples/columbus.html                       |  132 +
 pysal/examples/columbus.json                       |  104 +
 pysal/examples/columbus.shp                        |  Bin 0 -> 21980 bytes
 pysal/examples/columbus.shx                        |  Bin 0 -> 492 bytes
 pysal/examples/desmith.gal                         |   21 +
 pysal/examples/desmith.txt                         |   12 +
 pysal/examples/eberly_net.dbf                      |  Bin 0 -> 1348 bytes
 pysal/examples/eberly_net.shp                      |  Bin 0 -> 2652 bytes
 pysal/examples/eberly_net.shx                      |  Bin 0 -> 332 bytes
 pysal/examples/eberly_net_pts_offnetwork.dbf       |  Bin 0 -> 2197 bytes
 pysal/examples/eberly_net_pts_offnetwork.shp       |  Bin 0 -> 2900 bytes
 pysal/examples/eberly_net_pts_offnetwork.shx       |  Bin 0 -> 900 bytes
 pysal/examples/eberly_net_pts_onnetwork.dbf        |  Bin 0 -> 1275 bytes
 pysal/examples/eberly_net_pts_onnetwork.shp        |  Bin 0 -> 3180 bytes
 pysal/examples/eberly_net_pts_onnetwork.shx        |  Bin 0 -> 980 bytes
 pysal/examples/examples.txt                        |  141 +
 pysal/examples/geobugs_scot                        |   66 +
 pysal/examples/geodanet/crimes.dbf                 |  Bin 0 -> 5551 bytes
 pysal/examples/geodanet/crimes.prj                 |    1 +
 pysal/examples/geodanet/crimes.sbn                 |  Bin 0 -> 3228 bytes
 pysal/examples/geodanet/crimes.sbx                 |  Bin 0 -> 428 bytes
 pysal/examples/geodanet/crimes.shp                 |  Bin 0 -> 8136 bytes
 pysal/examples/geodanet/crimes.shp.xml             |    3 +
 pysal/examples/geodanet/crimes.shx                 |  Bin 0 -> 2396 bytes
 pysal/examples/geodanet/schools.dbf                |  Bin 0 -> 146 bytes
 pysal/examples/geodanet/schools.prj                |    1 +
 pysal/examples/geodanet/schools.sbn                |  Bin 0 -> 212 bytes
 pysal/examples/geodanet/schools.sbx                |  Bin 0 -> 124 bytes
 pysal/examples/geodanet/schools.shp                |  Bin 0 -> 324 bytes
 pysal/examples/geodanet/schools.shp.xml            |  546 ++
 pysal/examples/geodanet/schools.shx                |  Bin 0 -> 164 bytes
 pysal/examples/geodanet/streets.dbf                |  Bin 0 -> 11525 bytes
 pysal/examples/geodanet/streets.prj                |    1 +
 pysal/examples/geodanet/streets.sbn                |  Bin 0 -> 3388 bytes
 pysal/examples/geodanet/streets.sbx                |  Bin 0 -> 540 bytes
 pysal/examples/geodanet/streets.shp                |  Bin 0 -> 26044 bytes
 pysal/examples/geodanet/streets.shx                |  Bin 0 -> 2444 bytes
 pysal/examples/juvenile.dbf                        |  Bin 0 -> 4834 bytes
 pysal/examples/juvenile.gwt                        | 2803 +++++++++
 pysal/examples/juvenile.html                       |   63 +
 pysal/examples/juvenile.shp                        |  Bin 0 -> 4804 bytes
 pysal/examples/juvenile.shx                        |  Bin 0 -> 1444 bytes
 pysal/examples/lattice10x10.shp                    |  Bin 0 -> 13700 bytes
 pysal/examples/lattice10x10.shx                    |  Bin 0 -> 900 bytes
 pysal/examples/mexico.csv                          |   33 +
 pysal/examples/mexico.gal                          |   65 +
 pysal/examples/nat_queen.gal                       | 6171 ++++++++++++++++++++
 pysal/examples/nat_queen_old.gal                   | 6171 ++++++++++++++++++++
 pysal/examples/natregimes.dbf                      |  Bin 0 -> 2627705 bytes
 pysal/examples/natregimes.shp                      |  Bin 0 -> 1462216 bytes
 pysal/examples/natregimes.shx                      |  Bin 0 -> 24780 bytes
 pysal/examples/nonplanarsegments.dbf               |  Bin 0 -> 87 bytes
 pysal/examples/nonplanarsegments.prj               |    1 +
 pysal/examples/nonplanarsegments.qpj               |    1 +
 pysal/examples/nonplanarsegments.shp               |  Bin 0 -> 308 bytes
 pysal/examples/nonplanarsegments.shx               |  Bin 0 -> 116 bytes
 pysal/examples/ohio.swm                            |  Bin 0 -> 6978 bytes
 pysal/examples/rook31.dbf                          |  Bin 0 -> 161 bytes
 pysal/examples/rook31.gal                          |    7 +
 pysal/examples/rook31.shp                          |  Bin 0 -> 556 bytes
 pysal/examples/rook31.shx                          |  Bin 0 -> 124 bytes
 pysal/examples/sacramentot2.dbf                    |  Bin 0 -> 98149 bytes
 pysal/examples/sacramentot2.gal                    |  807 +++
 pysal/examples/sacramentot2.sbn                    |  Bin 0 -> 11172 bytes
 pysal/examples/sacramentot2.sbx                    |  Bin 0 -> 588 bytes
 pysal/examples/sacramentot2.shp                    |  Bin 0 -> 1587812 bytes
 pysal/examples/sacramentot2.shx                    |  Bin 0 -> 3324 bytes
 pysal/examples/sids2.dbf                           |  Bin 0 -> 23810 bytes
 pysal/examples/sids2.gal                           |  201 +
 pysal/examples/sids2.html                          |  124 +
 pysal/examples/sids2.shp                           |  Bin 0 -> 46196 bytes
 pysal/examples/sids2.shx                           |  Bin 0 -> 900 bytes
 pysal/examples/snow_maps/SohoPeople.dbf            |  Bin 0 -> 3662 bytes
 pysal/examples/snow_maps/SohoPeople.prj            |    1 +
 pysal/examples/snow_maps/SohoPeople.sbn            |  Bin 0 -> 3460 bytes
 pysal/examples/snow_maps/SohoPeople.sbx            |  Bin 0 -> 380 bytes
 pysal/examples/snow_maps/SohoPeople.shp            |  Bin 0 -> 9172 bytes
 pysal/examples/snow_maps/SohoPeople.shx            |  Bin 0 -> 2692 bytes
 pysal/examples/snow_maps/SohoWater.dbf             |  Bin 0 -> 157 bytes
 pysal/examples/snow_maps/SohoWater.prj             |    1 +
 pysal/examples/snow_maps/SohoWater.sbn             |  Bin 0 -> 252 bytes
 pysal/examples/snow_maps/SohoWater.sbx             |  Bin 0 -> 124 bytes
 pysal/examples/snow_maps/SohoWater.shp             |  Bin 0 -> 464 bytes
 pysal/examples/snow_maps/SohoWater.shx             |  Bin 0 -> 204 bytes
 pysal/examples/snow_maps/Soho_Network.dbf          |  Bin 0 -> 892 bytes
 pysal/examples/snow_maps/Soho_Network.prj          |    1 +
 pysal/examples/snow_maps/Soho_Network.sbn          |  Bin 0 -> 1292 bytes
 pysal/examples/snow_maps/Soho_Network.sbx          |  Bin 0 -> 228 bytes
 pysal/examples/snow_maps/Soho_Network.shp          |  Bin 0 -> 11620 bytes
 pysal/examples/snow_maps/Soho_Network.shx          |  Bin 0 -> 1044 bytes
 pysal/examples/south.dbf                           |  Bin 0 -> 1145962 bytes
 pysal/examples/south.shp                           |  Bin 0 -> 737448 bytes
 pysal/examples/south.shx                           |  Bin 0 -> 11396 bytes
 pysal/examples/south_q.gal                         | 2825 +++++++++
 pysal/examples/spat-sym-us.mat                     |  Bin 0 -> 416 bytes
 pysal/examples/spat-sym-us.wk1                     |  Bin 0 -> 2221 bytes
 pysal/examples/spdep_listw2WB_columbus             |   59 +
 pysal/examples/spi_download.csv                    |   66 +
 pysal/examples/stata_full.txt                      |   57 +
 pysal/examples/stata_sparse.txt                    |   57 +
 pysal/examples/states48.gal                        |   97 +
 pysal/examples/stl.gal                             |  157 +
 pysal/examples/stl_hom.csv                         |   79 +
 pysal/examples/stl_hom.dbf                         |  Bin 0 -> 23280 bytes
 pysal/examples/stl_hom.html                        |  143 +
 pysal/examples/stl_hom.shp                         |  Bin 0 -> 28276 bytes
 pysal/examples/stl_hom.shx                         |  Bin 0 -> 724 bytes
 pysal/examples/stl_hom.txt                         |   80 +
 pysal/examples/stl_hom.wkt                         |   78 +
 pysal/examples/stl_hom_rook.gal                    |  157 +
 pysal/examples/street_net_pts.dbf                  |  Bin 0 -> 3398 bytes
 pysal/examples/street_net_pts.prj                  |    1 +
 pysal/examples/street_net_pts.qpj                  |    1 +
 pysal/examples/street_net_pts.shp                  |  Bin 0 -> 8584 bytes
 pysal/examples/street_net_pts.shx                  |  Bin 0 -> 2524 bytes
 pysal/examples/taz.dbf                             |  Bin 0 -> 485344 bytes
 pysal/examples/taz.shp                             |  Bin 0 -> 5774828 bytes
 pysal/examples/taz.shx                             |  Bin 0 -> 32972 bytes
 pysal/examples/us48.dbf                            |  Bin 0 -> 4274 bytes
 pysal/examples/us48.shp                            |  Bin 0 -> 186476 bytes
 pysal/examples/us48.shx                            |  Bin 0 -> 484 bytes
 pysal/examples/usjoin.csv                          |   49 +
 pysal/examples/virginia.dbf                        |  Bin 0 -> 11410 bytes
 pysal/examples/virginia.gal                        |  273 +
 pysal/examples/virginia.prj                        |    1 +
 pysal/examples/virginia.shp                        |  Bin 0 -> 71416 bytes
 pysal/examples/virginia.shx                        |  Bin 0 -> 1188 bytes
 pysal/examples/virginia_rook.gal                   |  273 +
 pysal/examples/wmat.dat                            |  232 +
 pysal/examples/wmat.mtx                            |  237 +
 pysal/inequality/__init__.py                       |    8 +
 pysal/inequality/_indices.py                       |  579 ++
 pysal/inequality/gini.py                           |  167 +
 pysal/inequality/tests/__init__.py                 |    0
 pysal/inequality/tests/test_theil.py               |   42 +
 pysal/inequality/theil.py                          |  201 +
 pysal/meta/akern1.wmd                              |   15 +
 pysal/meta/chain.wmd                               |   14 +
 pysal/meta/chain2.wmd                              |   14 +
 pysal/meta/kernel.wmd                              |   17 +
 pysal/meta/knn.wmd                                 |   14 +
 pysal/meta/taz_block.wmd                           |   11 +
 pysal/meta/taz_intersection.wmd                    |   16 +
 pysal/meta/taz_rook.wmd                            |   10 +
 pysal/meta/wmd.py                                  |  476 ++
 pysal/meta/wrook1.wmd                              |    9 +
 pysal/meta/wrooko1.wmd                             |   12 +
 pysal/meta/wrooko2.wmd                             |   12 +
 pysal/meta/wrooko2l.wmd                            |   12 +
 pysal/network/Network Usage.ipynb                  |  644 ++
 pysal/network/__init__.py                          |    7 +
 pysal/network/analysis.py                          |  186 +
 pysal/network/network.py                           | 1162 ++++
 pysal/network/util.py                              |  155 +
 pysal/region/__init__.py                           |    9 +
 pysal/region/components.py                         |  160 +
 pysal/region/maxp.py                               |  588 ++
 pysal/region/randomregion.py                       |  527 ++
 pysal/region/tests/__init__.py                     |    0
 pysal/region/tests/test_components.py              |   25 +
 pysal/region/tests/test_maxp.py                    |   56 +
 pysal/region/tests/test_randomregion.py            |  177 +
 pysal/spatial_dynamics/__init__.py                 |   10 +
 pysal/spatial_dynamics/directional.py              |  187 +
 pysal/spatial_dynamics/ergodic.py                  |  179 +
 pysal/spatial_dynamics/interaction.ipynb           | 4151 +++++++++++++
 pysal/spatial_dynamics/interaction.py              |  641 ++
 pysal/spatial_dynamics/markov.py                   | 1548 +++++
 pysal/spatial_dynamics/rank.py                     |  443 ++
 pysal/spatial_dynamics/tests/__init__.py           |    0
 pysal/spatial_dynamics/tests/test_directional.py   |   62 +
 pysal/spatial_dynamics/tests/test_ergodic.py       |   55 +
 pysal/spatial_dynamics/tests/test_interaction.py   |   80 +
 pysal/spatial_dynamics/tests/test_markov.py        |  190 +
 pysal/spatial_dynamics/tests/test_rank.py          |   73 +
 pysal/spatial_dynamics/tests/test_util.py          |   41 +
 pysal/spatial_dynamics/util.py                     |   79 +
 pysal/spreg/__init__.py                            |   20 +
 pysal/spreg/diagnostics.py                         | 1424 +++++
 pysal/spreg/diagnostics_sp.py                      |  826 +++
 pysal/spreg/diagnostics_tsls.py                    |  337 ++
 pysal/spreg/error_sp.py                            | 1133 ++++
 pysal/spreg/error_sp_het.py                        | 1509 +++++
 pysal/spreg/error_sp_het_regimes.py                | 1481 +++++
 pysal/spreg/error_sp_hom.py                        | 1522 +++++
 pysal/spreg/error_sp_hom_regimes.py                | 1496 +++++
 pysal/spreg/error_sp_regimes.py                    | 1374 +++++
 pysal/spreg/ml_error.py                            |  511 ++
 pysal/spreg/ml_error_regimes.py                    |  462 ++
 pysal/spreg/ml_lag.py                              |  593 ++
 pysal/spreg/ml_lag_regimes.py                      |  484 ++
 pysal/spreg/ols.py                                 |  471 ++
 pysal/spreg/ols_regimes.py                         |  541 ++
 pysal/spreg/probit.py                              |  684 +++
 pysal/spreg/regimes.py                             |  689 +++
 pysal/spreg/robust.py                              |  169 +
 pysal/spreg/summary_output.py                      | 1235 ++++
 pysal/spreg/tests/test_diagnostics.py              |  128 +
 pysal/spreg/tests/test_diagnostics_sp.py           |  179 +
 pysal/spreg/tests/test_diagnostics_tsls.py         |   66 +
 pysal/spreg/tests/test_error_sp.py                 |  317 +
 pysal/spreg/tests/test_error_sp_het.py             |  407 ++
 pysal/spreg/tests/test_error_sp_het_regimes.py     |  307 +
 pysal/spreg/tests/test_error_sp_het_sparse.py      |  414 ++
 pysal/spreg/tests/test_error_sp_hom.py             |  315 +
 pysal/spreg/tests/test_error_sp_hom_regimes.py     |  307 +
 pysal/spreg/tests/test_error_sp_hom_sparse.py      |  320 +
 pysal/spreg/tests/test_error_sp_regimes.py         |  305 +
 pysal/spreg/tests/test_error_sp_sparse.py          |  332 ++
 pysal/spreg/tests/test_ml_error.py                 |   71 +
 pysal/spreg/tests/test_ml_error_regimes.py         |  138 +
 pysal/spreg/tests/test_ml_lag.py                   |   68 +
 pysal/spreg/tests/test_ml_lag_regimes.py           |  117 +
 pysal/spreg/tests/test_ols.py                      |  125 +
 pysal/spreg/tests/test_ols_regimes.py              |  144 +
 pysal/spreg/tests/test_ols_sparse.py               |  107 +
 pysal/spreg/tests/test_probit.py                   |  108 +
 pysal/spreg/tests/test_twosls.py                   |  254 +
 pysal/spreg/tests/test_twosls_regimes.py           |  276 +
 pysal/spreg/tests/test_twosls_sp.py                |  369 ++
 pysal/spreg/tests/test_twosls_sp_regimes.py        |  352 ++
 pysal/spreg/tests/test_twosls_sp_sparse.py         |  379 ++
 pysal/spreg/tests/test_twosls_sparse.py            |  258 +
 pysal/spreg/twosls.py                              |  475 ++
 pysal/spreg/twosls_regimes.py                      |  515 ++
 pysal/spreg/twosls_sp.py                           |  540 ++
 pysal/spreg/twosls_sp_regimes.py                   |  705 +++
 pysal/spreg/user_output.py                         |  642 ++
 pysal/spreg/utils.py                               |  839 +++
 pysal/spreg/w_utils.py                             |   27 +
 pysal/test_NameSpace.py                            |   51 +
 pysal/version.py                                   |    1 +
 pysal/weights/Contiguity.py                        |   97 +
 pysal/weights/Distance.py                          |  532 ++
 pysal/weights/Wsets.py                             |  506 ++
 pysal/weights/__init__.py                          |   12 +
 pysal/weights/_contW_binning.py                    |  389 ++
 pysal/weights/_contW_rtree.py                      |  116 +
 pysal/weights/spatial_lag.py                       |   84 +
 pysal/weights/tests/__init__.py                    |    0
 pysal/weights/tests/test_Contiguity.py             |   27 +
 pysal/weights/tests/test_Distance.py               |  149 +
 pysal/weights/tests/test_Wsets.py                  |   64 +
 pysal/weights/tests/test__contW_binning.py         |  132 +
 pysal/weights/tests/test__contW_rtree.py           |  138 +
 pysal/weights/tests/test_spatial_lag.py            |   41 +
 pysal/weights/tests/test_user.py                   |  161 +
 pysal/weights/tests/test_util.py                   |  186 +
 pysal/weights/tests/test_weights.py                |  451 ++
 pysal/weights/user.py                              | 1123 ++++
 pysal/weights/util.py                              | 1197 ++++
 pysal/weights/weights.py                           | 1066 ++++
 pysal/weights/weights_from_geojson.ipynb           |  311 +
 requirements.txt                                   |    1 +
 rtd.txt                                            |    1 +
 setup.cfg                                          |    6 +
 setup.py                                           |  120 +
 tools/cron.py                                      |   16 +
 tools/docs.sh                                      |   39 +
 tools/github_stats.py                              |  193 +
 tools/py3tool.py                                   |  345 ++
 tools/test.sh                                      |   69 +
 travis.txt                                         |    6 +
 592 files changed, 112811 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b3b303c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,79 @@
+*.py[cod]
+*.bak
+.ipynb_checkpoints/
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+nosetests.xml
+
+# Translations
+*.mo
+
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
+
+# OS generated files #
+######################
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+Icon?
+ehthumbs.db
+Thumbs.db
+
+
+# pysal
+#
+lattice.*
+.vagrant/
+pysal/contrib/viz/.ipynb_checkpoints/
+pysal/contrib/viz/bp.png
+pysal/contrib/viz/fj.png
+pysal/contrib/viz/fj_classless.png
+pysal/contrib/viz/lmet.tex
+pysal/contrib/viz/lmp.tex
+pysal/contrib/viz/lmplot.png
+pysal/contrib/viz/lmss.tex
+pysal/contrib/viz/lmt.tex
+pysal/contrib/viz/out.png
+pysal/contrib/viz/p.tex
+pysal/contrib/viz/quantiles.png
+pysal/contrib/viz/quantiles_HR60.png
+pysal/contrib/viz/quantiles_HR70.png
+pysal/contrib/viz/quantiles_HR80.png
+pysal/contrib/viz/quantiles_HR90.png
+pysal/contrib/viz/quatiles.png
+pysal/contrib/viz/region.ipynb
+pysal/contrib/viz/south_base.html
+pysal/contrib/viz/sp.tex
+pysal/contrib/viz/sss.tex
+pysal/examples/south.prj
+
+#Vi
+*.swp
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..e5d1168
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,46 @@
+language: python
+branches:
+only:
+  - master
+python:
+  - "2.7"
+  #- "2.6"
+  #
+env:
+  - SCIPY_VERSION="deb http://us.archive.ubuntu.com/ubuntu/ precise main universe"
+  - SCIPY_VERSION="deb http://us.archive.ubuntu.com/ubuntu/ trusty main universe"
+
+virtualenv:
+  system_site_packages: true
+
+before_install:
+  - sudo apt-get update -qq
+  - sudo apt-get install -qq libgeos-3.2.2 libgeos-dev python-numpy python-scipy
+  - sudo apt-add-repository "$SCIPY_VERSION"
+  - sudo apt-get update -qq
+  - sudo apt-get --only-upgrade install -qq python-numpy python-scipy
+
+install:
+  - pip install -q -r travis.txt
+  #- sudo pip install http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz/download
+  #- pip install coveralls
+  - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm
+
+script: 
+  - python -c 'import numpy; print numpy.__version__'
+  - python -c 'import scipy; print scipy.__version__'
+  - python setup.py install  >/dev/null
+  - python -c 'import pysal; print pysal.version'
+  #- python -c 'import multiprocessing as mp; print mp.cpu_count()'
+  #- pychecker --limit 1000 pysal
+  - python setup.py sdist >/dev/null
+  #- nosetests --processes=-1 --process-timeout=60 --verbosity=1 --ignore-files=collection --exclude-dir=pysal/contrib
+  - nosetests
+  - cd doc; make pickle; make doctest
+notifications:
+    email:
+        recipients:
+            - sjsrey at gmail.com
+            - phil.stphns at gmail.com
+        on_success: always
+        on_failure: always
diff --git a/CHANGELOG.txt b/CHANGELOG.txt
new file mode 100644
index 0000000..17a0e90
--- /dev/null
+++ b/CHANGELOG.txt
@@ -0,0 +1,1993 @@
+v<1.9.1>, 2015-01-31
+
+GitHub stats for 2015/01/30 - 2015/01/31 
+
+These lists are automatically generated, and may be incomplete or contain duplicates.
+
+The following 4 authors contributed 14 commits.
+
+* Dani Arribas-Bel
+* Serge Rey
+* Sergio Rey
+* jlaura
+
+
+We closed a total of 8 issues, 3 pull requests and 5 regular issues;
+this is the full list (generated with the script 
+:file:`tools/github_stats.py`):
+
+Pull Requests (3):
+
+* :ghpull:`566`: Fix for 1.9.0 missing file in setup.py
+* :ghpull:`563`: Updating release instructions
+* :ghpull:`561`: Rolling over to 1.10
+
+Issues (5):
+
+* :ghissue:`566`: Fix for 1.9.0 missing file in setup.py
+* :ghissue:`565`: Bsetup
+* :ghissue:`563`: Updating release instructions
+* :ghissue:`562`: adjustments to release management
+* :ghissue:`561`: Rolling over to 1.10
+
+
+v<1.9.0>, 2015-01-30
+
+GitHub stats for 2014/07/25 - 2015/01/30 
+
+These lists are automatically generated, and may be incomplete or contain duplicates.
+
+The following 12 authors contributed 131 commits.
+
+* Andy Reagan
+* Dani Arribas-Bel
+* Jay
+* Levi John Wolf
+* Philip Stephens
+* Qunshan
+* Serge Rey
+* jlaura
+* ljwolf
+* luc
+
+
+We closed a total of 113 issues, 44 pull requests and 69 regular issues;
+this is the full list (generated with the script 
+:file:`tools/github_stats.py`):
+
+Pull Requests (44):
+
+* :ghpull:`560`: modifying import scheme for network module
+* :ghpull:`559`: Network2
+* :ghpull:`558`: Network2
+* :ghpull:`557`: Network2
+* :ghpull:`556`: Added analytical functions and edge segmentation
+* :ghpull:`550`: Network2
+* :ghpull:`553`: correction in denominator of spatial tau. 
+* :ghpull:`547`: Updates to get network integrated
+* :ghpull:`544`: update .gitignore
+* :ghpull:`543`: k nearest neighbor gwt example file for baltimore points (with k=4) added to examples directory
+* :ghpull:`542`: new format nat_queen.gal file added to examples directory
+* :ghpull:`541`: Update tutorial docs for new book
+* :ghpull:`540`: doc: updating instructions for anaconda and enthought
+* :ghpull:`539`: doc: pysal is now on sagemathcloud
+* :ghpull:`538`: Clean up of cg and fixes of other doctests/formats
+* :ghpull:`536`: adding entry for getis ord module
+* :ghpull:`537`: new opendata module for contrib
+* :ghpull:`535`: Add method for extracting data columns as Numpy array rather than list
+* :ghpull:`534`: added geogrid to __all__ in sphere.py
+* :ghpull:`533`: added geogrid function to create a grid of points on a sphere
+* :ghpull:`532`: new functions to deal with spherical geometry: lat-lon conversion, degre...
+* :ghpull:`530`: I390
+* :ghpull:`528`: Replacing 0 by min value in choropleths
+* :ghpull:`526`: B166
+* :ghpull:`525`: copyright update
+* :ghpull:`524`: New homogeneity tests for general case and spatial markov as a special case
+* :ghpull:`523`: pointing to github.io pages
+* :ghpull:`520`: Same typo. Toolkit.
+* :ghpull:`518`: Update util.py
+* :ghpull:`519`: Typo
+* :ghpull:`517`: Documentation correction for Prais Conditional Mobility Index
+* :ghpull:`516`: ENH for https://github.com/PySAL/PySAL.github.io/issues/17
+* :ghpull:`515`: BUG: conditional check for extension of lower bound of colorbar to conta...
+* :ghpull:`514`: ENH: adding the user_defined classification
+* :ghpull:`513`: rewriting to not use ipython notebook --pylab=line
+* :ghpull:`512`: Viz
+* :ghpull:`508`: Adding barebones pysal2matplotlib options in viz
+* :ghpull:`511`: DOC updating news
+* :ghpull:`507`: Sched
+* :ghpull:`510`: BUG: fix for #509
+* :ghpull:`506`: 1.9dev
+* :ghpull:`505`: REL bumping master to 1.9.0dev
+* :ghpull:`504`: Release prep 1.8
+* :ghpull:`503`: Grid for landing page
+
+Issues (69):
+
+* :ghissue:`560`: modifying import scheme for network module
+* :ghissue:`559`: Network2
+* :ghissue:`558`: Network2
+* :ghissue:`557`: Network2
+* :ghissue:`556`: Added analytical functions and edge segmentation
+* :ghissue:`555`: Added edge segmentation by distance
+* :ghissue:`550`: Network2
+* :ghissue:`553`: correction in denominator of spatial tau. 
+* :ghissue:`549`: Network2
+* :ghissue:`547`: Updates to get network integrated
+* :ghissue:`548`: Installation Issues
+* :ghissue:`546`: Network2
+* :ghissue:`545`: Network
+* :ghissue:`544`: update .gitignore
+* :ghissue:`543`: k nearest neighbor gwt example file for baltimore points (with k=4) added to examples directory
+* :ghissue:`542`: new format nat_queen.gal file added to examples directory
+* :ghissue:`541`: Update tutorial docs for new book
+* :ghissue:`540`: doc: updating instructions for anaconda and enthought
+* :ghissue:`539`: doc: pysal is now on sagemathcloud
+* :ghissue:`538`: Clean up of cg and fixes of other doctests/formats
+* :ghissue:`536`: adding entry for getis ord module
+* :ghissue:`537`: new opendata module for contrib
+* :ghissue:`535`: Add method for extracting data columns as Numpy array rather than list
+* :ghissue:`534`: added geogrid to __all__ in sphere.py
+* :ghissue:`533`: added geogrid function to create a grid of points on a sphere
+* :ghissue:`532`: new functions to deal with spherical geometry: lat-lon conversion, degre...
+* :ghissue:`390`: add option to have local moran quadrant codes align with geoda
+* :ghissue:`530`: I390
+* :ghissue:`528`: Replacing 0 by min value in choropleths
+* :ghissue:`526`: B166
+* :ghissue:`176`: contrib module for proj 4
+* :ghissue:`178`: contrib module for gdal/org
+* :ghissue:`203`: implement network class in spatialnet
+* :ghissue:`204`: pysal-networkx util functions
+* :ghissue:`209`: csv reader enhancement
+* :ghissue:`215`: Add a tutorial for the spreg module
+* :ghissue:`244`: ps.knnW_from_shapefile returns wrong W ids when idVariable specified
+* :ghissue:`246`: Only use idVariable in W when writing out to file
+* :ghissue:`283`: Create new nodes at intersections of edges
+* :ghissue:`291`: Enum links around regions hangs
+* :ghissue:`292`: Handle multiple filaments within a region in the Wed construction
+* :ghissue:`302`: Handle hole polygons when constructing wed
+* :ghissue:`309`: Develop consistent solution for precision induced errors in doctests across platforms
+* :ghissue:`350`: reading/writing weights file with spaces in the ids
+* :ghissue:`450`: x_name and summary method not consistent in ols
+* :ghissue:`521`: Nosetests don't accept setup.cfg
+* :ghissue:`509`: ESDA bin type inconsistency
+* :ghissue:`525`: copyright update
+* :ghissue:`524`: New homogeneity tests for general case and spatial markov as a special case
+* :ghissue:`523`: pointing to github.io pages
+* :ghissue:`520`: Same typo. Toolkit.
+* :ghissue:`522`: Nosetests for python3 porting
+* :ghissue:`518`: Update util.py
+* :ghissue:`519`: Typo
+* :ghissue:`517`: Documentation correction for Prais Conditional Mobility Index
+* :ghissue:`516`: ENH for https://github.com/PySAL/PySAL.github.io/issues/17
+* :ghissue:`515`: BUG: conditional check for extension of lower bound of colorbar to conta...
+* :ghissue:`514`: ENH: adding the user_defined classification
+* :ghissue:`513`: rewriting to not use ipython notebook --pylab=line
+* :ghissue:`512`: Viz
+* :ghissue:`508`: Adding barebones pysal2matplotlib options in viz
+* :ghissue:`511`: DOC updating news
+* :ghissue:`507`: Sched
+* :ghissue:`510`: BUG: fix for #509
+* :ghissue:`502`: spreg.ml_lag.ML_Lag is very very very time-consuming?
+* :ghissue:`506`: 1.9dev
+* :ghissue:`505`: REL bumping master to 1.9.0dev
+* :ghissue:`504`: Release prep 1.8
+* :ghissue:`503`: Grid for landing page
+
+
+v<1.8.0>, 2014-07-25
+
+GitHub stats for 2014/01/29 - 2014/07/25 
+
+These lists are automatically generated, and may be incomplete or contain duplicates.
+
+The following 8 authors contributed 281 commits.
+
+* Dani Arribas-Bel
+* Jay
+* Philip Stephens
+* Serge Rey
+* Sergio Rey
+* jlaura
+* pedrovma
+* sjsrey
+
+
+We closed a total of 160 issues, 60 pull requests and 100 regular issues;
+this is the full list (generated with the script 
+:file:`tools/github_stats.py`):
+
+Pull Requests (60):
+
+* :ghpull:`503`: Grid for landing page
+* :ghpull:`501`: Two figs rather than three
+* :ghpull:`500`: More efficient higher order operations
+* :ghpull:`499`: renamed nat_queen.gal for #452
+* :ghpull:`497`: ENH Deprecation warning for regime_weights #486
+* :ghpull:`494`: Enables testing against two versions of SciPy shipped with the last two Ubuntu LTS versions.
+* :ghpull:`490`: Fix for #487
+* :ghpull:`492`: BUG cleaning up temporary files for #398
+* :ghpull:`493`: Phil: Skipping several tests that fail due to precision under older scipy
+* :ghpull:`489`: test suite fixes
+* :ghpull:`488`: More tests to skip if scipy less than 11
+* :ghpull:`484`: ENH: cleaning up more test generated files
+* :ghpull:`483`: Forwarding Phil's commit: skipping doctests, conditional skip of unit tests
+* :ghpull:`482`: DOC cleaning up files after running doctests #398
+* :ghpull:`481`: DOC contrib updates and links
+* :ghpull:`480`: DOC cleaning up doctests
+* :ghpull:`479`: ENH Changing regime_weights to block_weights for #455
+* :ghpull:`478`: DOC: link fixes
+* :ghpull:`477`: cKDTree for #460
+* :ghpull:`476`: redefining w.remap_ids to take only a single arg
+* :ghpull:`475`: Adding docstrings and error check to fix #471
+* :ghpull:`470`: fixing order of args for api consistency. 
+* :ghpull:`469`: Idfix for #449
+* :ghpull:`463`: updating gitignore
+* :ghpull:`462`: ENH: handle the case of an ergodic distribution where one state has 0 probability
+* :ghpull:`458`: ENH: Vagrantfile for PySAL devs and workshops
+* :ghpull:`447`: Clusterpy
+* :ghpull:`456`: BUG: fix for #451 handling W or WSP in higher_order_sp
+* :ghpull:`454`: Foobar
+* :ghpull:`443`: Updating spreg: several minor bug and documentation fixes.
+* :ghpull:`453`: Resolving conflicts
+* :ghpull:`448`: Wsp
+* :ghpull:`445`: ENH: unique qualitative color ramp. Also refactoring for future ipython deprecation of --pylab=inline
+* :ghpull:`446`: Wmd
+* :ghpull:`444`: Scipy dependency
+* :ghpull:`442`: Wmd
+* :ghpull:`441`: fixed kernel wmd for updated wmd structure
+* :ghpull:`440`: ENH: sidebar for Releases and installation doc update
+* :ghpull:`439`: - events
+* :ghpull:`438`: ENH: pruning to respect flake8
+* :ghpull:`437`: BUG: fix for removal of scipy.stat._support #436
+* :ghpull:`433`: Rank markov
+* :ghpull:`424`: testing
+* :ghpull:`431`: FOSS4G
+* :ghpull:`430`: Network
+* :ghpull:`429`: moving analytics out of wed class and into their own module
+* :ghpull:`428`: Network
+* :ghpull:`427`: devel docs
+* :ghpull:`425`: Viz2contrib
+* :ghpull:`423`: Update news.rst
+* :ghpull:`422`: ENH: Update doc instructions for napoleon dependency
+* :ghpull:`421`: Adding files used in some examples as per Luc's request.
+* :ghpull:`419`: Doc fixes 1.7
+* :ghpull:`393`: Doc fixes 1.7
+* :ghpull:`417`: ENH hex lattice W for #416
+* :ghpull:`415`: Temporarily commenting out tests that are blocking Travis.
+* :ghpull:`407`: Viz: Moving into contrib/viz in master
+* :ghpull:`404`: version change
+* :ghpull:`401`: fixes #388
+* :ghpull:`402`: release changes
+
+Issues (100):
+
+* :ghissue:`503`: Grid for landing page
+* :ghissue:`501`: Two figs rather than three
+* :ghissue:`500`: More efficient higher order operations
+* :ghissue:`452`: nat_queen.gal example file
+* :ghissue:`499`: renamed nat_queen.gal for #452
+* :ghissue:`486`: add a deprecation warning on regime_weights
+* :ghissue:`497`: ENH Deprecation warning for regime_weights #486
+* :ghissue:`449`: Lower order neighbor included in higher order
+* :ghissue:`487`: Issue with w.weights when row-standardizing
+* :ghissue:`398`: running test suite generates files
+* :ghissue:`358`: Graph weights
+* :ghissue:`338`: ENH: Move Geary's C calculations to Cython.
+* :ghissue:`494`: Enables testing against two versions of SciPy shipped with the last two Ubuntu LTS versions.
+* :ghissue:`490`: Fix for #487
+* :ghissue:`492`: BUG cleaning up temporary files for #398
+* :ghissue:`493`: Phil: Skipping several tests that fail due to precision under older scipy
+* :ghissue:`489`: test suite fixes
+* :ghissue:`485`: Revert "ENH: cleaning up more test generated files"
+* :ghissue:`488`: More tests to skip if scipy less than 11
+* :ghissue:`484`: ENH: cleaning up more test generated files
+* :ghissue:`483`: Forwarding Phil's commit: skipping doctests, conditional skip of unit tests
+* :ghissue:`482`: DOC cleaning up files after running doctests #398
+* :ghissue:`481`: DOC contrib updates and links
+* :ghissue:`480`: DOC cleaning up doctests
+* :ghissue:`455`: regime weights vs block weights
+* :ghissue:`479`: ENH Changing regime_weights to block_weights for #455
+* :ghissue:`478`: DOC: link fixes
+* :ghissue:`460`: Optimize KDTree
+* :ghissue:`477`: cKDTree for #460
+* :ghissue:`472`: Check for any side effects from new id remapping in w.sparse
+* :ghissue:`473`: update all user space functions for new w.remap_ids
+* :ghissue:`476`: redefining w.remap_ids to take only a single arg
+* :ghissue:`263`: Transition to scipy.spatial.cKDTree from scipy.spatial.KDTree
+* :ghissue:`414`: Travis build is killing nosetests
+* :ghissue:`335`: Weights transformation docs
+* :ghissue:`471`: add docstring example for w.remap_ids
+* :ghissue:`475`: Adding docstrings and error check to fix #471
+* :ghissue:`405`: ENH: Handling ids in W (Leave open for discussion)
+* :ghissue:`470`: fixing order of args for api consistency. 
+* :ghissue:`469`: Idfix for #449
+* :ghissue:`467`: redirect pysal.org to new dynamic landing page
+* :ghissue:`466`: design the grid for the notebooks
+* :ghissue:`464`: design new dynamic landing page for github.io
+* :ghissue:`465`: move news out of docs and into dynamic landing page
+* :ghissue:`468`: Move dynamic items out of sphinx docs and into dynamic landing page
+* :ghissue:`463`: updating gitignore
+* :ghissue:`451`: docs for higher_order_sp have wrong argument types
+* :ghissue:`462`: ENH: handle the case of an ergodic distribution where one state has 0 probability
+* :ghissue:`458`: ENH: Vagrantfile for PySAL devs and workshops
+* :ghissue:`447`: Clusterpy
+* :ghissue:`456`: BUG: fix for #451 handling W or WSP in higher_order_sp
+* :ghissue:`457`: This is a test to see if pull request notifications get sent out to the list
+* :ghissue:`454`: Foobar
+* :ghissue:`443`: Updating spreg: several minor bug and documentation fixes.
+* :ghissue:`453`: Resolving conflicts
+* :ghissue:`412`: On travis and darwin test_ml_error_regimes.py hangs 
+* :ghissue:`448`: Wsp
+* :ghissue:`435`: Will spatial durbin model be added in the near future?
+* :ghissue:`445`: ENH: unique qualitative color ramp. Also refactoring for future ipython deprecation of --pylab=inline
+* :ghissue:`446`: Wmd
+* :ghissue:`444`: Scipy dependency
+* :ghissue:`442`: Wmd
+* :ghissue:`441`: fixed kernel wmd for updated wmd structure
+* :ghissue:`440`: ENH: sidebar for Releases and installation doc update
+* :ghissue:`439`: - events
+* :ghissue:`438`: ENH: pruning to respect flake8
+* :ghissue:`436`: Scipy 0.14 induced breakage
+* :ghissue:`437`: BUG: fix for removal of scipy.stat._support #436
+* :ghissue:`408`: Use of `platform.system()` to determine platform
+* :ghissue:`403`: Scipy dependency
+* :ghissue:`434`: W Object Metadata Attribute
+* :ghissue:`433`: Rank markov
+* :ghissue:`424`: testing
+* :ghissue:`432`: Implementation of rank Markov classes
+* :ghissue:`431`: FOSS4G
+* :ghissue:`430`: Network
+* :ghissue:`429`: moving analytics out of wed class and into their own module
+* :ghissue:`420`: Local Moran's I,  I Attribute Undefined
+* :ghissue:`418`: Extended pysal.weights.user.build_lattice_shapefile 
+* :ghissue:`428`: Network
+* :ghissue:`427`: devel docs
+* :ghissue:`426`: dev docs
+* :ghissue:`425`: Viz2contrib
+* :ghissue:`423`: Update news.rst
+* :ghissue:`422`: ENH: Update doc instructions for napoleon dependency
+* :ghissue:`421`: Adding files used in some examples as per Luc's request.
+* :ghissue:`419`: Doc fixes 1.7
+* :ghissue:`393`: Doc fixes 1.7
+* :ghissue:`416`: Add hexagonal lattice option for lat2W
+* :ghissue:`417`: ENH hex lattice W for #416
+* :ghissue:`409`: add wiki page on viz module design
+* :ghissue:`413`: Temporary fix for https://github.com/pysal/pysal/issues/412
+* :ghissue:`415`: Temporarily commenting out tests that are blocking Travis.
+* :ghissue:`407`: Viz: Moving into contrib/viz in master
+* :ghissue:`406`: Viz: pruning old code and adding more examples for TAZ paper
+* :ghissue:`380`: Pep 8 and Line Length
+* :ghissue:`404`: version change
+* :ghissue:`401`: fixes #388
+* :ghissue:`388`: update testing procedures docs
+* :ghissue:`402`: release changes
+
+v<1.7.0>, 2014-01-29
+
+36d268f Philip Stephens -Merge pull request #400 from sjsrey/mldoc
+c2c4741 Serge Rey -Formatting ml docs
+685f5e3 Sergio Rey -Merge pull request #399 from sjsrey/master
+481ccb4 Serge Rey -correct thanks
+4a5cce3 Sergio Rey -Update index.txt
+1fe7aeb Philip Stephens -Merge pull request #396 from sjsrey/mldoc
+e731278 Serge Rey -EHN: fixing link to bleeding edge docs.
+e4e9930 Serge Rey -ENH: adding ml docs to api
+9b3c77e Serge Rey -Merge branch 'master' of github.com:pysal/pysal
+dda3c01 Philip Stephens -Merge pull request #389 from dfolch/master
+74b26d5 Philip Stephens -Merge pull request #392 from pedrovma/spreg17
+b47ba84 pedrovma -Bump.
+3d8504c Sergio Rey -Merge pull request #386 from pastephens/master
+f9b59ea Philip Stephens -Merge branch 'master' of https://github.com/pysal/pysal
+429e19e pedrovma -Upgrading to spreg 1.7.
+c698747 David Folch -removing legacy speedup hack that is no longer relevant
+88177d0 Sergio Rey -Merge pull request #387 from sjsrey/scipy13
+64a4089 Serge Rey -BUG: sorting ijs for asymmetries
+5539ef5 Sergio Rey -Merge pull request #1 from sjsrey/scipy13
+8a86951 Serge Rey -BUG: fixes for scipy .0.9.0 to 0.13.0 induced errors
+fe02796 Philip Stephens -tweaking travis to only run master commits
+8c1fbe8 jlaura -Merge pull request #385 from sjsrey/docupdate
+b71aedc Serge Rey -ENH: update date
+4f237e4 Sergio Rey -Merge pull request #384 from sjsrey/moran
+01da3be Serge Rey -ENH: Analytical p-values for Moran are two-tailed by default #337
+918fe60 Philip Stephens -further travis tweaks
+3920d73 Sergio Rey -Merge pull request #382 from sjsrey/st_docs
+d90bc70 Serge Rey -DOC: updating refs for concordance algorithm
+0db2790 Philip Stephens -tweaks to travis
+063e057 Philip Stephens -upgrading scipy on travis
+f90e742 Philip Stephens -Merge branch 'master' of https://github.com/pysal/pysal
+edc9c07 Dani Arribas-Bel -Merge pull request #379 from sjsrey/b244
+82479bb Serge Rey -BUG: fix for the comment https://github.com/pysal/pysal/issues/244#issuecomment-30055558
+57ba485 jlaura -Update README.md
+981ed31 Sergio Rey -Merge pull request #377 from darribas/master
+3320c39 darribas -Changing cmap default in plot_choropleth so every type defaults to its own adecuate colormap
+e063bee darribas -Fixing ignorance of argument cmap in base_choropleth_unique
+1f10906 Dani Arribas-Bel -Merge pull request #375 from sjsrey/viz
+94aa3e7 Dani Arribas-Bel -Merge pull request #376 from pedrovma/baltim_data
+7568b0b pedrovma -Adding Baltimore example dataset for use with LM models.
+5b23f89 Serge Rey -greys for classless map
+d4eae1e Dani Arribas-Bel -Merge pull request #374 from sjsrey/viz
+652440d Serge Rey -shrinking colorbar
+c17bf67 Sergio Rey -Merge pull request #373 from darribas/master
+a71c3cb darribas -Fixing minor conflict to merge darribas viz branch into darribas master
+ec27e30 Dani Arribas-Bel -Merge pull request #372 from sjsrey/viz
+8c03170 Serge Rey -option for resolution of output figs
+3fc5bd4 Philip Stephens -Merge branch 'master' of https://github.com/pysal/pysal
+2b5cb23 jlaura -Merge pull request #371 from sjsrey/geopandas
+469afa7 Serge Rey -fix for #370
+59cdafc jlaura -Merge pull request #369 from pedrovma/south_data
+6b88e13 jlaura -Merge pull request #368 from schmidtc/issue367
+40fe928 pedrovma -Adding south data to be used in ML doctests.
+bcc257e schmidtc -fixes #367
+87e057f jlaura -Merge pull request #366 from sjsrey/ml_lag
+a64eb27 Serge Rey -queen contiguity for nat.shp
+77add5c Sergio Rey -Merge pull request #365 from sjsrey/news
+82464ef Serge Rey -narsc workshop
+fd79424 Sergio Rey -Merge pull request #364 from sjsrey/news
+bc7f25a Serge Rey -Merge branch 'master' of https://github.com/sjsrey/pysal
+d669913 David Folch -Merge pull request #363 from sjsrey/maxp
+22f9e36 Serge Rey -update example for bug fix #362
+fac3b8a Serge Rey -- update tests for bug fix #362
+44b4b06 Sergio Rey -Merge pull request #1 from sjsrey/maxp
+1e6f1e5 Serge Rey -- fix for #362
+68ab3e9 Sergio Rey -Merge pull request #361 from sjsrey/components
+aa27c7e Serge Rey -doc test fix
+7c08208 Serge Rey -putting Graph class back in for component checking
+003b519 Serge Rey -alternative efficient component checker
+2080e62 Serge Rey -- fixing doc
+4fda442 Serge Rey -Merge branch 'components' of github.com:sjsrey/pysal into components
+e9e613b Serge Rey -reverting back to old component check
+83d855e Serge Rey -updating example
+9defd86 jlaura -Merge pull request #360 from sjsrey/components
+6f92335 Serge Rey -more efficient connectivity test
+ebde3d1 Dani Arribas-Bel -Adding try/except for ogr since it's only used to reprojection methods but not on the plotting toolkit
+5b170eb Sergio Rey -Merge pull request #356 from sjsrey/classification
+c9dac41 Serge Rey -- update unit tests for reshaping jenks caspal
+d9b06e2 Sergio Rey -Merge pull request #355 from sjsrey/cleanup/moran
+dc589e8 darribas -Adding caution note when plotting points to the notebook. Ideally, we wanna be able to build a PathCollection out of the XYs, but for now we rely on plt.scatter, which gets the job done but has some problems.
+2224b95 darribas -Including support for points in  base_choropleth_unique and base_choropleth_classless
+ac2d08a darribas -Modifying example to show how to do choropleth mapping on points
+270786e darribas -Adding support for choropleth plotting on point map objects (this may come from map_point_shp or from a simple matplotlib scatter
+e56697c Sergio Rey -Merge pull request #357 from jlaura/newstyle_wed
+4c67c2f Jay -errors in segmentation fixed
+512cc76 Serge Rey -have Jenks-Caspall bins be a one dimensional array - to be consistent with all other classifiers
+5254859 Philip Stephens -Merge branch 'master' of https://github.com/pysal/pysal
+788ecab Serge -pruning
+5b6b7b6 Serge -pruning
+eb7e9a1 Jay -bug fix and all pointers filled for external edges
+e47aa7a Jay -Node insertion, precursor to segmentation.
+18a44d1 darribas -*Replacing shp by map_obj in medium layer functionality. *Bringing everything else in line with it *Adding example for line colorig and mixing overlaying of points.
+bd041b1 darribas -Replacing shp_link by shp as input for medium and low-level layers. This brings much more flexibility and opens the door to plot formats other than shapefiles (e.g. geojson)
+c74a361 darribas -Adding IP notebook to exemplify and keep track of development of mapping module
+d23c882 darribas -Minor fixes
+4b82a76 darribas -New commit message* Replacing map_poly_shp_lonlat for map_poly_shp in base_choropleth_classif/unique/classless * removed 'projection' from base_choropleth_classif/unique/classless * Allow base_choropleth_classif/unique/classless to plot multi-part polygons properly * changes streamlined to generic plot_choropleth * Added dependency on pandas for rapid reindexing (this is done externally on the method _expand_values so it is easy to drop the dependency when necessary/tim [...]
+7a0eaec darribas -Merge branch 'viz' of github.com:darribas/pysal into viz
+5536424 darribas -Merge branch 'master' of github.com:darribas/pysal
+e54ce16 Sergio Rey -Merge pull request #353 from darribas/master
+819ee60 darribas -Adding immediate todo on head of the file
+946772d darribas -Passing k to base_choropleth_classif from plot_choropleth. This should fix Issue #352
+f299b45 darribas -Merge branch 'master' of https://github.com/pysal/pysal
+f044f43 Jay -Added W generation
+5f48446 jlaura -Merge pull request #348 from sjsrey/master
+938a1ae Serge Rey -- adding nn stats to point based methods
+a86a051 Philip Stephens -removing dependency tracking service, it was ruby only
+1e24fde Philip Stephens -testing dependency tracking service
+3aa410c Philip Stephens -Merge pull request #347 from pedrovma/w_silence_island
+03990f6 pedrovma -Extending PR #310 (silence island warnings) to include w.transform.
+160001a Sergio Rey -Merge pull request #346 from jlaura/newstyle_wed
+44989f9 Sergio Rey -Merge pull request #345 from sjsrey/master
+2fd99b8 Sergio Rey -Update README.md
+bdcc6a8 Jay -NCSR with uniform distribution
+769aa03 Jay -Fixed snapping
+2561071 Jay -saved notebook and updated readme
+3784783 Jay -ReadMe for Changes
+019e16b Sergio Rey -Merge pull request #334 from jseabold/fix-build-example-dirs
+1889885 Skipper Seabold -BLD: Correctly install package_data dirs.
+ff4e355 Serge Rey -- assignments
+c5b0cc0 Serge Rey -- reorg
+a4f5642 Serge Rey -Merge branch 'network' of github.com:pysal/pysal into network
+a95fec8 jlaura -Update README.md
+1713145 Serge Rey -Merge branch 'master' of github.com:pysal/pysal into network
+ede75c0 Sergio Rey -Merge pull request #329 from jlaura/wed_polar
+7399cf2 Jay -Single-source shortest path notebook
+9eb3fc1 Philip Stephens -Merge pull request #331 from sjsrey/docfix
+ef9c82a Serge Rey -- sphinx doctest markup fix
+1e2b6b3 jlaura -Update README.md
+e19bffa jlaura -Merge pull request #330 from pysal/b328
+6afc30b Serge Rey -- tutorial doc fixes for #328
+c7239f1 Serge Rey -- b328 fix
+d5fec13 Serge Rey -- fix for #328  making all p-values one-tailed
+16b5e6e Jay -enumeration working with filaments
+9507bbc jlaura -Update README.md
+eef8eec Serge Rey -- stub for design of module
+2707d60 Jay -Filaments in polar coordinates
+b64f9e2 Serge Rey -Documentation for the development of network module
+b90876e Serge Rey -Merge branch 'network' of github.com:pysal/pysal into network
+ddad2a5 Philip Stephens -Merge pull request #326 from sjsrey/doc
+6b0cd08 Serge Rey -- update release schedule
+4cc7bca Jay -bisecting for single point working
+79c77d9 jlaura -Merge pull request #324 from pysal/bf_id
+9f4c7c9 Serge Rey -id is a keyword
+72b1f85 Sergio Rey -Merge pull request #323 from jlaura/network
+b5cdae0 Jay -fix to shp2graph
+846dce2 Jay -Brute force for point outside network
+d6c2ef4 Jay -Added length computation, alter global morans
+b7e1465 Jay -Added new pointer to reader/writer
+616d62d Jay -LISA and Global Morans on the network
+16f84d6 Jay -Added explicit point external to network warning
+34f4d8e Jay -update to the ipython notebook
+e359e59 Jay -JSON and cPickle Binary WED Reader/Writer
+5373c82 Sergio Rey -Merge pull request #322 from jlaura/network
+059d99c Jay -wed into class, tests added
+aa5969d Sergio Rey -Merge pull request #320 from pastephens/master
+a18000b Philip Stephens -version added info
+5b8d490 Philip Stephens -typo
+d31a22a Philip Stephens -stubs for cg docs
+4dbdfe3 schmidtc -fixes #318
+35a0317 Jay -Merge branch 'master' of https://github.com/pysal/pysal into network
+77e8387 Jay -Merge branch 'geojson' of https://github.com/pysal/pysal into network
+ad670c5 Sergio Rey -Merge pull request #317 from pastephens/master
+628f27e Philip Stephens -merging local changes
+f9dcb3e Philip Stephens -simplified install instructions
+f2fab4c Serge Rey -- notebook on w construction for geojson
+830826b Serge Rey -prototyping W from geojson
+b10240d Serge Rey -created with "ogr2ogr -lco WRITE_BBOX=YES -f "GeoJSON" columbus.json columbus.shp"
+d546926 Philip Stephens -merging with pull
+d711011 darribas -Merge branch 'rod'
+8bef782 darribas -Merge branch 'rod' of https://github.com/pysal/pysal into rod
+03c1003 pedrovma -Merge pull request #315 from sjsrey/rod
+950fe8b Serge Rey -Replacing ROD with regular dictionary
+b1f009f Philip Stephens -Changes to release docs.
+028364a Sergio Rey -Update THANKS.txt
+94f5916 Sergio Rey -Update INSTALL.txt
+
+v<1.6.0>, 2013-07-31
+
+5fa9d09 darribas -silent_island_warning implemented for w_union
+6526c62 Sergio Rey -Update README.md
+ea826c1 darribas -silent_island_warning implemented for w_intersection
+335540a darribas -silent_island_warning implemented for w_difference
+0a156cb darribas -silent_island_warning implemented for w_symmetric_difference. Previous commit included support of silent_island_warning for WSP2W as well
+34d20d7 darribas -silent_island_warning implemented for w_clip
+499815d pedrovma -Test fixing...
+8778f75 pedrovma -Test fixing...
+a799a13 pedrovma -Test fixing...
+6482d81 pedrovma -Test fixing...
+2752b1b pedrovma -Test fixing...
+0c0a5bf pedrovma -Test fixing...
+bbf9dcb pedrovma -Test fixing...
+05c34ff pedrovma -Test fixing...
+8a3986a Serge Rey -- preparing for release, version updates
+9106cfe pedrovma -Matching travis results reg. precision issues.
+3cd0ce1 Serge Rey -- updating changelog
+74dadd6 pedrovma -Bump.
+c7774fb Serge Rey -- update THANKS.txt - testing travis for timing out
+cd98057 Serge Rey -- travis fix for multiprocessing permission error
+86702f8 Serge Rey -- start of changelog for 1.6
+3ee686d pedrovma -Reloading to check new results from Travis.
+2de1d21 Serge Rey -- docs
+ef72edc Serge Rey -- update docs
+0716581 Serge Rey -- deal with multiprocessing on travis
+b508c88 Serge Rey -- excluding network from 1.6 release
+ff13e31 pedrovma -Matching Travis results. Multiprocessing errors still an issue.
+5b916ba pedrovma -Adding Chow test on lambda and updating dynamics of regime_err_sep and regime_lag_sep in combom models.
+b6e687f darribas -Patch to include switch for island warning as proposed in #295. The method  is modified as well to include the switch
+7ea5f35 pedrovma -Fixing defaults
+62ca76b pedrovma -Updating documentation and checking if there are more than 2 regimes when regimes methods are used.
+3212249 pedrovma -Fixing documentation on 'name_regimes'
+a782d50 pedrovma -Updating tests for integration with pysal 1.6
+14f9181 pedrovma -Merging spreg_1.6 with my pysal fork.
+817f2c2 Serge Rey -- having build_lattice_shapefile also create the associated dbf file - useful for testing our contiguity builders against geoda since dbf is   required by the latter
+41d59a4 Serge Rey -- adding diagonal option to kernel weights in user.py
+506d808 Serge Rey -update when added
+b2ec3d4 Serge Rey -- updating api docs
+9d45496 Serge Rey -- example and doctests for spatial gini
+95635bb Serge Rey -updating release docs
+bd2f924 darribas -Fixing doctest of towsp method by including isinstance(wsp, ps.weights.weights.WSP)
+76183d7 darribas -Fixing doctest of towsp method by including type(wsp)
+0c54181 darribas -Adding  method in W that calls WSP class for convenience and elegance. Related to issue #226
+f3b23e8 Philip Stephens -adding source build to travis-ci
+60930e7 Philip Stephens -adding new url for downloads
+9bf7f5b Philip Stephens -modified release docs.
+f98d4a9 Philip Stephens -interim ci
+aa19028 Philip Stephens -Adding docs about installing in develop mode.
+674112f Philip Stephens -starting rewrite of install docs
+af0d9b3 Philip Stephens -working on doc tickets
+200e77e Serge Rey -handle ties in knnW in doctest
+d0d2dd2 Serge Rey -resetting README for pysal/pysal
+6afb6ac Serge Rey -- updating docs for new api in interation.py
+4c5572f Serge Rey -- updating tests for new api
+fabd16a Serge Rey -- refactored signatures to use numpy arrays rather than event class
+6367947 Serge Rey -- refactor knox for large samples
+5fad3b2 Serge Rey -- updating travis test
+06894d8 Serge Rey -- updated README
+8b06e63 Serge Rey -- so only i get email when i commit locally
+efbb7ff Serge Rey -- removing google pysal-dev circle
+9859bda Serge Rey -- turning off gmail circle
+51f6d3e Serge Rey -- fixing
+46b1084 Serge Rey --docos
+4e2c27a Philip Stephens -missing if statement added
+d1a83fd Serge Rey -- fixing docs
+8275d76 Serge Rey -- fix precision
+87ea5cc Philip Stephens -adding to authors and quick test fix for linux
+1cfb67f Serge Rey -cant easily remove idVariable, reverting
+5933d1e Serge Rey -removing idvariable from Distance - causes too many issues
+05f2573 Philip Stephens -removing coverage tests
+fcb8c6f Philip Stephens -Knox using KDTree.
+2237173 Serge Rey -with tests against previous implementation removed
+233e59a Serge Rey -speed comparison for change to query_pairs in kdtree
+fb78ea9 Serge Rey -removing test file
+4d04575 Philip Stephens -testing
+357a184 Serge Rey -second great idea
+1fafc2b Serge Rey -on a plane commit 1
+fef6eae Philip Stephens -fix
+86c17ac Serge Rey -- test file
+a619f62 Philip Stephens -interim ci
+1a9d881 Serge Rey -- knox test using kdtrees
+7459c44 Serge Rey -Fixing reference to missing shapefile Fixing one rounding error induced test
+5616b12 Serge Rey -refactored to avoid second loop in explicit queen or rook check
+d3d2f71 Philip Stephens -Revert "Changed doctest path calls to account for modified shapefile."
+da1d8a1 Philip Stephens -Changed doctest path calls to account for modified shapefile.
+f591c99 Philip Stephens -progress on permutations of knox for larger datasets
+8d31cde Serge Rey -Testing integration of spatialnet creation and reading into wed
+11de6f3 Jay -Fixed wed_modular.py
+077658a Serge Rey -adding new test case for wed extraction from a spatialnet shapefile
+bbb10b4 Philip Stephens -saving state of development
+44076b7 Serge Rey -- update doc test
+6fdd94d Serge Rey -- moved regions_from_graph into wed_modular - documented all functions and cleaned up
+5bd27c3 Serge Rey -- wrapping in functions
+3ad162f Serge Rey -- working version of wed_modular module - starting point for clean up
+2380f15 Philip Stephens -Copy of sphinx install docs. Closes #251
+5687700 Philip Stephens -tweaks to install instructions
+9ffd432 Serge Rey -- updating for switch from svn to git
+fdaf521 Philip Stephens -Fixing 250
+5ba4fdf Serge Rey -Fixes #249 Closes #249
+d89944d Pedro -Adding docs for each regimes estimator
+f03bb63 Serge Rey -- updating docs for spatial regimes in spreg
+a49d0f7 Philip Stephens -Adding info to setup script.
+1f27605 Philip Stephens -mainly docs
+04f8a31 Philip Stephens -Adding test coverage with nose, data collected and presented on coveralls.io
+6db978b Philip Stephens -last changes
+137e088 Philip Stephens -added bigdata parameter
+7ca81c2 Philip Stephens -got Knox stat working in alt form
+24c1fcc Philip Stephens -workign on refactoring the space-time matrices for the Knox test [ci-skip]
+28013f0 Serge Rey -- enumeration of cw edges for faces
+baa8f60 Serge Rey -- hole is now included and enumeration of links (cw) around nodes works for all nodes. - isolated nodes also handled in enumeration of links around nodes.
+33741c8 Serge Rey -- filaments inserted and pointers updated - have to add hole polygon and isolated nodes, but almost there!!!!!!!!!
+416d3db Serge Rey -- pointers updated for edges of connected components
+c34e274 Serge Rey -- convex/between edge test as start of testing for insertion of multiple   internal filaments in one region.
+78d96b1 Serge Rey -- filament insertion and pointer updates
+ced2c5b Serge Rey -- filament insertion (inc)
+ba4263f Jay -Logic roughed in for filaments [ci skip]
+cf3b0bc Jay -updated wed ipynb [ci skip]
+33ce81e Serge Rey -- refactoring of wed construction (incomplete)
+0fc16fc Jay -modular WED Pulled Apart 2 funcs in 1 cell
+bf73b90 Jay -modular WED
+3163377 Serge Rey -- new modular wed construction
+e50b31d Jay -added test_wed additions to test_wed2
+1cbc941 Serge Rey -- isolated nodes handled
+d28b97f Serge Rey -- isolated filament handled
+6188fd5 Serge Rey -- hole component handled
+a96040b Serge Rey -- getting connected components (current 14,15,16  and 25,26,27 are not   included)
+3aa31a5 Jay -Added boolean arg to include or exclude holes [ci skip]
+d07876d Jay -Filament identification [ci skip]
+0139ea5 Philip Stephens -Slight speed improvement getting rid of append calls in reading shapefile and building x,y lists.
+43010b5 Serge Rey -- fixed logic problem with enum for v1, starting on components
+8737918 Pedro -Adding more meaningful error message to inverse distance weights
+01f52f6 Serge Rey -- replacing code that got deleted previously
+7c4c6e1 Philip Stephens -Replacing deleted files.
+a8da725 Philip Stephens -added date support to spacetimeevents class, a date column to example dbf.
+90c4730 Philip Stephens -logic works, numeric test still failing
+b8e43e1 Philip Stephens -saving progress on interaction
+81f2408 Serge Rey -- handling external end-node-filament
+7de6253 Serge Rey -- adding end node filament handling - edge enumeration around node working
+f542b9a Serge -- adding end node filament handling - edge enumeration around node working
+d7e3a57 Philip Stephens -[ci skip] disabling nose-progressive so travis output looks best
+fe03013 Dani Arribas-Bel -Adding set of diversity indices to inequality module under _indices.py for now. Still lacks doctests, unittests, and a few others will be added
+951b6f5 Dani Arribas-Bel -Adding try/except to the import of Basemap to allow the use of the module when there is no Basemap installation
+89003eb Serge Rey -- adding wed for eberly example
+665ef22 Serge Rey -- fixed 7,2 failure
+71fc9ad Serge Rey -start of adding gini and other inequality measures
+f7b7bcc Phil Stephens -Adding nose-progressive plugin to test suite. Devs can run test suite with 'make test'.
+f5db7bf Serge Rey -- updating copyright
+07574b5 Serge Rey -- docs
+478d2cb Philip Stephens -Adding requirement. Removing redundancy.
+916a6ca Serge Rey -- more island check updates
+edd9960 Serge Rey -- more island check doctest changes
+ad1a91c Serge Rey -- updating doctests for island check
+ce77772 Serge Rey -- fixing doctests to incorporate new island warning
+554a30b Serge Rey -- silencing floating point warning
+4f76862 Serge Rey -- moving default contiguity builder back to binning from rtree
+b99665b Jay -Eberly
+d911344 Jay -mp removed, passing nosetests on my machine serial
+f005675 Serge Rey -improved binning algorithm for contiguity builder
+4a69557 Serge Rey -- double checking threshold in Distance Band - new example to show functionality
+7256f13 Serge Rey -- fix handling of idVariable for knnW
+31bb36e Jay -bug fixes [ci skip]
+a2d2dd4 Jay -WEberly - WED Building [ci skip]
+3abc55e Serge Rey -- fixing doctests for new check/reporting for islands
+756ac05 Serge Rey -- adding warning if islands exist upon W instantiation
+db097a6 Jay -Weberly, bug fix, c and cc link remaining
+d5cc6f9 Jay -All but start / end working
+033963d Jay -Integration to WEberly error fixed [ci skip]
+22b931a Serge Rey -- removing main for doc tests which can be run from nosetests. - updating testing docs
+bf753e9 Jay -Integration to WEberly started [ci skip]
+6506e07 Serge Rey -- typo
+aede375 Serge Rey -- replacing double quotes around multi word ids with strings joined with   underscores
+cf029e8 Serge Rey -- changes to wrap string ids in gwt writer - see https://github.com/pysal/pysal/issues/244#issuecomment-16707353
+626ac08 Serge Rey -- adding shapefile and variable name to gwt objects created in user space
+3c84bb0 Jay -Working version 4.19 [ci skip]
+7d77da9 darribas -Include warning in sp_att when rho is outside (-1, 1), ammends #243 although the true problem (pearsonr in diagnostics_tsls) will still raise an error
+3719d21 Jay -working WED [ci skip]
+b4ce294 Serge Rey -checking edges
+f4bb412 Jay -excessive print statements removed. ci skip
+9f7dee6 Jay -SUCCESS! ci skip
+9077615 Phil Stephens -Note, [ci skip] anywhere in your commit message causes Travis to NOT build a test run.
+cb072c4 Jay -getting there
+d3b36bc Serge Rey -correcting typo user told me about
+19ea051 Jay -trivial working
+b9ea577 Jay -eberly cycles - edge issue still
+d5153e3 Serge Rey -more refinement of wed from plannar graph
+edff44b Philip Stephens -adding git ignore file
+8093f21 Serge Rey -wed from minimum cycle basis
+b5bcead Serge Rey -handle filaments
+9a8927a Serge Rey -face extraction using horton algorithm
+10d66c1 Serge Rey -updating readme formatting
+59f3750 schmidtc -adding Universal newline support to csvReader, fixes #235
+09e813f Serge Rey -- updating notifications
+f8b0a26 Serge Rey -- fixing Distance.py and testing travis message
+d1ec0f2 Phil Stephens -quieting pip output and fix one doctest
+927e799 Phil Stephens -adding networkx, tweaks to travis config
+5971bb1 Serge Rey -neighbors from wed
+28f0e55 Serge Rey -adding robust segment intersection tests
+3bcac73 Serge Rey -adding doubly connected edge list to network module
+86f0fea darribas -Adding methods to read line and point shapefiles and improving the method to append different collections to one axes. Still in progress
+b61cb55 Serge Rey -- fixing introduced bug in knnW_arc
+801e78d Serge Rey -Handle point sets with large percentage of duplicate points
+dbafbc4 serge -update pointer to github
+427a620 Serge Rey -dealing with filaments
+23216ef Serge Rey -Fixed cw enumeration of links incident to a node
+0a51a53 Serge Rey -- readme
+5f4cab4 sjsrey -cw enumeration not working for all nodes
+f2e65d3 Serge Rey -- cw traversal of edges incident with a node
+90d150c sjsrey -- version debug for travis
+24598a8 sjsrey -- noting move to org
+9fb8a17 sjsrey -- fixing tutorial tests
+5a14f9e serge -- cleaning up weights tests
+6265b3b Serge Rey -- fixing doc tests
+7e8c4fe Serge Rey -- testing after move to org
+37fc8d4 Serge Rey -- testing post commit emails
+bed7f6e Phil Stephens -removed files
+eab2895 Phil Stephens -removed virginia_queen files
+bcef010 Serge Rey -- adding diagonal argument to Kernel weights - adding doctest evaluation to Distance.py
+02d27e9 Phil Stephens -adding libgeos-dev
+1126d71 Phil Stephens -pipe build output to null
+37dbb35 Phil Stephens -adding -y flag to pip uninstall
+06d56e9 Phil Stephens -adding libgeos_c install, pysal from pip
+4c53277 Phil Stephens -trying to quiet output, using Makefile
+74448e8 Phil Stephens -find setup.py
+4634fb1 Phil Stephens -test install in venv and build
+5d58723 Phil Stephens -working out travis-ci doctest configuration
+5e905d3 Phil Stephens -adding numpydoc
+33a5298 Phil Stephens -tweaks travis config
+5c85f50 Phil Stephens -tweaking service configs
+4ed1201 Josh Kalderimis -use the correct syntax for sysytem_site_packages
+954b6d2 Phil Stephens -stop!
+311eca8 Phil Stephens -ssp=true
+c601bca Phil Stephens -numpy first
+54b0afe Phil Stephens -ok, so travis is serious about not using system site packages.
+2b912cc Phil Stephens -doh
+28994df Phil Stephens -better yaml
+ce1d89e Phil Stephens -testing
+b535d3e Phil Stephens -testing
+440a772 Phil Stephens -tweaking pip requirements file
+34a74e2 Phil Stephens -tweaking travis file
+33b13aa Serge Rey -- new links
+8e09d7b Serge Rey -- setting up travis
+d33001e Sergio Rey -Update CHANGELOG.txt
+9d4de66 Serge Rey -- added authors
+ab672c9 Serge Rey -- modified knnW to speed up dict construction
+4edd2ab Serge Rey -- update cr
+39e6564 Phil Stephens -syncing install instructions with docs
+9e98db9 Phil Stephens -adding website favicon; chrome does not empty cache properly!!
+
+ * migration to github from svn
+   svn2git http://pysal.googlecode.com/svn --authors ~/Dropbox/pysal/src/pysal/authors.txt --verbose
+
+v<1.5.0>, 2013-01-31
+
+2013-01-29 20:36  phil.stphns
+
+	* doc/source/users/installation.txt: updating and simplifying user
+	  install instructions.
+
+2013-01-18 16:17  sjsrey
+
+	* Adding regime classes for all GM methods and OLS available in
+	  pysal.spreg, i.e. OLS, TSLS, spatial lag models, spatial error models
+	  and SARAR models. All tests and heteroskedasticity
+	  corrections/estimators currently available in pysal.spreg apply to
+	  regime models (e.g. White, HAC and KP-HET). With the regimes, it is
+	  possible to estimate models that have:
+	  -- Common or regime-specific error variance;
+	  -- Common or regime-specific coefficients for all variables or for a
+	  selection of variables;
+	  -- Common or regime-specific constant term;
+	  - Various refactoring to streamline code base and improve long term
+	    maintainability
+	    - Contributions from Luc Anselin, Pedro Amaral, Daniel Arribas-Bel
+	      and David Folch
+
+2013-01-18 14:08  schmidtc
+
+	* pysal/common.py: implemented deepcopy for ROD, see #237
+
+2013-01-08 12:28  dreamessence
+
+	* pysal/contrib/spatialnet/__init__.py: Adding __init__.py to make it importable
+
+2012-12-31 22:53  schmidtc
+
+	* pysal/core/IOHandlers/gwt.py: adding kwt support, see #232
+
+2012-12-21 20:53  sjsrey at gmail.com
+
+	* pysal/__init__.py, pysal/cg/rtree.py,
+	  pysal/contrib/weights_viewer/weights_viewer.py,
+	  pysal/weights/weights.py: - turning off randomization in rtree
+
+2012-12-06 16:34  dfolch
+
+	* pysal/contrib/shapely_ext.py: adding unary_union() to shapely
+	  contrib; note this only works with shapely version 1.2.16 or higher
+
+
+2012-11-29 13:39  dreamessence 
+
+	* pysal/contrib/viz/mapping.py: Added option in setup_ax to pass
+	  pre-existing axes object to append.  It is optional and it enables,
+	  for instance, to embed several different maps in one single figure
+
+2012-11-20 00:23  dfolch
+
+	* pysal/contrib/shapely_ext.py: adding shapely's cascaded_union
+	  function to contrib
+
+2012-11-12 18:08  dreamessence
+
+	* pysal/contrib/viz/mapping.py: -Adding transCRS method to convert
+	  points from one prj to another arbitrary one -Adding map_poly_shp to
+	  be able to plot shapefiles in arbitrary projections, not needing to
+	  be in lonlat and not depending on Basemap
+
+2012-11-09 15:40  sjsrey at gmail.com
+
+	* pysal/weights/weights.py:
+	  - distinguish between intrinsic symmetry and general symmetry
+	
+2012-11-02 17:48  schmidtc
+
+	* pysal/weights/user.py, pysal/weights/util.py: Adding Minkowski
+	  p-norm to min_threshold_dist_from_shapefile, see issue #221
+
+2012-10-19 22:35  sjsrey at gmail.com
+
+	* pysal/weights/weights.py:
+	  explicitly prohibit chaining of transformations - all
+	  transformations are only applied to the original weights at
+	  instantiation
+
+2012-10-19 17:38  sjsrey at gmail.com
+
+	* pysal/spatial_dynamics/markov.py:
+	  - fixing bug in permutation matrix to reorder kronecker product in
+	    the join test
+
+2012-10-17 17:55  sjsrey at gmail.com
+
+	* pysal/weights/util.py:
+	  -
+	  higher order contiguity for WSP objects
+
+2012-10-17 15:43  sjsrey at gmail.com
+
+	* pysal/weights/user.py:
+	  -
+	  id_order attribute was always NONE for wsp created from
+	  
+	  queen/rook_from_shapefile with sparse=True
+
+2012-10-16 19:25  schmidtc
+
+	* pysal/weights/util.py: improving memory usage of
+	  get_points_array_from_shapefile, no need to read entire shapefile
+	  into memory.
+
+2012-10-15 00:44  dreamessence
+
+	* pysal/contrib/viz/mapping.py: First attempt to refactor Serge's code
+	  for choropleth mapping.  It now offers a more general and flexible
+	  architecture.  Still lots of work and extensions left.  The module
+	  is explained in a notebook available as a gist at
+	  https://gist.github.com/3890284 and viewable at
+	  http://nbviewer.ipython.org/3890284/
+
+2012-10-12 18:34  schmidtc
+
+	* pysal/contrib/spatialnet/spatialnet.py: modified SpatialNetwork.snap
+	  to calculate and return the snapped point
+
+2012-10-12 17:05  dfolch
+
+	* pysal/contrib/viz/mapping.py: made edits to unique_values_map to
+	  allow for unlimited number of categories; I commented out the
+	  previous code so these changes can easily be rolled back if it
+	  breaks something somewhere else
+
+2012-10-12 15:03  schmidtc
+
+	* pysal/cg/segmentLocator.py: Fixing issue with segmentLocator, when
+	  query point is extremely far from the grid boundary, overflow errors
+	  were causing the KDTree to not return any results.  Changed both
+	  KDtree's to use Float64 and share the same data.  Previously,
+	  cKDTree was using float64 and KDtree was using int32.
+
+2012-10-11 08:12  dreamessence
+
+	* pysal/contrib/viz/__init__.py: Adding __init__.py to viz module to
+	  make it importable
+
+2012-08-31 02:57  phil.stphns
+
+	* pysal/spreg/tests/test_diagnostics.py,
+	  pysal/spreg/tests/test_diagnostics_sp.py,
+	  pysal/spreg/tests/test_diagnostics_tsls.py,
+	  pysal/spreg/tests/test_error_sp.py,
+	  pysal/spreg/tests/test_error_sp_het.py,
+	  pysal/spreg/tests/test_error_sp_het_sparse.py,
+	  pysal/spreg/tests/test_error_sp_hom.py,
+	  pysal/spreg/tests/test_error_sp_hom_sparse.py,
+	  pysal/spreg/tests/test_error_sp_sparse.py,
+	  pysal/spreg/tests/test_ols.py,
+	  pysal/spreg/tests/test_ols_sparse.py,
+	  pysal/spreg/tests/test_probit.py,
+	  pysal/spreg/tests/test_twosls.py,
+	  pysal/spreg/tests/test_twosls_sp.py,
+	  pysal/spreg/tests/test_twosls_sp_sparse.py,
+	  pysal/spreg/tests/test_twosls_sparse.py:
+	  - autopep8 -iv spreg/tests/*.py - nosetests pysal - no fixes needed
+
+2012-08-31 01:16  phil.stphns
+
+	* pysal/spreg/diagnostics.py,
+	  pysal/spreg/diagnostics_sp.py,
+	  pysal/spreg/diagnostics_tsls.py,
+	  pysal/spreg/error_sp.py,
+	  pysal/spreg/error_sp_het.py,
+	  pysal/spreg/error_sp_hom.py,
+	  pysal/spreg/ols.py,
+	  pysal/spreg/probit.py,
+	  pysal/spreg/robust.py,
+	  pysal/spreg/summary_output.py,
+	  pysal/spreg/twosls.py,
+	  pysal/spreg/twosls_sp.py,
+	  pysal/spreg/user_output.py,
+	  pysal/spreg/utils.py:
+	  -
+	  autopep8 -iv spreg/*.py - fixed autopep8-introduced doctest failures
+	  - fixed lingering scientific notation test failures
+
+2012-08-31 00:26  phil.stphns
+
+	* pysal/esda/gamma.py,
+	  pysal/esda/join_counts.py,
+	  pysal/esda/mapclassify.py,
+	  pysal/esda/mixture_smoothing.py,
+	  pysal/esda/moran.py,
+	  pysal/esda/smoothing.py:
+	  -
+	  autopep8 fixes - make sure to run unit and doc tests before
+	  committing - one autofix breaks long lines, and thus breaks some
+	  doctests; must be
+	  
+	  fixed manually
+
+2012-08-31 00:10  phil.stphns
+
+	* pysal/esda/getisord.py:
+	  -
+	  using autopep8 module - call: autopep8 -vi getisord.py
+
+2012-08-30 23:18  phil.stphns
+
+	* pysal/esda/geary.py:
+	  -
+	  pep8 clear - removed wildcard import
+
+2012-08-26 22:53  phil.stphns
+
+	* pysal/spatial_dynamics/directional.py,
+	  pysal/spatial_dynamics/ergodic.py,
+	  pysal/spatial_dynamics/interaction.py,
+	  pysal/spatial_dynamics/markov.py,
+	  pysal/spatial_dynamics/rank.py,
+	  pysal/spatial_dynamics/util.py:
+	  -pep8 and pylint fixes -clean wildcard imports
+
+2012-08-26 21:03  phil.stphns
+
+	* pysal/region/maxp.py,
+	  pysal/region/randomregion.py:
+	  -
+	  cleaning up imports
+
+2012-08-26 18:16  phil.stphns
+
+	* pysal/region/maxp.py:
+	  -
+	  style fixes with pep8 - cmd line call: pep8 --show-source
+	  --ignore=E128,E302,E501,E502,W293,W291
+	  
+	  region/maxp.py
+
+2012-08-26 17:47  phil.stphns
+
+	* pysal/common.py,
+	  pysal/examples/README.txt,
+	  pysal/region/components.py,
+	  pysal/region/randomregion.py:
+	  -
+	  using pep8 module
+
+2012-08-24 20:47  schmidtc
+
+	* pysal/network,
+	  pysal/network/__init__.py: adding network module
+
+2012-08-21 22:53  phil.stphns
+
+	* doc/source/_templates/ganalytics_layout.html:
+	  -
+	  updating analytics tracker
+
+2012-08-17 17:11  sjsrey at gmail.com
+
+	* pysal/contrib/spatialnet/util.py:
+	  -
+	  more utility functions for pysal
+	  -
+	  networkx interop
+
+2012-08-16 23:44  phil.stphns
+
+	* setup.py:
+	  -
+	  tweak for build names
+
+2012-08-12 13:15  dreamessence
+
+	* doc/source/index.txt:
+	  Adding announcement links to landing page
+
+2012-08-11 17:38  sjsrey
+
+	* LICENSE.txt:
+	  -
+	  update
+
+2012-08-09 17:19  phil.stphns
+
+	* doc/source/developers/pep/pep-0008.txt:
+	  updating
+	  spatial
+	  db
+	  pep
+
+2012-08-08 17:22  schmidtc
+
+	* pysal/weights/Distance.py:
+	  Fixing bug in Kernel weights that causes erroneous results when
+	  using ArcDistances.  See issue #218.
+
+2012-08-04 21:14  sjsrey
+
+	* doc/source/developers/docs/index.txt:
+	  -
+	  fixed
+	  links
+
+2012-08-04 21:03  sjsrey
+
+	* doc/source/developers/docs/index.txt:
+	  -
+	  hints
+	  on
+	  editing
+	  docs
+	  
+
+2012-08-04 20:14  phil.stphns
+
+	* doc/source/developers/pep/pep-0011.txt:
+	  note
+	  about
+	  travis-ci
+	  and
+	  github
+
+2012-08-04 16:24  sjsrey
+
+	* doc/source/developers/pep/pep-0011.txt:
+	  PEP-0011
+	  
+
+2012-08-04 16:22  sjsrey
+
+	* doc/source/developers/pep/index.txt:
+	  -
+	  PEP 0011 Move from Google Code to Github
+
+2012-08-04 04:42  sjsrey
+
+	* doc/source/index.txt:
+	  - broken link
+
+2012-08-04 04:35  sjsrey
+
+	* doc/source/index.txt:
+	  - news updates
+
+2012-08-04 04:24  sjsrey
+
+	* doc/source/index.txt:
+	  - reorg
+
+2012-08-02 02:32  sjsrey
+
+	* pysal/examples/__init__.py:
+	  -
+	  moving back to r1049 but leaving r1310 in history for ideas on
+	  moving forward - we need to distinguish between using examples in
+	  the doctests (which the users see) and for the developers since we
+	  are no longer distributing examples with the source
+
+2012-08-02 01:49  sjsrey
+
+	* pysal/examples/__init__.py:
+	  -
+	  correct conditional this time (i hope)
+
+2012-08-02 01:36  sjsrey
+
+	* pysal/examples/__init__.py:
+	  -
+	  compromise
+	  -
+	  returns pth rather than None if file does not exist
+
+2012-08-02 00:58  sjsrey
+
+	* pysal/examples/__init__.py:
+	  -
+	  link to examples download
+
+2012-08-02 00:42  sjsrey
+
+	* pysal/examples/__init__.py:
+	  -
+	  explicit check if examples are actually present
+
+
+
+
+
+v<1.4.0>, 2012-07-31
+
+2013-01-31 
+
+
+2012-07-31 21:30  sjsrey at gmail.com
+
+	* pysal/spatial_dynamics/ergodic.py,
+	  pysal/spatial_dynamics/rank.py:
+	  - docs/example
+
+2012-07-31 20:47  sjsrey at gmail.com
+
+	* pysal/spreg/tests/test_error_sp_hom.py:
+	  - rounding/precision issue
+
+2012-07-31 20:27  sjsrey at gmail.com
+
+	* pysal/spatial_dynamics/directional.py,
+	  pysal/spatial_dynamics/tests/test_directional.py:
+	  - fixing pvalue bug
+
+2012-07-31 20:24  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/dynamics.txt:
+	  - fixed rounding problem
+
+2012-07-31 19:58  sjsrey at gmail.com
+
+	* doc/source/index.txt,
+	  doc/source/users/tutorials/autocorrelation.txt,
+	  doc/source/users/tutorials/dynamics.txt,
+	  doc/source/users/tutorials/econometrics.txt,
+	  doc/source/users/tutorials/fileio.txt,
+	  doc/source/users/tutorials/index.txt,
+	  doc/source/users/tutorials/intro.txt,
+	  doc/source/users/tutorials/region.txt,
+	  doc/source/users/tutorials/smoothing.txt,
+	  doc/source/users/tutorials/weights.txt:
+	  - adding links to API for more details
+
+2012-07-31 19:05  sjsrey at gmail.com
+
+	* pysal/spatial_dynamics/directional.py:
+	  - consistency on pvalues for randomization
+
+2012-07-31 19:02  sjsrey at gmail.com
+
+	* pysal/weights/Distance.py:
+	  - docs
+
+2012-07-31 18:58  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/dynamics.txt:
+	  - seed issue 
+
+2012-07-31 18:36  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt:
+	  - closing issue 214
+
+2012-07-31 18:19  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt:
+	  - fixing random.seed issues in doctests
+
+2012-07-31 17:31  schmidtc
+
+	* pysal/cg/shapes.py,
+	  pysal/cg/tests/test_shapes.py:
+	  Fixing small bugs with VerticalLines and testing
+
+2012-07-31 16:26  sjsrey at gmail.com
+
+	* doc/source/developers/guidelines.txt,
+	  doc/source/users/installation.txt:
+	  - updating docs 
+
+
+
+2012-07-26 15:24  schmidtc
+
+	* pysal/core/FileIO.py,
+	  pysal/core/Tables.py:
+	 Fixing issue #190
+
+2012-07-24 16:32  schmidtc
+
+	* pysal/cg/sphere.py:
+	Allowing linear2arcdist function to maintain 'inf'; this allows compatibility with Scipy's KDTree and addresses issue 208.
+
+2012-07-24 16:07  schmidtc
+
+	* pysal/cg/locators.py,
+	  pysal/core/FileIO.py,
+	  pysal/core/Tables.py:
+	  Addressing issue 212, renaming nested and private classes to begin with an underscore.  By default sphinx does not try to document private objects, which avoids what appears to be a bug in Sphinx.
+
+2012-07-17 22:06  sjsrey at gmail.com
+
+	* pysal/spreg/probit.py: pedro doc fixes
+
+2012-07-17 15:07  schmidtc
+
+	* pysal/cg/segmentLocator.py,
+	  pysal/cg/tests/test_segmentLocator.py: Cleaned up fix for Issue 211
+
+2012-07-13 22:50  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt: fixing sphinx weirdness in footnotes 
+
+2012-07-13 22:37  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt: update for new default parameter values
+
+2012-07-13 22:13  sjsrey at gmail.com
+
+	* pysal/esda/geary.py,
+	  pysal/esda/tests/test_geary.py: consistency on transformation and permutation args
+
+2012-07-13 19:59  sjsrey at gmail.com
+
+	* doc/source/users/tutorials/dynamics.txt,
+	  pysal/__init__.py,
+	  pysal/spatial_dynamics/rank.py: - update user tutorial and __init__
+
+2012-07-13 19:33  sjsrey at gmail.com
+
+	* pysal/spatial_dynamics/rank.py,
+	  pysal/spatial_dynamics/tests/test_rank.py: - O(n log n) algorithm for spatial tau (old one was O(n^2)) - closing ticket http://code.google.com/p/pysal/issues/detail?id=83
+
+2012-07-13 17:57  schmidtc
+
+	* pysal/core/IOHandlers/pyDbfIO.py,
+	  pysal/core/IOHandlers/tests/test_pyDbfIO.py: Adding better support for writing Null values to DBF.  See issue #193
+
+2012-07-13 15:55  schmidtc
+
+	* pysal/core/util/shapefile.py,
+	  pysal/core/util/tests/test_shapefile.py: Cleaning up support for ZM points, polylines and polygons in the shapefile reader. Added unit tests for same.
+
+2012-07-13 15:42  sjsrey at gmail.com
+
+	* doc/source/library/esda/gamma.txt: - update version info 
+
+2012-07-13 15:37  sjsrey at gmail.com
+
+	* doc/source/library/esda/gamma.txt,
+	  doc/source/library/esda/index.txt: - adding gamma to api docs
+
+2012-07-13 00:21  sjsrey at gmail.com
+
+	* pysal/esda/gamma.py: optimizations 
+
+2012-07-12 21:28  schmidtc
+
+	* pysal/core/IOHandlers/pyDbfIO.py: Disabling missing value warning for DBF files.  See issue #185
+
+2012-07-12 21:07  schmidtc
+
+	* pysal/cg/segmentLocator.py,
+	  pysal/cg/shapes.py,
+	  pysal/cg/tests/test_segmentLocator.py,
+	  pysal/contrib/spatialnet/spatialnet.py: Adding unittests for segmentLocator (including one that fails see #211). Added VerticalLine class to represent vertical LineSegments. Updated __all__ in segmentLocator. Minor comment formatting in spatialnet.
+
+2012-07-12 19:41  lanselin at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt: tutorial for gamma index
+
+2012-07-12 19:40  lanselin at gmail.com
+
+	* pysal/esda/gamma.py,
+	  pysal/esda/tests/test_gamma.py: gamma with generic function
+
+2012-07-12 14:17  sjsrey at gmail.com
+
+	* pysal/__init__.py: - gamma index added
+
+2012-07-12 03:14  lanselin at gmail.com
+
+	* pysal/esda/tests/test_gamma.py: tests for gamma
+
+2012-07-12 03:13  lanselin at gmail.com
+
+	* pysal/esda/gamma.py: gamma index of spatial autocorrelation
+
+2012-07-12 03:11  lanselin at gmail.com
+
+	* pysal/esda/__init__.py: gamma index 
+
+2012-07-11 21:32  lanselin at gmail.com
+
+	* pysal/esda/join_counts.py,
+	  pysal/esda/tests/test_join_counts.py: join counts without analytical results, new permutation 
+
+2012-07-11 21:32  lanselin at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt: updated docs for join counts
+
+2012-07-10 21:13  lanselin at gmail.com
+
+	* doc/source/users/tutorials/autocorrelation.txt: docs for join count in autocorrelation
+
+2012-07-10 21:12  lanselin at gmail.com
+
+	* pysal/esda/join_counts.py,
+	  pysal/esda/tests/test_join_counts.py: additional test in join counts, docs added
+
+2012-07-10 19:24  lanselin at gmail.com
+
+	* pysal/esda/join_counts.py,
+	  pysal/esda/tests/test_join_counts.py: join counts with permutations for BB, updated tests to include permutations
+
+2012-07-09 04:22  sjsrey
+
+	* pysal/weights/weights.py:
+	  - fixing bug luc identified with regard to mean_neighbor property.  wrong key name was used in cache dictionary.  
+
+2012-07-07 17:00  sjsrey
+
+	* pysal/__init__.py: update for spreg and contrib inclusion
+
+
+2012-07-07 16:51  sjsrey
+
+	* pysal/spatial_dynamics/markov.py: - updating doc strings
+
+2012-07-07 16:17  sjsrey
+
+	* pysal/spreg/probit.py:
+	  - fixing doc string and refs 
+
+2012-07-06 21:58  dfolch
+
+	* doc/source/library/spreg/probit.txt: txt file to include probit in the HTML docs
+
+2012-07-06 21:11  dfolch
+
+	* pysal/spreg/tests/test_ols_sparse.py: fixing unittest error; still no solution to scientific notation formatting in doctests
+
+2012-07-06 20:24  dfolch
+
+	* pysal/spreg/__init__.py,
+	  pysal/spreg/diagnostics.py,
+	  pysal/spreg/diagnostics_sp.py,
+	  pysal/spreg/diagnostics_tsls.py,
+	  pysal/spreg/error_sp.py,
+	  pysal/spreg/error_sp_het.py,
+	  pysal/spreg/error_sp_hom.py,
+	  pysal/spreg/ols.py,
+	  pysal/spreg/probit.py,
+	  pysal/spreg/robust.py,
+	  pysal/spreg/summary_output.py,
+	  pysal/spreg/tests/test_diagnostics.py,
+	  pysal/spreg/tests/test_diagnostics_sp.py,
+	  pysal/spreg/tests/test_diagnostics_tsls.py,
+	  pysal/spreg/tests/test_error_sp.py,
+	  pysal/spreg/tests/test_error_sp_het.py,
+	  pysal/spreg/tests/test_error_sp_het_sparse.py,
+	  pysal/spreg/tests/test_error_sp_hom.py,
+	  pysal/spreg/tests/test_error_sp_hom_sparse.py,
+	  pysal/spreg/tests/test_error_sp_sparse.py,
+	  pysal/spreg/tests/test_ols.py,
+	  pysal/spreg/tests/test_ols_sparse.py,
+	  pysal/spreg/tests/test_probit.py,
+	  pysal/spreg/tests/test_twosls.py,
+	  pysal/spreg/tests/test_twosls_sp.py,
+	  pysal/spreg/tests/test_twosls_sp_sparse.py,
+	  pysal/spreg/tests/test_twosls_sparse.py,
+	  pysal/spreg/twosls.py,
+	  pysal/spreg/twosls_sp.py,
+	  pysal/spreg/user_output.py,
+	  pysal/spreg/utils.py: -Adding classic probit regression class -Adding spatial diagnostics for probit -Allowing x parameter to be either a numpy array or scipy sparse matrix in all regression classes -Adding additional unit tests -Various refactoring to streamline code base and improve long term maintainability -Contributions from Luc Anselin, Pedro Amaral, Daniel Arribas-Bel, David Folch and Nicholas Malizia
+
+2012-07-03 18:59  sjsrey
+
+	* pysal/spatial_dynamics/markov.py,
+	  pysal/spatial_dynamics/tests/test_markov.py: - refactor significant move_types for clarity and fixing a logic bug
+
+
+2012-06-20 04:50  sjsrey at gmail.com
+
+	* doc/source/developers/docs/index.txt:
+	  - added section for how to write a tutorial for new modules 
+2012-06-20 02:45  sjsrey
+
+	* doc/source/developers/docs/index.txt:
+	  - updating doc building instructions
+
+2012-06-06 18:58  phil.stphns
+
+	* .build-osx10.6-py26.sh,
+	  .build-osx10.6-py27.sh:
+	  - local modifications for Frameworks builds
+
+2012-06-05 20:56  phil.stphns
+
+	* .build-osx10.6-py26.sh,
+	  .build-osx10.6-py27.sh,
+	  .build-osx10.7-py27.sh, .runTests.sh:
+	  - adding experimental build and test scripts.
+
+2012-06-05 16:43  schmidtc
+
+	* pysal/cg/segmentLocator.py,
+	  pysal/cg/shapes.py,
+	  pysal/contrib/spatialnet/spatialnet.py:
+	  initial snap function for spatialnet
+
+2012-06-05 16:38  schmidtc
+
+	* pysal/core/IOHandlers/pyShpIO.py,
+	  pysal/core/util/shapefile.py,
+	  pysal/core/util/tests/test_shapefile.py: Adding PolygonZ support to Shapefile IO
+
+2012-05-24 21:57  sjsrey
+
+	* pysal/esda/mapclassify.py: - truncate option for fisher_jenks sampling
+
+2012-05-15 20:08  schmidtc
+
+	* pysal/cg/segmentLocator.py,
+	  pysal/cg/shapes.py: Added query to SegmentLocator
+
+2012-05-11 22:17  sjsrey
+
+	* pysal/esda/mapclassify.py: - added Fisher_Jenks_Sampled
+
+2012-05-11 00:45  mhwang4
+
+	* pysal/contrib/network/distances.csv,
+	  pysal/contrib/network/simulator.py,
+	  pysal/contrib/network/test_lincs.py,
+	  pysal/contrib/network/test_weights.py,
+	  pysal/contrib/network/weights.py: adding test code for distance-file-based weight generator; updates on simulator 
+
+2012-05-10 22:37  mhwang4
+
+	* pysal/contrib/network/klincs.py,
+	  pysal/contrib/network/lincs.py,
+	  pysal/contrib/network/test_klincs.py,
+	  pysal/contrib/network/test_lincs.py: adding test code for network-constrained lisa
+
+2012-05-10 21:11  mhwang4
+
+	* pysal/contrib/network/crimes.dbf,
+	  pysal/contrib/network/crimes.shp,
+	  pysal/contrib/network/crimes.shx,
+	  pysal/contrib/network/test_klincs.py: test code for local K function
+
+2012-05-08 18:05  mhwang4
+
+	* pysal/contrib/network/streets.dbf,
+	  pysal/contrib/network/streets.shp,
+	  pysal/contrib/network/streets.shx,
+	  pysal/contrib/network/test_network.py: adding a test data set
+
+2012-05-08 16:34  schmidtc
+
+	* pysal/cg/segmentLocator.py,
+	  pysal/cg/shapes.py,
+	  pysal/core/FileIO.py: Adding start of segmentLocator, adding minimal slicing support to FileIO
+
+
+2012-05-03 17:03  schmidtc
+
+	* pysal/cg/shapes.py,
+	  pysal/cg/tests/test_shapes.py: Adding solve for x support to Line. Cleaning up LineSegment's Line method.
+
+2012-04-20 17:48  schmidtc
+
+	* pysal/cg/shapes.py: adding arclen method to Chain object.
+
+2012-04-19 16:37  dfolch
+
+	* pysal/weights/Distance.py: reducing number of distance queries in Kernel from n^2 to n
+
+2012-04-17 21:20  schmidtc
+
+	* pysal/contrib/spatialnet/spatialnet.py: adding distance
+
+2012-04-17 19:46  schmidtc
+
+	* pysal/contrib/spatialnet/cleanNetShp.py,
+	  pysal/contrib/spatialnet/spatialnet.py: Adding FNODE/TNODE to dbf when cleaning shapefiles. Added util function createSpatialNetworkShapefile Added SpatialNetwork class
+
+2012-04-17 15:32  schmidtc
+
+	* pysal/contrib/weights_viewer/weights_viewer.py: "revert back to the background when the point is outside of any unit" - request from serge
+
+2012-04-11 02:50  schmidtc
+
+	* pysal/cg/kdtree.py: Fixing user submitted bug,issue #206.
+
+2012-04-10 22:00  dreamessence
+
+	* pysal/weights/Wsets.py: Including w_clip in __all__
+
+2012-04-10 21:58  dreamessence
+
+	* pysal/weights/Wsets.py: Adding w_clip method to clip W matrices (sparse and/or pysal.W) with a second (binary) matrix
+
+2012-04-10 21:57  schmidtc
+
+	* pysal/contrib/spatialnet/beth_roads.shp,
+	  pysal/contrib/spatialnet/beth_roads.shx,
+	  pysal/contrib/spatialnet/cleanNetShp.py: Adding network shapefile cleaning tools and temporary sample data.  
+
+2012-04-10 21:48  sjsrey
+
+	* pysal/contrib/spatialnet/util.py: - more stubs for util mod
+
+2012-04-10 19:58  sjsrey
+
+	* pysal/contrib/spatialnet/util.py: - start of util module
+
+2012-04-03 20:43  sjsrey
+
+	* pysal/contrib/spatialnet: - new contrib module - integrate geodanet functional (move over from network) - wrap networkx
+
+2012-04-03 01:21  schmidtc
+
+	* pysal/cg/rtree.py: Adding pickle support to RTree
+
+
+2012-03-28 23:27  mhwang4 
+	* pysal/contrib/network/kernel.py,
+	  pysal/contrib/network/kfuncs.py,
+	  pysal/contrib/network/test_access.py,
+	  pysal/contrib/network/test_kernel.py,
+	  pysal/contrib/network/test_kfuncs.py,
+	  pysal/contrib/network/test_network.py: adding examples for network-related modules
+
+2012-03-19 15:33  schmidtc
+
+	* pysal/core/IOHandlers/pyDbfIO.py: Adding support for writing Null dates
+
+2012-03-14 21:04  phil.stphns
+
+	* doc/source/developers/testing.txt,
+	  doc/source/users/installation.txt: Small changes to user install instructions to highlight the ease with which pysal can be installed ;-> And, developer instructions for running the test suite from within a session if desired.
+
+2012-03-03 00:00  phil.stphns
+
+	* pysal/spatial_dynamics/markov.py: Potential source of dev docs pngmath latex fail.
+
+2012-02-24 23:29  mhwang4
+
+	* pysal/contrib/network/network.py: fixing bug in network.py
+
+2012-02-20 19:50  phil.stphns
+
+	* doc/source/developers/py3k.txt: Developer doc to explain setting up PySAL for Python3.
+
+2012-02-20 16:18  schmidtc
+
+	* pysal/esda/__init__.py: removing invalid __all__ from esda's init.  See #194
+
+2012-02-16 23:15  phil.stphns
+
+	* pysal/__init__.py,
+	  pysal/core/util/shapefile.py: Minor changes to imports that cause py3tool to stumble.
+
+2012-02-15 23:16  phil.stphns
+
+	* doc/source/developers/py3k.txt,
+	  doc/source/users/installation.txt: Modified links in user installation instructions.  Added more steps for developers setting up Python3 dev environments on OSX.
+
+2012-02-14 21:55  schmidtc
+
+	* pysal/esda/getisord.py: fixing side effect caused when changing the shape of y, creating a new view with reshape instead.
+
+2012-02-14 21:21  schmidtc
+
+	* pysal/esda/getisord.py: optimizing G_Local
+
+2012-02-14 20:37  schmidtc
+
+	* pysal/esda/getisord.py: optimizing G
+
+2012-02-14 00:21  phil.stphns
+
+	* doc/source/developers/index.txt,
+	  doc/source/developers/py3k.txt,
+	  doc/source/developers/release.txt: Adding early docs on Python 3 support. Modifying release instructions.
+
+v<1.3.0>, 2012-01-31
+
+    * core/IOHandlers/pyDbfIO.py: Addressing issue #186
+
+    * cg/shapes.py: fixing small bug in polygon constructor that causes
+      an exception when an empty list is passed in for the holes.
+
+    * cg/standalone.py: removing standalone centroid method. see issue
+      #138.
+
+    * esda/mapclassify.py, esda/tests/test_mapclassify.py: - new
+      implementation of fisher jenks
+
+    * spreg/__init__.py, spreg/diagnostics_sp.py,
+      spreg/diagnostics_tsls.py, spreg/error_sp.py,
+      spreg/error_sp_het.py, spreg/error_sp_hom.py, spreg/ols.py,
+      spreg/robust.py, spreg/tests, spreg/twosls.py,
+      spreg/twosls_sp.py, spreg/user_output.py, spreg/utils.py: Adding
+      the following non-spatial/spatial regression modules:
+      * Two Stage Least Squares
+      * Spatial Two Stage Least Squares
+      * GM Error (KP 98-99)
+      * GM Error Homoskedasticity (Drukker et al., 2010)
+      * GM Error Heteroskedasticity (Arraiz et al., 2010)
+      * Anselin-Kelejian test for residual spatial autocorrelation of
+      residuals from IV regression
+      
+      Adding also utility functions and other helper classes.
+      
+    * cg/standalone.py: slight improvement to get_shared_segments, in
+      part to make it more readable.
+
+    * cg/shapes.py, cg/tests/test_standalone.py: adding <,<=,>,>= tests
+      to Point, this fixes a bug in the get_shared_segments function
+      that was causing some LineSegments to be incorrectly ordered
+      because the default memory address was being used instead of the
+      points location.
+
+    * core/IOHandlers/tests/test_wkt.py, core/IOHandlers/wkt.py,
+      core/util/tests/test_wkt.py, core/util/wkt.py,
+      weights/tests/test_Distance.py, weights/tests/test_user.py,
+      weights/user.py: Fixing small numerical errors in testing that
+      resulted from changing the centroid algorithm.
+
+    * esda/moran.py: another optimization for __crand see issue #188
+
+    * weights/util.py: Added option for row-standardized SW in lat2SW.
+      Implementing suggestion from Charlie in Issue 181 from
+      StackOverflow
+
+    * esda/moran.py: another optimization to __crand, see issue #188
+      for details.
+
+    * esda/moran.py: Optimized __crand in Local_Moran
+
+    * cg/shapes.py, cg/standalone.py, contrib/shapely_ext.py:
+      Addressing issue #138, centroids for polygons with holes.
+      Fixing some issues with the shapely wrapper and our implementation
+      of __geo_interface__
+
+    * weights/Distance.py: previous 'fix' to uniform kernel did not
+      have correct dimensions
+
+    * core/IOHandlers/arcgis_txt.py, core/IOHandlers/dat.py,
+      weights/user.py: fixing rounding errors with docstrings
+
+    * contrib/README, contrib/shared_perimeter_weights.py: Adding
+      shared perimeter weights, see Issue #46
+
+    * contrib/README, contrib/shapely_ext.py: moving shapely_ext into
+      contrib
+
+    * core/IOHandlers/pyDbfIO.py: Fixing issue with scientific notation
+      in DBF files. #182
+
+    * core/IOHandlers/pyShpIO.py: clockwise testing should only be
+      performed on Polygons. #183
+
+    * spreg/diagnostics_sp.py: Switching ints to floats in variance of
+      Morans I for residuals to get correct results
+
+    * core/util/shapefile.py, examples/__init__.py: Add a "get_path"
+      function to examples module.
+      pysal.examples.get_path('stl_hom.shp') will always return the
+      correct system path to stl_hom.shp, no matter where it's run
+      from. This is useful for testing.
+      Modified shapefile tests to use the new function.
+
+    * spreg/diagnostics.py: Adding check on condition_index to pick OLS
+      (xtx) or IV (hth) model
+
+    * core/IOHandlers/template.py: Updating template to pass unit
+      testing.
+
+    * core/util/shapefile.py: Fixing issue #180. Making shapefile
+      opener case insensitive.
+
+    * spatial_dynamics/interaction.py,
+      spatial_dynamics/tests/test_interaction.py: Adding modified Knox
+      and changes to existing tests in spatial_dynamics.
+
+    * core/IOHandlers/arcgis_txt.py,
+      core/IOHandlers/tests/test_arcgis_txt.py: fixing arcgis_txt.py so
+      that it ignores self-neighbors with zero weights
+
+    * core/FileIO.py: Updating library README.
+      Removing docstrings from FileIO module.
+
+    * contrib/README: adding contrib to installer and adding initial
+      README
+
+    * core/IOHandlers/gwt.py: rewrote GWT reader to avoid list appends.
+      resulted in speed up of about 12x.
+
+    * core/IOHandlers/pyDbfIO.py: implementing _get_col for dbf files.
+
+    * core/IOHandlers/gwt.py: Adding a small fix to gwt reader, if the
+      ids cannot be found in the associated DBF, they will be read in
+      order from the GWT file.
+
+    * contrib/weights_viewer/weights_viewer.py: Small change to
+      identify polygons that are their own neighbor.
+
+    * weights/Distance.py: removing incorrect kernel functions and
+      fixing bug in uniform kernel
+
+    * weights/util.py: refactoring insert_diagonal so that it can add
+      or overwrite the diagonal weights
+
+    * contrib, contrib/README, contrib/__init__.py,
+      contrib/weights_viewer, contrib/weights_viewer/__init__.py,
+      contrib/weights_viewer/transforms.py,
+      contrib/weights_viewer/weights_viewer.py: Adding 1st contrib, a
+      wxPython based Weights file viewer.
+
+
+    * spatial_dynamics/markov.py: - handle case of zero transitions in
+      spatial markov, consistent with treatment
+      in classic markov
+
+    * core/FileIO.py, core/IOHandlers/pyShpIO.py: Changes to allow
+      reading of null polygons.
+
+    * core/util/shapefile.py, core/util/tests/test_shapefile.py:
+      refactoring shapefile reader, see issue #89
+
+    * core/FileIO.py: small change to FileIO to allow FileFormat
+      argument to be passed through
+
+    * esda/getisord.py: fixing bug in local Z values for integer data
+
+    * cg/__init__.py, weights/user.py, weights/util.py: adding radius
+      option to user weights methods
+
+    * cg/kdtree.py, common.py, weights/Distance.py,
+      weights/tests/test_Distance.py: Distance weights can now be
+      passed an instance of KDTree instead of an array. If the KDTree
+      is of type ArcKDTree, the weights returned will be based on
+      ArcDistances. Adding tests for Arc cases of KNN and
+      DistanceBand.
+
+    * weights/util.py: - added function for local clustering
+      coefficient - summary for W as a graph
+
+    * cg/kdtree.py, cg/sphere.py: finishing up Arc_KDTree
+
+    * weights/Distance.py: More doctest fixes.
+
+    * region/maxp.py, spreg/diagnostics.py, weights/Distance.py,
+      weights/user.py: Fixing the doctests for dusty python setup.
+
+    * cg/kdtree.py, cg/sphere.py: adding spherical wrapper around scipy
+      kdtree
+
+    * cg/__init__.py, cg/sphere.py: Adding spherical distance tools to
+      cg. Related to issue #168
+
+    * core/IOHandlers/gwt.py, core/IOHandlers/tests/test_gwt.py:
+      re-enabled gwt writing.
+      'o' transform is used on all GWTs for writing (w is returned to
+      existing transform on exit)
+      Also, setting '_shpName' and '_varName' attributes on W's which
+      are read in through gwt.
+      the writer will check if these vars exist and use them for the
+      header, this prevents metadata loss on simple copies
+
+    * esda/join_counts.py: - fix for handling int array type
+
+    * spreg/diagnostics.py: Adding more efficient constant check for
+      spreg.
+
+    * cg/shapes.py: adding __geo_interface__ and asShape adapter for
+      Point, LineString and Polygon
+
+    * spreg/diagnostics.py: minor change to t-stat function to
+      accommodate future regression models
+
+    * esda/mapclassify.py: - more general fix for #166
+
+
+v<1.2.0>, 2011-07-31
+
+    * pysal/spreg/user_output.py: Fix for bug 162
+
+    * pysal/spatial_dynamics/markov.py: Added markov mobility measures; addresses issue 137
+
+    * pysal/weights/weights.py: Partially addressed issue 160 by removing the shimbel, order, and higher_order methods from W. 
+
+    * doc/source/users/installation.txt: Adding known issue regarding GNU/Linux testing and random seeds; see ticket 52.
+
+    * pysal/esda/geary.py: Adding sparse implementation of Geary's C; substantial gains on larger datasets. 
+
+    * pysal/core/IOHandlers/mtx.py: Adding WSP2W function for fast conversion of sparse weights object (WSP) to pysal W.
+
+    * pysal/esda/getisord.py: Adding Getis-Ord G test module
+
+    * pysal/weights/util.py: Added function that inserts values along the main diagonal of a weights object
+
+    * doc/source/users/tutorials: Fixed issue 76.
+
+    * pysal/core/IOHandlers/mtx.py: Added an IOHandler for MatrixMarket MTX files
+
+    * pysal/esda/moran.py: Optimized conditional randomization
+
+    * pysal/weights/util.py: Re-adding full2W() method to convert full arrays into W objects; related to issue #136. 
+
+    * pysal/core/IOHandlers/gal.py: Added sparse WSP (thin W); gal reader can return W or WSP
+
+    * pysal/core/IOHandlers/pyDbfIO.py: Bug Fix, DBF files are not properly closed when opened in 'r' mode. See issue #155.
+
+    * pysal/core/IOHandlers/stata_txt.py: Adding FileIO handlers for STATA text files
+
+    * pysal/weights/user.py: Fixed issue #154, adding k option to User Kernel weights functions.
+
+    * pysal/core/IOHandlers/mat.py: Adding an IOHandler for MATLAB mat file
+
+    * pysal/core/IOHandlers/wk1.py: Adding an IO handler for wk1 file
+
+    * pysal/core/IOHandlers/geobugs_txt.py: Adding an IO handler for geobugs text file.
+
+    * pysal/core/IOHandlers/arcgis_swm.py: Added ArcGIS SWM file handler
+
+    * pysal/core/IOHandlers/arcgis_dbf.py: Adding a spatial weights file in the (ArcGIS-style) DBF format. 
+
+    * pysal/core/IOHandlers/arcgis_txt.py: Added ArcGIS ASCII file IO handler. 
+
+    * pysal/core/IOHandlers/dat.py: Added DAT file handler.
+
+    * pysal/cg/locators.py: Added point in polygon method for Polygon and PolygonLocator
+
+    * pysal/weights/Distance.py: Optimized Kernel() method to run much faster for the case of adaptive bandwidths
+
+    * pysal/weights/user.py: Added helper function in user.py to create scipy sparse matrix from a gal file
+
+    * pysal/common.py: Added shallow copy method to Read-Only Dict to support multiprocessing.
+
+    * pysal/spatial_dynamics/rank.py: More efficient regime weights
+
+    * pysal/weights/Distance.py: Adding epanechnikov and bisquare kernel functions
+
+    * pysal/core/IOHandlers/pyDbfIO.py: Adding NULL support to numerical DBF fields; modifying PointLocator API to match PolygonLocator API 
+
+    * pysal/cg/locators.py: Handles case when query rectangle is completely inside a polygon
+
+    * pysal/cg/locators.py: Explicit polygon overlap hit test
+
+    * pysal/cg/standalone.py: Adding point-polygon intersection support for polygons with holes.
+
+    * pysal/spatial_dynamics/markov.py: Added homogeneity test.
+
+    * pysal/spatial_dynamics/markov.py: Added spillover test in LISA_Markov.
+
+    * pysal/cg/locators.py: Added Rtree based spatial index for polygonlocator.
+
+    * pysal/cg/rtree.py: Added pure python Rtree module.
+
+    * doc/source/developers/pep/pep-0010.txt: Added PEP 0010: Rtree module in pure python.
+
+    * pysal/esda/geary.py: Fixed bug 144.
+
+    * pysal/spatial_dynamics/markov.py: Added significance filtering of LISA markov.
+
+    * doc/source/developers/pep/pep-0009.txt: Added new PEP, "PEP 0009: Add Python 3.x Support."
+
+    * doc/source/developers/guidelines.txt: New release cycle schedules for 1.2 and 1.3.
+
+    * doc/source/developers/release.txt: Updated pypi instructions; PySAL available on the Python Package Index via download, easy_install, and pip. 
+
+v<1.1.0>, 2011-01-31
+
+    * pysal/core/FileIO.py, pysal/core/IOHandlers/pyDbfIO.py: Added missing value support to FileIO. Warnings will be issued when missing values are found and the value will be set to pysal.MISSINGVALUE, currently None, but the user can change it as needed.
+
+    * pysal/spreg/: Added Spatial Regression module, spreg, and tests.  Added non-spatial diagnostic tests for OLS regression. 
+
+    * pysal/core/IOHandlers/gwt.py: Fixing bottle neck in gwt reader, adding support for GeoDa Style ID's and DBF id_order.
+
+    * pysal/cg/standalone.py: adding, distance_matrix, full distance matrix calculation using sparse matrices
+
+    * pysal/core/util: Moved "converters" into core.util, allows them to be used independently of FileIO.
+
+    * pysal/weights/Distance.py: Adding work around for bug in scipy spatial, see pysal issue #126
+
+    * pysal/weights/user.py: Added build_lattice_shapefile in weights.user, which writes an ncol by nrow grid to a shapefile.
+
+    * pysal/weights/Distance.py: fixed coincident point problem in knnW and made sure it returns k neighbors 
+    * pysal/spatial_dynamics/interaction.py: Added a suite of spatio-temporal interaction tests including the Knox, Mantel, and Jacquez tests. 
+
+    * pysal/weights/util.py: Added lat2SW, allows to create a sparse W matrix for a regular lattice.
+
+    * pysal/tests/tests.py: - new 1.1 integration testing scheme.
+
+    * pysal/esda/interaction.py: added standardized Mantel test and improved readability.
+
+    * pysal/spatial_dynamics/directional.py: - adding directional LISA analytics
+
+    * pysal/esda/mapclassify.py: Natural_Breaks will lower k for data with fewer than k unique values, prints warning.
+
+    * pysal/region/randomregion.py: improvements to spatially constrained random region algorithm
+
+    * pysal/esda/smoothing.py: Adding choynowski probabilities and SMR to smoothing.py
+
+    * doc/source/developers/release.txt: - updating release cycle - release management
+
+v<1.0.0>, 2010-07-31 -- Initial release.
diff --git a/INSTALL.txt b/INSTALL.txt
new file mode 100644
index 0000000..9fca779
--- /dev/null
+++ b/INSTALL.txt
@@ -0,0 +1,242 @@
+.. _installation:
+
+================
+Installing PySAL
+================
+
+.. contents::
+
+Prerequisites
+=============
+
+
+Before installing PySAL, make sure the following libraries are properly
+installed on your machine:
+
+
+* `Python <http://www.python.org/>`_ 2.5 or later
+* `Numpy <http://new.scipy.org/download.html>`_ 1.3 or later
+* `Scipy <http://new.scipy.org/download.html>`_ 0.7 or later
+
+
+*Optional, bundled installation:* 
+
+The Anaconda Python distribution ships with PySAL. Opting for this distro means you don't have to read any further!
+Check it out at http://continuum.io/downloads.html
+
+With the `Enthought Python Distribution (EPD Free) <http://www.enthought.com/products/epd_free.php>`_, 
+you will install all of PySAL's required dependencies, as well as iPython and a lot more, rather than installing dependencies one by one.
+The full-blown EPD package is free for academics, and downloadable `here <http://www.enthought.com/products/edudownload.php>`_. 
+
+*Optional, tools to compile our documentation:*
+
+* `Sphinx <http://sphinx.pocoo.org/>`_
+* `numpydoc <http://pypi.python.org/pypi/numpydoc/0.4>`_  extension to Sphinx (*not included in EPD*)
+
+*Optional, recommended additions:*
+
+* `iPython <http://ipython.scipy.org/moin/Download>`_
+
+Getting PySAL
+=============
+
+PySAL is available on the `Python Package Index
+<http://pypi.python.org/pypi/PySAL>`_, which means it can be
+downloaded and installed manually or from the command line using pip or easy_install, as
+follows::
+
+ $ pip install pysal
+ #or
+ $ easy_install pysal
+
+Alternatively, grab the source distribution (.tar.gz) and decompress it to your selected destination. Open a command shell and navigate to the decompressed PySAL folder. Type::
+
+ $ python setup.py install
+
+Development version on GitHub 
+-----------------------------
+
+Developers can checkout PySAL using **git**::
+
+ $ git clone https://github.com/pysal/pysal.git pysal
+
+
+This will create a folder called 'pysal' containing the source
+code in trunk. This will allow you to easily update any change
+to the developer code base by running::
+
+ $ git pull
+
+Since PySAL is in active development, changes are made frequently. For that
+reason, we recommend you 'tell' Python to look for PySAL in that folder
+rather than properly install it as a package. You can do this by adding the
+PySAL folder to the PYTHONPATH, a UNIX environment variable. 
+To do that, create or edit your command shell profile (.bash_profile if using
+BASH) and add the following line, substituting where appropriate::
+
+
+ $ export PYTHONPATH=${PYTHONPATH}:"/path_to_desired/folder/pysal/"
+
+Save and quit the file, then source it as follows::
+
+ $ source ~/.bash_profile
+
+You are all set. To test your setup, start a Python session and type::
+
+ >>> import pysal; pysal.open.check()
+
+Your terminal should reply with the following::
+
+ PySAL File I/O understands the following file extensions:
+ Ext: '.shp', Modes: ['r', 'wb', 'w', 'rb']
+ Ext: '.mtx', Modes: ['r', 'w']
+ Ext: '.swm', Modes: ['r', 'w']
+ Ext: '.mat', Modes: ['r', 'w']
+ Ext: '.shx', Modes: ['r', 'wb', 'w', 'rb']
+ Ext: '.stata_text', Modes: ['r', 'w']
+ Ext: '.geoda_txt', Modes: ['r']
+ Ext: '.dbf', Modes: ['r', 'w']
+ Ext: '.dat', Modes: ['r', 'w']
+ Ext: '.gwt', Modes: ['r']
+ Ext: '.gal', Modes: ['r', 'w']
+ Ext: '.arcgis_text', Modes: ['r', 'w']
+ Ext: '.wk1', Modes: ['r', 'w']
+ Ext: '.arcgis_dbf', Modes: ['r', 'w']
+ Ext: '.geobugs_text', Modes: ['r', 'w']
+ Ext: '.csv', Modes: ['r']
+ Ext: '.wkt', Modes: ['r']
+ >>>
+
+
+Windows
+~~~~~~~~
+
+To keep up to date with PySAL development, you will need a Git client that allows you to access and 
+update the code from our repository. We recommend 
+`GitHub Windows <http://windows.github.com/>`_.
+
+After using GitHub Windows to install a local copy of Pysal, you need to tell
+your Python distribution where to find PySAL.  There are several ways to do this. Here we
+will use a simple one that requires you to create a text file
+called ``sitecustomize.py`` in the
+``site-packages`` folder of your Python distribution. That path looks more or
+less like this::
+ 
+ C:\PythonXX\Lib\site-packages\sitecustomize.py
+
+where XX corresponds to the version of the Python distribution you are using
+(25 for 2.5, for example).
+Edit ``sitecustomize.py`` with the following two lines of text::
+
+ import sys
+ sys.path.append("C:/PySALsvn/pysal-read-only")
+ 
+Save and close the file.
+You are all set.  To test your setup, start a Python session and type::
+
+ >>> import pysal; pysal.open.check()
+
+Your terminal should reply with the following::
+
+
+ PySAL File I/O understands the following file extensions:
+ Ext: '.shp', Modes: ['r', 'wb', 'w', 'rb']
+ Ext: '.mtx', Modes: ['r', 'w']
+ Ext: '.swm', Modes: ['r', 'w']
+ Ext: '.mat', Modes: ['r', 'w']
+ Ext: '.shx', Modes: ['r', 'wb', 'w', 'rb']
+ Ext: '.stata_text', Modes: ['r', 'w']
+ Ext: '.geoda_txt', Modes: ['r']
+ Ext: '.dbf', Modes: ['r', 'w']
+ Ext: '.dat', Modes: ['r', 'w']
+ Ext: '.gwt', Modes: ['r']
+ Ext: '.gal', Modes: ['r', 'w']
+ Ext: '.arcgis_text', Modes: ['r', 'w']
+ Ext: '.wk1', Modes: ['r', 'w']
+ Ext: '.arcgis_dbf', Modes: ['r', 'w']
+ Ext: '.geobugs_text', Modes: ['r', 'w']
+ Ext: '.csv', Modes: ['r']
+ Ext: '.wkt', Modes: ['r']
+ >>>
+
+
+Known Issues
+============
+
+1.5 install fails with scipy 0.11.0 on Mac OS X
+-----------------------------------------------
+
+Running `python setup.py install` results in::
+
+	from _cephes import *
+	ImportError:
+	dlopen(/Users/serge/Documents/p/pysal/virtualenvs/python1.5/lib/python2.7/site-packages/scipy/special/_cephes.so,
+	2): Symbol not found: _aswfa_
+	  Referenced from:
+	  /Users/serge/Documents/p/pysal/virtualenvs/python1.5/lib/python2.7/site-packages/scipy/special/_cephes.so
+	    Expected in: dynamic lookup
+
+This occurs when your scipy on Mac OS X was compiled with gnu95 and not
+gfortran.  See `this thread <http://mail.scipy.org/pipermail/scipy-user/2010-November/027548.html>`_ for possible solutions.
+
+weights.DistanceBand failing
+----------------------------
+
+This occurs due to a bug in scipy.sparse prior to version 0.8. If you are running such a version see `Issue 73 <http://code.google.com/p/pysal/issues/detail?id=73&sort=milestone>`_ for a fix.
+
+doc tests and unit tests under Linux
+------------------------------------
+
+Some Linux machines return different results for the unit and doc tests. We suspect this has to do with the way random seeds are set. See `Issue 52 <http://code.google.com/p/pysal/issues/detail?id=52&sort=milestone>`_
+
+LISA Markov missing a transpose
+-------------------------------
+In versions of PySAL < 1.1 there is a bug in the LISA Markov, resulting in
+incorrect values. For a fix and more details see `Issue 115 <http://code.google.com/p/pysal/issues/detail?id=115>`_.
+
+
+PIP Install Fails
+-----------------
+
+
+Having numpy and scipy specified in pip requirements.txt causes PIP install of pysal to fail. For discussion and suggested fixes see `Issue 207 <http://code.google.com/p/pysal/issues/detail?id=207&sort=milestone>`_.
+
+Troubleshooting
+===============
+
+If you experience problems when building, installing, or testing PySAL, ask for
+help on the
+`OpenSpace <http://geodacenter.asu.edu/support/community>`_ 
+list or
+browse the archives of the
+`pysal-dev <http://groups.google.com/group/pysal-dev?pli=1>`_ 
+google group. 
+
+Please include the output of the following commands in your message:
+
+1) Platform information::
+
+    python -c 'import os,sys;print os.name, sys.platform'
+    uname -a
+
+2) Python version::
+    
+    python -c 'import sys; print sys.version'
+
+3) SciPy version::
+
+    python -c 'import scipy; print scipy.__version__'
+
+4) NumPy version::
+
+    python -c 'import numpy; print numpy.__version__'
+
+5) Feel free to add any other relevant information.
+   For example, the full output (both stdout and stderr) of the PySAL
+   installation command can be very helpful. Since this output can be
+   rather large, ask before sending it into the mailing list (or
+   better yet, to one of the developers, if asked).
+
+
+
+
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..8ca191d
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,30 @@
+Copyright (c) 2007-2014, PySAL Developers
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of the GeoDa Center for Geospatial Analysis and Computation
+  nor the names of its contributors may be used to endorse or promote products
+  derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..f7f610c
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,2 @@
+include authors.txt INSTALL.txt LICENSE.txt THANKS.txt CHANGELOG.txt MANIFEST.in pysal/COPYING 
+
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..cb93325
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,25 @@
+# developer Makefile for repeated tasks
+# 
+.PHONY: clean
+
+test:
+	nosetests 
+
+doctest:
+	cd doc; make pickle; make doctest
+
+install:
+	python setup.py install >/dev/null
+
+src:
+	python setup.py sdist >/dev/null
+
+win:
+	python setup.py bdist_wininst >/dev/null
+
+clean: 
+	find . -name "*.pyc" -exec rm '{}' ';'
+	find pysal -name "__pycache__" -exec rm -rf '{}' ';'
+	rm -rf dist
+	rm -rf build
+	rm -rf PySAL.egg-info
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b0504df
--- /dev/null
+++ b/README.md
@@ -0,0 +1,101 @@
+Python Spatial Analysis Library
+===============================
+
+What is PySAL
+--------------
+[![Build
+Status](https://travis-ci.org/pysal/pysal.png)](https://travis-ci.org/pysal/pysal)
+
+PySAL is an open source cross-platform library of spatial analysis functions
+written in Python. It is intended to support the development of high level
+applications for spatial analysis.
+
+It is important to underscore what PySAL is, and is not, designed to do. First
+and foremost, PySAL is a library in the fullest sense of the word. Developers
+looking for a suite of spatial analytical methods that they can incorporate
+into application development should feel at home using PySAL. Spatial analysts
+who may be carrying out research projects requiring customized scripting,
+extensive simulation analysis, or those seeking to advance the state of the art
+in spatial analysis should also find PySAL to be a useful foundation for their
+work.
+
+End users looking for a user friendly graphical user interface for spatial
+analysis should not turn to PySAL directly. Instead, we would direct them to
+projects like STARS and the GeoDaX suite of software products which wrap PySAL
+functionality in GUIs. At the same time, we expect that with developments such
+as the Python based plug-in architectures for QGIS, GRASS, and the toolbox
+extensions for ArcGIS, that end user access to PySAL functionality will be
+widening in the near future.
+
+PySAL package structure
+-----------------------
+
+Currently PySAL consists of the following files and directories:
+
+  LICENSE.txt
+    PySAL license.
+
+  INSTALL.txt
+    PySAL prerequisites, installation, testing, and troubleshooting.
+
+  THANKS.txt
+    PySAL developers and contributors. 
+
+  README.txt
+    PySAL structure (this document).
+
+  setup.py
+    Script for building and installing PySAL.
+
+  MANIFEST.in
+    Additions to distutils-generated PySAL tar-balls.
+
+  CHANGELOG.txt
+    Changes since the last release
+
+  pysal/
+    Contains PySAL __init__.py and the directories of PySAL modules.
+
+  doc/
+    Contains PySAL documentation using the Sphinx framework.
+
+PySAL modules
+-------------
+
+* pysal.cg — Computational geometry
+* pysal.contrib - Contributed modules
+* pysal.core — Core data structures and IO
+* pysal.esda — Exploratory spatial data analysis
+* pysal.examples — Data sets
+* pysal.inequality — Spatial inequality analysis
+* pysal.network - Spatial analysis on networks
+* pysal.region — Spatially constrained clustering
+* pysal.spatial_dynamics — Spatial dynamics
+* pysal.spreg — Regression and diagnostics
+* pysal.weights — Spatial weights
+
+Website
+-------
+All things PySAL can be found here
+    http://pysal.org/
+
+Mailing Lists
+-------------
+Please see the developer list here
+    http://groups.google.com/group/pysal-dev
+
+Help for users is here
+    http://groups.google.com/group/openspace-list
+
+Bug reports
+-----------
+To search for or report bugs, please see
+    http://github.com/pysal/pysal/issues
+
+License information
+-------------------
+See the file "LICENSE.txt" for information on the history of this
+software, terms & conditions for usage, and a DISCLAIMER OF ALL
+WARRANTIES.
+
+
diff --git a/README.txt b/README.txt
new file mode 100644
index 0000000..b0504df
--- /dev/null
+++ b/README.txt
@@ -0,0 +1,101 @@
+Python Spatial Analysis Library
+===============================
+
+What is PySAL
+--------------
+[![Build
+Status](https://travis-ci.org/pysal/pysal.png)](https://travis-ci.org/pysal/pysal)
+
+PySAL is an open source cross-platform library of spatial analysis functions
+written in Python. It is intended to support the development of high level
+applications for spatial analysis.
+
+It is important to underscore what PySAL is, and is not, designed to do. First
+and foremost, PySAL is a library in the fullest sense of the word. Developers
+looking for a suite of spatial analytical methods that they can incorporate
+into application development should feel at home using PySAL. Spatial analysts
+who may be carrying out research projects requiring customized scripting,
+extensive simulation analysis, or those seeking to advance the state of the art
+in spatial analysis should also find PySAL to be a useful foundation for their
+work.
+
+End users looking for a user friendly graphical user interface for spatial
+analysis should not turn to PySAL directly. Instead, we would direct them to
+projects like STARS and the GeoDaX suite of software products which wrap PySAL
+functionality in GUIs. At the same time, we expect that with developments such
+as the Python based plug-in architectures for QGIS, GRASS, and the toolbox
+extensions for ArcGIS, that end user access to PySAL functionality will be
+widening in the near future.
+
+PySAL package structure
+-----------------------
+
+Currently PySAL consists of the following files and directories:
+
+  LICENSE.txt
+    PySAL license.
+
+  INSTALL.txt
+    PySAL prerequisites, installation, testing, and troubleshooting.
+
+  THANKS.txt
+    PySAL developers and contributors. 
+
+  README.txt
+    PySAL structure (this document).
+
+  setup.py
+    Script for building and installing PySAL.
+
+  MANIFEST.in
+    Additions to distutils-generated PySAL tar-balls.
+
+  CHANGELOG.txt
+    Changes since the last release
+
+  pysal/
+    Contains PySAL __init__.py and the directories of PySAL modules.
+
+  doc/
+    Contains PySAL documentation using the Sphinx framework.
+
+PySAL modules
+-------------
+
+* pysal.cg — Computational geometry
+* pysal.contrib - Contributed modules
+* pysal.core — Core data structures and IO
+* pysal.esda — Exploratory spatial data analysis
+* pysal.examples — Data sets
+* pysal.inequality — Spatial inequality analysis
+* pysal.network - Spatial analysis on networks
+* pysal.region — Spatially constrained clustering
+* pysal.spatial_dynamics — Spatial dynamics
+* pysal.spreg — Regression and diagnostics
+* pysal.weights — Spatial weights
+
+Website
+-------
+All things PySAL can be found here
+    http://pysal.org/
+
+Mailing Lists
+-------------
+Please see the developer list here
+    http://groups.google.com/group/pysal-dev
+
+Help for users is here
+    http://groups.google.com/group/openspace-list
+
+Bug reports
+-----------
+To search for or report bugs, please see
+    http://github.com/pysal/pysal/issues
+
+License information
+-------------------
+See the file "LICENSE.txt" for information on the history of this
+software, terms & conditions for usage, and a DISCLAIMER OF ALL
+WARRANTIES.
+
+
diff --git a/THANKS.txt b/THANKS.txt
new file mode 100644
index 0000000..577d2b0
--- /dev/null
+++ b/THANKS.txt
@@ -0,0 +1,43 @@
+PySAL is an open source library of routines for exploratory spatial data
+analysis using Python.  It is a community project sponsored by the GeoDa
+Center for Geospatial Analysis and Computation at Arizona State University.
+PySAL originated with code contributions by Luc Anselin and Serge Rey.
+
+Since then many people have contributed to PySAL, in code development,
+suggestions, and financial support.  Below is a partial list.  If you've
+been left off, please email the "PySAL Developers List" <pysal-dev at googlegroups.com>
+
+Pedro Amaral
+Luc Anselin
+Daniel Arribas-Bel
+David C. Folch
+Myunghwa Hwang
+Julia Koschinsky
+Jason Laura
+Xun Li
+Nicholas Malizia
+Mark McCann
+Serge Rey
+Charles R. Schmidt
+Philip Stephens
+Bohumil Svoma
+Ran Wei
+Andrew Winslow
+Jing Yao
+Xinyue Ye
+
+Funding from the following sources has supported PySAL development:
+
+National Science Foundation CyberGIS Software Integration for Sustained Geospatial Innovation
+
+National Institute of Justice Flexible Geospatial Visual Analytics and
+Simulation Technologies to Enhance Criminal Justice Decision Support Systems
+
+National Institutes of Health Geospatial Factors and Impacts: Measurement and
+Use (R01CA126858-02)
+
+National Science Foundation An Exploratory Space-Time Data Analysis Toolkit for
+Spatial Social Science Research (0433132)
+
+National Science Foundation Hedonic Models of Location Decisions with
+Applications to Geospatial Microdata (0852261)
diff --git a/Vagrantfile b/Vagrantfile
new file mode 100644
index 0000000..79dcc08
--- /dev/null
+++ b/Vagrantfile
@@ -0,0 +1,72 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+MACHINE_NAME = "PySALvm"
+# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
+VAGRANTFILE_API_VERSION = "2"
+
+Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
+  # All Vagrant configuration is done here. The most common configuration
+  # options are documented and commented below. For a complete reference,
+  # please see the online documentation at vagrantup.com.
+
+  # Every Vagrant virtual environment requires a box to build off of.
+  config.vm.box = "ubuntu/trusty32"
+  config.ssh.forward_agent = true
+  config.ssh.forward_x11 = true
+  config.vm.network :forwarded_port, host: 8888, guest: 8888
+
+$requirements = <<END
+apt-get update -qq
+apt-get install -y build-essential
+apt-get install -y git-core
+apt-get install -y python-dev
+apt-get install -y python-pip
+apt-get install -y python-numpy
+apt-get install -y python-scipy
+apt-get install -y python-matplotlib
+apt-get install -y python-pandas
+apt-get install -y python-networkx
+apt-get install -y python-qt4
+apt-get install -y qt4-dev-tools
+apt-get install -y python-sip
+apt-get install -y python-sip-dev
+apt-get install -y python-tk
+pip install ipython[notebook]
+pip install -U pyzmq
+pip install -U jinja2
+pip install -U tornado
+pip install -U pygments
+pip install -U pysal
+pip install -U clusterpy
+END
+
+$ipython_notebook = <<CONF_SCRIPT
+ipython profile create
+echo "c.NotebookApp.ip = '0.0.0.0'" >> /home/vagrant/.ipython/profile_default/ipython_notebook_config.py
+echo "c.IPKernelApp.pylab = 'inline'" >> /home/vagrant/.ipython/profile_default/ipython_notebook_config.py
+mkdir -p /home/vagrant/.config/matplotlib
+echo "backend: Qt4AGG" >> /home/vagrant/.config/matplotlib/matplotlibrc
+CONF_SCRIPT
+
+_bashrc = 'echo -e "force_color_prompt=yes" >> /home/vagrant/.bashrc;'
+_bashrc << 'echo -e "red_color=\'\e[1;31m\'" >> /home/vagrant/.bashrc;'
+_bashrc << 'echo -e "end_color=\'\e[0m\'" >> /home/vagrant/.bashrc;'
+_bashrc << "echo -e 'PS1=\"[\${red_color}#{MACHINE_NAME}\${end_color}]$ \"' >> /home/vagrant/.bashrc;"
+_bashrc << 'echo -e alias netebook=\"ipython notebook\" >> /home/vagrant/.bashrc;'
+_bashrc << 'echo -e export EDITOR=\"vi\" >> /home/vagrant/.bashrc;'
+_bashrc << 'echo -e export PYTHONPATH=\"/vagrant\" >> /home/vagrant/.bashrc;'
+
+_bash_login = 'echo -e "cd /vagrant" >> /home/vagrant/.bash_login;'
+_bash_login << 'echo -e "source ~/.bashrc" >> /home/vagrant/.bash_login;'
+
+
+
+  config.vm.provision :shell, :inline => $requirements
+  config.vm.provision :shell, :inline => $ipython_notebook, :privileged => false
+  config.vm.provision :shell, :inline => _bashrc
+  config.vm.provision :shell, :inline => _bash_login
+  config.vm.provision :shell, :inline => "touch ~/.huslogin", :privileged => false
+
+
+end
diff --git a/authors.txt b/authors.txt
new file mode 100644
index 0000000..3cfe054
--- /dev/null
+++ b/authors.txt
@@ -0,0 +1,22 @@
+andrewwinslow = Andrew Winslow <andrewwinslow at gmail.com>
+bohumil.svoma at asu.edu = bohumil <bohumil.svoma at asu.edu>
+dfolch = David Folch <dfolch at gmail.com>
+dfolch at gmail.com = David Folch <dfolch at gmail.com>
+dreamessence = Dani Arribas <dreamessence at gmail.com>
+dreamessence at gmail.com = Dani Arribas <dreamessence at gmail.com>
+drferling at gmail.com = Jay Laura <drferling at gmail.com>
+kx.knight at gmail.com = Xing Kang <kx.knight at gmail.com>
+lanselin = Luc Anselin <lanselin at gmail.com>
+lanselin at gmail.com = Luc Anselin <lanselin at gmail.com>
+mhwang4 = Myunghwa Hwang <mhwang4 at gmail.com>
+mhwang4 at gmail.com  = Myunghwa Hwang <mhwang4 at gmail.com>
+nmalizia = Nick Malizia <nmalizia at gmail.com>
+nmalizia at gmail.com = Nick Malizia <nmalizia at gmail.com>
+phil.stphns = Phil Stephens <phil.stphns at gmail.com>
+phil.stphns at gmail.com = Phil Stephens <phil.stphns at gmail.com>
+schmidtc = Charles Schmidt <schmidtc at gmail.com>
+schmidtc at gmail.com = Charles Schmidt <schmidtc at gmail.com>
+sjsrey = Serge Rey <sjsrey at gmail.com>
+sjsrey at gmail.com = Serge Rey <sjsrey at gmail.com>
+xinyueye = Xinyue Ye <xinyueye at gmail.com>
+(no author) = Serge Rey <sjsrey at gmail.com>
diff --git a/distribute_setup.py b/distribute_setup.py
new file mode 100644
index 0000000..bbb6f3c
--- /dev/null
+++ b/distribute_setup.py
@@ -0,0 +1,485 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+    from distribute_setup import use_setuptools
+    use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+from distutils import log
+
+try:
+    from site import USER_SITE
+except ImportError:
+    USER_SITE = None
+
+try:
+    import subprocess
+
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        return subprocess.call(args) == 0
+
+except ImportError:
+    # will be used for python 2.3
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        # quoting arguments if windows
+        if sys.platform == 'win32':
+            def quote(arg):
+                if ' ' in arg:
+                    return '"%s"' % arg
+                return arg
+            args = [quote(arg) for arg in args]
+        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.19"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # installing
+        log.warn('Installing Distribute')
+        if not _python_cmd('setup.py', 'install'):
+            log.warn('Something went wrong during the installation.')
+            log.warn('See the error message above.')
+    finally:
+        os.chdir(old_wd)
+
+
+def _build_egg(egg, tarball, to_dir):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # building an egg
+        log.warn('Building a Distribute egg in %s', to_dir)
+        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+    finally:
+        os.chdir(old_wd)
+    # returning the result
+    log.warn(egg)
+    if not os.path.exists(egg):
+        raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+                       % (version, sys.version_info[0], sys.version_info[1]))
+    if not os.path.exists(egg):
+        tarball = download_setuptools(version, download_base,
+                                      to_dir, download_delay)
+        _build_egg(egg, tarball, to_dir)
+    sys.path.insert(0, egg)
+    import setuptools
+    setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                   to_dir=os.curdir, download_delay=15, no_fake=True):
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    was_imported = 'pkg_resources' in sys.modules or \
+        'setuptools' in sys.modules
+    try:
+        try:
+            import pkg_resources
+            if not hasattr(pkg_resources, '_distribute'):
+                if not no_fake:
+                    _fake_setuptools()
+                raise ImportError
+        except ImportError:
+            return _do_download(version, download_base, to_dir, download_delay)
+        try:
+            pkg_resources.require("distribute>="+version)
+            return
+        except pkg_resources.VersionConflict:
+            e = sys.exc_info()[1]
+            if was_imported:
+                sys.stderr.write(
+                "The required version of distribute (>=%s) is not available,\n"
+                "and can't be installed while this script is running. Please\n"
+                "install a more recent version first, using\n"
+                "'easy_install -U distribute'."
+                "\n\n(Currently using %r)\n" % (version, e.args[0]))
+                sys.exit(2)
+            else:
+                del pkg_resources, sys.modules['pkg_resources']    # reload ok
+                return _do_download(version, download_base, to_dir,
+                                    download_delay)
+        except pkg_resources.DistributionNotFound:
+            return _do_download(version, download_base, to_dir,
+                                download_delay)
+    finally:
+        if not no_fake:
+            _create_fake_setuptools_pkg_info(to_dir)
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                        to_dir=os.curdir, delay=15):
+    """Download distribute from a specified location and return its filename
+
+    `version` should be a valid distribute version number that is available
+    as an egg for download under the `download_base` URL (which should end
+    with a '/'). `to_dir` is the directory where the egg will be downloaded.
+    `delay` is the number of seconds to pause before an actual download
+    attempt.
+    """
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    try:
+        from urllib.request import urlopen
+    except ImportError:
+        from urllib2 import urlopen
+    tgz_name = "distribute-%s.tar.gz" % version
+    url = download_base + tgz_name
+    saveto = os.path.join(to_dir, tgz_name)
+    src = dst = None
+    if not os.path.exists(saveto):  # Avoid repeated downloads
+        try:
+            log.warn("Downloading %s", url)
+            src = urlopen(url)
+            # Read/write all in one block, so we don't create a corrupt file
+            # if the download is interrupted.
+            data = src.read()
+            dst = open(saveto, "wb")
+            dst.write(data)
+        finally:
+            if src:
+                src.close()
+            if dst:
+                dst.close()
+    return os.path.realpath(saveto)
+
+def _no_sandbox(function):
+    def __no_sandbox(*args, **kw):
+        try:
+            from setuptools.sandbox import DirectorySandbox
+            if not hasattr(DirectorySandbox, '_old'):
+                def violation(*args):
+                    pass
+                DirectorySandbox._old = DirectorySandbox._violation
+                DirectorySandbox._violation = violation
+                patched = True
+            else:
+                patched = False
+        except ImportError:
+            patched = False
+
+        try:
+            return function(*args, **kw)
+        finally:
+            if patched:
+                DirectorySandbox._violation = DirectorySandbox._old
+                del DirectorySandbox._old
+
+    return __no_sandbox
+
+def _patch_file(path, content):
+    """Will backup the file then patch it"""
+    existing_content = open(path).read()
+    if existing_content == content:
+        # already patched
+        log.warn('Already patched.')
+        return False
+    log.warn('Patching...')
+    _rename_path(path)
+    f = open(path, 'w')
+    try:
+        f.write(content)
+    finally:
+        f.close()
+    return True
+
+_patch_file = _no_sandbox(_patch_file)
+
+def _same_content(path, content):
+    return open(path).read() == content
+
+def _rename_path(path):
+    new_name = path + '.OLD.%s' % time.time()
+    log.warn('Renaming %s into %s', path, new_name)
+    os.rename(path, new_name)
+    return new_name
+
+def _remove_flat_installation(placeholder):
+    if not os.path.isdir(placeholder):
+        log.warn('Unkown installation at %s', placeholder)
+        return False
+    found = False
+    for file in os.listdir(placeholder):
+        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+            found = True
+            break
+    if not found:
+        log.warn('Could not locate setuptools*.egg-info')
+        return
+
+    log.warn('Removing elements out of the way...')
+    pkg_info = os.path.join(placeholder, file)
+    if os.path.isdir(pkg_info):
+        patched = _patch_egg_dir(pkg_info)
+    else:
+        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+    if not patched:
+        log.warn('%s already patched.', pkg_info)
+        return False
+    # now let's move the files out of the way
+    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+        element = os.path.join(placeholder, element)
+        if os.path.exists(element):
+            _rename_path(element)
+        else:
+            log.warn('Could not find the %s element of the '
+                     'Setuptools distribution', element)
+    return True
+
+_remove_flat_installation = _no_sandbox(_remove_flat_installation)
+
+def _after_install(dist):
+    log.warn('After install bootstrap.')
+    placeholder = dist.get_command_obj('install').install_purelib
+    _create_fake_setuptools_pkg_info(placeholder)
+
+def _create_fake_setuptools_pkg_info(placeholder):
+    if not placeholder or not os.path.exists(placeholder):
+        log.warn('Could not find the install location')
+        return
+    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+            (SETUPTOOLS_FAKED_VERSION, pyver)
+    pkg_info = os.path.join(placeholder, setuptools_file)
+    if os.path.exists(pkg_info):
+        log.warn('%s already exists', pkg_info)
+        return
+
+    log.warn('Creating %s', pkg_info)
+    f = open(pkg_info, 'w')
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+
+    pth_file = os.path.join(placeholder, 'setuptools.pth')
+    log.warn('Creating %s', pth_file)
+    f = open(pth_file, 'w')
+    try:
+        f.write(os.path.join(os.curdir, setuptools_file))
+    finally:
+        f.close()
+
+_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
+
+def _patch_egg_dir(path):
+    # let's check if it's already patched
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    if os.path.exists(pkg_info):
+        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+            log.warn('%s already patched.', pkg_info)
+            return False
+    _rename_path(path)
+    os.mkdir(path)
+    os.mkdir(os.path.join(path, 'EGG-INFO'))
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    f = open(pkg_info, 'w')
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+    return True
+
+_patch_egg_dir = _no_sandbox(_patch_egg_dir)
+
+def _before_install():
+    log.warn('Before install bootstrap.')
+    _fake_setuptools()
+
+
+def _under_prefix(location):
+    if 'install' not in sys.argv:
+        return True
+    args = sys.argv[sys.argv.index('install')+1:]
+    for index, arg in enumerate(args):
+        for option in ('--root', '--prefix'):
+            if arg.startswith('%s=' % option):
+                top_dir = arg.split('root=')[-1]
+                return location.startswith(top_dir)
+            elif arg == option:
+                if len(args) > index:
+                    top_dir = args[index+1]
+                    return location.startswith(top_dir)
+        if arg == '--user' and USER_SITE is not None:
+            return location.startswith(USER_SITE)
+    return True
+
+
+def _fake_setuptools():
+    log.warn('Scanning installed packages')
+    try:
+        import pkg_resources
+    except ImportError:
+        # we're cool
+        log.warn('Setuptools or Distribute does not seem to be installed.')
+        return
+    ws = pkg_resources.working_set
+    try:
+        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
+                                  replacement=False))
+    except TypeError:
+        # old distribute API
+        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+
+    if setuptools_dist is None:
+        log.warn('No setuptools distribution found')
+        return
+    # detecting if it was already faked
+    setuptools_location = setuptools_dist.location
+    log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+    # setuptools is not located in them, we don't patch it
+    if not _under_prefix(setuptools_location):
+        log.warn('Not patching, --root or --prefix is installing Distribute'
+                 ' in another location')
+        return
+
+    # let's see if it's an egg
+    if not setuptools_location.endswith('.egg'):
+        log.warn('Non-egg installation')
+        res = _remove_flat_installation(setuptools_location)
+        if not res:
+            return
+    else:
+        log.warn('Egg installation')
+        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+        if (os.path.exists(pkg_info) and
+            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+            log.warn('Already patched.')
+            return
+        log.warn('Patching...')
+        # let's create a fake egg replacing setuptools one
+        res = _patch_egg_dir(setuptools_location)
+        if not res:
+            return
+    log.warn('Patched done.')
+    _relaunch()
+
+
+def _relaunch():
+    log.warn('Relaunching...')
+    # we have to relaunch the process
+    # pip marker to avoid a relaunch bug
+    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
+        sys.argv[0] = 'setup.py'
+    args = [sys.executable] + sys.argv
+    sys.exit(subprocess.call(args))
+
+
+def _extractall(self, path=".", members=None):
+    """Extract all members from the archive to the current working
+       directory and set owner, modification time and permissions on
+       directories afterwards. `path' specifies a different directory
+       to extract to. `members' is optional and must be a subset of the
+       list returned by getmembers().
+    """
+    import copy
+    import operator
+    from tarfile import ExtractError
+    directories = []
+
+    if members is None:
+        members = self
+
+    for tarinfo in members:
+        if tarinfo.isdir():
+            # Extract directories with a safe mode.
+            directories.append(tarinfo)
+            tarinfo = copy.copy(tarinfo)
+            tarinfo.mode = 448 # decimal for oct 0700
+        self.extract(tarinfo, path)
+
+    # Reverse sort directories.
+    if sys.version_info < (2, 4):
+        def sorter(dir1, dir2):
+            return cmp(dir1.name, dir2.name)
+        directories.sort(sorter)
+        directories.reverse()
+    else:
+        directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+    # Set correct owner, mtime and filemode on directories.
+    for tarinfo in directories:
+        dirpath = os.path.join(path, tarinfo.name)
+        try:
+            self.chown(tarinfo, dirpath)
+            self.utime(tarinfo, dirpath)
+            self.chmod(tarinfo, dirpath)
+        except ExtractError:
+            e = sys.exc_info()[1]
+            if self.errorlevel > 1:
+                raise
+            else:
+                self._dbg(1, "tarfile: %s" % e)
+
+
+def main(argv, version=DEFAULT_VERSION):
+    """Install or upgrade setuptools and EasyInstall"""
+    tarball = download_setuptools()
+    _install(tarball)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..964792d
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,99 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html      to make standalone HTML files"
+	@echo "  dirhtml   to make HTML files named index.html in directories"
+	@echo "  pickle    to make pickle files"
+	@echo "  json      to make JSON files"
+	@echo "  htmlhelp  to make HTML files and a HTML help project"
+	@echo "  qthelp    to make HTML files and a qthelp project"
+	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  changes   to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck to check all external links for integrity"
+	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf build/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
+	@echo
+	@echo "Build finished. The HTML pages are in build/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) build/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in build/dirhtml."
+
+pickle:
+	$(SPHINXBUILD) -Q -b pickle $(ALLSPHINXOPTS) build/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in build/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) build/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in build/qthelp, like this:"
+	@echo "# qcollectiongenerator build/qthelp/pysal.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile build/qthelp/pysal.qhc"
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in build/latex."
+	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+	      "run these through (pdf)latex."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
+	@echo
+	@echo "The overview file is in build/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in build/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest  $(ALLSPHINXOPTS) build/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in build/doctest/output.txt."
+	rm ../pysal/examples/sids2.swm
+	rm ../pysal/examples/virginia_queen
+	rm ../pysal/examples/virginia_queen.dat
+	rm ../pysal/examples/virginia_queen.dbf
+	rm ../pysal/examples/virginia_queen.gal
+	rm ../pysal/examples/virginia_queen.mat
+	rm ../pysal/examples/virginia_queen.mtx
+	rm ../pysal/examples/virginia_queen.swm
+	rm ../pysal/examples/virginia_queen.txt
+	rm ../pysal/examples/virginia_queen.wk1
+
diff --git a/doc/source/_static/favicon.png b/doc/source/_static/favicon.png
new file mode 100644
index 0000000..f7bc67c
Binary files /dev/null and b/doc/source/_static/favicon.png differ
diff --git a/doc/source/_static/images/bugs.png b/doc/source/_static/images/bugs.png
new file mode 100644
index 0000000..153a38b
Binary files /dev/null and b/doc/source/_static/images/bugs.png differ
diff --git a/doc/source/_static/images/documentation.png b/doc/source/_static/images/documentation.png
new file mode 100644
index 0000000..b5b9ce8
Binary files /dev/null and b/doc/source/_static/images/documentation.png differ
diff --git a/doc/source/_static/images/download.png b/doc/source/_static/images/download.png
new file mode 100644
index 0000000..5b78d2b
Binary files /dev/null and b/doc/source/_static/images/download.png differ
diff --git a/doc/source/_static/images/download2.png b/doc/source/_static/images/download2.png
new file mode 100644
index 0000000..d6986a8
Binary files /dev/null and b/doc/source/_static/images/download2.png differ
diff --git a/doc/source/_static/images/feed-icon.jpg b/doc/source/_static/images/feed-icon.jpg
new file mode 100644
index 0000000..a2ca139
Binary files /dev/null and b/doc/source/_static/images/feed-icon.jpg differ
diff --git a/doc/source/_static/images/feed-icon.png b/doc/source/_static/images/feed-icon.png
new file mode 100644
index 0000000..ff1777b
Binary files /dev/null and b/doc/source/_static/images/feed-icon.png differ
diff --git a/doc/source/_static/images/socal_1.jpg b/doc/source/_static/images/socal_1.jpg
new file mode 100644
index 0000000..9f8f677
Binary files /dev/null and b/doc/source/_static/images/socal_1.jpg differ
diff --git a/doc/source/_static/images/socal_2.jpg b/doc/source/_static/images/socal_2.jpg
new file mode 100644
index 0000000..dfc9b5c
Binary files /dev/null and b/doc/source/_static/images/socal_2.jpg differ
diff --git a/doc/source/_static/images/socal_3.jpg b/doc/source/_static/images/socal_3.jpg
new file mode 100644
index 0000000..d91d54d
Binary files /dev/null and b/doc/source/_static/images/socal_3.jpg differ
diff --git a/doc/source/_static/images/tutorial.png b/doc/source/_static/images/tutorial.png
new file mode 100644
index 0000000..b9766d3
Binary files /dev/null and b/doc/source/_static/images/tutorial.png differ
diff --git a/doc/source/_static/images/yi_jing_01_chien.jpg b/doc/source/_static/images/yi_jing_01_chien.jpg
new file mode 100644
index 0000000..276df14
Binary files /dev/null and b/doc/source/_static/images/yi_jing_01_chien.jpg differ
diff --git a/doc/source/_static/mydoc.css b/doc/source/_static/mydoc.css
new file mode 100644
index 0000000..a3df02c
--- /dev/null
+++ b/doc/source/_static/mydoc.css
@@ -0,0 +1,3 @@
+@import url("default.css");
+
+span.strike {text-decoration: line-through; }
diff --git a/doc/source/_static/pysalgraph.png b/doc/source/_static/pysalgraph.png
new file mode 100644
index 0000000..92462a8
Binary files /dev/null and b/doc/source/_static/pysalgraph.png differ
diff --git a/doc/source/_static/pysalsmall.png b/doc/source/_static/pysalsmall.png
new file mode 100644
index 0000000..d33dba4
Binary files /dev/null and b/doc/source/_static/pysalsmall.png differ
diff --git a/doc/source/_templates/ganalytics_layout.html b/doc/source/_templates/ganalytics_layout.html
new file mode 100644
index 0000000..351b2bb
--- /dev/null
+++ b/doc/source/_templates/ganalytics_layout.html
@@ -0,0 +1,213 @@
+{%- block doctype -%}
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+{%- endblock %}
+{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %}
+{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %}
+
+{%- macro relbar() %}
+    <div class="related">
+      <h3>{{ _('Navigation') }}</h3>
+      <ul>
+        {%- for rellink in rellinks %}
+        <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}>
+          <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags }}"
+             {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
+          {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
+        {%- endfor %}
+        {%- block rootrellink %}
+        <li><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
+        {%- endblock %}
+        {%- for parent in parents %}
+          <li><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li>
+        {%- endfor %}
+        {%- block relbaritems %} {% endblock %}
+      </ul>
+    </div>
+{%- endmacro %}
+
+{%- macro sidebar() %}
+      {%- if not embedded %}{% if not theme_nosidebar|tobool %}
+      <div class="sphinxsidebar">
+        <div class="sphinxsidebarwrapper">
+          {%- block sidebarlogo %}
+          {%- if logo %}
+            <p class="logo"><a href="{{ pathto(master_doc) }}">
+              <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
+            </a></p>
+          {%- endif %}
+          {%- endblock %}
+          {%- block sidebartoc %}
+          {%- if display_toc %}
+            <h3><a href="{{ pathto(master_doc) }}">{{ _('Table Of Contents') }}</a></h3>
+            {{ toc }}
+          {%- endif %}
+          {%- endblock %}
+          {%- block sidebarrel %}
+          {%- if prev %}
+            <h4>{{ _('Previous topic') }}</h4>
+            <p class="topless"><a href="{{ prev.link|e }}"
+                                  title="{{ _('previous chapter') }}">{{ prev.title }}</a></p>
+          {%- endif %}
+          {%- if next %}
+            <h4>{{ _('Next topic') }}</h4>
+            <p class="topless"><a href="{{ next.link|e }}"
+                                  title="{{ _('next chapter') }}">{{ next.title }}</a></p>
+          {%- endif %}
+          {%- endblock %}
+          {%- block sidebarsourcelink %}
+          {%- if show_source and has_source and sourcename %}
+            <h3>{{ _('This Page') }}</h3>
+            <ul class="this-page-menu">
+              <li><a href="{{ pathto('_sources/' + sourcename, true)|e }}"
+                     rel="nofollow">{{ _('Show Source') }}</a></li>
+            </ul>
+          {%- endif %}
+          {%- endblock %}
+          {%- if customsidebar %}
+          {% include customsidebar %}
+          {%- endif %}
+          {%- block sidebarsearch %}
+          {%- if pagename != "search" %}
+          <div id="searchbox" style="display: none">
+            <h3>{{ _('Quick search') }}</h3>
+              <form class="search" action="{{ pathto('search') }}" method="get">
+                <input type="text" name="q" size="18" />
+                <input type="submit" value="{{ _('Go') }}" />
+                <input type="hidden" name="check_keywords" value="yes" />
+                <input type="hidden" name="area" value="default" />
+              </form>
+              <p class="searchtip" style="font-size: 90%">
+              {{ _('Enter search terms or a module, class or function name.') }}
+              </p>
+          </div>
+          <script type="text/javascript">$('#searchbox').show(0);</script>
+          {%- endif %}
+          {%- endblock %}
+        </div>
+      </div>
+      {%- endif %}{% endif %}
+{%- endmacro %}
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    {{ metatags }}
+    {%- if not embedded %}
+      {%- set titlesuffix = " — "|safe + docstitle|e %}
+    {%- else %}
+      {%- set titlesuffix = "" %}
+    {%- endif %}
+    <title>{{ title|striptags }}{{ titlesuffix }}</title>
+    <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
+    <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
+    {%- if not embedded %}
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    '{{ pathto("", 1) }}',
+        VERSION:     '{{ release|e }}',
+        COLLAPSE_MODINDEX: false,
+        FILE_SUFFIX: '{{ file_suffix }}',
+        HAS_SOURCE:  {{ has_source|lower }}
+      };
+    </script>
+    {%- for scriptfile in script_files %}
+    <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
+    {%- endfor %}
+    {%- if use_opensearch %}
+    <link rel="search" type="application/opensearchdescription+xml"
+          title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}"
+          href="{{ pathto('_static/opensearch.xml', 1) }}"/>
+    {%- endif %}
+    {%- if favicon %}
+    <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
+    {%- endif %}
+    {%- endif %}
+{%- block linktags %}
+    {%- if hasdoc('about') %}
+    <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" />
+    {%- endif %}
+    {%- if hasdoc('genindex') %}
+    <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" />
+    {%- endif %}
+    {%- if hasdoc('search') %}
+    <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" />
+    {%- endif %}
+    {%- if hasdoc('copyright') %}
+    <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" />
+    {%- endif %}
+    <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}" />
+    {%- if parents %}
+    <link rel="up" title="{{ parents[-1].title|striptags }}" href="{{ parents[-1].link|e }}" />
+    {%- endif %}
+    {%- if next %}
+    <link rel="next" title="{{ next.title|striptags }}" href="{{ next.link|e }}" />
+    {%- endif %}
+    {%- if prev %}
+    <link rel="prev" title="{{ prev.title|striptags }}" href="{{ prev.link|e }}" />
+    {%- endif %}
+{%- endblock %}
+{%- block extrahead %} {% endblock %}
+  </head>
+  <body>
+{%- block header %}{% endblock %}
+
+{%- block relbar1 %}{{ relbar() }}{% endblock %}
+
+{%- block sidebar1 %} {# possible location for sidebar #} {% endblock %}
+
+    <div class="document">
+{%- block document %}
+      <div class="documentwrapper">
+      {%- if not embedded %}{% if not theme_nosidebar|tobool %}
+        <div class="bodywrapper">
+      {%- endif %}{% endif %}
+          <div class="body">
+            {% block body %} {% endblock %}
+          </div>
+      {%- if not embedded %}{% if not theme_nosidebar|tobool %}
+        </div>
+      {%- endif %}{% endif %}
+      </div>
+{%- endblock %}
+
+{%- block sidebar2 %}{{ sidebar() }}{% endblock %}
+      <div class="clearer"></div>
+    </div>
+
+{%- block relbar2 %}{{ relbar() }}{% endblock %}
+
+{%- block footer %}
+    <div class="footer">
+    {%- if hasdoc('copyright') %}
+      {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
+    {%- else %}
+      {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
+    {%- endif %}
+    {%- if last_updated %}
+      {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
+    {%- endif %}
+    {%- if show_sphinx %}
+      {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %}
+    {%- endif %}
+    </div>
+{%- endblock %}
+
+<!--Begin Google Analytics Code -->
+<script type="text/javascript">
+
+  var _gaq = _gaq || [];
+  _gaq.push(['_setAccount', 'UA-34241241-1']);
+  _gaq.push(['_trackPageview']);
+
+  (function() {
+    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
+    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
+    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
+  })();
+
+</script>
+
+<!--End Google Analytics Code -->
+ </body>
+</html>
diff --git a/doc/source/_templates/layout.html b/doc/source/_templates/layout.html
new file mode 100644
index 0000000..f5b4b6d
--- /dev/null
+++ b/doc/source/_templates/layout.html
@@ -0,0 +1,24 @@
+{% extends "ganalytics_layout.html" %}
+
+
+{% block rootrellink %}
+        <li><a href="{{ pathto('index') }}">home</a>| </li>
+        <li><a href="{{ pathto('search') }}">search</a>| </li>
+       <li><a href="{{ pathto('contents') }}">documentation </a> »</li>
+{% endblock %}
+
+
+{% block relbar1 %}
+
+<div style= "background-color: white; text-align: left; vertical-align:middle">
+	<a href="{{ pathto('index') }}"><img src="{{
+pathto("_static/pysalsmall.png", 1) }}" border="0" alt="pysal"/></a>
+</div>
+
+{{ super() }}
+{% endblock %}
+
+{# put the sidebar before the body #}
+{% block sidebar1 %}{% endblock %}
+{% block sidebar2 %}{{ sidebar() }}{% endblock %}
+
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000..df9e4c9
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,230 @@
+# -*- coding: utf-8 -*-
+#
+# pysal documentation build configuration file, created by
+# sphinx-quickstart on Wed Aug 26 19:58:20 2009.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys 
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('sphinxext'))
+#sys.path.append(os.path.abspath('../../'))
+sys.path.append(os.path.abspath('~/anaconda/lib/python2.7/site-packages/'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc',
+'sphinx.ext.doctest','sphinx.ext.graphviz', 'sphinx.ext.intersphinx',
+'sphinx.ext.pngmath',
+'sphinx.ext.autosummary','sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
+
+#doctest extension config values
+doctest_path = '/Users/stephens/code/pysal/doc/source/users/tutorials/' 
+doctest_test_doctest_blocks = ''
+#doctest_global_setup = 
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+#source_suffix = '.txt'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'pysal'
+copyright = u'2014-, PySAL Developers; 2009-13 Sergio Rey'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '1.9.1'
+# The full version, including alpha/beta/rc tags.
+release = '1.9.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = False
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+#custom strikethrough setup
+#html_style = 'mydoc.css'
+
+# The theme to use for HTML and HTML Help pages.  Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'default'
+#html_theme = 'sphinxdoc'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+        "rightsidebar": "true",
+        "relbarbgcolor": "CornflowerBlue",
+        "sidebartextcolor": "black",
+        "sidebarlinkcolor": "#355f7c",
+        "sidebarbgcolor": "#F2F2F2",
+        "codebgcolor": "AliceBlue",
+        "footerbgcolor": "Black",
+        "externalrefs": "false",
+        "bodyfont": "Optima",
+        "headfont": "Optima "
+        }
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = "%s v%s Reference Guide" % (project, version)
+html_title = "Python Spatial Analysis Library" 
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+html_short_title = "PySAL"
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = 'favicon.png'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'pysaldoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'pysal.tex', u'pysal Documentation',
+   u'PySAL Developers', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
+
+#numpydoc option
+numpydoc_show_class_members = True
+numpydoc_class_members_toctree = False
diff --git a/doc/source/contents.txt b/doc/source/contents.txt
new file mode 100644
index 0000000..b9bdf05
--- /dev/null
+++ b/doc/source/contents.txt
@@ -0,0 +1,16 @@
+.. _contents:
+
+===================
+PySAL Documentation
+===================
+
+    :Release: |version|
+    :Date: |today|
+
+
+.. toctree::
+   :maxdepth: 1
+
+   users/index
+   developers/index
+   library/index
diff --git a/doc/source/developers/docs/index.rst b/doc/source/developers/docs/index.rst
new file mode 100644
index 0000000..147f1fb
--- /dev/null
+++ b/doc/source/developers/docs/index.rst
@@ -0,0 +1,252 @@
+.. role:: strike
+
+*******************
+PySAL Documentation
+*******************
+.. contents::
+
+.. _compiling-doc-label:
+
+
+Writing Documentation
+=====================
+
+The PySAL project contains two distinct forms of documentation: inline and
+non-inline. Inline docs are contained in the source
+code itself, in what are known as *docstrings*.  Non-inline documentation is in the
+doc folder in the trunk. 
+
+Inline documentation is processed with an extension to Sphinx called napoleon.
+We have adopted the community standard outlined `here`_.
+
+PySAL makes use of the built-in Sphinx extension *viewcode*, which allows the
+reader to quicky toggle between docs and source code. To use it,
+the source code module requires at least one properly formatted docstring.
+
+Non-inline documentation editors can opt to strike-through older documentation rather than
+delete it with the custom "role" directive as
+follows.  Near the top of the document, add the role directive.  Then, to strike through old text, add the :strike:
+directive and offset the text with back-ticks. This :strike:`strikethrough` is produced
+like this::
+
+  .. role:: strike
+
+  ...
+  ...
+
+  This :strike:`strikethrough` is produced like this:
+
+Compiling Documentation
+=======================
+ 
+PySAL documentation is built using `Sphinx`_ and the Sphinx extension `napoleon`_, which formats PySAL's docstrings. 
+
+Note
+----
+If you're using Sphinx version 1.3 or newer, napoleon is included and should be called in the main conf.py as sphinx.ext.napoleon rather than installing it as we show below.
+
+If you're using a version of Sphinx that does not ship with napoleon ( Sphinx < 1.3), you'll need napoleon version 0.2.4 or later and Sphinx version 1.0 or later to compile the documentation. 
+Both modules are available at the Python Package Index, and can be downloaded and installed
+from the command line using *pip* or *easy_install*.::
+
+       $ easy_install sphinx
+       $ easy_install sphinxcontrib-napoleon
+
+If you get a permission error, try using 'sudo'. 
+
+The source for the docs is in `doc`. Building the documentation is
+done as follows (assuming sphinx and napoleon are already installed)::
+
+        $ cd doc; ls
+        build  Makefile  source
+
+        $ make clean
+        $ make html
+
+To see the results in a browser open `build/html/index.html`. To make
+changes, edit (or add) the relevant files in `source` and rebuild the
+docs using the 'make html' (or 'make clean' if you're adding new documents) command. 
+Consult the `Sphinx markup guide`_ for details on the syntax and structure of the files in `source`.
+
+Once you're happy with your changes, check-in the `source` files. Do not
+add or check-in files under  `build` since they are dynamically built.
+
+Changes checked in to `Github`_ will be propagated to `readthedocs`_ within a few minutes.
+
+
+Lightweight Editing with rst2html.py
+------------------------------------
+
+Because the doc build process can sometimes be lengthy, you may want to avoid
+having to do a full build until after you are done with your major edits on
+one particular document.  As part of the
+`docutils`_ package,
+the file `rst2html.py` can take an `rst` document and generate the html file.
+This will get most of the work done that you need to get a sense if your edits
+are good, *without* having to rebuild all the PySAL docs. As of version 0.8 it
+also understands LaTeX. It will cough on some sphinx directives, but those can
+be dealt with in the final build.
+
+To use this, download the docutils tarball and put `rst2html.py` somewhere in
+your path. In vim (on Mac OS X) you can then add something like::
+
+    map ;r ^[:!rst2html.py % > ~/tmp/tmp.html; open ~/tmp/tmp.html^M^M
+
+which will render the html in your default browser.
+
+Things to watch out for
+------------------------
+
+If you encounter a failing tutorial doctest that does not seem to be in error, it could be 
+a difference in whitespace between the expected and received output. In that case, add an 
+'options' line as follows::
+ 
+ .. doctest::
+    :options: +NORMALIZE_WHITESPACE
+	
+    >>> print 'a   b   c'
+    a b c
+
+Adding a new package and modules
+================================
+
+To include the docstrings of a new module in the :doc:`API docs </library/index>` the following steps are required:
+
+ 1. In the directory `/doc/source/library` add a directory with the name of
+    the new package. You can skip to step 3 if the package exists and you are
+    just adding new modules to this package.
+ 2. Within `/doc/source/library/packageName` add a file `index.rst`
+ 3. For each new module in this package, add a file `moduleName.rst` and
+    update the `index.rst` file to include `modulename`.
+
+
+Adding a new tutorial: spreg
+============================
+
+While the :doc:`API docs </library/index>` are automatically generated when
+compiling with Sphinx, tutorials that demonstrate use cases for new modules
+need to be crafted by the developer. Below we use the case of one particular
+module that currently does not have a tutorial as a guide for how to add
+tutorials for new modules.
+
+As of PySAL 1.3 there are API docs for
+:doc:`spreg </library/spreg/index>`
+but no :doc:`tutorial </users/tutorials/index>` currently exists for this module. 
+
+We will fix this and add a tutorial for
+:doc:`spreg </library/spreg/index>`.
+
+
+Requirements
+------------
+
+ - sphinx
+ - napoleon
+ - pysal sources
+
+
+You can install `sphinx` or `napoleon` using `easy_install` as described
+above in :ref:`compiling-doc-label`.
+
+Where to add the tutorial content
+---------------------------------
+
+Within the PySAL source the docs live in::
+
+    pysal/doc/source
+
+This directory has the source `reStructuredText`_ files used to render the html
+pages. The tutorial pages live under::
+
+    pysal/doc/source/users/tutorials
+
+As of PySAL 1.3, the content of this directory is::
+
+	autocorrelation.rst  fileio.rst  next.rst     smoothing.rst
+	dynamics.rst	     index.rst	 region.rst   weights.rst
+	examples.rst	     intro.rst	 shapely.rst
+
+The body of the `index.rst` file lists the sections for the tutorials::
+	   
+	   Introduction to the Tutorials <intro>
+	   File Input and Output <fileio>
+	   Spatial Weights <weights>
+	   Spatial Autocorrelation <autocorrelation>
+	   Spatial Smoothing <smoothing>
+	   Regionalization <region>
+	   Spatial Dynamics <dynamics>
+	   Shapely Extension <shapely>
+	   Next Steps <next>
+	   Sample Datasets <examples>
+
+In order to add a tutorial for `spreg` we need the to change this to read::
+
+	   Introduction to the Tutorials <intro>
+	   File Input and Output <fileio>
+	   Spatial Weights <weights>
+	   Spatial Autocorrelation <autocorrelation>
+	   Spatial Smoothing <smoothing>
+	   Spatial Regression <spreg>
+	   Regionalization <region>
+	   Spatial Dynamics <dynamics>
+	   Shapely Extension <shapely>
+	   Next Steps <next>
+	   Sample Datasets <examples>
+
+So we are adding a new section that will show up as `Spatial Regression` and
+its contents will be found in the file `spreg.rst`. To create the latter
+file simply copy, say, `dynamics.rst` to `spreg.rst` and then modify `spreg.rst`
+to have the correct content.
+
+Once this is done, move back up to the top level doc directory::
+
+	pysal/doc
+
+Then::
+
+        $ make clean
+        $ make html
+
+Point your browser to `pysal/doc/build/html/index.html`
+
+and check your work. You can then make changes to the `spreg.rst` file and
+recompile until you are set with the content.
+
+Proper Reference Formatting
+---------------------------
+
+For proper hypertext linking of reference material, each unique reference in a
+single python module can only be explicitly named once. Take the following example for
+instance::
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+    two-stage least squares procedure for estimating a spatial autoregressive
+    model with autoregressive disturbances". The Journal of Real Estate
+    Finance and Economics, 17, 1.
+
+It is "named" as "1".  Any other references (even the same paper) with the same "name" will cause a
+Duplicate Reference error when Sphinx compiles the document.  Several
+work-arounds are available but no consensus has emerged. 
+
+One possible solution is to use an anonymous reference on any subsequent
+duplicates, signified by a single underscore with no brackets.  Another solution
+is to put all document references together at the bottom of the document, rather
+than listing them at the bottom of each class, as has been done in some modules. 
+
+
+
+.. _tutorial: /users/tutorials/index
+.. _docutils: http://docutils.sourceforge.net/docs/user/tools.html
+.. _API docs: /library/index
+.. _spreg: /library/spreg/index
+.. _Sphinx: http://pypi.python.org/pypi/Sphinx/
+.. _here: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
+.. _Github: http://github.com/pysal
+.. _spreg: /library/spreg/index
+.. _reStructuredText: http://sphinx.pocoo.org/rest.html
+.. _Sphinx markup guide: http://sphinx.pocoo.org/contents.html
+.. _napoleon: http://sphinxcontrib-napoleon.readthedocs.org/en/latest/sphinxcontrib.napoleon.html
+.. _readthedocs: http://pysal.readthedocs.org/en/latest
diff --git a/doc/source/developers/guidelines.rst b/doc/source/developers/guidelines.rst
new file mode 100644
index 0000000..31bb86a
--- /dev/null
+++ b/doc/source/developers/guidelines.rst
@@ -0,0 +1,130 @@
+.. _guidelines:
+
+==========
+Guidelines
+==========
+.. contents::
+
+PySAL is adopting many of the conventions in the larger scientific computing
+in Python community and we ask that anyone interested in joining the project
+please review the following documents:
+
+ * `Documentation standards <http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines>`_
+ * `Coding guidelines <http://www.python.org/dev/peps/pep-0008/>`_
+ * :doc:`Testing guidelines <testing>`
+
+
+-----------------------
+Open Source Development
+-----------------------
+
+PySAL is an open source project and we invite any interested user who wants to
+contribute to the project to contact one of the
+`team members <https://github.com/pysal?tab=members>`_. For users who
+are new to open source development you may want to consult the following
+documents for background information:
+
+ * `Contributing to Open Source Projects HOWTO
+   <http://www.kegel.com/academy/opensource.html>`_
+
+
+
+
+-----------------------
+Source Code
+-----------------------
+
+
+PySAL uses `git <http://git-scm.com/>`_ and github for our  `code repository <https://github.com/pysal/pysal.git/>`_.
+
+
+You can setup PySAL for local development following the :doc:`installation instructions </users/installation>`.
+
+
+------------------------
+Development Mailing List
+------------------------
+
+Development discussions take place on `pysal-dev
+<http://groups.google.com/group/pysal-dev>`_.
+
+
+-----------------------
+Release Schedule
+-----------------------
+
+PySAL development follows a six-month release schedule that is aligned with
+the academic calendar.
+
+1.10 Cycle
+==========
+
+========   ========   ================= ====================================================
+Start      End        Phase             Notes
+========   ========   ================= ====================================================
+2/1/15      2/14/15   Module Proposals  Developers draft PEPs and prototype
+2/15/15     2/15/15   Developer vote    All developers vote on PEPs 
+2/16/15     2/16/15   Module Approval   BDFL announces final approval
+2/17/15     6/30/15   Development       Implementation and testing of approved modules
+7/1/15      7/27/15   Code Freeze       APIs fixed, bug and testing changes only
+7/23/15     7/30/15   Release Prep      Test release builds, updating svn 
+7/31/15     7/31/15   Release           Official release of 1.10
+========   ========   ================= ====================================================
+
+
+1.11 Cycle
+==========
+
+========   ========   ================= ====================================================
+Start      End        Phase             Notes
+========   ========   ================= ====================================================
+8/1/15      8/14/15   Module Proposals  Developers draft PEPs and prototype
+8/15/15     8/15/15   Developer vote    All developers vote on PEPs 
+8/16/15     8/16/15   Module Approval   BDFL announces final approval
+8/17/15    12/30/15   Development       Implementation and testing of approved modules
+1/1/16       1/1/16   Code Freeze       APIs fixed, bug and testing changes only
+1/23/16     1/30/16   Release Prep      Test release builds, updating svn 
+1/31/16     1/31/16   Release           Official release of 1.11
+========   ========   ================= ====================================================
+
+
+
+
+
+
+-----------------------
+Governance
+-----------------------
+
+PySAL is organized around the Benevolent Dictator for Life (BDFL) model of project management.
+The BDFL is responsible for overall project management and direction. Developers have a critical role in shaping that
+direction. Specific roles and rights are as follows:
+
+=========   ================        ===================================================
+Title       Role                    Rights
+=========   ================        ===================================================
+BDFL        Project Director        Commit, Voting, Veto, Developer Approval/Management
+Developer   Development             Commit, Voting
+=========   ================        ===================================================
+
+-----------------------
+Voting and PEPs
+-----------------------
+
+During the initial phase of a release cycle, new functionality for PySAL should be described in a PySAL Enhancement
+Proposal (PEP). These should follow the
+`standard format  <http://www.python.org/dev/peps/pep-0009/>`_
+used by the Python project. For PySAL, the PEP process is as follows
+
+#. Developer prepares a plain text PEP following the guidelines
+
+#. Developer sends PEP to the BDFL
+
+#. Developer posts PEP to the PEP index
+
+#. All developers consider the PEP and vote
+
+#. PEPs receiving a majority approval become priorities for the release cycle
+
+
+
diff --git a/doc/source/developers/index.rst b/doc/source/developers/index.rst
new file mode 100644
index 0000000..3fb20dd
--- /dev/null
+++ b/doc/source/developers/index.rst
@@ -0,0 +1,18 @@
+===============
+Developer Guide
+===============
+
+Go to our issues queue on `GitHub NOW!
+<http://github.com/pysal/pysal/issues?state=open>`_
+
+.. toctree::
+   :maxdepth: 1
+
+   guidelines
+   testing
+   pep/index
+   docs/index
+   release
+   py3k
+   projects
+   known-issues
diff --git a/doc/source/developers/known-issues.rst b/doc/source/developers/known-issues.rst
new file mode 100644
index 0000000..0f1ec27
--- /dev/null
+++ b/doc/source/developers/known-issues.rst
@@ -0,0 +1,42 @@
+
+Known Issues
+============
+
+1.5 install fails with scipy 11.0 on Mac OS X
+---------------------------------------------
+
+Running `python setup.py install` results in::
+
+	from _cephes import *
+	ImportError:
+	dlopen(/Users/serge/Documents/p/pysal/virtualenvs/python1.5/lib/python2.7/site-packages/scipy/special/_cephes.so,
+	2): Symbol not found: _aswfa_
+	  Referenced from:
+	  /Users/serge/Documents/p/pysal/virtualenvs/python1.5/lib/python2.7/site-packages/scipy/special/_cephes.so
+	    Expected in: dynamic lookup
+
+This occurs when your scipy on Mac OS X was compiled with gnu95 and not
+gfortran.  See `this thread <http://mail.scipy.org/pipermail/scipy-user/2010-November/027548.html>`_ for possible solutions.
+
+weights.DistanceBand failing
+----------------------------
+
+This occurs due to a bug in scipy.sparse prior to version 0.8. If you are running such a version see `Issue 73 <http://code.google.com/p/pysal/issues/detail?id=73&sort=milestone>`_ for a fix.
+
+doc tests and unit tests under Linux
+------------------------------------
+
+Some Linux machines return different results for the unit and doc tests. We suspect this has to do with the way random seeds are set. See `Issue 52 <http://code.google.com/p/pysal/issues/detail?id=52&sort=milestone>`_
+
+LISA Markov missing a transpose
+-------------------------------
+In versions of PySAL < 1.1 there is a bug in the LISA Markov, resulting in
+incorrect values. For a fix and more details see `Issue 115 <http://code.google.com/p/pysal/issues/detail?id=115>`_.
+
+
+PIP Install Fails
+-----------------
+
+
+Having numpy and scipy specified in pip requirements.txt causes PIP install of pysal to fail. For discussion and suggested fixes see `Issue 207 <http://code.google.com/p/pysal/issues/detail?id=207&sort=milestone>`_.
+
diff --git a/doc/source/developers/pep/index.rst b/doc/source/developers/pep/index.rst
new file mode 100644
index 0000000..a8d6489
--- /dev/null
+++ b/doc/source/developers/pep/index.rst
@@ -0,0 +1,18 @@
+*********************************
+PySAL Enhancement Proposals (PEP)
+*********************************
+
+.. toctree::
+   :maxdepth: 1 
+
+   pep-0001
+   pep-0002
+   pep-0003
+   pep-0004
+   pep-0005
+   pep-0006
+   pep-0007
+   pep-0008
+   pep-0009
+   pep-0010
+   pep-0011
diff --git a/doc/source/developers/pep/pep-0001.rst b/doc/source/developers/pep/pep-0001.rst
new file mode 100644
index 0000000..36581df
--- /dev/null
+++ b/doc/source/developers/pep/pep-0001.rst
@@ -0,0 +1,74 @@
+********************************
+PEP 0001 Spatial Dynamics Module
+********************************
+
+========  =================================
+Author    Serge Rey <sjsrey at gmail.com>,
+          Xinyue Ye <xinyue.ye at gmail.com>
+Status    Approved 1.0
+Created   18-Jan-2010
+Updated   09-Feb-2010
+========  =================================
+
+Abstract
+========
+
+With the increasing availability of spatial longitudinal data sets there
+is a growing demand for exploratory methods that integrate both the
+spatial and temporal dimensions of the data. The spatial dynamics
+module combines a number of previously developed and to-be-developed
+classes for the analysis of spatial dynamics. It will include classes
+for the following statistics for spatial dynamics, Markov, spatial
+Markov, rank mobility, spatial rank mobility, space-time LISA.
+
+Motivation
+==========
+
+Rather than having each of the spatial dynamics as separate modules in
+PySAL, it makes sense to move them all within the same module. This would
+facilitate common signatures for constructors and similar forms of data
+structures for space-time analysis (and generation of results).
+
+The module would implement some of the ideas for extending LISA statistics
+to a dynamic context ([Anselin2000]_ [ReyJanikas2006]_),
+and recent work developing empirics and summary
+measures for comparative space time analysis ([ReyYe2010]_).
+
+
+Reference Implementation
+========================
+
+We suggest adding the module ``pysal.spatialdynamics`` which in turn would
+encompass the following modules:
+
+* rank mobility
+  rank concordance (relative mobility or internal mixing) 
+  Kendall's index
+  
+* spatial rank mobility 
+  add a spatial dimension into rank mobility investigate the extent to
+  which the relative mobility is spatially dependent 
+  use various types of spatial weight matrix
+
+* Markov 
+  empirical transition probability matrix (mobility across class)
+  Shorrock's index
+  
+* Spatial Markov
+  adds a spatial dimension (regional conditioning) into classic Markov models
+  a trace statistic from a modified Markov transition matrix
+  investigate the extent to which the inter-class mobility are spatially dependent
+  
+* Space-Time LISA 
+  extends LISA measures to integrate the time dimension
+  combined with cg (computational geometry) module to develop comparative measurements
+
+References
+==========
+
+.. [Anselin2000] Anselin, Luc (2000) Computing environments for spatial data analysis. *Journal of Geographical Systems* 2: 201-220
+
+.. [ReyJanikas2006] Rey, S.J. and M.V. Janikas (2006) STARS: Space-Time Analysis of Regional Systems, *Geographical Analysis* 38: 67-86.
+
+.. [ReyYe2010] Rey, S.J. and X. Ye (2010) Comparative spatial dynamics of regional systems. In Paez, A. et al. (eds) *Progress in Spatial Analysis: Methods and Applications*. Springer: Berlin, 441-463.
+
diff --git a/doc/source/developers/pep/pep-0002.rst b/doc/source/developers/pep/pep-0002.rst
new file mode 100644
index 0000000..d2d19b2
--- /dev/null
+++ b/doc/source/developers/pep/pep-0002.rst
@@ -0,0 +1,56 @@
+***************************************
+PEP 0002 Residential Segregation Module
+***************************************
+
+========  =================================
+Author    David C. Folch <david.folch at asu.edu>
+          Serge Rey <srey at asu.edu>
+Status    Draft
+Created   10-Feb-2010
+Updated   
+========  =================================
+
+
+Abstract
+========
+
+The segregation module combines a number of previously developed and
+to-be-developed measures for the analysis of residential segregation. It will
+include classes for two-group and multi-group aspatial (classic) segregation
+indices along with their spatialized counterparts.  Local segregation indices
+will also be included.
+
+
+Motivation
+==========
+
+The study of residential segregation continues to be a popular field in
+empirical social science and public policy development.  While some of the
+classic measures are relatively simple to implement, the spatial versions are
+not nearly as straightforward for the average user.  Furthermore, there does
+not appear to be a Python implementation of residential segregation measures
+currently available.  There is a standalone C#.Net GUI implementation
+(http://www.ucs.inrs.ca/inc/Groupes/LASER/Segregation.zip) containing many of
+the measures to be implemented via this PEP but this is Windows only and I could
+not get it to run easily (it is not open source but the author sent me the
+code). 
+
+It has been noted that there is no one-size-fits-all segregation index;
+however, some are clearly more popular than others.  This module would bring
+together a wide variety of measures to allow users to easily compare the
+results from different indices.  
+
+
+Reference Implementation
+========================
+
+We suggest adding the module ``pysal.segregation`` which in turn would
+encompass the following modules:
+
+* globalSeg
+* localSeg
+
+
+References
+==========
+
diff --git a/doc/source/developers/pep/pep-0003.rst b/doc/source/developers/pep/pep-0003.rst
new file mode 100644
index 0000000..3d677d7
--- /dev/null
+++ b/doc/source/developers/pep/pep-0003.rst
@@ -0,0 +1,55 @@
+*********************************
+PEP 0003 Spatial Smoothing Module
+*********************************
+
+========  =================================
+Author    Myunghwa Hwang <mhwang4 at gmail.com>
+          Luc Anselin <luc.anselin at asu.edu>
+          Serge Rey <srey at asu.edu>
+Status    Approved 1.0
+Created   11-Feb-2010
+Updated   
+========  =================================
+
+
+Abstract
+========
+
+Spatial smoothing techniques aim to adjust problems with applying simple 
+normalization to rate computation. Geographic studies of disease widely 
+adopt these techniques to better summarize spatial patterns of disease occurrences.
+The smoothing module combines a number of previously developed and to-be-developed 
+classes for carrying out spatial smoothing. It will include classes for 
+the following techniques: mean and median based smoothing, nonparametric smoothing, 
+and empirical Bayes smoothing.
+
+
+Motivation
+==========
+
+Despite wide usage of spatial smoothing techniques in epidemiology, 
+there are only few software libraries that include a range of different smoothing 
+techniques at one place. 
+Since spatial smoothing is a subtype of exploratory data analysis method, 
+PySAL is the best place to host multiple smoothing techniques. 
+
+The smoothing module will mainly implement the techniques reported in [Anselin2006].
+
+
+Reference Implementation
+========================
+
+We suggest adding the module ``pysal.esda.smoothing`` which in turn would
+encompass the following modules:
+
+* locally weighted averages, locally weighted median, headbanging
+* spatial rate smoothing
+* excess risk, empirical Bayes smoothing, spatial empirical Bayes smoothing
+* headbanging
+
+
+References
+==========
+
+[Anselin2006] Anselin, L., N. Lozano, and J. Koschinsky (2006) Rate Transformations and Smoothing, GeoDa Center Research Report.
+
diff --git a/doc/source/developers/pep/pep-0004.rst b/doc/source/developers/pep/pep-0004.rst
new file mode 100644
index 0000000..4533e0f
--- /dev/null
+++ b/doc/source/developers/pep/pep-0004.rst
@@ -0,0 +1,56 @@
+**********************************************************************
+PEP 0004 Geographically Nested Inequality based on the Geary Statistic
+**********************************************************************
+
+========  =================================
+Author    Boris Dev <boris.dev at gmail.com>
+          Charles Schmidt <schmidtc at gmail.com>
+Status    Draft
+Created   9-Aug-2010
+Updated   
+========  =================================
+
+
+Abstract
+========
+
+ 
+I propose to extend the Geary statistic to describe inequality
+patterns between people in the same geographic zones. Geographically nested
+associations can be represented with a spatial weights matrix defined jointly
+using both geographic and social positions. The key class in the proposed
+geographically nested inequality module would sub-class from class
+``pysal.esda.geary`` with 2 extensions: 1) as an additional argument, an array
+of regimes to represent social space; and 2) for the output, spatially nested
+randomizations will be performed for pseudo-significance tests.  
+
+Motivation
+==========
+
+Geographically nested measures may reveal inequality patterns that are masked
+by conventional aggregate approaches. Aggregate human inequality statistics
+summarize the size of the gaps in variables such as mortality rate or income
+level between different groups of people. A geographically nested
+measure is computed using only a pairwise subset of the values defined by
+common location in the same geographic zone.  For example, this type of
+measure was proposed in my dissertation to assess changes in income inequality
+between nearby blocks of different school attendance zones or different racial
+neighborhoods within the same cities. Since there are no standard statistical
+packages to do this sort of analysis, currently such a pairwise approach to
+inequality analysis across many geographic zones is tedious for researchers
+who are non-hackers. Since it will take advantage of the currently existing
+``pysal.esda.geary`` and ``pysal.weights.regime_weights()``, the proposed
+module should be readable for hackers. 
+  
+Reference Implementation
+========================
+
+I suggest adding the module ``pysal.inequality.nested``.
+
+
+References
+==========
+
+[Dev2010] Dev, B. (2010) "Assessing Inequality using Geographic Income Distributions: Spatial Data Analysis of States,
+Neighborhoods, and School Attendance Zones" http://dl.dropbox.com/u/408103/dissertation.pdf.
+
diff --git a/doc/source/developers/pep/pep-0005.rst b/doc/source/developers/pep/pep-0005.rst
new file mode 100644
index 0000000..a8c69ea
--- /dev/null
+++ b/doc/source/developers/pep/pep-0005.rst
@@ -0,0 +1,56 @@
+*************************************************
+PEP 0005 Space Time Event Clustering Module
+*************************************************
+
+========  ======================================
+Author    Nicholas Malizia <nmalizia at gmail.com>,
+          Serge Rey <sjsrey at gmail.com>
+Status    Approved 1.1
+Created   13-Jul-2010
+Updated   06-Oct-2010
+========  ======================================
+
+Abstract
+========
+
+The space-time event clustering module will be an addition (in the form of a sub-module) to the spatial dynamics module. The purpose of this module will be to house all methods concerned with identifying clusters within spatio-temporal event data. The module will include classes for the major methods for spatio-temporal event clustering, including: the Knox, Mantel, Jacquez k Nearest Neighbors, and the Space-Time K Function. Although these methods are tests of global spatio-temporal clus [...]
+
+Motivation
+==========
+
+While the methods of the parent module are concerned with the dynamics of aggregate lattice-based data, the methods encompassed in this sub-module will focus on exploring the dynamics of individual events. The methods suggested here have historically been utilized by researchers looking for clusters of events in the fields of epidemiology and criminology. Currently, the methods presented here are not widely implemented in an open source context. Although the Knox, Mantel, and Jacquez met [...]
+
+Reference Implementation
+========================
+
+We suggest adding the module ``pysal.spatialdynamics.events`` which in turn would encompass the following modules:
+
+Knox
+    The Knox test for space-time interaction sets critical distances in space and time; if the data are clustered, numerous pairs of events will be located within both of these critical distances and the test statistic will be large [3]_. Significance will be established using a Monte Carlo method. This means that either the time stamp or location of the events is scrambled and the statistic is calculated again. This procedure is permuted to generate a distribution of statistics (for the [...]
+
+Mantel
+    Akin to the Knox test in its simplicity, the Mantel test keeps the distance information discarded by the Knox test. The Mantel statistic is calculated by summing the product of the distances between all the pairs of events [4]_. Again, significance will be determined through a Monte Carlo approach. 
+
+Jacquez
+    This test tallies the number of event pairs that are within k-nearest neighbors of each other in *both* space and time. Significance of this count is established using a Monte Carlo permutation method [5]_. Again, the permutation is done by randomizing either the time or location of the events and then running the statistic again. The test should be implemented with the additional descriptives as suggested by [6]_.
+
+SpaceTimeK
+    The space-time K function takes the K function which has been used to detect clustering in spatial point patterns and expands it to the realm of spatio-temporal data. Essentially, the method calculates K functions in space and time independently and then compares the product of these functions with a K function which takes both dimensions of space and time into account from the start [7]_. Significance is established through Monte Carlo methods and the construction of confidence envelopes.
+
+References
+==========
+
+.. [1] G. Jacquez, D. Greiling, H. Durbeck, L. Estberg, E. Do, A. Long, and B. Rommel. ClusterSeer User Guide 2: Software for Identifying Disease Clusters. Ann Arbor, MI: TerraSeer Press, 2002.
+
+.. [2] B. Rowlingson and P. Diggle. splancs: Spatial and Space-Time Point Pattern Analysis. R Package. Version 2.01-25, 2009.
+
+.. [3] E. Knox. The detection of space-time interactions. Journal of the Royal Statistical Society. Series C (Applied Statistics), 13(1):25–30, 1964.
+
+.. [4] N. Mantel. The detection of disease clustering and a generalized regression approach. Cancer Research, 27(2):209–220, 1967.
+
+.. [5] G. Jacquez. A k nearest neighbour test for space-time interaction. Statistics in Medicine, 15(18):1935– 1949, 1996.
+
+.. [6] E. Mack and N. Malizia. Enhancing the results of the Jacquez *k* Nearest Neighbor test for space-time interaction. *In Preparation*
+
+.. [7] P. Diggle, A. Chetwynd, R. Haggkvist, and S. Morris. Second-order analysis of space-time clustering. Statistical Methods in Medical Research, 4(2):124, 1995.
+
diff --git a/doc/source/developers/pep/pep-0006.rst b/doc/source/developers/pep/pep-0006.rst
new file mode 100644
index 0000000..9959b97
--- /dev/null
+++ b/doc/source/developers/pep/pep-0006.rst
@@ -0,0 +1,60 @@
+**********************************
+PEP 0006 Kernel Density Estimation
+**********************************
+
+========  ======================================
+Author    Serge Rey <sjsrey at gmail.com>
+          Charles Schmidt <schmidtc at gmail.com>
+ 
+Status    Draft
+Created   11-Oct-2010
+Updated   11-Oct-2010
+========  ======================================
+
+Abstract
+========
+
+The kernel density estimation module will provide a uniform interface to a set
+of kernel density estimation (KDE) methods. Currently KDE is used in various places
+within PySAL
+(e.g., :class:`~pysal.weights.Distance.Kernel`,
+:class:`~pysal.esda.smoothing.Kernel_Smoother`) as well as in STARS and
+various projects within the GeoDA Center, but these implementations were done
+separately. This module would centralize KDE within PySAL as well as extend
+the suite of KDE methods and related measures available in PySAL.
+
+Motivation
+==========
+
+KDE is widely used throughout spatial analysis, from estimation of process
+intensity in point pattern analysis, deriving spatial weights, geographically
+weighted regression, rate smoothing, to hot spot detection, among others. 
+
+
+Reference Implementation
+========================
+
+
+Since KDE would be used throughout existing (and likely future) modules in
+PySAL, it makes sense to implement it as a top level module in PySAL.
+
+Core KDE methods that would be implemented include:
+
+   * triangular
+   * uniform
+   * quadratic
+   * quartic
+   * gaussian
+
+Additional classes and methods to deal with KDE on restricted spaces would
+also be implemented.
+
+A unified KDE api would be developed for use of the module.
+
+Computational optimization would form a significant component of the effort
+for this PEP.
+
+References
+==========
+
+in progress
diff --git a/doc/source/developers/pep/pep-0007.rst b/doc/source/developers/pep/pep-0007.rst
new file mode 100644
index 0000000..021896b
--- /dev/null
+++ b/doc/source/developers/pep/pep-0007.rst
@@ -0,0 +1,76 @@
+*****************************
+PEP 0007 Spatial Econometrics
+*****************************
+
+========  ======================================
+Author    Luc Anselin <luc.anselin at asu.edu>
+          Serge Rey <sjsrey at gmail.com>,David Folch <dfolch at asu.edu>,Daniel Arribas-Bel <daniel.arribas.bel at gmail.com>,Pedro Amaral <pvmda2 at cam.ac.uk>,Nicholas Malizia <nmalizia at gmail.com>,Ran Wei <rwei5 at asu.edu>,Jing Yao <jyao13 at asu.edu>,Elizabeth Mack <Elizabeth.A.Mack at asu.edu>
+Status    Approved 1.1
+Created   12-Oct-2010
+Updated   12-Oct-2010
+========  ======================================
+
+Abstract
+========
+
+The spatial econometrics module will provide a uniform interface to the
+spatial econometric functionality contained in the former PySpace and
+current GeoDaSpace efforts. This module would centralize all specification,
+estimation, diagnostic testing and prediction/simulation for spatial
+econometric models.
+
+
+Motivation
+==========
+
+Spatial econometric methodology is at the core of GeoDa and 
+GeoDaSpace. This module would allow access to state of the art
+methods at the source code level.
+
+
+Reference Implementation
+========================
+
+We suggest adding the module ``pysal.spreg``. As development
+progresses, there may be a need for submodules dealing with
+pure cross sectional regression, spatial panel models and spatial probit.
+
+Core methods to be implemented include:
+
+   * OLS estimation with diagnostics for spatial effects
+   * 2SLS estimation with diagnostics for spatial effects
+   * spatial 2SLS for spatial lag model (with endogeneity)
+   * GM and GMM estimation for spatial error model
+   * GMM spatial error with heteroskedasticity
+   * spatial HAC estimation
+
+A significant component of the effort for this PEP would consist of
+implementing methods with good performance on very large data
+sets, exploiting sparse matrix operations in scipy.
+
+References
+==========
+
+[1] Anselin, L. (1988). Spatial Econometrics, Methods and Models. Kluwer, Dordrecht.
+
+[2] Anselin, L. (2006). Spatial econometrics. In Mills, T. and Patterson, K., editors,
+     Palgrave Handbook of Econometrics, Volume I, Econometric Theory, pp. 901-969.
+     Palgrave Macmillan, Basingstoke.
+     
+[3] Arraiz, I., Drukker, D., Kelejian H.H., and Prucha, I.R. (2010). A spatial Cliff-Ord-type
+     model with heteroskedastic innovations: small and large sample results.
+     Journal of Regional Science 50: 592-614.
+     
+[4] Kelejian, H.H. and Prucha, I.R. (1998). A generalized spatial two stage least squares
+     procedure for estimating a spatial autoregressive model with autoregressive
+     disturbances. Journal of Real Estate Finance and Economics 17: 99-121.
+     
+[5] Kelejian, H.H. and Prucha, I.R. (1999). A generalized moments estimator  for the
+     autoregressive parameter in a spatial model. International Economic Review 40: 509-533.
+     
+[6] Kelejian, H.H. and Prucha, I.R. (2007). HAC estimation in a spatial framework.
+     Journal of Econometrics 140: 131-154.
+     
+[7] Kelejian, H.H. and Prucha, I.R. (2010). Specification and estimation of spatial autoregressive
+     models with autoregressive and heteroskedastic disturbances. Journal of Econometrics
+     (forthcoming).
diff --git a/doc/source/developers/pep/pep-0008.rst b/doc/source/developers/pep/pep-0008.rst
new file mode 100644
index 0000000..00ddfba
--- /dev/null
+++ b/doc/source/developers/pep/pep-0008.rst
@@ -0,0 +1,55 @@
+**************************************************
+PEP 0008 Spatial Database Module
+**************************************************
+
+========  =======================================
+Author    Phil Stephens <phil.stphns at gmail.com>,
+          Serge Rey <sjsrey at gmail.com>
+Status    Draft
+Created   09-Sep-2010
+Updated   31-Aug-2012
+========  =======================================
+
+Abstract
+========
+
+A spatial database module will extend PySAL file I/O capabilities to spatial
+database software, allowing PySAL
+users to connect to and perform geographic lookups and queries on spatial
+databases.
+
+Motivation
+==========
+
+PySAL currently reads and writes geometry in only the Shapefile data
+structure. 
+Spatially-indexed databases permit queries on the
+geometric relations between objects [1]_.
+
+Reference Implementation
+========================
+
+We propose to add the module ``pysal.contrib.spatialdb``, hereafter
+referred to simply as spatialdb. 
+spatialdb will leverage the Python Object Relational Mapper (ORM) libraries SQLAlchemy [2]_ and GeoAlchemy [3]_, MIT-licensed software that
+provides a database-agnostic SQL layer for several different databases and
+spatial database extensions 
+including PostgreSQL/PostGIS, Oracle Spatial, Spatialite, MS SQL Server, MySQL Spatial, and others.
+These lightweight libraries manage database connections, transactions, and SQL expression translation.  
+
+Another option to research is the GeoDjango package. It provides a large
+number of spatial lookups [5]_ and geo queries for PostGIS databases, and a
+smaller set of lookups / queries for Oracle, MySQL, and SpatiaLite.
+
+References
+==========
+
+.. [1] OpenGeo (2010) `Spatial Database Tips and Tricks <http://workshops.opengeo.org/postgis-spatialdbtips/introduction.html>`_.   Accessed September 9, 2010.  
+
+.. [2] SQLAlchemy (2010) `SQLAlchemy 0.6.5 Documentation <http://www.sqlalchemy.org/docs/orm/index.html>`_.  Accessed October 4, 2010.
+
+.. [3] GeoAlchemy (2010) `GeoAlchemy 0.4.1 Documentation <http://geoalchemy.org/index.html>`_. Accessed October 4, 2010.
+
+.. [4] GeoAlchemy (2012) `GeoAlchemy on GitHub <https://github.com/geoalchemy/geoalchemy>`_. Accessed August 9, 2012.
+
+.. [5] GeoDjango (2012) `GeoDjango Compatibility Tables <http://docs.djangoproject.com/en/dev/ref/contrib/gis/db-api>`_. Accessed August 31, 2012.
diff --git a/doc/source/developers/pep/pep-0009.rst b/doc/source/developers/pep/pep-0009.rst
new file mode 100644
index 0000000..129c06a
--- /dev/null
+++ b/doc/source/developers/pep/pep-0009.rst
@@ -0,0 +1,57 @@
+*******************************
+PEP 0009 Add Python 3.x Support
+*******************************
+
+========  =================================
+Author    Charles Schmidt <schmidtc at gmail.com>
+Status    Approved 1.2
+Created   02-Feb-2011
+Updated   02-Feb-2011
+========  =================================
+
+Abstract
+========
+
+Python 2.x is being phased out in favor of the backwards incompatible Python 3
+line.  In order to stay relevant to the python community as a whole PySAL needs
+to support the latest production releases of Python. With the release of Numpy
+1.5 and the pending release of SciPy 0.9, all PySAL dependencies support Python
+3. This PEP proposes porting the code base to support both the 2.x and 3.x lines
+of Python.
+
+Motivation
+==========
+
+Python 2.7 is the final major release in the 2.x line.  The Python 2.x line will
+continue to receive bug fixes, however only the 3.x line will receive new
+features ([Python271]_).  Python 3.x introduces many backward incompatible
+changes to Python ([PythonNewIn3]_).  Numpy added support for Python 3.0 in
+version 1.5 ([NumpyANN150]_).  Scipy 0.9.0 is currently in the release candidate
+stage and supports Python 3.0 ([SciPyRoadmap]_, [SciPyANN090rc2]_).  Many of the
+new features in Python 2.7 were back ported from 3.0, allowing us to start using
+some of the new feature of the language without abandoning our 2.x users.
+
+Reference Implementation
+========================
+
+Since python 2.6 the interpreter has included a '-3' command line switch to
+"warn about Python 3.x incompatibilities that 2to3 cannot trivially fix"
+([Python2to3]_).  Running PySAL tests with this switch produces no warnings
+internal to PySAL.  This suggests porting to 3.x will require only trivial
+changes to the code. A porting strategy is provided by [PythonNewIn3]_.
+
+References
+==========
+
+.. [Python271] http://www.python.org/download/releases/2.7.1/
+
+.. [PythonNewIn3] http://docs.python.org/release/3.0.1/whatsnew/3.0.html
+
+.. [Python2to3] http://docs.python.org/release/3.0.1/library/2to3.html#2to3-reference
+
+.. [NumpyANN150] http://mail.scipy.org/pipermail/numpy-discussion/2010-August/052522.html
+
+.. [SciPyRoadmap] http://projects.scipy.org/scipy/roadmap#python-3
+
+.. [SciPyANN090rc2] http://mail.scipy.org/pipermail/scipy-dev/2011-January/015927.html
+
diff --git a/doc/source/developers/pep/pep-0010.rst b/doc/source/developers/pep/pep-0010.rst
new file mode 100644
index 0000000..f66c5b3
--- /dev/null
+++ b/doc/source/developers/pep/pep-0010.rst
@@ -0,0 +1,46 @@
+*******************************
+PEP 0010 Add pure Python rtree
+*******************************
+
+========  ============================
+Author    Serge Rey <sjsrey at gmail.com>
+Status    Approved 1.2
+Created   12-Feb-2011
+Updated   12-Feb-2011
+========  ============================
+
+Abstract
+========
+
+A pure Python implementation of an Rtree will be developed for use in the
+construction of spatial weights matrices based on contiguity relations in
+shapefiles as well as supporting a spatial index that can be used by GUI based
+applications built with PySAL requiring brushing and linking.
+
+
+Motivation
+==========
+
+As of 1.1 PySAL checks if the external library ([Rtree]_) is installed. If it
+is not, then an internal binning algorithm is used to determine contiguity
+relations in shapefiles for the construction of certain spatial weights.  A
+pure Python implementation of Rtrees may provide for improved cross-platform
+efficiency when the external Rtree library is not present. At the same time,
+such an implementation can be relied on by application developers using PySAL
+who wish to build visualization applications supporting brushing, linking and
+other interactions requiring spatial indices for object selection.
+
+
+Reference Implementation
+========================
+
+A pure Python implementation of Rtrees has recently been implemented ([pyrtree]_) and
+is undergoing testing for possible inclusion in PySAL. It appears that this
+module can be integrated into PySAL with modest effort.
+
+References
+==========
+
+.. [Rtree] http://pypi.python.org/pypi/Rtree/
+
+.. [pyrtree] http://code.google.com/p/pyrtree/
diff --git a/doc/source/developers/pep/pep-0011.rst b/doc/source/developers/pep/pep-0011.rst
new file mode 100644
index 0000000..aca7719
--- /dev/null
+++ b/doc/source/developers/pep/pep-0011.rst
@@ -0,0 +1,120 @@
+****************************************
+PEP 0011 Move from Google Code to Github
+****************************************
+
+========  ============================
+Author    Serge Rey <sjsrey at gmail.com>
+Status    Draft
+Created   04-Aug-2012
+Updated   04-Aug-2012
+========  ============================
+
+Abstract
+========
+
+This proposal is to move the PySAL code repository from Google Code to Github.
+
+
+Motivation
+==========
+
+`Git`_ is a decentralized version control system that
+brings a number of benefits:
+
+ - distributed development
+ - off-line development
+ - elegant and lightweight branching
+ - fast operations
+ - flexible workflows
+
+among `many others.`_
+
+The two main PySAL dependencies, SciPy and NumPy, made the switch to GitHub
+roughly two years ago. In discussions with members of those development teams
+and related projects (pandas, statsmodels) it is clear that git is gaining
+widespread adoption in the Python scientific computing community. By moving to
+git and GitHub, PySAL would benefit by facilitating interaction with developers in this
+community. Discussions with developers at SciPy 2012 indicated that all
+projects experienced significant growth in community involvement after the
+move to Github. Other projects considering such a move have been `discussing`_ similar issues.
+
+Moving to GitHub would also streamline the administration of project updates,
+documentation and related tasks. The Google Code infrastructure requires
+updates in multiple locations which results in either additional work, or
+neglected changes during releases. GitHub understands markdown and reStructured
+text formats, the latter is heavily used in PySAL documentation and the former
+is clearly preferred to wiki markup on Google Code.
+
+Although there is a learning curve to Git, it is relatively minor for
+developers familiar with Subversion, as all PySAL developers are. Moreover,
+several of the developers have been using Git and GitHub for other projects
+and have expressed interest in such a move. There are excellent on-line
+resources for learning more about git, such as this
+`book.`_
+
+Reference Implementation
+========================
+
+Moving code and history
+-----------------------
+
+There are utilities, such as
+`svn2git`_ 
+that can be used to convert an SVN repo to a git repo.
+
+The converted git repo would then be pushed to a `GitHub`_ account.
+
+Setting up post-(commit|push|pull) hooks
+----------------------------------------
+Migration of the current integration testing will be required. Github has
+support for `Post-Receive Hooks`_
+that can be used for this aspect of the migration.
+
+
+Moving issues tracking over
+---------------------------
+
+A decision about whether to move the issue tracking over to Github will have
+to be considered. This has been handled in different ways:
+
+  - keep using Google Code for issue tracking
+  - move all issues (even closed ones) over to Github
+  - freeze tickets at Google Code and have a breadcrumb for active tickets
+    pointing to issue tracker at Github
+
+If we decide to move the issues over we may look at 
+`tratihubus`_
+as well as other possibilities.
+
+Continuous integration with travis-ci
+-------------------------------------
+
+`Travis-CI`_ is a hosted Continuous Integration (CI) service that is integrated
+with GitHub. This sponsored service provides:
+
+  - testing with multiple versions of Python
+  - testing with multiple versions of project dependencies (numpy and scipy)
+  - `build history`_
+  - integrated GitHub commit hooks
+  - testing against multiple `database services`_
+
+Configuration is achieved with a single YAML file, reducing development
+overhead, maintenance, and monitoring.
+
+Code Sprint for GitHub migration
+--------------------------------
+The proposal is to organize a future sprint to focus on this migration.
+
+
+
+.. _discussing: http://groups.google.com/group/networkx-devel/browse_thread/thread/6b82286cdd5e434a
+.. _tratihubus: https://GitHub.com/roskakori/tratihubis
+.. _Git: http://git-scm.com
+.. _many others.: http://www.youtube.com/watch?v=4XpnKHJAok8
+.. _GitHub: https://GitHub.com
+.. _svn2git: https://help.GitHub.com/articles/importing-from-subversion
+.. _Post-Receive Hooks: https://help.GitHub.com/articles/post-receive-hooks/
+.. _book.: http://git-scm.com/book
+.. _Travis-CI: http://travis-ci.org/
+.. _build history: http://travis-ci.org/#!/zendframework/zf2
+.. _database services: http://about.travis-ci.org/docs/user/database-setup/
diff --git a/doc/source/developers/projects.rst b/doc/source/developers/projects.rst
new file mode 100644
index 0000000..742885f
--- /dev/null
+++ b/doc/source/developers/projects.rst
@@ -0,0 +1,25 @@
+.. _projectsusingpysal:
+
+=========================
+Projects Using PySAL 
+=========================
+
+This page lists other software projects making use of PySAL.  If your project is
+not listed here, contact one of the `team members <http://code.google.com/p/pysal/people/list>`_ and we'll add it. 
+
+----------------------
+GeoDa Center Projects
+----------------------
+
+  * `GeoDaNet <http://geodacenter.asu.edu/software>`_
+  * `GeoDaSpace <http://geodacenter.asu.edu/software>`_
+  * `GeoDaWeights <http://geodacenter.asu.edu/software>`_
+  * `STARS <http://geodacenter.asu.edu/software>`_
+
+
+-----------------
+Related Projects
+-----------------
+  * `Anaconda <http://continuum.io/downloads>`_
+  * `StatsModels <http://statsmodels.sourceforge.net/related.html#related>`_
+  * `PythonAnywhere <http://pythonanywhere.com>`_ includes latest PySAL release
diff --git a/doc/source/developers/py3k.rst b/doc/source/developers/py3k.rst
new file mode 100644
index 0000000..057265f
--- /dev/null
+++ b/doc/source/developers/py3k.rst
@@ -0,0 +1,111 @@
+=================
+PySAL and Python3
+=================
+.. contents::
+
+Background
+----------
+
+PySAL Enhancement Proposal #9 was approved February 2, 2011. It called for
+adapting the code base to support both Python 2.x and 3.x releases. 
+
+Setting up for development
+--------------------------
+
+First install `Python3 <http://python.org/download/releases/3.2.2/>`_.
+Once Python3 is installed, you have the choice of downloading the following
+files as pure source code from PyPi and running "python3 setup.py install" for
+each, or follow the instructions below to setup useful helpers: easy_install and
+pip.
+
+To get setuptools and pip, first get distribute from PyPi::
+ 
+ curl -O http://python-distribute.org/distribute_setup.py
+ python3 distribute_setup.py
+ # Now you have easy_install
+ # It may be useful to setup an alias to this version of easy_install in your shell profile
+ alias easy_install3='/Library/Frameworks/Python.framework/Versions/3.2/bin/easy_install'
+
+After distribute is installed, get pip::
+
+  curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py
+  python3 get-pip.py
+  # It may be useful to setup an alias to this version of pip in your shell profile
+  alias pip3='/Library/Frameworks/Python.framework/Versions/3.2/bin/pip'
+
+
+NumPy and SciPy require extensive refactoring on installation. We recommend
+`downloading the source code <http://new.scipy.org/download.html>`_, unzipping,
+and running::
+
+  cd numpy<dir>
+  python3 setup.py install 
+  # If all looks good, cd outside of the source directory, and verify import 
+  cd
+  python3 -c 'import numpy'
+
+Be sure to install NumPy first since SciPy depends on it. Now install SciPy in
+the same manner::
+
+  cd scipy<dir>
+  python3 setup.py install
+  # After extensive building, if all looks good, cd outside of the source directory, and verify import 
+  cd
+  python3 -c 'import scipy'
+
+Post any installation-related issues to the pysal-dev mailing list. 
+If python complains about not finding gcc-4.2, and you're sure it is installed,
+(run "gcc --version" to verify), you may create an alias to satisfy this::
+
+  cd /usr/bin/ 
+  sudo ln -s gcc  gcc-4.2
+
+
+Now for PySAL. Get the bleeding edge repository version of PySAL and pass in
+this call::
+ 
+ cd pysal/trunk
+ python3 setup.py install
+
+You'll be able to watch the dynamic refactoring taking place. If all goes well,
+PySAL will be installed into your Python3 site-packages directory. Confirm
+success with:: 
+
+  cd
+  python3 -c 'import pysal; pysal.open.check()'
+
+
+Optional Installations
+-----------------------
+
+Now that you have pip, get iPython::
+
+ # Use pip from the Python3 distribution on your system, or with the alias above
+ pip3 install iPython
+
+The first time you launch iPython3, you may receive a warning about the Python
+library readline. The warning makes it clear that pip does not work to install
+readline, so use easy_install, which was installed with distribute above::
+
+ /Library/Frameworks/Python.framework/Versions/3.2/bin/easy_install readline
+
+If when launching iPython3 you receive another warning about kernmagic, note
+that iPython 0.12 and newer use an alternate config file from previous versions.
+Since I had not extensively customized my iPython profile, I just deleted the
+~/.iPython directory and relaunched iPython3.
+   
+Now let's get our testing and documentation suites::
+
+  pip3 install nose nose-exclude sphinx numpydoc
+
+Now that nose is installed, let's run the test suite. Since the refactored code
+only exists in the Python3 site-packages directory, cd into it and run nose.
+First, however, copy our nose config files to the installed pysal so that nose
+finds them::
+ 
+ cp <path to local pysal svn>/nose-exclude.txt /Library/Frameworks/Python.framework/Versions/3.2/lib/python3.2/site-packages/pysal/
+ cp <path to local pysal svn>/setup.cfg /Library/Frameworks/Python.framework/Versions/3.2/lib/python3.2/site-packages/pysal/
+ cd /Library/Frameworks/Python.framework/Versions/3.2/lib/python3.2/site-packages
+ /Library/Frameworks/Python.framework/Versions/3.2/bin/nosetests pysal > ~/Desktop/nose-output.txt 2>&1
+
+
diff --git a/doc/source/developers/release.rst b/doc/source/developers/release.rst
new file mode 100644
index 0000000..7f7c9eb
--- /dev/null
+++ b/doc/source/developers/release.rst
@@ -0,0 +1,93 @@
+.. _release:
+.. role:: strike
+
+************************
+PySAL Release Management
+************************
+.. contents::
+
+Prepare the release
+-------------------
+
+- Check all tests pass.
+- Update CHANGELOG::
+
+     $ python tools/github_stats.py >> chglog
+
+- Prepend `chglog` to `CHANGELOG` and edit
+- Edit THANKS and README and README.md if needed.
+- Change MAJOR, MINOR version in setup script.
+- Change pysal/version.py to non-dev number
+- Change the docs version from X.xdev to X.x by editing doc/source/conf.py in two places.
+- Change docs/index.rst to update Stable version and date, and Development version
+- Commit all changes.
+
+Tag 
+---
+
+Make the Tag::
+
+  $ git tag -a v1.4 -m 'my version 1.4'
+
+  $ git push upstream v1.4
+
+On each build machine, clone and checkout the newly created tag::
+
+  $ git clone http://github.com/pysal/pysal.git
+  $ git fetch --tags
+  $ git checkout v1.4
+
+Make docs
+---------
+
+As of version 1.6, docs are automatically compiled and hosted_.
+
+Make and Upload distributions
+-------------------------------
+
+- Make and upload_ to the Python Package Index in one shot!::
+
+   $ python setup.py sdist  (to test it)
+   $ python setup.py sdist upload
+
+  - if not registered_, do so. Follow the prompts. You can save the
+      login credentials in a dot-file, .pypirc
+
+- Make and upload the Windows installer to SourceForge.
+  - On a Windows box, build the installer as so:: 
+
+    $ python setup.py bdist_wininst
+
+Announce
+--------
+
+- Draft and distribute press release on geodacenter.asu.edu, openspace-list, and pysal.org
+
+  - On GeoDa center website, do this:
+
+   - Login and expand the wrench icon to reveal the Admin menu
+   - Click "Administer", "Content Management", "Content"
+   - Next, click "List", filter by type, and select "Featured Project".
+   - Click "Filter"
+
+   Now you will see the list of Featured Projects. Find "PySAL".
+
+   - Choose to 'edit' PySAL and modify the short text there. This changes the text users see on the homepage slider.
+   - Clicking on the name "PySAL" allows you to edit the content of the PySAL project page, which is also the "About PySAL" page linked to from the homepage slider.
+
+Put master back to dev
+----------------------
+
+- Change MAJOR, MINOR version in setup script.
+- Change pysal/version.py to dev number
+- Change the docs version from X.x to X.xdev by editing doc/source/conf.py in two places.
+- Update the release schedule in doc/source/developers/guidelines.rst
+
+
+Update the `github.io news page <https://github.com/pysal/pysal.github.io/blob/master/_includes/news.md>`_
+to  announce the release.
+
+.. _upload: http://docs.python.org/2.7/distutils/uploading.html
+.. _registered: http://docs.python.org/2.7/distutils/packageindex.html
+.. _source: http://docs.python.org/distutils/sourcedist.html
+.. _hosted: http://pysal.readthedocs.org
diff --git a/doc/source/developers/testing.rst b/doc/source/developers/testing.rst
new file mode 100644
index 0000000..94e5b1d
--- /dev/null
+++ b/doc/source/developers/testing.rst
@@ -0,0 +1,127 @@
+.. _testing:
+..  role:: strike
+
+************************
+PySAL Testing Procedures
+************************
+.. contents::
+
+As of PySAL release 1.6, continuous integration testing was ported to the
+Travis-CI hosted testing framework (http://travis-ci.org).  There is integration within GitHub that
+provides Travis-CI test results included in a pending Pull Request page, so
+developers can know before merging a Pull Request that the changes will or will
+not induce breakage. 
+
+Take a moment to read about the Pull Request development model on our wiki at
+https://github.com/pysal/pysal/wiki/GitHub-Standard-Operating-Procedures
+
+PySAL relies on two different modes of testing [1] integration (regression)
+testing and [2] doctests. All developers responsible for given packages shall
+utilize both modes.
+
+Integration Testing
+===================
+
+Each package shall have a directory `tests` in which unit test scripts for
+each module in the package directory are required. 
+For example, in the directory `pysal/esda` the module `moran.py` requires a
+unittest script named `test_moran.py`. This path for this script needs to be
+`pysal/esda/tests/test_moran.py`.
+
+To ensure that any changes made to one package/module do not introduce breakage in the wider project,
+developers should run the package wide test suite using nose before making any
+commits. As of release version 1.5, all tests must pass using a 64-bit
+version of Python.
+To run the new test suite, install nose, nose-progressive, and nose-exclude into your
+working python installation. If you're using EPD, nose is already available::
+
+    pip install -U nose
+    pip install nose-progressive
+    pip install nose-exclude
+
+Then::
+
+  cd trunk/
+  nosetests pysal/
+  
+You can also run the test suite from within a Python session. At the
+conclusion of the test, Python will, however, exit::
+ 
+  import pysal
+  import nose
+  nose.runmodule('pysal')
+
+
+The file setup.cfg (added in revision 1050) in trunk holds nose configuration variables. When nosetests
+is run from trunk, nose reads those configuration parameters into its operation,
+so developers do not need to specify the optional flags on the command line as
+shown below. 
+
+To specify running just a subset of the tests, you can also run::
+
+  nosetests pysal/esda/
+  
+or any other directory, for instance, to run just those tests. 
+To run the entire unittest test suite plus all of the doctests, run::
+
+  nosetests --with-doctest pysal/
+
+To exclude a specific directory or directories, install nose-exclude from PyPi
+(pip install nose-exclude). Then run it like this::
+
+  nosetests -v --exclude-dir=pysal/contrib --with-doctest  pysal/
+
+
+Note that you'll probably run into an IOError complaining about too many open
+files. To fix that, pass this via the command line::
+
+  ulimit -S -n 1024
+
+That changes the machine's open file limit for just the current terminal
+session. 
+
+The trunk should most always be in a state where all tests are passed.
+
+
+Generating Unit Tests
+=====================
+
+A useful development companion is the package `pythoscope <http://pythoscope.org>`_. It scans
+package folders and produces test script stubs for your modules that fail until
+you write the tests -- a pesky but useful trait. Using pythoscope in the most
+basic way requires just two simple command line calls::
+ 
+ pythoscope --init
+
+ pythoscope <my_module>.py
+
+
+:strike:`One caveat: pythoscope does not name your test classes in a PySAL-friendly way
+so you'll have to rename each test class after the test scripts are generated.`
+Nose finds tests!
+
+Docstrings and Doctests
+=======================
+
+All public classes and functions should include examples in their docstrings. Those examples serve two purposes:
+
+#. Documentation for users
+#. Tests to ensure code behavior is aligned with the documentation
+
+Doctests will be executed when building PySAL documentation with Sphinx.
+
+
+Developers *should* run tests manually before committing any changes that
+may potentially affect usability. Developers can run doctests (docstring
+tests) manually from the command line using nosetests ::
+
+  nosetests --with-doctest pysal/
+
+Tutorial Doctests
+=================
+
+All of the tutorials are tested along with the overall test suite. Developers
+can test their changes against the tutorial docstrings by cd'ing into
+/doc/ and running::
+
+    make doctest
diff --git a/doc/source/funding.rst b/doc/source/funding.rst
new file mode 100644
index 0000000..7bb8856
--- /dev/null
+++ b/doc/source/funding.rst
@@ -0,0 +1,16 @@
+.. funding:
+
+Funding
+=======
+
+National Science Foundation `CyberGIS Software Integration for Sustained Geospatial Innovation <http://geodacenter.asu.edu/about/news/NSF_Cyber_Award>`_
+
+National Institute of Justice `Flexible Geospatial Visual Analytics and Simulation Technologies to Enhance Criminal Justice Decision Support Systems <http://geoplan.asu.edu/node/3855>`_
+
+National Institutes of Health `Geospatial Factors and Impacts: Measurement and Use (R01CA126858-02) <http://geodacenter.asu.edu/projects/rti/content/geospatial-fact>`_
+
+National Science Foundation `An Exploratory Space-Time Data Analysis Toolkit for Spatial Social Science Research (0433132) <http://www.nsf.gov/awardsearch/showAward.do?AwardNumber=0433132>`_ 
+
+National Science Foundation `Hedonic Models of Location Decisions with Applications to Geospatial Microdata (0852261) <http://www.nsf.gov/awardsearch/showAward.do?AwardNumber=0852261>`_
+
+
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000..fba27f9
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,87 @@
+.. pysal documentation master file, created by
+   sphinx-quickstart on Wed Aug 26 19:58:20 2009.
+
+.. raw:: html
+
+    <style type="text/css">
+    .section h1 { display: none; }
+    </style>
+
+PySAL
+=====
+
+.. raw:: html
+
+ <div class="section" id="showcase">
+      <a href="http://nbviewer.ipython.org/github/pysal/pysal/blob/master/pysal/contrib/viz/taz_example.ipynb?create=1"><img     src="_static/images/socal_2.jpg" border="0" alt="So. Cal. taz network"/></a>
+      <a href="http://nbviewer.ipython.org/github/pysal/pysal/blob/master/pysal/contrib/viz/taz_example.ipynb?create=1"><img     src="_static/images/socal_3.jpg" border="0" alt="County components"/></a>
+  </div>
+
+
+.. sidebar:: Releases
+
+    - `Stable 1.9.1 - January 2015 <users/installation.html>`_
+    - `Development 1.10.0dev  <http://github.com/pysal/pysal/>`_
+
+PySAL is an open source library of spatial analysis functions written in
+Python intended to support the development of high level applications.
+PySAL is open source under the `BSD License`_.
+
+.. _BSD License: license.html
+
+
+.. toctree::
+    :maxdepth: 1
+
+    News <http://pysal.github.io/news.html>
+    Events <http://pysal.github.io/upcoming_events.html>
+    Funding <http://pysal.github.io/funding.html>
+    Gallery <http://pysal.github.io/grid.html>
+    users/index
+    developers/index
+    library/index
+
+.. raw:: html
+
+  <style type="text/css">
+  table.linktable {
+      margin: 10px;
+      margin-bottom: 20px;
+      margin-left:auto;
+      margin-right: auto;
+  }
+  table.icontable {
+      margin: 10px;
+  }
+  table.linktable td {
+      padding-left: 15px;
+      padding-right: 15px;
+      padding-bottom: 5px;
+      text-align: center;
+  }
+  </style>
+  
+  <table class="linktable">
+  <tr>
+    <td>
+      <a href="users/installation.html">
+         <img alt="Download" src="_static/images/download2.png" title="Download" height="80" style="display: block; margin-left: auto; margin-right: auto;"></a> </td>
+    <td>
+      <a href="users/tutorials/index.html">
+        <img alt="Getting Started" src="_static/images/tutorial.png" title="Getting Started"  height="80" style="display: block; margin-left: auto; margin-right: auto;"></a> </td>
+    <td>
+      <a href="library/index.html">
+        <img alt="Documentation" src="_static/images/documentation.png" title="Documentation"  height="80" style="display: block; margin-left: auto; margin-right: auto;"></a> </td>
+    <td>
+      <a href="http://github.com/pysal/pysal/issues?state=open">
+        <img alt="Bug Report" src="_static/images/bugs.png" title="Bug Report"  height="80" style="display: block; margin-left: auto; margin-right: auto;"></a> </td>
+  </tr>
+
+  <tr>
+    <td><strong><small><a href="users/installation.html">Install</a></small></strong></td>
+    <td><strong><small><a href="users/tutorials/index.html">Getting Started</a></small></strong></td>
+    <td><strong><small><a href="library/index.html">Documentation</a></small></strong></td>
+    <td><strong><small><a href="http://github.com/pysal/pysal/issues?state=open">Report Bugs</a></small></strong></td>
+  </tr>
+  </table>
+
diff --git a/doc/source/index.txt b/doc/source/index.txt
new file mode 100644
index 0000000..27bc1d5
--- /dev/null
+++ b/doc/source/index.txt
@@ -0,0 +1,110 @@
+.. pysal documentation master file, created by
+   sphinx-quickstart on Wed Aug 26 19:58:20 2009.
+
+*******
+Welcome
+*******
+
+PySAL is a cross-platform library of spatial analysis functions written in
+Python. It is intended to support the development of high level applications
+for spatial analysis.
+
+PySAL is Open Source and licensed under the `BSD License`_.
+
+.. _BSD License: http://opensource.org/licenses/bsd-license.php 
+
+Getting Started
+===============
+
+If you are new to PySAL you might want to read :ref:`users-tutorials`.
+
+Downloads
+=========
+
+`Source downloads  <http://pypi.python.org/pypi/PySAL>`_  are hosted on the
+Python Package Index. 
+
+Graphical installers for recent releases are hosted on the `GeoDa Center
+website <http://geodacenter.asu.edu/software>`_. 
+
+Legacy builds (<=1.5) are still available on the old source code repository at
+Google Code, `here <http://code.google.com/p/pysal>`_.  You can also find old
+versions of this documentation there as well. 
+
+Getting Involved
+================
+
+Prospective developers can learn more in the :doc:`developer guide
+</developers/index>`.
+
+You can track PySAL development, submit bug reports, and make feature requests 
+`here <https://github.com/pysal/pysal/issues>`_. 
+
+
+Documentation
+=============
+
+    :Release: |version|
+    :Date: |today|
+
+.. toctree::
+    :maxdepth: 1
+
+    users/index
+    developers/index
+    library/index
+
+Bleeding edge documentation is available for developers by selecting `latest` from the drop-down in the lower right-hand corner of this page.
+
+News
+====
+
+(2013-11-13) `PySAL Workshop at North American Regional Science Association Meetings, Atlanta, GA <http://www.narsc.org/newsite/?page_id=2547>`_ 
+
+
+(2013-07-31) `PySAL 1.6.0 released <https://github.com/pysal/pysal/wiki/PySAL-1.6-Released-(2013-07-31)>`_ (`Install <http://pysal.geodacenter.org/1.6/users/installation.html>`_)
+
+
+(2013-01-30) `PySAL 1.5.0 released <http://code.google.com/p/pysal/wiki/Announce1_5>`_ (`Install <http://pysal.geodacenter.org/1.5/users/installation.html>`_)
+
+(2012-10-24) `PySAL short course at OGRS 2012 <https://twitter.com/OGRS2012/status/261106998861504512>`_
+
+(2012-09-18) `PySAL short course at GIScience 2012 <http://www.giscience.org/workshops.html>`_
+
+(2012-07-31) `PySAL 1.4.0 released <http://code.google.com/p/pysal/wiki/Announce1_4>`_ (`Install <http://pysal.geodacenter.org/1.4/users/installation.html>`_)
+
+(2012-07-30) `Short course on PySAL for Spatial Regression <https://www.geodapress.com/workshops/spatial-regression#description>`_
+
+(2012-07-30) `PySAL presentation at Joint Statistical Meetings
+<https://www.amstat.org/meetings/jsm/2012/onlineprogram/AbstractDetails.cfm?abstractid=303498>`_
+
+(2012-07-18) `PySAL at SciPy 2012 <http://conference.scipy.org/scipy2012/schedule/conf_schedule_1.php>`_
+
+(2012-01-31) `PySAL 1.3.0 released <http://code.google.com/p/pysal/wiki/Announce1_3>`_ (`Install <http://pysal.geodacenter.org/1.3/users/installation.html>`_)
+
+(2011-07-31) `PySAL 1.2.0 released <http://code.google.com/p/pysal/wiki/Announce1_2>`_ (`Install <http://pysal.geodacenter.org/1.2/users/installation.html>`_)
+
+(2011-01-31) `PySAL 1.1.0 released <http://code.google.com/p/pysal/wiki/Announce1_1>`_ (`Install <http://pysal.geodacenter.org/1.1/users/installation.html>`_)
+
+(2010-07-31) `PySAL 1.0.0 released <http://code.google.com/p/pysal/wiki/Announce1_0>`_ (`Install <http://pysal.geodacenter.org/1.0/users/installation.html>`_)
+
+
+(2010-01-6) New Website for PySAL!
+
+(2009-11-03) `National Institute of Justice funding <http://geoplan.asu.edu/node/3855>`_
+
+
+Funding
+=======
+
+National Science Foundation `CyberGIS Software Integration for Sustained Geospatial Innovation <http://geodacenter.asu.edu/about/news/NSF_Cyber_Award>`_
+
+National Institute of Justice `Flexible Geospatial Visual Analytics and Simulation Technologies to Enhance Criminal Justice Decision Support Systems <http://geoplan.asu.edu/node/3855>`_
+
+National Institutes of Health `Geospatial Factors and Impacts: Measurement and Use (R01CA126858-02) <http://geodacenter.asu.edu/rti>`_
+
+National Science Foundation `An Exploratory Space-Time Data Analysis Toolkit for Spatial Social Science Research (0433132) <http://www.nsf.gov/awardsearch/showAward.do?AwardNumber=0433132>`_ 
+
+National Science Foundation `Hedonic Models of Location Decisions with Applications to Geospatial Microdata (0852261) <http://www.nsf.gov/awardsearch/showAward.do?AwardNumber=0852261>`_
+
+
diff --git a/doc/source/library/cg/index.rst b/doc/source/library/cg/index.rst
new file mode 100644
index 0000000..e3229d3
--- /dev/null
+++ b/doc/source/library/cg/index.rst
@@ -0,0 +1,16 @@
+:mod:`pysal.cg` -- Computational Geometry
+==========================================
+
+.. toctree::
+    :maxdepth: 1
+
+    locators
+    shapes
+    standalone
+    rtree
+    kdtree
+    sphere
+
+
+
+
diff --git a/doc/source/library/cg/kdtree.rst b/doc/source/library/cg/kdtree.rst
new file mode 100644
index 0000000..8f4fd91
--- /dev/null
+++ b/doc/source/library/cg/kdtree.rst
@@ -0,0 +1,10 @@
+:mod:`cg.kdtree` --- KDTree 
+===========================
+
+The :mod:`cg.kdtree` module provides kdtree data structures for PySAL.
+
+.. versionadded:: 1.3
+
+.. automodule:: pysal.cg.kdtree
+    :members:
+
diff --git a/doc/source/library/cg/locators.rst b/doc/source/library/cg/locators.rst
new file mode 100644
index 0000000..e846ff1
--- /dev/null
+++ b/doc/source/library/cg/locators.rst
@@ -0,0 +1,14 @@
+:mod:`cg.locators` --- Locators
+===============================
+
+The :mod:`cg.locators` module provides ....
+
+.. versionadded:: 1.0
+
+
+.. automodule:: pysal.cg.locators
+    :synopsis: Computational geometry code for PySAL: Python Spatial Analysis Library
+    :members:
+    :inherited-members:
+    :undoc-members:
+
diff --git a/doc/source/library/cg/rtree.rst b/doc/source/library/cg/rtree.rst
new file mode 100644
index 0000000..a1eb2a3
--- /dev/null
+++ b/doc/source/library/cg/rtree.rst
@@ -0,0 +1,10 @@
+:mod:`cg.rtree` --- rtree 
+===========================
+
+The :mod:`cg.rtree` module provides a pure python rtree.
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.cg.rtree
+    :members:
+
diff --git a/doc/source/library/cg/shapes.rst b/doc/source/library/cg/shapes.rst
new file mode 100644
index 0000000..776efc1
--- /dev/null
+++ b/doc/source/library/cg/shapes.rst
@@ -0,0 +1,10 @@
+:mod:`cg.shapes` --- Shapes 
+===========================
+
+The :mod:`cg.shapes` module provides basic data structures.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.cg.shapes
+    :members:
+
diff --git a/doc/source/library/cg/sphere.rst b/doc/source/library/cg/sphere.rst
new file mode 100644
index 0000000..43154f0
--- /dev/null
+++ b/doc/source/library/cg/sphere.rst
@@ -0,0 +1,10 @@
+:mod:`cg.sphere` --- Sphere 
+===========================
+
+The :mod:`cg.sphere` module provides tools for working with spherical distances.
+
+.. versionadded:: 1.3
+
+.. automodule:: pysal.cg.sphere
+    :members:
+
diff --git a/doc/source/library/cg/standalone.rst b/doc/source/library/cg/standalone.rst
new file mode 100644
index 0000000..9e5ba1a
--- /dev/null
+++ b/doc/source/library/cg/standalone.rst
@@ -0,0 +1,11 @@
+:mod:`cg.standalone` --- Standalone
+===================================
+
+The :mod:`cg.standalone` module provides ...
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.cg.standalone
+    :synopsis: Computational geometry code for PySAL: Python Spatial Analysis Library
+    :members:
+
diff --git a/doc/source/library/contrib/index.rst b/doc/source/library/contrib/index.rst
new file mode 100644
index 0000000..7fe43d8
--- /dev/null
+++ b/doc/source/library/contrib/index.rst
@@ -0,0 +1,73 @@
+:mod:`pysal.contrib` -- Contributed Modules 
+===========================================
+
+**Intro**
+
+The PySAL Contrib library contains user contributions that enhance PySAL, but
+are not fit for inclusion in the general library. The primary reason a
+contribution would not be allowed in the general library is external
+dependencies. PySAL has a strict no dependency policy (aside from Numpy/Scipy).
+This helps ensure the library is easy to install and maintain.
+
+However, this policy often limits our ability to make use of existing code or
+exploit performance enhancements from C-extensions. This contrib module is
+designed to alleviate this problem. There are no restrictions on external
+dependencies in contrib. 
+
+**Ground Rules**
+
+ 1. Contribs must not be used within the general library.
+ 2. *Explicit imports*: each contrib must be imported manually.
+ 3. *Documentation*: each contrib must be documented, dependencies especially.
+
+**Contribs**
+
+Currently the following contribs are available:
+
+ 1. World To View Transform -- A class for modeling viewing windows, used by Weights Viewer.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.weights_viewer.transforms
+    - Requires: None
+
+ 2. Weights Viewer -- A Graphical tool for examining spatial weights.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.weights_viewer.weights_viewer
+    - Requires: `wxPython`_
+
+ 3. Shapely Extension -- Exposes shapely methods as standalone functions
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.shapely_ext
+    - Requires: `shapely`_
+
+ 4. Shared Perimeter Weights -- calculate shared perimeters weights.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.shared_perimeter_weights
+    - Requires: `shapely`_
+
+ 5. Visualization -- Lightweight visualization layer (`Project page`_).
+
+    - .. versionadded:: 1.7
+    - Path: pysal.contrib.viz
+    - Requires: `matplotlib`_
+
+ 6. Clusterpy -- Spatially constrained clustering.
+
+    - .. versionadded:: 1.8
+    - Path: pysal.contrib.clusterpy
+    - Requires: `clusterpy`_
+
+
+
+
+
+
+
+.. _clusterpy: https://pypi.python.org/pypi/clusterPy/0.9.9
+.. _matplotlib: http://matplotlib.org/
+.. _project page: https://github.com/pysal/pysal/wiki/PySAL-Visualization-Project
+.. _shapely: https://pypi.python.org/pypi/Shapely
+.. _wxPython: http://www.wxpython.org/
diff --git a/doc/source/library/core/FileIO.rst b/doc/source/library/core/FileIO.rst
new file mode 100644
index 0000000..78d0d42
--- /dev/null
+++ b/doc/source/library/core/FileIO.rst
@@ -0,0 +1,15 @@
+:mod:`FileIO` -- File Input/Output System
+===============================================
+
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.FileIO
+    :synopsis: File Input/Output System for PySAL
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
+
diff --git a/doc/source/library/core/IOHandlers/arcgis_dbf.rst b/doc/source/library/core/IOHandlers/arcgis_dbf.rst
new file mode 100644
index 0000000..880b03a
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/arcgis_dbf.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.arcgis_dbf` -- ArcGIS DBF plugin 
+========================================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.arcgis_dbf
+    :synopsis: ArcGIS DBF weights file plugin
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/arcgis_swm.rst b/doc/source/library/core/IOHandlers/arcgis_swm.rst
new file mode 100644
index 0000000..9fe2b0c
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/arcgis_swm.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.arcgis_swm` --- ArcGIS SWM plugin
+==================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.arcgis_swm
+    :synopsis: ArcGIS SWM weights file plugin 
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/arcgis_txt.rst b/doc/source/library/core/IOHandlers/arcgis_txt.rst
new file mode 100644
index 0000000..3c0c28f
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/arcgis_txt.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.arcgis_txt` -- ArcGIS ASCII plugin 
+=================================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.arcgis_txt
+    :synopsis: ArcGIS ASCII text weights file plugin 
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/csvWrapper.rst b/doc/source/library/core/IOHandlers/csvWrapper.rst
new file mode 100644
index 0000000..bd5ecbb
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/csvWrapper.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.csvWrapper` --- CSV plugin 
+===========================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.csvWrapper
+    :synopsis: CSV Plugin 
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/dat.rst b/doc/source/library/core/IOHandlers/dat.rst
new file mode 100644
index 0000000..b0a54e2
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/dat.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.dat` --- DAT plugin 
+====================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.dat
+    :synopsis: DAT Plugin for PySAL FileIO System
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/gal.rst b/doc/source/library/core/IOHandlers/gal.rst
new file mode 100644
index 0000000..14cb409
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/gal.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.gal` --- GAL plugin 
+====================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.gal
+    :synopsis: GAL Plugin for PySAL FileIO System
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/geobugs_txt.rst b/doc/source/library/core/IOHandlers/geobugs_txt.rst
new file mode 100644
index 0000000..11c0b24
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/geobugs_txt.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.geobugs_txt` --- GeoBUGS plugin
+================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.geobugs_txt
+    :synopsis: GeoBUGS Plugin for PySAL FileIO System
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/geoda_txt.rst b/doc/source/library/core/IOHandlers/geoda_txt.rst
new file mode 100644
index 0000000..68cee8c
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/geoda_txt.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.geoda_txt` -- Geoda text plugin
+================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.geoda_txt
+    :synopsis: Geoda_txt Plugin for PySAL FileIO System
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/gwt.rst b/doc/source/library/core/IOHandlers/gwt.rst
new file mode 100644
index 0000000..96b6cff
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/gwt.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.gwt` --- GWT plugin
+====================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.gwt
+    :synopsis: GWT Plugin for PySAL FileIO System
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/index.rst b/doc/source/library/core/IOHandlers/index.rst
new file mode 100644
index 0000000..7f806d8
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/index.rst
@@ -0,0 +1,8 @@
+:mod:`pysal.core.IOHandlers` --- Input Output Handlers 
+======================================================
+
+.. toctree::
+    :glob:
+
+    *
+
diff --git a/doc/source/library/core/IOHandlers/mat.rst b/doc/source/library/core/IOHandlers/mat.rst
new file mode 100644
index 0000000..ffefa56
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/mat.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.mat` --- MATLAB Level 4-5 plugin
+======================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.mat
+    :synopsis: MATLAB Level 4-5 file plugin
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/mtx.rst b/doc/source/library/core/IOHandlers/mtx.rst
new file mode 100644
index 0000000..a5b69bc
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/mtx.rst
@@ -0,0 +1,10 @@
+:mod:`IOHandlers.mtx` --- Matrix Market MTX plugin
+=======================================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.mtx
+    :synopsis: Matrix Market MTX file plugin
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/pyDbfIO.rst b/doc/source/library/core/IOHandlers/pyDbfIO.rst
new file mode 100644
index 0000000..491cc58
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/pyDbfIO.rst
@@ -0,0 +1,11 @@
+:mod:`IOHandlers.pyDbfIO` -- PySAL DBF plugin
+=============================================
+
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.pyDbfIO
+    :synopsis: DBF Plugin 
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/pyShpIO.rst b/doc/source/library/core/IOHandlers/pyShpIO.rst
new file mode 100644
index 0000000..800ea9e
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/pyShpIO.rst
@@ -0,0 +1,12 @@
+:mod:`IOHandlers.pyShpIO` -- Shapefile plugin
+==============================================
+
+The :mod:`IOHandlers.pyShpIO` Shapefile Plugin for PySAL's FileIO System
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.IOHandlers.pyShpIO
+    :synopsis: PySAL Shapefile Plugin
+    :members:
+    :undoc-members:
+    :inherited-members:
diff --git a/doc/source/library/core/IOHandlers/stata_txt.rst b/doc/source/library/core/IOHandlers/stata_txt.rst
new file mode 100644
index 0000000..eb14b0c
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/stata_txt.rst
@@ -0,0 +1,12 @@
+:mod:`IOHandlers.stata_txt` --- STATA plugin 
+============================================================
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.stata_txt
+    :synopsis: PySAL STATA spatial weights plugin 
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/wk1.rst b/doc/source/library/core/IOHandlers/wk1.rst
new file mode 100644
index 0000000..97b84f9
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/wk1.rst
@@ -0,0 +1,13 @@
+:mod:`IOHandlers.wk1` --- Lotus WK1 plugin 
+===============================================
+
+.. versionadded:: 1.2
+
+.. automodule:: pysal.core.IOHandlers.wk1
+    :synopsis: PySAL plugin for WK1 files 
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/IOHandlers/wkt.rst b/doc/source/library/core/IOHandlers/wkt.rst
new file mode 100644
index 0000000..ab6126d
--- /dev/null
+++ b/doc/source/library/core/IOHandlers/wkt.rst
@@ -0,0 +1,15 @@
+:mod:`IOHandlers.wkt` -- Well Known Text (geometry) plugin
+===========================================================
+
+.. versionadded:: 1.0
+
+PySAL plugin for Well Known Text (geometry) 
+
+.. automodule:: pysal.core.IOHandlers.wkt
+    :synopsis: PySAL plugin for Well Known Text (geometry) 
+    :members:
+    :undoc-members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/core/Tables.rst b/doc/source/library/core/Tables.rst
new file mode 100644
index 0000000..9e1a894
--- /dev/null
+++ b/doc/source/library/core/Tables.rst
@@ -0,0 +1,12 @@
+:mod:`Tables` -- DataTable Extension
+==========================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.core.Tables
+    :synopsis: DataTable Extension for PySAL FileIO System
+    :members:
+    :undoc-members:
+
+
+
diff --git a/doc/source/library/core/index.rst b/doc/source/library/core/index.rst
new file mode 100644
index 0000000..9ab7868
--- /dev/null
+++ b/doc/source/library/core/index.rst
@@ -0,0 +1,8 @@
+:mod:`pysal.core` --- Core Data Structures and IO
+=================================================
+
+.. toctree::
+
+    Tables
+    FileIO
+    IOHandlers/index
diff --git a/doc/source/library/esda/gamma.rst b/doc/source/library/esda/gamma.rst
new file mode 100644
index 0000000..c2c377b
--- /dev/null
+++ b/doc/source/library/esda/gamma.rst
@@ -0,0 +1,10 @@
+:mod:`esda.gamma` --- Gamma statistics for spatial autocorrelation
+======================================================================
+
+
+.. versionadded:: 1.4
+
+.. automodule:: pysal.esda.gamma
+    :synopsis: Global measures of autocorrelation based on Gamma statistics
+    :members:
+
diff --git a/doc/source/library/esda/geary.rst b/doc/source/library/esda/geary.rst
new file mode 100644
index 0000000..1dd8ab4
--- /dev/null
+++ b/doc/source/library/esda/geary.rst
@@ -0,0 +1,10 @@
+:mod:`esda.geary` --- Geary's C statistics for spatial autocorrelation
+======================================================================
+
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.esda.geary
+    :synopsis: Global measures of autocorrelation based on Geary's C 
+    :members:
+
diff --git a/doc/source/library/esda/getis-ord.rst b/doc/source/library/esda/getis-ord.rst
new file mode 100644
index 0000000..36b3923
--- /dev/null
+++ b/doc/source/library/esda/getis-ord.rst
@@ -0,0 +1,11 @@
+
+:mod:`esda.getisord` --- Getis-Ord statistics for spatial association
+=====================================================================
+
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.esda.getisord
+    :synopsis: Getis-Ord global and local measures of spatial association 
+    :members:
+
diff --git a/doc/source/library/esda/index.rst b/doc/source/library/esda/index.rst
new file mode 100644
index 0000000..36697eb
--- /dev/null
+++ b/doc/source/library/esda/index.rst
@@ -0,0 +1,13 @@
+:mod:`pysal.esda` --- Exploratory Spatial Data Analysis
+=========================================================
+
+.. toctree::
+
+    gamma
+    geary
+    getis-ord
+    join_counts
+    mapclassify
+    moran
+    smoothing
+
diff --git a/doc/source/library/esda/join_counts.rst b/doc/source/library/esda/join_counts.rst
new file mode 100644
index 0000000..6b74d26
--- /dev/null
+++ b/doc/source/library/esda/join_counts.rst
@@ -0,0 +1,9 @@
+:mod:`esda.join_counts` --- Spatial autocorrelation statistics for binary attributes
+====================================================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.esda.join_counts
+    :synopsis: Spatial autocorrelation statistics for binary attributes
+    :members:
+
diff --git a/doc/source/library/esda/mapclassify.rst b/doc/source/library/esda/mapclassify.rst
new file mode 100644
index 0000000..1b42dc1
--- /dev/null
+++ b/doc/source/library/esda/mapclassify.rst
@@ -0,0 +1,10 @@
+:mod:`esda.mapclassify` --- Choropleth map classification
+=========================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.esda.mapclassify
+    :synopsis: Choropleth map classification
+    :members:
+    :inherited-members:
+    :undoc-members:
diff --git a/doc/source/library/esda/moran.rst b/doc/source/library/esda/moran.rst
new file mode 100644
index 0000000..bd6d253
--- /dev/null
+++ b/doc/source/library/esda/moran.rst
@@ -0,0 +1,11 @@
+:mod:`esda.moran` --- Moran's I measures of spatial autocorrelation
+====================================================================
+
+.. versionadded:: 1.0
+
+Moran's I global and local measures of spatial autocorrelation 
+
+.. automodule:: pysal.esda.moran
+    :synopsis: Moran's I global and local measures of spatial autocorrelation 
+    :members:
+
diff --git a/doc/source/library/esda/smoothing.rst b/doc/source/library/esda/smoothing.rst
new file mode 100644
index 0000000..0608362
--- /dev/null
+++ b/doc/source/library/esda/smoothing.rst
@@ -0,0 +1,9 @@
+:mod:`esda.smoothing` --- Smoothing of spatial rates
+====================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.esda.smoothing
+    :synopsis: Smoothing of spatial rates
+    :members:
+
diff --git a/doc/source/library/index.rst b/doc/source/library/index.rst
new file mode 100644
index 0000000..7fdccf4
--- /dev/null
+++ b/doc/source/library/index.rst
@@ -0,0 +1,34 @@
+.. _library-index:
+
+#################
+Library Reference
+#################
+
+:Release: |version|
+:Date: |today|
+
+Python Spatial Analysis Library
+===============================
+
+The Python Spatial Analysis Library consists of several sub-packages each addressing a different area of spatial analysis.  In addition to these sub-packages PySAL includes some general utilities used across all modules.
+
+
+Sub-packages
+-------------
+
+.. toctree::
+    :maxdepth: 1
+
+    cg/index
+    core/index
+    esda/index
+    inequality/index
+    region/index
+    spatial_dynamics/index
+    spreg/index
+    weights/index
+    network/index
+    contrib/index
+
+
+
diff --git a/doc/source/library/inequality/gini.rst b/doc/source/library/inequality/gini.rst
new file mode 100644
index 0000000..6c4f450
--- /dev/null
+++ b/doc/source/library/inequality/gini.rst
@@ -0,0 +1,12 @@
+:mod:`inequality.gini` -- Gini inequality and decomposition measures
+======================================================================
+
+The :mod:`inequality.gini` module provides Gini inequality based measures
+
+
+.. versionadded:: 1.6
+
+.. automodule:: pysal.inequality.gini
+    :synopsis: Gini based inequality measures
+    :members:
+
diff --git a/doc/source/library/inequality/index.rst b/doc/source/library/inequality/index.rst
new file mode 100644
index 0000000..77e7a2c
--- /dev/null
+++ b/doc/source/library/inequality/index.rst
@@ -0,0 +1,8 @@
+:mod:`pysal.inequality` --- Spatial Inequality Analysis
+=========================================================
+
+.. toctree::
+    :maxdepth: 1
+    :glob:
+
+    *
diff --git a/doc/source/library/inequality/theil.rst b/doc/source/library/inequality/theil.rst
new file mode 100644
index 0000000..466b42b
--- /dev/null
+++ b/doc/source/library/inequality/theil.rst
@@ -0,0 +1,12 @@
+:mod:`inequality.theil` -- Theil inequality and decomposition measures
+======================================================================
+
+The :mod:`inequality.theil` module provides Theil inequality based measures
+
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.inequality.theil
+    :synopsis: Theil based inequality measures
+    :members:
+
diff --git a/doc/source/library/network/index.rst b/doc/source/library/network/index.rst
new file mode 100644
index 0000000..2675074
--- /dev/null
+++ b/doc/source/library/network/index.rst
@@ -0,0 +1,7 @@
+:mod:`pysal.network` --- Network Constrained Analysis
+=====================================================
+
+.. toctree::
+
+   network
+    
diff --git a/doc/source/library/network/network.rst b/doc/source/library/network/network.rst
new file mode 100644
index 0000000..b280d44
--- /dev/null
+++ b/doc/source/library/network/network.rst
@@ -0,0 +1,13 @@
+:mod:`pysal.network` --- Network Constrained Analysis
+======================================================
+
+The :mod:`network` Network Analysis for PySAL
+
+.. versionadded:: 1.9
+
+.. automodule:: pysal.network.network
+    :synopsis: Network Analysis for PySAL
+    :members:
+
+
+
diff --git a/doc/source/library/region/index.rst b/doc/source/library/region/index.rst
new file mode 100644
index 0000000..3495897
--- /dev/null
+++ b/doc/source/library/region/index.rst
@@ -0,0 +1,9 @@
+:mod:`pysal.region` --- Spatially Constrained Clustering
+=============================================================
+
+.. toctree::
+
+    maxp
+    randomregion
+
+
diff --git a/doc/source/library/region/maxp.rst b/doc/source/library/region/maxp.rst
new file mode 100644
index 0000000..c7d952e
--- /dev/null
+++ b/doc/source/library/region/maxp.rst
@@ -0,0 +1,8 @@
+:mod:`region.maxp` -- maxp regionalization
+===========================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.region.maxp
+    :synopsis: Regionalization based on contiguity constraints and LISA seeds
+    :members:
diff --git a/doc/source/library/region/randomregion.rst b/doc/source/library/region/randomregion.rst
new file mode 100644
index 0000000..4eb2e9e
--- /dev/null
+++ b/doc/source/library/region/randomregion.rst
@@ -0,0 +1,8 @@
+:mod:`region.randomregion` -- Random region creation
+===========================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.region.randomregion
+    :synopsis: Creation of random regions subject to various constraints
+    :members:
diff --git a/doc/source/library/spatial_dynamics/directional.rst b/doc/source/library/spatial_dynamics/directional.rst
new file mode 100644
index 0000000..4ca2860
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/directional.rst
@@ -0,0 +1,8 @@
+:mod:`spatial_dynamics.directional` -- Directional LISA Analytics
+=================================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.spatial_dynamics.directional
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/spatial_dynamics/ergodic.rst b/doc/source/library/spatial_dynamics/ergodic.rst
new file mode 100644
index 0000000..2374701
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/ergodic.rst
@@ -0,0 +1,8 @@
+:mod:`spatial_dynamics.ergodic` -- Summary measures for ergodic Markov chains 
+=============================================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.spatial_dynamics.ergodic
+    :synopsis: summary measures for ergodic Markov chains
+    :members:
diff --git a/doc/source/library/spatial_dynamics/index.rst b/doc/source/library/spatial_dynamics/index.rst
new file mode 100644
index 0000000..5fc133f
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/index.rst
@@ -0,0 +1,13 @@
+:mod:`pysal.spatial_dynamics` --- Spatial Dynamics
+==================================================
+
+.. toctree::
+    :maxdepth: 1
+
+    directional
+    ergodic
+    interaction
+    markov
+    rank
+
+
diff --git a/doc/source/library/spatial_dynamics/interaction.rst b/doc/source/library/spatial_dynamics/interaction.rst
new file mode 100644
index 0000000..6e6c138
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/interaction.rst
@@ -0,0 +1,8 @@
+:mod:`spatial_dynamics.interaction` -- Space-time interaction tests
+===================================================================
+
+.. versionadded:: 1.1
+
+.. automodule:: pysal.spatial_dynamics.interaction
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/spatial_dynamics/markov.rst b/doc/source/library/spatial_dynamics/markov.rst
new file mode 100644
index 0000000..e740558
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/markov.rst
@@ -0,0 +1,8 @@
+:mod:`spatial_dynamics.markov` -- Markov based methods
+======================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.spatial_dynamics.markov
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/spatial_dynamics/rank.rst b/doc/source/library/spatial_dynamics/rank.rst
new file mode 100644
index 0000000..cf6fb82
--- /dev/null
+++ b/doc/source/library/spatial_dynamics/rank.rst
@@ -0,0 +1,8 @@
+:mod:`spatial_dynamics.rank` -- Rank and spatial rank mobility measures
+=======================================================================
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.spatial_dynamics.rank
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/spreg/diagnostics.rst b/doc/source/library/spreg/diagnostics.rst
new file mode 100644
index 0000000..db94b0e
--- /dev/null
+++ b/doc/source/library/spreg/diagnostics.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.diagnostics` --- Diagnostics
+=========================================
+
+
+The :mod:`spreg.diagnostics` module provides a set of standard non-spatial diagnostic tests.
+
+.. versionadded:: 1.1
+
+
+.. automodule:: pysal.spreg.diagnostics
+    :synopsis: Code for spreg diagnostics
+    :members:
diff --git a/doc/source/library/spreg/diagnostics_sp.rst b/doc/source/library/spreg/diagnostics_sp.rst
new file mode 100644
index 0000000..abc17db
--- /dev/null
+++ b/doc/source/library/spreg/diagnostics_sp.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.diagnostics_sp` --- Spatial Diagnostics
+====================================================
+
+The :mod:`spreg.diagnostics_sp` module provides spatial diagnostic tests.
+
+.. versionadded:: 1.1
+
+
+.. automodule:: pysal.spreg.diagnostics_sp
+    :synopsis: Code for spreg spatial diagnostics
+    :members:
+
diff --git a/doc/source/library/spreg/diagnostics_tsls.rst b/doc/source/library/spreg/diagnostics_tsls.rst
new file mode 100644
index 0000000..6780bf0
--- /dev/null
+++ b/doc/source/library/spreg/diagnostics_tsls.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.diagnostics_tsls` --- Diagnostics for 2SLS
+======================================================
+
+The :mod:`spreg.diagnostics_tsls` module provides diagnostic tests for two stage least squares based models.
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.diagnostics_tsls
+    :synopsis: Code for spreg 2SLS diagnostics
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp.rst b/doc/source/library/spreg/error_sp.rst
new file mode 100644
index 0000000..1be4f3a
--- /dev/null
+++ b/doc/source/library/spreg/error_sp.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models
+=====================================================================================
+
+The :mod:`spreg.error_sp` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with and without endogenous variables; based on Kelejian and Prucha (1998 and 1999).
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.error_sp
+    :synopsis: Code for spreg spatial error and spatial combo regression
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp_het.rst b/doc/source/library/spreg/error_sp_het.rst
new file mode 100644
index 0000000..eb36114
--- /dev/null
+++ b/doc/source/library/spreg/error_sp_het.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp_het` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models with Heteroskedasticity 
+=================================================================================================================
+
+The :mod:`spreg.error_sp_het` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with and without endogenous variables, and allowing for heteroskedasticity; based on Arraiz et al (2010) and Anselin (2011).
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.error_sp_het
+    :synopsis: Code for spreg spatial error and spatial combo regression with heteroskedasticity 
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp_het_regimes.rst b/doc/source/library/spreg/error_sp_het_regimes.rst
new file mode 100644
index 0000000..3520622
--- /dev/null
+++ b/doc/source/library/spreg/error_sp_het_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp_het_regimes` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models with Heteroskedasticity with Regimes
+======================================================================================================================================
+
+The :mod:`spreg.error_sp_het_regimes` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with regimes and with and without endogenous variables, and allowing for heteroskedasticity; based on Arraiz et al (2010) and Anselin (2011).
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.error_sp_het_regimes
+    :synopsis: Code for spreg spatial error and spatial combo regression with heteroskedasticity with regimes 
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp_hom.rst b/doc/source/library/spreg/error_sp_hom.rst
new file mode 100644
index 0000000..ec5c730
--- /dev/null
+++ b/doc/source/library/spreg/error_sp_hom.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp_hom` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models
+=========================================================================================
+
+The :mod:`spreg.error_sp_hom` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with and without endogenous variables, and includes inference on the spatial error parameter (lambda); based on Drukker et al. (2010) and Anselin (2011).
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.error_sp_hom
+    :synopsis: Code for spreg spatial error and spatial combo regression with inference on lambda
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp_hom_regimes.rst b/doc/source/library/spreg/error_sp_hom_regimes.rst
new file mode 100644
index 0000000..9a1796c
--- /dev/null
+++ b/doc/source/library/spreg/error_sp_hom_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp_hom_regimes` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models with Regimes
+==============================================================================================================
+
+The :mod:`spreg.error_sp_hom_regimes` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with regimes and with and without endogenous variables, and includes inference on the spatial error parameter (lambda); based on Drukker et al. (2010) and Anselin (2011).
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.error_sp_hom_regimes
+    :synopsis: Code for spreg spatial error and spatial combo regression with inference on lambda and with regimes
+    :members:
+
diff --git a/doc/source/library/spreg/error_sp_regimes.rst b/doc/source/library/spreg/error_sp_regimes.rst
new file mode 100644
index 0000000..e8e96d4
--- /dev/null
+++ b/doc/source/library/spreg/error_sp_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.error_sp_regimes` --- GM/GMM Estimation of Spatial Error and Spatial Combo Models with Regimes
+==========================================================================================================
+
+The :mod:`spreg.error_sp_regimes` module provides spatial error and spatial combo (spatial lag with spatial error) regression estimation with regimes and with and without endogenous variables; based on Kelejian and Prucha (1998 and 1999).
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.error_sp_regimes
+    :synopsis: Code for spreg spatial error and spatial combo with regimes regression
+    :members:
+
diff --git a/doc/source/library/spreg/index.rst b/doc/source/library/spreg/index.rst
new file mode 100644
index 0000000..1116b6d
--- /dev/null
+++ b/doc/source/library/spreg/index.rst
@@ -0,0 +1,29 @@
+:mod:`pysal.spreg` --- Regression and Diagnostics
+=================================================
+
+.. toctree::
+   :maxdepth: 1
+
+   ols
+   ols_regimes
+   probit
+   twosls
+   twosls_regimes
+   twosls_sp
+   twosls_sp_regimes
+   diagnostics
+   diagnostics_sp
+   diagnostics_tsls
+   error_sp
+   error_sp_regimes
+   error_sp_het
+   error_sp_het_regimes
+   error_sp_hom
+   error_sp_hom_regimes
+   regimes
+   ml_error
+   ml_error_regimes
+   ml_lag
+   ml_lag_regimes
+
+
diff --git a/doc/source/library/spreg/ml_error.rst b/doc/source/library/spreg/ml_error.rst
new file mode 100644
index 0000000..2ad22b1
--- /dev/null
+++ b/doc/source/library/spreg/ml_error.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ml_error` --- ML Estimation of Spatial Error  Model
+=====================================================================================
+
+The :mod:`spreg.ml_error` module provides spatial error model  estimation with maximum likelihood following Anselin (1988).
+
+.. versionadded:: 1.7
+
+
+.. automodule:: pysal.spreg.ml_error
+    :synopsis: Code for spreg spatial error estimation via ML
+    :members:
+
diff --git a/doc/source/library/spreg/ml_error_regimes.rst b/doc/source/library/spreg/ml_error_regimes.rst
new file mode 100644
index 0000000..41ca555
--- /dev/null
+++ b/doc/source/library/spreg/ml_error_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ml_error_regimes` --- ML Estimation of Spatial Error Model with Regimes
+=====================================================================================
+
+The :mod:`spreg.ml_error_regimes` module provides spatial error model with regimes  estimation with maximum likelihood following Anselin (1988).
+
+.. versionadded:: 1.7
+
+
+.. automodule:: pysal.spreg.ml_error_regimes
+    :synopsis: Code for spreg spatial error model with regimes estimation via ML
+    :members:
+
diff --git a/doc/source/library/spreg/ml_lag.rst b/doc/source/library/spreg/ml_lag.rst
new file mode 100644
index 0000000..601c134
--- /dev/null
+++ b/doc/source/library/spreg/ml_lag.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ml_lag` --- ML Estimation of Spatial Lag  Model
+=====================================================================================
+
+The :mod:`spreg.ml_lag` module provides spatial lag model  estimation with maximum likelihood following Anselin (1988).
+
+.. versionadded:: 1.7
+
+
+.. automodule:: pysal.spreg.ml_lag
+    :synopsis: Code for spreg spatial lag estimation via ML
+    :members:
+
diff --git a/doc/source/library/spreg/ml_lag_regimes.rst b/doc/source/library/spreg/ml_lag_regimes.rst
new file mode 100644
index 0000000..5f32e56
--- /dev/null
+++ b/doc/source/library/spreg/ml_lag_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ml_lag_regimes` --- ML Estimation of Spatial Lag Model with Regimes
+=====================================================================================
+
+The :mod:`spreg.ml_lag_regimes` module provides spatial lag model with regimes  estimation with maximum likelihood following Anselin (1988).
+
+.. versionadded:: 1.7
+
+
+.. automodule:: pysal.spreg.ml_lag_regimes
+    :synopsis: Code for spreg spatial lag model with regimes estimation via ML
+    :members:
+
diff --git a/doc/source/library/spreg/ols.rst b/doc/source/library/spreg/ols.rst
new file mode 100644
index 0000000..ca8a3f1
--- /dev/null
+++ b/doc/source/library/spreg/ols.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ols` --- Ordinary Least Squares
+============================================
+
+The :mod:`spreg.ols` module provides OLS regression estimation.
+
+.. versionadded:: 1.1
+
+
+.. automodule:: pysal.spreg.ols
+    :synopsis: Code for spreg OLS regression
+    :members:
+
diff --git a/doc/source/library/spreg/ols_regimes.rst b/doc/source/library/spreg/ols_regimes.rst
new file mode 100644
index 0000000..1286ea5
--- /dev/null
+++ b/doc/source/library/spreg/ols_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.ols_regimes` --- Ordinary Least Squares with Regimes
+================================================================
+
+The :mod:`spreg.ols_regimes` module provides OLS with regimes regression estimation.
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.ols_regimes
+    :synopsis: Code for spreg OLS with regimes regression
+    :members:
+
diff --git a/doc/source/library/spreg/probit.rst b/doc/source/library/spreg/probit.rst
new file mode 100644
index 0000000..07461a3
--- /dev/null
+++ b/doc/source/library/spreg/probit.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.probit` --- Probit
+===============================================
+
+The :mod:`spreg.probit` module provides probit regression estimation.
+
+.. versionadded:: 1.4
+
+
+.. automodule:: pysal.spreg.probit
+    :synopsis: Code for spreg probit regression
+    :members:
+
diff --git a/doc/source/library/spreg/regimes.rst b/doc/source/library/spreg/regimes.rst
new file mode 100644
index 0000000..141f379
--- /dev/null
+++ b/doc/source/library/spreg/regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.regimes` --- Spatial Regimes
+============================================
+
+The :mod:`spreg.regimes` module provides different spatial regime estimation procedures.
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.regimes
+    :synopsis: Code for spatial regimes
+    :members:
+
diff --git a/doc/source/library/spreg/twosls.rst b/doc/source/library/spreg/twosls.rst
new file mode 100644
index 0000000..9a47e7b
--- /dev/null
+++ b/doc/source/library/spreg/twosls.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.twosls` --- Two Stage Least Squares
+===============================================
+
+The :mod:`spreg.twosls` module provides 2SLS regression estimation.
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.twosls
+    :synopsis: Code for spreg 2SLS regression
+    :members:
+
diff --git a/doc/source/library/spreg/twosls_regimes.rst b/doc/source/library/spreg/twosls_regimes.rst
new file mode 100644
index 0000000..95f2400
--- /dev/null
+++ b/doc/source/library/spreg/twosls_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.twosls_regimes` --- Two Stage Least Squares with Regimes
+====================================================================
+
+The :mod:`spreg.twosls_regimes` module provides 2SLS with regimes regression estimation.
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.twosls_regimes
+    :synopsis: Code for spreg 2SLS with regimes regression
+    :members:
+
diff --git a/doc/source/library/spreg/twosls_sp.rst b/doc/source/library/spreg/twosls_sp.rst
new file mode 100644
index 0000000..e0052d5
--- /dev/null
+++ b/doc/source/library/spreg/twosls_sp.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.twosls_sp` --- Spatial Two Stage Least Squares
+==========================================================
+
+The :mod:`spreg.twosls_sp` module provides S2SLS regression estimation.
+
+.. versionadded:: 1.3
+
+
+.. automodule:: pysal.spreg.twosls_sp
+    :synopsis: Code for spreg S2SLS regression
+    :members:
+
diff --git a/doc/source/library/spreg/twosls_sp_regimes.rst b/doc/source/library/spreg/twosls_sp_regimes.rst
new file mode 100644
index 0000000..bc30c61
--- /dev/null
+++ b/doc/source/library/spreg/twosls_sp_regimes.rst
@@ -0,0 +1,12 @@
+:mod:`spreg.twosls_sp_regimes` --- Spatial Two Stage Least Squares with Regimes
+===============================================================================
+
+The :mod:`spreg.twosls_sp_regimes` module provides S2SLS with regimes regression estimation.
+
+.. versionadded:: 1.5
+
+
+.. automodule:: pysal.spreg.twosls_sp_regimes
+    :synopsis: Code for spreg S2SLS with regimes regression
+    :members:
+
diff --git a/doc/source/library/weights/Contiguity.rst b/doc/source/library/weights/Contiguity.rst
new file mode 100644
index 0000000..b35faa0
--- /dev/null
+++ b/doc/source/library/weights/Contiguity.rst
@@ -0,0 +1,11 @@
+:mod:`weights.Contiguity` --- Contiguity based spatial weights 
+==============================================================
+
+The :mod:`weights.Contiguity` module provides for the construction and manipulation of spatial  weights matrices based on contiguity criteria.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.Contiguity
+    :synopsis:  Spatial weights for PySAL: Python Spatial Analysis Library
+    :members:
+
diff --git a/doc/source/library/weights/Distance.rst b/doc/source/library/weights/Distance.rst
new file mode 100644
index 0000000..0976586
--- /dev/null
+++ b/doc/source/library/weights/Distance.rst
@@ -0,0 +1,11 @@
+:mod:`weights.Distance` --- Distance based spatial weights
+==========================================================
+
+The :mod:`weights.Distance` module provides for spatial weights defined
+on distance relationships.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.Distance
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/weights/Wsets.rst b/doc/source/library/weights/Wsets.rst
new file mode 100644
index 0000000..f3f9dea
--- /dev/null
+++ b/doc/source/library/weights/Wsets.rst
@@ -0,0 +1,10 @@
+:mod:`weights.Wsets` --- Set operations on spatial weights
+==========================================================
+
+The :mod:`weights.Wsets` module provides for set operations on weights objects.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.Wsets
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/weights/index.rst b/doc/source/library/weights/index.rst
new file mode 100644
index 0000000..f97b7c9
--- /dev/null
+++ b/doc/source/library/weights/index.rst
@@ -0,0 +1,12 @@
+:mod:`pysal.weights` --- Spatial Weights
+==========================================
+
+.. toctree::
+
+   weights
+   util
+   user
+   Contiguity
+   Distance
+   Wsets
+   spatial_lag
diff --git a/doc/source/library/weights/spatial_lag.rst b/doc/source/library/weights/spatial_lag.rst
new file mode 100644
index 0000000..ae528af
--- /dev/null
+++ b/doc/source/library/weights/spatial_lag.rst
@@ -0,0 +1,14 @@
+:mod:`weights.spatial_lag` --- Spatial lag operators
+====================================================
+
+The :mod:`weights.spatial_lag` module provides spatial lag operators for PySAL.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.spatial_lag
+    :synopsis: Spatial lag operators for PySAL
+    :members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/library/weights/user.rst b/doc/source/library/weights/user.rst
new file mode 100644
index 0000000..0a40f39
--- /dev/null
+++ b/doc/source/library/weights/user.rst
@@ -0,0 +1,11 @@
+
+:mod:`weights.user` --- Convenience functions for spatial weights
+=================================================================
+
+The :mod:`weights.user` module provides convenience functions for spatial weights.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.user
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/weights/util.rst b/doc/source/library/weights/util.rst
new file mode 100644
index 0000000..7f5bcd8
--- /dev/null
+++ b/doc/source/library/weights/util.rst
@@ -0,0 +1,11 @@
+
+:mod:`weights.util` --- Utility functions on spatial weights
+===============================================================
+
+The :mod:`weights.util` module provides utility functions on spatial weights.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.util
+    :synopsis: 
+    :members:
diff --git a/doc/source/library/weights/weights.rst b/doc/source/library/weights/weights.rst
new file mode 100644
index 0000000..ffeb9f4
--- /dev/null
+++ b/doc/source/library/weights/weights.rst
@@ -0,0 +1,13 @@
+:mod:`pysal.weights` --- Spatial weights matrices
+=================================================
+
+The :mod:`weights` module provides spatial weights for PySAL.
+
+.. versionadded:: 1.0
+
+.. automodule:: pysal.weights.weights
+    :synopsis: Spatial weights for PySAL
+    :members:
+
+
+
diff --git a/doc/source/license.rst b/doc/source/license.rst
new file mode 100644
index 0000000..024fe49
--- /dev/null
+++ b/doc/source/license.rst
@@ -0,0 +1,33 @@
+PySAL License
+=============
+
+|  Copyright (c) 2007-2014, PySAL Developers
+|  All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of the GeoDa Center for Geospatial Analysis and Computation
+  nor the names of PySAL contributors may be used to endorse or promote products
+  derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/doc/source/news.rst b/doc/source/news.rst
new file mode 100644
index 0000000..71ea07f
--- /dev/null
+++ b/doc/source/news.rst
@@ -0,0 +1,55 @@
+.. news:
+
+Upcoming Events
+===============
+
+`2014-11-12: PySAL Workshop at the North American Meetings of the Regional Science Association Meetings, Bethesda <http://www.narsc.org/newsite/?page_id=67>`_ 
+
+
+News
+====
+
+(2014-09-23) `PySAL Tutorial at GIScience 2014,
+Vienna <http://www.giscience.org/workshops_tutorials.html>`__
+
+(2014-09-08) `PySAL Workshop at FOSS4G 2014,
+Portland <https://2014.foss4g.org/schedule/workshops/#wshop-content-568>`__
+
+(2014-08-08) `National Science Foundation Funding for New Approaches to Spatial Distribution Dynamics <https://geoplan.asu.edu/research-projects/new-approaches-spatial-distribution-dynamics>`_
+
+(2014-07-25) `PySAL 1.8.0 released <http://PySAL.readthedocs.org/en/v1.8/users/installation.html>`_
+
+(2014-01-31) `PySAL 1.7.0 released <https://github.com/pysal/pysal/wiki/PySAL-1.7-Release-(2014-01-31)>`_
+  
+(2013-11-13) `PySAL Workshop at North American Regional Science Association Meetings, Atlanta, GA <http://www.narsc.org/newsite/?page_id=2547>`_ 
+
+(2013-07-31) `PySAL 1.6.0 released <https://github.com/pysal/pysal/wiki/PySAL-1.6-Released-(2013-07-31)>`_
+
+(2013-01-30) `PySAL 1.5.0 released <http://code.google.com/p/pysal/wiki/Announce1_5>`_
+
+(2012-10-24) `PySAL short course at OGRS 2012 <https://twitter.com/OGRS2012/status/261106998861504512>`_
+
+(2012-09-18) `PySAL short course at GIScience 2012 <http://www.giscience.org/workshops.html>`_
+
+(2012-07-31) `PySAL 1.4.0 released <http://code.google.com/p/pysal/wiki/Announce1_4>`_
+
+(2012-07-30) `Short course on PySAL for Spatial Regression <https://www.geodapress.com/workshops/spatial-regression#description>`_
+
+(2012-07-30) `PySAL presentation at Joint Statistical Meetings
+<https://www.amstat.org/meetings/jsm/2012/onlineprogram/AbstractDetails.cfm?abstractid=303498>`_
+
+(2012-07-18) `PySAL at SciPy 2012 <http://conference.scipy.org/scipy2012/schedule/conf_schedule_1.php>`_
+
+(2012-01-31) `PySAL 1.3.0 released <http://code.google.com/p/pysal/wiki/Announce1_3>`_
+
+(2011-07-31) `PySAL 1.2.0 released <http://code.google.com/p/pysal/wiki/Announce1_2>`_
+
+(2011-01-31) `PySAL 1.1.0 released <http://code.google.com/p/pysal/wiki/Announce1_1>`_
+
+(2010-07-31) `PySAL 1.0.0 released <http://code.google.com/p/pysal/wiki/Announce1_0>`_ 
+
+
+(2010-01-6) New Website for PySAL!
+
+(2009-11-03) `National Institute of Justice funding <http://geoplan.asu.edu/node/3855>`_
+
diff --git a/doc/source/users/index.rst b/doc/source/users/index.rst
new file mode 100644
index 0000000..a68eca0
--- /dev/null
+++ b/doc/source/users/index.rst
@@ -0,0 +1,11 @@
+==========
+User Guide
+==========
+
+
+.. toctree::
+   :maxdepth: 1
+
+   What is PySAL? <introduction>
+   Installing PySAL <installation>
+   Getting Started with PySAL <tutorials/index>
diff --git a/doc/source/users/installation.rst b/doc/source/users/installation.rst
new file mode 100644
index 0000000..3658886
--- /dev/null
+++ b/doc/source/users/installation.rst
@@ -0,0 +1,160 @@
+.. _installation:
+
+==============
+Install  PySAL 
+==============
+
+Windows users can download an .exe installer `here on 
+Sourceforge <http://sourceforge.net/projects/pysal/files/?source=navbar>`_.
+
+
+PySAL is built upon the Python scientific stack including numpy and
+scipy. While these libraries are packaged for several platforms, the
+Anaconda and Enthought Python distributions include them along with the core
+Python library.
+
+- `Anaconda Python distribution <http://continuum.io/downloads.html>`_
+- `Enthought Canopy <https://www.enthought.com/downloads>`_
+
+Note that while both Anaconda and Enthought Canopy will satisfy the
+dependencies for PySAL, the version of PySAL included in these distributions
+might be behind the latest stable release of PySAL.  You can update to the latest
+stable version of PySAL with either of these distributions as follows:
+
+1. In a terminal start the python version associated with the distribution.
+   Make sure you are not using a different (system) version of Python. To
+   check this use `which python` from a terminal to see if Anaconda or
+   Enthought appear in the output.
+2. `pip install -U pysal`
+
+
+
+
+If you do not wish to use either Anaconda or Enthought, ensure the following software packages are available on your machine:
+
+* `Python <http://www.python.org/download>`_ 2.6, or 2.7 
+* `numpy <http://new.scipy.org/download.html>`_ 1.3 or later
+* `scipy <http://new.scipy.org/download.html>`_ 0.11 or later
+
+Getting your feet wet
+----------------------
+
+You can start using PySAL right away on the web with Wakari, 
+PythonAnywhere, or SageMathCloud. 
+
+wakari http://continuum.io/wakari
+
+PythonAnywhere https://www.pythonanywhere.com/
+
+SageMathCloud https://cloud.sagemath.com/
+
+
+Download and install 
+--------------------
+
+PySAL is available on the `Python Package Index
+<http://pypi.python.org/pypi/pysal>`_, which means it can be
+downloaded and installed manually or from the command line using 
+`pip`, as follows::
+
+ $ pip install pysal
+
+Alternatively, grab the source distribution (.tar.gz) and decompress it to your selected destination. Open a command shell and navigate to the decompressed pysal folder. Type::
+
+ $ python setup.py install
+
+
+Development version on GitHub 
+-----------------------------
+
+Developers can checkout PySAL using **git**::
+
+ $ git clone https://github.com/pysal/pysal.git 
+
+Open a command shell and navigate to the cloned pysal
+directory. Type::
+
+ $ python setup.py develop
+
+The 'develop' subcommand builds the modules in place 
+and modifies sys.path to include the code.
+The advantage of this method is that you get the latest code 
+but don't have to fuss with editing system environment variables.
+
+To test your setup, start a Python session and type::
+
+ >>> import pysal
+
+Keep up to date with pysal development by 'pulling' the latest
+changes::
+
+ $ git pull
+
+Windows
+~~~~~~~~
+
+To keep up to date with PySAL development, you will need a Git client that allows you to access and 
+update the code from our repository. We recommend 
+`GitHub Windows <http://windows.github.com/>`_ for a more graphical client, or
+`Git Bash <https://code.google.com/p/msysgit/downloads/list?q=label:Featured>`_ for a
+command line client. This one gives you a nice Unix-like shell with
+familiar commands. Here is `a nice tutorial
+<http://openhatch.org/missions/windows-setup/>`_ on getting going with Open
+Source software on Windows. 
+
+After cloning pysal, install it in develop mode so Python knows where to find it. 
+
+Open a command shell and navigate to the cloned pysal
+directory. Type::
+
+ $ python setup.py develop
+
+To test your setup, start a Python session and type::
+
+ >>> import pysal
+
+Keep up to date with pysal development by 'pulling' the latest
+changes::
+
+ $ git pull
+
+
+Troubleshooting
+===============
+
+If you experience problems when building, installing, or testing pysal, ask for
+help on the
+`OpenSpace <http://geodacenter.asu.edu/support/community>`_ 
+list or
+browse the archives of the
+`pysal-dev <http://groups.google.com/group/pysal-dev?pli=1>`_ 
+google group. 
+
+Please include the output of the following commands in your message:
+
+1) Platform information::
+
+    python -c 'import os,sys;print os.name, sys.platform'
+    uname -a
+
+2) Python version::
+    
+    python -c 'import sys; print sys.version'
+
+3) SciPy version::
+
+    python -c 'import scipy; print scipy.__version__'
+
+4) NumPy version::
+
+    python -c 'import numpy; print numpy.__version__'
+
+5) Feel free to add any other relevant information.
+   For example, the full output (both stdout and stderr) of the pysal
+   installation command can be very helpful. Since this output can be
+   rather large, ask before sending it into the mailing list (or
+   better yet, to one of the developers, if asked).
+
+
+
+
diff --git a/doc/source/users/introduction.rst b/doc/source/users/introduction.rst
new file mode 100644
index 0000000..2194b59
--- /dev/null
+++ b/doc/source/users/introduction.rst
@@ -0,0 +1,68 @@
+.. _introduction:
+
+============
+Introduction
+============
+
+.. contents::
+
+History
+*******
+
+PySAL grew out of a collaborative effort between Luc Anselin's group
+previously located at the University of Illinois, Champaign-Urbana, and Serge
+Rey who was at San Diego State University.  It was born out of a recognition that
+the respective projects at the two institutions,  `PySpace (now GeoDaSpace) 
+<http://geodacenter.asu.edu/pyspaceimg>`_  and `STARS
+<http://regionalanalysislab.org/index.php/Main/STARS>`_ - Space Time Analysis
+of Regional Systems, could benefit from a shared analytical core, since
+this would limit code duplication and free up additional developer time to
+focus on enhancements of the respective applications.
+
+This recognition also came at a time when Python was starting to make major
+inroads in geographic information systems as represented by projects such as
+the `Python Cartographic Library <http://zmapserver.sourceforge.net/PCL/>`_,
+`Shapely <http://trac.gispython.org/lab/wiki/Shapely>`_ and ESRI's adoption of
+Python as a scripting language, among others. At the same time there was a
+dearth of Python modules for spatial statistics, spatial econometrics, location
+modeling and other areas of spatial analysis, and the role for PySAL was then
+expanded beyond its support of STARS and GeoDaSpace to provide a library of core
+spatial analytical functions that could support the next generation of spatial
+analysis applications.
+
+In 2008 the home for PySAL moved to the `GeoDa Center for Geospatial Analysis
+and Computation <http://geodacenter.asu.edu/>`_ at Arizona State University.
+
+Scope
+*****
+
+It is important to underscore what PySAL is, and is not, designed to do. First
+and foremost, PySAL is a **library** in the fullest sense of the word.
+Developers looking for a suite of spatial analytical methods that they can
+incorporate into application development should feel at home using PySAL.
+Spatial analysts who may be carrying out research projects requiring customized
+scripting, extensive simulation analysis, or those seeking to advance the state
+of the art in spatial analysis should also find PySAL to be a useful
+foundation for their work.
+
+End users looking for a user friendly graphical user interface for spatial
+analysis should not turn to PySAL directly. Instead, we would direct them to
+projects like STARS and the GeoDaX suite of software products which wrap PySAL
+functionality in GUIs.  At the same time, we expect that with developments such
+as the Python based plug-in architectures for `QGIS
+<http://www.qgis.org/wiki/Python_Plugin_Repositories>`_, `GRASS
+<http://grass.osgeo.org/wiki/GRASS_and_Python>`_, and the toolbox extensions
+for `ArcGIS
+<http://training.esri.com/gateway/index.cfm?fa=catalog.courseDetail&CourseID=50089911_9.X>`_,
+that end user access to PySAL functionality will be widening in the near
+future.
+
+
+Research Papers and Presentations
+*********************************
+    * Rey, Sergio J. (2012) `PySAL: A Python Library for Exploratory Spatial Data Analysis and Geocomputation <http://www.youtube.com/watch?v=FN1nH4Fkd_Y>`_ (Movie) SciPy 2012.
+    * Rey, Sergio J. and Luc Anselin. (2010) `PySAL: A Python Library of
+      Spatial Analytical Methods. <http://books.google.com/books?hl=en&lr=&id=c0EP_6eYsjAC&oi=fnd&pg=PA174&dq=pysal&ots=JzI8vk8D4T&sig=J6FEAnbG5Wzw2nn2-0nfj4B6c3Q#v=onepage&q=pysal&f=false>`_ In M. Fischer and A. Getis (eds.) Handbook of Applied Spatial Analysis: Software Tools, Methods and Applications. Springer, Berlin. 
+    * Rey, Sergio J. and Luc Anselin. (2009) `PySAL: A Python Library for Spatial Analysis and Geocomputation <http://www.archive.org/details/scipy09_day2_10-Serge_Rey>`_. (Movie) Python for Scientific Computing. Caltech, Pasadena, CA August 2009.
+    * Rey, Sergio J. (2009). `Show Me the Code: Spatial Analysis and Open Source <http://www.springerlink.com/content/91u84l471h043282/>`_. *Journal of Geographical Systems* 11: 191-207.
+    * Rey, S.J., Anselin, L., & M. Hwang. (2008). `Dynamic Manipulation of Spatial Weights Using Web Services. <http://geodacenter.asu.edu/node/174>`_ GeoDa Center Working Paper 2008-12.
diff --git a/doc/source/users/tutorials/autocorrelation.rst b/doc/source/users/tutorials/autocorrelation.rst
new file mode 100644
index 0000000..1b16c4c
--- /dev/null
+++ b/doc/source/users/tutorials/autocorrelation.rst
@@ -0,0 +1,830 @@
+.. testsetup:: * 
+
+        import pysal
+        import numpy as np
+        np.random.seed(12345)
+
+#######################
+Spatial Autocorrelation
+#######################
+
+.. contents::
+
+Introduction
+============
+
+Spatial autocorrelation pertains to the non-random pattern of attribute values
+over a set of spatial units. This can take two general forms: positive
+autocorrelation which reflects value similarity in space, and negative
+autocorrelation or value dissimilarity in space. In either case the
+autocorrelation arises when the observed spatial pattern is different from what would
+be expected under a random process operating in space.
+
+Spatial autocorrelation can be analyzed from two different perspectives. Global
+autocorrelation analysis involves the study of the entire map pattern and
+generally asks the question as to whether the pattern displays clustering or
+not. Local autocorrelation, on the other hand, shifts the focus to explore
+within the global pattern to identify clusters or so called hot spots that may be
+either driving the overall clustering pattern, or that reflect heterogeneities
+that depart from the global pattern.
+
+In what follows, we first highlight the global spatial autocorrelation classes
+in PySAL. This is followed by an illustration of the analysis of local spatial
+autocorrelation.
+
+Global Autocorrelation
+======================
+
+PySAL implements five different tests for global spatial autocorrelation:
+the Gamma index of spatial autocorrelation, join count statistics, 
+Moran's I, Geary's C, and Getis and Ord's G.
+
+Gamma Index of Spatial Autocorrelation
+--------------------------------------
+
+The Gamma Index of spatial autocorrelation consists of the application of the principle
+behind a general cross-product statistic to measuring spatial autocorrelation. [#]_
+The idea is to assess whether two similarity matrices for n objects, i.e., n by n
+matrices A and B measure the same type of similarity. This is reflected in a so-called
+Gamma Index :math:`\Gamma = \sum_i \sum_j a_{ij}.b_{ij}`. In other words, the statistic
+consists of the sum over all cross-products of matching elements (i,j) in the two 
+matrices.
+
+The application of this principle to spatial autocorrelation consists of turning
+the first similarity matrix into a measure of attribute similarity and the second
+matrix into a measure of locational similarity. Naturally, the second matrix is
+a spatial :doc:`weight <weights>` matrix. The first matrix can be any reasonable measure of attribute
+similarity or dissimilarity, such as a cross-product, squared difference or absolute
+difference.
+
+Formally, then, the Gamma index is:
+
+.. math::
+
+        \Gamma = \sum_i \sum_j a_{ij}.w_{ij}
+        
+where the :math:`w_{ij}` are the elements of the weights matrix and 
+:math:`a_{ij}` are corresponding measures of attribute similarity.
+
+Inference for this statistic is based on a permutation approach in which the values
+are shuffled around among the locations and the statistic is recomputed each
+time. This creates a reference distribution for the statistic under the null
+hypothesis of spatial randomness. The observed statistic is then compared to this
+reference distribution and a pseudo-significance computed as
+
+.. math::
+
+       p = (m + 1) / (n + 1)
+       
+where m is the number of values from the reference distribution that are equal to
+or greater than the observed Gamma index and n is the number of permutations.
+
+The Gamma test is a two-sided test in the sense that both extremely high values (e.g.,
+larger than any value in the reference distribution) and extremely low values
+(e.g., smaller than any value in the reference distribution) can be considered
+to be significant. Depending on how the measure of attribute similarity is defined,
+a high value will indicate positive or negative spatial autocorrelation, and vice
+versa. For example, for a cross-product measure of attribute similarity, high values
+indicate positive spatial autocorrelation and low values negative spatial autocorrelation.
+For a squared difference measure, it is the reverse. This is similar to the 
+interpretation of the :ref:`moran` statistic and :ref:`geary` statistic respectively.
+
+Many spatial autocorrelation test statistics can be shown to be special cases of the
+Gamma index. In most instances, the Gamma index is an unstandardized version of the
+commonly used statistics. As such, the Gamma index is scale dependent, since no
+normalization is carried out (such as deviations from the mean or rescaling by the
+variance). Also, since the sum is over all the elements, the value of a Gamma
+statistic will grow with the sample size, everything else being the same.
+
+PySAL implements four forms of the Gamma index. Three of these are pre-specified
+and one allows the user to pass any function that computes a measure of attribute
+similarity. This function should take three parameters: the vector of observations,
+an index i and an index j.
+
+We will illustrate the Gamma index using the same small artificial example
+as we use for the  :ref:`moran1`  in order to illustrate the similarities
+and differences between them. The data consist of a regular 4 by 4 lattice with
+values of 0 in the top half and values of 1 in the bottom half. We start with the usual 
+imports, and set the random seed to 12345 in order
+to be able to replicate the results of the permutation approach.
+
+
+        >>> import pysal
+        >>> import numpy as np
+        >>> np.random.seed(12345)
+        
+We create the binary weights matrix for the 4 x 4 lattice and generate the
+observation vector y:
+
+.. doctest::
+
+        >>> w=pysal.lat2W(4,4)
+        >>> y=np.ones(16)
+        >>> y[0:8]=0 
+
+The Gamma index function has five arguments, three of which are optional.
+The first two arguments are the vector of observations (y) and the spatial
+weights object (w). Next are ``operation``, the measure of attribute similarity,
+the default of which is ``operation = 'c'`` for cross-product similarity, 
+:math:`a_{ij} = y_i.y_j`. The other two built-in options are ``operation = 's'`` for
+squared difference, :math:`a_{ij} = (y_i - y_j)^2` and ``operation = 'a'`` for
+absolute difference, :math:`a_{ij} = | y_i - y_j |`. The fourth option is to
+pass an arbitrary attribute similarity function, as in ``operation = func``, where ``func``
+is a function with three arguments, ``def func(y,i,j)`` with y as the vector
+of observations, and i and j as indices. This function should return a single
+value for attribute similarity.
+
+The fourth argument allows the observed values to be standardized before the
+calculation of the Gamma index. To some extent, this addresses the scale dependence
+of the index, but not its dependence on the number of observations. The default
+is no standardization, ``standardize = 'no'``. To force standardization,
+set ``standardize = 'yes'`` or ``'y'``. The final argument is the number of
+permutations, ``permutations`` with the default set to 999.
+
+As a first illustration, we invoke the Gamma index using all the default
+values, i.e. cross-product similarity, no standardization, and permutations
+set to 999. The interesting statistics are the magnitude of the Gamma index ``g``,
+the standardized Gamma index using the mean and standard deviation from the
+reference distribution, ``g_z`` and the pseudo-p value obtained from the
+permutation, ``p_sim_g``. In addition, the minimum (``min_g``), maximum (``max_g``)
+and mean (``mean_g``) of the reference distribution are available as well.
+
+.. doctest::
+
+        >>> g = pysal.Gamma(y,w)
+        >>> g.g
+        20.0
+        >>> "%.3f"%g.g_z
+        '3.188'
+        >>> g.p_sim_g
+        0.0030000000000000001
+        >>> g.min_g
+        0.0
+        >>> g.max_g
+        20.0
+        >>> g.mean_g
+        11.093093093093094
+
+Note that the value for Gamma is exactly twice the BB statistic obtained in the
+example below, since the attribute similarity criterion is identical, but Gamma is
+not divided by 2.0. The observed value is very extreme, with only two replications
+from the permutation equalling the value of 20.0. This indicates significant
+positive spatial autocorrelation.
+
+As a second illustration, we use the squared difference criterion, which
+corresponds to the BW Join Count statistic. We reset the random seed to
+keep comparability of the results.
+
+.. doctest::
+
+        >>> np.random.seed(12345)
+        >>> g1 = pysal.Gamma(y,w,operation='s')
+        >>> g1.g
+        8.0
+        >>> "%.3f"%g1.g_z
+        '-3.706'
+        >>> g1.p_sim_g
+        0.001
+        >>> g1.min_g
+        14.0
+        >>> g1.max_g
+        48.0
+        >>> g1.mean_g
+        25.623623623623622
+
+The Gamma index value of 8.0 is exactly twice the value of the BW statistic for
+this example. However, since the Gamma index is used for a two-sided test, this
+value is highly significant, and with a negative z-value, this suggests positive 
+spatial autocorrelation (similar
+to Geary's C). In other words, this result is consistent with the finding for the
+Gamma index that used cross-product similarity.
+
+As a third example, we use the absolute difference for attribute similarity.
+The results are identical to those for squared difference since these two
+criteria are equivalent for 0-1 values.
+
+.. doctest::
+
+        >>> np.random.seed(12345)
+        >>> g2 = pysal.Gamma(y,w,operation='a')
+        >>> g2.g
+        8.0
+        >>> "%.3f"%g2.g_z
+        '-3.706'
+        >>> g2.p_sim_g
+        0.001
+        >>> g2.min_g
+        14.0
+        >>> g2.max_g
+        48.0
+        >>> g2.mean_g
+        25.623623623623622
+    
+We next illustrate the effect of standardization, using the default operation.
+As shown, the value of the statistic is quite different from the unstandardized
+form, but the inference is equivalent.
+
+.. doctest::
+
+        >>> np.random.seed(12345)
+        >>> g3 = pysal.Gamma(y,w,standardize='y')
+        >>> g3.g
+        32.0
+        >>> "%.3f"%g3.g_z
+        '3.706'
+        >>> g3.p_sim_g
+        0.001
+        >>> g3.min_g
+        -48.0
+        >>> g3.max_g
+        20.0
+        >>> "%.3f"%g3.mean_g
+        '-3.247'
+
+Note that all the tests shown here have used the weights matrix in binary form.
+However, since the Gamma index is perfectly general,
+any standardization can be applied to the weights.
+
+Finally, we illustrate the use of an arbitrary attribute similarity function.
+In order to compare to the results above, we will define a function that 
+produces a cross product similarity measure. We will then pass this function
+to the ``operation`` argument of the Gamma index.
+
+.. doctest::
+
+        >>> np.random.seed(12345)
+        >>> def func(z,i,j):
+        ...     q = z[i]*z[j]
+        ...     return q
+        ... 
+        >>> g4 = pysal.Gamma(y,w,operation=func)
+        >>> g4.g
+        20.0
+        >>> "%.3f"%g4.g_z
+        '3.188'
+        >>> g4.p_sim_g
+        0.0030000000000000001
+
+As expected, the results are identical to those obtained with the default
+operation. 
+
+
+.. _moran1:
+
+Join Count Statistics
+---------------------
+
+The join count statistics measure global spatial autocorrelation for binary data, i.e.,
+with observations coded as 1 or B (for Black) and 0 or W (for White). They follow the
+very simple principle of counting joins, i.e., the arrangement of values between
+pairs of observations where the pairs correspond to neighbors. The three resulting
+join count statistics are BB, WW and BW. Both BB and WW are measures of positive
+spatial autocorrelation, whereas BW is an indicator of negative spatial autocorrelation.
+
+To implement the join count statistics, we need the spatial weights matrix in 
+binary (not row-standardized) form. With :math:`y` as the vector of observations
+and the spatial :doc:`weight <weights>` as :math:`w_{i,j}`, the three statistics can be expressed as:
+
+.. math::
+
+       BB = (1/2) \sum_{i}\sum_{j} y_i y_j w_{ij}
+     
+.. math::      
+  
+       WW = (1/2) \sum_{i}\sum_{j} (1 - y_i)(1 - y_j) w_{ij}
+
+.. math::
+
+       BW = (1/2) \sum_{i}\sum_{j} (y_i - y_j)^2 w_{ij}
+     
+By convention, the join counts are divided by 2 to avoid double counting. Also, since
+the three joins exhaust all the possibilities, they sum to one half (because of the
+division by 2) of the total sum of weights :math:`J = (1/2)S_0 = (1/2)\sum_{i}\sum_{j} w_{ij}`.
+
+Inference for the join count statistics can be based on either an analytical approach
+or a computational approach. The analytical approach starts from the binomial distribution
+and derives the moments of the statistics under the assumption of free sampling
+and non-free sampling. The resulting mean and variance are used to construct a
+standardized z-variable which can be approximated as a standard normal variate. [#]_
+However, the approximation is often poor in practice. We therefore only implement the
+computational approach.
+
+Computational inference is based on a permutation approach in which the values of y
+are randomly reshuffled many times to obtain a reference distribution of the statistics
+under the null hypothesis of spatial randomness. The observed join count is then
+compared to this reference distribution and a pseudo-significance computed as
+
+.. math::
+
+       p = (m + 1) / (n + 1)
+       
+where m is the number of values from the reference distribution that are equal to
+or greater than the observed join count and n is the number of permutations. Note
+that the join counts are a one-sided test. If the counts are much smaller
+than the reference distribution, this is not an indication of significance. For
+example, if the BW counts are extremely small, this is not an indication of
+*negative* BW autocorrelation, but instead points to the presence of BB or WW
+autocorrelation.
+
+We will illustrate the join count statistics with a simple artificial example
+of a 4 by 4 square lattice with values of 0 in the top half and values of 1 in
+the bottom half.
+
+We start with the usual imports, and set the random seed to 12345 in order
+to be able to replicate the results of the permutation approach.
+
+.. doctest::
+
+        >>> import pysal
+        >>> import numpy as np
+        >>> np.random.seed(12345)
+        
+We create the binary weights matrix for the 4 x 4 lattice and generate the
+observation vector y:
+
+.. doctest::
+
+        >>> w=pysal.lat2W(4,4)
+        >>> y=np.ones(16)
+        >>> y[0:8]=0 
+
+We obtain an instance of the join count statistics BB, BW and WW as (J is
+half the sum of all the weights and should equal the sum of BB, WW and BW):
+
+.. doctest::
+
+        >>> jc=pysal.Join_Counts(y,w)
+        >>> jc.bb
+        10.0
+        >>> jc.bw
+        4.0
+        >>> jc.ww
+        10.0
+        >>> jc.J
+        24.0
+
+The number of permutations is set to 999 by default. For other values, this parameter
+needs to be passed explicitly, as in:
+
+
+        >>> jc=pysal.Join_Counts(y,w,permutations=99)
+        
+The results in our simple example show that the BB counts are 10. There are
+in fact 3 horizontal joins in each of the bottom rows of the lattice as well as
+4 vertical joins, which makes for bb = 3 + 3 + 4 = 10. The BW joins are 4, matching the
+separation between the bottom and top part.
+
+The permutation results give a pseudo-p value for BB of 0.003, suggesting highly
+significant positive spatial autocorrelation. The average BB count
+for the sample of 999 replications is 5.5, quite a bit lower than the count of 10
+we obtain. Only two instances of the replicated samples yield a value equal to 10,
+none is greater (the randomly permuted samples yield bb values between 0 and 10).
+
+.. doctest::
+
+        >>> len(jc.sim_bb)
+        999
+        >>> jc.p_sim_bb
+        0.0030000000000000001
+        >>> np.mean(jc.sim_bb)
+        5.5465465465465469
+        >>> np.max(jc.sim_bb)
+        10.0
+        >>> np.min(jc.sim_bb)
+        0.0
+
+The results for BW (negative spatial autocorrelation) show a probability of 1.0
+under the null hypothesis. This means that all the values of BW from the randomly
+permuted data sets were larger than the observed value of 4. In fact the range
+of these values is between 7 and 24. In other words, this again strongly points
+towards the presence of positive spatial autocorrelation. The observed number of
+BB and WW joins (10 each) is so high that there are hardly any BW joins (4).
+
+.. doctest::
+
+        >>> len(jc.sim_bw)
+        999
+        >>> jc.p_sim_bw
+        1.0
+        >>> np.mean(jc.sim_bw)
+        12.811811811811811
+        >>> np.max(jc.sim_bw)
+        24.0
+        >>> np.min(jc.sim_bw)
+        7.0
+
+.. _moran:
+
+Moran's I
+---------
+
+Moran's I measures the global spatial autocorrelation in an attribute :math:`y` measured over :math:`n` spatial units and is given as:
+
+.. math::
+
+        I = n/S_0  \sum_{i}\sum_j z_i w_{i,j} z_j / \sum_i z_i z_i
+
+where :math:`w_{i,j}` is a spatial :doc:`weight <weights>`, :math:`z_i = y_i - \bar{y}`, and :math:`S_0=\sum_i\sum_j w_{i,j}`.  We illustrate the use of Moran's I with a case study of homicide rates for a group of 78 counties surrounding St. Louis over the period 1988-93. [#]_
+We start with the usual imports:
+
+
+        >>> import pysal
+        >>> import numpy as np
+
+Next, we read in the homicide rates:
+
+.. doctest::
+
+        >>> f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
+        >>> y = np.array(f.by_col['HR8893'])
+
+To calculate Moran's I we first need to read in a GAL file for a rook weights
+matrix and create an instance of W:
+
+.. doctest::
+
+        >>> w = pysal.open(pysal.examples.get_path("stl.gal")).read()
+        
+The instance of Moran's I can then be obtained with:
+
+.. doctest::
+
+        >>> mi = pysal.Moran(y, w, two_tailed=False)
+        >>> "%.3f"%mi.I
+        '0.244'
+        >>> mi.EI
+        -0.012987012987012988
+        >>> "%.5f"%mi.p_norm
+        '0.00014'
+
+From these results, we see that the observed value for I is significantly above its expected value, under the assumption of normality for the homicide rates. 
+
+If we peek inside the mi object to learn more:
+
+        >>> help(mi)
+
+which generates::
+
+        Help on instance of Moran in module pysal.esda.moran:
+
+        class Moran
+         |  Moran's I Global Autocorrelation Statistic
+         |  
+         |  Parameters
+         |  ----------
+         |  
+         |  y               : array
+         |                    variable measured across n spatial units
+         |  w               : W
+         |                    spatial weights instance
+         |  permutations    : int
+         |                    number of random permutations for calculation of pseudo-p_values
+         |  
+         |  
+         |  Attributes
+         |  ----------
+         |  y            : array
+         |                 original variable
+         |  w            : W
+         |                 original w object
+         |  permutations : int
+         |                 number of permutations
+         |  I            : float
+         |                 value of Moran's I
+         |  EI           : float
+         |                 expected value under normality assumption
+         |  VI_norm      : float
+         |                 variance of I under normality assumption
+         |  seI_norm     : float
+         |                 standard deviation of I under normality assumption
+         |  z_norm       : float
+         |                 z-value of I under normality assumption
+         |  p_norm       : float
+         |                 p-value of I under normality assumption (one-sided)
+         |                 for two-sided tests, this value should be multiplied by 2
+         |  VI_rand      : float
+         |                 variance of I under randomization assumption
+         |  seI_rand     : float
+         |                 standard deviation of I under randomization assumption
+         |  z_rand       : float
+         |                 z-value of I under randomization assumption
+         |  p_rand       : float
+         |                 p-value of I under randomization assumption (1-tailed)
+         |  sim          : array (if permutations>0)
+        
+we see that we can base the inference not only on the normality assumption, but also on random permutations of the values on the spatial units to generate a reference distribution for I under the null:
+
+.. doctest::
+
+        >>> np.random.seed(10)
+        >>> mir = pysal.Moran(y, w, permutations = 9999)
+
+The pseudo p value based on these permutations is: 
+
+.. doctest::
+
+        >>> print mir.p_sim
+        0.0022
+
+in other words there were 21 permutations that generated values for I that
+were as extreme as the original value, so the p value becomes (21+1)/(9999+1). [#]_
+Alternatively, we could use the realized values for I from the permutations and
+compare the original I using a z-transformation to get:
+
+.. doctest::
+
+        >>> print mir.EI_sim
+        -0.0118217511619
+        >>> print mir.z_sim
+        4.55451777821
+        >>> print mir.p_z_sim
+        2.62529422013e-06
+
+When the variable of interest (:math:`y`) is rates based on populations with different sizes, 
+the Moran's I value for :math:`y` needs to be adjusted to account for the differences among populations. [#]_
+To apply this adjustment, we can create an instance of the Moran_Rate class rather than the Moran class.
+For example, let's assume that we want to estimate the Moran's I for the rates of newborn infants who died of 
+Sudden Infant Death Syndrome (SIDS). We start this estimation by reading in the total number of newborn infants (BIR79)
+and the total number of newborn infants who died of SIDS (SID79):
+
+.. doctest::
+
+        >>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        >>> b = np.array(f.by_col('BIR79'))
+        >>> e = np.array(f.by_col('SID79'))
+
+Next, we create an instance of W:
+
+.. doctest::
+
+        >>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
+
+Now, we create an instance of Moran_Rate:
+
+.. doctest::
+
+        >>> mi = pysal.esda.moran.Moran_Rate(e, b, w, two_tailed=False)
+        >>> "%6.4f" % mi.I
+        '0.1662'
+        >>> "%6.4f" % mi.EI
+        '-0.0101'
+        >>> "%6.4f" % mi.p_norm
+        '0.0042'
+
+From these results, we see that the observed value for I is significantly higher than its expected value,
+after the adjustment for the differences in population.
+
+.. _geary:
+
+Geary's C
+---------
+The fourth statistic for global spatial autocorrelation implemented in PySAL is Geary's C:
+
+.. math::
+
+        C=\frac{(n-1)}{2S_0} \sum_i\sum_j w_{i,j} (y_i-y_j)^2 / \sum_i z_i^2
+
+with all the terms defined as above. Applying this to the St. Louis data:
+
+.. doctest::
+
+    >>> np.random.seed(12345)
+    >>> f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
+    >>> y = np.array(f.by_col['HR8893'])
+    >>> w = pysal.open(pysal.examples.get_path("stl.gal")).read()
+    >>> gc = pysal.Geary(y, w)
+    >>> "%.3f"%gc.C
+    '0.597'
+    >>> gc.EC
+    1.0
+    >>> "%.3f"%gc.z_norm
+    '-5.449'
+
+we see that the statistic :math:`C` is significantly lower than its expected
+value :math:`EC`. Although the sign of the standardized statistic is negative (in contrast to what held for :math:`I`), the interpretation is the same, namely evidence of strong positive spatial autocorrelation in the homicide rates.
+
+Similar to what we saw for Moran's I, we can base inference on Geary's :math:`C` using
+random spatial permutations, which are actually run as a default with the
+number of permutations=999 (this is why we set the seed of the random number
+generator to 12345 to replicate the result):
+
+.. doctest::
+
+    >>> gc.p_sim
+    0.001
+
+which indicates that none of the C values from the permuted samples was as extreme as our observed value.
+
+Getis and Ord's G
+-----------------
+The last statistic for global spatial autocorrelation implemented in PySAL is Getis and Ord's G:
+
+.. math::
+
+        G(d)=\frac{\sum_i\sum_j w_{i,j}(d) y_i y_j}{\sum_i\sum_j y_i y_j}
+
+where :math:`d` is a threshold distance used to define a spatial :doc:`weight <weights>`.
+Only :class:`pysal.weights.Distance.DistanceBand` weights objects are applicable to Getis and Ord's G.
+Applying this to the St. Louis data:
+
+.. doctest::
+
+        >>> dist_w = pysal.threshold_binaryW_from_shapefile('../pysal/examples/stl_hom.shp',0.6)
+        >>> dist_w.transform = "B"
+        >>> from pysal.esda.getisord import G
+        >>> g = G(y, dist_w)
+        >>> print g.G
+        0.103483215873
+        >>> print g.EG
+        0.0752580752581
+        >>> print g.z_norm
+        3.28090342959
+        >>> print g.p_norm
+        0.000517375830488
+
+Although we switched the contiguity-based weights object into another distance-based one,
+we see that the statistic :math:`G` is significantly higher than its expected
+value :math:`EG` under the assumption of normality for the homicide rates.
+
+Similar to what we saw for Moran's I and Geary's C, we can base inference on Getis and Ord's G using random spatial permutations:
+
+.. doctest::
+
+    >>> np.random.seed(12345)
+    >>> g = G(y, dist_w, permutations=9999)
+    >>> print g.p_z_sim
+    0.000564384586974
+    >>> print g.p_sim
+    0.0065
+
+with the first p-value based on a z-transform of the observed G relative to the
+distribution of values obtained in the permutations, and the second based on
+the cumulative probability of the observed value in the empirical distribution.
+
+Local Autocorrelation
+=====================
+
+.. _lisa:
+
+To measure local autocorrelation quantitatively, 
+PySAL implements Local Indicators of Spatial Association (LISAs) for Moran's I and Getis and Ord's G.
+
+Local Moran's I
+----------------
+
+PySAL implements local Moran's I as follows:
+
+.. math::
+
+        I_i =  \sum_j z_i w_{i,j} z_j / \sum_i z_i z_i
+
+which results in :math:`n` values of local spatial autocorrelation, 1 for each spatial unit. Continuing on with the St. Louis example, the LISA statistics are obtained with:
+
+.. doctest::
+
+       >>> f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
+       >>> y = np.array(f.by_col['HR8893'])
+       >>> w = pysal.open(pysal.examples.get_path("stl.gal")).read()
+       >>> np.random.seed(12345)
+       >>> lm = pysal.Moran_Local(y,w)
+       >>> lm.n
+       78
+       >>> len(lm.Is)
+       78
+        
+thus we see 78 LISAs are stored in the vector lm.Is. Inference about these values is obtained through conditional randomization [#]_ which leads to pseudo p-values for each LISA:
+
+.. doctest::
+
+    >>> lm.p_sim
+    array([ 0.176,  0.073,  0.405,  0.267,  0.332,  0.057,  0.296,  0.242,
+            0.055,  0.062,  0.273,  0.488,  0.44 ,  0.354,  0.415,  0.478,
+            0.473,  0.374,  0.415,  0.21 ,  0.161,  0.025,  0.338,  0.375,
+            0.285,  0.374,  0.208,  0.3  ,  0.373,  0.411,  0.478,  0.414,
+            0.009,  0.429,  0.269,  0.015,  0.005,  0.002,  0.077,  0.001,
+            0.088,  0.459,  0.435,  0.365,  0.231,  0.017,  0.033,  0.04 ,
+            0.068,  0.101,  0.284,  0.309,  0.113,  0.457,  0.045,  0.269,
+            0.118,  0.346,  0.328,  0.379,  0.342,  0.39 ,  0.376,  0.467,
+            0.357,  0.241,  0.26 ,  0.401,  0.185,  0.172,  0.248,  0.4  ,
+            0.482,  0.159,  0.373,  0.455,  0.083,  0.128])
+
+To identify the significant [#]_ LISA values we can use numpy indexing:
+
+.. doctest::
+
+        >>> sig = lm.p_sim<0.05
+        >>> lm.p_sim[sig]
+        array([ 0.025,  0.009,  0.015,  0.005,  0.002,  0.001,  0.017,  0.033,
+                0.04 ,  0.045])
+
+and then use this indexing on the q attribute to find out which quadrant of the Moran scatter plot each of the significant values is contained in:
+
+.. doctest::
+
+        >>> lm.q[sig]
+        array([4, 1, 3, 1, 3, 1, 1, 3, 3, 3])
+
+As in the case of global Moran's I, when the variable of interest is rates based on populations with different sizes,
+we need to account for the differences among populations to estimate local Moran's Is. 
+Continuing on with the SIDS example above, the adjusted local Moran's Is are obtained with:
+
+.. doctest::
+
+	>>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+	>>> b = np.array(f.by_col('BIR79'))
+	>>> e = np.array(f.by_col('SID79'))
+	>>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
+    >>> np.random.seed(12345)
+    >>> lm = pysal.esda.moran.Moran_Local_Rate(e, b, w)
+    >>> lm.Is[:10]
+    array([-0.13452366, -1.21133985,  0.05019761,  0.06127125, -0.12627466,
+            0.23497679,  0.26345855, -0.00951288, -0.01517879, -0.34513514])
+
+As demonstrated above, significant Moran's Is can be identified by using numpy indexing:
+
+.. doctest::
+
+        >>> sig = lm.p_sim<0.05
+        >>> lm.p_sim[sig]
+        array([ 0.021,  0.04 ,  0.047,  0.015,  0.001,  0.017,  0.032,  0.031,
+                0.019,  0.014,  0.004,  0.048,  0.003])
+
+
+Local G and G*
+--------------
+
+Getis and Ord's G can be localized in two forms: :math:`G_i` and :math:`G^*_i`.
+
+.. math::
+
+        G_i(d) = \frac{\sum_j w_{i,j}(d) y_j - W_i\bar{y}(i)}{s(i)\{[(n-1)S_{1i} - W^2_i]/(n-2)\}^{1/2}}, j \neq i
+
+.. math::
+
+        G^*_i(d) = \frac{\sum_j w_{i,j}(d) y_j - W^*_i\bar{y}}{s\{[(nS^*_{1i}) - (W^*_i)^2]/(n-1)\}^{1/2}}, j = i
+
+where we have :math:`W_i = \sum_{j \neq i} w_{i,j}(d)`, :math:`\bar{y}(i) = \frac{\sum_j y_j}{(n-1)}`, :math:`s^2(i) = \frac{\sum_j y^2_j}{(n-1)} - [\bar{y}(i)]^2`, :math:`W^*_i = W_i + w_{i,i}`, :math:`S_{1i} = \sum_j w^2_{i,j} (j \neq i)`, and :math:`S^*_{1i} = \sum_j w^2_{i,j} (\forall j)`, :math:`\bar{y}` and :math:`s^2` denote the usual sample mean and variance of :math:`y`.
+
+Continuing on with the St. Louis example, the :math:`G_i` and :math:`G^*_i` statistics are obtained with:
+
+.. doctest::
+
+        >>> from pysal.esda.getisord import G_Local
+        >>> np.random.seed(12345)
+        >>> lg = G_Local(y, dist_w)
+        >>> lg.n
+        78
+        >>> len(lg.Gs)
+        78
+        >>> lgstar = G_Local(y, dist_w, star=True)
+        >>> lgstar.n
+        78
+        >>> len(lgstar.Gs)
+        78
+        
+thus we see 78 :math:`G_i` and :math:`G^*_i` are stored in the vector lg.Gs and lgstar.Gs, respectively. Inference about these values is obtained through conditional randomization as in the case of local Moran's I:
+
+.. doctest::
+
+    >>> lg.p_sim
+    array([ 0.301,  0.037,  0.457,  0.011,  0.062,  0.006,  0.094,  0.163,
+            0.075,  0.078,  0.419,  0.286,  0.138,  0.443,  0.36 ,  0.484,
+            0.434,  0.251,  0.415,  0.21 ,  0.177,  0.001,  0.304,  0.042,
+            0.285,  0.394,  0.208,  0.089,  0.244,  0.493,  0.478,  0.433,
+            0.006,  0.429,  0.037,  0.105,  0.005,  0.216,  0.23 ,  0.023,
+            0.105,  0.343,  0.395,  0.305,  0.264,  0.017,  0.033,  0.01 ,
+            0.001,  0.115,  0.034,  0.225,  0.043,  0.312,  0.045,  0.092,
+            0.118,  0.428,  0.258,  0.379,  0.408,  0.39 ,  0.475,  0.493,
+            0.357,  0.298,  0.232,  0.454,  0.149,  0.161,  0.226,  0.4  ,
+            0.482,  0.159,  0.27 ,  0.423,  0.083,  0.128])
+
+
+To identify the significant :math:`G_i` values we can use numpy indexing:
+
+.. doctest::
+
+
+    >>> sig = lg.p_sim<0.05
+    >>> lg.p_sim[sig]
+    array([ 0.037,  0.011,  0.006,  0.001,  0.042,  0.006,  0.037,  0.005,
+            0.023,  0.017,  0.033,  0.01 ,  0.001,  0.034,  0.043,  0.045])
+
+Further Information 
+====================
+
+For further details see the :doc:`ESDA  API <../../library/esda/index>`.
+
+
+
+
+.. rubric:: Footnotes
+
+
+.. [#] Hubert, L., R. Golledge and C.M. Costanzo (1981). Generalized procedures for evaluating spatial autocorrelation. Geographical Analysis 13, 224-233.
+.. [#] Technical details and derivations can be found in A.D. Cliff and J.K. Ord (1981). Spatial Processes, Models and Applications. London, Pion, pp. 34-41.
+.. [#] Messner, S.,  L. Anselin, D. Hawkins, G. Deane, S. Tolnay, R. Baller (2000). An Atlas of the Spatial Patterning of County-Level Homicide, 1960-1990. Pittsburgh, PA, National Consortium on Violence Research (NCOVR)
+.. [#] Because the permutations are random, results from those presented here may vary if you replicate this example.
+.. [#] Assuncao, R. E. and Reis, E. A. 1999. A new proposal to adjust Moran's I for population density. Statistics in Medicine. 18, 2147-2162.
+.. [#] The n-1 spatial units other than i are used to generate the empirical distribution of the LISA statistics for each i.
+.. [#] Caution is required in interpreting the significance of the LISA statistics due to difficulties with multiple comparisons and a lack of independence across the individual tests. For further discussion see Anselin, L. (1995). "Local indicators of spatial association – LISA". Geographical Analysis, 27, 93-115.
+
diff --git a/doc/source/users/tutorials/dynamics.rst b/doc/source/users/tutorials/dynamics.rst
new file mode 100644
index 0000000..cd57dfe
--- /dev/null
+++ b/doc/source/users/tutorials/dynamics.rst
@@ -0,0 +1,805 @@
+.. testsetup:: *
+
+    import pysal
+    import numpy as np
+
+################
+Spatial Dynamics
+################
+
+.. contents::
+
+Introduction
+============
+
+PySAL implements a number of exploratory approaches to analyze the 
+dynamics of longitudinal spatial data, or observations on fixed areal 
+units over multiple time periods. Examples could include time series 
+of voting patterns in US Presidential elections, time series of remote 
+sensing images, labor market dynamics, regional business cycles, among 
+many others. Two broad sets of spatial dynamics methods are implemented 
+to analyze these data types. The first are Markov based methods, while 
+the second are based on Rank dynamics.
+
+Additionally, methods are included in this module to analyze patterns of individual events which have spatial and temporal coordinates associated with them. Examples include locations and times of individual cases of disease or crimes. Methods are included here to determine if these event patterns exhibit space-time interaction. 
+
+Markov Based Methods
+====================
+
+The Markov based methods include classic Markov chains and extensions of
+these approaches to deal with spatially referenced data. In what follows
+we illustrate the functionality of these Markov methods. Readers
+interested in the methodological foundations of these approaches are
+directed to [#]_.
+
+Classic Markov
+--------------
+
+We start with a look at a simple example of classic Markov methods
+implemented in PySAL. A Markov chain may be in one of :math:`k` different
+states at any point in time. These states are exhaustive and mutually
+exclusive. For example, if one had a time series of remote sensing images
+used to develop land use classifications, then the states could be defined
+as the specific land use classes and interest would center on the
+transitions in and out of different classes for each pixel. 
+
+For example, let's construct a small artificial chain consisting of 3 states
+(a,b,c) and 5 different pixels at three different points in time:
+
+
+.. doctest::
+   :options: +NORMALIZE_WHITESPACE
+
+        >>> import pysal 
+        >>> import numpy as np
+        >>> c = np.array([['b','a','c'],['c','c','a'],['c','b','c'],['a','a','b'],['a','b','c']])
+        >>> c
+        array([['b', 'a', 'c'],
+               ['c', 'c', 'a'],
+               ['c', 'b', 'c'],
+               ['a', 'a', 'b'],
+               ['a', 'b', 'c']],
+              dtype='|S1')
+
+        
+So the first pixel was in class 'b' in period 1, class 'a' in period 2,
+and class 'c' in period 3. We can summarize the overall transition
+dynamics for the set of pixels by treating it as a Markov chain:
+
+.. doctest::
+   :options: +NORMALIZE_WHITESPACE
+
+        >>> m = pysal.Markov(c)
+        >>> m.classes
+        array(['a', 'b', 'c'],
+              dtype='|S1')
+
+
+The Markov instance m has an attribute class extracted from the chain -
+the assumption is that the observations are on the rows of the input and
+the different points in time on the columns. In addition to extracting the
+classes as an attribute, our Markov instance will also have a transitions
+matrix:
+
+.. doctest::
+
+        >>> m.transitions
+        array([[ 1.,  2.,  1.],
+               [ 1.,  0.,  2.],
+               [ 1.,  1.,  1.]])
+
+indicating that of the four pixels that began a transition interval in
+class 'a', 1 remained in that class, 2 transitioned to class 'b' and 1
+transitioned to class 'c'. 
+
+This simple example illustrates the basic creation of a Markov instance,
+but the small sample size makes it unrealistic for the more advanced
+features of this approach.  For a larger example, we will look at an
+application of Markov methods to understanding regional income dynamics in
+the US. Here we will load in data on per capita income observed annually
+from 1929 to 2009 for the lower 48 US states:
+
+.. doctest::
+
+        >>> f = pysal.open("../pysal/examples/usjoin.csv")
+        >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)])
+        >>> pci.shape
+        (81, 48)
+
+The first row of the array is the per capita income for the first year:
+
+.. doctest::
+
+        >>> pci[0, :]
+        array([ 323,  600,  310,  991,  634, 1024, 1032,  518,  347,  507,  948,
+                607,  581,  532,  393,  414,  601,  768,  906,  790,  599,  286,
+                621,  592,  596,  868,  686,  918,  410, 1152,  332,  382,  771,
+                455,  668,  772,  874,  271,  426,  378,  479,  551,  634,  434,
+                741,  460,  673,  675])
+        
+In order to apply the classic Markov approach to this series, we first
+have to discretize the distribution by defining our classes. There are
+many ways to do this, but here we will use the quintiles for each annual
+income distribution to define the classes:
+
+.. doctest::
+
+        >>> q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+        >>> q5.shape
+        (48, 81)
+        >>> q5[:, 0]
+        array([0, 2, 0, 4, 2, 4, 4, 1, 0, 1, 4, 2, 2, 1, 0, 1, 2, 3, 4, 4, 2, 0, 2,
+               2, 2, 4, 3, 4, 0, 4, 0, 0, 3, 1, 3, 3, 4, 0, 1, 0, 1, 2, 2, 1, 3, 1,
+               3, 3])
+ 
+A number of things need to be noted here. First, we are relying on the
+classification methods in PySAL for defining our quintiles. The class
+Quantiles uses quintiles as the default and will create an instance of
+this class that has multiple attributes, the one we are extracting in the
+first line is yb - the class id for each observation. The second thing to
+note is the transpose operator which gets our resulting array q5 in the
+proper structure required for use of Markov. Thus we see that the first
+spatial unit (Alabama with an income of 323) fell in the first quintile
+in 1929, while the last unit (Wyoming with an income of 675) fell in the
+fourth quintile [#]_.
+
+So now we have a time series for each state of its quintile membership.
+For example, Colorado's quintile time series is:
+
+.. doctest::
+
+        >>> q5[4, :]
+        array([2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 3,
+               3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3,
+               3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4,
+               4, 4, 4, 4, 4, 3, 3, 3, 4, 3, 3, 3])
+
+indicating that it has occupied the 3rd, 4th and 5th quintiles in the
+distribution at different points in time. To summarize the transition
+dynamics for all units, we instantiate a Markov object:
+
+.. doctest::
+
+        >>> m5 = pysal.Markov(q5)
+        >>> m5.transitions
+        array([[ 729.,   71.,    1.,    0.,    0.],
+               [  72.,  567.,   80.,    3.,    0.],
+               [   0.,   81.,  631.,   86.,    2.],
+               [   0.,    3.,   86.,  573.,   56.],
+               [   0.,    0.,    1.,   57.,  741.]])
+
+Assuming we can treat these transitions as a first order Markov chain, we can estimate
+the transition probabilities:
+
+.. doctest::
+
+        >>> m5.p
+        matrix([[ 0.91011236,  0.0886392 ,  0.00124844,  0.        ,  0.        ],
+                [ 0.09972299,  0.78531856,  0.11080332,  0.00415512,  0.        ],
+                [ 0.        ,  0.10125   ,  0.78875   ,  0.1075    ,  0.0025    ],
+                [ 0.        ,  0.00417827,  0.11977716,  0.79805014,  0.07799443],
+                [ 0.        ,  0.        ,  0.00125156,  0.07133917,  0.92740926]])
+
+as well as the long run steady state distribution:
+
+.. doctest::
+
+        >>> m5.steady_state
+        matrix([[ 0.20774716],
+                [ 0.18725774],
+                [ 0.20740537],
+                [ 0.18821787],
+                [ 0.20937187]])
+
+With the transition probability matrix in hand, we can estimate the first
+mean passage time:
+
+.. doctest::
+
+        >>> pysal.ergodic.fmpt(m5.p)
+        matrix([[   4.81354357,   11.50292712,   29.60921231,   53.38594954,
+                  103.59816743],
+                [  42.04774505,    5.34023324,   18.74455332,   42.50023268,
+                   92.71316899],
+                [  69.25849753,   27.21075248,    4.82147603,   25.27184624,
+                   75.43305672],
+                [  84.90689329,   42.85914824,   17.18082642,    5.31299186,
+                   51.60953369],
+                [  98.41295543,   56.36521038,   30.66046735,   14.21158356,
+                    4.77619083]])
+
+Thus, for a state with income in the first quintile, it takes on average
+11.5 years for it to first enter the second quintile, 29.6 to get to the
+third quintile, 53.4 years to enter the fourth, and 103.6 years to reach
+the richest quintile.
+
+
+Spatial Markov
+--------------
+
+Thus far we have treated all the spatial units as independent to estimate
+the transition probabilities. This hides a number of implicit assumptions.
+First, the transition dynamics are assumed to hold for all units and for
+all time periods. Second, interactions between the transitions of
+individual units are ignored. In other words regional context may be
+important to understand regional income dynamics, but the classic Markov
+approach is silent on this issue.
+
+PySAL includes a number of spatially explicit extensions to the Markov
+framework. The first is the spatial Markov class that we illustrate here.
+We first are going to transform the income series to relative incomes (by
+standardizing by each period by the mean):
+
+.. doctest::
+
+    >>> import pysal
+    >>> f = pysal.open("../pysal/examples/usjoin.csv")
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+    >>> pci = pci.transpose()
+    >>> rpci = pci / (pci.mean(axis = 0))
+
+
+Next, we require a spatial weights object, and here we will create one
+from an external GAL file:
+
+.. doctest::
+
+    >>> w = pysal.open("../pysal/examples/states48.gal").read()
+    >>> w.transform = 'r'
+
+Finally, we create an instance of the Spatial Markov class using 5 states
+for the chain:
+
+.. doctest::
+
+    >>> sm = pysal.Spatial_Markov(rpci, w, fixed = True, k = 5)
+
+Here we are keeping the quintiles fixed, meaning the data are pooled over
+space and time and the quintiles calculated for the pooled data. This is
+why we first transformed the data to relative incomes.  We can next
+examine the global transition probability matrix for relative incomes:
+
+.. doctest::
+
+    >>> sm.p
+    matrix([[ 0.91461837,  0.07503234,  0.00905563,  0.00129366,  0.        ],
+            [ 0.06570302,  0.82654402,  0.10512484,  0.00131406,  0.00131406],
+            [ 0.00520833,  0.10286458,  0.79427083,  0.09505208,  0.00260417],
+            [ 0.        ,  0.00913838,  0.09399478,  0.84856397,  0.04830287],
+            [ 0.        ,  0.        ,  0.        ,  0.06217617,  0.93782383]])
+
+The Spatial Markov allows us to compare the global transition dynamics to
+those conditioned on regional context. More specifically, the transition
+dynamics are split across economies who have spatial lags in different
+quintiles at the beginning of the year. In our example we have 5 classes,
+so 5 different conditioned transition probability matrices are estimated:
+ 
+.. doctest::
+
+    >>> for p in sm.P:
+    ...     print p
+    ...     
+    [[ 0.96341463  0.0304878   0.00609756  0.          0.        ]
+     [ 0.06040268  0.83221477  0.10738255  0.          0.        ]
+     [ 0.          0.14        0.74        0.12        0.        ]
+     [ 0.          0.03571429  0.32142857  0.57142857  0.07142857]
+     [ 0.          0.          0.          0.16666667  0.83333333]]
+    [[ 0.79831933  0.16806723  0.03361345  0.          0.        ]
+     [ 0.0754717   0.88207547  0.04245283  0.          0.        ]
+     [ 0.00537634  0.06989247  0.8655914   0.05913978  0.        ]
+     [ 0.          0.          0.06372549  0.90196078  0.03431373]
+     [ 0.          0.          0.          0.19444444  0.80555556]]
+    [[ 0.84693878  0.15306122  0.          0.          0.        ]
+     [ 0.08133971  0.78947368  0.1291866   0.          0.        ]
+     [ 0.00518135  0.0984456   0.79274611  0.0984456   0.00518135]
+     [ 0.          0.          0.09411765  0.87058824  0.03529412]
+     [ 0.          0.          0.          0.10204082  0.89795918]]
+    [[ 0.8852459   0.09836066  0.          0.01639344  0.        ]
+     [ 0.03875969  0.81395349  0.13953488  0.          0.00775194]
+     [ 0.0049505   0.09405941  0.77722772  0.11881188  0.0049505 ]
+     [ 0.          0.02339181  0.12865497  0.75438596  0.09356725]
+     [ 0.          0.          0.          0.09661836  0.90338164]]
+    [[ 0.33333333  0.66666667  0.          0.          0.        ]
+     [ 0.0483871   0.77419355  0.16129032  0.01612903  0.        ]
+     [ 0.01149425  0.16091954  0.74712644  0.08045977  0.        ]
+     [ 0.          0.01036269  0.06217617  0.89637306  0.03108808]
+     [ 0.          0.          0.          0.02352941  0.97647059]]
+    
+
+The probability of a poor state remaining poor is 0.963 if their
+neighbors are in the 1st quintile and 0.798 if their neighbors are
+in the 2nd quintile. The probability of a rich economy remaining
+rich is 0.977 if their neighbors are in the 5th quintile, but if their
+neighbors are in the 4th quintile this drops to 0.903.
+
+We can also explore the different steady state distributions implied by
+these different transition probabilities:
+
+.. doctest::
+
+    >>> sm.S
+    array([[ 0.43509425,  0.2635327 ,  0.20363044,  0.06841983,  0.02932278],
+           [ 0.13391287,  0.33993305,  0.25153036,  0.23343016,  0.04119356],
+           [ 0.12124869,  0.21137444,  0.2635101 ,  0.29013417,  0.1137326 ],
+           [ 0.0776413 ,  0.19748806,  0.25352636,  0.22480415,  0.24654013],
+           [ 0.01776781,  0.19964349,  0.19009833,  0.25524697,  0.3372434 ]])
+
+The long run distribution for states with poor (rich) neighbors has
+0.435 (0.018) of the values in the first quintile, 0.263 (0.200) in
+the second quintile, 0.204 (0.190) in the third, 0.0684 (0.255) in the
+fourth and 0.029 (0.337) in the fifth quintile. And, finally the first mean
+passage times:
+
+.. doctest::
+
+    >>> for f in sm.F:
+    ...     print f
+    ...     
+    [[   2.29835259   28.95614035   46.14285714   80.80952381  279.42857143]
+     [  33.86549708    3.79459555   22.57142857   57.23809524  255.85714286]
+     [  43.60233918    9.73684211    4.91085714   34.66666667  233.28571429]
+     [  46.62865497   12.76315789    6.25714286   14.61564626  198.61904762]
+     [  52.62865497   18.76315789   12.25714286    6.           34.1031746 ]]
+    [[   7.46754205    9.70574606   25.76785714   74.53116883  194.23446197]
+     [  27.76691978    2.94175577   24.97142857   73.73474026  193.4380334 ]
+     [  53.57477715   28.48447637    3.97566318   48.76331169  168.46660482]
+     [  72.03631562   46.94601483   18.46153846    4.28393653  119.70329314]
+     [  77.17917276   52.08887197   23.6043956     5.14285714   24.27564033]]
+    [[   8.24751154    6.53333333   18.38765432   40.70864198  112.76732026]
+     [  47.35040872    4.73094099   11.85432099   34.17530864  106.23398693]
+     [  69.42288828   24.76666667    3.794921     22.32098765   94.37966594]
+     [  83.72288828   39.06666667   14.3           3.44668119   76.36702977]
+     [  93.52288828   48.86666667   24.1           9.8           8.79255406]]
+    [[  12.87974382   13.34847151   19.83446328   28.47257282   55.82395142]
+     [  99.46114206    5.06359731   10.54545198   23.05133495   49.68944423]
+     [ 117.76777159   23.03735526    3.94436301   15.0843986    43.57927247]
+     [ 127.89752089   32.4393006    14.56853107    4.44831643   31.63099455]
+     [ 138.24752089   42.7893006    24.91853107   10.35          4.05613474]]
+    [[  56.2815534     1.5          10.57236842   27.02173913  110.54347826]
+     [  82.9223301     5.00892857    9.07236842   25.52173913  109.04347826]
+     [  97.17718447   19.53125       5.26043557   21.42391304  104.94565217]
+     [ 127.1407767    48.74107143   33.29605263    3.91777427   83.52173913]
+     [ 169.6407767    91.24107143   75.79605263   42.5           2.96521739]]
+
+States with incomes in the first quintile with neighbors in the
+first quintile return to the first quintile after 2.298 years, after
+leaving the first quintile. They enter the fourth quintile 
+80.810 years after leaving the first quintile, on average.
+Poor states with neighbors in the fourth quintile return to the
+first quintile, on average, after 12.88 years, and would enter the
+fourth quintile after 28.473 years.
+
+
+LISA Markov
+-----------
+
+The Spatial Markov conditions the transitions on the value of the spatial
+lag for an observation at the beginning of the transition period. An
+alternative approach to spatial dynamics is to consider the joint
+transitions of an observation and its spatial lag in the distribution.
+By exploiting the form of the static :ref:`LISA <lisa>` and embedding it
+in a dynamic context we develop the LISA Markov in which the states of the
+chain are defined as the four quadrants in the Moran scatter plot.
+Continuing on with our US example:
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> f = pysal.open("../pysal/examples/usjoin.csv")
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)]).transpose()
+    >>> w = pysal.open("../pysal/examples/states48.gal").read()
+    >>> lm = pysal.LISA_Markov(pci, w)
+    >>> lm.classes
+    array([1, 2, 3, 4])
+
+The LISA transitions are:
+
+.. doctest::
+
+  >>> lm.transitions
+  array([[  1.08700000e+03,   4.40000000e+01,   4.00000000e+00,
+            3.40000000e+01],
+         [  4.10000000e+01,   4.70000000e+02,   3.60000000e+01,
+            1.00000000e+00],
+         [  5.00000000e+00,   3.40000000e+01,   1.42200000e+03,
+            3.90000000e+01],
+         [  3.00000000e+01,   1.00000000e+00,   4.00000000e+01,
+            5.52000000e+02]])
+ 
+
+and the estimated transition probability matrix is:
+
+.. doctest::
+
+    >>> lm.p
+    matrix([[ 0.92985458,  0.03763901,  0.00342173,  0.02908469],
+            [ 0.07481752,  0.85766423,  0.06569343,  0.00182482],
+            [ 0.00333333,  0.02266667,  0.948     ,  0.026     ],
+            [ 0.04815409,  0.00160514,  0.06420546,  0.88603531]])
+
+
+
+The diagonal elements indicate the staying probabilities and we see that
+there is greater mobility for observations in quadrants 1 and 3 than 2 and
+4.
+
+The implied long run steady state distribution of the chain is
+
+.. doctest::
+
+    >>> lm.steady_state
+    matrix([[ 0.28561505],
+            [ 0.14190226],
+            [ 0.40493672],
+            [ 0.16754598]])
+
+again reflecting the dominance of quadrants 1 and 3 (positive
+autocorrelation). [#]_ Finally the first mean passage time for the LISAs is:
+
+.. doctest::
+
+    >>> pysal.ergodic.fmpt(lm.p)
+    matrix([[  3.50121609,  37.93025465,  40.55772829,  43.17412009],
+            [ 31.72800152,   7.04710419,  28.68182751,  49.91485137],
+            [ 52.44489385,  47.42097495,   2.46952168,  43.75609676],
+            [ 38.76794022,  51.51755827,  26.31568558,   5.96851095]])
+
+
+
+Rank Based Methods
+==================
+
+The second set of spatial dynamic methods in PySAL are based on rank
+correlations and spatial extensions of the classic rank statistics.
+
+Spatial Rank Correlation
+------------------------
+
+Kendall's :math:`\tau` is based on a comparison of the number of pairs of :math:`n`
+observations that have concordant ranks between two variables. For spatial
+dynamics in PySAL, the two variables in question are the values of an attribute
+measured at two points in time over :math:`n` spatial units. This classic
+measure of rank correlation indicates how much relative stability there
+has been in the map pattern over the two periods.
+
+The spatial :math:`\tau` decomposes these pairs into those that are
+spatial neighbors and those that are not, and examines whether the rank
+correlation is different between the two sets. [4]_ To illustrate this we
+turn to the case of regional incomes in Mexico over the 1940 to 2000
+period:
+
+.. doctest::
+
+    >>> import pysal
+    >>> f = pysal.open("../pysal/examples/mexico.csv")
+    >>> vnames = ["pcgdp%d"%dec for dec in range(1940, 2010, 10)]
+    >>> y = np.transpose(np.array([f.by_col[v] for v in vnames]))
+
+We also introduce the concept of regime weights that defines the neighbor
+set as those spatial units belonging to the same region. In this example
+the variable "esquivel99" represents a categorical classification of
+Mexican states into regions:
+
+.. doctest::
+
+    >>> regime = np.array(f.by_col['esquivel99'])
+    >>> w = pysal.weights.block_weights(regime)
+    >>> np.random.seed(12345)
+
+Now we will calculate the spatial tau for decade transitions from 1940 through
+2000 and report the observed spatial tau against that expected if the rank
+changes were randomly distributed in space by using 99 permutations:
+
+.. doctest::
+
+    >>> res=[pysal.SpatialTau(y[:,i],y[:,i+1],w,99) for i in range(6)]
+    >>> for r in res:
+    ...     ev = r.taus.mean()
+    ...     "%8.3f %8.3f %8.3f"%(r.tau_spatial, ev, r.tau_spatial_psim)
+    ...     
+    '   0.397    0.659    0.010'
+    '   0.492    0.706    0.010'
+    '   0.651    0.772    0.020'
+    '   0.714    0.752    0.210'
+    '   0.683    0.705    0.270'
+    '   0.810    0.819    0.280'
+
+The observed level of spatial concordance during the 1940-50 transition was
+0.397 which is significantly lower (p=0.010) than the average level of spatial
+concordance (0.659) from randomly permuted incomes in Mexico. Similar patterns
+are found for the next two transition periods as well. In other words the
+amount of rank concordance is significantly distinct between pairs of
+observations that are geographical neighbors and those that are not in these
+first three transition periods. This reflects the greater degree of spatial
+similarity within rather than  between the regimes making the
+discordant pairs dominated by neighboring pairs. 
+
+Rank Decomposition
+------------------
+
+For a sequence of time periods, :math:`\theta` measures the extent to which rank
+changes for a variable measured over :math:`n` locations are in the same direction
+within mutually exclusive and exhaustive partitions (regimes) of the
+:math:`n` locations.
+
+Theta is defined as the sum of the absolute sum of rank changes within
+the regimes over the sum of all absolute rank changes. [#]_
+
+.. doctest::
+
+    >>> import pysal
+    >>> f = pysal.open("../pysal/examples/mexico.csv")
+    >>> vnames = ["pcgdp%d"%dec for dec in range(1940, 2010, 10)]
+    >>> y = np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> regime = np.array(f.by_col['esquivel99'])
+    >>> np.random.seed(10)
+    >>> t = pysal.Theta(y, regime, 999)
+    >>> t.theta
+    array([[ 0.41538462,  0.28070175,  0.61363636,  0.62222222,  0.33333333,
+             0.47222222]])
+    >>> t.pvalue_left
+    array([ 0.307,  0.077,  0.823,  0.552,  0.045,  0.735])
+
+
+Space-Time Interaction Tests
+============================
+
+The third set of spatial dynamic methods in PySAL are global tests of space-time interaction. The purpose of these tests is to detect clustering within space-time event patterns. These patterns are composed of unique events that are labeled with spatial and temporal coordinates. The tests are designed to detect clustering of events in both space and time beyond "any purely spatial or purely temporal clustering" [#]_, that is, to determine if the events are "interacting." Essentially, the [...]
+
+
+Knox Test
+---------
+
+The Knox test for space-time interaction employs user-defined critical thresholds in space and time to define proximity between events. All pairs of events are examined to determine if the distance between them in space and time is within the respective thresholds. The Knox statistic is calculated as the total number of event pairs where the spatial and temporal distances separating the pair are within the specified thresholds [#]_. If interaction is present, the test statistic will be l [...]
+
+Formally, the specification of the Knox test is given as:
+
+.. math::
+ 	 
+         X=\sum_{i=1}^{n}\sum_{j=1}^{n}a_{ij}^{s}a_{ij}^{t}\\
+
+.. math::
+
+         \begin{align} \nonumber
+         a_{ij}^{s} &=
+         \begin{cases}
+         1, & \text{if $d^s_{ij}<\delta$}\\
+         0, & \text{otherwise}
+	 \end{cases}
+         \end{align}
+         
+.. math::
+
+         \begin{align} \nonumber
+	 a_{ij}^{t} &=
+	 \begin{cases}
+	 1, & \text{if $d^t_{ij}<\tau$}\\
+	 0, & \text{otherwise}
+	 \end{cases}
+         \end{align}
+
+Where :math:`n` = number of events, :math:`a^{s}` = adjacency in space, :math:`a^{t}` = adjacency in time, :math:`d^{s}` = distance in space, and :math:`d^{t}` = distance in time. Critical space and time distance thresholds are defined as :math:`\delta` and :math:`\tau`, respectively. 
+
+We illustrate the use of the Knox test using data from a study of Burkitt's Lymphoma in Uganda during the period 1961-75 [#]_. We start by importing Numpy, PySAL and the interaction module:
+
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spatial_dynamics.interaction as interaction
+    >>> np.random.seed(100) 
+
+The example data are then read in and used to create an instance of SpaceTimeEvents. This reformats the data so the test can be run by PySAL. This class requires the input of a point shapefile. The shapefile must contain a column that includes a timestamp for each point in the dataset. The class requires that the user input a path to an appropriate shapefile and the name of the column containing the timestamp. In this example, the appropriate column name is 'T'.
+
+.. doctest::
+
+    >>> path = "../pysal/examples/burkitt"
+    >>> events = interaction.SpaceTimeEvents(path,'T')
+
+Next, we run the Knox test with distance and time thresholds of 20 and 5, respectively. This counts the events that are closer than 20 units in space, and 5 units in time.  
+
+.. doctest::
+
+    >>> result = interaction.knox(events.space, events.t ,delta=20,tau=5,permutations=99)
+
+Finally we examine the results. We call the statistic from the results dictionary. This reports that there are 13 events close in both space and time, based on our threshold definitions. 
+
+.. doctest::
+
+    >>> print(result['stat'])
+    13
+
+Then we look at the pseudo-significance of this value, calculated by permuting the timestamps and rerunning the statistics. Here, 99 permutations were used, but an alternative number can be specified by the user. In this case, the results indicate that we fail to reject the null hypothesis of no space-time interaction using an alpha value of 0.05. 
+
+.. doctest::
+
+    >>> print("%2.2f"%result['pvalue'])
+    0.17
+
+
+Modified Knox Test
+------------------
+
+A modification to the Knox test was proposed by Baker [#]_. Baker's modification measures the difference between the original observed Knox statistic and its expected value. This difference serves as the test statistic. Again, the significance of this statistic is assessed using a Monte Carlo permutation procedure. 
+
+
+.. math::
+ 	 
+ 	 T=\frac{1}{2}\bigg(\sum_{i=1}^{n}\sum_{j=1}^{n}f_{ij}g_{ij} - \frac{1}{n-1}\sum_{k=1}^{n}\sum_{l=1}^{n}\sum_{j=1}^{n}f_{kj}g_{lj}\bigg)\\
+
+
+Where :math:`n` = number of events, :math:`f` = adjacency in space, :math:`g` = adjacency in time (calculated in a manner equivalent to :math:`a^{s}` and :math:`a^{t}` above in the Knox test). The first part of this statistic is equivalent to the original Knox test, while the second part is the expected value under spatio-temporal randomness. 
+
+Here we illustrate the use of the modified Knox test using the data on Burkitt's Lymphoma cases in Uganda from above. We start by importing Numpy, PySAL and the interaction module. Next the example data are then read in and used to create an instance of SpaceTimeEvents.
+
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spatial_dynamics.interaction as interaction 
+    >>> np.random.seed(100) 
+    >>> path = "../pysal/examples/burkitt"
+    >>> events = interaction.SpaceTimeEvents(path,'T')
+
+Next, we run the modified Knox test with distance and time thresholds of 20 and 5, respectively. This counts the events that are closer than 20 units in space, and 5 units in time.  
+
+.. doctest::
+
+    >>> result = interaction.modified_knox(events.space, events.t,delta=20,tau=5,permutations=99)
+
+Finally we examine the results. We call the statistic from the results dictionary. This reports a statistic value of 2.810160.  
+
+.. doctest::
+
+    >>> print("%2.8f"%result['stat'])
+    2.81016043
+
+Next we look at the pseudo-significance of this value, calculated by permuting the timestamps and rerunning the statistics. Here, 99 permutations were used, but an alternative number can be specified by the user. In this case, the results indicate that we fail to reject the null hypothesis of no space-time interaction using an alpha value of 0.05. 
+
+.. doctest::
+
+    >>> print("%2.2f"%result['pvalue'])
+    0.11
+
+
+Mantel Test
+-----------
+
+Akin to the Knox test in its simplicity, the Mantel test keeps the distance information discarded by the Knox test. The unstandardized Mantel statistic is calculated by summing the product of the spatial and temporal distances between all event pairs [#]_. To prevent multiplication by 0 in instances of colocated or simultaneous events, Mantel proposed adding a constant to the distance measurements. Additionally, he suggested a reciprocal transform of the resulting distance measurement to [...]
+
+.. math::
+
+	 Z=\sum_{i}^{n}\sum_{j}^{n}(d_{ij}^{s}+c)^{p}(d_{ij}^{t}+c)^{p} 
+
+Where, again, :math:`d^{s}` and :math:`d^{t}` denote distance in space and time, respectively. The constant, :math:`c`, and the power, :math:`p`, are parameters set by the user. The default values are 0 and 1, respectively. A standardized version of the Mantel test is implemented here in PySAL, however. The standardized statistic (:math:`r`) is a measure of correlation between the spatial and temporal distance matrices. This is expressed formally as: 
+
+.. math::
+
+         r=\frac{1}{n^2-n-1}\sum_{i}^{n}\sum_{j}^{n}\Bigg[\frac{d_{ij}^{s}-\bar{d^{s}}}{\sigma_{d^{s}}}\Bigg] \Bigg[\frac{d_{ij}^{t}-\bar{d^{t}}}{\sigma_{d^{t}}}\Bigg]
+
+Where :math:`\bar{d^{s}}` refers to the average distance in space, and :math:`\bar{d^{t}}` the average distance in time. For notational convenience :math:`\sigma_{d^{s}}` and :math:`\sigma_{d^{t}}` refer to the sample (not population) standard deviations, for distance in space and time, respectively. The same constant and power transformations may also be applied to the spatial and temporal distance matrices employed by the standardized Mantel. Significance is determined through a Monte  [...]
+
+Again, we use the Burkitt's Lymphoma data to illustrate the test. We start with the usual imports and read in the example data. 
+
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spatial_dynamics.interaction as interaction
+    >>> np.random.seed(100)
+    >>> path = "../pysal/examples/burkitt"
+    >>> events = interaction.SpaceTimeEvents(path,'T')
+
+The following example runs the standardized Mantel test with constants of 0 and transformations of 1, meaning the distance matrices will remain unchanged; however, as recommended by Mantel, a small constant should be added and an inverse transformation (i.e. -1) specified. 
+
+.. doctest::
+
+    >>> result = interaction.mantel(events.space, events.t,99,scon=0.0,spow=1.0,tcon=0.0,tpow=1.0)
+
+Next, we examine the result of the test. 
+
+.. doctest::
+
+    >>> print("%6.6f"%result['stat'])
+    0.014154
+
+Finally, we look at the pseudo-significance of this value, calculated by permuting the timestamps and rerunning the statistic for each of the 99 permutations. Again, note, the number of permutations can be changed by the user. According to these parameters, the results fail to reject the null hypothesis of no space-time interaction between the events.
+
+.. doctest::
+
+    >>> print("%2.2f"%result['pvalue'])
+    0.27
+
+Jacquez Test
+------------
+
+Instead of using a set distance in space and time to determine proximity (like the Knox test) the Jacquez test employs a nearest neighbor distance approach. This allows the test to account for changes in underlying population density. The statistic is calculated as the number of event pairs that are within the set of :math:`k` nearest neighbors for each other in both space and time [#]_. Significance of this count is established using a Monte Carlo permutation method. The test is express [...]
+
+.. math::
+
+	J_{k}=\sum_{i=1}^{n} \sum_{j=1}^{n} a_{ijk}^{s}a_{ijk}^{t}\\
+
+.. math::
+
+        \begin{align} \nonumber
+	a_{ijk}^{s} = 
+	\begin{cases}
+	1, & \text{if event \emph{j} is a \emph{k} nearest neighbor of event \emph{i} in space}\\
+	0, & \text{otherwise}
+	\end{cases} 
+        \end{align}
+
+.. math::
+
+        \begin{align} \nonumber
+	a_{ijk}^{t} = 
+	\begin{cases}
+	1, & \text{if event \emph{j} is a \emph{k} nearest neighbor of event \emph{i} in time}\\
+	0, & \text{otherwise}
+	\end{cases}
+        \end{align}
+
+Where :math:`n` = number of cases; :math:`a^{s}` = adjacency in space; :math:`a^{t}` = adjacency in time. To illustrate the test, the Burkitt's Lymphoma data are employed again. We start with the usual imports and read in the example data. 
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spatial_dynamics.interaction as interaction
+    >>> np.random.seed(100)
+    >>> path = "../pysal/examples/burkitt"
+    >>> events = interaction.SpaceTimeEvents(path,'T')
+
+
+The following runs the Jacquez test on the example data for a value of :math:`k` = 3 and reports the resulting statistic. In this case, there are 13 instances where events are nearest neighbors in both space and time. The significance of this can be assessed by calling the p-value from the results dictionary. Again, there is not enough evidence to reject the null hypothesis of no space-time interaction. 
+
+.. doctest::
+
+    >>> result = interaction.jacquez(events.space, events.t ,k=3,permutations=99)
+    >>> print result['stat'] # doctest: +SKIP
+    13
+    >>> print "%3.1f"%result['pvalue'] # doctest: +SKIP
+    0.2
+
+Spatial Dynamics API
+====================
+
+For further details see the :doc:`Spatial Dynamics API <../../library/spatial_dynamics/index>`.
+
+.. rubric:: Footnotes
+
+.. [#] Rey, S.J. 2001.
+       "`Spatial empirics for economic growth and convergence
+       <http://findarticles.com/p/articles/mi_hb4740/is_3_33/ai_n28858625/>`_",
+       34 Geographical Analysis, 33, 195-214.
+.. [#] The states are ordered alphabetically.
+.. [#] The complex values of the steady state distribution arise from
+       complex eigenvalues in the transition probability matrix which may indicate
+       cyclicality in the chain.
+.. [#] Rey, S.J. (2004) "`Spatial dependence in the evolution of regional
+       income distributions
+       <http://econpapers.repec.org/paper/wpawuwpur/0105001.htm>`_,"
+       in A. Getis, J. Mur and H.Zoeller (eds). Spatial Econometrics and Spatial
+       Statistics. Palgrave, London, pp. 194-213.
+.. [#] Kulldorff, M. (1998). Statistical methods for spatial epidemiology: tests
+       for randomness. In Gatrell, A. and Loytonen, M., editors, GIS and 
+       Health, pages 49–62. Taylor & Francis, London.
+.. [#] Tango, T. (2010). Statistical Methods for Disease Clustering. Springer,
+       New York.
+.. [#] Knox, E. (1964). The detection of space-time interactions. Journal of the
+       Royal Statistical Society. Series C (Applied Statistics), 13(1):25–30.
+.. [#] R.D. Baker. (2004). Identifying space-time disease clusters. Acta Tropica, 
+       91(3):291-299.
+.. [#] Kulldorff, M. and Hjalmars, U. (1999). The Knox method and other tests
       for space-time interaction. Biometrics, 55(2):544–552.
+.. [#] Williams, E., Smith, P., Day, N., Geser, A., Ellice, J., and Tukei, P.
+       (1978). Space-time clustering of Burkitt’s lymphoma in the West Nile 
+       district of Uganda: 1961-1975. British Journal of Cancer, 37(1):109.
+.. [#] Mantel, N. (1967). The detection of disease clustering and a generalized
+       regression approach. Cancer Research, 27(2):209–220.
+.. [#] Jacquez, G. (1996). A k nearest neighbour test for space-time 
+       interaction. Statistics in Medicine, 15(18):1935–1949.
+
diff --git a/doc/source/users/tutorials/econometrics.rst b/doc/source/users/tutorials/econometrics.rst
new file mode 100644
index 0000000..878c696
--- /dev/null
+++ b/doc/source/users/tutorials/econometrics.rst
@@ -0,0 +1,23 @@
+
+
+####################
+Spatial Econometrics
+####################
+
+Comprehensive user documentation on spreg can be found in
+Anselin, L. and S.J. Rey (2014) `Modern Spatial Econometrics in Practice:
+A Guide to GeoDa, GeoDaSpace and PySAL.
+<http://www.amazon.com/Modern-Spatial-Econometrics-Practice-GeoDaSpace-ebook/dp/B00RI9I44K>`_
+GeoDa Press, Chicago.
+
+
+spreg API
+=========
+
+
+For further details see the :doc:`spreg API <../../library/spreg/index>`.
+
+
+
+
+
diff --git a/doc/source/users/tutorials/examples.rst b/doc/source/users/tutorials/examples.rst
new file mode 100644
index 0000000..1c79fd3
--- /dev/null
+++ b/doc/source/users/tutorials/examples.rst
@@ -0,0 +1,142 @@
+******************************************************************
+PySAL: Example Data Sets 
+******************************************************************
+
+PySAL comes with a number of example data sets that are used in some of the
+documentation strings in the source code. All the example data sets can be
+found in the **examples** directory.
+
+10740
+=====
+Polygon shapefile for Albuquerque New Mexico.
+
+* 10740.dbf: attribute database file
+* 10740.shp: shapefile
+* 10740.shx: spatial index
+* 10740_queen.gal: queen contiguity GAL format
+* 10740_rook.gal: rook contiguity GAL format
+
+
+book
+====
+Synthetic data to illustrate spatial weights. Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* book.gal: rook contiguity for regular lattice
+* book.txt: attribute data for regular lattice
+
+calempdensity
+=============
+Employment density for California counties. Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* calempdensity.csv: data on employment and employment density in California
+  counties.
+
+chicago77
+=========
+Chicago Community Areas (n=77). Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* Chicago77.dbf: attribute data
+* Chicago77.shp: shapefile
+* Chicago77.shx: spatial index
+
+
+desmith
+=======
+Example data for autocorrelation analysis. Source: de Smith et al (2009)
+`Geospatial Analysis
+<http://www.spatialanalysisonline.com/output/html/MoranIandGearyC.html>`_ (Used
+with permission)
+
+* desmith.txt: attribute data for 10 spatial units
+* desmith.gal: spatial weights in GAL format
+
+juvenile
+========
+Cardiff juvenile delinquent residences.
+
+* juvenile.dbf: attribute data
+* juvenile.html: documentation
+* juvenile.shp: shapefile
+* juvenile.shx: spatial index
+* juvenile.gwt: spatial weights in GWT format
+
+mexico
+======
+State regional income Mexican states 1940-2000. Source:  Rey, S.J. and M.L.
+Sastre Gutierrez. "Interregional inequality dynamics in Mexico." Spatial
+Economic Analysis. Forthcoming.
+
+* mexico.csv: attribute data
+* mexico.gal: spatial weights in GAL format
+
+rook31
+======
+Small test shapefile
+
+* rook31.dbf: attribute data
+* rook31.gal: spatial weights in GAL format
+* rook31.shp: shapefile
+* rook31.shx: spatial index
+
+
+sacramento2
+===========
+1998 and 2001 Zip Code Business Patterns (Census Bureau) for Sacramento MSA
+
+ * sacramento2.dbf
+ * sacramento2.sbn
+ * sacramento2.sbx
+ * sacramento2.shp
+ * sacramento2.shx
+
+shp_test
+========
+Sample Shapefiles used only for testing purposes. Each example includes a ".shp" Shapefile, ".shx" Shapefile Index, ".dbf" DBase file, and a ".prj" ESRI Projection file.
+
+Examples include:
+
+ * Point: Example of an ESRI Shapefile of Type 1 (Point).
+ * Line: Example of an ESRI Shapefile of Type 3 (Line).
+ * Polygon: Example of an ESRI Shapefile of Type 5 (Polygon).
+
+sids2
+=====
+North Carolina county SIDS death counts and rates
+ 
+ * sids2.dbf:  attribute data
+ * sids2.html: documentation
+ * sids2.shp:  shapefile
+ * sids2.shx:  spatial index
+ * sids2.gal:  GAL file for spatial weights
+
+stl_hom
+=======
+Homicides and selected socio-economic characteristics for counties surrounding St Louis, MO. Data aggregated for three time periods: 1979-84 (steady decline in homicides), 1984-88 (stable period), and 1988-93 (steady increase in homicides). Source: S. Messner, L. Anselin, D. Hawkins, G. Deane, S. Tolnay, R. Baller (2000). An Atlas of the Spatial Patterning of County-Level Homicide, 1960-1990. Pittsburgh, PA, National Consortium on Violence Research (NCOVR).
+
+ * stl_hom.html: Metadata
+ * stl_hom.txt: txt file with attribute data
+ * stl_hom.wkt: A Well-Known-Text representation of the geometry.
+ * stl_hom.csv: attribute data and WKT geometry.
+ * stl.hom.gal: GAL file for spatial weights
+ 
+
+US Regional Incomes
+===================
+Per capita income for the lower 48 US states, 1929-2010
+
+ * us48.shp: shapefile 
+ * us48.dbf: dbf for shapefile
+ * us48.shx: index for shapefile
+ * usjoin.csv: attribute data (comma delimited file)
+
+Virginia
+========
+Virginia Counties Shapefile.
+
+  * virginia.shp: Shapefile
+  * virginia.shx: shapefile index
+  * virginia.dbf: attributes
+  * virginia.prj: shapefile projection
diff --git a/doc/source/users/tutorials/fileio.rst b/doc/source/users/tutorials/fileio.rst
new file mode 100644
index 0000000..a341799
--- /dev/null
+++ b/doc/source/users/tutorials/fileio.rst
@@ -0,0 +1,429 @@
+.. _fileio:
+
+.. testsetup:: *
+        
+        import pysal
+        import numpy as np
+
+******************************************
+An Overview of the FileIO system in PySAL.
+******************************************
+
+.. contents::
+
+Introduction
+============
+
+PySAL contains a new file input-output API that should be used for all file IO
+operations. The goal is to abstract file handling and return native PySAL data
+types when reading from known file types. A list of known extensions can be
+found by issuing the following command::
+
+    pysal.open.check()
+
+Note that in some cases the FileIO module will peek inside your file to
+determine its type. For example "geoda_txt" is just a unique scheme for ".txt"
+files, so when opening a ".txt" pysal will peek inside the file to determine
+if it has the necessary header information and dispatch accordingly. In the event
+that pysal does not understand your file, IO operations will be dispatched to
+python's internal open.
+
+Examples: Reading files
+=======================
+
+Shapefiles
+----------
+
+.. doctest::
+
+    >>> import pysal
+    >>> shp = pysal.open('../pysal/examples/10740.shp')
+    >>> poly = shp.next()
+    >>> type(poly)
+    <class 'pysal.cg.shapes.Polygon'>
+    >>> len(shp)
+    195
+    >>> shp.get(len(shp)-1).id
+    195
+    >>> polys = list(shp)
+    >>> len(polys)
+    195
+
+DBF Files
+---------
+
+.. doctest::
+
+    >>> import pysal
+    >>> db = pysal.open('../pysal/examples/10740.dbf','r')
+    >>> db.header
+    ['GIST_ID', 'FIPSSTCO', 'TRT2000', 'STFID', 'TRACTID']
+    >>> db.field_spec
+    [('N', 8, 0), ('C', 5, 0), ('C', 6, 0), ('C', 11, 0), ('C', 10, 0)]
+    >>> db.next()
+    [1, '35001', '000107', '35001000107', '1.07']
+    >>> db[0]
+    [[1, '35001', '000107', '35001000107', '1.07']]
+    >>> db[0:3]
+    [[1, '35001', '000107', '35001000107', '1.07'], [2, '35001', '000108', '35001000108', '1.08'], [3, '35001', '000109', '35001000109', '1.09']]
+    >>> db[0:5,1]
+    ['35001', '35001', '35001', '35001', '35001']
+    >>> db[0:5,0:2]
+    [[1, '35001'], [2, '35001'], [3, '35001'], [4, '35001'], [5, '35001']]
+    >>> db[-1,-1]
+    ['9712']
+
+CSV Files
+---------
+
+.. doctest::
+
+    >>> import pysal
+    >>> db = pysal.open('../pysal/examples/stl_hom.csv')
+    >>> db.header
+    ['WKT', 'NAME', 'STATE_NAME', 'STATE_FIPS', 'CNTY_FIPS', 'FIPS', 'FIPSNO', 'HR7984', 'HR8488', 'HR8893', 'HC7984', 'HC8488', 'HC8893', 'PO7984', 'PO8488', 'PO8893', 'PE77', 'PE82', 'PE87', 'RDAC80', 'RDAC85', 'RDAC90']
+    >>> db[0]
+    [['POLYGON ((-89.585220336914062 39.978794097900391,-89.581146240234375 40.094867706298828,-89.603988647460938 40.095306396484375,-89.60589599609375 40.136119842529297,-89.6103515625 40.3251953125,-89.269027709960938 40.329566955566406,-89.268562316894531 40.285579681396484,-89.154655456542969 40.285774230957031,-89.152763366699219 40.054969787597656,-89.151618957519531 39.919403076171875,-89.224777221679688 39.918678283691406,-89.411857604980469 39.918041229248047,-89.41243743896484 [...]
+    >>> fromWKT = pysal.core.util.wkt.WKTParser()
+    >>> db.cast('WKT',fromWKT)
+    >>> type(db[0][0][0])
+    <class 'pysal.cg.shapes.Polygon'>
+    >>> db[0][0][1:]
+    ['Logan', 'Illinois', 17, 107, 17107, 17107, 2.115428, 1.290722, 1.624458, 4, 2, 3, 189087, 154952, 184677, 5.10432, 6.59578, 5.832951, -0.991256, -0.940265, -0.845005]
+    >>> polys = db.by_col('WKT')
+    >>> from pysal.cg import standalone
+    >>> standalone.get_bounding_box(polys)[:]
+    [-92.70067596435547, 36.88180923461914, -87.91657257080078, 40.329566955566406]
+
+
+WKT Files
+---------
+
+.. doctest::
+
+    >>> import pysal
+    >>> wkt = pysal.open('../pysal/examples/stl_hom.wkt', 'r')
+    >>> polys = wkt.read()
+    >>> wkt.close()
+    >>> print len(polys)
+    78
+    >>> print polys[1].centroid
+    (-91.19578469430738, 39.990883050220845)
+
+
+GeoDa Text Files
+----------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> geoda_txt = pysal.open('../pysal/examples/stl_hom.txt', 'r')
+    >>> geoda_txt.header
+    ['FIPSNO', 'HR8488', 'HR8893', 'HC8488']
+    >>> print len(geoda_txt)
+    78
+    >>> geoda_txt.dat[0]
+    ['17107', '1.290722', '1.624458', '2']
+    >>> geoda_txt._spec
+    [<type 'int'>, <type 'float'>, <type 'float'>, <type 'int'>]
+    >>> geoda_txt.close()
+
+GAL Binary Weights Files
+------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> gal = pysal.open('../pysal/examples/sids2.gal','r')
+    >>> w = gal.read()
+    >>> gal.close()
+    >>> w.n
+    100
+
+GWT Weights Files
+-----------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> gwt = pysal.open('../pysal/examples/juvenile.gwt', 'r')
+    >>> w = gwt.read()
+    >>> gwt.close()
+    >>> w.n
+    168
+
+ArcGIS Text Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> arcgis_txt = pysal.open('../pysal/examples/arcgis_txt.txt','r','arcgis_text')
+    >>> w = arcgis_txt.read()
+    >>> arcgis_txt.close()
+    >>> w.n
+    3
+
+ArcGIS DBF Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> arcgis_dbf = pysal.open('../pysal/examples/arcgis_ohio.dbf','r','arcgis_dbf')
+    >>> w = arcgis_dbf.read()
+    >>> arcgis_dbf.close()
+    >>> w.n
+    88
+
+ArcGIS SWM Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> arcgis_swm = pysal.open('../pysal/examples/ohio.swm','r')
+    >>> w = arcgis_swm.read()
+    >>> arcgis_swm.close()
+    >>> w.n
+    88
+
+DAT Weights Files
+-----------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> dat = pysal.open('../pysal/examples/wmat.dat','r')
+    >>> w = dat.read()
+    >>> dat.close()
+    >>> w.n
+    49
+
+MATLAB MAT Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> mat = pysal.open('../pysal/examples/spat-sym-us.mat','r')
+    >>> w = mat.read()
+    >>> mat.close()
+    >>> w.n
+    46
+
+LOTUS WK1 Weights Files
+-----------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> wk1 = pysal.open('../pysal/examples/spat-sym-us.wk1','r')
+    >>> w = wk1.read()
+    >>> wk1.close()
+    >>> w.n
+    46
+
+GeoBUGS Text Weights Files
+--------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> geobugs_txt = pysal.open('../pysal/examples/geobugs_scot','r','geobugs_text')
+    >>> w = geobugs_txt.read()
+    WARNING: there are 3 disconnected observations
+    Island ids:  [6, 8, 11]
+    >>> geobugs_txt.close()
+    >>> w.n
+    56
+
+STATA Text Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> stata_txt = pysal.open('../pysal/examples/stata_sparse.txt','r','stata_text')
+    >>> w = stata_txt.read()
+    WARNING: there are 7 disconnected observations
+    Island ids:  [5, 9, 10, 11, 12, 14, 15]
+    >>> stata_txt.close()
+    >>> w.n
+    56
+
+.. _mtx:
+
+MatrixMarket MTX Weights Files
+------------------------------
+
+This file format or its variant is currently under consideration by the PySAL team 
+to store general spatial weights in a sparse matrix form.
+
+.. doctest::
+
+    >>> import pysal
+    >>> mtx = pysal.open('../pysal/examples/wmat.mtx','r')
+    >>> w = mtx.read()
+    >>> mtx.close()
+    >>> w.n
+    49
+
+Examples: Writing files
+=======================
+
+GAL Binary Weights Files
+------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> gal = pysal.open('../pysal/examples/virginia_queen.gal','w')
+    >>> gal.write(w)
+    >>> gal.close()
+
+GWT Weights Files
+-----------------
+
+Currently, it is not allowed to write a GWT file.
+
+ArcGIS Text Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> arcgis_txt = pysal.open('../pysal/examples/virginia_queen.txt','w','arcgis_text')
+    >>> arcgis_txt.write(w, useIdIndex=True)
+    >>> arcgis_txt.close()
+
+ArcGIS DBF Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> arcgis_dbf = pysal.open('../pysal/examples/virginia_queen.dbf','w','arcgis_dbf')
+    >>> arcgis_dbf.write(w, useIdIndex=True)
+    >>> arcgis_dbf.close()
+
+ArcGIS SWM Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> arcgis_swm = pysal.open('../pysal/examples/virginia_queen.swm','w')
+    >>> arcgis_swm.write(w, useIdIndex=True)
+    >>> arcgis_swm.close()
+
+DAT Weights Files
+-----------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> dat = pysal.open('../pysal/examples/virginia_queen.dat','w')
+    >>> dat.write(w)
+    >>> dat.close()
+
+MATLAB MAT Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> mat = pysal.open('../pysal/examples/virginia_queen.mat','w')
+    >>> mat.write(w)
+    >>> mat.close()
+
+LOTUS WK1 Weights Files
+-----------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> wk1 = pysal.open('../pysal/examples/virginia_queen.wk1','w')
+    >>> wk1.write(w)
+    >>> wk1.close()
+
+GeoBUGS Text Weights Files
+--------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> geobugs_txt = pysal.open('../pysal/examples/virginia_queen','w','geobugs_text')
+    >>> geobugs_txt.write(w)
+    >>> geobugs_txt.close()
+
+STATA Text Weights Files
+-------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> stata_txt = pysal.open('../pysal/examples/virginia_queen.txt','w','stata_text')
+    >>> stata_txt.write(w,matrix_form=True)
+    >>> stata_txt.close()
+
+MatrixMarket MTX Weights Files
+------------------------------
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.queen_from_shapefile('../pysal/examples/virginia.shp',idVariable='FIPS')
+    >>> w.n
+    136
+    >>> mtx = pysal.open('../pysal/examples/virginia_queen.mtx','w')
+    >>> mtx.write(w)
+    >>> mtx.close()
+
+Examples: Converting the format of spatial weights files
+========================================================
+
+PySAL provides a utility tool to convert a weights file from one format to another.
+
+From GAL to ArcGIS SWM format
+
+.. doctest::
+
+    >>> from pysal.core.util.weight_converter import weight_convert
+    >>> gal_file = '../pysal/examples/sids2.gal'
+    >>> swm_file = '../pysal/examples/sids2.swm'
+    >>> weight_convert(gal_file, swm_file, useIdIndex=True)
+    >>> wold = pysal.open(gal_file, 'r').read()
+    >>> wnew = pysal.open(swm_file, 'r').read()
+    >>> wold.n == wnew.n
+    True
+
+
+For further details see the :doc:`FileIO API <../../library/core/FileIO>`.
diff --git a/doc/source/users/tutorials/index.rst b/doc/source/users/tutorials/index.rst
new file mode 100644
index 0000000..9f59d7b
--- /dev/null
+++ b/doc/source/users/tutorials/index.rst
@@ -0,0 +1,20 @@
+.. _users-tutorials:
+
+Getting Started with PySAL
+==========================
+
+
+.. toctree::
+   :maxdepth: 1
+       
+   Introduction to the Tutorials <intro>
+   File Input and Output <fileio>
+   Spatial Weights <weights>
+   Spatial Autocorrelation <autocorrelation>
+   Spatial Econometrics <econometrics>
+   Spatial Smoothing <smoothing>
+   Regionalization <region>
+   Spatial Dynamics <dynamics>
+   Shapely Extension <shapely>
+   Sample Datasets <examples>
+   Next Steps <next>
diff --git a/doc/source/users/tutorials/intro.rst b/doc/source/users/tutorials/intro.rst
new file mode 100644
index 0000000..0fe12ba
--- /dev/null
+++ b/doc/source/users/tutorials/intro.rst
@@ -0,0 +1,108 @@
+#############################
+Introduction to the Tutorials
+#############################
+
+
+Assumptions
+===========
+
+The tutorials presented here are designed to illustrate a selection of the
+functionality in PySAL. Further details on PySAL functionality not covered in
+these tutorials can be found in the :doc:`API <../../library/index>`. The reader is
+**assumed to have working knowledge of the particular spatial analytical
+methods** illustrated. Background on spatial analysis can be found in the
+references cited in the tutorials.
+
+It is also assumed that the reader has already :doc:`installed PySAL <../installation>`.
+
+Examples
+============
+
+The examples use several sample data sets that are included in the pysal/examples
+directory. In the examples that follow, we refer to those using the path::
+
+  ../pysal/examples/filename_of_example
+ 
+You may need to adjust this path to match the location of the sample files on
+your system.
+
+
+Getting Help
+============
+
+Help for PySAL is available from a number of sources.
+
+email lists
+-----------
+The main channel for user support is the `openspace mailing list <http://groups.google.com/group/openspace-list>`_.
+
+
+Questions regarding the development of PySAL should be directed to
+`pysal-dev  <http://groups.google.com/group/pysal-dev>`_.
+
+Documentation
+-------------
+Documentation is available on-line at `pysal.org <http://pysal.org>`_.
+
+You can also obtain help at the interpreter:
+
+	>>> import pysal
+	>>> help(pysal)
+	
+which would bring up help on PySAL::
+
+	Help on package pysal:
+
+	NAME
+	    pysal
+
+	FILE
+	    /Users/serge/Dropbox/pysal/src/trunk/pysal/__init__.py
+
+	DESCRIPTION
+	    Python Spatial Analysis Library
+	    ===============================
+	    
+	    
+	    Documentation
+	    -------------
+	    PySAL documentation is available in two forms: python docstrings and a html webpage at http://pysal.org/
+	    
+	    Available sub-packages
+	    ----------------------
+	    
+	    cg
+	:
+
+Note that you can use this on any option within PySAL:
+
+	>>> w=pysal.lat2W()
+	>>> help(w)
+
+which brings up::
+
+	Help on W in module pysal.weights object:
+
+	class W(__builtin__.object)
+	 |  Spatial weights
+	 |  
+	 |  Parameters
+	 |  ----------
+	 |  neighbors       : dictionary
+	 |                    key is region ID, value is a list of neighbor IDS
+	 |                    Example:  {'a':['b'],'b':['a','c'],'c':['b']}
+	 |  weights = None  : dictionary
+	 |                    key is region ID, value is a list of edge weights
+	 |                    If not supplied all edge wegiths are assumed to have a weight of 1.
+	 |                    Example: {'a':[0.5],'b':[0.5,1.5],'c':[1.5]}
+	 |  id_order = None : list 
+	 |                    An ordered list of ids, defines the order of
+	 |                    observations when iterating over W if not set,
+	 |                    lexicographical ordering is used to iterate and the
+	 |                    id_order_set property will return False.  This can be
+	 |                    set after creation by setting the 'id_order' property.
+	 |  
+	 
+
+Note that the help is truncated at the bottom of the terminal window and more of the contents can be seen by scrolling (hit any key).
+	
diff --git a/doc/source/users/tutorials/next.rst b/doc/source/users/tutorials/next.rst
new file mode 100644
index 0000000..18057ea
--- /dev/null
+++ b/doc/source/users/tutorials/next.rst
@@ -0,0 +1,17 @@
+ 
+Next Steps with PySAL
+---------------------
+
+The tutorials you have (hopefully) just gone through should be enough to
+get you going with PySAL.  They covered some, but not all, of the
+modules in PySAL, and at that, only a selection of the functionality of
+particular classes that were included in the tutorials. To learn more about
+PySAL you should consult the `documentation <../../library/index.html>`_.
+
+
+PySAL is an open source, community-based project and we highly value contributions
+from individuals to the project. There are many ways to contribute, from filing
+bug reports, suggesting feature requests, helping with documentation, to
+becoming a developer. Individuals interested in joining the team should send an
+email to pysal-dev at googlegroups.com or contact one of the `developers <https://github.com/pysal/pysal/graphs/contributors>`_
+directly.
diff --git a/doc/source/users/tutorials/region.rst b/doc/source/users/tutorials/region.rst
new file mode 100644
index 0000000..f31dd28
--- /dev/null
+++ b/doc/source/users/tutorials/region.rst
@@ -0,0 +1,211 @@
+..
+
+.. testsetup:: *
+
+    import pysal
+    import numpy as np
+
+***************
+Regionalization
+***************
+
+Introduction
+============
+
+PySAL offers a number of tools for the construction of regions.  For the
+purposes of this section, a "region" is a group of "areas," and there are
+generally multiple regions in a particular dataset.  At this time, PySAL
+offers the max-p regionalization algorithm and tools for constructing random
+regions.
+
+max-p
+=====
+
+Most regionalization algorithms require the user to define a priori the number
+of regions to be built (e.g. k-means clustering). The max-p algorithm [#]_
+determines the number of regions (p) endogenously based on a set of areas, a
+matrix of attributes on each area and a floor constraint.  The floor
+constraint defines the minimum bound that a variable must reach for each
+region; for example, a constraint might be the minimum population each region
+must have.  max-p further enforces a contiguity constraint on the areas within
+regions.
+
+To illustrate this we will use data on per capita income from the lower 48 US
+states over the period 1929-2010. The goal is to form contiguous regions of
+states displaying similar levels of income throughout this period:
+
+.. doctest:: 
+
+    >>> import pysal
+    >>> import numpy as np
+    >>> import random
+    >>> f = pysal.open("../pysal/examples/usjoin.csv")
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+    >>> pci = pci.transpose()
+    >>> pci.shape
+    (48, 81)
+
+We also require a set of binary contiguity :ref:`weights<weights>` for the Maxp class:
+
+.. doctest:: 
+
+    >>> w = pysal.open("../pysal/examples/states48.gal").read()
+
+Once we have the attribute data and our weights object we can create an instance of Maxp:
+
+.. doctest:: 
+
+    >>> np.random.seed(100)
+    >>> random.seed(10)
+    >>> r = pysal.Maxp(w, pci, floor = 5, floor_variable = np.ones((48, 1)), initial = 99)
+
+Here we are forming regions with a minimum of 5 states in each region, so we set the floor_variable to a simple unit vector to ensure this floor constraint is satisfied. We also specify the initial number of feasible solutions to 99 - which are then searched over to pick the optimal feasible solution to then commence with the more expensive swapping component of the algorithm. [#]_
+
+The Maxp instance r has a number of attributes regarding the solution. First is the definition of the regions:
+
+.. doctest:: 
+
+    >>> r.regions
+    [['44', '34', '3', '25', '1', '4', '47'], ['12', '46', '20', '24', '13'], ['14', '45', '35', '30', '39'], ['6', '27', '17', '29', '5', '43'], ['33', '40', '28', '15', '41', '9', '23', '31', '38'], ['37', '8', '0', '7', '21', '2'], ['32', '19', '11', '10', '22'], ['16', '26', '42', '18', '36']]
+
+which is a list of eight lists of region ids. For example, the first nested list indicates there are seven states in the first region, while the last region has five states.  To determine which states these are we can read in the names from the original csv file:
+
+.. doctest:: 
+
+    >>> f.header
+    ['Name', 'STATE_FIPS', '1929', '1930', '1931', '1932', '1933', '1934', '1935', '1936', '1937', '1938', '1939', '1940', '1941', '1942', '1943', '1944', '1945', '1946', '1947', '1948', '1949', '1950', '1951', '1952', '1953', '1954', '1955', '1956', '1957', '1958', '1959', '1960', '1961', '1962', '1963', '1964', '1965', '1966', '1967', '1968', '1969', '1970', '1971', '1972', '1973', '1974', '1975', '1976', '1977', '1978', '1979', '1980', '1981', '1982', '1983', '1984', '1985', '1986', ' [...]
+    >>> names = f.by_col('Name')
+    >>> names = np.array(names)
+    >>> print names
+    ['Alabama' 'Arizona' 'Arkansas' 'California' 'Colorado' 'Connecticut'
+     'Delaware' 'Florida' 'Georgia' 'Idaho' 'Illinois' 'Indiana' 'Iowa'
+     'Kansas' 'Kentucky' 'Louisiana' 'Maine' 'Maryland' 'Massachusetts'
+     'Michigan' 'Minnesota' 'Mississippi' 'Missouri' 'Montana' 'Nebraska'
+     'Nevada' 'New Hampshire' 'New Jersey' 'New Mexico' 'New York'
+     'North Carolina' 'North Dakota' 'Ohio' 'Oklahoma' 'Oregon' 'Pennsylvania'
+     'Rhode Island' 'South Carolina' 'South Dakota' 'Tennessee' 'Texas' 'Utah'
+     'Vermont' 'Virginia' 'Washington' 'West Virginia' 'Wisconsin' 'Wyoming']
+    
+
+and then loop over the region definitions to identify the specific states comprising each of the regions:
+
+.. doctest:: 
+
+    >>> for region in r.regions:
+    ...     ids = map(int,region)
+    ...     print names[ids]
+    ...     
+    ['Washington' 'Oregon' 'California' 'Nevada' 'Arizona' 'Colorado' 'Wyoming']
+    ['Iowa' 'Wisconsin' 'Minnesota' 'Nebraska' 'Kansas']
+    ['Kentucky' 'West Virginia' 'Pennsylvania' 'North Carolina' 'Tennessee']
+    ['Delaware' 'New Jersey' 'Maryland' 'New York' 'Connecticut' 'Virginia']
+    ['Oklahoma' 'Texas' 'New Mexico' 'Louisiana' 'Utah' 'Idaho' 'Montana'
+     'North Dakota' 'South Dakota']
+    ['South Carolina' 'Georgia' 'Alabama' 'Florida' 'Mississippi' 'Arkansas']
+    ['Ohio' 'Michigan' 'Indiana' 'Illinois' 'Missouri']
+    ['Maine' 'New Hampshire' 'Vermont' 'Massachusetts' 'Rhode Island']
+
+
+We can evaluate our solution by developing a pseudo pvalue for the regionalization.
+This is done by comparing the within region sum of squares for the solution against
+simulated solutions where areas are randomly assigned to regions that maintain
+the cardinality of the original solution. This method must be explicitly called once the 
+Maxp instance has been created:
+
+.. doctest:: 
+
+    >>> r.inference()
+    >>> r.pvalue
+    0.01
+
+so we see we have a regionalization that is significantly different than a chance partitioning.
+
+
+Random Regions
+==============
+
+PySAL offers functionality to generate random regions based on user-defined
+constraints.  There are three optional parameters to constrain the
+regionalization: number of regions, cardinality and contiguity.  The default
+case simply takes a list of area IDs and randomly selects the number of
+regions and then allocates areas to each region.  The user can also pass a
+vector of integers to the cardinality parameter to designate the number of
+areas to randomly assign to each region.  The contiguity parameter takes a
+:ref:`spatial weights object <weights>` and uses that to ensure that each
+region is made up of spatially contiguous areas.  When the contiguity
+constraint is enforced, it is possible to arrive at infeasible solutions; the
+maxiter parameter can be set to make multiple attempts to find a feasible
+solution.  The following examples show some of the possible combinations of
+constraints.
+
+.. doctest:: 
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> from pysal.region import Random_Region
+    >>> nregs = 13
+    >>> cards = range(2,14) + [10]
+    >>> w = pysal.lat2W(10,10,rook = False)
+    >>> ids = w.id_order
+    >>>
+    >>> # unconstrained
+    >>> random.seed(10)
+    >>> np.random.seed(10)
+    >>> t0 = Random_Region(ids)
+    >>> t0.regions[0]
+    [19, 14, 43, 37, 66, 3, 79, 41, 38, 68, 2, 1, 60]
+    >>> # cardinality and contiguity constrained (num_regions implied)
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t1 = pysal.region.Random_Region(ids, num_regions = nregs, cardinality = cards, contiguity = w)
+    >>> t1.regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+    >>> # cardinality constrained (num_regions implied)
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t2 = Random_Region(ids, num_regions = nregs, cardinality = cards)
+    >>> t2.regions[0]
+    [37, 62]
+    >>> # number of regions and contiguity constrained
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t3 = Random_Region(ids, num_regions = nregs, contiguity = w)
+    >>> t3.regions[1]
+    [71, 72, 70, 93, 51, 91, 85, 74, 63, 73, 61, 62, 82]
+    >>> # cardinality and contiguity constrained
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t4 = Random_Region(ids, cardinality = cards, contiguity = w)
+    >>> t4.regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+    >>> # number of regions constrained
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t5 = Random_Region(ids, num_regions = nregs)
+    >>> t5.regions[0]
+    [37, 62, 26, 41, 35, 25, 36]
+    >>> # cardinality constrained
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t6 = Random_Region(ids, cardinality = cards)
+    >>> t6.regions[0]
+    [37, 62]
+    >>> # contiguity constrained
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t7 = Random_Region(ids, contiguity = w)
+    >>> t7.regions[0]
+    [37, 27, 36, 17]
+    >>>
+
+Further Information 
+====================
+
+For further details see the :doc:`Regionalization  API <../../library/region/index>`.
+
+
+.. rubric:: Footnotes
+
+.. [#] Duque, J. C., L. Anselin and S. J. Rey. 2011. "The max-p-regions problem."  *Journal of Regional Science* `DOI: 10.1111/j.1467-9787.2011.00743.x <http://onlinelibrary.wiley.com/doi/10.1111/j.1467-9787.2011.00743.x/abstract>`_
+.. [#] Because this is a randomized algorithm, results may vary when replicating this example. To reproduce a regionalization solution, you should first set the random seed generator. See http://docs.scipy.org/doc/numpy/reference/generated/numpy.random.seed.html for more information.
diff --git a/doc/source/users/tutorials/shapely.rst b/doc/source/users/tutorials/shapely.rst
new file mode 100644
index 0000000..85aeaaf
--- /dev/null
+++ b/doc/source/users/tutorials/shapely.rst
@@ -0,0 +1,94 @@
+.. testsetup:: *
+        
+        import pysal
+        import numpy as np
+
+###########################################
+Using PySAL with Shapely for GIS Operations
+###########################################
+
+
+.. versionadded:: 1.3
+
+Introduction
+============
+
+The `Shapely <http://pypi.python.org/pypi/Shapely>`_ project is a BSD-licensed
+Python package for manipulation and analysis of planar geometric objects, and
+depends on the widely used GEOS library.
+
+PySAL supports interoperation with the Shapely library through Shapely's Python
+Geo Interface. All PySAL geometries provide a __geo_interface__ property which
+models the geometries as a GeoJSON object. Shapely geometry objects also export
+the __geo_interface__ property and can be adapted to PySAL geometries using
+the :py:func:`pysal.cg.asShape` function.
+
+Additionally, PySAL provides an optional contrib module that handles the
+conversion between pysal and shapely data structures for you.  The module can
+be found at :py:mod:`pysal.contrib.shapely_ext`.
+
+Installation
+============
+
+Please refer to the `Shapely <http://pypi.python.org/pypi/Shapely>`_
+website for instructions on installing Shapely and its
+dependencies, *without which PySAL's Shapely extension will not work.*
+
+Usage
+=====
+
+Using the Python Geo Interface...
+
+.. doctest::
+
+    >>> import pysal
+    >>> import shapely.geometry
+    >>> # The get_path function returns the absolute system path to pysal's
+    >>> # included example files no matter where they are installed on the system.
+    >>> fpath = pysal.examples.get_path('stl_hom.shp')
+    >>> # Now, open the shapefile using pysal's FileIO
+    >>> shps = pysal.open(fpath , 'r')
+    >>> # We can read a polygon...
+    >>> polygon = shps.next()
+    >>> # To use this polygon with shapely we simply convert it with
+    >>> # Shapely's asShape method.
+    >>> polygon = shapely.geometry.asShape(polygon)
+    >>> # now we can operate on our polygons like normal shapely objects...
+    >>> print "%.4f"%polygon.area
+    0.1701
+    >>> # We can do things like buffering...
+    >>> eroded_polygon = polygon.buffer(-0.01)
+    >>> print "%.4f"%eroded_polygon.area
+    0.1533
+    >>> # and containment testing...
+    >>> polygon.contains(eroded_polygon)
+    True
+    >>> eroded_polygon.contains(polygon)
+    False
+    >>> # To go back to pysal shapes we call pysal.cg.asShape...
+    >>> eroded_polygon = pysal.cg.asShape(eroded_polygon)
+    >>> type(eroded_polygon)
+    <class 'pysal.cg.shapes.Polygon'>
+
+Using The PySAL shapely_ext module...
+
+.. doctest::
+
+    >>> import pysal
+    >>> from pysal.contrib import shapely_ext
+    >>> fpath = pysal.examples.get_path('stl_hom.shp')
+    >>> shps = pysal.open(fpath , 'r')
+    >>> polygon = shps.next()
+    >>> eroded_polygon = shapely_ext.buffer(polygon, -0.01)
+    >>> print "%0.4f"%eroded_polygon.area
+    0.1533
+    >>> shapely_ext.contains(polygon,eroded_polygon)
+    True
+    >>> shapely_ext.contains(eroded_polygon,polygon)
+    False
+    >>> type(eroded_polygon)
+    <class 'pysal.cg.shapes.Polygon'>
+
+
+    
+
diff --git a/doc/source/users/tutorials/smoothing.rst b/doc/source/users/tutorials/smoothing.rst
new file mode 100644
index 0000000..1a52e45
--- /dev/null
+++ b/doc/source/users/tutorials/smoothing.rst
@@ -0,0 +1,405 @@
+.. testsetup:: * 
+
+        import pysal
+        import numpy as np
+
+******************
+Spatial Smoothing
+******************
+
+.. contents::
+
+Introduction
+============
+
+In the spatial analysis of attributes measured for areal units, it is often
+necessary to transform an extensive variable, such as number of disease cases
+per census tract, into an intensive variable that takes into account the
+underlying population at risk.  Raw rates, counts divided by population values,
+are a common standardization in the literature, yet these tend to have unequal
+reliability due to different population sizes across the spatial units.  This problem becomes
+severe for areas with small population values, since the raw rates for those
+areas tend to have higher variance.
+
+A variety of spatial smoothing methods have been suggested to address this problem by aggregating
+the counts and population values for the areas neighboring an observation and
+using these new measurements for its rate computation.  PySAL provides a range
+of smoothing techniques that exploit different types of moving windows and
+non-parametric weighting schemes as well as the Empirical Bayesian principle.
+In addition, PySAL offers several methods for calculating age-standardized
+rates, since age standardization is critical in estimating rates of some events
+where the probability of an event occurrence is different across different age
+groups.
+
+In what follows, we overview the methods for age standardization and spatial smoothing 
+and describe their implementations in PySAL. [#]_
+
+Age Standardization in PySAL
+============================
+
+Raw rates, counts divided by population values, are based on an implicit assumption 
+that the risk of an event is constant over all age/sex categories in the population. 
+For many phenomena, however, the risk is not uniform and often highly correlated with age. 
+To take this into account explicitly, the risks for individual age categories can be estimated 
+separately and averaged to produce a representative value for an area. 
+
+PySAL supports three approaches to this age standardization: crude, direct, and indirect 
+standardization.
+
+Crude Age Standardization
+-------------------------
+
+In this approach, the rate for an area is simply the sum of age-specific rates weighted by 
+the ratios of each age group in the total population. 
+
+To obtain the rates based on this approach, we first need to create two variables
+that correspond to event counts and population values, respectively.
+
+.. doctest:: 
+
+    >>> import numpy as np
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+Each set of numbers should include n by h elements where n and h are the number of areal units
+and the number of age groups. In the above example there are two regions with 4 age groups.
+Age groups are identical across regions. The first four elements in b represent the populations of 4 age 
+groups in the first region, and the last four elements the populations of the same age groups in the second 
+region. 
+
+To apply the crude age standardization, we need to make the following function call:
+
+.. doctest::  
+
+    >>> from pysal.esda import smoothing as sm
+    >>> sm.crude_age_standardization(e, b, 2)
+    array([ 0.2375    ,  0.26666667])
+
+In the function call above, the last argument indicates the number of area units.
+The outcome in the second line shows that the age-standardized rates for two areas 
+are about 0.24 and 0.27, respectively.
+
+Direct Age Standardization
+--------------------------
+
+Direct age standardization is a variation of the crude age standardization.
+While crude age standardization uses the ratios of each age group in the observed population,
+direct age standardization weights age-specific rates by the ratios of each age group in a reference 
+population. This reference population, the so-called standard million, is another required 
+argument in the PySAL implementation of direct age standardization:
+
+.. doctest:: 
+
+    >>> s = np.array([100, 90, 100, 90, 100, 90, 100, 90])
+    >>> rate = sm.direct_age_standardization(e, b, s, 2, alpha=0.05)
+    >>> np.array(rate).round(6)
+    array([[ 0.23744 ,  0.192049,  0.290485],
+           [ 0.266507,  0.217714,  0.323051]])
+
+The outcome of direct age standardization includes a set of standardized rates and their confidence 
+intervals. The confidence intervals can vary according to the value for the last argument, alpha.
+
+Indirect Age Standardization
+----------------------------
+
+While direct age standardization effectively addresses the variety in the risks across 
+age groups, its indirect counterpart is better suited to handle the potential
+imprecision of age-specific rates due to the small population size. This method
+uses age-specific rates from the standard million instead of the observed
+population. It then weights the rates by the ratios of each age group in the
+observed population. To compute the age-specific rates from the standard
+million, the PySAL implementation of indirect age standardization requires
+another argument that contains the counts of the events occurred in the
+standard million.
+
+.. doctest:: 
+
+    >>> s_e = np.array([10, 15, 12, 10, 5, 3, 20, 8])
+    >>> rate = sm.indirect_age_standardization(e, b, s_e, s, 2, alpha=0.05)
+    >>> np.array(rate).round(6)
+    array([[ 0.208055,  0.170156,  0.254395],
+           [ 0.298892,  0.246631,  0.362228]])
+
+The outcome of indirect age standardization is the same as that of its direct counterpart.
+
+Spatial Smoothing in PySAL
+==========================
+
+Mean and Median Based Smoothing
+-------------------------------
+
+A simple approach to rate smoothing is to find a local average or median from the rates of each 
+observation and its neighbors. The first method adopting this approach is the so-called locally 
+weighted averages or disk smoother. In this method a rate for each observation is replaced 
+by an average of rates for its neighbors. A :ref:`spatial weights object
+<weights>` is used to specify the neighborhood relationships among
+observations. To obtain locally weighted averages of the homicide rates in the
+counties surrounding St. Louis during 1979-84, we first read the corresponding
+data table and extract data values for the homicide counts (the 11th column)
+and total population (the 13th column):
+
+.. doctest:: 
+
+    >>> import pysal
+    >>> stl = pysal.open('../pysal/examples/stl_hom.csv', 'r')
+    >>> e, b = np.array(stl[:,10]), np.array(stl[:,13])
+
+We then read the spatial weights file defining neighborhood relationships among the counties 
+and ensure that the :ref:`order <id_order>` of observations in the weights object is the same as that in the data table. 
+
+.. doctest:: 
+
+    >>> w = pysal.open('../pysal/examples/stl.gal', 'r').read()
+    >>> if not w.id_order_set: w.id_order = range(1,len(stl) + 1)
+
+Now we calculate locally weighted averages of the homicide rates.
+
+.. doctest:: 
+
+    >>> rate = sm.Disk_Smoother(e, b, w)
+    >>> rate.r
+    array([  4.56502262e-05,   3.44027685e-05,   3.38280487e-05,
+             4.78530468e-05,   3.12278573e-05,   2.22596997e-05,
+             ...
+             5.29577710e-05,   5.51034691e-05,   4.65160450e-05,
+             5.32513363e-05,   3.86199097e-05,   1.92952422e-05])
+
+A variation of locally weighted averages is to use median instead of mean.
+In other words, the rate for an observation can be replaced by the median of the rates of its neighbors.
+This method is called locally weighted median and can be applied in the following way:
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Median_Rate(e, b, w)
+    >>> rate.r
+    array([  3.96047383e-05,   3.55386859e-05,   3.28308921e-05,
+             4.30731238e-05,   3.12453969e-05,   1.97300409e-05,
+             ...
+             6.10668237e-05,   5.86355507e-05,   3.67396656e-05,
+             4.82535850e-05,   5.51831429e-05,   2.99877050e-05])
+
+In this method the procedure to find local medians can be iterated until no further change occurs. 
+The resulting local medians are called iteratively resmoothed medians.
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Median_Rate(e, b, w, iteration=10)
+    >>> rate.r
+    array([  3.10194715e-05,   2.98419439e-05,   3.10194715e-05,
+             3.10159267e-05,   2.99214885e-05,   2.80530524e-05,
+             ...
+             3.81364519e-05,   4.72176972e-05,   3.75320135e-05,
+             3.76863269e-05,   4.72176972e-05,   3.75320135e-05])
+
+The pure local medians can also be replaced by a weighted median. To obtain weighted medians, 
+we need to create an array of weights. For example, we can use the total population of the counties 
+as auxiliary weights:
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Median_Rate(e, b, w, aw=b)
+    >>> rate.r
+    array([  5.77412020e-05,   4.46449551e-05,   5.77412020e-05,
+             5.77412020e-05,   4.46449551e-05,   3.61363528e-05,
+             ...
+             5.49703305e-05,   5.86355507e-05,   3.67396656e-05,
+             3.67396656e-05,   4.72176972e-05,   2.99877050e-05])
+
+When obtaining locally weighted medians, we can consider only a specific subset of neighbors 
+rather than all of them. A representative method following this approach is the headbanging smoother. 
+In this method all areal units are represented by their geometric centroids. 
+Among the neighbors of each observation, only near collinear points are considered for median search. 
+Then, triples of points are selected from the near collinear points, and local medians are computed 
+from the triples' rates. [#]_
+We apply this headbanging smoother to the rates of the deaths from Sudden Infant Death Syndrome (SIDS) 
+for North Carolina counties during 1974-78. We first need to read the source data and extract the event 
+counts (the 10th column) and population values (the 9th column). 
+In this example the population values correspond to the numbers of live births during 1974-78. 
+
+.. doctest:: 
+
+    >>> sids_db = pysal.open('../pysal/examples/sids2.dbf', 'r')
+    >>> e, b = np.array(sids_db[:,9]), np.array(sids_db[:,8])
+
+Now we need to find triples for each observation. To support the search of triples, PySAL 
+provides a class called Headbanging_Triples. This class requires an array of point observations, 
+a spatial weights object, and the number of triples as its arguments:
+
+.. doctest:: 
+
+    >>> from pysal import knnW
+    >>> sids = pysal.open('../pysal/examples/sids2.shp', 'r')
+    >>> sids_d = np.array([i.centroid for i in sids])
+    >>> sids_w = knnW(sids_d,k=5)
+    >>> if not sids_w.id_order_set: sids_w.id_order = sids_w.id_order
+    >>> triples = sm.Headbanging_Triples(sids_d,sids_w,k=5)
+
+The third line in the above example shows how to extract centroids of polygons. 
+In this example we define 5 neighbors for each observation by using nearest neighbors criteria.
+In the last line we define the maximum number of triples to be found as 5.
+
+Now we use the triples to compute the headbanging median rates:
+
+.. doctest:: 
+
+    >>> rate = sm.Headbanging_Median_Rate(e,b,triples)
+    >>> rate.r
+    array([ 0.00075586,  0.        ,  0.0008285 ,  0.0018315 ,  0.00498891,
+            0.00482094,  0.00133156,  0.0018315 ,  0.00413223,  0.00142116,
+            ...
+            0.00221541,  0.00354767,  0.00259903,  0.00392952,  0.00207125,
+            0.00392952,  0.00229253,  0.00392952,  0.00229253,  0.00229253])
+
+As in the locally weighted medians, we can use a set of auxiliary weights and resmooth the medians 
+iteratively.
+
+Non-parametric Smoothing
+------------------------
+
+Non-parametric smoothing methods compute rates without making any assumptions of distributional 
+properties of rate estimates. A representative method in this approach is spatial filtering. 
+PySAL provides the most simplistic form of spatial filtering where a user-specified grid is imposed 
+on the data set and a moving window with a fixed or adaptive radius visits each vertex of the grid to 
+compute the rate at the vertex. Using the previous SIDS example, we can use Spatial_Filtering class:
+
+.. doctest:: 
+
+    >>> bbox = [sids.bbox[:2], sids.bbox[2:]]
+    >>> rate = sm.Spatial_Filtering(bbox, sids_d, e, b, 10, 10, r=1.5)
+    >>> rate.r
+    array([ 0.00152555,  0.00079271,  0.00161253,  0.00161253,  0.00139513,
+            0.00139513,  0.00139513,  0.00139513,  0.00139513,  0.00156348,
+            ...
+            0.00240216,  0.00237389,  0.00240641,  0.00242211,  0.0024854 ,
+            0.00255477,  0.00266573,  0.00288918,  0.0028991 ,  0.00293492])
+
+The first and second arguments of the Spatial_Filtering class are a minimum bounding box containing the 
+observations and a set of centroids representing the observations.
+Be careful that the bounding box is NOT the bounding box of the centroids.
+The fifth and sixth arguments are to specify the numbers of grid cells along x and y axes.
+The last argument, r, is to define the radius of the moving window. When this parameter is set,
+a fixed radius is applied to all grid vertices. To make the size of moving window variable,
+we can specify the minimum number of population in the moving window without specifying r:
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Filtering(bbox, sids_d, e, b, 10, 10, pop=10000)
+    >>> rate.r
+    array([ 0.00157398,  0.00157398,  0.00157398,  0.00157398,  0.00166885,
+            0.00166885,  0.00166885,  0.00166885,  0.00166885,  0.00166885,
+            ...
+            0.00202977,  0.00215322,  0.00207378,  0.00207378,  0.00217173,
+            0.00232408,  0.00222717,  0.00245399,  0.00267857,  0.00267857])
+
+The spatial rate smoother is another non-parametric smoothing method that PySAL supports.
+This smoother is very similar to the locally weighted averages. In this method, however, 
+the weighted sum is applied to event counts and population values separately. 
+The resulting weighted sum of event counts is then divided by the counterpart of population 
+values. To obtain neighbor information, we need to use a spatial weights matrix as before. 
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Rate(e, b, sids_w)
+    >>> rate.r
+    array([ 0.00114976,  0.00104622,  0.00110001,  0.00153257,  0.00399662,
+            0.00361428,  0.00146807,  0.00238521,  0.00288871,  0.00145228,
+            ...
+            0.00240839,  0.00376101,  0.00244941,  0.0028813 ,  0.00240839,
+            0.00261705,  0.00226554,  0.0031575 ,  0.00254536,  0.0029003 ])
+
+Another variation of spatial rate smoother is kernel smoother. PySAL supports kernel smoothing 
+by using a kernel spatial weights instance in place of a general spatial weights object.
+
+.. doctest:: 
+
+    >>> from pysal import Kernel
+    >>> kw = Kernel(sids_d)
+    >>> if not kw.id_order_set: kw.id_order = range(0,len(sids_d))
+    >>> rate = sm.Kernel_Smoother(e, b, kw)
+    >>> rate.r
+    array([ 0.0009831 ,  0.00104298,  0.00137113,  0.00166406,  0.00556741,
+            0.00442273,  0.00158202,  0.00243354,  0.00282158,  0.00099243,
+            ...
+            0.00221017,  0.00328485,  0.00257988,  0.00370461,  0.0020566 ,
+            0.00378135,  0.00240358,  0.00432019,  0.00227857,  0.00251648])
+
+Age-adjusted rate smoother is another non-parametric smoother that PySAL provides.
+This smoother applies direct age standardization while computing spatial rates. 
+To illustrate the age-adjusted rate smoother, we create a new set of event counts and population values 
+as well as a new kernel weights object.
+
+.. doctest:: 
+
+    >>> e = np.array([10, 8, 1, 4, 3, 5, 4, 3, 2, 1, 5, 3])
+    >>> b = np.array([100, 90, 15, 30, 25, 20, 30, 20, 80, 80, 90, 60])
+    >>> s = np.array([98, 88, 15, 29, 20, 23, 33, 25, 76, 80, 89, 66])
+    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+    >>> kw=Kernel(points)
+    >>> if not kw.id_order_set: kw.id_order = range(0,len(points))
+
+In the above example we created 6 observations each of which has two age groups. To apply age-adjusted 
+rate smoothing, we use the Age_Adjusted_Smoother class as follows:
+
+.. doctest:: 
+
+    >>> rate = sm.Age_Adjusted_Smoother(e, b, kw, s)
+    >>> rate.r
+    array([ 0.10519625,  0.08494318,  0.06440072,  0.06898604,  0.06952076,
+            0.05020968])
+
+Empirical Bayes Smoothers
+-------------------------
+
+The last group of smoothing methods that PySAL supports is based upon the Bayesian principle. These methods adjust 
+a raw rate by taking into account information in the other raw rates. 
+As a reference PySAL provides a method for a-spatial Empirical Bayes smoothing:
+
+.. doctest:: 
+
+    >>> e, b = sm.sum_by_n(e, np.ones(12), 6), sm.sum_by_n(b, np.ones(12), 6)
+    >>> rate = sm.Empirical_Bayes(e, b)
+    >>> rate.r
+    array([ 0.09080775,  0.09252352,  0.12332267,  0.10753624,  0.03301368,
+            0.05934766])
+
+In the first line of the above example we aggregate the event counts and population values by observation.
+Next we apply the Empirical_Bayes class to the aggregated counts and population values.
+
+A spatial Empirical Bayes smoother is also implemented in PySAL. This method requires an additional 
+argument, i.e., a spatial weights object. We continue to reuse the kernel spatial weights object we built before.
+
+.. doctest:: 
+
+    >>> rate = sm.Spatial_Empirical_Bayes(e, b, kw) 
+    >>> rate.r
+    array([ 0.10105263,  0.10165261,  0.16104362,  0.11642038,  0.0226908 ,
+            0.05270639])
+
+Excess Risk
+-----------
+
+Besides a variety of spatial smoothing methods, PySAL provides a class for estimating excess risk from event counts 
+and population values. Excess risks are the ratios of observed event counts over expected event counts.
+An example for the class usage is as follows:
+
+.. doctest:: 
+
+    >>> risk = sm.Excess_Risk(e, b)
+    >>> risk.r
+    array([ 1.23737916,  1.45124717,  2.32199546,  1.82857143,  0.24489796,
+            0.69659864])
+
+Further Information 
+====================
+
+For further details see the :doc:`Smoothing API <../../library/esda/smoothing>`.
+
+.. rubric:: Footnotes
+
+.. [#] Although this tutorial provides an introduction to the PySAL implementations for spatial smoothing, it is not exhaustive. Complete documentation for the implementations can be found by accessing the help from within a Python interpreter. 
+.. [#] For the details of triple selection and headbanging smoothing please
+       refer to Anselin, L., Lozano, N., and Koschinsky, J. (2006). "`Rate
+       Transformations and Smoothing
+       <http://geodacenter.asu.edu/pdf/smoothing_06.pdf>`_". GeoDa Center
+       Research Report.
+
diff --git a/doc/source/users/tutorials/weights.rst b/doc/source/users/tutorials/weights.rst
new file mode 100644
index 0000000..d33af9d
--- /dev/null
+++ b/doc/source/users/tutorials/weights.rst
@@ -0,0 +1,944 @@
+.. _weights:
+
+.. testsetup:: *
+
+    import pysal
+    import numpy as np
+
+***************
+Spatial Weights
+***************
+
+.. contents::
+
+Introduction
+============
+
+Spatial weights are central components of many areas of spatial analysis. In
+general terms, for a spatial data set composed of n locations (points, areal
+units, network edges, etc.), the spatial weights matrix expresses the potential
+for interaction between observations at each pair i,j of locations. There is a rich
+variety of ways to specify the structure of these weights, and
+PySAL supports the creation, manipulation and analysis of spatial weights
+matrices across three different general types:
+
+ * Contiguity Based Weights
+ * Distance Based Weights
+ * Kernel Weights
+
+These different types of weights are implemented as instances of the PySAL weights class 
+:class:`~pysal.weights.W`. 
+
+In what follows, we provide a high level overview of spatial weights in PySAL, starting with the three different types of weights, followed by
+a closer look at the properties of the W class and some related functions. [#]_
+
+PySAL Spatial Weight Types
+==========================
+PySAL weights are handled in objects of the :class:`pysal.weights.W` class. The
+conceptual idea of spatial weights is that of a nxn matrix in which the
+diagonal elements (:math:`w_{ii}`) are set to zero by definition and the rest of
+the cells (:math:`w_{ij}`) capture the potential of interaction. However, these
+matrices tend to be fairly sparse (i.e. many cells contain zeros) and hence a
+full nxn array would not be an efficient representation. PySAL employs a
+different way of storing that is structured in two main dictionaries [#]_ :
+neighbors which, for each observation (key) contains a list of the other ones 
+(value) with potential for interaction (:math:`w_{ij} \neq 0`); and weights, 
+which contains the weight values for each of those observations (in the same 
+order). This way, large datasets can be stored when keeping the full matrix 
+would not be possible because of memory constraints. In addition to the sparse
+representation via the weights and neighbors dictionaries, a PySAL W object
+also has an attribute called sparse, which is a `scipy.sparse
+<http://docs.scipy.org/doc/scipy/reference/sparse.html>`_ CSR
+representation of the spatial weights. (See :ref:`wsp` for an alternative
+PySAL weights object.) 
+
+.. _contiguity:
+
+Contiguity Based Weights
+------------------------
+
+To illustrate the general weights object, we start with a simple contiguity
+matrix constructed for a 5 by 5 lattice (composed of 25 spatial units):
+
+.. doctest::
+
+    >>> import pysal
+    >>> w = pysal.lat2W(5, 5)
+
+The w object has a number of attributes:
+
+.. doctest::
+
+    >>> w.n
+    25
+    >>> w.pct_nonzero
+    0.128
+    >>> w.weights[0]
+    [1.0, 1.0]
+    >>> w.neighbors[0]
+    [5, 1]
+    >>> w.neighbors[5]
+    [0, 10, 6]
+    >>> w.histogram
+    [(2, 4), (3, 12), (4, 9)]
+
+n is the number of spatial units, so conceptually we can think of the
+weights as being stored in a 25x25 matrix. The second attribute
+(pct_nonzero) shows the sparseness of the matrix. The key
+attributes used to store contiguity relations in W are the neighbors and
+weights attributes. In the example above we see that the observation
+with id 0 (Python is zero-offset) has two neighbors with ids [5, 1] each of
+which have equal weights of 1.0.
+
+The histogram attribute is a set of tuples indicating the cardinality of the
+neighbor relations. In this case we have a regular lattice, so there are 4 units that have 2
+neighbors (corner cells), 12 units with 3 neighbors (edge cells), and 9 units
+with 4 neighbors (internal cells).
+
+In the above example, the default criterion for contiguity on the lattice was
+that of the rook which takes as neighbors any pair of cells that share an edge.
+Alternatively, we could have used the queen criterion to include the vertices
+of the lattice to define contiguities:
+
+.. doctest::
+
+	>>> wq = pysal.lat2W(rook = False)
+	>>> wq.neighbors[0]
+	[5, 1, 6]
+	>>> 
+
+The bishop criterion, which designates pairs of cells as neighbors if they share
+only a vertex, is yet a third alternative for contiguity weights. A bishop matrix
+can be computed as the :ref:`difference` between the rook and queen cases.
+
+The lat2W function is particularly useful in setting up simulation experiments
+requiring a regular grid. For empirical research, a common use case is to have
+a shapefile, which is a nontopological vector data structure, and a need
+to carry out some form of spatial analysis that requires spatial weights. Since
+topology is not stored in the underlying file there is a need to construct
+the spatial weights prior to carrying out the analysis. In PySAL spatial
+weights can be obtained directly from shapefiles:
+
+.. doctest::
+
+    >>> w = pysal.rook_from_shapefile("../pysal/examples/columbus.shp")
+    >>> w.n
+    49
+    >>> print "%.4f"%w.pct_nonzero
+    0.0833
+    >>> w.histogram
+    [(2, 7), (3, 10), (4, 17), (5, 8), (6, 3), (7, 3), (8, 0), (9, 1)]
+
+If queen, rather than rook, contiguity is required then the following would work:
+
+.. doctest::
+
+    >>> w = pysal.queen_from_shapefile("../pysal/examples/columbus.shp")
+    >>> print "%.4f"%w.pct_nonzero
+    0.0983
+    >>> w.histogram
+    [(2, 5), (3, 9), (4, 12), (5, 5), (6, 9), (7, 3), (8, 4), (9, 1), (10, 1)]
+    
+
+
+Distance Based Weights
+----------------------
+
+In addition to using contiguity to define  neighbor relations, more general
+functions of the distance separating observations can be used to specify the
+weights.
+
+Please note that distance calculations are coded for a flat surface, so you
+will need to have your shapefile projected in advance for the output to be
+correct.
+
+k-nearest neighbor weights
+--------------------------
+
+The neighbors for a given observation can be defined using a k-nearest neighbor criterion.
+For example we could use the centroids of our
+5x5 lattice as point locations to measure the distances. First, we import numpy to 
+create the coordinates as a 25x2 numpy array named data (numpy arrays are the only
+form of input supported at this point):
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> x,y=np.indices((5,5))
+    >>> x.shape=(25,1)
+    >>> y.shape=(25,1)
+    >>> data=np.hstack([x,y])
+    
+    
+then define the knn set as:
+
+.. doctest::
+
+    >>> wknn3 = pysal.knnW(data, k = 3)
+    >>> wknn3.neighbors[0]
+    [1, 5, 6]
+    >>> wknn3.s0
+    75.0
+    >>> w4 = pysal.knnW(data, k = 4)
+    >>> set(w4.neighbors[0]) == set([1, 5, 6, 2])
+    True
+    >>> w4.s0
+    100.0
+    >>> w4.weights[0]
+    [1.0, 1.0, 1.0, 1.0]
+
+Alternatively, we can use a utility function to build a knn W straight from a
+shapefile:
+
+.. doctest::
+    
+    >>> wknn5 = pysal.knnW_from_shapefile(pysal.examples.get_path('columbus.shp'), k=5)
+    >>> wknn5.neighbors[0]
+    [2, 1, 3, 7, 4]
+
+Distance band weights
+---------------------
+
+Knn weights ensure that all observations have the same number of neighbors.  [#]_
+An alternative distance based set of weights relies on distance bands or
+thresholds to define the neighbor set for each spatial unit as those other units
+falling within a threshold distance of the focal unit:
+
+.. doctest::
+
+    >>> wthresh = pysal.threshold_binaryW_from_array(data, 2)
+    >>> set(wthresh.neighbors[0]) == set([1, 2, 5, 6, 10])
+    True
+    >>> set(wthresh.neighbors[1]) == set( [0, 2, 5, 6, 7, 11, 3])
+    True
+    >>> wthresh.weights[0]
+    [1, 1, 1, 1, 1]
+    >>> wthresh.weights[1]
+    [1, 1, 1, 1, 1, 1, 1]
+    >>> 
+
+As can be seen in the above example, the number of neighbors is likely to vary
+across observations with distance band weights in contrast to what holds for
+knn weights.
+
+Distance band weights can be generated for shapefiles as well as arrays of points. [#]_ First, the 
+minimum nearest neighbor distance should be determined so that each unit is assured of at least one 
+neighbor:
+
+.. doctest::
+
+    >>> thresh = pysal.min_threshold_dist_from_shapefile("../pysal/examples/columbus.shp")
+    >>> thresh
+    0.61886415807685413
+
+with this threshold in hand, the distance band weights are obtained as:
+
+.. doctest::
+
+    >>> wt = pysal.threshold_binaryW_from_shapefile("../pysal/examples/columbus.shp", thresh)
+    >>> wt.min_neighbors
+    1
+    >>> wt.histogram
+    [(1, 4), (2, 8), (3, 6), (4, 2), (5, 5), (6, 8), (7, 6), (8, 2), (9, 6), (10, 1), (11, 1)]
+    >>> set(wt.neighbors[0]) == set([1,2])
+    True
+    >>> set(wt.neighbors[1]) == set([3,0])
+    True
+
+Distance band weights can also be specified to take on continuous values rather
+than binary, with the values set to the inverse distance separating each pair
+within a given threshold distance. We illustrate this with a small set of 6
+points:
+
+.. doctest::
+
+    >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+    >>> wid = pysal.threshold_continuousW_from_array(points,14.2)
+    >>> wid.weights[0]
+    [0.10000000000000001, 0.089442719099991588]
+
+If we change the distance decay exponent to -2.0 the result is so called gravity weights:
+
+.. doctest::
+
+    >>> wid2 = pysal.threshold_continuousW_from_array(points,14.2,alpha = -2.0)
+    >>> wid2.weights[0]
+    [0.01, 0.0079999999999999984]
+
+
+Kernel Weights
+--------------
+
+A combination of distance based thresholds together with  continuously valued
+weights is supported through kernel weights:
+
+.. doctest::
+
+    >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+    >>> kw = pysal.Kernel(points)
+    >>> kw.weights[0]
+    [1.0, 0.500000049999995, 0.4409830615267465]
+    >>> kw.neighbors[0]
+    [0, 1, 3]
+    >>> kw.bandwidth
+    array([[ 20.000002],
+           [ 20.000002],
+           [ 20.000002],
+           [ 20.000002],
+           [ 20.000002],
+           [ 20.000002]])
+
+
+The bandwidth attribute plays the role of the distance threshold with kernel
+weights, while the form of the kernel function determines the distance decay
+in the derived continuous weights (the following are available:
+'triangular','uniform','quadratic','epanechnikov','quartic','bisquare','gaussian').
+In the above example, the bandwidth is set to the default value and fixed
+across the observations.  The user could specify a different value for a fixed
+bandwidth:
+
+.. doctest::
+
+    >>> kw15 = pysal.Kernel(points,bandwidth = 15.0)
+    >>> kw15[0]
+    {0: 1.0, 1: 0.33333333333333337, 3: 0.2546440075000701}
+    >>> kw15.neighbors[0]
+    [0, 1, 3]
+    >>> kw15.bandwidth
+    array([[ 15.],
+           [ 15.],
+           [ 15.],
+           [ 15.],
+           [ 15.],
+           [ 15.]])
+
+which results in smaller weights for the first unit's more distant neighbors.  Adaptive bandwidths (i.e., different bandwidths
+for each unit) can also be user specified:
+
+.. doctest::
+
+    >>> bw = [25.0,15.0,25.0,16.0,14.5,25.0]
+    >>> kwa = pysal.Kernel(points,bandwidth = bw)
+    >>> kwa.weights[0]
+    [1.0, 0.6, 0.552786404500042, 0.10557280900008403]
+    >>> kwa.neighbors[0]
+    [0, 1, 3, 4]
+    >>> kwa.bandwidth
+    array([[ 25. ],
+           [ 15. ],
+           [ 25. ],
+           [ 16. ],
+           [ 14.5],
+           [ 25. ]])
+
+Alternatively the adaptive bandwidths could be defined endogenously:
+
+.. doctest::
+
+    >>> kwea = pysal.Kernel(points,fixed = False)
+    >>> kwea.weights[0]
+    [1.0, 0.10557289844279438, 9.99999900663795e-08]
+    >>> kwea.neighbors[0]
+    [0, 1, 3]
+    >>> kwea.bandwidth
+    array([[ 11.18034101],
+           [ 11.18034101],
+           [ 20.000002  ],
+           [ 11.18034101],
+           [ 14.14213704],
+           [ 18.02775818]])
+
+Finally, the kernel function could be changed (with endogenous adaptive bandwidths):
+
+.. doctest::
+
+    >>> kweag = pysal.Kernel(points,fixed = False,function = 'gaussian')
+    >>> kweag.weights[0]
+    [0.3989422804014327, 0.2674190291577696, 0.2419707487162134]
+    >>> kweag.bandwidth
+    array([[ 11.18034101],
+           [ 11.18034101],
+           [ 20.000002  ],
+           [ 11.18034101],
+           [ 14.14213704],
+           [ 18.02775818]])
+
+
+More details on kernel weights can be found in 
+:class:`~pysal.weights.Distance.Kernel`. 
+
+
+A Closer look at W
+==================
+
+Although the three different types of spatial weights illustrated above cover a wide array of approaches
+towards specifying spatial relations, they all share common attributes from the base W class in PySAL. Here 
+we take a closer look at some of the more useful properties of this class.
+
+Attributes of W
+-----------------------------
+W objects come with a whole bunch of useful attributes that may help you when
+dealing with spatial weights matrices. To see a list of all of them, same as
+with any other Python object, type:
+
+    >>> import pysal
+    >>> help(pysal.W)
+
+If you want to be more specific and learn, for example, about the attribute
+`s0`, then type:
+
+    >>> help(pysal.W.s0)
+    Help on property:
+
+        float
+            
+        .. math::
+                
+            s0 = \sum_i \sum_j w_{i,j}
+
+Weight Transformations
+----------------------
+
+Often there is a need to apply a transformation to the spatial weights, such as in the case of row standardization.
+Here each value in the row of the spatial weights matrix is rescaled to sum to one:
+
+.. math::
+   
+     ws_{i,j} = w_{i,j}/ \sum_j w_{i,j}
+
+This and other weights transformations in PySAL are supported by the transform property of the W class. To see this 
+let's build a simple contiguity object for the Columbus data set:
+
+.. doctest::
+
+    >>> w = pysal.rook_from_shapefile("../pysal/examples/columbus.shp")
+    >>> w.weights[0]
+    [1.0, 1.0]
+
+We can row standardize this by setting the transform property:
+
+.. doctest::
+
+    >>> w.transform = 'r'
+    >>> w.weights[0]
+    [0.5, 0.5]
+
+Supported transformations are the following:
+    
+    * '`b`': binary.
+    * '`r`': row standardization.
+    * '`v`': variance stabilizing.
+
+If the original weights (unstandardized) are required, the transform property can be reset:
+
+.. doctest::
+
+    >>> w.transform = 'o'
+    >>> w.weights[0]
+    [1.0, 1.0]
+ 
+Behind the scenes the transform property is updating all other characteristics of the spatial weights that are a function of the
+values and these standardization operations, freeing the user from having to keep these other attributes updated. To determine the current
+value of the transformation, simply query this attribute:
+
+.. doctest::
+
+    >>> w.transform
+    'O'
+
+More details on other transformations that are supported in W can be found in
+:class:`pysal.weights.W`. 
+
+
+
+W related functions
+===================
+
+Generating a full array
+-----------------------
+As the underlying data structure of the weights in W is based on a sparse representation, there may be a need to work with the full numpy array.
+This is supported through the full method of W:
+
+.. doctest::
+
+    >>> wf = w.full()
+    >>> len(wf)
+    2
+
+The first element of the return from w.full is the numpy array:
+    
+.. doctest::
+
+    >>> wf[0].shape
+    (49, 49)
+
+while the second element contains the ids for the row (column) ordering of the array:
+
+.. doctest::
+
+    >>> wf[1][0:5]
+    [0, 1, 2, 3, 4]
+
+If only the array is required, a simple Python slice can be used:
+
+.. doctest::
+
+    >>> wf = w.full()[0]
+    
+
+Shimbel Matrices
+----------------
+The Shimbel matrix for a set of n objects contains the shortest path distance
+separating each pair of units.  This has wide use in spatial analysis for
+solving different types of clustering and optimization problems. Using the
+function `shimbel` with a `W` instance as an argument generates this
+information:
+
+.. doctest::
+
+    >>> w = pysal.lat2W(3,3)
+    >>> ws = pysal.shimbel(w)
+    >>> ws[0]
+    [-1, 1, 2, 1, 2, 3, 2, 3, 4]
+
+Thus we see that observation 0 (the northwest cell of our 3x3 lattice) is a first order neighbor to observations 1 and 3, second order
+neighbor to observations 2, 4, and 6, a third order neighbor to 5, and 7, and a fourth order neighbor to observation 8 (the extreme southeast 
+cell in the lattice). The position of the -1 simply denotes the focal unit.
+
+Higher Order Contiguity Weights
+-------------------------------
+
+Closely related to the shortest path distances is the concept of a spatial weight based on a particular order of contiguity. For example, we could
+define the second order contiguity relations using:
+
+.. doctest::
+
+    >>> w2 = pysal.higher_order(w, 2)
+    >>> w2.neighbors[0]
+    [4, 6, 2]
+
+or a fourth order set of weights:
+
+.. doctest::
+
+    >>> w4 = pysal.higher_order(w, 4)
+    WARNING: there are 5 disconnected observations
+    Island ids:  [1, 3, 4, 5, 7]
+    >>> w4.neighbors[0]
+    [8]
+
+In both cases a new instance of the W class is returned with the weights and neighbors defined using the particular order of contiguity.
+
+Spatial Lag
+-----------
+
+The final function related to spatial weights that we illustrate here is used to construct a new variable called the spatial lag. The spatial
+lag is a function of the attribute values observed at neighboring locations. For example, if we continue with our regular 3x3 lattice and
+create an attribute variable y:
+
+.. doctest::
+
+    >>> import numpy as np
+    >>> y = np.arange(w.n)
+    >>> y
+    array([0, 1, 2, 3, 4, 5, 6, 7, 8])
+
+then the spatial lag can be constructed with:
+    
+.. doctest::
+
+    >>> yl = pysal.lag_spatial(w,y)
+    >>> yl
+    array([  4.,   6.,   6.,  10.,  16.,  14.,  10.,  18.,  12.])
+
+Mathematically, the spatial lag is a weighted sum of neighboring attribute values
+
+.. math::
+    
+    yl_i = \sum_j w_{i,j} y_j
+
+In the example above, the weights were binary, based on the rook criterion. If we row standardize our W object first
+and then recalculate the lag, it takes the form of a weighted average of the neighboring attribute values:
+
+.. doctest::
+
+    >>> w.transform = 'r'
+    >>> ylr = pysal.lag_spatial(w,y)
+    >>> ylr
+    array([ 2.        ,  2.        ,  3.        ,  3.33333333,  4.        ,
+            4.66666667,  5.        ,  6.        ,  6.        ])
+
+.. _id_order:
+
+One important consideration in calculating the spatial lag is that the ordering
+of the values in y aligns with the underlying order in W.  In cases where the
+source for your attribute data is different from the one to construct your
+weights you may need to reorder your y values accordingly.  To check if this is
+the case you can find the order in W as follows:
+
+.. doctest::
+
+    >>> w.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8]
+
+In this case the lag_spatial function assumes that the first value in the y
+attribute corresponds to unit 0 in the lattice (northwest cell), while the last
+value in y would correspond to unit 8 (southeast cell). In other words, for the
+value of the spatial lag to be valid the number of elements in y must match w.n
+and the orderings must be aligned. 
+
+Fortunately, for the common use case where both the attribute and weights information come from a
+shapefile (and its dbf), PySAL handles the alignment automatically: [#]_
+
+.. doctest::
+
+    >>> w = pysal.rook_from_shapefile("../pysal/examples/columbus.shp")
+    >>> f = pysal.open("../pysal/examples/columbus.dbf")
+    >>> f.header
+    ['AREA', 'PERIMETER', 'COLUMBUS_', 'COLUMBUS_I', 'POLYID', 'NEIG', 'HOVAL', 'INC', 'CRIME', 'OPEN', 'PLUMB', 'DISCBD', 'X', 'Y', 'NSA', 'NSB', 'EW', 'CP', 'THOUS', 'NEIGNO']
+    >>> y = np.array(f.by_col['INC'])
+    >>> w.transform = 'r'
+    >>> y
+    array([ 19.531   ,  21.232   ,  15.956   ,   4.477   ,  11.252   ,
+            16.028999,   8.438   ,  11.337   ,  17.586   ,  13.598   ,
+             7.467   ,  10.048   ,   9.549   ,   9.963   ,   9.873   ,
+             7.625   ,   9.798   ,  13.185   ,  11.618   ,  31.07    ,
+            10.655   ,  11.709   ,  21.155001,  14.236   ,   8.461   ,
+             8.085   ,  10.822   ,   7.856   ,   8.681   ,  13.906   ,
+            16.940001,  18.941999,   9.918   ,  14.948   ,  12.814   ,
+            18.739   ,  17.017   ,  11.107   ,  18.476999,  29.833   ,
+            22.207001,  25.872999,  13.38    ,  16.961   ,  14.135   ,
+            18.323999,  18.950001,  11.813   ,  18.796   ])
+    >>> yl = pysal.lag_spatial(w,y)
+    >>> yl
+    array([ 18.594     ,  13.32133333,  14.123     ,  14.94425   ,
+            11.817857  ,  14.419     ,  10.283     ,   8.3364    ,
+            11.7576665 ,  19.48466667,  10.0655    ,   9.1882    ,
+             9.483     ,  10.07716667,  11.231     ,  10.46185714,
+            21.94100033,  10.8605    ,  12.46133333,  15.39877778,
+            14.36333333,  15.0838    ,  19.93666633,  10.90833333,
+             9.7       ,  11.403     ,  15.13825   ,  10.448     ,
+            11.81      ,  12.64725   ,  16.8435    ,  26.0662505 ,
+            15.6405    ,  18.05175   ,  15.3824    ,  18.9123996 ,
+            12.2418    ,  12.76675   ,  18.5314995 ,  22.79225025,
+            22.575     ,  16.8435    ,  14.2066    ,  14.20075   ,
+            15.2515    ,  18.6079995 ,  26.0200005 ,  15.818     ,  14.303     ])
+    
+    >>> w.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48]
+
+Non-Zero Diagonal
+-----------------
+The typical weights matrix has zeros along the main diagonal. This has the
+practical result of excluding the self from any computation.  However, this is
+not always the desired situation, and so PySAL offers a function that adds
+values to the main diagonal of a W object.
+
+As an example, we can build a basic rook weights matrix, which has zeros on
+the diagonal, then insert ones along the diagonal: 
+
+.. doctest::
+
+    >>> w = pysal.lat2W(5, 5, id_type='string')
+    >>> w['id0']
+    {'id5': 1.0, 'id1': 1.0}
+    >>> w_const = pysal.weights.insert_diagonal(w)
+    >>> w_const['id0']
+    {'id5': 1.0, 'id0': 1.0, 'id1': 1.0}
+
+The default is to add ones to the diagonal, but the function allows any values to
+be added.
+
+
+WSets
+=====
+
+PySAL offers set-like manipulation of spatial weights matrices. While a W is
+more complex than a set, the two objects have a number of commonalities
+allowing for traditional set operations to have similar functionality on a W.
+Conceptually, we treat each neighbor pair as an element of a set, and then
+return the appropriate pairs based on the operation invoked (e.g. union,
+intersection, etc.).  A key distinction between a set and a W is that a W
+must keep track of the universe of possible pairs, even those that do not
+result in a neighbor relationship.  
+
+PySAL follows the naming conventions for Python sets, but adds optional flags
+allowing the user to control the shape of the weights object returned.  At
+this time, all the functions discussed in this section return a binary W no
+matter the weights objects passed in.
+
+Union
+-----
+
+The union of two weights objects returns a binary weights object, W, that
+includes all neighbor pairs that exist in either weights object.  This
+function can be used to simply join together two weights objects, say one for
+Arizona counties and another for California counties.  It can also be used 
+to join two weights objects that overlap as in the example below. 
+
+.. doctest::
+
+    >>> w1 = pysal.lat2W(4,4)
+    >>> w2 = pysal.lat2W(6,4)
+    >>> w = pysal.w_union(w1, w2)
+    >>> w1[0] == w[0]
+    True
+    >>> w1.neighbors[15]
+    [11, 14]
+    >>> w2.neighbors[15]
+    [11, 14, 19]
+    >>> w.neighbors[15]
+    [19, 11, 14]
+
+Intersection
+------------
+
+The intersection of two weights objects returns a binary weights object, W,
+that includes only those neighbor pairs that exist in both weights objects.
+Unlike the union case, where all pairs in either matrix are returned, the
+intersection only returns a subset of the pairs.  This leaves open the
+question of the shape of the weights matrix to return.  For example, you have
+one weights matrix of census tracts for City A and second matrix of tracts for
+Utility Company B's service area, and want to find the W for the tracts that
+overlap.  Depending on the research question, you may want the returned W to
+have the same dimensions as City A's weights matrix, the same as the utility
+company's weights matrix, a new dimensionality based on all the census tracts
+in either matrix or with the dimensionality of just those tracts in the
+overlapping area. All of these options are available via the w_shape parameter
+and the order that the matrices are passed to the function.  The following
+example uses the all case:
+
+.. doctest::
+
+    >>> w1 = pysal.lat2W(4,4)
+    >>> w2 = pysal.lat2W(6,4)
+    >>> w = pysal.w_intersection(w1, w2, 'all')
+    WARNING: there are 8 disconnected observations
+    Island ids:  [16, 17, 18, 19, 20, 21, 22, 23]
+    >>> w1[0] == w[0]
+    True
+    >>> w1.neighbors[15]
+    [11, 14]
+    >>> w2.neighbors[15]
+    [11, 14, 19]
+    >>> w.neighbors[15]
+    [11, 14]
+    >>> w2.neighbors[16]
+    [12, 20, 17]
+    >>> w.neighbors[16]
+    []
+
+.. _difference:
+
+Difference
+----------
+
+The difference of two weights objects returns a binary weights object, W, that
+includes only neighbor pairs from the first object that are not in the second.
+Similar to the intersection function, the user must select the shape of the
+weights object returned using the w_shape parameter.  The user must also
+consider the constrained parameter which controls whether the observations and
+the neighbor pairs are differenced or just the neighbor pairs are differenced.
+If you were to apply the difference function to our city and utility company
+example from the intersection section above, you must decide whether or not
+pairs that exist along the border of the regions should be considered
+different or not.  It boils down to whether the tracts should be differenced
+first and then the differenced pairs identified (constrained=True), or if the
+differenced pairs should be identified based on the sets of pairs in the
+original weights matrices (constrained=False).  In the example below we
+difference weights matrices from regions with partial overlap.
+
+.. doctest::
+
+    >>> w1 = pysal.lat2W(6,4)
+    >>> w2 = pysal.lat2W(4,4)
+    >>> w1.neighbors[15]
+    [11, 14, 19]
+    >>> w2.neighbors[15]
+    [11, 14]
+    >>> w = pysal.w_difference(w1, w2, w_shape = 'w1', constrained = False)
+    WARNING: there are 12 disconnected observations
+    Island ids:  [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+    >>> w.neighbors[15]
+    [19]
+    >>> w.neighbors[19]
+    [15, 18, 23]
+    >>> w = pysal.w_difference(w1, w2, w_shape = 'min', constrained = False)
+    >>> 15 in w.neighbors
+    False
+    >>> w.neighbors[19]
+    [18, 23]
+    >>> w = pysal.w_difference(w1, w2, w_shape = 'w1', constrained = True)
+    WARNING: there are 16 disconnected observations
+    Island ids:  [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+    >>> w.neighbors[15]
+    []
+    >>> w.neighbors[19]
+    [18, 23]
+    >>> w = pysal.w_difference(w1, w2, w_shape = 'min', constrained = True)
+    >>> 15 in w.neighbors
+    False
+    >>> w.neighbors[19]
+    [18, 23]
+
+The difference function can be used to construct a bishop
+:ref:`contiguity weights matrix <contiguity>` 
+by differencing a queen and rook matrix.
+
+.. doctest::
+
+        >>> wr = pysal.lat2W(5,5)
+        >>> wq = pysal.lat2W(5,5,rook = False)
+        >>> wb = pysal.w_difference(wq, wr,constrained = False)
+        >>> wb.neighbors[0]
+        [6]
+
+
+Symmetric Difference
+--------------------
+
+Symmetric difference of two weights objects returns a binary weights object,
+W, that includes only those neighbor pairs that appear in exactly one of the two matrices.
+This function offers options similar to those in the difference function
+described above.
+
+.. doctest::
+
+    >>> w1 = pysal.lat2W(6, 4)
+    >>> w2 = pysal.lat2W(2, 4)
+    >>> w_lower = pysal.w_difference(w1, w2, w_shape = 'min', constrained = True)
+    >>> w_upper = pysal.lat2W(4, 4)
+    >>> w = pysal.w_symmetric_difference(w_lower, w_upper, 'all', False)
+    >>> w_lower.id_order
+    [8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
+    >>> w_upper.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+    >>> w.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
+    >>> w.neighbors[11]
+    [7]
+    >>> w = pysal.w_symmetric_difference(w_lower, w_upper, 'min', False)
+    WARNING: there are 8 disconnected observations
+    Island ids:  [0, 1, 2, 3, 4, 5, 6, 7]
+    >>> 11 in w.neighbors
+    False
+    >>> w.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23]
+    >>> w = pysal.w_symmetric_difference(w_lower, w_upper, 'all', True)
+    WARNING: there are 16 disconnected observations
+    Island ids:  [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+    >>> w.neighbors[11]
+    []
+    >>> w = pysal.w_symmetric_difference(w_lower, w_upper, 'min', True)
+    WARNING: there are 8 disconnected observations
+    Island ids:  [0, 1, 2, 3, 4, 5, 6, 7]
+    >>> 11 in w.neighbors
+    False
+
+Subset
+------
+
+Subset of a weights object returns a binary weights object, W, that includes
+only those observations provided by the user.  It also can be used to add
+islands to a previously existing weights object.
+
+.. doctest::
+
+    >>> w1 = pysal.lat2W(6, 4)
+    >>> w1.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
+    >>> ids = range(16)
+    >>> w = pysal.w_subset(w1, ids)
+    >>> w.id_order
+    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+    >>> w1[0] == w[0]
+    True
+    >>> w1.neighbors[15]
+    [11, 14, 19]
+    >>> w.neighbors[15]
+    [11, 14]
+
+
+.. _wsp:
+
+WSP
+===
+A thin PySAL weights object is available to users with extremely large weights
+matrices, on the order of 2 million or more observations, or users interested
+in holding many large weights matrices in RAM simultaneously. The
+:class:`pysal.weights.WSP` is a thin weights object that does not include the
+neighbors and weights dictionaries, but does contain the scipy.sparse form of
+the weights.  For many PySAL functions the W and WSP objects can be used
+interchangeably.  
+
+A WSP object can be constructed from a `Matrix Market
+<http://math.nist.gov/MatrixMarket/>`_ file (see :ref:`mtx` for more info on
+reading and writing mtx files in PySAL):
+
+.. doctest::
+
+    >>> mtx = pysal.open("../pysal/examples/wmat.mtx", 'r')
+    >>> wsp = mtx.read(sparse=True)
+
+or built directly from a scipy.sparse object:
+
+.. doctest::
+
+    >>> import scipy.sparse
+    >>> rows = [0, 1, 1, 2, 2, 3]
+    >>> cols = [1, 0, 2, 1, 3, 3]
+    >>> weights =  [1, 0.75, 0.25, 0.9, 0.1, 1]
+    >>> sparse = scipy.sparse.csr_matrix((weights, (rows, cols)), shape=(4,4))
+    >>> w = pysal.weights.WSP(sparse)
+
+The WSP object has a subset of the attributes of a W object; for example:
+
+.. doctest::
+
+    >>> w.n
+    4
+    >>> w.s0
+    4.0
+    >>> w.trcWtW_WW
+    6.3949999999999996
+
+The following functionality is available to convert from a W to a WSP:
+
+.. doctest::
+
+    >>> w = pysal.weights.lat2W(5,5)
+    >>> w.s0
+    80.0
+    >>> wsp = pysal.weights.WSP(w.sparse)
+    >>> wsp.s0
+    80.0
+
+and from a WSP to W:
+
+.. doctest::
+
+    >>> sp = pysal.weights.lat2SW(5, 5)
+    >>> wsp = pysal.weights.WSP(sp)
+    >>> wsp.s0
+    80
+    >>> w = pysal.weights.WSP2W(wsp)
+    >>> w.s0
+    80
+    
+
+Further Information 
+====================
+
+For further details see the :doc:`Weights  API <../../library/weights/index>`.
+
+
+
+.. rubric:: Footnotes
+
+.. [#] Although this tutorial provides an introduction to the functionality of the PySAL weights class, it is not exhaustive. Complete documentation for the class and associated functions can be found by accessing the help from within a Python interpreter. 
+.. [#] The dictionaries for the weights and neighbors attributes in W are read-only.
+.. [#] Ties at the k-nn distance band are randomly broken to ensure each observation has exactly k neighbors.
+.. [#] If the shapefile contains geographical coordinates these distance calculations will be misleading and the user should first project their coordinates using a GIS.
+.. [#] The ordering exploits the one-to-one relation between a record in the DBF file and the shape in the shapefile.
+	
+	
diff --git a/pysal/COPYING b/pysal/COPYING
new file mode 100644
index 0000000..9125da2
--- /dev/null
+++ b/pysal/COPYING
@@ -0,0 +1,25 @@
+Copyright (c) 2009-13 Sergio J. Rey <sjsrey at gmail.com>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright notice,
+      this list of conditions and the following disclaimer.
+    * Redistributions  in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution. 
+    * Neither the name of the PySAL project team nor the names of its 
+      contributors may be used to endorse or promote products derived from
+      this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/pysal/__init__.py b/pysal/__init__.py
new file mode 100644
index 0000000..16fcaf4
--- /dev/null
+++ b/pysal/__init__.py
@@ -0,0 +1,94 @@
+"""
+Python Spatial Analysis Library
+===============================
+
+
+Documentation
+-------------
+PySAL documentation is available in two forms: python docstrings and an html \
+        webpage at http://pysal.org/
+
+Available sub-packages
+----------------------
+
+cg
+    Basic data structures and tools for Computational Geometry
+core
+    Basic functions used by several sub-packages
+esda
+    Tools for Exploratory Spatial Data Analysis
+examples
+    Example data sets used by several sub-packages for examples and testing
+network
+    Spatial analysis on networks
+region
+    Regionalization algorithms and spatially constrained clustering
+spatial_dynamics
+    Space-time exploratory methods and clustering
+spreg
+    Spatial regression and econometrics
+weights
+    Tools for creating and manipulating weights
+contrib
+    Package for interfacing with third-party libraries
+
+Utilities
+---------
+`fileio`_
+    Tool for file input and output, supports many well known file formats
+"""
+import pysal.cg
+import pysal.core
+
+from pysal.version import version
+
+# toplevel imports to be explicit
+from pysal.esda.moran import Moran, Moran_BV, Moran_BV_matrix, Moran_Local
+from pysal.esda.geary import Geary
+from pysal.esda.join_counts import Join_Counts
+from pysal.esda.gamma import Gamma
+from pysal.esda.getisord import G, G_Local
+from pysal.esda.mapclassify import quantile, binC, bin, bin1d, Equal_Interval, \
+    Percentiles
+from pysal.esda.mapclassify import Box_Plot, Quantiles, Std_Mean, Maximum_Breaks
+from pysal.esda.mapclassify import Natural_Breaks, Fisher_Jenks, Jenks_Caspall
+from pysal.esda.mapclassify import Jenks_Caspall_Sampled, Jenks_Caspall_Forced
+from pysal.esda.mapclassify import User_Defined, Max_P_Classifier, gadf
+from pysal.esda.mapclassify import K_classifiers
+from pysal.inequality.theil import Theil, TheilD, TheilDSim
+from pysal.region.maxp import Maxp, Maxp_LISA
+from pysal.spatial_dynamics import Markov, Spatial_Markov, LISA_Markov, \
+    SpatialTau, Theta, Tau
+from pysal.spatial_dynamics import ergodic
+from pysal.spatial_dynamics import directional
+from pysal.weights import W, lat2W, block_weights, comb, full, shimbel, \
+    order, higher_order, higher_order_sp, remap_ids, hexLat2W, WSP, regime_weights
+from pysal.weights.Distance import knnW, Kernel, DistanceBand
+from pysal.weights.Contiguity import buildContiguity
+from pysal.weights.spatial_lag import lag_spatial
+from pysal.weights.Wsets import w_union, w_intersection, w_difference
+from pysal.weights.Wsets import w_symmetric_difference, w_subset
+from pysal.weights.user import queen_from_shapefile, rook_from_shapefile, \
+    knnW_from_array, knnW_from_shapefile, threshold_binaryW_from_array,\
+    threshold_binaryW_from_shapefile, threshold_continuousW_from_array,\
+    threshold_continuousW_from_shapefile, kernelW, kernelW_from_shapefile,\
+    adaptive_kernelW, adaptive_kernelW_from_shapefile,\
+    min_threshold_dist_from_shapefile, build_lattice_shapefile
+from pysal.core.util.weight_converter import weight_convert
+import pysal.spreg
+import pysal.examples
+from pysal.network.network import Network, NetworkG, NetworkK, NetworkF
+
+
+# Load the IOHandlers
+from pysal.core import IOHandlers
+# Assign pysal.open to dispatcher
+open = pysal.core.FileIO.FileIO
+
+#__all__=[]
+#import esda,weights
+#__all__+=esda.__all__
+#__all__+=weights.__all__
+
+# Constants
+MISSINGVALUE = None  # used by fileIO to flag missing values.
diff --git a/pysal/cg/__init__.py b/pysal/cg/__init__.py
new file mode 100644
index 0000000..691068a
--- /dev/null
+++ b/pysal/cg/__init__.py
@@ -0,0 +1,10 @@
+"""
+A module for computational geometry.
+"""
+from shapes import *
+from standalone import *
+from locators import *
+from rtree import *
+from kdtree import *
+from sphere import *
+#why don't we import collection?
diff --git a/pysal/cg/kdtree.py b/pysal/cg/kdtree.py
new file mode 100644
index 0000000..f8ebd49
--- /dev/null
+++ b/pysal/cg/kdtree.py
@@ -0,0 +1,246 @@
+"""
+KDTree for PySAL: Python Spatial Analysis Library.
+
+Adds support for Arc Distance to scipy.spatial.KDTree.
+"""
+import sys
+import math
+import scipy.spatial
+import numpy
+from scipy import inf
+import sphere
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+DISTANCE_METRICS = ['Euclidean', 'Arc']
+FLOAT_EPS = numpy.finfo(float).eps
+
+
class Arc_KDTree(scipy.spatial.KDTree):
    def __init__(self, data, leafsize=10, radius=1.0):
        """
        KDTree using Arc Distance instead of Euclidean Distance.

        Returned distances are based on radius.
        For example, pass in the radius of earth in miles to get back miles.
        Assumes data are Lng/Lat, does not account for geoids.

        For more information see docs for scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> d,i = kd.query((90,0), k=4)
        >>> d
        array([ 10007.54339801,  10007.54339801,  10007.54339801,  10007.54339801])
        >>> circumference = 2*math.pi*sphere.RADIUS_EARTH_KM
        >>> round(d[0],5) == round(circumference/4.0,5)
        True
        """
        self.radius = radius
        self.circumference = 2 * math.pi * radius
        # Points are stored internally as 3D (XYZ) coordinates on the sphere.
        # A list comprehension (rather than map) also yields a list on
        # Python 3, where map returns an iterator.
        scipy.spatial.KDTree.__init__(
            self, [sphere.toXYZ(pt) for pt in data], leafsize)

    def _check_max_arcdist(self, r, name='r'):
        """
        Raise ValueError if the arc distance `r` exceeds half the sphere's
        circumference (no two points on a sphere are farther apart).

        BUG FIX: the original inline checks wrote
        ``"... (%f)." % self.circumference * 0.5`` -- the ``%`` operator binds
        tighter than ``*``, so the formatted string was multiplied by 0.5,
        raising TypeError instead of the intended ValueError.
        """
        if r > 0.5 * self.circumference:
            raise ValueError(
                "%s, must not exceed 1/2 circumference of the sphere (%f)."
                % (name, self.circumference * 0.5))

    def _toXYZ(self, x):
        """
        Convert query point(s) from Lng/Lat to XYZ coordinates, passing
        through input that already appears to be XYZ (trailing dimension 3).
        """
        if not issubclass(type(x), numpy.ndarray):
            x = numpy.array(x)
        if len(x.shape) == 2 and x.shape[1] == 3:  # assume point is already in XYZ
            return x
        if len(x.shape) == 1 and x.shape[0] == 3:  # assume point is already in XYZ
            return x
        elif len(x.shape) == 1:
            x = numpy.array(sphere.toXYZ(x))
        else:
            # list of Lng/Lat points; convert each one
            x = [sphere.toXYZ(pt) for pt in x]
        return x

    def count_neighbors(self, other, r, p=2):
        """
        See scipy.spatial.KDTree.count_neighbors

        Parameters
        ----------
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> kd.count_neighbors(kd,0)
        4
        >>> circumference = 2.0*math.pi*sphere.RADIUS_EARTH_KM
        >>> kd.count_neighbors(kd,circumference/2.0)
        16
        """
        self._check_max_arcdist(r)
        r = sphere.arcdist2linear(r, self.radius)
        return scipy.spatial.KDTree.count_neighbors(self, other, r)

    def query(self, x, k=1, eps=0, p=2, distance_upper_bound=inf):
        """
        See scipy.spatial.KDTree.query

        Parameters
        ----------
        x : array-like, last dimension self.m
            query points are lng/lat.
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> d,i = kd.query((90,0), k=4)
        >>> d
        array([ 10007.54339801,  10007.54339801,  10007.54339801,  10007.54339801])
        >>> circumference = 2*math.pi*sphere.RADIUS_EARTH_KM
        >>> round(d[0],5) == round(circumference/4.0,5)
        True
        >>> d,i = kd.query(kd.data, k=3)
        >>> d2,i2 = kd.query(pts, k=3)
        >>> (d == d2).all()
        True
        >>> (i == i2).all()
        True
        """
        # tolerances and bounds are given as arc distances; convert to the
        # linear (chord) distances the underlying Euclidean tree works in
        eps = sphere.arcdist2linear(eps, self.radius)
        if distance_upper_bound != inf:
            distance_upper_bound = sphere.arcdist2linear(
                distance_upper_bound, self.radius)
        d, i = scipy.spatial.KDTree.query(self, self._toXYZ(x), k,
                                          eps=eps, distance_upper_bound=distance_upper_bound)
        dims = len(d.shape)
        r = self.radius
        if dims == 0:
            return sphere.linear2arcdist(d, r), i
        if dims == 1:
            # TODO: implement linear2arcdist on numpy arrays
            d = [sphere.linear2arcdist(x, r) for x in d]
        elif dims == 2:
            d = [[sphere.linear2arcdist(x, r) for x in row] for row in d]
        return numpy.array(d), i

    def query_ball_point(self, x, r, p=2, eps=0):
        """
        See scipy.spatial.KDTree.query_ball_point

        Parameters
        ----------
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> circumference = 2*math.pi*sphere.RADIUS_EARTH_KM
        >>> kd.query_ball_point(pts, circumference/4.)
        array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]], dtype=object)
        >>> kd.query_ball_point(pts, circumference/2.)
        array([[0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2, 3]], dtype=object)
        """
        self._check_max_arcdist(r)
        eps = sphere.arcdist2linear(eps, self.radius)
        # scipy.spatial.KDTree.query_ball_point appears to ignore the eps
        # argument. There are small floating point errors moving back and
        # forth between coordinate systems, so pad the radius by
        # 3 * float epsilon to absorb them.
        r = sphere.arcdist2linear(r, self.radius) + FLOAT_EPS * 3
        return scipy.spatial.KDTree.query_ball_point(self, self._toXYZ(x), r, eps=eps)

    def query_ball_tree(self, other, r, p=2, eps=0):
        """
        See scipy.spatial.KDTree.query_ball_tree

        Parameters
        ----------
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> kd.query_ball_tree(kd, kd.circumference/4.)
        [[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]]
        >>> kd.query_ball_tree(kd, kd.circumference/2.)
        [[0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2, 3]]
        """
        if self.radius != other.radius:
            raise ValueError("Both trees must have the same radius.")
        self._check_max_arcdist(r)
        eps = sphere.arcdist2linear(eps, self.radius)
        # pad the radius to absorb coordinate round-trip floating point error
        r = sphere.arcdist2linear(r, self.radius) + FLOAT_EPS * 3
        return scipy.spatial.KDTree.query_ball_tree(self, other, r, eps=eps)

    def query_pairs(self, r, p=2, eps=0):
        """
        See scipy.spatial.KDTree.query_pairs

        Parameters
        ----------
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> kd.query_pairs(kd.circumference/4.)
        set([(0, 1), (1, 3), (2, 3), (0, 2)])
        >>> kd.query_pairs(kd.circumference/2.)
        set([(0, 1), (1, 2), (1, 3), (2, 3), (0, 3), (0, 2)])
        """
        self._check_max_arcdist(r)
        # pad the radius to absorb coordinate round-trip floating point error
        r = sphere.arcdist2linear(r, self.radius) + FLOAT_EPS * 3
        return scipy.spatial.KDTree.query_pairs(self, r, eps=eps)

    def sparse_distance_matrix(self, other, max_distance, p=2):
        """
        See scipy.spatial.KDTree.sparse_distance_matrix

        Parameters
        ----------
        p: ignored, kept to maintain compatibility with scipy.spatial.KDTree

        Examples
        --------
        >>> pts = [(0,90), (0,0), (180,0), (0,-90)]
        >>> kd = Arc_KDTree(pts, radius = sphere.RADIUS_EARTH_KM)
        >>> kd.sparse_distance_matrix(kd, kd.circumference/4.).todense()
        matrix([[     0.        ,  10007.54339801,  10007.54339801,      0.        ],
                [ 10007.54339801,      0.        ,      0.        ,  10007.54339801],
                [ 10007.54339801,      0.        ,      0.        ,  10007.54339801],
                [     0.        ,  10007.54339801,  10007.54339801,      0.        ]])
        >>> kd.sparse_distance_matrix(kd, kd.circumference/2.).todense()
        matrix([[     0.        ,  10007.54339801,  10007.54339801,  20015.08679602],
                [ 10007.54339801,      0.        ,  20015.08679602,  10007.54339801],
                [ 10007.54339801,  20015.08679602,      0.        ,  10007.54339801],
                [ 20015.08679602,  10007.54339801,  10007.54339801,      0.        ]])
        """
        if self.radius != other.radius:
            raise ValueError("Both trees must have the same radius.")
        self._check_max_arcdist(max_distance, name='max_distance')
        max_distance = sphere.arcdist2linear(
            max_distance, self.radius) + FLOAT_EPS * 3
        D = scipy.spatial.KDTree.sparse_distance_matrix(
            self, other, max_distance)
        D = D.tocoo()
        # convert the linear (chord) distances back to arc distances
        arc_data = [sphere.linear2arcdist(x, self.radius) for x in D.data]
        return scipy.sparse.coo_matrix((arc_data, (D.row, D.col))).todok()
+
+
def KDTree(data, leafsize=10, distance_metric='Euclidean', radius=1.0):
    """
    Build a kd-tree over `data` using either Euclidean or Arc distance.

    Parameters
    ----------
    data            : array-like of points (Lng/Lat when metric is 'Arc')
    leafsize        : kd-tree leaf size, passed through to scipy
    distance_metric : 'Euclidean' or 'Arc'
    radius          : sphere radius, used only for the 'Arc' metric

    Raises
    ------
    ValueError if distance_metric is not one of the supported metrics
    (the original silently returned None in that case).
    """
    if distance_metric == 'Euclidean':
        # cKDTree only gained the full KDTree interface in scipy 0.12.
        # BUG FIX: compare (major, minor) -- the original looked at the minor
        # version alone, so e.g. scipy 1.2 ("minor" 2 < 12) wrongly fell back
        # to the slower pure-Python KDTree.
        version = tuple(int(v) for v in scipy.version.version.split(".")[:2])
        if version < (0, 12):
            return scipy.spatial.KDTree(data, leafsize)
        else:
            return scipy.spatial.cKDTree(data, leafsize)
    elif distance_metric == 'Arc':
        return Arc_KDTree(data, leafsize, radius)
    else:
        raise ValueError("Unsupported distance_metric: %r" % (distance_metric,))
diff --git a/pysal/cg/locators.py b/pysal/cg/locators.py
new file mode 100644
index 0000000..ba35828
--- /dev/null
+++ b/pysal/cg/locators.py
@@ -0,0 +1,990 @@
+"""
+Computational geometry code for PySAL: Python Spatial Analysis Library.
+"""
+
+__author__ = "Sergio J. Rey, Xinyue Ye, Charles Schmidt, Andrew Winslow"
+__credits__ = "Copyright (c) 2005-2011 Sergio J. Rey"
+
+import math
+import copy
+import doctest
+from rtree import *
+from standalone import *
+from shapes import *
+
+__all__ = ["IntervalTree", "Grid", "BruteForcePointLocator",
+           "PointLocator", "PolygonLocator"]
+
+
class IntervalTree:
    """
    Representation of an interval tree. An interval tree is a data structure which is used to
    quickly determine which intervals in a set contain a value or overlap with a query interval.

    References
    ----------

    de Berg, van Kreveld, Overmars, Schwarzkopf. Computational Geometry: Algorithms and Application.
    212-217. Springer-Verlag, Berlin, 2000.
    """

    class _Node:
        """
        Private class representing a node in an interval tree.

        left_list holds the node's intervals sorted ascending by lower
        endpoint; right_list holds the same intervals sorted descending by
        upper endpoint. Each interval is a (lower, upper, item) tuple.
        """

        def __init__(self, val, left_list, right_list, left_node, right_node):
            self.val = val
            self.left_list = left_list
            self.right_list = right_list
            self.left_node = left_node
            self.right_node = right_node

        def query(self, q):
            """Return the items of this node's intervals that contain value q."""
            i = 0
            if q < self.val:
                # intervals sorted by lower endpoint: scan while they start <= q
                while i < len(self.left_list) and self.left_list[i][0] <= q:
                    i += 1
                return [rec[2] for rec in self.left_list[0:i]]
            else:
                # intervals sorted descending by upper endpoint: scan while they end >= q
                while i < len(self.right_list) and self.right_list[i][1] >= q:
                    i += 1
                return [rec[2] for rec in self.right_list[0:i]]

        def add(self, i):
            """
            Adds an interval to the IntervalTree node.
            """
            if not i[0] <= self.val <= i[1]:
                raise Exception('Attempt to add an interval to an inappropriate IntervalTree node')
            # BUG FIX: compare against the interval endpoints (i[0]/i[1]);
            # the original compared whole tuples to scalars.
            index = 0
            while index < len(self.left_list) and self.left_list[index][0] < i[0]:
                index = index + 1
            self.left_list.insert(index, i)
            index = 0
            while index < len(self.right_list) and self.right_list[index][1] > i[1]:
                index = index + 1
            self.right_list.insert(index, i)

        def remove(self, i):
            """
            Removes an interval from the IntervalTree node.

            BUG FIX: the original binary search compared interval tuples to
            scalars, referenced a nonexistent ``right_left`` attribute, and
            could loop forever after a successful pop. A linear remove is
            simple and correct; both lists always contain the same intervals.
            """
            try:
                self.left_list.remove(i)
                self.right_list.remove(i)
            except ValueError:
                raise Exception('Attempt to remove an unknown interval')

    def __init__(self, intervals):
        """
        __init__((number, number, x) list) -> IntervalTree
        Returns an interval tree containing specified intervals.

        Parameters
        ----------
        intervals : a list of (lower, upper, item) elements to build the interval tree

        Examples
        --------

        >>> intervals = [(-1, 2, 'A'), (5, 9, 'B'), (3, 6, 'C')]
        >>> it = IntervalTree(intervals)
        >>> isinstance(it, IntervalTree)
        True
        """
        self._build(intervals)

    def _build(self, intervals):
        """
        Build an interval tree containing _intervals_.
        Each interval should be of the form (start, end, object).

        build((number, number, x) list) -> None

        Test tag: <tc>#is#IntervalTree.build</tc>
        """
        # reject malformed intervals (lower > upper) up front
        bad_is = [i for i in intervals if i[0] > i[1]]
        if bad_is:
            raise Exception('Attempt to build IntervalTree with invalid intervals: ' + str(bad_is))
        eps = list(set([i[0] for i in intervals] + [i[1] for i in intervals]))
        eps.sort()
        self.root = self._recursive_build(copy.copy(intervals), eps)

    def query(self, q):
        """
        Returns the intervals intersected by a value or interval.

        query((number, number) or number) -> x list

        Parameters
        ----------

        q : a value or interval to find intervals intersecting

        Examples
        --------

        >>> intervals = [(-1, 2, 'A'), (5, 9, 'B'), (3, 6, 'C')]
        >>> it = IntervalTree(intervals)
        >>> it.query((7, 14))
        ['B']
        >>> it.query(1)
        ['A']
        """
        if isinstance(q, tuple):
            return self._query_range(q, self.root)
        else:
            return self._query_points(q)

    def _query_range(self, q, root):
        """Recursively collect items of intervals overlapping range q."""
        if root is None:
            return []
        if root.val < q[0]:
            return self._query_range(q, root.right_node) + root.query(q[0])
        elif root.val > q[1]:
            return self._query_range(q, root.left_node) + root.query(q[1])
        else:
            # the node value lies inside q: all of this node's intervals
            # contain root.val and therefore overlap q
            return root.query(root.val) + self._query_range(q, root.left_node) + self._query_range(q, root.right_node)

    def _query_points(self, q):
        """Walk the tree root-to-leaf, collecting intervals containing q."""
        found = []
        cur = self.root
        while cur is not None:
            found.extend(cur.query(q))
            if q < cur.val:
                cur = cur.left_node
            else:
                cur = cur.right_node
        return found

    def _recursive_build(self, intervals, eps):
        def binary_search(lst, q):
            # leftmost index where q could be inserted keeping lst sorted
            l = 0
            r = len(lst)
            while l < r:
                m = (l + r) // 2  # // stays an int on Python 3 as well
                if lst[m] < q:
                    l = m + 1
                else:
                    r = m
            return l

        if eps == []:
            return None
        median = eps[len(eps) // 2]
        hit_is = []
        rem_is = []
        for i in intervals:
            if i[0] <= median <= i[1]:
                hit_is.append(i)
            else:
                rem_is.append(i)
        # key-based sorts replace the Python-2-only cmp-function sorts;
        # ordering is identical (ascending by lower / descending by upper)
        left_list = sorted(hit_is, key=lambda a: a[0])
        right_list = sorted(hit_is, key=lambda a: a[1], reverse=True)
        # BUG FIX: partition the remaining intervals around the median.
        # The original passed the full rem_is to BOTH subtrees, so every
        # remaining interval was stored twice and range queries returned
        # duplicate results.
        left_is = [i for i in rem_is if i[1] < median]
        right_is = [i for i in rem_is if i[0] > median]
        eps = list(set([i[0] for i in intervals] + [i[1] for i in intervals]))
        eps.sort()
        bp = binary_search(eps, median)
        left_eps = eps[:bp]
        right_eps = eps[bp:]
        node = (IntervalTree._Node(median, left_list, right_list,
                                   self._recursive_build(left_is, left_eps),
                                   self._recursive_build(right_is, right_eps)))
        return node
+
+
class Grid:
    """
    Representation of a binning data structure.
    """

    def __init__(self, bounds, resolution):
        """
        Returns a grid with specified properties.

        __init__(Rectangle, number) -> Grid

        Parameters
        ----------
        bounds      : the area for the grid to encompass
        resolution  : the diameter of each bin

        Examples
        --------
        TODO: complete this doctest
        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        """
        if resolution == 0:
            raise Exception('Cannot create grid with resolution 0')
        self.res = resolution
        # maps (i, j) bin coordinates -> list of (pt, item) pairs
        self.hash = {}
        self.x_range = (bounds.left, bounds.right)
        self.y_range = (bounds.lower, bounds.upper)
        try:
            self.i_range = int(math.ceil(
                (self.x_range[1] - self.x_range[0]) / self.res))
            self.j_range = int(math.ceil(
                (self.y_range[1] - self.y_range[0]) / self.res))
        except Exception:
            # BUG FIX: the original message referenced undefined local names
            # (x_range, y_range, res), so the handler itself raised NameError
            # and masked the real problem.
            raise Exception('Invalid arguments for Grid(): (' +
                            str(self.x_range) + ', ' + str(self.y_range) +
                            ', ' + str(self.res) + ')')

    def in_grid(self, loc):
        """
        Returns whether a 2-tuple location _loc_ lies inside the grid bounds.

        Test tag: <tc>#is#Grid.in_grid</tc>
        """
        return (self.x_range[0] <= loc[0] <= self.x_range[1] and
                self.y_range[0] <= loc[1] <= self.y_range[1])

    def __grid_loc(self, loc):
        # map a coordinate to its (i, j) bin, clamped to the grid's extent
        i = min(self.i_range, max(int((loc[0] - self.x_range[0]) /
                                      self.res), 0))
        j = min(self.j_range, max(int((loc[1] - self.y_range[0]) /
                                      self.res), 0))
        return (i, j)

    def add(self, item, pt):
        """
        Adds an item to the grid at a specified location.

        add(x, Point) -> x

        Parameters
        ----------
        item  : the item to insert into the grid
        pt : the location to insert the item at

        Examples
        --------

        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        >>> g.add('A', Point((4.2, 8.7)))
        'A'
        """
        if not self.in_grid(pt):
            raise Exception('Attempt to insert item at location outside grid bounds: ' + str(pt))
        grid_loc = self.__grid_loc(pt)
        if grid_loc in self.hash:
            self.hash[grid_loc].append((pt, item))
        else:
            self.hash[grid_loc] = [(pt, item)]
        return item

    def remove(self, item, pt):
        """
        Removes an item from the grid at a specified location.

        remove(x, Point) -> x

        Parameters
        ----------
        item : the item to remove from the grid
        pt : the location the item was added at

        Examples
        --------

        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        >>> g.add('A', Point((4.2, 8.7)))
        'A'
        >>> g.remove('A', Point((4.2, 8.7)))
        'A'
        """
        if not self.in_grid(pt):
            raise Exception('Attempt to remove item at location outside grid bounds: ' + str(pt))
        grid_loc = self.__grid_loc(pt)
        self.hash[grid_loc].remove((pt, item))
        if self.hash[grid_loc] == []:
            # drop empty bins so iteration over self.hash stays cheap
            del self.hash[grid_loc]
        return item

    def bounds(self, bounds):
        """
        Returns a list of items found in the grid within the bounds specified.

        bounds(Rectangle) -> x list

        Parameters
        ----------
        bounds : the rectangular range to search

        Examples
        --------

        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        >>> g.add('A', Point((1.0, 1.0)))
        'A'
        >>> g.add('B', Point((4.0, 4.0)))
        'B'
        >>> g.bounds(Rectangle(0, 0, 3, 3))
        ['A']
        >>> g.bounds(Rectangle(2, 2, 5, 5))
        ['B']
        >>> sorted(g.bounds(Rectangle(0, 0, 5, 5)))
        ['A', 'B']
        """
        x_range = (bounds.left, bounds.right)
        y_range = (bounds.lower, bounds.upper)
        items = []
        lower_left = self.__grid_loc((x_range[0], y_range[0]))
        upper_right = self.__grid_loc((x_range[1], y_range[1]))
        # only bins overlapping the query rectangle need to be inspected;
        # a list comprehension replaces the map/filter pipeline (identical
        # result on Python 2, still a list on Python 3)
        for i in range(lower_left[0], upper_right[0] + 1):
            for j in range(lower_left[1], upper_right[1] + 1):
                if (i, j) in self.hash:
                    items.extend(
                        [item for (pt, item) in self.hash[(i, j)]
                         if x_range[0] <= pt[0] <= x_range[1]
                         and y_range[0] <= pt[1] <= y_range[1]])
        return items

    def proximity(self, pt, r):
        """
        Returns a list of items found in the grid within a specified distance of a point.

        proximity(Point, number) -> x list

        Parameters
        ----------
        pt : the location to search around
        r  : the distance to search around the point

        Examples
        --------
        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        >>> g.add('A', Point((1.0, 1.0)))
        'A'
        >>> g.add('B', Point((4.0, 4.0)))
        'B'
        >>> g.proximity(Point((2.0, 1.0)), 2)
        ['A']
        >>> g.proximity(Point((6.0, 5.0)), 3.0)
        ['B']
        >>> sorted(g.proximity(Point((4.0, 1.0)), 4.0))
        ['A', 'B']
        """
        items = []
        lower_left = self.__grid_loc((pt[0] - r, pt[1] - r))
        upper_right = self.__grid_loc((pt[0] + r, pt[1] + r))
        for i in range(lower_left[0], upper_right[0] + 1):
            for j in range(lower_left[1], upper_right[1] + 1):
                if (i, j) in self.hash:
                    items.extend(
                        [item for (loc, item) in self.hash[(i, j)]
                         if get_points_dist(pt, loc) <= r])
        return items

    def nearest(self, pt):
        """
        Returns the nearest item to a point.

        nearest(Point) -> x

        Parameters
        ----------
        pt : the location to search near

        Examples
        --------
        >>> g = Grid(Rectangle(0, 0, 10, 10), 1)
        >>> g.add('A', Point((1.0, 1.0)))
        'A'
        >>> g.add('B', Point((4.0, 4.0)))
        'B'
        >>> g.nearest(Point((2.0, 1.0)))
        'A'
        >>> g.nearest(Point((7.0, 5.0)))
        'B'
        """
        # double the search radius until something is found or the whole
        # grid (all four corners) is within range
        search_size = self.res
        while (self.proximity(pt, search_size) == [] and
               (get_points_dist((self.x_range[0], self.y_range[0]), pt) > search_size or
                get_points_dist((self.x_range[1], self.y_range[0]), pt) > search_size or
                get_points_dist((self.x_range[0], self.y_range[1]), pt) > search_size or
                get_points_dist((self.x_range[1], self.y_range[1]), pt) > search_size)):
            search_size = 2 * search_size
        items = []
        lower_left = self.__grid_loc(
            (pt[0] - search_size, pt[1] - search_size))
        upper_right = self.__grid_loc(
            (pt[0] + search_size, pt[1] + search_size))
        for i in range(lower_left[0], upper_right[0] + 1):
            for j in range(lower_left[1], upper_right[1] + 1):
                if (i, j) in self.hash:
                    items.extend(
                        [(get_points_dist(pt, loc), item)
                         for (loc, item) in self.hash[(i, j)]])
        if items == []:
            return None
        return min(items)[1]
+
+
+class BruteForcePointLocator:
+    """
+    A class which does naive linear search on a set of Point objects.
+    """
    def __init__(self, points):
        """
        Creates a naive index of the points specified.

        __init__(Point list) -> BruteForcePointLocator

        Parameters
        ----------
        points : a list of points to index (Point list)

        Examples
        --------
        >>> pl = BruteForcePointLocator([Point((0, 0)), Point((5, 0)), Point((0, 10))])
        """
        # Keep a reference to the caller's list; every query scans it linearly.
        self._points = points
+
+    def nearest(self, query_point):
+        """
+        Returns the nearest point indexed to a query point.
+
+        nearest(Point) -> Point
+
+        Parameters
+        ----------
+        query_point : a point to find the nearest indexed point to
+
+        Examples
+        --------
+        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
+        >>> pl = BruteForcePointLocator(points)
+        >>> n = pl.nearest(Point((1, 1)))
+        >>> str(n)
+        '(0.0, 0.0)'
+        """
+        return min(self._points, key=lambda p: get_points_dist(p, query_point))
+
+    def region(self, region_rect):
+        """
+        Returns the indexed points located inside a rectangular query region.
+
+        region(Rectangle) -> Point list
+
+        Parameters
+        ----------
+        region_rect : the rectangular range to find indexed points in
+
+        Examples
+        --------
+        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
+        >>> pl = BruteForcePointLocator(points)
+        >>> pts = pl.region(Rectangle(-1, -1, 10, 10))
+        >>> len(pts)
+        3
+        """
+        return filter(lambda p: get_rectangle_point_intersect(region_rect, p) is not None, self._points)
+
+    def proximity(self, origin, r):
+        """
+        Returns the indexed points located within some distance of an origin point.
+
+        proximity(Point, number) -> Point list
+
+        Parameters
+        ----------
+        origin  : the point to find indexed points near
+        r       : the maximum distance to find indexed point from the origin point
+
+        Examples
+        --------
+        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
+        >>> pl = BruteForcePointLocator(points)
+        >>> neighs = pl.proximity(Point((1, 0)), 2)
+        >>> len(neighs)
+        1
+        >>> p = neighs[0]
+        >>> isinstance(p, Point)
+        True
+        >>> str(p)
+        '(0.0, 0.0)'
+        """
+        return filter(lambda p: get_points_dist(p, origin) <= r, self._points)
+
+
class PointLocator:
    """
    An abstract representation of a point indexing data structure.

    Currently delegates every implemented query to a
    BruteForcePointLocator; the indirection exists so a smarter index
    can later be substituted without changing callers.
    """

    def __init__(self, points):
        """
        Returns a point locator object.

        __init__(Point list) -> PointLocator

        Parameters
        ----------
        points : a list of points to index

        Examples
        --------
        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
        >>> pl = PointLocator(points)
        """
        self._locator = BruteForcePointLocator(points)

    def nearest(self, query_point):
        """
        Returns the nearest point indexed to a query point.

        nearest(Point) -> Point

        Parameters
        ----------
        query_point : a point to find the nearest indexed point to

        Examples
        --------
        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
        >>> pl = PointLocator(points)
        >>> n = pl.nearest(Point((1, 1)))
        >>> str(n)
        '(0.0, 0.0)'
        """
        return self._locator.nearest(query_point)

    def region(self, region_rect):
        """
        Returns the indexed points located inside a rectangular query region.

        region(Rectangle) -> Point list

        Parameters
        ----------
        region_rect : the rectangular range to find indexed points in

        Examples
        --------
        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
        >>> pl = PointLocator(points)
        >>> pts = pl.region(Rectangle(-1, -1, 10, 10))
        >>> len(pts)
        3
        """
        return self._locator.region(region_rect)
    overlapping = region

    def polygon(self, polygon):
        """
        Returns the indexed points located inside a polygon.

        Not implemented yet.  The intended algorithm: collect the
        indexed points inside the polygon's bounding box, then filter
        that candidate set by exact polygon containment.

        Raises
        ------
        NotImplementedError : always.  Raising (instead of the previous
            silent ``return None``) matches the convention used by the
            unimplemented PolygonLocator methods.
        """
        # get points in polygon bounding box
        # for points in bounding box, check for inclusion in polygon
        raise NotImplementedError

    def proximity(self, origin, r):
        """
        Returns the indexed points located within some distance of an origin point.

        proximity(Point, number) -> Point list

        Parameters
        ----------
        origin  : the point to find indexed points near
        r       : the maximum distance to find indexed point from the origin point

        Examples
        --------
        >>> points = [Point((0, 0)), Point((1, 6)), Point((5.4, 1.4))]
        >>> pl = PointLocator(points)
        >>> len(pl.proximity(Point((1, 0)), 2))
        1
        """
        return self._locator.proximity(origin, r)
+
+
class PolygonLocator:
    """
    An abstract representation of a polygon indexing data structure.

    Polygons are indexed by their bounding boxes in an R-tree; query
    methods first collect bounding-box candidates from the R-tree and
    then apply exact geometric checks to the candidates.
    """

    def __init__(self, polygons):
        """
        Returns a polygon locator object.

        __init__(Polygon list) -> PolygonLocator

        Parameters
        ----------
        polygons : a list of polygons to index

        Examples
        --------
        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> pl = PolygonLocator([p1, p2])
        >>> isinstance(pl, PolygonLocator)
        True
        """

        self._locator = polygons
        # Build an R-tree over the polygon bounding boxes for fast
        # candidate retrieval in the query methods below.
        self._rtree = RTree()
        for polygon in polygons:
            x = polygon.bounding_box.left
            y = polygon.bounding_box.lower
            X = polygon.bounding_box.right
            Y = polygon.bounding_box.upper
            self._rtree.insert(polygon, Rect(x, y, X, Y))

    def inside(self, query_rectangle):
        """
        Returns polygons that are inside query_rectangle

        Examples
        --------

        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> p3 = Polygon([Point((7, 1)), Point((8, 7)), Point((9, 1))])
        >>> pl = PolygonLocator([p1, p2, p3])
        >>> qr = Rectangle(0, 0, 5, 5)
        >>> res = pl.inside( qr )
        >>> len(res)
        1
        >>> qr = Rectangle(3, 7, 5, 8)
        >>> res = pl.inside( qr )
        >>> len(res)
        0
        >>> qr = Rectangle(10, 10, 12, 12)
        >>> res = pl.inside( qr )
        >>> len(res)
        0
        >>> qr = Rectangle(0, 0, 12, 12)
        >>> res = pl.inside( qr )
        >>> len(res)
        3

        Notes
        -----

        inside means the intersection of the query rectangle and a
        polygon is not empty and is equal to the area of the polygon
        """
        left = query_rectangle.left
        right = query_rectangle.right
        upper = query_rectangle.upper
        lower = query_rectangle.lower

        # bounding-box candidates from the R-tree
        qr = Rect(left, lower, right, upper)
        res = [r.leaf_obj() for r in self._rtree.query_rect(qr)
               if r.is_leaf()]

        # A candidate is inside when both extreme corners of its
        # bounding box fall within the query rectangle polygon.
        qp = Polygon([Point((left, lower)), Point((right, lower)),
                      Point((right, upper)), Point((left, upper))])
        ip = []
        GPPI = get_polygon_point_intersect
        for poly in res:
            bb = poly.bounding_box
            p1 = Point((bb.left, bb.lower))
            p2 = Point((bb.right, bb.upper))
            if GPPI(qp, p1) and GPPI(qp, p2):
                ip.append(poly)
        return ip

    def overlapping(self, query_rectangle):
        """
        Returns list of polygons that overlap query_rectangle

        Examples
        --------

        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> p3 = Polygon([Point((7, 1)), Point((8, 7)), Point((9, 1))])
        >>> pl = PolygonLocator([p1, p2, p3])
        >>> qr = Rectangle(0, 0, 5, 5)
        >>> res = pl.overlapping( qr )
        >>> len(res)
        2
        >>> qr = Rectangle(3, 7, 5, 8)
        >>> res = pl.overlapping( qr )
        >>> len(res)
        1
        >>> qr = Rectangle(10, 10, 12, 12)
        >>> res = pl.overlapping( qr )
        >>> len(res)
        0
        >>> qr = Rectangle(0, 0, 12, 12)
        >>> res = pl.overlapping( qr )
        >>> len(res)
        3
        >>> qr = Rectangle(8, 3, 9, 4)
        >>> p1 = Polygon([Point((2, 1)), Point((2, 3)), Point((4, 3)), Point((4,1))])
        >>> p2 = Polygon([Point((7, 1)), Point((7, 5)), Point((10, 5)), Point((10, 1))])
        >>> pl = PolygonLocator([p1, p2])
        >>> res = pl.overlapping(qr)
        >>> len(res)
        1

        Notes
        -----
        overlapping means the intersection of the query rectangle and a
        polygon is not empty and is no larger than the area of the polygon
        """
        left = query_rectangle.left
        right = query_rectangle.right
        upper = query_rectangle.upper
        lower = query_rectangle.lower

        # bounding-box candidates whose boxes intersect the query rect
        qr = Rect(left, lower, right, upper)
        res = [r.leaf_obj() for r in self._rtree.query_rect(qr)
               if r.is_leaf()]

        # candidates whose bounding boxes contain at least one corner of
        # the query rectangle (used in check 3 below)
        sw = (left, lower)
        se = (right, lower)
        ne = (right, upper)
        nw = (left, upper)
        pnts = [sw, se, ne, nw]
        cs = []
        for pnt in pnts:
            c = [r.leaf_obj() for r in self._rtree.query_point(
                pnt) if r.is_leaf()]
            cs.extend(c)

        cs = list(set(cs))

        overlapping = []

        # check 1: polygons with at least one vertex inside the query
        # rectangle (half-open bounds, matching the original logic)
        remaining = copy.copy(res)
        for polygon in res:
            for vertex in polygon.vertices:
                if left <= vertex[0] < right and lower <= vertex[1] < upper:
                    overlapping.append(polygon)
                    remaining.remove(polygon)
                    break

        # check 2: for remaining candidates, does any edge of the
        # polygon's (closed) vertex chain cross an edge of the query
        # rectangle?
        left_edge = LineSegment(Point((left, lower)), Point((left,
                                                             upper)))
        right_edge = LineSegment(Point((right, lower)), Point((right,
                                                               upper)))
        lower_edge = LineSegment(Point((left, lower)), Point((right,
                                                              lower)))
        upper_edge = LineSegment(Point((left, upper)), Point((right,
                                                              upper)))
        rect_edges = (left_edge, right_edge, lower_edge, upper_edge)
        for polygon in remaining:
            vertices = copy.copy(polygon.vertices)
            if vertices[-1] != vertices[0]:
                vertices.append(vertices[0])  # put on closed cartographic form
            intersects = False
            for i in range(len(vertices) - 1):
                edge = LineSegment(vertices[i], vertices[i + 1])
                for rect_edge in rect_edges:
                    if get_segments_intersect(edge, rect_edge):
                        overlapping.append(polygon)
                        intersects = True
                        break
                if intersects:
                    break

        # check 3: polygons that fully contain the query rectangle --
        # each candidate in cs is tested independently for containing a
        # corner of the query rectangle.  (The previous implementation
        # broke out of the candidate loop after the first hit, which
        # could miss other polygons that also contain the rectangle.)
        sw = Point(sw)
        se = Point(se)
        ne = Point(ne)
        nw = Point(nw)
        for polygon in cs:
            for corner in (sw, se, ne, nw):
                if get_polygon_point_intersect(polygon, corner):
                    overlapping.append(polygon)
                    break
        return list(set(overlapping))

    def nearest(self, query_point, rule='vertex'):
        """
        Returns the nearest polygon indexed to a query point based on
        various rules.

        nearest(Polygon) -> Polygon

        Parameters
        ----------
        query_point  : a point to find the nearest indexed polygon to

        rule         : representative point for polygon in nearest query.
                 vertex -- measures distance between vertices and query_point
                 centroid -- measures distance between centroid and
                 query_point
                 edge   -- measures the distance between edges and query_point

        Examples
        --------
        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> pl = PolygonLocator([p1, p2])
        >>> try: n = pl.nearest(Point((-1, 1)))
        ... except NotImplementedError: print "future test: str(min(n.vertices())) == (0.0, 1.0)"
        future test: str(min(n.vertices())) == (0.0, 1.0)
        """
        raise NotImplementedError

    def region(self, region_rect):
        """
        Returns the indexed polygons located inside a rectangular query region.

        A polygon is included when at least one of its vertices falls
        inside region_rect.

        region(Rectangle) -> Polygon list

        Parameters
        ----------
        region_rect  : the rectangular range to find indexed polygons in

        Examples
        --------
        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> pl = PolygonLocator([p1, p2])
        >>> n = pl.region(Rectangle(0, 0, 4, 10))
        >>> len(n)
        2
        """
        # Build a fresh result list.  The previous implementation
        # removed entries from self._locator while iterating over it,
        # which both corrupted the index for later queries and skipped
        # elements during iteration.
        result = []
        for polygon in self._locator:
            pl = BruteForcePointLocator(polygon.vertices)
            if len(pl.region(region_rect)) > 0:
                result.append(polygon)
        return result

    def contains_point(self, point):
        """
        Returns polygons that contain point


        Parameters
        ----------
        point: point (x,y)

        Returns
        -------
        list of polygons containing point

        Examples
        --------
        >>> p1 = Polygon([Point((0,0)), Point((6,0)), Point((4,4))])
        >>> p2 = Polygon([Point((1,2)), Point((4,0)), Point((4,4))])
        >>> p1.contains_point((2,2))
        1
        >>> p2.contains_point((2,2))
        1
        >>> pl = PolygonLocator([p1, p2])
        >>> len(pl.contains_point((2,2)))
        2
        >>> p2.contains_point((1,1))
        0
        >>> p1.contains_point((1,1))
        1
        >>> len(pl.contains_point((1,1)))
        1
        >>> p1.centroid
        (3.3333333333333335, 1.3333333333333333)
        >>> pl.contains_point((1,1))[0].centroid
        (3.3333333333333335, 1.3333333333333333)

        """
        # bounding box containment gives the candidate set
        res = [r.leaf_obj() for r in self._rtree.query_point(point)
               if r.is_leaf()]
        # explicit containment check for candidate polygons needed
        return [poly for poly in res if poly.contains_point(point)]

    def proximity(self, origin, r, rule='vertex'):
        """
        Returns the indexed polygons located within some distance of an
        origin point based on various rules.

        proximity(Polygon, number) -> Polygon list

        Parameters
        ----------
        origin  : the point to find indexed polygons near
        r       : the maximum distance to find indexed polygon from the origin point

        rule    : representative point for polygon in nearest query.
                vertex -- measures distance between vertices and query_point
                centroid -- measures distance between centroid and
                query_point
                edge   -- measures the distance between edges and query_point

        Examples
        --------
        >>> p1 = Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))])
        >>> p2 = Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))])
        >>> pl = PolygonLocator([p1, p2])
        >>> try:
        ...     len(pl.proximity(Point((0, 0)), 2))
        ... except NotImplementedError:
        ...     print "future test: len(pl.proximity(Point((0, 0)), 2)) == 2"
        future test: len(pl.proximity(Point((0, 0)), 2)) == 2
        """
        raise NotImplementedError
+
diff --git a/pysal/cg/rtree.py b/pysal/cg/rtree.py
new file mode 100644
index 0000000..4e8e9a2
--- /dev/null
+++ b/pysal/cg/rtree.py
@@ -0,0 +1,639 @@
+#pylint: disable-msg=C0103, C0301
+"""
+Pure Python implementation of RTree spatial index
+
+
+
+Adaptation of
+http://code.google.com/p/pyrtree/
+
+R-tree.
+see doc/ref/r-tree-clustering-split-algo.pdf
+"""
+
+__author__ = "Sergio J. Rey"
+
+__all__ = ['RTree', 'Rect', 'Rtree']
+
+MAXCHILDREN = 10
+MAX_KMEANS = 5
+BUFFER = 0.0000001
+import math
+import random
+import time
+import array
+
+
class Rect(object):
    """
    An axis-aligned rectangle storing normalized corner coordinates
    (x, y) <= (xx, yy) plus two flags, swapped_x and swapped_y, that
    record whether the constructor arguments arrived in reversed order.
    The R-tree reuses swapped_x as a leaf marker (see _NodeCursor).
    """

    __slots__ = ("x", "y", "xx", "yy", "swapped_x", "swapped_y")

    def __getstate__(self):
        return (self.x, self.y, self.xx, self.yy, self.swapped_x, self.swapped_y)

    def __setstate__(self, state):
        self.x, self.y, self.xx, self.yy, self.swapped_x, self.swapped_y = state

    def __init__(self, minx, miny, maxx, maxy):
        # Remember whether each axis arrived reversed, then normalize
        # so (x, y) is the lower-left and (xx, yy) the upper-right.
        self.swapped_x = maxx < minx
        self.swapped_y = maxy < miny
        if self.swapped_x:
            minx, maxx = maxx, minx
        if self.swapped_y:
            miny, maxy = maxy, miny
        self.x = minx
        self.y = miny
        self.xx = maxx
        self.yy = maxy

    def coords(self):
        """Return the normalized (minx, miny, maxx, maxy)."""
        return self.x, self.y, self.xx, self.yy

    def overlap(self, orect):
        """Area of the intersection with another rectangle."""
        return self.intersect(orect).area()

    def write_raw_coords(self, toarray, idx):
        # Write the four coordinates at toarray[idx:idx+4] in their
        # ORIGINAL (possibly swapped) order so the flags can later be
        # reconstructed by re-running __init__.
        x0, y0, x1, y1 = self.x, self.y, self.xx, self.yy
        if self.swapped_x:
            x0, x1 = x1, x0
        if self.swapped_y:
            y0, y1 = y1, y0
        toarray[idx] = x0
        toarray[idx + 1] = y0
        toarray[idx + 2] = x1
        toarray[idx + 3] = y1

    def area(self):
        """Width times height."""
        return (self.xx - self.x) * (self.yy - self.y)

    def extent(self):
        """Return (minx, miny, width, height)."""
        return (self.x, self.y, self.xx - self.x, self.yy - self.y)

    def grow(self, amt):
        """Return a copy expanded by amt/2 on every side."""
        half = amt * 0.5
        return Rect(self.x - half, self.y - half,
                    self.xx + half, self.yy + half)

    def intersect(self, o):
        """Return the intersection rectangle, or NullRect when empty."""
        if self is NullRect or o is NullRect:
            return NullRect

        nx = max(self.x, o.x)
        ny = max(self.y, o.y)
        nx2 = min(self.xx, o.xx)
        ny2 = min(self.yy, o.yy)

        if nx2 - nx <= 0 or ny2 - ny <= 0:
            return NullRect

        return Rect(nx, ny, nx2, ny2)

    def does_contain(self, o):
        """True when rectangle o lies entirely within this one."""
        return self.does_containpoint((o.x, o.y)) and self.does_containpoint((o.xx, o.yy))

    def does_intersect(self, o):
        """True when the overlap with o has positive area."""
        return self.intersect(o).area() > 0

    def does_containpoint(self, p):
        """True when point p = (x, y) is inside or on the boundary."""
        px, py = p
        return self.x <= px <= self.xx and self.y <= py <= self.yy

    def union(self, o):
        """Return the smallest rectangle covering both self and o."""
        if o is NullRect:
            return Rect(self.x, self.y, self.xx, self.yy)
        if self is NullRect:
            return Rect(o.x, o.y, o.xx, o.yy)

        nx = self.x if self.x < o.x else o.x
        ny = self.y if self.y < o.y else o.y
        nx2 = self.xx if self.xx > o.xx else o.xx
        ny2 = self.yy if self.yy > o.yy else o.yy

        return Rect(nx, ny, nx2, ny2)

    def union_point(self, o):
        """Union with a degenerate rectangle at point o = (x, y)."""
        px, py = o
        return self.union(Rect(px, py, px, py))

    def diagonal_sq(self):
        """Squared diagonal length (0 for NullRect)."""
        if self is NullRect:
            return 0
        w = self.xx - self.x
        h = self.yy - self.y
        return w * w + h * h

    def diagonal(self):
        """Diagonal length."""
        return math.sqrt(self.diagonal_sq())
+
# Shared sentinel meaning "no extent": a degenerate rectangle at the
# origin with both swap flags forced off (so it is never mistaken for a
# leaf marker).  Compared by identity (`is NullRect`) throughout.
NullRect = Rect(0.0, 0.0, 0.0, 0.0)
NullRect.swapped_x = False
NullRect.swapped_y = False
+
+
def union_all(kids):
    """Return the union of the bounding rects of every node in kids.

    Starts from NullRect, so an empty sequence yields NullRect.
    """
    combined = NullRect
    for kid in kids:
        combined = combined.union(kid.rect)
    # A freshly-unioned rect must never carry the leaf-marker flag.
    assert(False == combined.swapped_x)
    return combined
+
+
def Rtree():
    # Factory alias mirroring the class name exposed by the C `rtree`
    # library, so callers can use either spelling.
    return RTree()
+
+
class RTree(object):
    """
    Pure-Python R-tree over axis-aligned rectangles.

    Node storage is pool-based (parallel arrays) rather than an object
    graph; see the comment in __init__.  Traversal and insertion are
    delegated to a _NodeCursor rooted at self.cursor.
    """
    def __init__(self):
        # Performance counters, updated by _NodeCursor._balance().
        self.stats = {
            "overflow_f": 0,
            "avg_overflow_t_f": 0.0,
            "longest_overflow": 0.0,
            "longest_kmeans": 0.0,
            "sum_kmeans_iter_f": 0,
            "count_kmeans_iter_f": 0,
            "avg_kmeans_iter_f": 0.0
        }

        # This round: not using objects directly -- they
        #   take up too much memory, and efficiency goes down the toilet
        #   (obviously) if things start to page.
        #  Less obviously: using object graph directly leads to really long GC
        #   pause times, too.
        # Instead, it uses pools of arrays:
        # (note: the redundant second `self.count = 0` assignment that
        # used to precede the stats dict has been removed)
        self.count = 0       # number of allocated node slots
        self.leaf_count = 0  # number of stored leaf objects
        self.rect_pool = array.array('d')  # 4 doubles per node
        self.node_pool = array.array('L')  # (next_sibling, first_child) per node
        self.leaf_pool = []  # leaf objects.

        self.cursor = _NodeCursor.create(self, NullRect)

    def _ensure_pool(self, idx):
        # Grow the pools so node slot `idx` is addressable.
        if len(self.rect_pool) < (4 * idx):
            self.rect_pool.extend([0, 0, 0, 0] * idx)
            self.node_pool.extend([0, 0] * idx)

    def insert(self, o, orect):
        """Insert object `o` with bounding Rect `orect`."""
        self.cursor.insert(o, orect)
        # Insertion must leave the cursor back at the root.
        assert(self.cursor.index == 0)

    def query_rect(self, r):
        """Yield tree nodes whose rects intersect Rect `r`."""
        for x in self.cursor.query_rect(r):
            yield x

    def query_point(self, p):
        """Yield tree nodes whose rects contain point `p`."""
        for x in self.cursor.query_point(p):
            yield x

    def walk(self, pred):
        """Traverse the tree, pruned by predicate `pred`."""
        return self.cursor.walk(pred)

    def intersection(self, boundingbox):
        """
        replicate c rtree method

        Parameters
        ----------

        boundingbox : list
                      query bounding box [minx, miny, maxx, maxy]

        Returns
        -------

        ids : list
              list of object ids whose bounding boxes intersect with query
              bounding box

        """
        # grow the bounding box slightly to handle coincident edges
        bb = boundingbox[:]
        bb[0] = bb[0] - BUFFER
        bb[1] = bb[1] - BUFFER
        bb[2] = bb[2] + BUFFER
        bb[3] = bb[3] + BUFFER

        qr = Rect(bb[0], bb[1], bb[2], bb[3])
        return [r.leaf_obj() for r in self.query_rect(qr) if r.is_leaf()]

    def add(self, id, boundingbox):
        """
        replicate c rtree method

        Arguments
        ---------

        id: object id

        boundingbox: list
                   bounding box [minx, miny, maxx, maxy]
        """
        bb = boundingbox
        self.cursor.insert(id, Rect(bb[0], bb[1], bb[2], bb[3]))
+
+
+class _NodeCursor(object):
+    @classmethod
+    def create(cls, rooto, rect):
+        idx = rooto.count
+        rooto.count += 1
+
+        rooto._ensure_pool(idx + 1)
+        #rooto.node_pool.extend([0,0])
+        #rooto.rect_pool.extend([0,0,0,0])
+
+        retv = _NodeCursor(rooto, idx, rect, 0, 0)
+
+        retv._save_back()
+        return retv
+
+    @classmethod
+    def create_with_children(cls, children, rooto):
+        rect = union_all([c for c in children])
+        nr = Rect(rect.x, rect.y, rect.xx, rect.yy)
+        assert(not rect.swapped_x)
+        nc = _NodeCursor.create(rooto, rect)
+        nc._set_children(children)
+        assert(not nc.is_leaf())
+        return nc
+
+    @classmethod
+    def create_leaf(cls, rooto, leaf_obj, leaf_rect):
+        rect = Rect(leaf_rect.x, leaf_rect.y, leaf_rect.xx, leaf_rect.yy)
+        rect.swapped_x = True  # Mark as leaf by setting the xswap flag.
+        res = _NodeCursor.create(rooto, rect)
+        idx = res.index
+        res.first_child = rooto.leaf_count
+        rooto.leaf_count += 1
+        res.next_sibling = 0
+        rooto.leaf_pool.append(leaf_obj)
+        res._save_back()
+        res._become(idx)
+        assert(res.is_leaf())
+        return res
+
+    __slots__ = ("root", "npool", "rpool", "index", "rect",
+                 "next_sibling", "first_child")
+
+    def __getstate__(self):
+        return (self.root, self.npool, self.rpool, self.index, self.rect, self.next_sibling, self.first_child)
+
+    def __setstate__(self, state):
+        self.root, self.npool, self.rpool, self.index, self.rect, self.next_sibling, self.first_child = state
+
+    def __init__(self, rooto, index, rect, first_child, next_sibling):
+        self.root = rooto
+        self.rpool = rooto.rect_pool
+        self.npool = rooto.node_pool
+
+        self.index = index
+        self.rect = rect
+        self.next_sibling = next_sibling
+        self.first_child = first_child
+
+    def walk(self, predicate):
+        if (predicate(self, self.leaf_obj())):
+            yield self
+            if not self.is_leaf():
+                for c in self.children():
+                    for cr in c.walk(predicate):
+                        yield cr
+
+    def query_rect(self, r):
+        """ Return things that intersect with 'r'. """
+        def p(o, x):
+            return r.does_intersect(o.rect)
+        for rr in self.walk(p):
+            yield rr
+
+    def query_point(self, point):
+        """ Query by a point """
+        def p(o, x):
+            return o.rect.does_containpoint(point)
+
+        for rr in self.walk(p):
+            yield rr
+
+    def lift(self):
+        return _NodeCursor(self.root,
+                           self.index,
+                           self.rect,
+                           self.first_child,
+                           self.next_sibling)
+
+    def _become(self, index):
+        recti = index * 4
+        nodei = index * 2
+        rp = self.rpool
+        x = rp[recti]
+        y = rp[recti + 1]
+        xx = rp[recti + 2]
+        yy = rp[recti + 3]
+
+        if (x == 0.0 and y == 0.0 and xx == 0.0 and yy == 0.0):
+            self.rect = NullRect
+        else:
+            self.rect = Rect(x, y, xx, yy)
+
+        self.next_sibling = self.npool[nodei]
+        self.first_child = self.npool[nodei + 1]
+        self.index = index
+
+    def is_leaf(self):
+        return self.rect.swapped_x
+
+    def has_children(self):
+        return not self.is_leaf() and 0 != self.first_child
+
+    def holds_leaves(self):
+        if 0 == self.first_child:
+            return True
+        else:
+            return self.has_children() and self.get_first_child().is_leaf()
+
+    def get_first_child(self):
+        fc = self.first_child
+        c = _NodeCursor(self.root, 0, NullRect, 0, 0)
+        c._become(self.first_child)
+        return c
+
+    def leaf_obj(self):
+        if self.is_leaf():
+            return self.root.leaf_pool[self.first_child]
+        else:
+            return None
+
+    def _save_back(self):
+        rp = self.rpool
+        recti = self.index * 4
+        nodei = self.index * 2
+
+        if self.rect is not NullRect:
+            self.rect.write_raw_coords(rp, recti)
+        else:
+            rp[recti] = 0
+            rp[recti + 1] = 0
+            rp[recti + 2] = 0
+            rp[recti + 3] = 0
+
+        self.npool[nodei] = self.next_sibling
+        self.npool[nodei + 1] = self.first_child
+
+    def nchildren(self):
+        i = self.index
+        c = 0
+        for x in self.children():
+            c += 1
+        return c
+
+    def insert(self, leafo, leafrect):
+        index = self.index
+
+        # tail recursion, made into loop:
+        while True:
+            if self.holds_leaves():
+                self.rect = self.rect.union(leafrect)
+                self._insert_child(_NodeCursor.create_leaf(
+                    self.root, leafo, leafrect))
+
+                self._balance()
+
+                # done: become the original again
+                self._become(index)
+                return
+            else:
+                # Not holding leaves, move down a level in the tree:
+
+                # Micro-optimization:
+                #  inlining union() calls -- logic is:
+                # ignored,child = min([ ((c.rect.union(leafrect)).area() - c.rect.area(),c.index) for c in self.children() ])
+                child = None
+                minarea = -1.0
+                for c in self.children():
+                    x, y, xx, yy = c.rect.coords()
+                    lx, ly, lxx, lyy = leafrect.coords()
+                    nx = x if x < lx else lx
+                    nxx = xx if xx > lxx else lxx
+                    ny = y if y < ly else ly
+                    nyy = yy if yy > lyy else lyy
+                    a = (nxx - nx) * (nyy - ny)
+                    if minarea < 0 or a < minarea:
+                        minarea = a
+                        child = c.index
+                # End micro-optimization
+
+                self.rect = self.rect.union(leafrect)
+                self._save_back()
+                self._become(child)  # recurse.
+
+    def _balance(self):
+        if (self.nchildren() <= MAXCHILDREN):
+            return
+
+        t = time.clock()
+
+        cur_score = -10
+
+        s_children = [c.lift() for c in self.children()]
+
+        memo = {}
+
+        clusterings = [k_means_cluster(
+            self.root, k, s_children) for k in range(2, MAX_KMEANS)]
+        score, bestcluster = max(
+            [(silhouette_coeff(c, memo), c) for c in clusterings])
+
+        nodes = [_NodeCursor.create_with_children(
+            c, self.root) for c in bestcluster if len(c) > 0]
+
+        self._set_children(nodes)
+
+        dur = (time.clock() - t)
+        c = float(self.root.stats["overflow_f"])
+        oa = self.root.stats["avg_overflow_t_f"]
+        self.root.stats["avg_overflow_t_f"] = (
+            dur / (c + 1.0)) + (c * oa / (c + 1.0))
+        self.root.stats["overflow_f"] += 1
+        self.root.stats["longest_overflow"] = max(
+            self.root.stats["longest_overflow"], dur)
+
+    def _set_children(self, cs):
+        self.first_child = 0
+
+        if 0 == len(cs):
+            return
+
+        pred = None
+        for c in cs:
+            if pred is not None:
+                pred.next_sibling = c.index
+                pred._save_back()
+            if 0 == self.first_child:
+                self.first_child = c.index
+            pred = c
+        pred.next_sibling = 0
+        pred._save_back()
+        self._save_back()
+
+    def _insert_child(self, c):
+        # Prepend cursor `c` to this node's sibling chain and persist both nodes.
+        c.next_sibling = self.first_child
+        self.first_child = c.index
+        c._save_back()
+        self._save_back()
+
+    def children(self):
+        # Generator over this node's children.  NOTE: the cursor mutates
+        # *itself* (_become) to visit each child, then restores its own state
+        # by hand when iteration finishes -- callers must not rely on `self`
+        # pointing at this node while the generator is being consumed.
+        if (0 == self.first_child):
+            return
+
+        # Stash our own state so it can be restored after the walk.
+        idx = self.index
+        fc = self.first_child
+        ns = self.next_sibling
+        r = self.rect
+
+        self._become(self.first_child)
+        while True:
+            yield self
+            if 0 == self.next_sibling:
+                break
+            else:
+                self._become(self.next_sibling)
+
+        # Go back to becoming the same node we were.
+        # (direct field restore instead of _become(idx), avoiding a re-read)
+        #self._become(idx)
+        self.index = idx
+        self.first_child = fc
+        self.next_sibling = ns
+        self.rect = r
+
+
+def avg_diagonals(node, onodes, memo_tab):
+    # Mean diagonal of the union of node.rect with each rect in `onodes`.
+    # Pairwise results are memoized in `memo_tab`, looked up under either
+    # key order so (a, b) and (b, a) share one entry.
+    nidx = node.index
+    sv = 0.0
+    diag = 0.0
+    for onode in onodes:
+        k1 = (nidx, onode.index)
+        k2 = (onode.index, nidx)
+        if k1 in memo_tab:
+            diag = memo_tab[k1]
+        elif k2 in memo_tab:
+            diag = memo_tab[k2]
+        else:
+            diag = node.rect.union(onode.rect).diagonal()
+            memo_tab[k1] = diag
+
+        sv += diag
+
+    return sv / len(onodes)  # NOTE(review): ZeroDivisionError if onodes is empty
+
+
+def silhouette_w(node, cluster, next_closest_cluster, memo):
+    # Silhouette width of `node`: (b - a) / max(a, b), where a is the mean
+    # union-diagonal to its own cluster and b to the next closest cluster.
+    # NOTE(review): divides by zero if both means are 0 (all-degenerate rects).
+    ndist = avg_diagonals(node, cluster, memo)
+    sdist = avg_diagonals(node, next_closest_cluster, memo)
+    return (sdist - ndist) / max(sdist, ndist)
+
+
+def silhouette_coeff(clustering, memo_tab):
+    # Overall silhouette coefficient: the mean over clusters of each
+    # cluster's mean per-node silhouette width.
+    # special case for a clustering of 1.0
+    if (len(clustering) == 1):
+        return 1.0
+
+    coeffs = []
+    for cluster in clustering:
+        others = [c for c in clustering if c is not cluster]
+        others_cntr = [center_of_gravity(c) for c in others]
+        # For each node, compare against the *nearest* other cluster.
+        ws = [silhouette_w(node, cluster, others[closest(
+            others_cntr, node)], memo_tab) for node in cluster]
+        cluster_coeff = sum(ws) / len(ws)
+        coeffs.append(cluster_coeff)
+    return sum(coeffs) / len(coeffs)
+
+
+def center_of_gravity(nodes):
+    # Area-weighted centroid of the non-null rectangles of `nodes`.
+    totarea = 0.0
+    xs, ys = 0, 0
+    for n in nodes:
+        if n.rect is not NullRect:
+            x, y, w, h = n.rect.extent()
+            a = w * h
+            xs = xs + (a * (x + (0.5 * w)))
+            ys = ys + (a * (y + (0.5 * h)))
+            totarea = totarea + a
+    # NOTE(review): ZeroDivisionError when every rect is NullRect or zero-area.
+    return (xs / totarea), (ys / totarea)
+
+
+def closest(centroids, node):
+    # Index of the centroid nearest (by squared euclidean distance) to the
+    # node's own center of gravity.  Returns -1 when `centroids` is empty.
+    x, y = center_of_gravity([node])
+    dist = -1   # -1 sentinel: no distance seen yet
+    ridx = -1
+
+    for (i, (xx, yy)) in enumerate(centroids):
+        dsq = ((xx - x) ** 2) + ((yy - y) ** 2)
+        if -1 == dist or dsq < dist:
+            dist = dsq
+            ridx = i
+    return ridx
+
+
+def k_means_cluster(root, k, nodes):
+    t = time.clock()
+    if len(nodes) <= k:
+        return [[n] for n in nodes]
+
+    ns = list(nodes)
+    root.stats["count_kmeans_iter_f"] += 1
+
+    # Initialize: take n random nodes.
+    #random.shuffle(ns)
+
+    cluster_starts = ns[:k]
+    cluster_centers = [center_of_gravity([n]) for n in ns[:k]]
+
+    # Loop until stable:
+    while True:
+        root.stats["sum_kmeans_iter_f"] += 1
+        clusters = [[] for c in cluster_centers]
+
+        for n in ns:
+            idx = closest(cluster_centers, n)
+            clusters[idx].append(n)
+
+        #FIXME HACK TODO: is it okay for there to be empty clusters?
+        clusters = [c for c in clusters if len(c) > 0]
+
+        for c in clusters:
+            if (len(c) == 0):
+                print("Errorrr....")
+                print("Nodes: %d, centers: %s" % (len(ns),
+                                                  repr(cluster_centers)))
+
+            assert(len(c) > 0)
+
+        rest = ns
+        first = False
+
+        new_cluster_centers = [center_of_gravity(c) for c in clusters]
+        if new_cluster_centers == cluster_centers:
+            root.stats["avg_kmeans_iter_f"] = float(root.stats["sum_kmeans_iter_f"] / root.stats["count_kmeans_iter_f"])
+            root.stats["longest_kmeans"] = max(
+                root.stats["longest_kmeans"], (time.clock() - t))
+            return clusters
+        else:
+            cluster_centers = new_cluster_centers
diff --git a/pysal/cg/segmentLocator.py b/pysal/cg/segmentLocator.py
new file mode 100644
index 0000000..07c4c64
--- /dev/null
+++ b/pysal/cg/segmentLocator.py
@@ -0,0 +1,403 @@
+import math
+import scipy
+import numpy
+from pysal.cg.shapes import Rectangle, Point, LineSegment
+from pysal.cg.standalone import get_segment_point_dist, get_bounding_box
+import random
+import time
+
+__all__ = ["SegmentGrid", "SegmentLocator",
+           "Polyline_Shapefile_SegmentLocator"]
+DEBUG = False
+
+
+class BruteSegmentLocator(object):
+    """O(n)-per-query nearest-segment locator; used as the correctness
+    baseline for the grid-based locators below."""
+    def __init__(self, segments):
+        self.data = segments
+        self.n = len(segments)
+
+    def nearest(self, pt):
+        # Linear scan: distance from pt to every segment, return the argmin index.
+        d = self.data
+        distances = [get_segment_point_dist(
+            d[i], pt)[0] for i in xrange(self.n)]
+        return numpy.argmin(distances)
+
+
+class SegmentLocator(object):
+    def __init__(self, segments, nbins=500):
+        self.data = segments
+        if hasattr(segments, 'bounding_box'):
+            bbox = segment.bounding_box
+        else:
+            bbox = get_bounding_box(segments)
+        self.bbox = bbox
+        res = max((bbox.right - bbox.left), (bbox.upper -
+                                             bbox.lower)) / float(nbins)
+        self.grid = SegmentGrid(bbox, res)
+        for i, seg in enumerate(segments):
+            self.grid.add(seg, i)
+
+    def nearest(self, pt):
+        d = self.data
+        possibles = self.grid.nearest(pt)
+        distances = [get_segment_point_dist(d[i], pt)[0] for i in possibles]
+        #print "possibles",possibles
+        #print "distances",distances
+        #print "argmin", numpy.argmin(distances)
+        return possibles[numpy.argmin(distances)]
+
+
+class Polyline_Shapefile_SegmentLocator(object):
+    """Grid-accelerated nearest-segment locator over a polyline shapefile.
+    Ids stored in the grid are (shape, part, segment) index triples."""
+    def __init__(self, shpfile, nbins=500):
+        self.data = shpfile
+        bbox = Rectangle(*shpfile.bbox)
+        # Bin resolution: the longest bbox side divided into `nbins` bins.
+        res = max((bbox.right - bbox.left), (bbox.upper -
+                                             bbox.lower)) / float(nbins)
+        self.grid = SegmentGrid(bbox, res)
+        for i, polyline in enumerate(shpfile):
+            for p, part in enumerate(polyline.segments):
+                for j, seg in enumerate(part):
+                    self.grid.add(seg, (i, p, j))
+
+    def nearest(self, pt):
+        # Candidate triples from the grid, then exact distances to pick the winner.
+        d = self.data
+        possibles = self.grid.nearest(pt)
+        distances = [get_segment_point_dist(
+            d[i].segments[p][j], pt)[0] for (i, p, j) in possibles]
+        #print "possibles",possibles
+        #print "distances",distances
+        #print "argmin", numpy.argmin(distances)
+        return possibles[numpy.argmin(distances)]
+
+
+class SegmentGrid(object):
+    """
+    Notes:
+        SegmentGrid is a low level Grid class.
+        This class does not maintain a copy of the geometry in the grid.
+        It returns only approx. Solutions.
+        This Grid should be wrapped by a locator.
+    """
+    def __init__(self, bounds, resolution):
+        """
+        Returns a grid with specified properties.
+
+        __init__(Rectangle, number) -> SegmentGrid
+
+        Parameters
+        ----------
+        bounds      : the area for the grid to encompass
+        resolution  : the diameter of each bin
+
+        Examples
+        --------
+        TODO: complete this doctest
+        >>> g = SegmentGrid(Rectangle(0, 0, 10, 10), 1)
+        """
+        if resolution == 0:
+            raise Exception('Cannot create grid with resolution 0')
+        self.res = resolution
+        self.hash = {}
+        self._kd = None
+        self._kd2 = None
+        self._hashKeys = None
+        self.x_range = (bounds.left, bounds.right)
+        self.y_range = (bounds.lower, bounds.upper)
+        try:
+            self.i_range = int(math.ceil((self.x_range[1] -
+                                          self.x_range[0]) / self.res)) + 1
+            self.j_range = int(math.ceil((self.y_range[1] -
+                                          self.y_range[0]) / self.res)) + 1
+            self.mask = numpy.zeros((self.i_range, self.j_range), bool)
+            self.endMask = numpy.zeros((self.i_range, self.j_range), bool)
+        except Exception:
+            raise Exception('Invalid arguments for SegmentGrid(): (' + str(self.x_range) + ', ' + str(self.y_range) + ', ' + str(self.res) + ')')
+    @property
+    def hashKeys(self):
+        if self._hashKeys == None:
+            self._hashKeys = numpy.array(self.hash.keys(),dtype=float)
+        return self._hashKeys
+
+    @property
+    def kd(self):
+        if self._kd == None:
+            self._kd = scipy.spatial.cKDTree(self.hashKeys)
+        return self._kd
+
+    @property
+    def kd2(self):
+        if self._kd2 == None:
+            self._kd2 = scipy.spatial.KDTree(self.hashKeys)
+        return self._kd2
+
+    def in_grid(self, loc):
+        """
+        Returns whether a 2-tuple location _loc_ lies inside the grid bounds.
+        """
+        return (self.x_range[0] <= loc[0] <= self.x_range[1] and
+                self.y_range[0] <= loc[1] <= self.y_range[1])
+
+    def _grid_loc(self, loc):
+        i = int((loc[0] - self.x_range[0]) / self.res)  # floored
+        j = int((loc[1] - self.y_range[0]) / self.res)  # floored
+        #i = min(self.i_range-1, max(int((loc[0] - self.x_range[0])/self.res), 0))
+        #j = min(self.j_range-1, max(int((loc[1] - self.y_range[0])/self.res), 0))
+        #print "bin:", loc, " -> ", (i,j)
+        return (i, j)
+
+    def _real_loc(self, grid_loc):
+        x = (grid_loc[0] * self.res) + self.x_range[0]
+        y = (grid_loc[1] * self.res) + self.y_range[0]
+        return x, y
+
+    def bin_loc(self, loc, id):
+        grid_loc = self._grid_loc(loc)
+        if grid_loc not in self.hash:
+            self.hash[grid_loc] = set()
+            self.mask[grid_loc] = True
+        self.hash[grid_loc].add(id)
+        return grid_loc
+
+    def add(self, segment, id):
+        """
+        Adds segment to the grid.
+
+        add(segment, id) -> bool
+
+        Parameters
+        ----------
+        id -- id to be stored int he grid.
+        segment -- the segment which identifies where to store 'id' in the grid.
+
+        Examples
+        --------
+        >>> g = SegmentGrid(Rectangle(0, 0, 10, 10), 1)
+        >>> g.add(LineSegment(Point((0.2, 0.7)), Point((4.2, 8.7))), 0)
+        True
+        """
+        if not (self.in_grid(segment.p1) and self.in_grid(segment.p2)):
+            raise Exception('Attempt to insert item at location outside grid bounds: ' + str(segment))
+        i, j = self.bin_loc(segment.p1, id)
+        I, J = self.bin_loc(segment.p2, id)
+        self.endMask[i, j] = True
+        self.endMask[I, J] = True
+
+        bbox = segment.bounding_box
+        left = bbox.left
+        lower = bbox.lower
+        res = self.res
+        line = segment.line
+        tiny = res / 1000.
+        for i in xrange(1 + min(i, I), max(i, I)):
+            #print 'i',i
+            x = self.x_range[0] + (i * res)
+            y = line.y(x)
+            self.bin_loc((x - tiny, y), id)
+            self.bin_loc((x + tiny, y), id)
+        for j in xrange(1 + min(j, J), max(j, J)):
+            #print 'j',j
+            y = self.y_range[0] + (j * res)
+            x = line.x(y)
+            self.bin_loc((x, y - tiny), id)
+            self.bin_loc((x, y + tiny), id)
+        self._kd = None
+        self._kd2 = None
+        return True
+
+    def remove(self, segment):
+        self._kd = None
+        self._kd2 = None
+        pass
+
+    def nearest(self, pt):
+        """
+        Return a set of ids.
+
+        The ids identify line segments within a radius of the query point.
+        The true nearest segment is guaranteed to be within the set.
+
+        Filtering possibles is the responsibility of the locator not the grid.
+        This means the Grid doesn't need to keep a reference to the underlying segments,
+        which in turn means the Locator can keep the segments on disk.
+
+        Locators can be customized to different data stores (shape files, SQL, etc.)
+        """
+        grid_loc = numpy.array(self._grid_loc(pt))
+        possibles = set()
+
+        if DEBUG:
+            print "in_grid:", self.in_grid(pt)
+            i = pylab.matshow(self.mask, origin='lower',
+                              extent=self.x_range + self.y_range, fignum=1)
+        # Use KD tree to search out the nearest filled bin.
+        # it may be faster to not use kdtree, or at least check grid_loc first
+        # The KD tree is build on the keys of self.hash, a dictionary of stored bins.
+        dist, i = self.kd.query(grid_loc, 1)
+
+        ### Find non-empty bins within a radius of the query point.
+        # Location of Q point
+        row, col = grid_loc
+        # distance to nearest filled cell +2.
+        # +1 returns inconsistent results (compared to BruteSegmentLocator)
+        # +2 seems to do the trick.
+        radius = int(math.ceil(dist)) + 2
+        if radius < 30:
+            a, b = numpy.ogrid[-radius:radius + 1, -radius:radius +
+                               1]   # build square index arrays centered at 0,0
+            index = a ** 2 + b ** 2 <= radius ** 2                        # create a boolean mask to filter indicies outside radius
+            a, b = index.nonzero()
+                # grad the (i,j)'s of the elements within radius.
+            rows, cols = row + a - radius, col + b - radius                   # recenter the (i,j)'s over the Q point
+            #### Filter indicies by bounds of the grid.
+            ### filters must be applied one at a time
+            ### I havn't figure out a way to group these
+            filter = rows >= 0
+            rows = rows[filter]
+            cols = cols[filter]  # i >= 0
+            filter = rows < self.i_range
+            rows = rows[filter]
+            cols = cols[filter]  # i < i_range
+            filter = cols >= 0
+            rows = rows[
+                filter]
+            cols = cols[filter]  # j >= 0
+            filter = cols < self.j_range
+            rows = rows[
+                filter]
+            cols = cols[filter]  # j < j_range
+            if DEBUG:
+                maskCopy = self.mask.copy().astype(float)
+                maskCopy += self.endMask.astype(float)
+                maskCopy[rows, cols] += 1
+                maskCopy[row, col] += 3
+                i = pylab.matshow(maskCopy, origin='lower', extent=self.x_range + self.y_range, fignum=1)
+                #raw_input('pause')
+            ### All that was just setup for this one line...
+            idx = self.mask[rows, cols].nonzero()[0] # Filter out empty bins.
+            rows, cols = rows[idx], cols[idx]        # (i,j)'s of the filled grid cells within radius.
+
+            for t in zip(rows, cols):
+                possibles.update(self.hash[t])
+
+            if DEBUG:
+                print "possibles", possibles
+        else:
+        ### The old way...
+        ### previously I was using kd.query_ball_point on, but the performance was terrible.
+            I = self.kd2.query_ball_point(grid_loc, radius)
+            for i in I:
+                t = tuple(self.kd.data[i])
+                possibles.update(self.hash[t])
+        return list(possibles)
+
+
+def random_segments(n):
+    # n random LineSegments with endpoints uniform in the unit square.
+    segs = []
+    for i in xrange(n):
+        a, b, c, d = [random.random() for x in [1, 2, 3, 4]]
+        seg = LineSegment(Point((a, b)), Point((c, d)))
+        segs.append(seg)
+    return segs
+
+
+def random_points(n):
+    # n random Points uniform in the unit square.
+    return [Point((random.random(), random.random())) for x in xrange(n)]
+
+
+def combo_check(bins, segments, qpoints):
+    G = SegmentLocator(segments, bins)
+    G2 = BruteSegmentLocator(segs)
+    for pt in qpoints:
+        a = G.nearest(pt)
+        b = G2.nearest(pt)
+        if a != b:
+            print a, b, a == b
+            global DEBUG
+            DEBUG = True
+            a = G.nearest(pt)
+            print a
+            a = segments[a]
+            b = segments[b]
+            print "pt to a (grid)", get_segment_point_dist(a, pt)
+            print "pt to b (brut)", get_segment_point_dist(b, pt)
+            raw_input()
+            pylab.clf()
+            DEBUG = False
+
+
+def brute_check(segments, qpoints):
+    t0 = time.time()
+    G2 = BruteSegmentLocator(segs)
+    t1 = time.time()
+    print "Created Brute in %0.4f seconds" % (t1 - t0)
+    t2 = time.time()
+    q = map(G2.nearest, qpoints)
+    t3 = time.time()
+    print "Brute Found %d matches in %0.4f seconds" % (len(qpoints), t3 - t2)
+    print "Total Brute Time:", t3 - t0
+    print
+    return q
+
+
+def grid_check(bins, segments, qpoints, visualize=False):
+    """Time SegmentLocator construction and queries; returns queries/second."""
+    t0 = time.time()
+    G = SegmentLocator(segments, bins)
+    t1 = time.time()
+    G.grid.kd  # force the lazy KD-tree build so it is timed separately
+    t2 = time.time()
+    print "Created Grid in %0.4f seconds" % (t1 - t0)
+    print "Created KDTree in %0.4f seconds" % (t2 - t1)
+    if visualize:
+        i = pylab.matshow(G.grid.mask, origin='lower',
+                          extent=G.grid.x_range + G.grid.y_range)
+
+    t2 = time.time()
+    q = map(G.nearest, qpoints)
+    t3 = time.time()
+    print "Grid Found %d matches in %0.4f seconds" % (len(qpoints), t3 - t2)
+    print "Total Grid Time:", t3 - t0
+    qps = len(qpoints) / (t3 - t2)
+    print "q/s:", qps
+    #print
+    return qps
+
+
+def binSizeTest():
+    q = 100
+    minN = 1000
+    maxN = 10000
+    stepN = 1000
+    minB = 250
+    maxB = 2000
+    stepB = 250
+    sizes = range(minN, maxN, stepN)
+    binSizes = range(minB, maxB, stepB)
+    results = numpy.zeros((len(sizes), len(binSizes)))
+    for row, n in enumerate(sizes):
+        segs = random_segments(n)
+        qpts = random_points(q)
+        for col, bins in enumerate(binSizes):
+            print "N, Bins:", n, bins
+            qps = test_grid(bins, segs, qpts)
+            results[row, col] = qps
+    return results
+
+if __name__ == '__main__':
+    # Ad-hoc benchmark driver: build random segments and query points, then a
+    # SegmentLocator, for interactive experimentation under pylab.
+    import pylab
+    pylab.ion()
+
+    n = 100
+    q = 1000
+
+    t0 = time.time()
+    segs = random_segments(n)
+    t1 = time.time()
+    qpts = random_points(q)
+    t2 = time.time()
+    print "segments:", t1 - t0
+    print "points:", t2 - t1
+    #test_brute(segs,qpts)
+    #test_grid(50, segs, qpts)
+
+    SG = SegmentLocator(segs)
+    grid = SG.grid
diff --git a/pysal/cg/shapes.py b/pysal/cg/shapes.py
new file mode 100644
index 0000000..2864005
--- /dev/null
+++ b/pysal/cg/shapes.py
@@ -0,0 +1,1913 @@
+"""
+Computational geometry code for PySAL: Python Spatial Analysis Library.
+
+"""
+
+__author__ = "Sergio J. Rey, Xinyue Ye, Charles Schmidt, Andrew Winslow"
+__credits__ = "Copyright (c) 2005-2009 Sergio J. Rey"
+
+import doctest
+import math
+from warnings import warn
+from sphere import arcdist
+
+__all__ = ['Point', 'LineSegment', 'Line', 'Ray', 'Chain', 'Polygon',
+           'Rectangle', 'asShape']
+
+
+def asShape(obj):
+    """
+    Returns a pysal shape object from obj.
+    obj must support the __geo_interface__.
+    """
+    if hasattr(obj, '__geo_interface__'):
+        geo = obj.__geo_interface__
+    else:
+        geo = obj
+    # NOTE(review): anything exposing a `.type` *attribute* (rather than a
+    # plain geo-interface mapping with a 'type' key) is rejected here --
+    # confirm this matches the intended duck-typing contract.
+    if hasattr(geo, 'type'):
+        raise TypeError('%r does not appear to be a shape object' % (obj))
+    geo_type = geo['type'].lower()
+    #if geo_type.startswith('multi'):
+    #    raise NotImplementedError, "%s are not supported at this time."%geo_type
+    # Dispatch on the lowercase GeoJSON type name (table defined elsewhere
+    # in this module).
+    if geo_type in _geoJSON_type_to_Pysal_type:
+        return _geoJSON_type_to_Pysal_type[geo_type].__from_geo_interface__(geo)
+    else:
+        raise NotImplementedError(
+            "%s is not supported at this time." % geo_type)
+
+
+class Point(object):
+    """
+    Geometric class for point objects.
+
+    Attributes
+    ----------
+    None
+    """
+    def __init__(self, loc):
+        """
+        Returns an instance of a Point object.
+
+        __init__((number, number)) -> Point
+
+        Test tag: <tc>#is#Point.__init__</tc>
+        Test tag: <tc>#tests#Point.__init__</tc>
+
+        Parameters
+        ----------
+        loc : tuple location (number x-tuple, x > 1)
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> p = Point((1, 3))
+        """
+        self.__loc = tuple(map(float, loc))
+
+    @classmethod
+    def __from_geo_interface__(cls, geo):
+        # Build a Point from a geo-interface mapping ({'coordinates': ...}).
+        return cls(geo['coordinates'])
+
+    @property
+    def __geo_interface__(self):
+        return {'type': 'Point', 'coordinates': self.__loc}
+
+    def __lt__(self, other):
+        """
+        Tests if the Point is < another object.
+
+        __lt__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) < Point((0,1))
+        False
+        >>> Point((0,1)) < Point((1,1))
+        True
+        """
+        return (self.__loc) < (other.__loc)
+
+    def __le__(self, other):
+        """
+        Tests if the Point is <= another object.
+
+        __le__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) <= Point((0,1))
+        True
+        >>> Point((0,1)) <= Point((1,1))
+        True
+        """
+        return (self.__loc) <= (other.__loc)
+
+    def __eq__(self, other):
+        """
+        Tests if the Point is equal to another object.
+
+        __eq__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) == Point((0,1))
+        True
+        >>> Point((0,1)) == Point((1,1))
+        False
+        """
+        # Non-Point `other` lacks the name-mangled _Point__loc attribute;
+        # treat that as "not equal" rather than raising.
+        try:
+            return (self.__loc) == (other.__loc)
+        except AttributeError:
+            return False
+
+    def __ne__(self, other):
+        """
+        Tests if the Point is not equal to another object.
+
+        __ne__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) != Point((0,1))
+        False
+        >>> Point((0,1)) != Point((1,1))
+        True
+        """
+        # Non-Point `other` is always considered unequal (mirrors __eq__).
+        try:
+            return (self.__loc) != (other.__loc)
+        except AttributeError:
+            return True
+
+    def __gt__(self, other):
+        """
+        Tests if the Point is > another object.
+
+        __gt__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) > Point((0,1))
+        False
+        >>> Point((0,1)) > Point((1,1))
+        False
+        """
+        return (self.__loc) > (other.__loc)
+
+    def __ge__(self, other):
+        """
+        Tests if the Point is >= another object.
+
+        __ge__(x) -> bool
+
+        Parameters
+        ----------
+        other : an object to test equality against
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1)) >= Point((0,1))
+        True
+        >>> Point((0,1)) >= Point((1,1))
+        False
+        """
+        return (self.__loc) >= (other.__loc)
+
+    def __hash__(self):
+        """
+        Returns the hash of the Point's location.
+
+        x.__hash__() -> hash(x)
+
+        Parameters
+        ----------
+        None
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> hash(Point((0,1))) == hash(Point((0,1)))
+        True
+        >>> hash(Point((0,1))) == hash(Point((1,1)))
+        False
+        """
+        return hash(self.__loc)
+
+    def __getitem__(self, *args):
+        """
+        Return the coordinate for the given dimension.
+
+        x.__getitem__(i) -> x[i]
+
+        Parameters
+        ----------
+        i : index of the desired dimension.
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> p = Point((5.5,4.3))
+        >>> p[0] == 5.5
+        True
+        >>> p[1] == 4.3
+        True
+        """
+        return self.__loc.__getitem__(*args)
+
+    def __getslice__(self, *args):
+        """
+        Return the coordinate for the given dimensions.
+
+        x.__getslice__(i,j) -> x[i:j]
+
+        Parameters
+        ----------
+        i : index to start slice
+        j : index to end slice (excluded).
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> p = Point((3,6,2))
+        >>> p[:2] == (3,6)
+        True
+        >>> p[1:2] == (6,)
+        True
+        """
+        # NOTE(review): __getslice__ only exists in Python 2; under Python 3
+        # slicing goes through __getitem__ with a slice object.
+        return self.__loc.__getslice__(*args)
+
+    def __len__(self):
+        """
+        Returns the number of dimension in the point.
+
+        __len__() -> int
+
+        Parameters
+        ----------
+        None
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> len(Point((1,2)))
+        2
+        """
+        return len(self.__loc)
+
+    def __repr__(self):
+        """
+        Returns the string representation of the Point
+
+        __repr__() -> string
+
+        Parameters
+        ----------
+        None
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> Point((0,1))
+        (0.0, 1.0)
+        """
+        return self.__loc.__repr__()
+
+    def __str__(self):
+        """
+        Returns a string representation of a Point object.
+
+        __str__() -> string
+
+        Test tag: <tc>#is#Point.__str__</tc>
+        Test tag: <tc>#tests#Point.__str__</tc>
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> p = Point((1, 3))
+        >>> str(p)
+        '(1.0, 3.0)'
+        """
+        return str(self.__loc)
+
+
+class LineSegment(object):
+    """
+    Geometric representation of line segment objects.
+
+    Parameters
+    ----------
+
+    start_pt     : Point
+                   Point where segment begins
+    end_pt       : Point
+                   Point where segment ends
+
+    Attributes
+    ----------
+
+    p1              : Point
+                      Starting point
+    p2              : Point
+                      Ending point
+    bounding_box    : tuple
+                      The bounding box of the segment (number 4-tuple)
+    len             : float
+                      The length of the segment
+    line            : Line
+                      The line on which the segment lies
+
+    """
+
+    def __init__(self, start_pt, end_pt):
+        """
+        Creates a LineSegment object.
+
+        __init__(Point, Point) -> LineSegment
+
+        Test tag: <tc>#is#LineSegment.__init__</tc>
+        Test tag: <tc>#tests#LineSegment.__init__</tc>
+
+
+        Attributes
+        ----------
+        None
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        """
+        self._p1 = start_pt
+        self._p2 = end_pt
+        self._reset_props()
+
+    def __str__(self):
+        return "LineSegment(" + str(self._p1) + ", " + str(self._p2) + ")"
+
+    def __eq__(self, other):
+        """
+        Returns true if self and other are the same line segment
+
+        Examples
+        --------
+        >>> l1 = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> l2 = LineSegment(Point((5, 6)), Point((1, 2)))
+        >>> l1 == l2
+        True
+        >>> l2 == l1
+        True
+        """
+        if not isinstance(other, self.__class__):
+            return False
+        if (other.p1 == self._p1 and other.p2 == self._p2):
+            return True
+        elif (other.p2 == self._p1 and other.p1 == self._p2):
+            return True
+        return False
+
+    def intersect(self, other):
+        """
+        Test whether segment intersects with other segment
+
+        Handles endpoints of segments being on other segment
+
+        Examples
+        --------
+
+        >>> ls = LineSegment(Point((5,0)), Point((10,0)))
+        >>> ls1 = LineSegment(Point((5,0)), Point((10,1)))
+        >>> ls.intersect(ls1)
+        True
+        >>> ls2 = LineSegment(Point((5,1)), Point((10,1)))
+        >>> ls.intersect(ls2)
+        False
+        >>> ls2 = LineSegment(Point((7,-1)), Point((7,2)))
+        >>> ls.intersect(ls2)
+        True
+        >>>
+        """
+        ccw1 = self.sw_ccw(other.p2)
+        ccw2 = self.sw_ccw(other.p1)
+        ccw3 = other.sw_ccw(self.p1)
+        ccw4 = other.sw_ccw(self.p2)
+
+        return ccw1*ccw2 <= 0 and ccw3*ccw4 <=0
+
+
+
+    def _reset_props(self):
+        """
+        HELPER METHOD. DO NOT CALL.
+
+        Resets attributes which are functions of other attributes. The getters for these attributes (implemented as
+        properties) then recompute their values if they have been reset since the last call to the getter.
+
+        _reset_props() -> None
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> ls._reset_props()
+        """
+        self._bounding_box = None
+        self._len = None
+        self._line = False
+
+    def _get_p1(self):
+        """
+        HELPER METHOD. DO NOT CALL.
+
+        Returns the p1 attribute of the line segment.
+
+        _get_p1() -> Point
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> r = ls._get_p1()
+        >>> r == Point((1, 2))
+        True
+        """
+        return self._p1
+
+    def _set_p1(self, p1):
+        """
+        HELPER METHOD. DO NOT CALL.
+
+        Sets the p1 attribute of the line segment.
+
+        _set_p1(Point) -> Point
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> r = ls._set_p1(Point((3, -1)))
+        >>> r == Point((3.0, -1.0))
+        True
+        """
+        self._p1 = p1
+        self._reset_props()
+        return self._p1
+
+    p1 = property(_get_p1, _set_p1)
+
+    def _get_p2(self):
+        """
+        HELPER METHOD. DO NOT CALL.
+
+        Returns the p2 attribute of the line segment.
+
+        _get_p2() -> Point
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> r = ls._get_p2()
+        >>> r == Point((5, 6))
+        True
+        """
+        return self._p2
+
+    def _set_p2(self, p2):
+        """
+        HELPER METHOD. DO NOT CALL.
+
+        Sets the p2 attribute of the line segment.
+
+        _set_p2(Point) -> Point
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> r = ls._set_p2(Point((3, -1)))
+        >>> r == Point((3.0, -1.0))
+        True
+        """
+        self._p2 = p2
+        self._reset_props()
+        return self._p2
+
+    p2 = property(_get_p2, _set_p2)
+
+    def is_ccw(self, pt):
+        """
+        Returns whether a point is counterclockwise of the segment. Exclusive.
+
+        is_ccw(Point) -> bool
+
+        Test tag: <tc>#is#LineSegment.is_ccw</tc>
+        Test tag: <tc>#tests#LineSegment.is_ccw</tc>
+
+        Parameters
+        ----------
+        pt : point lying ccw or cw of a segment
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
+        >>> ls.is_ccw(Point((2, 2)))
+        True
+        >>> ls.is_ccw(Point((2, -2)))
+        False
+        """
+        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
+        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
+
+        return v1[0] * v2[1] - v1[1] * v2[0] > 0
+
+    def is_cw(self, pt):
+        """
+        Returns whether a point is clockwise of the segment. Exclusive.
+
+        is_cw(Point) -> bool
+
+        Test tag: <tc>#is#LineSegment.is_cw</tc>
+        Test tag: <tc>#tests#LineSegment.is_cw</tc>
+
+        Parameters
+        ----------
+        pt : point lying ccw or cw of a segment
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((0, 0)), Point((5, 0)))
+        >>> ls.is_cw(Point((2, 2)))
+        False
+        >>> ls.is_cw(Point((2, -2)))
+        True
+        """
+        v1 = (self._p2[0] - self._p1[0], self._p2[1] - self._p1[1])
+        v2 = (pt[0] - self._p1[0], pt[1] - self._p1[1])
+        return v1[0] * v2[1] - v1[1] * v2[0] < 0
+
    def sw_ccw(self, pt):
        """
        Sedgewick test for pt being ccw of segment

        Returns
        -------

        1 if turn from self.p1 to self.p2 to pt is ccw
        -1 if turn from self.p1 to self.p2 to pt is cw
        -1 if the points are collinear and self.p1 is in the middle
        1 if the points are collinear and self.p2 is in the middle
        0 if the points are collinear and pt is in the middle

        Notes
        -----
        Follows Sedgewick's ``ccw`` orientation predicate: unlike
        :meth:`is_ccw`/:meth:`is_cw` it distinguishes the three collinear
        sub-cases by where the third point falls on the line.
        """

        p0 = self.p1
        p1 = self.p2
        p2 = pt

        # displacements of p1 and pt relative to the segment origin p0
        dx1 = p1[0] - p0[0]
        dy1 = p1[1] - p0[1]
        dx2 = p2[0] - p0[0]
        dy2 = p2[1] - p0[1]

        # dy1*dx2 < dy2*dx1  <=>  cross(p0->p1, p0->p2) > 0  <=>  ccw turn
        if dy1*dx2 < dy2*dx1:
            return 1
        if dy1*dx2 > dy2*dx1:
            return -1
        # collinear: opposite-signed displacements mean p0 lies between
        # pt and p1 (self.p1 in the middle)
        if (dx1*dx2 < 0 or dy1*dy2 <0):
                return -1
        # collinear, same side of p0: compare squared distances to decide
        # whether pt (return 0) or self.p2 (return 1) is in the middle
        if dx1*dx1 + dy1*dy1 >= dx2*dx2 + dy2*dy2:
            return 0
        else:
            return 1
+
+
+
+
+    def get_swap(self):
+        """
+        Returns a LineSegment object which has its endpoints swapped.
+
+        get_swap() -> LineSegment
+
+        Test tag: <tc>#is#LineSegment.get_swap</tc>
+        Test tag: <tc>#tests#LineSegment.get_swap</tc>
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> swap = ls.get_swap()
+        >>> swap.p1[0]
+        5.0
+        >>> swap.p1[1]
+        6.0
+        >>> swap.p2[0]
+        1.0
+        >>> swap.p2[1]
+        2.0
+        """
+        return LineSegment(self._p2, self._p1)
+
+    @property
+    def bounding_box(self):
+        """
+        Returns the minimum bounding box of a LineSegment object.
+
+        Test tag: <tc>#is#LineSegment.bounding_box</tc>
+        Test tag: <tc>#tests#LineSegment.bounding_box</tc>
+
+        bounding_box -> Rectangle
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((1, 2)), Point((5, 6)))
+        >>> ls.bounding_box.left
+        1.0
+        >>> ls.bounding_box.lower
+        2.0
+        >>> ls.bounding_box.right
+        5.0
+        >>> ls.bounding_box.upper
+        6.0
+        """
+        if self._bounding_box is None:  # If LineSegment attributes p1, p2 changed, recompute
+            self._bounding_box = Rectangle(
+                min([self._p1[0], self._p2[0]]), min([
+                    self._p1[1], self._p2[1]]),
+                max([self._p1[0], self._p2[0]]), max([self._p1[1], self._p2[1]]))
+        return Rectangle(
+            self._bounding_box.left, self._bounding_box.lower, self._bounding_box.right,
+            self._bounding_box.upper)
+
+    @property
+    def len(self):
+        """
+        Returns the length of a LineSegment object.
+
+        Test tag: <tc>#is#LineSegment.len</tc>
+        Test tag: <tc>#tests#LineSegment.len</tc>
+
+        len() -> number
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((2, 2)), Point((5, 2)))
+        >>> ls.len
+        3.0
+        """
+        if self._len is None:  # If LineSegment attributes p1, p2 changed, recompute
+            self._len = math.hypot(self._p1[0] - self._p2[0],
+                                   self._p1[1] - self._p2[1])
+        return self._len
+
+    @property
+    def line(self):
+        """
+        Returns a Line object of the line which the segment lies on.
+
+        Test tag: <tc>#is#LineSegment.line</tc>
+        Test tag: <tc>#tests#LineSegment.line</tc>
+
+        line() -> Line
+
+        Attributes
+        ----------
+
+        Examples
+        --------
+        >>> ls = LineSegment(Point((2, 2)), Point((3, 3)))
+        >>> l = ls.line
+        >>> l.m
+        1.0
+        >>> l.b
+        0.0
+        """
+        if self._line == False:
+            dx = self._p1[0] - self._p2[0]
+            dy = self._p1[1] - self._p2[1]
+            if dx == 0 and dy == 0:
+                self._line = None
+            elif dx == 0:
+                self._line = VerticalLine(self._p1[0])
+            else:
+                m = dy / float(dx)
+                b = self._p1[1] - m * self._p1[0]  # y - mx
+                self._line = Line(m, b)
+        return self._line
+
+
class VerticalLine:
    """
    Geometric representation of vertical line objects.

    Attributes
    ----------
    x       : float
              x-intercept
    """
    def __init__(self, x):
        """
        Returns a VerticalLine object.

        __init__(number) -> VerticalLine

        Parameters
        ----------
        x : the x-intercept of the line

        Examples
        --------
        >>> ls = VerticalLine(0)
        >>> ls.m
        inf
        >>> ls.b
        nan
        """
        self._x = float(x)
        # a vertical line has infinite slope and no y-intercept
        self.m = float('inf')
        self.b = float('nan')

    def x(self, y):
        """
        Returns the x-value of the line at a particular y-value.

        x(number) -> number

        Parameters
        ----------
        y : the y-value to compute x at

        Examples
        --------
        >>> l = VerticalLine(0)
        >>> l.x(0.25)
        0.0
        """
        # the x-coordinate is constant along a vertical line
        return self._x

    def y(self, x):
        """
        Returns the y-value of the line at a particular x-value.

        y(number) -> number

        Parameters
        ----------
        x : the x-value to compute y at

        Examples
        --------
        >>> l = VerticalLine(1)
        >>> l.y(1)
        nan
        """
        # no single y corresponds to any x on a vertical line
        return float('nan')
+
+
class Line:
    """
    Geometric representation of line objects.

    Attributes
    ----------
    m       : float
              slope
    b       : float
              y-intercept

    """

    def __init__(self, m, b):
        """
        Returns a Line object.

        __init__(number, number) -> Line

        Test tag: <tc>#is#Line.__init__</tc>
        Test tag: <tc>#tests#Line.__init__</tc>

        Parameters
        ----------
        m : the slope of the line
        b : the y-intercept of the line

        Raises
        ------
        ArithmeticError
            If the slope is positive or negative infinity (use
            VerticalLine for vertical lines).

        Examples
        --------
        >>> ls = Line(1, 0)
        >>> ls.m
        1.0
        >>> ls.b
        0.0
        """
        # BUG FIX: the original tested `m == float('inf')` twice, so a
        # slope of negative infinity slipped through the guard.
        if m == float('inf') or m == float('-inf'):
            raise ArithmeticError('Slope cannot be infinite.')
        self.m = float(m)
        self.b = float(b)

    def x(self, y):
        """
        Returns the x-value of the line at a particular y-value.

        x(number) -> number

        Parameters
        ----------
        y : the y-value to compute x at

        Raises
        ------
        ArithmeticError
            If the line is horizontal (slope zero), where x is undefined.

        Examples
        --------
        >>> l = Line(0.5, 0)
        >>> l.x(0.25)
        0.5
        """
        if self.m == 0:
            raise ArithmeticError('Cannot solve for X when slope is zero.')
        # invert y = mx + b
        return (y - self.b) / self.m

    def y(self, x):
        """
        Returns the y-value of the line at a particular x-value.

        y(number) -> number

        Parameters
        ----------
        x : the x-value to compute y at

        Examples
        --------
        >>> l = Line(1, 0)
        >>> l.y(1)
        1.0
        """
        if self.m == 0:
            return self.b
        return self.m * x + self.b
+
+
class Ray:
    """
    Geometric representation of ray objects.

    Attributes
    ----------

    o       : Point
              Origin (point where ray originates)
    p       : Point
              Second point on the ray (not point where ray originates)
    """

    def __init__(self, origin, second_p):
        """
        Returns a ray with the values specified.

        __init__(Point, Point) -> Ray

        Parameters
        ----------
        origin   : the point where the ray originates
        second_p : the second point specifying the ray (not the origin)

        Examples
        --------
        >>> l = Ray(Point((0, 0)), Point((1, 0)))
        >>> str(l.o)
        '(0.0, 0.0)'
        >>> str(l.p)
        '(1.0, 0.0)'
        """
        # a ray is fully determined by its origin plus any second point
        # along its direction; both are stored as given
        self.o = origin
        self.p = second_p
+
+
class Chain(object):
    """
    Geometric representation of a chain, also known as a polyline.

    A chain may consist of a single part (one list of Points) or of
    multiple disjoint parts (a list of Point lists).

    Attributes
    ----------

    vertices    : list
                  List of Points of the vertices of the chain in order.
    len         : float
                  The geometric length of the chain.

    """

    def __init__(self, vertices):
        """
        Returns a chain created from the points specified.

        __init__(Point list or list of Point lists) -> Chain

        Parameters
        ----------
        vertices : list -- Point list or list of Point lists.

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        """
        # A list-of-lists is treated as a multipart chain; otherwise the
        # input is wrapped as a single part.  NOTE(review): an empty
        # `vertices` list would raise IndexError here.
        if isinstance(vertices[0], list):
            self._vertices = [part for part in vertices]
        else:
            self._vertices = [vertices]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo):
        # Builds a single-part chain from a geo-interface mapping.
        # NOTE(review): assumes LineString-style flat coordinates; a
        # MultiLineString's nested coordinates would not be handled.
        verts = [Point(pt) for pt in geo['coordinates']]
        return cls(verts)

    @property
    def __geo_interface__(self):
        # Exposes the chain as a geo-interface LineString; multipart
        # chains are flattened via the `vertices` property.
        return {'type': 'LineString', 'coordinates': self.vertices}

    def _reset_props(self):
        """
        HELPER METHOD. DO NOT CALL.

        Resets attributes which are functions of other attributes. The getters for these attributes (implemented as
        properties) then recompute their values if they have been reset since the last call to the getter.

        _reset_props() -> None

        Examples
        --------
        >>> ls = Chain([Point((1, 2)), Point((5, 6))])
        >>> ls._reset_props()
        """
        # lazily-recomputed caches
        self._len = None
        self._arclen = None
        self._bounding_box = None

    @property
    def vertices(self):
        """
        Returns the vertices of the chain, all parts concatenated, in the
        order they were supplied (no reordering is performed).

        vertices -> Point list

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> verts = c.vertices
        >>> len(verts)
        4
        """
        # flatten the list of parts into one list of Points
        return sum([part for part in self._vertices], [])

    @property
    def parts(self):
        """
        Returns the parts of the chain.

        parts -> Point list

        Examples
        --------
        >>> c = Chain([[Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))],[Point((2,1)),Point((2,2)),Point((1,2)),Point((1,1))]])
        >>> len(c.parts)
        2
        """
        # shallow copies so callers cannot mutate the internal lists
        return [[v for v in part] for part in self._vertices]

    @property
    def bounding_box(self):
        """
        Returns the bounding box of the chain.

        bounding_box -> Rectangle

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> c.bounding_box.left
        0.0
        >>> c.bounding_box.lower
        0.0
        >>> c.bounding_box.right
        2.0
        >>> c.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]), min([v[1] for v in vertices]),
                max([v[0] for v in vertices]), max([v[1] for v in vertices]))
        return self._bounding_box

    @property
    def len(self):
        """
        Returns the geometric length of the chain (sum of the Euclidean
        lengths of all parts; parts are not connected to each other).

        len -> number

        Examples
        --------
        >>> c = Chain([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((2, 1))])
        >>> c.len
        3.0
        >>> c = Chain([[Point((0, 0)), Point((1, 0)), Point((1, 1))],[Point((10,10)),Point((11,10)),Point((11,11))]])
        >>> c.len
        4.0
        """
        def dist(v1, v2):
            # Euclidean distance between consecutive vertices
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        def part_perimeter(part):
            # sum of segment lengths within one part (xrange: Python 2)
            return sum([dist(part[i], part[i + 1]) for i in xrange(len(part) - 1)])

        if self._len is None:
            self._len = sum([part_perimeter(part) for part in self._vertices])
        return self._len

    @property
    def arclen(self):
        """
        Returns the geometric length of the chain computed using arcdistance (meters).

        len -> number

        Notes
        -----
        Relies on the module-level `arcdist` helper; the * 1000. factor
        converts its result to meters (presumably from kilometers --
        TODO confirm against arcdist's definition).
        """
        def part_perimeter(part):
            return sum([arcdist(part[i], part[i + 1]) * 1000. for i in xrange(len(part) - 1)])
        if self._arclen is None:
            self._arclen = sum(
                [part_perimeter(part) for part in self._vertices])
        return self._arclen

    @property
    def segments(self):
        """
        Returns the segments that compose the Chain, as one list of
        LineSegments per part.
        """
        return [[LineSegment(a, b) for (a, b) in zip(part[:-1], part[1:])] for part in self._vertices]
+
+
class Ring(object):
    """
    Geometric representation of a Linear Ring

    Linear Rings must be closed, the first and last point must be the same. Open rings will be closed.

    This class exists primarily as a geometric primitive to form complex polygons with multiple rings and holes.

    The ordering of the vertices is ignored and will not be altered.

    Parameters
    ----------
    vertices : list -- a list of vertices

    Attributes
    __________
    vertices        : list
                      List of Points with the vertices of the ring
    len             : int
                      Number of vertices
    perimeter       : float
                      Geometric length of the perimeter of the ring
    bounding_box    : Rectangle
                      Bounding box of the ring
    area            : float
                      area enclosed by the ring
    centroid        : tuple
                      The centroid of the ring defined by the 'center of gravity' or 'center or mass'
    """
    def __init__(self, vertices):
        # silently close an open ring by appending a copy of the first vertex
        if vertices[0] != vertices[-1]:
            vertices = vertices[:] + vertices[0:1]
        self.vertices = tuple(vertices)
        # lazily-computed caches, filled on first property access
        self._perimeter = None
        self._bounding_box = None
        self._area = None
        self._centroid = None

    def __len__(self):
        return len(self.vertices)

    @property
    def len(self):
        return len(self)

    @staticmethod
    def dist(v1, v2):
        # Euclidean distance between two 2-d points
        return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

    @property
    def perimeter(self):
        if self._perimeter is None:
            verts = self.vertices
            # xrange(-1, ...) also pairs the last vertex with the first
            edges = [self.dist(verts[i], verts[i + 1])
                     for i in xrange(-1, len(self) - 1)]
            self._perimeter = sum(edges)
        return self._perimeter

    @property
    def bounding_box(self):
        """
        Returns the bounding box of the ring

        bounding_box -> Rectangle

        Examples
        --------
        >>> r = Ring([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0,0))])
        >>> r.bounding_box.left
        0.0
        >>> r.bounding_box.lower
        0.0
        >>> r.bounding_box.right
        2.0
        >>> r.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            xs = [v[0] for v in self.vertices]
            ys = [v[1] for v in self.vertices]
            self._bounding_box = Rectangle(min(xs), min(ys), max(xs), max(ys))
        return self._bounding_box

    @property
    def area(self):
        """
        Returns the area of the ring.

        area -> number

        Examples
        --------
        >>> r = Ring([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0,0))])
        >>> r.area
        2.0
        """
        # unsigned magnitude of the shoelace result
        return abs(self.signed_area)

    @property
    def signed_area(self):
        if self._area is None:
            verts = self.vertices
            # shoelace formula; the ring is closed, so iterating over the
            # first N-1 vertices covers every edge exactly once
            A = 0.0
            for i in xrange(len(self) - 1):
                A += (verts[i][0] * verts[i + 1][1] -
                      verts[i + 1][0] * verts[i][1])
            A = A / 2.0
            self._area = A
        return self._area

    @property
    def centroid(self):
        """
        Returns the centroid of the ring.

        centroid -> Point

        Notes
        -----
        The centroid returned by this method is the geometric centroid.
        Also known as the 'center of gravity' or 'center of mass'.


        Examples
        --------
        >>> r = Ring([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1)), Point((0,0))])
        >>> str(r.centroid)
        '(1.0, 0.5)'
        """
        if self._centroid is None:
            verts = self.vertices
            A = self.signed_area
            cx = 0
            cy = 0
            # standard polygon-centroid formula, weighted by each edge's
            # cross-product contribution
            for i in xrange(len(self) - 1):
                f = (verts[i][0] * verts[i + 1][1] -
                     verts[i + 1][0] * verts[i][1])
                cx += (verts[i][0] + verts[i + 1][0]) * f
                cy += (verts[i][1] + verts[i + 1][1]) * f
            cx = 1.0 / (6 * A) * cx
            cy = 1.0 / (6 * A) * cy
            self._centroid = Point((cx, cy))
        return self._centroid
+
+
class Polygon(object):
    """
    Geometric representation of polygon objects.

    Attributes
    ----------
    vertices        : list
                      List of Points with the vertices of the Polygon in
                      clockwise order
    len             : int
                      Number of vertices including holes
    perimeter       : float
                      Geometric length of the perimeter of the Polygon
    bounding_box    : Rectangle
                      Bounding box of the polygon
    bbox            : List
                      [left, lower, right, upper]
    area            : float
                      Area enclosed by the polygon
    centroid        : tuple
                      The 'center of gravity', i.e. the mean point of the polygon.
    """

    def __init__(self, vertices, holes=None):
        """
        Returns a polygon created from the objects specified.

        __init__(Point list or list of Point lists, holes list ) -> Polygon

        Parameters
        ----------
        vertices : list -- a list of vertices or a list of lists of vertices.
        holes    : list -- a list of sub-polygons to be considered as holes.

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        """
        self._part_rings = []
        self._hole_rings = []

        # normalize any ring to clockwise order (holes included)
        def clockwise(part):
            if standalone.is_clockwise(part):
                return part[:]
            else:
                return part[::-1]

        # list-of-lists input means a multipart polygon.
        # NOTE(review): `map` returns a list only under Python 2; this
        # module is Python 2 code (see xrange below).
        if isinstance(vertices[0], list):
            self._part_rings = map(Ring, vertices)
            self._vertices = [clockwise(part) for part in vertices]
        else:
            self._part_rings = [Ring(vertices)]
            self._vertices = [clockwise(vertices)]
        if holes is not None and holes != []:
            if isinstance(holes[0], list):
                self._hole_rings = map(Ring, holes)
                self._holes = [clockwise(hole) for hole in holes]
            else:
                self._hole_rings = [Ring(holes)]
                self._holes = [clockwise(holes)]
        else:
            # sentinel: one empty hole list, so self._holes[0] is falsy
            self._holes = [[]]
        self._reset_props()

    @classmethod
    def __from_geo_interface__(cls, geo):
        """
        While pysal does not differentiate polygons and multipolygons GEOS,Shapely and geoJSON do.
        In GEOS, etc, polygons may only have a single exterior ring, all other parts are holes.
        MultiPolygons are simply a list of polygons.
        """
        geo_type = geo['type'].lower()
        if geo_type == 'multipolygon':
            parts = []
            holes = []
            # for each polygon: ring 0 is the exterior, the rest are holes
            for polygon in geo['coordinates']:
                verts = [[Point(pt) for pt in part] for part in polygon]
                parts += verts[0:1]
                holes += verts[1:]
            if not holes:
                holes = None
            return cls(parts, holes)
        else:
            verts = [[Point(pt) for pt in part] for part in geo['coordinates']]
            return cls(verts[0:1], verts[1:])

    @property
    def __geo_interface__(self):
        # NOTE(review): for multipart polygons, all holes are attached to
        # the FIRST part only -- the part each hole belongs to is not
        # tracked; verify this is acceptable for consumers.
        if len(self.parts) > 1:
            geo = {'type': 'MultiPolygon', 'coordinates': [[
                part] for part in self.parts]}
            if self._holes[0]:
                geo['coordinates'][0] += self._holes
            return geo
        if self._holes[0]:
            return {'type': 'Polygon', 'coordinates': self._vertices + self._holes}
        else:
            return {'type': 'Polygon', 'coordinates': self._vertices}

    def _reset_props(self):
        # lazily-recomputed caches.  NOTE(review): _area and _centroid are
        # reset here but never written by `area`/`centroid` below, so those
        # two properties recompute on every access.
        self._perimeter = None
        self._bounding_box = None
        self._bbox = None
        self._area = None
        self._centroid = None
        self._len = None

    def __len__(self):
        return self.len

    @property
    def len(self):
        """
        Returns the number of vertices in the polygon.

        len -> int

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> p1.len
        4
        >>> len(p1)
        4
        """
        if self._len is None:
            self._len = len(self.vertices)
        return self._len

    @property
    def vertices(self):
        """
        Returns the vertices of the polygon in clockwise order,
        part vertices first, then hole vertices.

        vertices -> Point list

        Examples
        --------
        >>> p1 = Polygon([Point((0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
        >>> len(p1.vertices)
        4
        """
        return sum([part for part in self._vertices], []) + sum([part for part in self._holes], [])

    @property
    def holes(self):
        """
        Returns the holes of the polygon in clockwise order.

        holes -> Point list

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))])
        >>> len(p.holes)
        1
        """
        # shallow copies so callers cannot mutate internal state
        return [[v for v in part] for part in self._holes]

    @property
    def parts(self):
        """
        Returns the parts of the polygon in clockwise order.

        parts -> Point list

        Examples
        --------
        >>> p = Polygon([[Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))], [Point((2,1)),Point((2,2)),Point((1,2)),Point((1,1))]])
        >>> len(p.parts)
        2
        """
        # shallow copies so callers cannot mutate internal state
        return [[v for v in part] for part in self._vertices]

    @property
    def perimeter(self):
        """
        Returns the perimeter of the polygon, including the perimeters
        of any holes.

        perimeter() -> number

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.perimeter
        4.0
        """
        def dist(v1, v2):
            return math.hypot(v1[0] - v2[0], v1[1] - v2[1])

        # xrange(-1, ...) closes the ring: last vertex pairs with first
        def part_perimeter(part):
            return sum([dist(part[i], part[i + 1]) for i in xrange(-1, len(part) - 1)])

        if self._perimeter is None:
            self._perimeter = (sum([part_perimeter(part) for part in self._vertices]) +
                               sum([part_perimeter(hole) for hole in self._holes]))
        return self._perimeter

    @property
    def bbox(self):
        """
        Returns the bounding box of the polygon as a list

        See also bounding_box
        """
        if self._bbox is None:
            self._bbox = [ self.bounding_box.left,
                    self.bounding_box.lower,
                    self.bounding_box.right,
                    self.bounding_box.upper]
        return self._bbox


    @property
    def bounding_box(self):
        """
        Returns the bounding box of the polygon.

        bounding_box -> Rectangle

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((2, 0)), Point((2, 1)), Point((0, 1))])
        >>> p.bounding_box.left
        0.0
        >>> p.bounding_box.lower
        0.0
        >>> p.bounding_box.right
        2.0
        >>> p.bounding_box.upper
        1.0
        """
        if self._bounding_box is None:
            vertices = self.vertices
            self._bounding_box = Rectangle(
                min([v[0] for v in vertices]), min([v[1] for v in vertices]),
                max([v[0] for v in vertices]), max([v[1] for v in vertices]))
        return self._bounding_box

    @property
    def area(self):
        """
        Returns the area of the polygon; hole areas are subtracted
        from the sum of the part areas.

        area -> number

        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
        >>> p.area
        1.0
        >>> p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],[Point((2,1)),Point((2,2)),Point((1,2)),Point((1,1))])
        >>> p.area
        99.0
        """
        # unsigned shoelace area of one ring
        def part_area(part_verts):
            area = 0
            for i in xrange(-1, len(part_verts) - 1):
                area += (part_verts[i][0] + part_verts[i + 1][0]) * \
                    (part_verts[i][1] - part_verts[i + 1][1])
            area = area * 0.5
            if area < 0:
                area = -area
            return area

        return (sum([part_area(part) for part in self._vertices]) -
                sum([part_area(hole) for hole in self._holes]))

    @property
    def centroid(self):
        """
        Returns the centroid of the polygon

        centroid -> Point

        Notes
        -----
        The centroid returned by this method is the geometric centroid and respects multipart polygons with holes.
        Also known as the 'center of gravity' or 'center of mass'.


        Examples
        --------
        >>> p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], [Point((1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))])
        >>> p.centroid
        (5.0353535353535355, 5.0353535353535355)
        """
        # area-weighted mean of the ring centroids; holes contribute
        # negative weight
        CP = [ring.centroid for ring in self._part_rings]
        AP = [ring.area for ring in self._part_rings]
        CH = [ring.centroid for ring in self._hole_rings]
        AH = [-ring.area for ring in self._hole_rings]

        A = AP + AH
        cx = sum([pt[0] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        cy = sum([pt[1] * area for pt, area in zip(CP + CH, A)]) / sum(A)
        return cx, cy

    def contains_point(self, point):
        """
        Test if polygon contains point, by ray casting: count crossings of
        a horizontal ray from just left of the bounding box to the point.

        Examples
        --------
        >>> p = Polygon([Point((0,0)), Point((4,0)), Point((4,5)), Point((2,3)), Point((0,5))])
        >>> p.contains_point((3,3))
        1
        >>> p.contains_point((0,5))
        0
        >>> p.contains_point((2,3))
        0
        >>> p.contains_point((4,5))
        0
        >>> p.contains_point((4,0))
        1
        >>>

        Handles holes

        >>> p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))], [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))])
        >>> p.contains_point((1.0,1.0))
        0
        >>> p.contains_point((2.0,2.0))
        1
        >>> p.contains_point((10,10))
        0
        >>>


        Notes
        -----
        Points falling exactly on polygon edges may yield unpredictable
        results
        """

        # ray from point to just outside left edge of bb
        left = self.bounding_box.left - 0.000001
        y = point[1]
        right = point[0]
        cn = 0  # crossing count
        verts = self.vertices
        c = Point((left, y))
        d = Point((right, y))
        ray = LineSegment(c, d)  # NOTE(review): `ray` is never used below
        for i in xrange(-1, len(self.vertices) - 1):
            a = verts[i]
            b = verts[i + 1]
            ab = LineSegment(a, b)
            ac = LineSegment(a, c)
            bc = LineSegment(b, c)
            # the ray c->d crosses edge a->b iff the endpoints of each
            # segment straddle the other segment
            if ac.is_ccw(d) == bc.is_ccw(d):
                pass
            elif ab.is_ccw(c) == ab.is_ccw(d):
                pass
            else:
                cn += 1
        # odd crossing count => inside
        return cn % 2
+
+
class Rectangle:
    """
    Geometric representation of rectangle objects.

    Attributes
    ----------

    left    : float
              Minimum x-value of the rectangle
    lower   : float
              Minimum y-value of the rectangle
    right   : float
              Maximum x-value of the rectangle
    upper   : float
              Maximum y-value of the rectangle
    """

    def __init__(self, left, lower, right, upper):
        """
        Returns a Rectangle object.

        __init__(number, number, number, number) -> Rectangle

        Parameters
        ----------
        left  : the minimum x-value of the rectangle
        lower : the minimum y-value of the rectangle
        right : the maximum x-value of the rectangle
        upper : the maximum y-value of the rectangle

        Raises
        ------
        ArithmeticError : if right < left or upper < lower

        Examples
        --------
        >>> r = Rectangle(-4, 3, 10, 17)
        >>> r.left #minx
        -4.0
        >>> r.lower #miny
        3.0
        >>> r.right #maxx
        10.0
        >>> r.upper #maxy
        17.0
        """
        if right < left or upper < lower:
            raise ArithmeticError('Rectangle must have positive area.')
        self.left = float(left)
        self.lower = float(lower)
        self.right = float(right)
        self.upper = float(upper)

    def __nonzero__(self):
        """
        __nonzero__ is used "to implement truth value testing and the
        built-in operation bool()" -- http://docs.python.org/reference/datamodel.html

        Rectangles evaluate to False if they have zero area.

        >>> r = Rectangle(0,0,0,0)
        >>> bool(r)
        False
        >>> r = Rectangle(0,0,1,1)
        >>> bool(r)
        True
        """
        return bool(self.area)

    # Python 3 consults __bool__ for truth testing; alias keeps the
    # semantics identical on both interpreter lines.
    __bool__ = __nonzero__

    def __eq__(self, other):
        """
        Two rectangles are equal when their four edges coincide.

        BUG FIX: the previous implementation guarded with ``if other:``,
        which invokes truth testing -- a degenerate (zero-area) rectangle
        is falsy, so two equal zero-area rectangles compared unequal.
        Compare against None explicitly instead.
        """
        if other is not None:
            try:
                return self[:] == other[:]
            except TypeError:
                # Non-sliceable comparand cannot equal a Rectangle.
                return False
        return False

    def __add__(self, other):
        """
        Returns the smallest Rectangle covering both operands
        (their bounding union).
        """
        # The old code unpacked self[:]/other[:] into unused locals
        # (with a typo'd name); the union needs only the edge extrema.
        return Rectangle(min(self.left, other.left), min(self.lower, other.lower),
                         max(self.right, other.right), max(self.upper, other.upper))

    def __getitem__(self, key):
        """
        Index or slice the rectangle as [left, lower, right, upper].

        >>> r = Rectangle(-4, 3, 10, 17)
        >>> r[:]
        [-4.0, 3.0, 10.0, 17.0]
        """
        edges = [self.left, self.lower, self.right, self.upper]
        return edges.__getitem__(key)

    def set_centroid(self, new_center):
        """
        Moves the rectangle center to a new specified point.

        set_centroid(Point) -> Point

        Parameters
        ----------
        new_center : the new location of the centroid of the polygon

        Examples
        --------
        >>> r = Rectangle(0, 0, 4, 4)
        >>> r.set_centroid(Point((4, 4)))
        >>> r.left
        2.0
        >>> r.right
        6.0
        >>> r.lower
        2.0
        >>> r.upper
        6.0
        """
        # Translate every edge by the offset between the new and the
        # current center.
        shift = (new_center[0] - (self.left + self.right) / 2,
                 new_center[1] - (self.lower + self.upper) / 2)
        self.left = self.left + shift[0]
        self.right = self.right + shift[0]
        self.lower = self.lower + shift[1]
        self.upper = self.upper + shift[1]

    def set_scale(self, scale):
        """
        Rescales the rectangle around its center.

        set_scale(number) -> number

        Parameters
        ----------
        scale : the ratio of the new scale to the old scale (e.g. 1.0 is current size)

        Examples
        --------
        >>> r = Rectangle(0, 0, 4, 4)
        >>> r.set_scale(2)
        >>> r.left
        -2.0
        >>> r.right
        6.0
        >>> r.lower
        -2.0
        >>> r.upper
        6.0
        """
        # Scale each edge's offset from the (unchanged) center.
        center = ((self.left + self.right) / 2, (self.lower + self.upper) / 2)
        self.left = center[0] + scale * (self.left - center[0])
        self.right = center[0] + scale * (self.right - center[0])
        self.lower = center[1] + scale * (self.lower - center[1])
        self.upper = center[1] + scale * (self.upper - center[1])

    @property
    def area(self):
        """
        Returns the area of the Rectangle.

        area -> number

        Examples
        --------
        >>> r = Rectangle(0, 0, 4, 4)
        >>> r.area
        16.0
        """
        return (self.right - self.left) * (self.upper - self.lower)

    @property
    def width(self):
        """
        Returns the width of the Rectangle.

        width -> number

        Examples
        --------
        >>> r = Rectangle(0, 0, 4, 4)
        >>> r.width
        4.0
        """
        return self.right - self.left

    @property
    def height(self):
        """
        Returns the height of the Rectangle.

        height -> number

        Examples
        --------
        >>> r = Rectangle(0, 0, 4, 4)
        >>> r.height
        4.0
        """
        return self.upper - self.lower
+
+
# Map (lowercased) GeoJSON geometry type names to the PySAL shape
# classes that represent them; multi-part polygons reuse Polygon.
_geoJSON_type_to_Pysal_type = {'point': Point, 'linestring': Chain,
                               'polygon': Polygon, 'multipolygon': Polygon}
import standalone  # moving this to top breaks unit tests !
+
+
diff --git a/pysal/cg/sphere.py b/pysal/cg/sphere.py
new file mode 100644
index 0000000..df670c9
--- /dev/null
+++ b/pysal/cg/sphere.py
@@ -0,0 +1,502 @@
+"""
+sphere: Tools for working with spherical geometry.
+
+Author(s):
+    Charles R Schmidt schmidtc at gmail.com
+    Luc Anselin luc.anselin at asu.edu
+    Xun Li xun.li at asu.edu
+
+"""
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>, Luc Anselin <luc.anselin at asu.edu, Xun Li <xun.li at asu.edu"
+
+import math
+import random
+import numpy
+import scipy.spatial
+import scipy.constants
+from scipy.spatial.distance import euclidean
+from math import pi, cos, sin, asin
+
+__all__ = ['RADIUS_EARTH_KM', 'RADIUS_EARTH_MILES', 'arcdist', 'arcdist2linear', 'brute_knn', 'fast_knn', 'fast_threshold', 'linear2arcdist', 'toLngLat', 'toXYZ', 'lonlat','harcdist','geointerpolate','geogrid']
+
+
+RADIUS_EARTH_KM = 6371.0  
+RADIUS_EARTH_MILES = (
+    RADIUS_EARTH_KM * scipy.constants.kilo) / scipy.constants.mile
+
+
def arcdist(pt0, pt1, radius=RADIUS_EARTH_KM):
    """
    Arc distance between two points on a sphere.

    Parameters
    ----------
    pt0 : point
        assumed to be in form (lng,lat)
    pt1 : point
        assumed to be in form (lng,lat)
    radius : radius of the sphere
        defaults to Earth's radius

        Source: http://nssdc.gsfc.nasa.gov/planetary/factsheet/earthfact.html

    Returns
    -------
    The arc distance between pt0 and pt1 using supplied radius

    Examples
    --------
    >>> pt0 = (0,0)
    >>> pt1 = (180,0)
    >>> d = arcdist(pt0,pt1,RADIUS_EARTH_MILES)
    >>> d == math.pi*RADIUS_EARTH_MILES
    True
    """
    # Chord length between the two points embedded on the unit sphere,
    # converted back to an arc length on a sphere of the given radius.
    chord = euclidean(toXYZ(pt0), toXYZ(pt1))
    return linear2arcdist(chord, radius)
+
+
def arcdist2linear(arc_dist, radius=RADIUS_EARTH_KM):
    """
    Convert an arc distance (spherical earth) to a linear distance (R3) in the unit sphere.

    Examples
    --------
    >>> pt0 = (0,0)
    >>> pt1 = (180,0)
    >>> d = arcdist(pt0,pt1,RADIUS_EARTH_MILES)
    >>> d == math.pi*RADIUS_EARTH_MILES
    True
    >>> arcdist2linear(d,RADIUS_EARTH_MILES)
    2.0
    """
    # Fraction of the circumference -> central angle -> chord length.
    circumference = 2 * math.pi * radius
    angle = math.radians((arc_dist * 360.0) / circumference)
    chord = (2 - (2 * math.cos(angle))) ** (0.5)
    return chord
+
+
def linear2arcdist(linear_dist, radius=RADIUS_EARTH_KM):
    """
    Convert a linear distance in the unit sphere (R3) to an arc distance based on supplied radius

    Examples
    --------
    >>> pt0 = (0,0)
    >>> pt1 = (180,0)
    >>> d = arcdist(pt0,pt1,RADIUS_EARTH_MILES)
    >>> d == linear2arcdist(2.0, radius = RADIUS_EARTH_MILES)
    True
    """
    # Infinity passes straight through (used as a sentinel by callers).
    if linear_dist == float('inf'):
        return float('inf')
    if linear_dist > 2.0:
        raise ValueError("linear_dist, must not exceed the diameter of the unit sphere, 2.0")
    # Chord length -> central angle (degrees) -> fraction of circumference.
    circumference = 2 * math.pi * radius
    chord_sq = linear_dist ** 2
    central_angle = math.degrees(math.acos((2 - chord_sq) / (2.)))
    return (central_angle * circumference) / 360.0
+
+
def toXYZ(pt):
    """
    Convert a single point on the sphere to a 3D Cartesian point on the
    unit sphere.

    Parameters
    ----------
    pt : point
        assumed to be in form (lng,lat), in decimal degrees
        (the old docstring incorrectly documented two parameters)

    Returns
    -------
    (x, y, z) : tuple of floats
        Cartesian coordinates on the unit sphere
    """
    phi, theta = map(math.radians, pt)
    # Shift so longitude -180 maps to azimuth 0 and latitude -90 maps
    # to polar angle 0.
    phi, theta = phi + pi, theta + (pi / 2)
    x = sin(theta) * cos(phi)
    y = sin(theta) * sin(phi)
    z = cos(theta)
    return x, y, z
+
+
+def toLngLat(xyz):
+    x, y, z = xyz
+    if z == -1 or z == 1:
+        phi = 0
+    else:
+        phi = math.atan2(y, x)
+        if phi > 0:
+            phi = phi - math.pi
+        elif phi < 0:
+            phi = phi + math.pi
+    theta = math.acos(z) - (math.pi / 2)
+    return phi, theta
+
+
def brute_knn(pts, k, mode='arc'):
    """
    Computes the k nearest neighbors of a set of points by brute force.

    Parameters
    ----------
    pts  : list of points
           (lng, lat) pairs when mode is 'arc'; (x, y, z) unit-sphere
           triples when mode is 'xyz'
    k    : int
           number of nearest neighbors to report
    mode : str
           'arc' for spherical arc distance, 'xyz' for euclidean
           distance (the old docstring's 'xrz' was a typo)

    Returns
    -------
    w : dict
        mapping each point index to the list of its k nearest
        neighbor indices

    Raises
    ------
    ValueError : if mode is not 'arc' or 'xyz' (previously an unknown
        mode silently reused a stale distance or raised NameError)
    """
    n = len(pts)
    full = numpy.zeros((n, n))
    for i in range(n):
        for j in range(i + 1, n):
            if mode == 'arc':
                dist = arcdist(pts[i], pts[j], radius=RADIUS_EARTH_KM)
            elif mode == 'xyz':
                dist = euclidean(pts[i], pts[j])
            else:
                raise ValueError("mode must be 'arc' or 'xyz', got %r" % (mode,))
            full[i, j] = dist
            full[j, i] = dist
    w = {}
    for i in range(n):
        # Position 0 of the argsort is the point itself (distance 0).
        w[i] = full[i].argsort()[1:k + 1].tolist()
    return w
+
+
def fast_knn(pts, k, return_dist=False):
    """
    Computes k nearest neighbors on a sphere using a KDTree.

    Parameters
    ----------
    pts :  list of points
        NOTE(review): documented upstream as "x,y pairs", but the
        module's own usage passes unit-sphere (x,y,z) triples from
        toXYZ when distances are wanted -- confirm with callers.
    k   :  int
        Number of points to query
    return_dist : bool
        Return distances in the 'wd' container object

    Returns
    -------
    wn  :  dict
        mapping each point index to the list of its k nearest
        neighbor indices
    wd  : dict
        mapping each point index to the list of neighbor arc
        distances (only when return_dist is True)
    """
    pts = numpy.array(pts)
    kd = scipy.spatial.KDTree(pts)
    # Query k+1 neighbors because the closest hit is the point itself.
    d, w = kd.query(pts, k + 1)
    w = w[:, 1:]
    wn = {}
    for i in range(len(pts)):
        wn[i] = w[i].tolist()
    if return_dist:
        d = d[:, 1:]
        wd = {}
        for i in range(len(pts)):
            # NOTE(review): distances are converted with the miles
            # radius while the rest of this module defaults to km --
            # looks inconsistent; confirm intended units before changing.
            wd[i] = [linear2arcdist(x,
                                    radius=RADIUS_EARTH_MILES) for x in d[i].tolist()]
        return wn, wd
    return wn
+
+
def fast_threshold(pts, dist, radius=RADIUS_EARTH_KM):
    """
    Finds, for each point, all other points within an arc distance
    threshold, using a KDTree in chord space.

    Parameters
    ----------
    pts    : list of points
             NOTE(review): the tree is built in the raw coordinate
             space of pts while the threshold is converted to a
             unit-sphere chord length -- this is only consistent when
             pts are unit-sphere (x,y,z) points from toXYZ; confirm.
    dist   : arc distance threshold
    radius : sphere radius used to convert the arc distance to a chord

    Returns
    -------
    wd : dict
        mapping each point index to the list of indices of points
        within dist (the point itself is excluded)
    """
    chord = arcdist2linear(dist, radius)
    kd = scipy.spatial.KDTree(pts)
    r = kd.query_ball_tree(kd, chord)
    wd = {}
    for i in range(len(pts)):
        neighbors = r[i]
        # Each ball query contains the query point itself; drop it.
        neighbors.remove(i)
        wd[i] = neighbors
    return wd
+    
+    
+########### new functions 
+
def lonlat(pointslist):
    """
    Converts point order from lat-lon tuples to lon-lat (x,y) tuples

    Parameters
    ----------
    pointslist : list of lat-lon tuples (Note, has to be a list, even for one point)

    Returns
    -------
    list with tuples of points in lon-lat order

    Example
    -------
    >>> points = [(41.981417, -87.893517), (41.980396, -87.776787), (41.980906, -87.696450)]
    >>> lonlat(points)
    [(-87.893517, 41.981417), (-87.776787, 41.980396), (-87.69645, 41.980906)]
    """
    # Swap the first two coordinates of every point.
    return list(map(lambda p: (p[1], p[0]), pointslist))
+    
def haversine(x):
    """
    Computes the haversine of an angle: sin^2(x / 2).

    Parameters
    ----------
    x : angle in radians

    Returns
    -------
    haversine of x (square of the sine of half the angle)

    Example
    -------
    >>> haversine(math.pi)     # is 180 in radians, hence sin of 90 = 1
    1.0
    """
    half_sine = math.sin(x / 2)
    return half_sine * half_sine
+
# Lambda functions

# degree to radian conversion (used by radangle and geointerpolate)
d2r = lambda x: x * math.pi / 180.0

# radian to degree conversion (inverse of d2r)
r2d = lambda x: x * 180.0 / math.pi
+
def radangle(p0, p1):
    """
    Radian angle between two points on a sphere in lon-lat (x,y)

    Parameters
    ----------
    p0 : first point as a lon,lat tuple
    p1 : second point as a lon,lat tuple

    Returns
    -------
    central angle between the points, in radians

    Example
    -------
    >>> p0 = (-87.893517, 41.981417)
    >>> p1 = (-87.519295, 41.657498)
    >>> radangle(p0,p1)
    0.007460167953189258

    Note
    ----
    Uses the haversine formula (function haversine) and the
    degree-to-radian conversion lambda d2r.
    """
    lon0, lat0 = d2r(p0[0]), d2r(p0[1])
    lon1, lat1 = d2r(p1[0]), d2r(p1[1])
    # Haversine of the central angle.
    inner = haversine(lat1 - lat0) + math.cos(lat0) * math.cos(lat1) * haversine(lon1 - lon0)
    return 2.0 * math.asin(math.sqrt(inner))
+
def harcdist(p0, p1, lonx=True, radius=RADIUS_EARTH_KM):
    """
    Alternative arc distance function, uses haversine formula

    Parameters
    ----------
    p0       : first point as a tuple in decimal degrees
    p1       : second point as a tuple in decimal degrees
    lonx     : boolean giving the coordinate order,
               True (default) for lon,lat; False for lat,lon
    radius   : radius of the earth at the equator as a sphere
               default: RADIUS_EARTH_KM (6371.0 km)
               options: RADIUS_EARTH_MILES (3959.0 miles)
                        None (for result in radians)

    Returns
    -------
    d        : distance in units specified, km, miles or radians (for None)

    Example
    -------
    >>> p0 = (-87.893517, 41.981417)
    >>> p1 = (-87.519295, 41.657498)
    >>> harcdist(p0,p1)
    47.52873002976876
    >>> harcdist(p0,p1,radius=None)
    0.007460167953189258

    Note
    ----
    Uses radangle function to compute radian angle
    """
    # Normalize to lon,lat order before measuring.
    if not lonx:
        p0, p1 = lonlat([p0, p1])
    angle = radangle(p0, p1)
    # radius=None leaves the result as a radian angle.
    return angle if radius is None else angle * radius
+
def geointerpolate(p0, p1, t, lonx=True):
    """
    Finds a point on a sphere along the great circle distance between two points
    on a sphere, also known as a way point in great circle navigation.

    Parameters
    ----------
    p0       : first point as a tuple in decimal degrees
    p1       : second point as a tuple in decimal degrees
    t        : proportion along great circle distance between p0 and p1
               e.g., t=0.5 would find the mid-point
    lonx     : boolean to assess the order of the coordinates,
               for lon,lat (default) = True, for lat,lon = False

    Returns
    -------
    x,y      : tuple in decimal degrees of lon-lat (default) or lat-lon,
               depending on setting of lonx; in other words, the same
               order is used as for the input

    Example
    -------
    >>> p0 = (-87.893517, 41.981417)
    >>> p1 = (-87.519295, 41.657498)
    >>> geointerpolate(p0,p1,0.1)          # using lon-lat
    (-87.85592403438788, 41.949079912574796)
    >>> p3 = (41.981417, -87.893517)
    >>> p4 = (41.657498, -87.519295)
    >>> geointerpolate(p3,p4,0.1,lonx=False)   # using lat-lon
    (41.949079912574796, -87.85592403438788)

    Note
    ----
    NOTE(review): when p0 == p1, radangle is 0 and 1.0/sin(d) divides
    by zero -- confirm callers never pass coincident points.
    """

    # Normalize inputs to lon,lat order for the math below.
    if not(lonx):
        p = lonlat([p0,p1])
        p0 = p[0]
        p1 = p[1]

    # Spherical linear interpolation (slerp) weights A and B for the
    # two endpoints, at arc fraction t of the central angle d.
    d = radangle(p0,p1)
    k = 1.0 / math.sin(d)
    t = t*d
    A = math.sin(d-t) * k
    B = math.sin(t) * k

    # Endpoint angles in radians.
    x0, y0 = d2r(p0[0]),d2r(p0[1])
    x1, y1 = d2r(p1[0]),d2r(p1[1])

    # Weighted combination of the endpoints in Cartesian space.
    x = A * math.cos(y0) * math.cos(x0) + B * math.cos(y1) * math.cos(x1)
    y = A * math.cos(y0) * math.sin(x0) + B * math.cos(y1) * math.sin(x1)
    z = A * math.sin(y0) + B * math.sin(y1)

    # Back to angular coordinates in decimal degrees.
    newpx = r2d(math.atan2(y, x))
    newpy = r2d(math.atan2(z, math.sqrt(x*x + y*y)))
    # Return in the same coordinate order as the input.
    if not(lonx):
        return newpy,newpx
    return newpx,newpy
+ 
def geogrid(pup,pdown,k,lonx=True):
    """
    Computes a k+1 by k+1 set of grid points for a bounding box in lat-lon,
    using geointerpolate.

    Parameters
    ----------
    pup     : tuple with lat-lon or lon-lat for upper left corner of bounding box
    pdown   : tuple with lat-lon or lon-lat for lower right corner of bounding box
    k       : number of grid cells (grid points will be one more)
    lonx    : boolean to assess the order of the coordinates,
              for lon,lat (default) = True, for lat,lon = False

    Returns
    -------
    grid    : list of tuples with lat-lon or lon-lat for grid points, row by row,
              starting with the top row and moving to the bottom; coordinate tuples
              are returned in same order as input

    Example
    -------
    >>> pup = (42.023768,-87.946389)    # Arlington Heights IL
    >>> pdown = (41.644415,-87.524102)  # Hammond, IN
    >>> grid = geogrid(pup,pdown,3,lonx=False)
    >>> grid[0]
    (42.023768, -87.946389)
    >>> len(grid)    # (k+1)**2 grid points
    16

    """
    # Work internally in lon,lat order.
    if lonx:
        corners = [pup,pdown]
    else:
        corners = lonlat([pup,pdown])
    # Interior interpolation fractions 1/k, 2/k, ..., (k-1)/k.
    tpoints = [float(i)/k for i in range(k)[1:]]
    # Left edge runs from the upper-left corner down to (left, bottom);
    # right edge from (right, top) down to the lower-right corner.
    leftcorners = [corners[0],(corners[0][0],corners[1][1])]
    rightcorners = [(corners[1][0],corners[0][1]),corners[1]]
    leftside = [leftcorners[0]]
    rightside = [rightcorners[0]]
    # Interpolate the interior points of both vertical edges.
    for t in tpoints:
        newpl = geointerpolate(leftcorners[0],leftcorners[1],t)
        leftside.append(newpl)
        newpr = geointerpolate(rightcorners[0],rightcorners[1],t)
        rightside.append(newpr)
    leftside.append(leftcorners[1])
    rightside.append(rightcorners[1])

    # For each row, interpolate across from the left edge point to the
    # matching right edge point.
    grid = []
    for i in range(len(leftside)):
        grid.append(leftside[i])
        for t in tpoints:
            newp = geointerpolate(leftside[i],rightside[i],t)
            grid.append(newp)
        grid.append(rightside[i])
    # Restore the caller's coordinate order if input was lat,lon.
    if not(lonx):
        grid = lonlat(grid)
    return grid
+    
+       
+
if __name__ == '__main__':
    # Ad-hoc smoke tests / demos for this module (Python 2 syntax).
    def random_ll():
        # Uniform random (lng, lat) in [-180, 180) x [-90, 90).
        long = (random.random() * 360) - 180
        lat = (random.random() * 180) - 90
        return long, lat

    for i in range(1):
        n = 99
        # generate random surface points.
        pts = [random_ll() for i in xrange(n)]
        # convert to unit sphere points.
        pts2 = map(toXYZ, pts)

        # Brute-force arc distances, brute-force chord distances and the
        # KDTree version must all agree on the neighbor sets.
        w = brute_knn(pts, 4, 'arc')
        w2 = brute_knn(pts2, 4, 'xyz')
        w3 = fast_knn(pts2, 4)
        assert w == w2 == w3

    ### Make knn1
    import pysal
#    f = pysal.open('/Users/charlie/Documents/data/stl_hom/stl_hom.shp', 'r')
    f = pysal.open(pysal.examples.get_path('stl_hom.shp'),'r')
    shapes = f.read()
    # Use polygon centroids as the point set.
    pts = [shape.centroid for shape in shapes]
    w0 = brute_knn(pts, 4, 'xyz')
    w1 = brute_knn(pts, 4, 'arc')
    pts = map(toXYZ, pts)
    w2 = brute_knn(pts, 4, 'xyz')
    w3 = fast_knn(pts, 4)

    wn, wd = fast_knn(pts, 4, True)
    ids = range(1, len(pts) + 1)

    ### new sphere examples
    # Same pair of Chicago-area points in lon,lat and lat,lon order.
    p0 = (-87.893517, 41.981417)
    p1 = (-87.519295, 41.657498)
    p3 = (41.981417, -87.893517)
    p4 = (41.657498, -87.519295)
    d1 =harcdist(p0,p1,radius=RADIUS_EARTH_MILES)
    print "d1",d1
    d2 = harcdist(p3,p4,lonx=False,radius=None)
    print "d2",d2
    pn1 = geointerpolate(p0,p1,0.1)
    print "pn1",pn1
    pn2 = geointerpolate(p3,p4,0.1,lonx=False)
    print "pn2",pn2
    pup = (42.023768,-87.946389)    # Arlington Heights IL
    pdown = (41.644415,-87.524102)  # Hammond, IN
    grid=geogrid(pup,pdown,3,lonx=False)
    print "grid",grid
+    
diff --git a/pysal/cg/standalone.py b/pysal/cg/standalone.py
new file mode 100644
index 0000000..709edea
--- /dev/null
+++ b/pysal/cg/standalone.py
@@ -0,0 +1,913 @@
+"""
+Helper functions for computational geometry in PySAL
+
+"""
+
+__author__ = "Sergio J. Rey, Xinyue Ye, Charles Schmidt, Andrew Winslow"
+__credits__ = "Copyright (c) 2005-2009 Sergio J. Rey"
+
+import doctest
+import math
+import copy
+from shapes import *
+from itertools import islice
+import scipy.spatial
+from pysal.common import *
+
+EPSILON_SCALER = 3
+
+
+__all__ = ['bbcommon', 'get_bounding_box', 'get_angle_between', 'is_collinear', 'get_segments_intersect', 'get_segment_point_intersect', 'get_polygon_point_intersect', 'get_rectangle_point_intersect', 'get_ray_segment_intersect', 'get_rectangle_rectangle_intersection', 'get_polygon_point_dist', 'get_points_dist', 'get_segment_point_dist', 'get_point_at_angle_and_dist', 'convex_hull', 'is_clockwise', 'point_touches_rectangle', 'get_shared_segments', 'distance_matrix']
+
+
def bbcommon(bb, bbother):
    """
    Old Stars method for bounding box overlap testing
    Also defined in pysal.weights._cont_binning

    Returns 1 when the two bounding boxes (given as [left, lower,
    right, upper] sequences) overlap or touch, 0 otherwise.

    Examples
    --------

    >>> b0 = [0,0,10,10]
    >>> b1 = [10,0,20,10]
    >>> bbcommon(b0,b1)
    1
    """
    # Boxes are disjoint exactly when one lies entirely to the left or
    # right of the other, or entirely above or below it.
    x_disjoint = (bbother[2] < bb[0]) or (bbother[0] > bb[2])
    y_disjoint = (bbother[3] < bb[1]) or (bbother[1] > bb[3])
    if x_disjoint or y_disjoint:
        return 0
    return 1
+
+
def get_bounding_box(items):
    """
    Returns the smallest Rectangle containing every item in the list.
    Items may be Points, Rectangles, or any object exposing a
    bounding_box attribute (e.g. Polygon, Ellipse).

    Examples
    --------
    >>> bb = get_bounding_box([Point((-1, 5)), Rectangle(0, 6, 11, 12)])
    >>> bb.left
    -1.0
    >>> bb.lower
    5.0
    >>> bb.right
    11.0
    >>> bb.upper
    12.0
    """

    def _extent(o, side, axis):
        # Polygon/Ellipse expose a bounding_box; Rectangle exposes the
        # side attribute directly; Points are indexable coordinates.
        if hasattr(o, 'bounding_box'):
            return getattr(o.bounding_box, side)
        elif hasattr(o, side):
            return getattr(o, side)
        else:
            return o[axis]

    return Rectangle(min(_extent(o, 'left', 0) for o in items),
                     min(_extent(o, 'lower', 1) for o in items),
                     max(_extent(o, 'right', 0) for o in items),
                     max(_extent(o, 'upper', 1) for o in items))
+
+
def get_angle_between(ray1, ray2):
    """
    Returns the angle formed between a pair of rays which share an origin

    get_angle_between(Ray, Ray) -> number

    Parameters
    ----------
    ray1   : a ray forming the beginning of the angle measured
    ray2   : a ray forming the end of the angle measured

    Raises
    ------
    ValueError : if the rays do not share an origin

    Examples
    --------
    >>> get_angle_between(Ray(Point((0, 0)), Point((1, 0))), Ray(Point((0, 0)), Point((1, 0))))
    0.0
    """

    if ray1.o != ray2.o:
        raise ValueError('Rays must have the same origin.')
    # Direction vectors of the two rays.
    v1 = (ray1.p[0] - ray1.o[0], ray1.p[1] - ray1.o[1])
    v2 = (ray2.p[0] - ray2.o[0], ray2.p[1] - ray2.o[1])
    # Rotate the frame so v1 lies along the positive x-axis, then
    # measure the angle of the rotated v2.
    theta = -math.atan2(v1[1], v1[0])
    cos_t = math.cos(theta)
    sin_t = math.sin(theta)
    rotated = (cos_t * v2[0] - sin_t * v2[1],
               sin_t * v2[0] + cos_t * v2[1])
    return math.atan2(rotated[1], rotated[0])
+
+
def is_collinear(p1, p2, p3):
    """
    Returns whether a triplet of points is collinear.

    is_collinear(Point, Point, Point) -> bool

    Parameters
    ----------
    p1 : a point (Point)
    p2 : another point (Point)
    p3 : yet another point (Point)

    Examples
    --------
    >>> is_collinear(Point((0, 0)), Point((1, 1)), Point((5, 5)))
    True
    >>> is_collinear(Point((0, 0)), Point((1, 1)), Point((5, 0)))
    False
    """
    # np.finfo only accepts floating types; fall back to float64's eps
    # for integer coordinates (previously this raised on int input).
    try:
        eps = np.finfo(type(p1[0])).eps
    except (TypeError, ValueError):
        eps = np.finfo(np.float64).eps

    # Cross product of (p2 - p1) and (p3 - p1); near-zero magnitude
    # means the three points lie on one line.
    cross = (p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0])
    return (abs(cross) < EPSILON_SCALER * eps)
+
+
def get_segments_intersect(seg1, seg2):
    """
    Returns the intersection of two segments.

    get_segments_intersect(LineSegment, LineSegment) -> Point or LineSegment

    Parameters
    ----------
    seg1 : a segment to check intersection for
    seg2 : a segment to check intersection for

    Returns
    -------
    A Point where the segments cross, a LineSegment where they overlap
    collinearly, or None when they do not intersect.

    Examples
    --------
    >>> seg1 = LineSegment(Point((0, 0)), Point((0, 10)))
    >>> seg2 = LineSegment(Point((-5, 5)), Point((5, 5)))
    >>> i = get_segments_intersect(seg1, seg2)
    >>> isinstance(i, Point)
    True
    >>> str(i)
    '(0.0, 5.0)'
    >>> seg3 = LineSegment(Point((100, 100)), Point((100, 101)))
    >>> i = get_segments_intersect(seg2, seg3)
    """

    p1 = seg1.p1
    p2 = seg1.p2
    p3 = seg2.p1
    p4 = seg2.p2
    # Set up the 2x2 linear system [a b; c d] * (x, y) = (m, n), where
    # (x, y) are the parametric positions of the intersection along
    # seg1 and seg2 respectively.
    a = p2[0] - p1[0]
    b = p3[0] - p4[0]
    c = p2[1] - p1[1]
    d = p3[1] - p4[1]
    det = float(a * d - b * c)
    if det == 0:
        # Segments are parallel; check for collinear overlap.
        if seg1 == seg2:
            return LineSegment(seg1.p1, seg1.p2)
        else:
            # Which endpoints of one segment lie on the other?
            a = get_segment_point_intersect(seg2, seg1.p1)
            b = get_segment_point_intersect(seg2, seg1.p2)
            c = get_segment_point_intersect(seg1, seg2.p1)
            d = get_segment_point_intersect(seg1, seg2.p2)

            if a and b:  # seg1 in seg2
                return LineSegment(seg1.p1, seg1.p2)
            if c and d:  # seg2 in seg1
                return LineSegment(seg2.p1, seg2.p2)
            if (a or b) and (c or d):
                # Partial overlap: spans from the contained endpoint of
                # seg1 to the contained endpoint of seg2.
                p1 = a if a else b
                p2 = c if c else d
                return LineSegment(p1, p2)

        return None
    # Invert the 2x2 matrix and solve for the parametric coordinates.
    a_inv = d / det
    b_inv = -b / det
    c_inv = -c / det
    d_inv = a / det
    m = p3[0] - p1[0]
    n = p3[1] - p1[1]
    x = a_inv * m + b_inv * n
    y = c_inv * m + d_inv * n
    # Both parameters must fall in [0, 1] for the crossing to lie on
    # both segments (not just on their supporting lines).
    intersect_exists = 0 <= x <= 1 and 0 <= y <= 1
    if not intersect_exists:
        return None
    return Point((p1[0] + x * (p2[0] - p1[0]), p1[1] + x * (p2[1] - p1[1])))
+
+
def get_segment_point_intersect(seg, pt):
    """
    Returns the intersection of a segment and point.

    get_segment_point_intersect(LineSegment, Point) -> Point

    Parameters
    ----------
    seg : a segment to check intersection for
    pt  : a point to check intersection for

    Returns
    -------
    pt when the point lies on the segment, otherwise None.

    Examples
    --------
    >>> seg = LineSegment(Point((0, 0)), Point((0, 5)))
    >>> pt = Point((0, 5))
    >>> i = get_segment_point_intersect(seg, pt)
    >>> str(i)
    '(0.0, 5.0)'
    >>> pt2 = Point((5, 5))
    >>> get_segment_point_intersect(seg, pt2)
    """
    # Machine epsilon of the point's coordinate type scales the
    # tolerance of the containment tests below.
    eps = np.finfo(type(pt[0])).eps

    if is_collinear(pt, seg.p1, seg.p2):
        # Collinear case: the point intersects iff its distance to the
        # segment is numerically zero.
        if get_segment_point_dist(seg, pt)[0] < EPSILON_SCALER * eps:
            return pt
        else:
            return None

    # Non-collinear case: cross product of seg.p1->pt with
    # seg.p1->seg.p2; a near-zero value means pt is on the line.
    vec1 = (pt[0] - seg.p1[0], pt[1] - seg.p1[1])
    vec2 = (seg.p2[0] - seg.p1[0], seg.p2[1] - seg.p1[1])
    if abs(vec1[0] * vec2[1] - vec1[1] * vec2[0]) < eps:
        return pt
    return None
+
+
def get_polygon_point_intersect(poly, pt):
    """
    Returns the intersection of a polygon and point.

    get_polygon_point_intersect(Polygon, Point) -> Point

    Parameters
    ----------
    poly : a polygon to check intersection for
    pt   : a point to check intersection for

    Returns
    -------
    pt when the point lies inside or on the boundary of the polygon
    (and not strictly inside one of its holes), otherwise None.

    Examples
    --------
    >>> poly = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    >>> pt = Point((0.5, 0.5))
    >>> i = get_polygon_point_intersect(poly, pt)
    >>> str(i)
    '(0.5, 0.5)'
    >>> pt2 = Point((2, 2))
    >>> get_polygon_point_intersect(poly, pt2)
    """
    def pt_lies_on_part_boundary(pt, vertices):
        # True when pt sits on any edge of the closed vertex ring
        # (xrange(-1, ...) wraps the last vertex back to the first).
        # NOTE(review): the ``filter(...) != []`` comparisons in this
        # function rely on Python 2 filter returning a list; under
        # Python 3 filter returns an iterator and these tests would
        # always be "truthy-unequal" -- confirm before porting.
        return filter(
            lambda i: get_segment_point_dist(LineSegment(
                vertices[i], vertices[i + 1]), pt)[0] == 0,
            xrange(-1, len(vertices) - 1)) != []

    ret = None
    if get_rectangle_point_intersect(poly.bounding_box, pt) is None:  # Weed out points that aren't even close
        return None
    elif filter(lambda verts: pt_lies_on_part_boundary(pt, verts), poly._vertices) != []:
        ret = pt
    elif filter(lambda verts: _point_in_vertices(pt, verts), poly._vertices) != []:
        ret = pt
    if poly._holes != [[]]:
        if filter(lambda verts: pt_lies_on_part_boundary(pt, verts), poly.holes) != []:
            # pt lies on boundary of hole.
            pass
        if filter(lambda verts: _point_in_vertices(pt, verts), poly.holes) != []:
            # pt lines inside a hole.
            ret = None
        #raise NotImplementedError, 'Cannot compute containment for polygon with holes'
    return ret
+
+
def get_rectangle_point_intersect(rect, pt):
    """
    Returns the intersection of a rectangle and point.

    get_rectangle_point_intersect(Rectangle, Point) -> Point

    Parameters
    ----------
    rect : a rectangle to check intersection for
    pt   : a point to check intersection for

    Examples
    --------
    >>> rect = Rectangle(0, 0, 5, 5)
    >>> pt = Point((1, 1))
    >>> i = get_rectangle_point_intersect(rect, pt)
    >>> str(i)
    '(1.0, 1.0)'
    >>> pt2 = Point((10, 10))
    >>> get_rectangle_point_intersect(rect, pt2)
    """
    # The point intersects iff it lies within the closed extents of the
    # rectangle on both axes.
    inside_x = rect.left <= pt[0] <= rect.right
    inside_y = rect.lower <= pt[1] <= rect.upper
    return pt if (inside_x and inside_y) else None
+
+
def get_ray_segment_intersect(ray, seg):
    """
    Returns the intersection of a ray and line segment.

    get_ray_segment_intersect(Ray, Point) -> Point or LineSegment

    Parameters
    ----------

    ray : a ray to check intersection for
    seg : a line segment to check intersection for

    Returns
    -------
    A Point or LineSegment of intersection, or None when the ray misses
    the segment.

    Examples
    --------
    >>> ray = Ray(Point((0, 0)), Point((0, 1)))
    >>> seg = LineSegment(Point((-1, 10)), Point((1, 10)))
    >>> i = get_ray_segment_intersect(ray, seg)
    >>> isinstance(i, Point)
    True
    >>> str(i)
    '(0.0, 10.0)'
    >>> seg2 = LineSegment(Point((10, 10)), Point((10, 11)))
    >>> get_ray_segment_intersect(ray, seg2)
    """
    # Reduce to segment-segment intersection: extend the ray into a
    # finite segment long enough to reach past seg in its direction.
    d = max(math.hypot(seg.p1[0] - ray.o[0], seg.p1[1] - ray.o[1]),
            math.hypot(seg.p2[0] - ray.o[0], seg.p2[1] - ray.o[1])) + 1  # Upper bound on origin to segment dist (+1)
    ratio = d / math.hypot(ray.o[0] - ray.p[0], ray.o[1] - ray.p[1])
    ray_seg = LineSegment(
        ray.o, Point((ray.o[0] + ratio * (ray.p[0] - ray.o[0]),
                      ray.o[1] + ratio * (ray.p[1] - ray.o[1]))))
    return get_segments_intersect(seg, ray_seg)
+
+
def get_rectangle_rectangle_intersection(r0, r1, checkOverlap=True):
    """
    Returns the intersection between two rectangles.

    Note: Algorithm assumes the rectangles overlap.
          checkOverlap=False should be used with extreme caution.

    get_rectangle_rectangle_intersection(r0, r1) -> Rectangle, Segment, Point or None

    Parameters
    ----------
    r0   : a Rectangle
    r1   : a Rectangle

    Examples
    --------
    >>> r0 = Rectangle(0,4,6,9)
    >>> r1 = Rectangle(4,0,9,7)
    >>> ri = get_rectangle_rectangle_intersection(r0,r1)
    >>> ri[:]
    [4.0, 4.0, 6.0, 7.0]
    >>> r0 = Rectangle(0,0,4,4)
    >>> r1 = Rectangle(2,1,6,3)
    >>> ri = get_rectangle_rectangle_intersection(r0,r1)
    >>> ri[:]
    [2.0, 1.0, 4.0, 3.0]
    >>> r0 = Rectangle(0,0,4,4)
    >>> r1 = Rectangle(2,1,3,2)
    >>> ri = get_rectangle_rectangle_intersection(r0,r1)
    >>> ri[:] == r1[:]
    True
    """
    if checkOverlap and not bbcommon(r0, r1):
        # Disjoint rectangles have no intersection.
        return None

    # The overlap region is bounded by the innermost of each pair of
    # opposing edges.
    left = max(r0.left, r1.left)
    right = min(r0.right, r1.right)
    lower = max(r0.lower, r1.lower)
    upper = min(r0.upper, r1.upper)

    # Degenerate overlaps collapse to a point or a segment.
    degenerate_x = left == right
    degenerate_y = lower == upper
    if degenerate_x and degenerate_y:
        return Point((left, lower))
    if degenerate_y:
        return LineSegment(Point((left, lower)), Point((right, lower)))
    if degenerate_x:
        return LineSegment(Point((left, lower)), Point((left, upper)))
    return Rectangle(left, lower, right, upper)
+
+
def get_polygon_point_dist(poly, pt):
    """
    Returns the distance between a polygon and point.

    get_polygon_point_dist(Polygon, Point) -> number

    Parameters
    ----------
    poly : a polygon to compute distance from
    pt   : a point to compute distance from

    Examples
    --------
    >>> poly = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
    >>> pt = Point((2, 0.5))
    >>> get_polygon_point_dist(poly, pt)
    1.0
    >>> pt2 = Point((0.5, 0.5))
    >>> get_polygon_point_dist(poly, pt2)
    0.0
    """
    # A point on the boundary or in the interior is at distance zero.
    if get_polygon_point_intersect(poly, pt) is not None:
        return 0.0
    # Otherwise the minimum distance is realized on some edge; scan every
    # ring of the polygon.  range(-1, len(vertices) - 1) pairs each vertex
    # with its successor, with index -1 supplying the closing edge.
    # (range instead of the Python-2-only xrange: identical behavior,
    # portable to Python 3.)
    part_prox = []
    for vertices in poly._vertices:
        edge_dists = [get_segment_point_dist(
            LineSegment(vertices[i], vertices[i + 1]), pt)[0]
            for i in range(-1, len(vertices) - 1)]
        part_prox.append(min(edge_dists))
    return min(part_prox)
+
+
def get_points_dist(pt1, pt2):
    """
    Returns the Euclidean distance between a pair of points.

    get_points_dist(Point, Point) -> number

    Parameters
    ----------
    pt1 : a point
    pt2 : the other point

    Examples
    --------
    >>> get_points_dist(Point((4, 4)), Point((4, 8)))
    4.0
    >>> get_points_dist(Point((0, 0)), Point((0, 0)))
    0.0
    """
    # hypot handles the sqrt(dx^2 + dy^2) computation robustly.
    dx = pt1[0] - pt2[0]
    dy = pt1[1] - pt2[1]
    return math.hypot(dx, dy)
+
+
def get_segment_point_dist(seg, pt):
    """
    Returns the distance between a line segment and point, and the location
    of the closest point on the segment expressed as a ratio of the segment
    length measured from seg.p1.

    get_segment_point_dist(LineSegment, Point) -> (number, number)

    Parameters
    ----------
    seg  : a line segment to compute distance from
    pt   : a point to compute distance from

    Examples
    --------
    >>> seg = LineSegment(Point((0, 0)), Point((10, 0)))
    >>> pt = Point((5, 5))
    >>> get_segment_point_dist(seg, pt)
    (5.0, 0.5)
    >>> pt2 = Point((0, 0))
    >>> get_segment_point_dist(seg, pt2)
    (0.0, 0.0)
    """
    src_p = seg.p1
    dest_p = seg.p2

    # Translate everything so the segment starts at the origin.
    # (The original code carried two always-zero variables for the shifted
    # source point; they are dropped here.)
    px = pt[0] - src_p[0]
    py = pt[1] - src_p[1]
    ex = dest_p[0] - src_p[0]
    ey = dest_p[1] - src_p[1]

    segment_length = math.hypot(ex, ey)

    # Degenerate segment: both endpoints coincide, so the answer is the
    # plain point-to-point distance and the ratio is 0 by convention.
    if segment_length == 0:
        return (math.hypot(px, py), 0)

    # Unit vector along the segment.
    u_x = ex / segment_length
    u_y = ey / segment_length

    # Orthogonal projection of the translated point onto the segment's
    # supporting line: (u . p) u.
    dot = u_x * px + u_y * py
    inter_x = dot * u_x
    inter_y = dot * u_y

    src_proj_dist = math.hypot(inter_x, inter_y)
    dest_proj_dist = math.hypot(inter_x - ex, inter_y - ey)

    if src_proj_dist > segment_length or dest_proj_dist > segment_length:
        # Projection falls outside the segment: the nearest point is
        # whichever endpoint is closer (ratio 0 or 1 respectively).
        src_pt_dist = math.hypot(px, py)
        dest_pt_dist = math.hypot(ex - px, ey - py)
        if src_pt_dist < dest_pt_dist:
            return (src_pt_dist, 0)
        return (dest_pt_dist, 1)

    # Projection falls on the segment: perpendicular distance, plus the
    # position of the foot as a fraction of the segment length.
    return (math.hypot(inter_x - px, inter_y - py),
            src_proj_dist / segment_length)
+
+
def get_point_at_angle_and_dist(ray, angle, dist):
    """
    Returns the point at a distance and angle relative to the origin of a ray.

    get_point_at_angle_and_dist(Ray, number, number) -> Point

    Parameters
    ----------
    ray   : the ray which the angle and distance are relative to
    angle : the angle relative to the ray at which the point is located
    dist  : the distance from the ray origin at which the point is located

    Examples
    --------
    >>> ray = Ray(Point((0, 0)), Point((1, 0)))
    >>> pt = get_point_at_angle_and_dist(ray, math.pi, 1.0)
    >>> isinstance(pt, Point)
    True
    >>> round(pt[0], 8)
    -1.0
    >>> round(pt[1], 8)
    0.0
    """
    # Direction of the ray, measured from the positive x-axis.
    base_angle = math.atan2(ray.p[1] - ray.o[1], ray.p[0] - ray.o[0])
    # Rotate by the requested offset and step `dist` away from the origin.
    theta = base_angle + angle
    x = ray.o[0] + dist * math.cos(theta)
    y = ray.o[1] + dist * math.sin(theta)
    return Point((x, y))
+
+
def convex_hull(points):
    """
    Returns the convex hull of a set of points (Graham scan).

    convex_hull(Point list) -> Polygon

    Parameters
    ----------
    points : a list of points to compute the convex hull for

    Examples
    --------
    >>> points = [Point((0, 0)), Point((4, 4)), Point((4, 0)), Point((3, 1))]
    >>> convex_hull(points)
    [(0.0, 0.0), (4.0, 0.0), (4.0, 4.0)]
    """
    # Work on a shallow copy so the caller's list is left untouched.
    remaining = copy.copy(points)
    # Anchor: lowest point, ties broken by smallest x.
    anchor = min(remaining, key=lambda p: (p[1], p[0]))
    remaining.remove(anchor)
    # Sweep the remaining points by polar angle about the anchor.
    remaining.sort(key=lambda p: math.atan2(p[1] - anchor[1], p[0] - anchor[0]))

    def _not_left_turn(p1, p2, p3):
        # True when p1 -> p2 -> p3 does not turn left (turns right or is
        # collinear), in which case p2 cannot be a hull vertex.
        ax, ay = p2[0] - p1[0], p2[1] - p1[1]
        bx, by = p3[0] - p2[0], p3[1] - p2[1]
        return bx * ay - by * ax >= 0

    hull = [anchor]
    for candidate in remaining:
        hull.append(candidate)
        # Discard interior points as soon as the turn direction exposes them.
        while len(hull) > 3 and _not_left_turn(hull[-3], hull[-2], hull[-1]):
            hull.pop(-2)
    return hull
+
+
def is_clockwise(vertices):
    """
    Returns whether a list of points describing a polygon ring is in
    clockwise order.

    is_clockwise(Point list) -> bool

    Parameters
    ----------
    vertices : a list of points that form a single ring

    Examples
    --------
    >>> is_clockwise([Point((0, 0)), Point((10, 0)), Point((0, 10))])
    False
    >>> is_clockwise([Point((0, 0)), Point((0, 10)), Point((10, 0))])
    True
    """
    # Fewer than three vertices cannot enclose any area; report clockwise,
    # matching the historical behavior of this function.
    if len(vertices) < 3:
        return True
    # Shoelace formula: accumulate twice the signed area of the ring,
    # including the closing edge from the last vertex back to the first.
    # Negative signed area means the vertices run clockwise.
    signed_area = 0.0
    n = len(vertices)
    for i in range(n):
        ax, ay = vertices[i]
        bx, by = vertices[(i + 1) % n]
        signed_area += ax * by - ay * bx
    return signed_area < 0.0
+
+
def ccw(vertices):
    """
    Returns whether a list of points is counterclockwise

    >>> ccw([Point((0, 0)), Point((10, 0)), Point((0, 10))])
    True
    >>> ccw([Point((0, 0)), Point((0, 10)), Point((10, 0))])
    False
    """
    # Counterclockwise is simply the complement of clockwise.
    return not is_clockwise(vertices)
+
+
def seg_intersect(a, b, c, d):
    """
    Tests if two segments (a,b) (c,d) intersect

    >>> a = Point((0,1))
    >>> b = Point((0,10))
    >>> c = Point((-2,5))
    >>> d = Point((2,5))
    >>> e = Point((-3,5))
    >>> seg_intersect(a, b, c, d)
    True
    >>> seg_intersect(a, b, c, e)
    False
    """
    # The segments cross iff c and d lie on opposite sides of segment (a, b)
    # and a and b lie on opposite sides of segment (c, d).
    cd_straddles_ab = ccw([a, c, d]) != ccw([b, c, d])
    if not cd_straddles_ab:
        return False
    return ccw([a, b, c]) != ccw([a, b, d])
+
+
+def _point_in_vertices(pt, vertices):
+    """
+    HELPER METHOD. DO NOT CALL.
+
+    Returns whether a point is contained in a polygon specified by a sequence of vertices.
+
+    _point_in_vertices(Point, Point list) -> bool
+
+    Parameters
+    ----------
+
+    Attributes
+    ----------
+
+    Examples
+    --------
+    >>> _point_in_vertices(Point((1, 1)), [Point((0, 0)), Point((10, 0)), Point((0, 10))])
+    True
+    """
+
+    def neg_ray_intersect(p1, p2, p3):
+        # Returns whether a ray in the negative-x direction from p3 intersects the segment between
+        if not min(p1[1], p2[1]) <= p3[1] <= max(p1[1], p2[1]):
+            return False
+        if p1[1] > p2[1]:
+            vec1 = (p2[0] - p1[0], p2[1] - p1[1])
+        else:
+            vec1 = (p1[0] - p2[0], p1[1] - p2[1])
+        vec2 = (p3[0] - p1[0], p3[1] - p1[1])
+        return vec1[0] * vec2[1] - vec2[0] * vec1[1] >= 0
+
+    vert_y_set = set([v[1] for v in vertices])
+    while pt[1] in vert_y_set:
+        pt = (pt[0], pt[1] + -1e-14 + random.random(
+        ) * 2e-14)  # Perturb the location very slightly
+    inters = 0
+    for i in xrange(-1, len(vertices) - 1):
+        v1 = vertices[i]
+        v2 = vertices[i + 1]
+        if neg_ray_intersect(v1, v2, pt):
+            inters += 1
+
+    return inters % 2 == 1
+
+
def point_touches_rectangle(point, rect):
    """
    Returns 1 if the point is in the rectangle or touches its boundary,
    0 otherwise.

    point_touches_rectangle(point, rect) -> bool

    Parameters
    ----------
    point : Point or Tuple
    rect  : Rectangle

    Examples
    --------
    >>> rect = Rectangle(0,0,10,10)
    >>> a = Point((5,5))
    >>> b = Point((10,5))
    >>> c = Point((11,11))
    >>> point_touches_rectangle(a,rect)
    1
    >>> point_touches_rectangle(b,rect)
    1
    >>> point_touches_rectangle(c,rect)
    0
    """
    # Inclusive comparison on all four edges; the result is an int flag
    # (0/1) rather than a bool, matching the historical interface.
    within_x = rect.left <= point[0] <= rect.right
    within_y = rect.lower <= point[1] <= rect.upper
    return 1 if within_x and within_y else 0
+
+
def get_shared_segments(poly1, poly2, bool_ret=False):
    """
    Returns the line segments in common to both polygons.

    get_shared_segments(poly1, poly2) -> list

    Parameters
    ----------
    poly1    : a Polygon
    poly2    : a Polygon
    bool_ret : if True, return only whether at least one shared segment
               exists (bool) instead of the list of segments

    Examples
    --------
    >>> x = [0, 0, 1, 1]
    >>> y = [0, 1, 1, 0]
    >>> poly1 = Polygon( map(Point,zip(x,y)) )
    >>> x = [a+1 for a in x]
    >>> poly2 = Polygon( map(Point,zip(x,y)) )
    >>> get_shared_segments(poly1, poly2, bool_ret=True)
    True

    """
    # Intersection window of the two bounding boxes
    # (get_rectangle_rectangle_intersection inlined for speed).
    r0 = poly1.bounding_box
    r1 = poly2.bounding_box
    wLeft = max(r0.left, r1.left)
    wLower = max(r0.lower, r1.lower)
    wRight = min(r0.right, r1.right)
    wUpper = min(r0.upper, r1.upper)

    def _window_segments(poly):
        # Yield each edge of poly (rings and non-empty holes) whose
        # endpoints both fall inside the bounding-box intersection window,
        # normalized so the lesser endpoint comes first.  Normalization
        # makes edges comparable regardless of traversal direction.
        # (This factors out the loop that was previously duplicated for
        # poly1 and poly2; the unused `partsA` local is gone as well.)
        for part in poly.parts + [p for p in poly.holes if p]:
            if part[0] != part[-1]:  # not closed
                part = part[:] + part[0:1]
            a = part[0]
            for b in islice(part, 1, None):
                # Inlined point-in-window test for speed.
                x, y = a
                if wLeft <= x <= wRight and wLower <= y <= wUpper:
                    x, y = b
                    if wLeft <= x <= wRight and wLower <= y <= wUpper:
                        yield (b, a) if a > b else (a, b)
                a = b

    segmentsA = set(_window_segments(poly1))
    common = []
    for seg in _window_segments(poly2):
        if seg in segmentsA:
            if bool_ret:
                # Short-circuit: one shared edge is enough.
                return True
            common.append(LineSegment(*seg))
    if bool_ret:
        return len(common) > 0
    return common
+
+
def distance_matrix(X, p=2.0, threshold=5e7):
    """
    Distance Matrices

    XXX Needs optimization/integration with other weights in pysal

    Parameters
    ----------
    X          : An, n by k numpy.ndarray
                    Where n is number of observations
                    k is number of dimensions (2 for x,y)
    p          : float
                    Minkowski p-norm distance metric parameter:
                    1<=p<=infinity
                    2: Euclidean distance
                    1: Manhattan distance
    threshold  : positive integer
                    If (n**2)*32 > threshold use scipy.spatial.distance_matrix
                    instead of working in ram, this is roughly the amount of
                    ram (in bytes) that will be used.

    Returns
    -------
    D          : n by n numpy.ndarray of pairwise Minkowski p-norm distances

    Raises
    ------
    TypeError  : if X has more than two dimensions

    Examples
    --------
    >>> x,y=[r.flatten() for r in np.indices((3,3))]
    >>> data = np.array([x,y]).T
    >>> d=distance_matrix(data)
    >>> np.array(d)
    array([[ 0.        ,  1.        ,  2.        ,  1.        ,  1.41421356,
             2.23606798,  2.        ,  2.23606798,  2.82842712],
           [ 1.        ,  0.        ,  1.        ,  1.41421356,  1.        ,
             1.41421356,  2.23606798,  2.        ,  2.23606798],
           [ 2.        ,  1.        ,  0.        ,  2.23606798,  1.41421356,
             1.        ,  2.82842712,  2.23606798,  2.        ],
           [ 1.        ,  1.41421356,  2.23606798,  0.        ,  1.        ,
             2.        ,  1.        ,  1.41421356,  2.23606798],
           [ 1.41421356,  1.        ,  1.41421356,  1.        ,  0.        ,
             1.        ,  1.41421356,  1.        ,  1.41421356],
           [ 2.23606798,  1.41421356,  1.        ,  2.        ,  1.        ,
             0.        ,  2.23606798,  1.41421356,  1.        ],
           [ 2.        ,  2.23606798,  2.82842712,  1.        ,  1.41421356,
             2.23606798,  0.        ,  1.        ,  2.        ],
           [ 2.23606798,  2.        ,  2.23606798,  1.41421356,  1.        ,
             1.41421356,  1.        ,  0.        ,  1.        ],
           [ 2.82842712,  2.23606798,  2.        ,  2.23606798,  1.41421356,
             1.        ,  2.        ,  1.        ,  0.        ]])
    >>>
    """
    if X.ndim == 1:
        # Treat a flat vector as n observations of one dimension.  Rebind a
        # reshaped view locally instead of assigning to X.shape, which
        # mutated the caller's array in place.
        X = X.reshape((X.shape[0], 1))
    elif X.ndim > 2:
        raise TypeError(
            "distance_matrix expects a 1-d or 2-d array, got %d dimensions"
            % X.ndim)
    n, k = X.shape

    if (n ** 2) * 32 > threshold:
        # Too large to comfortably hold the n x n intermediates in RAM;
        # delegate to scipy's chunked implementation.
        return scipy.spatial.distance_matrix(X, X, p)

    D = np.zeros((n, n))
    for col in range(k):
        # Broadcast (n,1) against (n,) to get the full pairwise difference
        # matrix for this coordinate: dx[i, j] = X[i, col] - X[j, col].
        dx = X[:, col:col + 1] - X[:, col]
        if p % 2 != 0:
            # Odd or non-integer exponents need the magnitude, not the sign.
            dx = np.abs(dx)
        D += dx ** p
    return D ** (1.0 / p)
+
+
diff --git a/pysal/cg/tests/__init__.py b/pysal/cg/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/cg/tests/test_geoJSON.py b/pysal/cg/tests/test_geoJSON.py
new file mode 100644
index 0000000..cea05ad
--- /dev/null
+++ b/pysal/cg/tests/test_geoJSON.py
@@ -0,0 +1,26 @@
+import pysal
+import doctest
+import unittest
+
+
class test_MultiPloygon(unittest.TestCase):
    # NOTE(review): class name misspells "Polygon"; kept unchanged for
    # backward compatibility with external test discovery.

    def test___init__1(self):
        """
        Tests conversion of polygons with multiple shells to
        geoJSON multipolygons, and back.
        """
        shp = pysal.open(pysal.examples.get_path("NAT.SHP"), 'r')
        multipolygons = [p for p in shp if len(p.parts) > 1]
        for poly in multipolygons:
            json = poly.__geo_interface__
            shape = pysal.cg.asShape(json)
            # assertEqual, not the deprecated assertEquals alias
            # (removed in Python 3.12).
            self.assertEqual(json['type'], 'MultiPolygon')
            self.assertEqual(str(shape.holes), str(poly.holes))
            self.assertEqual(str(shape.parts), str(poly.parts))

if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/cg/tests/test_locators.py b/pysal/cg/tests/test_locators.py
new file mode 100644
index 0000000..b36b5a6
--- /dev/null
+++ b/pysal/cg/tests/test_locators.py
@@ -0,0 +1,65 @@
+
+"""locators Unittest."""
+from pysal.cg import *
+import unittest
+
+
class PolygonLocator_Tester(unittest.TestCase):
    """Unit tests for PolygonLocator."""
    def setUp(self):
        # Three triangles for the first locator.
        triangles = [
            Polygon([Point((0, 1)), Point((4, 5)), Point((5, 1))]),
            Polygon([Point((3, 9)), Point((6, 7)), Point((1, 1))]),
            Polygon([Point((7, 1)), Point((8, 7)), Point((9, 1))]),
        ]
        self.polygons = triangles
        self.pl = PolygonLocator(self.polygons)

        # Five adjacent 10x10 squares in a horizontal band for the second.
        squares = []
        bottom, top = 10, 20
        for i in range(5):
            left = i * 10
            right = left + 10
            corners = [Point((left, bottom)), Point((right, bottom)),
                       Point((right, top)), Point((left, top))]
            squares.append(Polygon(corners))
        self.pl2 = PolygonLocator(squares)

    def test_PolygonLocator(self):
        # No polygon fits entirely inside this small query window.
        result = self.pl.inside(Rectangle(3, 7, 5, 8))
        self.assertEqual(len(result), 0)

    def test_inside(self):
        # Window too small to fully contain any polygon.
        self.assertEqual(len(self.pl.inside(Rectangle(3, 3, 5, 5))), 0)
        # Window large enough to fully contain the first triangle.
        self.assertEqual(len(self.pl.inside(Rectangle(0, 0, 5, 5))), 1)

    def test_overlapping(self):
        # Two triangles overlap the central window.
        self.assertEqual(len(self.pl.overlapping(Rectangle(3, 3, 5, 5))), 2)
        # Only the rightmost triangle overlaps this window.
        self.assertEqual(len(self.pl.overlapping(Rectangle(8, 3, 10, 10))), 1)
        # Four of the five squares overlap the horizontal band.
        self.assertEqual(len(self.pl2.overlapping(Rectangle(2, 12, 35, 15))), 4)
+
# Build an explicit suite so the module can be run directly.
suite = unittest.TestSuite()
test_classes = [PolygonLocator_Tester]
for cls in test_classes:
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(cls))

if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/cg/tests/test_rtree.py b/pysal/cg/tests/test_rtree.py
new file mode 100644
index 0000000..bfa5ca1
--- /dev/null
+++ b/pysal/cg/tests/test_rtree.py
@@ -0,0 +1,68 @@
+
+"""pyrtree Unittest."""
+from pysal.cg import RTree, Rect
+import unittest
+
+
class Pyrtree_Tester(unittest.TestCase):
    """Unit tests for the pure-python RTree."""
    def setUp(self):
        # Build a 10x10 grid of 20x20 rectangles, keyed 0..99 row-major.
        k = 10
        w = 20
        rects = {}
        key = 0  # renamed from `id`, which shadowed the builtin
        for i in range(k):
            mn_y = i * w
            mx_y = mn_y + w
            for j in range(k):
                mn_x = j * w
                mx_x = mn_x + w
                rects[key] = Rect(mn_x, mn_y, mx_x, mx_y)
                key += 1
        self.objects = rects

    def test_rtree(self):
        t = RTree()
        # Iterate items directly instead of `for object in ...`, which
        # shadowed the builtin `object`.
        for key, rect in self.objects.items():
            t.insert(key, rect)
        self.assertEqual(len(self.objects), 100)

        qr = Rect(5, 5, 25, 25)

        # Find objects with MBRs intersecting with qr.
        res = [r.leaf_obj() for r in t.query_rect(qr) if r.is_leaf()]
        self.assertEqual(len(res), 4)
        res.sort()
        self.assertEqual(res, [0, 1, 10, 11])

        # Vertices are shared by all coincident rectangles.
        res = [r.leaf_obj()
               for r in t.query_point((20.0, 20.0)) if r.is_leaf()]
        self.assertEqual(len(res), 4)

        # A point on a shared edge hits both neighbours.
        res = [r.leaf_obj() for r in t.query_point((21, 20)) if r.is_leaf()]
        self.assertEqual(len(res), 2)

        # Single internal point.
        res = [r.leaf_obj() for r in t.query_point((21, 21)) if r.is_leaf()]
        self.assertEqual(len(res), 1)

        # Single external point.
        res = [r.leaf_obj() for r in t.query_point((-12, 21)) if r.is_leaf()]
        self.assertEqual(len(res), 0)

        qr = Rect(5, 6, 65, 7)
        res = [r.leaf_obj() for r in t.query_rect(qr) if r.is_leaf()]
        self.assertEqual(len(res), 4)
+
+
# Build an explicit suite so the module can be run directly.
suite = unittest.TestSuite()
test_classes = [Pyrtree_Tester]
for cls in test_classes:
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(cls))

if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/cg/tests/test_segmentLocator.py b/pysal/cg/tests/test_segmentLocator.py
new file mode 100644
index 0000000..612419d
--- /dev/null
+++ b/pysal/cg/tests/test_segmentLocator.py
@@ -0,0 +1,48 @@
+"""Segment Locator Unittest."""
+from pysal.cg import *
+from pysal.cg.segmentLocator import *
+import unittest
+
+
class SegmentGrid_Tester(unittest.TestCase):
    """Unit tests for SegmentGrid nearest-segment queries."""
    def setUp(self):
        # 10x10 grid with four line segments, one for each edge of the grid.
        self.grid = SegmentGrid(Rectangle(0, 0, 10, 10), 1)
        self.grid.add(LineSegment(Point((0.0, 0.0)), Point((0.0, 10.0))), 0)
        self.grid.add(LineSegment(Point((0.0, 10.0)), Point((10.0, 10.0))), 1)
        self.grid.add(LineSegment(Point((10.0, 10.0)), Point((10.0, 0.0))), 2)
        self.grid.add(LineSegment(Point((10.0, 0.0)), Point((0.0, 0.0))), 3)

    def test_nearest_1(self):
        # Interior and on-edge query points.  assertEqual, not the
        # deprecated assertEquals alias (removed in Python 3.12).
        self.assertEqual([0, 1, 2, 3],
                         self.grid.nearest(Point((5.0, 5.0))))    # Center
        self.assertEqual([0],
                         self.grid.nearest(Point((0.0, 5.0))))    # Left Edge
        self.assertEqual([1],
                         self.grid.nearest(Point((5.0, 10.0))))   # Top Edge
        self.assertEqual([2],
                         self.grid.nearest(Point((10.0, 5.0))))   # Right Edge
        self.assertEqual([3],
                         self.grid.nearest(Point((5.0, 0.0))))    # Bottom Edge

    def test_nearest_2(self):
        # Query points far outside the grid.
        self.assertEqual([0, 1, 3],
                         self.grid.nearest(Point((-100000.0, 5.0))))  # Left
        self.assertEqual([1, 2, 3],
                         self.grid.nearest(Point((100000.0, 5.0))))   # Right
        self.assertEqual([0, 2, 3],
                         self.grid.nearest(Point((5.0, -100000.0))))  # Bottom
        self.assertEqual([0, 1, 2],
                         self.grid.nearest(Point((5.0, 100000.0))))   # Top
+
+
# Build an explicit suite so the module can be run directly.
suite = unittest.TestSuite()
test_classes = [SegmentGrid_Tester]
for cls in test_classes:
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(cls))

if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/cg/tests/test_shapes.py b/pysal/cg/tests/test_shapes.py
new file mode 100644
index 0000000..7dc5733
--- /dev/null
+++ b/pysal/cg/tests/test_shapes.py
@@ -0,0 +1,758 @@
+from pysal.cg import Point, LineSegment, Line, Ray, Chain, Rectangle, Polygon
+import doctest
+import unittest
+
+
class test_Point(unittest.TestCase):

    def test___init__1(self):
        """
        Tests whether points are created without issue.

        Test tag: <tc>#tests#Point.__init__</tc>
        """
        for coords in [(-5.0, 10.0), (0.0, -6.0), (float(1e300), float(-1e300))]:
            Point(coords)  # construction must not raise

    def test___str__1(self):
        """
        Tests whether the string produced is valid for corner cases.

        Test tag: <tc>#tests#Point__str__</tc>
        """
        for coords in [(-5, 10), (0, -6.0), (float(1e300), -1e300)]:
            p = Point(coords)
            # Point recasts coordinates to floats; the string form must
            # match.  assertEqual, not the deprecated assertEquals alias.
            self.assertEqual(str(p), str((float(coords[0]), float(coords[1]))))
+
+
class test_LineSegment(unittest.TestCase):

    def test_is_ccw1(self):
        """
        Test corner cases for horizontal segment starting at origin.

        Test tag: <tc>#tests#LineSegment.is_ccw</tc>
        """
        ls = LineSegment(Point((0, 0)), Point((5, 0)))
        # Collinear points are never counterclockwise, wherever they fall.
        self.assertFalse(ls.is_ccw(Point((10, 0))))   # Beyond positive end
        self.assertFalse(ls.is_ccw(Point((3, 0))))    # On segment
        self.assertFalse(ls.is_ccw(Point((-10, 0))))  # Beyond negative end
        self.assertFalse(ls.is_ccw(Point((0, 0))))    # Endpoint of segment
        self.assertFalse(ls.is_ccw(Point((5, 0))))    # Endpoint of segment

    def test_is_ccw2(self):
        """
        Test corner cases for vertical segment ending at origin.

        Test tag: <tc>#tests#LineSegment.is_ccw</tc>
        """
        ls = LineSegment(Point((0, -5)), Point((0, 0)))
        self.assertFalse(ls.is_ccw(Point((0, 10))))   # Beyond positive end
        self.assertFalse(ls.is_ccw(Point((0, -3))))   # On segment
        self.assertFalse(ls.is_ccw(Point((0, -10))))  # Beyond negative end
        self.assertFalse(ls.is_ccw(Point((0, -5))))   # Endpoint of segment
        self.assertFalse(ls.is_ccw(Point((0, 0))))    # Endpoint of segment

    def test_is_ccw3(self):
        """
        Test corner cases for non-axis-aligned segment not through origin.

        Test tag: <tc>#tests#LineSegment.is_ccw</tc>
        """
        ls = LineSegment(Point((0, 1)), Point((5, 6)))
        self.assertFalse(ls.is_ccw(Point((10, 11))))   # Beyond positive end
        self.assertFalse(ls.is_ccw(Point((3, 4))))     # On segment
        self.assertFalse(ls.is_ccw(Point((-10, -9))))  # Beyond negative end
        self.assertFalse(ls.is_ccw(Point((0, 1))))     # Endpoint of segment
        self.assertFalse(ls.is_ccw(Point((5, 6))))     # Endpoint of segment

    def test_is_cw1(self):
        """
        Test corner cases for horizontal segment starting at origin.

        Test tag: <tc>#tests#LineSegment.is_cw</tc>
        """
        ls = LineSegment(Point((0, 0)), Point((5, 0)))
        # Collinear points are never clockwise either.
        self.assertFalse(ls.is_cw(Point((10, 0))))   # Beyond positive end
        self.assertFalse(ls.is_cw(Point((3, 0))))    # On segment
        self.assertFalse(ls.is_cw(Point((-10, 0))))  # Beyond negative end
        self.assertFalse(ls.is_cw(Point((0, 0))))    # Endpoint of segment
        self.assertFalse(ls.is_cw(Point((5, 0))))    # Endpoint of segment

    def test_is_cw2(self):
        """
        Test corner cases for vertical segment ending at origin.

        Test tag: <tc>#tests#LineSegment.is_cw</tc>
        """
        ls = LineSegment(Point((0, -5)), Point((0, 0)))
        self.assertFalse(ls.is_cw(Point((0, 10))))   # Beyond positive end
        self.assertFalse(ls.is_cw(Point((0, -3))))   # On segment
        self.assertFalse(ls.is_cw(Point((0, -10))))  # Beyond negative end
        self.assertFalse(ls.is_cw(Point((0, -5))))   # Endpoint of segment
        self.assertFalse(ls.is_cw(Point((0, 0))))    # Endpoint of segment

    def test_is_cw3(self):
        """
        Test corner cases for non-axis-aligned segment not through origin.

        Test tag: <tc>#tests#LineSegment.is_cw</tc>
        """
        ls = LineSegment(Point((0, 1)), Point((5, 6)))
        self.assertFalse(ls.is_cw(Point((10, 11))))   # Beyond positive end
        self.assertFalse(ls.is_cw(Point((3, 4))))     # On segment
        self.assertFalse(ls.is_cw(Point((-10, -9))))  # Beyond negative end
        self.assertFalse(ls.is_cw(Point((0, 1))))     # Endpoint of segment
        self.assertFalse(ls.is_cw(Point((5, 6))))     # Endpoint of segment

    def test_get_swap1(self):
        """
        Tests corner cases.

        Test tag: <tc>#tests#LineSegment.get_swap</tc>
        """
        # assertEqual, not the deprecated assertEquals alias (removed in
        # Python 3.12); a swapped segment must exchange its endpoints.
        ls = LineSegment(Point((0, 0)), Point((10, 0)))
        swap = ls.get_swap()
        self.assertEqual(ls.p1, swap.p2)
        self.assertEqual(ls.p2, swap.p1)

        ls = LineSegment(Point((-5, 0)), Point((5, 0)))
        swap = ls.get_swap()
        self.assertEqual(ls.p1, swap.p2)
        self.assertEqual(ls.p2, swap.p1)

        ls = LineSegment(Point((0, 0)), Point((0, 0)))
        swap = ls.get_swap()
        self.assertEqual(ls.p1, swap.p2)
        self.assertEqual(ls.p2, swap.p1)

        ls = LineSegment(Point((5, 5)), Point((5, 5)))
        swap = ls.get_swap()
        self.assertEqual(ls.p1, swap.p2)
        self.assertEqual(ls.p2, swap.p1)

    def test_bounding_box(self):
        """
        Tests corner cases.

        Test tag: <tc>#tests#LineSegment.bounding_box</tc>
        """
        ls = LineSegment(Point((0, 0)), Point((0, 10)))
        self.assertEqual(ls.bounding_box.left, 0)
        self.assertEqual(ls.bounding_box.lower, 0)
        self.assertEqual(ls.bounding_box.right, 0)
        self.assertEqual(ls.bounding_box.upper, 10)

        ls = LineSegment(Point((0, 0)), Point((-3, -4)))
        self.assertEqual(ls.bounding_box.left, -3)
        self.assertEqual(ls.bounding_box.lower, -4)
        self.assertEqual(ls.bounding_box.right, 0)
        self.assertEqual(ls.bounding_box.upper, 0)

        ls = LineSegment(Point((-5, 0)), Point((3, 0)))
        self.assertEqual(ls.bounding_box.left, -5)
        self.assertEqual(ls.bounding_box.lower, 0)
        self.assertEqual(ls.bounding_box.right, 3)
        self.assertEqual(ls.bounding_box.upper, 0)

    def test_len1(self):
        """
        Tests corner cases.

        Test tag: <tc>#tests#LineSegment.len</tc>
        """
        ls = LineSegment(Point((0, 0)), Point((0, 0)))
        self.assertEqual(ls.len, 0)

        ls = LineSegment(Point((0, 0)), Point((-3, 0)))
        self.assertEqual(ls.len, 3)

    def test_line1(self):
        """
        Tests corner cases.

        Test tag: <tc>#tests#LineSegment.line</tc>
        """
        import math
        ls = LineSegment(Point((0, 0)), Point((1, 0)))
        self.assertEqual(ls.line.m, 0)
        self.assertEqual(ls.line.b, 0)

        # Vertical segments: infinite slope, undefined intercept.
        ls = LineSegment(Point((0, 0)), Point((0, 1)))
        self.assertEqual(ls.line.m, float('inf'))
        self.assertTrue(math.isnan(ls.line.b))

        ls = LineSegment(Point((0, 0)), Point((0, -1)))
        self.assertEqual(ls.line.m, float('inf'))
        self.assertTrue(math.isnan(ls.line.b))

        # Degenerate segment has no supporting line.
        ls = LineSegment(Point((0, 0)), Point((0, 0)))
        self.assertEqual(ls.line, None)

        ls = LineSegment(Point((5, 0)), Point((10, 0)))
        ls1 = LineSegment(Point((5, 0)), Point((10, 1)))
        self.assertTrue(ls.intersect(ls1))
        ls2 = LineSegment(Point((5, 1)), Point((10, 1)))
        self.assertFalse(ls.intersect(ls2))
        ls2 = LineSegment(Point((7, -1)), Point((7, 2)))
        self.assertTrue(ls.intersect(ls2))
+
+
+
+
class test_Line(unittest.TestCase):

    def test___init__1(self):
        """
        Tests a variety of generic cases.

        Test tag: <tc>#tests#Line.__init__</tc>
        """
        for m, b in [(4, 0.0), (-140, 5), (0, 0)]:
            Line(m, b)  # construction must not raise

    def test_y1(self):
        """
        Tests a variety of generic and special cases (+-infinity).

        Test tag: <tc>#tests#Line.y</tc>
        """
        # assertEqual, not the deprecated assertEquals alias (removed in
        # Python 3.12).  1e600 overflows to float infinity by design.
        l = Line(0, 0)
        self.assertEqual(l.y(0), 0)
        self.assertEqual(l.y(-1e600), 0)
        self.assertEqual(l.y(1e600), 0)

        l = Line(1, 1)
        self.assertEqual(l.y(2), 3)
        self.assertEqual(l.y(-1e600), -1e600)
        self.assertEqual(l.y(1e600), 1e600)

        l = Line(-1, 1)
        self.assertEqual(l.y(2), -1)
        self.assertEqual(l.y(-1e600), 1e600)
        self.assertEqual(l.y(1e600), -1e600)

    def test_x1(self):
        """
        Tests a variety of generic and special cases (+-infinity).

        Test tag: <tc>#tests#Line.x</tc>
        """
        # Horizontal line: x is undefined for every y, so x() must raise.
        l = Line(0, 0)
        with self.assertRaises(ArithmeticError):
            l.x(0)
        with self.assertRaises(ArithmeticError):
            l.x(-1e600)
        with self.assertRaises(ArithmeticError):
            l.x(1e600)

        l = Line(1, 1)
        self.assertEqual(l.x(3), 2)
        self.assertEqual(l.x(-1e600), -1e600)
        self.assertEqual(l.x(1e600), 1e600)

        l = Line(-1, 1)
        self.assertEqual(l.x(2), -1)
        self.assertEqual(l.x(-1e600), 1e600)
        self.assertEqual(l.x(1e600), -1e600)
+
+
class test_Ray(unittest.TestCase):
    """Construction tests for the Ray primitive."""

    def test___init__1(self):
        """
        Tests generic cases.

        <tc>#tests#Ray.__init__</tc>
        """
        # Construction alone must not raise for arbitrary origin/direction.
        ray = Ray(Point((0, 0)), Point((1, 1)))
        ray = Ray(Point((8, -3)), Point((-5, 9)))
+
+
class test_Chain(unittest.TestCase):
    """Tests for the Chain (polyline) primitive."""

    def test___init__1(self):
        """
        Generic testing that no exception is thrown.

        Test tag: <tc>#tests#Chain.__init__</tc>
        """
        c = Chain([Point((0, 0))])
        c = Chain([[Point((0, 0)), Point((1, 1))], [Point((2, 5))]])

    def test_vertices1(self):
        """
        Testing for repeated vertices and multiple parts.

        Test tag: <tc>#tests#Chain.vertices</tc>
        """
        vertices = [Point((0, 0)), Point((1, 1)), Point((2, 5)),
                    Point((0, 0)), Point((1, 1)), Point((2, 5))]
        self.assertEqual(Chain(vertices).vertices, vertices)

        # Multi-part chains flatten their parts in order.
        vertices = [[Point((0, 0)), Point((1, 1)), Point((2, 5))],
                    [Point((0, 0)), Point((1, 1)), Point((2, 5))]]
        self.assertEqual(Chain(vertices).vertices, vertices[0] + vertices[1])

    def test_parts1(self):
        """
        Generic testing of parts functionality.

        Test tag: <tc>#tests#Chain.parts</tc>
        """
        # A flat vertex list is reported as a single part.
        vertices = [Point((0, 0)), Point((1, 1)), Point((2, 5)),
                    Point((0, 0)), Point((1, 1)), Point((2, 5))]
        self.assertEqual(Chain(vertices).parts, [vertices])

        vertices = [[Point((0, 0)), Point((1, 1)), Point((2, 5))],
                    [Point((0, 0)), Point((1, 1)), Point((2, 5))]]
        self.assertEqual(Chain(vertices).parts, vertices)

    def test_bounding_box1(self):
        """
        Test correctness with multiple parts.

        Test tag: <tc>#tests#Chain.bounding_box</tc>
        """
        vertices = [[Point((0, 0)), Point((1, 1)), Point((2, 6))],
                    [Point((-5, -5)), Point((0, 0)), Point((2, 5))]]
        bb = Chain(vertices).bounding_box
        self.assertEqual(bb.left, -5)
        self.assertEqual(bb.lower, -5)
        self.assertEqual(bb.right, 2)
        self.assertEqual(bb.upper, 6)

    def test_len1(self):
        """
        Test correctness with multiple parts and zero-length
        point-to-point distances.

        Test tag: <tc>#tests#Chain.len</tc>
        """
        # Second part ends with a repeated point contributing 0 length.
        vertices = [[Point((0, 0)), Point((1, 0)), Point((1, 5))],
                    [Point((-5, -5)), Point((-5, 0)), Point((0, 0)), Point((0, 0))]]
        self.assertEqual(Chain(vertices).len, 6 + 10)
+
+
class test_Polygon(unittest.TestCase):
    """Tests for the Polygon primitive (parts, holes, area, centroid, ...)."""

    def test___init__1(self):
        """
        Test various input configurations (list vs. lists of lists, holes)

        <tc>#tests#Polygon.__init__</tc>
        """
        # Named rings keep the constructor calls readable; the inputs are
        # identical to the original inline literals.
        square1 = [Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))]
        square2 = [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]
        hole1 = [Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))]
        hole2 = [Point((6, 6)), Point((6, 8)), Point((8, 8)), Point((8, 6))]

        # one part, no holes
        p = Polygon(square1)
        # multi parts, no holes
        p = Polygon([square1, square2])
        # one part, one hole
        p = Polygon(square1, holes=hole1)
        # multi part, one hole
        p = Polygon([square1, square2], holes=hole1)
        # one part, multi holes
        p = Polygon(square1, holes=[hole1, hole2])
        # multi part, multi holes
        p = Polygon([square1, square2], holes=[hole1, hole2])

    def test_area1(self):
        """
        Test multiple parts.

        Test tag: <tc>#tests#Polygon.area</tc>
        """
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]])
        self.assertEqual(p.area, 200)

    def test_area2(self):
        """
        Test holes.

        Test tag: <tc>#tests#Polygon.area</tc>
        """
        # One 2x2 hole subtracts 4 from the 10x10 square.
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                    holes=[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))])
        self.assertEqual(p.area, 100 - 4)

        # Two holes subtract from the same part.
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                    holes=[[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
                           [Point((6, 6)), Point((6, 8)), Point((8, 8)), Point((8, 6))]])
        self.assertEqual(p.area, 100 - (4 + 4))

        # Holes distributed across two parts.
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]],
                    holes=[[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
                           [Point((36, 36)), Point((36, 38)), Point((38, 38)), Point((38, 36))]])
        self.assertEqual(p.area, 200 - (4 + 4))

    def test_area4(self):
        """
        Test polygons with vertices in both orders (cw, ccw).

        Test tag: <tc>#tests#Polygon.area</tc>
        """
        # Area must be orientation-independent.
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))])
        self.assertEqual(p.area, 100)

        p = Polygon([Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))])
        self.assertEqual(p.area, 100)

    def test_bounding_box1(self):
        """
        Test polygons with multiple parts.

        Test tag: <tc>#tests#Polygon.bounding_box</tc>
        """
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]])
        bb = p.bounding_box
        self.assertEqual(bb.left, 0)
        self.assertEqual(bb.lower, 0)
        self.assertEqual(bb.right, 40)
        self.assertEqual(bb.upper, 40)

    def test_centroid1(self):
        """
        Test polygons with multiple parts of the same size.

        Test tag: <tc>#tests#Polygon.centroid</tc>
        """
        # Equal-area parts: centroid is the midpoint of the part centroids.
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]])
        c = p.centroid
        self.assertEqual(c[0], 20)
        self.assertEqual(c[1], 20)

    def test_centroid2(self):
        """
        Test polygons with multiple parts of different size.

        Test tag: <tc>#tests#Polygon.centroid</tc>
        """
        # Area-weighted: the 10x10 part dominates the 5x5 part.
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((35, 30)), Point((35, 35)), Point((30, 35))]])
        c = p.centroid
        self.assertEqual(c[0], 10.5)
        self.assertEqual(c[1], 10.5)

    def test_holes1(self):
        """
        Test for correct vertex values/order.

        Test tag: <tc>#tests#Polygon.holes</tc>
        """
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                    holes=[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))])
        self.assertEqual(len(p.holes), 1)
        # Any rotation of the expected ring is acceptable.
        e_holes = [Point((2, 2)), Point((2, 4)), Point((4, 4)), Point((4, 2))]
        self.assertTrue(p.holes[0] in [e_holes, [e_holes[-1]] + e_holes[:3],
                                       e_holes[-2:] + e_holes[:2],
                                       e_holes[-3:] + [e_holes[0]]])

    def test_holes2(self):
        """
        Test for multiple holes.

        Test tag: <tc>#tests#Polygon.holes</tc>
        """
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                    holes=[[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
                           [Point((6, 6)), Point((6, 8)), Point((8, 8)), Point((8, 6))]])
        holes = p.holes
        self.assertEqual(len(holes), 2)

    def test_parts1(self):
        """
        Test for correct vertex values/order.

        Test tag: <tc>#tests#Polygon.parts</tc>
        """
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((30, 40))]])
        self.assertEqual(len(p.parts), 2)

        # Parts may be stored in either order and at any ring rotation.
        part1 = [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))]
        part2 = [Point((30, 30)), Point((30, 40)), Point((40, 30))]
        if len(p.parts[0]) == 4:
            self.assertTrue(p.parts[0] in [part1, part1[-1:] + part1[:3],
                                           part1[-2:] + part1[:2],
                                           part1[-3:] + part1[:1]])
            self.assertTrue(p.parts[1] in [part2, part2[-1:] + part2[:2],
                                           part2[-2:] + part2[:1]])
        elif len(p.parts[0]) == 3:
            self.assertTrue(p.parts[0] in [part2, part2[-1:] + part2[:2],
                                           part2[-2:] + part2[:1]])
            self.assertTrue(p.parts[1] in [part1, part1[-1:] + part1[:3],
                                           part1[-2:] + part1[:2],
                                           part1[-3:] + part1[:1]])
        else:
            self.fail()

    def test_perimeter1(self):
        """
        Test with multiple parts.

        Test tag: <tc>#tests#Polygon.perimeter</tc>
        """
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]])
        self.assertEqual(p.perimeter, 80)

    def test_perimeter2(self):
        """
        Test with holes.

        Test tag: <tc>#tests#Polygon.perimeter</tc>
        """
        # Hole boundaries contribute to the perimeter.
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]],
                    holes=[[Point((2, 2)), Point((4, 2)), Point((4, 4)), Point((2, 4))],
                           [Point((6, 6)), Point((6, 8)), Point((8, 8)), Point((8, 6))]])
        self.assertEqual(p.perimeter, 80 + 16)

    def test_vertices1(self):
        """
        Test for correct values/order of vertices.

        Test tag: <tc>#tests#Polygon.vertices</tc>
        """
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))])
        self.assertEqual(len(p.vertices), 4)
        # Any rotation of the expected ring is acceptable.
        e_verts = [Point((0, 0)), Point((0, 10)), Point((10, 10)), Point((10, 0))]
        self.assertTrue(p.vertices in [e_verts, e_verts[-1:] + e_verts[:3],
                                       e_verts[-2:] + e_verts[:2],
                                       e_verts[-3:] + e_verts[:1]])

    def test_vertices2(self):
        """
        Test for multiple parts.

        Test tag: <tc>#tests#Polygon.vertices</tc>
        """
        p = Polygon([[Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                     [Point((30, 30)), Point((40, 30)), Point((40, 40)), Point((30, 40))]])
        self.assertEqual(len(p.vertices), 8)

    def test_contains_point(self):
        """
        Test point containment, including points inside the hole.

        Test tag: <tc>#tests#Polygon.contains_point</tc>
        """
        p = Polygon([Point((0, 0)), Point((10, 0)), Point((10, 10)), Point((0, 10))],
                    [Point((1, 2)), Point((2, 2)), Point((2, 1)), Point((1, 1))])
        self.assertEqual(p.contains_point((0, 0)), 1)
        # (1, 1) sits inside the hole and is therefore excluded.
        self.assertEqual(p.contains_point((1, 1)), 0)
        self.assertEqual(p.contains_point((2, 2)), 1)
        self.assertEqual(p.contains_point((5, 5)), 1)
        self.assertEqual(p.contains_point((10, 10)), 0)
+
+
class test_Rectangle(unittest.TestCase):
    """Tests for the Rectangle primitive."""

    def test___init__1(self):
        """
        Test exceptions are thrown correctly.

        Test tag: <tc>#tests#Rectangle.__init__</tc>
        """
        # right < left must be rejected.
        with self.assertRaises(ArithmeticError):
            Rectangle(1, 1, -1, 5)

        # upper < lower must be rejected.
        with self.assertRaises(ArithmeticError):
            Rectangle(1, 1, 5, -1)

    def test_set_centroid1(self):
        """
        Test with rectangles of zero width or height.

        Test tag: <tc>#tests#Rectangle.set_centroid</tc>
        """
        r = Rectangle(5, 5, 5, 10)  # Zero width
        r.set_centroid(Point((0, 0)))
        self.assertEqual(r.left, 0)
        self.assertEqual(r.lower, -2.5)
        self.assertEqual(r.right, 0)
        self.assertEqual(r.upper, 2.5)

        r = Rectangle(10, 5, 20, 5)  # Zero height
        r.set_centroid(Point((40, 40)))
        self.assertEqual(r.left, 35)
        self.assertEqual(r.lower, 40)
        self.assertEqual(r.right, 45)
        self.assertEqual(r.upper, 40)

        r = Rectangle(0, 0, 0, 0)  # Zero width and height
        r.set_centroid(Point((-4, -4)))
        self.assertEqual(r.left, -4)
        self.assertEqual(r.lower, -4)
        self.assertEqual(r.right, -4)
        self.assertEqual(r.upper, -4)

    def test_set_scale1(self):
        """
        Test repeated scaling.

        Test tag: <tc>#tests#Rectangle.set_scale</tc>
        """
        r = Rectangle(2, 2, 4, 4)

        # Halving then doubling should restore the original extent.
        r.set_scale(0.5)
        self.assertEqual(r.left, 2.5)
        self.assertEqual(r.lower, 2.5)
        self.assertEqual(r.right, 3.5)
        self.assertEqual(r.upper, 3.5)

        r.set_scale(2)
        self.assertEqual(r.left, 2)
        self.assertEqual(r.lower, 2)
        self.assertEqual(r.right, 4)
        self.assertEqual(r.upper, 4)

    def test_set_scale2(self):
        """
        Test scaling of rectangles with zero width/height.

        Test tag: <tc>#tests#Rectangle.set_scale</tc>
        """
        r = Rectangle(5, 5, 5, 10)  # Zero width
        r.set_scale(2)
        self.assertEqual(r.left, 5)
        self.assertEqual(r.lower, 2.5)
        self.assertEqual(r.right, 5)
        self.assertEqual(r.upper, 12.5)

        r = Rectangle(10, 5, 20, 5)  # Zero height
        r.set_scale(2)
        self.assertEqual(r.left, 5)
        self.assertEqual(r.lower, 5)
        self.assertEqual(r.right, 25)
        self.assertEqual(r.upper, 5)

        # A degenerate point-rectangle is invariant under any scale.
        r = Rectangle(0, 0, 0, 0)  # Zero width and height
        r.set_scale(100)
        self.assertEqual(r.left, 0)
        self.assertEqual(r.lower, 0)
        self.assertEqual(r.right, 0)
        self.assertEqual(r.upper, 0)

        r = Rectangle(0, 0, 0, 0)  # Zero width and height
        r.set_scale(0.01)
        self.assertEqual(r.left, 0)
        self.assertEqual(r.lower, 0)
        self.assertEqual(r.right, 0)
        self.assertEqual(r.upper, 0)

    def test_area1(self):
        """
        Test rectangles with zero width/height

        Test tag: <tc>#tests#Rectangle.area</tc>
        """
        r = Rectangle(5, 5, 5, 10)  # Zero width
        self.assertEqual(r.area, 0)

        r = Rectangle(10, 5, 20, 5)  # Zero height
        self.assertEqual(r.area, 0)

        r = Rectangle(0, 0, 0, 0)  # Zero width and height
        self.assertEqual(r.area, 0)

    def test_height1(self):
        """
        Test rectangles with zero height.

        Test tag: <tc>#tests#Rectangle.height</tc>
        """
        r = Rectangle(10, 5, 20, 5)  # Zero height
        self.assertEqual(r.height, 0)

    def test_width1(self):
        """
        Test rectangles with zero width.

        Test tag: <tc>#tests#Rectangle.width</tc>
        """
        r = Rectangle(5, 5, 5, 10)  # Zero width
        self.assertEqual(r.width, 0)
+
+
# The commented-out manual suite that used to live here referenced stale
# class names (_TestPoint, _TestLineSegment, ...) and was buggy anyway
# (it added the Ray tests twice and omitted the Polygon tests).
# unittest.main() discovers every TestCase in this module automatically,
# so the manual suite was removed.
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/cg/tests/test_standalone.py b/pysal/cg/tests/test_standalone.py
new file mode 100644
index 0000000..a5ee52f
--- /dev/null
+++ b/pysal/cg/tests/test_standalone.py
@@ -0,0 +1,601 @@
+import unittest
+import numpy as np
+import math
+
+from pysal.cg.shapes import *
+from pysal.cg.standalone import *
+
+
class TestBbcommon(unittest.TestCase):
    """Overlap checks for bbcommon on [left, lower, right, upper] boxes."""

    def test_bbcommon(self):
        # Partially overlapping boxes.
        box_a = [0, 0, 10, 10]
        box_b = [5, 5, 15, 15]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_same(self):
        # Identical boxes overlap.
        box_a = [0, 0, 10, 10]
        box_b = [0, 0, 10, 10]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_nested(self):
        # A box fully inside another counts as common.
        box_a = [0, 0, 10, 10]
        box_b = [1, 1, 9, 9]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_top(self):
        # Overlap through the top edge only.
        box_a = [0, 0, 10, 10]
        box_b = [3, 5, 6, 15]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_shared_edge(self):
        # Touching along one full edge counts as common.
        box_a = [0, 0, 10, 10]
        box_b = [0, 10, 10, 20]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_shared_corner(self):
        # Touching at a single corner counts as common.
        box_a = [0, 0, 10, 10]
        box_b = [10, 10, 20, 20]
        self.assertEqual(1, bbcommon(box_a, box_b))

    def test_bbcommon_floats(self):
        # Float coordinates behave like integers here.
        box_a = [0.0, 0.0, 0.1, 0.1]
        box_b = [0.05, 0.05, 0.15, 0.15]
        self.assertEqual(1, bbcommon(box_a, box_b))
+
+
class TestGetBoundingBox(unittest.TestCase):
    """Tests for get_bounding_box over mixed shape types."""

    def test_get_bounding_box(self):
        # Box must cover both the point and the rectangle.
        shapes = [Point((-1, 5)), Rectangle(0, 6, 11, 12)]
        self.assertEqual([-1, 5, 11, 12], get_bounding_box(shapes)[:])
+
+
class TestGetAngleBetween(unittest.TestCase):
    """Tests for get_angle_between on rays sharing an origin."""

    def test_get_angle_between(self):
        # Identical rays subtend a zero angle.
        base = Ray(Point((0, 0)), Point((1, 0)))
        other = Ray(Point((0, 0)), Point((1, 0)))
        self.assertEqual(0.0, get_angle_between(base, other))

    def test_get_angle_between_expect45(self):
        base = Ray(Point((0, 0)), Point((1, 0)))
        other = Ray(Point((0, 0)), Point((1, 1)))
        self.assertEqual(45.0, math.degrees(get_angle_between(base, other)))

    def test_get_angle_between_expect90(self):
        base = Ray(Point((0, 0)), Point((1, 0)))
        other = Ray(Point((0, 0)), Point((0, 1)))
        self.assertEqual(90.0, math.degrees(get_angle_between(base, other)))
+
+
class TestIsCollinear(unittest.TestCase):
    """Tests for is_collinear, including floating-point tolerance cases."""

    def test_is_collinear(self):
        self.assertTrue(is_collinear(Point((0, 0)), Point((1, 1)),
                                     Point((5, 5))))

    def test_is_collinear_expectFalse(self):
        self.assertFalse(is_collinear(Point((0, 0)), Point((1, 1)),
                                      Point((5, 0))))

    def test_is_collinear_AlongX(self):
        self.assertTrue(is_collinear(Point((0, 0)), Point((1, 0)),
                                     Point((5, 0))))

    def test_is_collinear_AlongY(self):
        self.assertTrue(is_collinear(Point((0, 0)), Point((0, 1)),
                                     Point((0, -1))))

    def test_is_collinear_smallFloat(self):
        """
        Given: p1 = (0.1, 0.2), p2 = (0.2, 0.3), p3 = (0.3, 0.4)

        Line(p1,p2):  y = mx + b
            m = (0.3-0.2) / (0.2-0.1) = .1/.1 = 1
            y - mx = b
            b = 0.3 - 1*0.2 = 0.1
            b = 0.2 - 1*0.1 = 0.1

            y = 1*x + 0.1

        Line(p2,p3): y = mx + b
            m = (0.4-0.3) / (0.3-0.2) = .1/.1 = 1
            y - mx = b
            b = 0.4 - 1*0.3 = 0.1
            b = 0.4 - 1*0.2 = 0.1

            y = 1*x + 0.1

        Line(p1,p2) == Line(p2,p3)
        Therefore p1,p2,p3 are collinear.

        Due to floating point rounding errors the standard test,
            ((p2[0]-p1[0])*(p3[1]-p1[1]) - (p2[1]-p1[1])*(p3[0]-p1[0])) == 0
        will fail.  To get around this we use an epsilon.  The numpy.finfo
        function returns the smallest epsilon for the given data type such
        that,
            (numpy.finfo(float).eps + 1.0) != 1.0

        Therefore if
            abs((p2[0]-p1[0])*(p3[1]-p1[1]) - (p2[1]-p1[1])*(
                p3[0]-p1[0])) < numpy.finfo(p1[0]).eps
        The points are collinear.
        """
        self.assertTrue(is_collinear(Point((0.1, 0.2)), Point((0.2, 0.3)),
                                     Point((0.3, 0.4))))

    def test_is_collinear_random(self):
        # Random points on the line y == x across several magnitudes.
        for i in range(10):
            a, b, c = np.random.random(3) * 10 ** i
            self.assertTrue(is_collinear(Point((a, a)), Point((b, b)),
                                         Point((c, c))))

    def test_is_collinear_random2(self):
        # Many random points on the line y == x in [0, 1).
        for _ in range(1000):
            a, b, c = np.random.random(3)
            self.assertTrue(is_collinear(Point((a, a)), Point((b, b)),
                                         Point((c, c))))
+
+
class TestGetSegmentsIntersect(unittest.TestCase):
    """Tests for get_segments_intersect: points, overlaps, and misses."""

    def test_get_segments_intersect(self):
        # Perpendicular crossing at (0, 5).
        first = LineSegment(Point((0, 0)), Point((0, 10)))
        second = LineSegment(Point((-5, 5)), Point((5, 5)))
        self.assertEqual((0.0, 5.0), get_segments_intersect(first, second)[:])

    def test_get_segments_intersect_shared_vert(self):
        # Segments meeting only at a shared endpoint.
        first = LineSegment(Point((0, 0)), Point((0, 10)))
        second = LineSegment(Point((-5, 5)), Point((0, 10)))
        self.assertEqual((0.0, 10.0), get_segments_intersect(first, second)[:])

    def test_get_segments_intersect_floats(self):
        first = LineSegment(Point((0, 0)), Point((0, .10)))
        second = LineSegment(Point((-.5, .05)), Point((.5, .05)))
        self.assertEqual((0.0, .05), get_segments_intersect(first, second)[:])

    def test_get_segments_intersect_angles(self):
        # Diagonals of the unit square cross at the center.
        first = LineSegment(Point((0, 0)), Point((1, 1)))
        second = LineSegment(Point((1, 0)), Point((0, 1)))
        self.assertEqual((0.5, 0.5), get_segments_intersect(first, second)[:])

    def test_get_segments_intersect_no_intersect(self):
        first = LineSegment(Point((-5, 5)), Point((5, 5)))
        second = LineSegment(Point((100, 100)), Point((100, 101)))
        self.assertEqual(None, get_segments_intersect(first, second))

    def test_get_segments_intersect_overlap(self):
        # Collinear overlap yields the shared sub-segment.
        first = LineSegment(Point((0.1, 0.1)), Point((0.6, 0.6)))
        second = LineSegment(Point((0.3, 0.3)), Point((0.9, 0.9)))
        shared = LineSegment(Point((0.3, 0.3)), Point((0.6, 0.6)))
        self.assertEqual(shared, get_segments_intersect(first, second))

    def test_get_segments_intersect_same(self):
        # A segment intersected with itself is itself.
        first = LineSegment(Point((-5, 5)), Point((5, 5)))
        self.assertEqual(first, get_segments_intersect(first, first))

    def test_get_segments_intersect_nested(self):
        # A segment contained in another yields the inner segment.
        outer = LineSegment(Point((0.1, 0.1)), Point((0.9, 0.9)))
        inner = LineSegment(Point((0.3, 0.3)), Point((0.6, 0.6)))
        self.assertEqual(inner, get_segments_intersect(outer, inner))
+
+
class TestGetSegmentPointIntersect(unittest.TestCase):
    """Tests for get_segment_point_intersect."""

    def test_get_segment_point_intersect(self):
        seg = LineSegment(Point((0, 0)), Point((0, 10)))
        pt = Point((0, 5))
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_left_end(self):
        seg = LineSegment(Point((0, 0)), Point((0, 10)))
        pt = seg.p1
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_right_end(self):
        seg = LineSegment(Point((0, 0)), Point((0, 10)))
        pt = seg.p2
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_angle(self):
        seg = LineSegment(Point((0, 0)), Point((1, 1)))
        pt = Point((.1, .1))
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_no_intersect(self):
        seg = LineSegment(Point((0, 0)), Point((0, 10)))
        pt = Point((5, 5))
        self.assertEqual(None, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_no_intersect_collinear(self):
        # Collinear but beyond the segment's end -> no intersection.
        seg = LineSegment(Point((0, 0)), Point((0, 10)))
        pt = Point((0, 20))
        self.assertEqual(None, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_floats(self):
        seg = LineSegment(Point((0.3, 0.3)), Point((.9, .9)))
        pt = Point((.5, .5))
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_floats2(self):
        # BUG FIX: this method previously reused the name
        # test_get_segment_point_intersect_floats, silently shadowing the
        # case above so it never ran; renamed so both execute.
        seg = LineSegment(Point((0.0, 0.0)),
                          Point((2.7071067811865475, 2.7071067811865475)))
        pt = Point((1.0, 1.0))
        self.assertEqual(pt, get_segment_point_intersect(seg, pt))

    def test_get_segment_point_intersect_floats_no_intersect(self):
        seg = LineSegment(Point((0.3, 0.3)), Point((.9, .9)))
        pt = Point((.1, .1))
        self.assertEqual(None, get_segment_point_intersect(seg, pt))
+
+
class TestGetPolygonPointIntersect(unittest.TestCase):
    """Tests for get_polygon_point_intersect against a unit square."""

    def setUp(self):
        # Unit square shared by every case below.
        self.poly = Polygon([Point((0, 0)), Point((1, 0)),
                             Point((1, 1)), Point((0, 1))])

    def test_get_polygon_point_intersect(self):
        pt = Point((0.5, 0.5))
        self.assertEqual(pt, get_polygon_point_intersect(self.poly, pt))

    def test_get_polygon_point_intersect_on_edge(self):
        pt = Point((1.0, 0.5))
        self.assertEqual(pt, get_polygon_point_intersect(self.poly, pt))

    def test_get_polygon_point_intersect_on_vertex(self):
        pt = Point((1.0, 1.0))
        self.assertEqual(pt, get_polygon_point_intersect(self.poly, pt))

    def test_get_polygon_point_intersect_outside(self):
        pt = Point((2.0, 2.0))
        self.assertEqual(None, get_polygon_point_intersect(self.poly, pt))
+
+
class TestGetRectanglePointIntersect(unittest.TestCase):
    """Tests for get_rectangle_point_intersect against a 5x5 rectangle."""

    def setUp(self):
        # 5x5 rectangle shared by every case below.
        self.rect = Rectangle(0, 0, 5, 5)

    def test_get_rectangle_point_intersect(self):
        pt = Point((1, 1))
        self.assertEqual(pt, get_rectangle_point_intersect(self.rect, pt))

    def test_get_rectangle_point_intersect_on_edge(self):
        pt = Point((2.5, 5))
        self.assertEqual(pt, get_rectangle_point_intersect(self.rect, pt))

    def test_get_rectangle_point_intersect_on_vertex(self):
        pt = Point((5, 5))
        self.assertEqual(pt, get_rectangle_point_intersect(self.rect, pt))

    def test_get_rectangle_point_intersect_outside(self):
        pt = Point((10, 10))
        self.assertEqual(None, get_rectangle_point_intersect(self.rect, pt))
+
+
class TestGetRaySegmentIntersect(unittest.TestCase):
    """Tests for get_ray_segment_intersect."""

    def test_get_ray_segment_intersect(self):
        ray = Ray(Point((0, 0)), Point((0, 1)))
        seg = LineSegment(Point((-1, 10)), Point((1, 10)))
        self.assertEqual((0.0, 10.), get_ray_segment_intersect(ray, seg)[:])

    def test_get_ray_segment_intersect_origin(self):
        # Typo fix: was "_orgin". Segment passing through the ray's origin.
        ray = Ray(Point((0, 0)), Point((0, 1)))
        seg = LineSegment(Point((-1, 0)), Point((1, 0)))
        self.assertEqual((0.0, 0.0), get_ray_segment_intersect(ray, seg)[:])

    def test_get_ray_segment_intersect_edge(self):
        # Segment endpoint lies exactly on the ray.
        ray = Ray(Point((0, 0)), Point((0, 1)))
        seg = LineSegment(Point((0, 2)), Point((2, 2)))
        self.assertEqual((0.0, 2.0), get_ray_segment_intersect(ray, seg)[:])

    def test_get_ray_segment_intersect_no_intersect(self):
        ray = Ray(Point((0, 0)), Point((0, 1)))
        seg = LineSegment(Point((10, 10)), Point((10, 11)))
        self.assertEqual(None, get_ray_segment_intersect(ray, seg))

    def test_get_ray_segment_intersect_segment(self):
        # A segment collinear with the ray yields the segment itself.
        ray = Ray(Point((0, 0)), Point((5, 5)))
        seg = LineSegment(Point((1, 1)), Point((2, 2)))
        self.assertEqual(seg, get_ray_segment_intersect(ray, seg))
+
+
class TestGetRectangleRectangleIntersection(unittest.TestCase):
    """Tests for get_rectangle_rectangle_intersection.

    The result type varies with the overlap: a Rectangle for area overlap,
    a LineSegment for a shared edge, a Point for a shared corner, and None
    when the rectangles are disjoint.
    """

    def test_get_rectangle_rectangle_intersection_leftright(self):
        lhs = Rectangle(0, 4, 6, 9)
        rhs = Rectangle(4, 0, 9, 7)
        self.assertEqual([4.0, 4.0, 6.0, 7.0],
                         get_rectangle_rectangle_intersection(lhs, rhs)[:])

    def test_get_rectangle_rectangle_intersection_topbottom(self):
        lhs = Rectangle(0, 0, 4, 4)
        rhs = Rectangle(2, 1, 6, 3)
        self.assertEqual([2.0, 1.0, 4.0, 3.0],
                         get_rectangle_rectangle_intersection(lhs, rhs)[:])

    def test_get_rectangle_rectangle_intersection_nested(self):
        # The contained rectangle is returned unchanged.
        outer = Rectangle(0, 0, 4, 4)
        inner = Rectangle(2, 1, 3, 2)
        self.assertEqual(inner,
                         get_rectangle_rectangle_intersection(outer, inner))

    def test_get_rectangle_rectangle_intersection_shared_corner(self):
        lhs = Rectangle(0, 0, 4, 4)
        rhs = Rectangle(4, 4, 8, 8)
        self.assertEqual(Point((4, 4)),
                         get_rectangle_rectangle_intersection(lhs, rhs))

    def test_get_rectangle_rectangle_intersection_shared_edge(self):
        lhs = Rectangle(0, 0, 4, 4)
        rhs = Rectangle(0, 4, 4, 8)
        expected = LineSegment(Point((0, 4)), Point((4, 4)))
        self.assertEqual(expected,
                         get_rectangle_rectangle_intersection(lhs, rhs))

    def test_get_rectangle_rectangle_intersection_shifted_edge(self):
        # Partial edge contact yields just the overlapping portion.
        lhs = Rectangle(0, 0, 4, 4)
        rhs = Rectangle(2, 4, 6, 8)
        expected = LineSegment(Point((2, 4)), Point((4, 4)))
        self.assertEqual(expected,
                         get_rectangle_rectangle_intersection(lhs, rhs))

    def test_get_rectangle_rectangle_intersection_no_intersect(self):
        lhs = Rectangle(0, 0, 4, 4)
        rhs = Rectangle(5, 5, 8, 8)
        self.assertEqual(None,
                         get_rectangle_rectangle_intersection(lhs, rhs))
+
+
+class TestGetPolygonPointDist(unittest.TestCase):
+    """Tests for get_polygon_point_dist against the unit square.
+
+    Distance is 0.0 for points inside or on the boundary of the polygon.
+    """
+
+    def test_get_polygon_point_dist(self):
+        # point one unit to the right of the square's right edge
+        poly = Polygon([Point(
+            (0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
+        pt = Point((2, 0.5))
+        expected = 1.0
+        self.assertEqual(expected, get_polygon_point_dist(poly, pt))
+
+    def test_get_polygon_point_dist_inside(self):
+        poly = Polygon([Point(
+            (0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
+        pt = Point((0.5, 0.5))
+        expected = 0.0
+        self.assertEqual(expected, get_polygon_point_dist(poly, pt))
+
+    def test_get_polygon_point_dist_on_vertex(self):
+        poly = Polygon([Point(
+            (0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
+        pt = Point((1.0, 1.0))
+        expected = 0.0
+        self.assertEqual(expected, get_polygon_point_dist(poly, pt))
+
+    def test_get_polygon_point_dist_on_edge(self):
+        poly = Polygon([Point(
+            (0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
+        pt = Point((0.5, 1.0))
+        expected = 0.0
+        self.assertEqual(expected, get_polygon_point_dist(poly, pt))
+
+
+class TestGetPointsDist(unittest.TestCase):
+    """Tests for get_points_dist (Euclidean point-to-point distance)."""
+
+    def test_get_points_dist(self):
+        # identical points -> zero distance
+        pt1 = Point((0.5, 0.5))
+        pt2 = Point((0.5, 0.5))
+        self.assertEqual(0, get_points_dist(pt1, pt2))
+
+    def test_get_points_dist_diag(self):
+        # unit diagonal -> sqrt(2)
+        pt1 = Point((0, 0))
+        pt2 = Point((1, 1))
+        self.assertEqual(2 ** (0.5), get_points_dist(pt1, pt2))
+
+    def test_get_points_dist_alongX(self):
+        # horizontal separation only
+        pt1 = Point((-1000, 1 / 3.0))
+        pt2 = Point((1000, 1 / 3.0))
+        self.assertEqual(2000, get_points_dist(pt1, pt2))
+
+    def test_get_points_dist_alongY(self):
+        # vertical separation only
+        pt1 = Point((1 / 3.0, -500))
+        pt2 = Point((1 / 3.0, 500))
+        self.assertEqual(1000, get_points_dist(pt1, pt2))
+
+
+class TestGetSegmentPointDist(unittest.TestCase):
+    """Tests for get_segment_point_dist.
+
+    Returns a 2-tuple; from the assertions below the first element is
+    the distance from the point to the segment, and the second is
+    presumably the fractional position of the nearest point along the
+    segment (0.0 at the start point, 1.0 at the end) -- confirm in
+    pysal.cg.
+    """
+
+    def test_get_segment_point_dist(self):
+        # point above the midpoint of a horizontal segment
+        seg = LineSegment(Point((0, 0)), Point((10, 0)))
+        pt = Point((5, 5))
+        self.assertEqual((5.0, 0.5), get_segment_point_dist(seg, pt))
+
+    def test_get_segment_point_dist_on_endPoint(self):
+        seg = LineSegment(Point((0, 0)), Point((10, 0)))
+        pt = Point((0, 0))
+        self.assertEqual((0.0, 0.0), get_segment_point_dist(seg, pt))
+
+    def test_get_segment_point_dist_on_middle(self):
+        seg = LineSegment(Point((0, 0)), Point((10, 0)))
+        pt = Point((5, 0))
+        self.assertEqual((0.0, 0.5), get_segment_point_dist(seg, pt))
+
+    def test_get_segment_point_diag(self):
+        # point on a diagonal segment; float results compared approximately
+        seg = LineSegment(Point((0, 0)), Point((10, 10)))
+        pt = Point((5, 5))
+        self.assertAlmostEqual(0.0, get_segment_point_dist(seg, pt)[0])
+        self.assertAlmostEqual(0.5, get_segment_point_dist(seg, pt)[1])
+
+    def test_get_segment_point_diag_with_dist(self):
+        # off-segment point whose projection falls at the midpoint
+        seg = LineSegment(Point((0, 0)), Point((10, 10)))
+        pt = Point((0, 10))
+        self.assertAlmostEqual(50 ** (0.5), get_segment_point_dist(seg, pt)[0])
+        self.assertAlmostEqual(0.5, get_segment_point_dist(seg, pt)[1])
+
+
+class TestGetPointAtAngleAndDist(unittest.TestCase):
+    """Tests for get_point_at_angle_and_dist.
+
+    The angle argument appears to be measured in radians relative to the
+    ray's direction (negative = clockwise, per the assertions below).
+    """
+
+    def test_get_point_at_angle_and_dist(self):
+        # pi radians from a +x ray at distance 1 -> (-1, 0)
+        ray = Ray(Point((0, 0)), Point((1, 0)))
+        pt = get_point_at_angle_and_dist(ray, math.pi, 1.0)
+        self.assertAlmostEqual(-1.0, pt[0])
+        self.assertAlmostEqual(0.0, pt[1])
+
+    def test_get_point_at_angle_and_dist_diag(self):
+        # pi radians from a diagonal ray at distance sqrt(2) -> (-1, -1)
+        ray = Ray(Point((0, 0)), Point((1, 1)))
+        pt = get_point_at_angle_and_dist(ray, math.pi, 2 ** (0.5))
+        self.assertAlmostEqual(-1.0, pt[0])
+        self.assertAlmostEqual(-1.0, pt[1])
+
+    def test_get_point_at_angle_and_dist_diag_90(self):
+        ray = Ray(Point((0, 0)), Point((1, 1)))
+        pt = get_point_at_angle_and_dist(ray, -math.pi / 2.0, 2 ** (0.5))
+        self.assertAlmostEqual(1.0, pt[0])
+        self.assertAlmostEqual(-1.0, pt[1])
+
+    def test_get_point_at_angle_and_dist_diag_45(self):
+        ray = Ray(Point((0, 0)), Point((1, 1)))
+        pt = get_point_at_angle_and_dist(ray, -math.pi / 4.0, 1)
+        self.assertAlmostEqual(1.0, pt[0])
+        self.assertAlmostEqual(0.0, pt[1])
+
+
+class TestConvexHull(unittest.TestCase):
+    """Test for convex_hull."""
+
+    def test_convex_hull(self):
+        # (3, 1) is interior to the hull and must be dropped
+        points = [Point((0, 0)), Point((4, 4)), Point((4, 0)), Point((3, 1))]
+        self.assertEqual([Point((0.0, 0.0)), Point(
+            (4.0, 0.0)), Point((4.0, 4.0))], convex_hull(points))
+
+
+class TestIsClockwise(unittest.TestCase):
+    """Tests for is_clockwise (ring orientation of a vertex sequence)."""
+
+    def test_is_clockwise(self):
+        vertices = [Point((0, 0)), Point((0, 10)), Point((10, 0))]
+        self.assertEqual(True, is_clockwise(vertices))
+
+    def test_is_clockwise_expect_false(self):
+        # same triangle, opposite winding
+        vertices = [Point((0, 0)), Point((10, 0)), Point((0, 10))]
+        self.assertEqual(False, is_clockwise(vertices))
+
+    def test_is_clockwise_big(self):
+        # large real-world ring given as plain coordinate tuples (not
+        # Point objects); exercises is_clockwise on a long, irregular
+        # closed boundary
+        vertices = [(
+            -106.57798, 35.174143999999998), (-106.583412, 35.174141999999996),
+                    (-106.58417999999999, 35.174143000000001), (-106.58377999999999, 35.175542999999998),
+                    (-106.58287999999999, 35.180543), (
+                        -106.58263099999999, 35.181455),
+                    (-106.58257999999999, 35.181643000000001), (-106.58198299999999, 35.184615000000001),
+                    (-106.58148, 35.187242999999995), (
+                        -106.58127999999999, 35.188243),
+                    (-106.58138, 35.188243), (-106.58108, 35.189442999999997),
+                    (-106.58104, 35.189644000000001), (
+                        -106.58028, 35.193442999999995),
+                    (-106.580029, 35.194541000000001), (-106.57974399999999,
+                                                        35.195785999999998),
+                    (-106.579475, 35.196961999999999), (-106.57922699999999,
+                                                        35.198042999999998),
+                    (-106.578397, 35.201665999999996), (-106.57827999999999,
+                                                        35.201642999999997),
+                    (-106.57737999999999, 35.201642999999997), (-106.57697999999999, 35.201543000000001),
+                    (-106.56436599999999, 35.200311999999997), (
+                        -106.56058, 35.199942999999998),
+                    (-106.56048, 35.197342999999996), (
+                        -106.56048, 35.195842999999996),
+                    (-106.56048, 35.194342999999996), (
+                        -106.56048, 35.193142999999999),
+                    (-106.56048, 35.191873999999999), (
+                        -106.56048, 35.191742999999995),
+                    (-106.56048, 35.190242999999995), (-106.56037999999999,
+                                                       35.188642999999999),
+                    (-106.56037999999999, 35.187242999999995), (-106.56037999999999, 35.186842999999996),
+                    (-106.56037999999999, 35.186552999999996), (-106.56037999999999, 35.185842999999998),
+                    (-106.56037999999999, 35.184443000000002), (-106.56037999999999, 35.182943000000002),
+                    (-106.56037999999999, 35.181342999999998), (-106.56037999999999, 35.180433000000001),
+                    (-106.56037999999999, 35.179943000000002), (-106.56037999999999, 35.178542999999998),
+                    (-106.56037999999999, 35.177790999999999), (-106.56037999999999, 35.177143999999998),
+                    (-106.56037999999999, 35.175643999999998), (-106.56037999999999, 35.174444000000001),
+                    (-106.56037999999999, 35.174043999999995), (
+                        -106.560526, 35.174043999999995),
+                    (-106.56478, 35.174043999999995), (-106.56627999999999,
+                                                       35.174143999999998),
+                    (-106.566541, 35.174144999999996), (
+                        -106.569023, 35.174157000000001),
+                    (-106.56917199999999, 35.174157999999998), (
+                        -106.56938, 35.174143999999998),
+                    (-106.57061499999999, 35.174143999999998), (-106.57097999999999, 35.174143999999998),
+                    (-106.57679999999999, 35.174143999999998), (-106.57798, 35.174143999999998)]
+        self.assertEqual(True, is_clockwise(vertices))
+
+
+class TestPointTouchesRectangle(unittest.TestCase):
+    """Tests for point_touches_rectangle (boundary counts as touching)."""
+
+    def test_point_touches_rectangle_inside(self):
+        rect = Rectangle(0, 0, 10, 10)
+        point = Point((5, 5))
+        self.assertEqual(True, point_touches_rectangle(point, rect))
+
+    def test_point_touches_rectangle_on_edge(self):
+        rect = Rectangle(0, 0, 10, 10)
+        point = Point((10, 5))
+        self.assertEqual(True, point_touches_rectangle(point, rect))
+
+    def test_point_touches_rectangle_on_corner(self):
+        rect = Rectangle(0, 0, 10, 10)
+        point = Point((10, 10))
+        self.assertEqual(True, point_touches_rectangle(point, rect))
+
+    def test_point_touches_rectangle_outside(self):
+        rect = Rectangle(0, 0, 10, 10)
+        point = Point((11, 11))
+        self.assertEqual(False, point_touches_rectangle(point, rect))
+
+
+class TestGetSharedSegments(unittest.TestCase):
+    """Tests for get_shared_segments on a 2x2 grid of unit squares.
+
+    poly1..poly4 tile the square (0,0)-(2,2); squares that share an edge
+    have shared segments, diagonal neighbours do not.
+    """
+
+    def test_get_shared_segments(self):
+        poly1 = Polygon([Point(
+            (0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
+        poly2 = Polygon([Point(
+            (1, 0)), Point((1, 1)), Point((2, 1)), Point((2, 0))])
+        poly3 = Polygon([Point(
+            (0, 1)), Point((0, 2)), Point((1, 2)), Point((1, 1))])
+        poly4 = Polygon([Point(
+            (1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))])
+        # edge-adjacent pairs -> True
+        self.assertEqual(
+            True, get_shared_segments(poly1, poly2, bool_ret=True))
+        self.assertEqual(
+            True, get_shared_segments(poly1, poly3, bool_ret=True))
+        self.assertEqual(
+            True, get_shared_segments(poly3, poly4, bool_ret=True))
+        self.assertEqual(
+            True, get_shared_segments(poly4, poly2, bool_ret=True))
+
+        # diagonal pairs share only a corner -> False
+        self.assertEqual(
+            False, get_shared_segments(poly1, poly4, bool_ret=True))
+        self.assertEqual(
+            False, get_shared_segments(poly3, poly2, bool_ret=True))
+
+    def test_get_shared_segments_non_bool(self):
+        # without bool_ret the shared LineSegments themselves are returned
+        poly1 = Polygon([Point(
+            (0, 0)), Point((0, 1)), Point((1, 1)), Point((1, 0))])
+        poly2 = Polygon([Point(
+            (1, 0)), Point((1, 1)), Point((2, 1)), Point((2, 0))])
+        poly3 = Polygon([Point(
+            (0, 1)), Point((0, 2)), Point((1, 2)), Point((1, 1))])
+        poly4 = Polygon([Point(
+            (1, 1)), Point((1, 2)), Point((2, 2)), Point((2, 1))])
+        self.assertEqual(LineSegment(Point((1, 0)), Point((1, 1))),
+                         get_shared_segments(poly1, poly2)[0])
+        self.assertEqual(LineSegment(Point((0, 1)), Point((1, 1))),
+                         get_shared_segments(poly1, poly3)[0])
+        self.assertEqual(LineSegment(Point((1, 2)), Point((1, 1))),
+                         get_shared_segments(poly3, poly4)[0])
+        self.assertEqual(LineSegment(Point((2, 1)), Point((1, 1))),
+                         get_shared_segments(poly4, poly2)[0])
+        #expected =  [LineSegment(Point((1, 1)), Point((1, 0)))]
+        #assert expected == get_shared_segments(poly1, poly3)
+        #expected =  [LineSegment(Point((1, 1)), Point((1, 0)))]
+        #assert expected == get_shared_segments(poly3, poly4)
+        #expected =  [LineSegment(Point((1, 1)), Point((1, 0)))]
+        #assert expected == get_shared_segments(poly4, poly2)
+
+
+class TestDistanceMatrix(unittest.TestCase):
+    """Test for distance_matrix; the second argument 2 is checked against
+    the Euclidean formula below, i.e. Minkowski p=2."""
+
+    def test_distance_matrix(self):
+        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+        dist = distance_matrix(np.array(points), 2)
+        # verify the upper triangle entry-by-entry against the direct
+        # Euclidean distance between each point pair
+        for i in range(0, len(points)):
+            for j in range(i, len(points)):
+                x, y = points[i]
+                X, Y = points[j]
+                d = ((x - X) ** 2 + (y - Y) ** 2) ** (0.5)
+                self.assertEqual(dist[i, j], d)
+
+# run the full test suite when the module is executed directly
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/common.py b/pysal/common.py
new file mode 100644
index 0000000..58e167a
--- /dev/null
+++ b/pysal/common.py
@@ -0,0 +1,25 @@
+
+# external imports
+
+# PySAL depends only on numpy/scipy beyond the standard library; fail
+# fast with an explicit message when either is missing.  The except
+# clauses are restricted to ImportError so unrelated errors (including
+# KeyboardInterrupt/SystemExit) are not masked by a misleading message.
+try:
+    import numpy as np
+    import numpy.linalg as la
+except ImportError:
+    print 'numpy 1.3 is required'
+    raise
+try:
+    import scipy as sp
+    import scipy.stats as stats
+    from scipy.spatial.distance import pdist, cdist
+except ImportError:
+    print 'scipy 0.7+ is required'
+    raise
+try:
+    # imported in its own block so a failure in the local kdtree wrapper
+    # is not misreported as a missing scipy installation
+    from cg.kdtree import KDTree
+except ImportError:
+    print 'pysal.cg.kdtree could not be imported'
+    raise
+
+
+# standard library imports shared across PySAL modules
+import copy
+import math
+import random
+import sys
+import time
+import unittest
diff --git a/pysal/contrib/README b/pysal/contrib/README
new file mode 100644
index 0000000..ad99aa4
--- /dev/null
+++ b/pysal/contrib/README
@@ -0,0 +1,57 @@
+:mod:`pysal.contrib` -- Contributed Modules 
+===========================================
+
+**Intro**
+
+The PySAL Contrib library contains user contributions that enhance PySAL, but
+are not fit for inclusion in the general library. The primary reason a
+contribution would not be allowed in the general library is external
+dependencies. PySAL has a strict no dependency policy (aside from Numpy/Scipy).
+This helps ensure the library is easy to install and maintain.
+
+However, this policy often limits our ability to make use of existing code or
+exploit performance enhancements from C-extensions. This contrib module is
+designed to alleviate this problem. There are no restrictions on external
+dependencies in contrib. 
+
+**Ground Rules**
+
+ 1. Contribs must not be used within the general library.
+ 2. *Explicit imports*: each contrib must be imported manually.
+ 3. *Documentation*: each contrib must be documented, dependencies especially.
+
+**Contribs**
+
+Currently the following contribs are available:
+
+ 1. World To View Transform -- A class for modeling viewing windows, used by Weights Viewer.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.weights_viewer.transforms
+    - Requires: None
+
+ 2. Weights Viewer -- A Graphical tool for examining spatial weights.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.weights_viewer.weights_viewer
+    - Requires: wxPython
+
+ 3. Shapely Extension -- Exposes shapely methods as standalone functions
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.shapely_ext
+    - Requires: shapely
+
+ 4. Shared Perimeter Weights -- calculate shared perimeters weights.
+
+    - .. versionadded:: 1.3
+    - Path: pysal.contrib.shared_perimeter_weights
+    - Requires: shapely
+
+ 5. Clusterpy Extension -- spatially constrained clustering algorithms.
+
+    - .. versionadded:: 1.8
+    - Path: pysal.contrib.clusterpy
+    - Requires: clusterpy
+
+
diff --git a/pysal/contrib/__init__.py b/pysal/contrib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/clusterpy/README.md b/pysal/contrib/clusterpy/README.md
new file mode 100644
index 0000000..d4ed06d
--- /dev/null
+++ b/pysal/contrib/clusterpy/README.md
@@ -0,0 +1,15 @@
+# PySAL Contrib Module for clusterPy
+
+**Author: Serge Rey <sjsrey at gmail.com>**
+
+
+[clusterPy][cp] is a library of spatially constrained clustering algorithms.  This contrib module provides an interface to the family of spatially constrained clustering algorithms offered by clusterPy. It also extends a select set of clusterPy classes to leverage optimized data structures in PySAL, and it provides utility functions that support use of clusterPy functionality with other PySAL modules.
+
+
+## Notes
+
+- 2014-05-28 This is working with clusterPy 0.9.9 installed from [pip][pip].
+
+[cp]: http://www.rise-group.org/risem/clusterpy/
+[cpr]: https://github.com/clusterpy/clusterpy
+[pip]: https://pypi.python.org/pypi/clusterPy/0.9.9
diff --git a/pysal/contrib/clusterpy/__init__.py b/pysal/contrib/clusterpy/__init__.py
new file mode 100644
index 0000000..27bb912
--- /dev/null
+++ b/pysal/contrib/clusterpy/__init__.py
@@ -0,0 +1,12 @@
+# pysal.contrib.clusterpy: optional interface to the clusterPy library.
+# The external dependency is probed first; if it is absent a notice is
+# printed and the package exposes nothing (import itself still succeeds).
+try:
+    import clusterpy
+except ImportError:
+    print 'pysal.contrib.clusterpy requires clusterpy'
+    print 'clusterpy not found.'
+else:
+    # re-export the public interface from the extension module
+    from clusterpy_ext import Layer
+    from clusterpy_ext import loadArcData
+    from clusterpy_ext import importCsvData
+    from clusterpy_ext import addRook2Layer
+    from clusterpy_ext import addQueen2Layer
+    from clusterpy_ext import addArray2Layer
diff --git a/pysal/contrib/clusterpy/clusterpy.ipynb b/pysal/contrib/clusterpy/clusterpy.ipynb
new file mode 100644
index 0000000..9974d81
--- /dev/null
+++ b/pysal/contrib/clusterpy/clusterpy.ipynb
@@ -0,0 +1,3320 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:a138ff2bb82f0ff84b1c6ddbfcdd96a0d757ce0a91b7848a1d377b8e5b58d79b"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import pysal.contrib.clusterpy as cp\n",
+      "%pylab inline"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "ClusterPy: Library of spatially constrained clustering algorithms\n",
+        "Populating the interactive namespace from numpy and matplotlib"
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "\n"
+       ]
+      }
+     ],
+     "prompt_number": 1
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "import pysal as ps\n",
+      "from collections import Counter"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 2
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "columbus = cp.loadArcData(ps.examples.get_path('columbus.shp'))\n",
+      "columbus.fieldNames\n",
+      "n = len(columbus.Wqueen)\n",
+      "#columbus.generateData('Uniform', 'rook', 1, 1, 10)\n",
+      "columbus.dataOperation(\"CONSTANT = 1\")\n",
+      "columbus.cluster('maxpTabu', ['CRIME',  'CONSTANT'], threshold=4, dissolve=0, std=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Adding CONSTANT to fieldNames\n",
+        "Adding values from 1 to Y\n",
+        "Adding variables\n",
+        "Done\n",
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running max-p-regions model (Duque, Anselin and Rey, 2010)\n",
+        "Local search method: Tabu Search\n",
+        "Number of areas:  49\n",
+        "threshold value:  4\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [6, 6, 6, 6, 7, 1, 6, 6, 1, 1, 3, 3, 3, 3, 7, 3, 1, 8, 8, 5, 8, 1, 5, 8, 7, 7, 0, 0, 0, 0, 4, 5, 9, 4, 9, 4, 2, 0, 4, 5, 5, 4, 2, 9, 2, 4, 5, 2, 9]\n",
+        "FINAL OF:  2615.44707346\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 3
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(columbus.region2areas)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 4,
+       "text": [
+        "Counter({4: 6, 5: 6, 6: 6, 0: 5, 1: 5, 3: 5, 2: 4, 7: 4, 8: 4, 9: 4})"
+       ]
+      }
+     ],
+     "prompt_number": 4
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "columbus.cluster('arisel', ['CRIME'], 5, wType='rook', inits=10, dissolve=0)\n",
+      "#calif.cluster('arisel', ['PCR2002'], 9, wType='rook', inits=10, dissolve=1)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  49\n",
+        "Number of regions:  5\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 2, 2, 2, 0, 1, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 1, 4, 0, 1, 4, 1, 1, 4, 0, 0, 0, 0, 0, 0, 4, 1, 3, 4, 3, 4, 3, 0, 4, 1, 1, 4, 3, 3, 3, 4, 1, 3, 3]\n",
+        "initial O.F:  4292.34464226\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 2, 2, 2, 0, 1, 2, 2, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 4, 1, 0, 4, 1, 4, 3, 0, 4, 1, 1, 4, 3, 1, 1, 4, 1, 1, 1]\n",
+        "FINAL OF:  3695.95184733\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 5
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# regionalization solutions are added as a list of region ids at the end\n",
+      "columbus.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 6,
+       "text": [
+        "['ID',\n",
+        " 'AREA',\n",
+        " 'PERIMETER',\n",
+        " 'COLUMBUS_',\n",
+        " 'COLUMBUS_I',\n",
+        " 'POLYID',\n",
+        " 'NEIG',\n",
+        " 'HOVAL',\n",
+        " 'INC',\n",
+        " 'CRIME',\n",
+        " 'OPEN',\n",
+        " 'PLUMB',\n",
+        " 'DISCBD',\n",
+        " 'X',\n",
+        " 'Y',\n",
+        " 'NSA',\n",
+        " 'NSB',\n",
+        " 'EW',\n",
+        " 'CP',\n",
+        " 'THOUS',\n",
+        " 'NEIGNO',\n",
+        " 'CONSTANT',\n",
+        " 'maxpTabu_20140823211458',\n",
+        " 'arisel_20140823211459']"
+       ]
+      }
+     ],
+     "prompt_number": 6
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "warisel = ps.block_weights(columbus.region2areas)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 7
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "warisel.neighbors"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 8,
+       "text": [
+        "{0: [1L, 2L, 3L, 6L, 7L],\n",
+        " 1: [0L, 2L, 3L, 6L, 7L],\n",
+        " 2: [0L, 1L, 3L, 6L, 7L],\n",
+        " 3: [0L, 1L, 2L, 6L, 7L],\n",
+        " 4: [10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 5: [8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 6: [0L, 1L, 2L, 3L, 7L],\n",
+        " 7: [0L, 1L, 2L, 3L, 6L],\n",
+        " 8: [5L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 9: [5L, 8L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 10: [4L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 11: [4L,\n",
+        "  10L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 12: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 13: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 14: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 15: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 16: [5L, 8L, 9L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 17: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 18: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 19: [5L, 8L, 9L, 16L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 20: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 21: [5L, 8L, 9L, 16L, 19L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 22: [5L, 8L, 9L, 16L, 19L, 21L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 23: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 24: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 25: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 26: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 27: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 28: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 29: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 30: [33L, 35L, 38L, 41L, 45L],\n",
+        " 31: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 32: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  37L],\n",
+        " 33: [30L, 35L, 38L, 41L, 45L],\n",
+        " 34: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 35: [30L, 33L, 38L, 41L, 45L],\n",
+        " 36: [42L],\n",
+        " 37: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L],\n",
+        " 38: [30L, 33L, 35L, 41L, 45L],\n",
+        " 39: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 40: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 41: [30L, 33L, 35L, 38L, 45L],\n",
+        " 42: [36L],\n",
+        " 43: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 44L, 46L, 47L, 48L],\n",
+        " 44: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 46L, 47L, 48L],\n",
+        " 45: [30L, 33L, 35L, 38L, 41L],\n",
+        " 46: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 47L, 48L],\n",
+        " 47: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 48L],\n",
+        " 48: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L]}"
+       ]
+      }
+     ],
+     "prompt_number": 8
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wregimes = ps.block_weights(columbus.region2areas)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 9
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "columbus.region2areas[5]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 10,
+       "text": [
+        "1"
+       ]
+      }
+     ],
+     "prompt_number": 10
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wregimes.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 11,
+       "text": [
+        "49"
+       ]
+      }
+     ],
+     "prompt_number": 11
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wregimes.neighbors"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 12,
+       "text": [
+        "{0: [1L, 2L, 3L, 6L, 7L],\n",
+        " 1: [0L, 2L, 3L, 6L, 7L],\n",
+        " 2: [0L, 1L, 3L, 6L, 7L],\n",
+        " 3: [0L, 1L, 2L, 6L, 7L],\n",
+        " 4: [10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 5: [8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 6: [0L, 1L, 2L, 3L, 7L],\n",
+        " 7: [0L, 1L, 2L, 3L, 6L],\n",
+        " 8: [5L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 9: [5L, 8L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 10: [4L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 11: [4L,\n",
+        "  10L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 12: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 13: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 14: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 15: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 16: [5L, 8L, 9L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 17: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 18: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 19: [5L, 8L, 9L, 16L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 20: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 21: [5L, 8L, 9L, 16L, 19L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 22: [5L, 8L, 9L, 16L, 19L, 21L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 23: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 24: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 25: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 26: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 27: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 28: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  29L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 29: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  32L,\n",
+        "  37L],\n",
+        " 30: [33L, 35L, 38L, 41L, 45L],\n",
+        " 31: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 34L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 32: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  37L],\n",
+        " 33: [30L, 35L, 38L, 41L, 45L],\n",
+        " 34: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 39L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 35: [30L, 33L, 38L, 41L, 45L],\n",
+        " 36: [42L],\n",
+        " 37: [4L,\n",
+        "  10L,\n",
+        "  11L,\n",
+        "  12L,\n",
+        "  13L,\n",
+        "  14L,\n",
+        "  15L,\n",
+        "  17L,\n",
+        "  18L,\n",
+        "  20L,\n",
+        "  23L,\n",
+        "  24L,\n",
+        "  25L,\n",
+        "  26L,\n",
+        "  27L,\n",
+        "  28L,\n",
+        "  29L,\n",
+        "  32L],\n",
+        " 38: [30L, 33L, 35L, 41L, 45L],\n",
+        " 39: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 40L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 40: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 43L, 44L, 46L, 47L, 48L],\n",
+        " 41: [30L, 33L, 35L, 38L, 45L],\n",
+        " 42: [36L],\n",
+        " 43: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 44L, 46L, 47L, 48L],\n",
+        " 44: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 46L, 47L, 48L],\n",
+        " 45: [30L, 33L, 35L, 38L, 41L],\n",
+        " 46: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 47L, 48L],\n",
+        " 47: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 48L],\n",
+        " 48: [5L, 8L, 9L, 16L, 19L, 21L, 22L, 31L, 34L, 39L, 40L, 43L, 44L, 46L, 47L]}"
+       ]
+      }
+     ],
+     "prompt_number": 12
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Attribute data from a csv file and a W from a gal file"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico = cp.importCsvData(ps.examples.get_path('mexico.csv'))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 13
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 14,
+       "text": [
+        "['ID',\n",
+        " 'State',\n",
+        " 'pcgdp1940',\n",
+        " 'pcgdp1950',\n",
+        " 'pcgdp1960',\n",
+        " 'pcgdp1970',\n",
+        " 'pcgdp1980',\n",
+        " 'pcgdp1990',\n",
+        " 'pcgdp2000',\n",
+        " 'hanson03',\n",
+        " 'hanson98',\n",
+        " 'esquivel99',\n",
+        " 'inegi',\n",
+        " 'inegi2']"
+       ]
+      }
+     ],
+     "prompt_number": 14
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "w = ps.open(ps.examples.get_path('mexico.gal')).read()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 15
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "w.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 16,
+       "text": [
+        "32"
+       ]
+      }
+     ],
+     "prompt_number": 16
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cp.addRook2Layer(ps.examples.get_path('mexico.gal'), mexico)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 17
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.Wrook"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 18,
+       "text": [
+        "{0: [31, 13],\n",
+        " 1: [2, 25],\n",
+        " 2: [1],\n",
+        " 3: [30, 22, 26],\n",
+        " 4: [19, 26, 29],\n",
+        " 5: [6, 9, 24, 25],\n",
+        " 6: [18, 23, 31, 9, 5],\n",
+        " 7: [13, 15],\n",
+        " 8: [16, 14],\n",
+        " 9: [5, 6, 31, 13, 17, 24],\n",
+        " 10: [23, 21, 31, 15, 13],\n",
+        " 11: [15, 14, 16, 20, 19],\n",
+        " 12: [21, 23, 29, 20, 28, 14],\n",
+        " 13: [17, 31, 0, 23, 10, 15, 7, 9],\n",
+        " 14: [21, 12, 28, 20, 16, 11, 15, 8],\n",
+        " 15: [7, 13, 10, 21, 14, 11],\n",
+        " 16: [14, 8, 20, 11],\n",
+        " 17: [24, 9, 31, 13],\n",
+        " 18: [6, 27, 23, 31],\n",
+        " 19: [11, 20, 29, 4],\n",
+        " 20: [29, 19, 11, 16, 14, 28, 12],\n",
+        " 21: [23, 12, 14, 15, 10],\n",
+        " 22: [30, 3],\n",
+        " 23: [18, 27, 29, 12, 21, 10, 31, 6, 13],\n",
+        " 24: [25, 5, 9, 17],\n",
+        " 25: [1, 5, 24],\n",
+        " 26: [3, 4, 29],\n",
+        " 27: [18, 29, 23],\n",
+        " 28: [12, 20, 14],\n",
+        " 29: [26, 4, 19, 20, 12, 23, 27],\n",
+        " 30: [3, 22],\n",
+        " 31: [18, 23, 10, 0, 13, 17, 9, 6]}"
+       ]
+      }
+     ],
+     "prompt_number": 18
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.cluster('arisel', ['pcgdp1940'], 5, wType='rook', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  32\n",
+        "Number of regions:  5\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 3, 3, 2, 2, 2, 2, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 2]\n",
+        "initial O.F:  287200188.741\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 3, 3, 0, 0, 2, 2, 2, 4, 2, 0, 0, 2, 2, 0, 0, 0, 0, 2, 0, 0, 2, 1, 0, 0, 2, 0, 2, 0, 2, 0, 0]\n",
+        "FINAL OF:  186036319.667\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 19
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 20,
+       "text": [
+        "['ID',\n",
+        " 'State',\n",
+        " 'pcgdp1940',\n",
+        " 'pcgdp1950',\n",
+        " 'pcgdp1960',\n",
+        " 'pcgdp1970',\n",
+        " 'pcgdp1980',\n",
+        " 'pcgdp1990',\n",
+        " 'pcgdp2000',\n",
+        " 'hanson03',\n",
+        " 'hanson98',\n",
+        " 'esquivel99',\n",
+        " 'inegi',\n",
+        " 'inegi2',\n",
+        " 'arisel_20140823211523']"
+       ]
+      }
+     ],
+     "prompt_number": 20
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.getVars('pcgdp1940')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 21,
+       "text": [
+        "{0: [10384.0],\n",
+        " 1: [22361.0],\n",
+        " 2: [9573.0],\n",
+        " 3: [3758.0],\n",
+        " 4: [2934.0],\n",
+        " 5: [8578.0],\n",
+        " 6: [8537.0],\n",
+        " 7: [6909.0],\n",
+        " 8: [17816.0],\n",
+        " 9: [12132.0],\n",
+        " 10: [4359.0],\n",
+        " 11: [2181.0],\n",
+        " 12: [4414.0],\n",
+        " 13: [5309.0],\n",
+        " 14: [3408.0],\n",
+        " 15: [3327.0],\n",
+        " 16: [6936.0],\n",
+        " 17: [4836.0],\n",
+        " 18: [9073.0],\n",
+        " 19: [1892.0],\n",
+        " 20: [3569.0],\n",
+        " 21: [11016.0],\n",
+        " 22: [21965.0],\n",
+        " 23: [4372.0],\n",
+        " 24: [4840.0],\n",
+        " 25: [6399.0],\n",
+        " 26: [2459.0],\n",
+        " 27: [7508.0],\n",
+        " 28: [3605.0],\n",
+        " 29: [5203.0],\n",
+        " 30: [7990.0],\n",
+        " 31: [3734.0]}"
+       ]
+      }
+     ],
+     "prompt_number": 21
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# mexico example all together\n",
+      "\n",
+      "csvfile = ps.examples.get_path('mexico.csv')\n",
+      "galfile = ps.examples.get_path('mexico.gal')\n",
+      "\n",
+      "mexico = cp.importCsvData(csvfile)\n",
+      "cp.addRook2Layer(galfile, mexico)\n",
+      "mexico.cluster('arisel', ['pcgdp1940'], 5, wType='rook', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  32\n",
+        "Number of regions:  5\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 0, 0, 1, 1, 2, 2, 2, 4, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 3, 2, 2, 2, 1, 2, 1, 2, 1, 2]\n",
+        "initial O.F:  228683641.198\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [2, 0, 0, 1, 1, 2, 2, 2, 4, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 2, 3, 1, 1, 2, 1, 2, 1, 2, 1, 1]\n",
+        "FINAL OF:  186036319.667\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 22
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.region2areas.index(2)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 23,
+       "text": [
+        "0"
+       ]
+      }
+     ],
+     "prompt_number": 23
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.Wrook[0]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 24,
+       "text": [
+        "[31, 13]"
+       ]
+      }
+     ],
+     "prompt_number": 24
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "mexico.getVars('State')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 25,
+       "text": [
+        "{0: ['Aguascalientes'],\n",
+        " 1: ['Baja California'],\n",
+        " 2: ['Baja California Sur'],\n",
+        " 3: ['Campeche'],\n",
+        " 4: ['Chiapas'],\n",
+        " 5: ['Chihuahua'],\n",
+        " 6: ['Coahuila'],\n",
+        " 7: ['Colima'],\n",
+        " 8: ['Distrito Federal'],\n",
+        " 9: ['Durango'],\n",
+        " 10: ['Guanajuato'],\n",
+        " 11: ['Guerrero'],\n",
+        " 12: ['Hidalgo'],\n",
+        " 13: ['Jalisco'],\n",
+        " 14: ['Mexico'],\n",
+        " 15: ['Michoacan'],\n",
+        " 16: ['Morelos'],\n",
+        " 17: ['Nayarit'],\n",
+        " 18: ['Nuevo Leon'],\n",
+        " 19: ['Oaxaca'],\n",
+        " 20: ['Puebla'],\n",
+        " 21: ['Quertaro'],\n",
+        " 22: ['Quintana Roo'],\n",
+        " 23: ['San Luis Potosi'],\n",
+        " 24: ['Sinaloa'],\n",
+        " 25: ['Sonora'],\n",
+        " 26: ['Tabasco'],\n",
+        " 27: ['Tamaulipas'],\n",
+        " 28: ['Tlaxcala'],\n",
+        " 29: ['Veracruz'],\n",
+        " 30: ['Yucatan'],\n",
+        " 31: ['Zacatecas']}"
+       ]
+      }
+     ],
+     "prompt_number": 25
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions = np.array(mexico.region2areas)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 26
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 27,
+       "text": [
+        "array([2, 0, 0, 1, 1, 2, 2, 2, 4, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 2, 3,\n",
+        "       1, 1, 2, 1, 2, 1, 2, 1, 1])"
+       ]
+      }
+     ],
+     "prompt_number": 27
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 28,
+       "text": [
+        "Counter({1: 16, 2: 12, 0: 2, 3: 1, 4: 1})"
+       ]
+      }
+     ],
+     "prompt_number": 28
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Shapefile and mapping results with PySAL Viz"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usf = ps.examples.get_path('us48.shp')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 29
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "us = cp.loadArcData(usf.split(\".\")[0])"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 30
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "us.Wqueen"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 31,
+       "text": [
+        "{0: [10, 7],\n",
+        " 1: [3, 4, 5, 7],\n",
+        " 2: [11],\n",
+        " 3: [1, 4, 9],\n",
+        " 4: [1, 3, 5, 9, 12, 14],\n",
+        " 5: [1, 4, 7, 14, 22, 29],\n",
+        " 6: [9, 12, 25, 47],\n",
+        " 7: [0, 1, 5, 10, 21, 22],\n",
+        " 8: [11, 13, 15],\n",
+        " 9: [12, 3, 4, 6],\n",
+        " 10: [0, 23, 21, 7],\n",
+        " 11: [8, 2, 13],\n",
+        " 12: [33, 4, 6, 9, 14, 25],\n",
+        " 13: [8, 17, 18, 11, 15],\n",
+        " 14: [33, 4, 5, 12, 29, 31],\n",
+        " 15: [8, 16, 19, 13, 17],\n",
+        " 16: [15, 19, 24, 26, 27, 28],\n",
+        " 17: [18, 13, 15],\n",
+        " 18: [17, 13],\n",
+        " 19: [16, 26, 15],\n",
+        " 20: [24, 25, 30, 47],\n",
+        " 21: [10, 23, 34, 22, 7],\n",
+        " 22: [34, 5, 39, 7, 21, 29],\n",
+        " 23: [10, 34, 21],\n",
+        " 24: [16, 27, 20, 30, 47],\n",
+        " 25: [20, 12, 30, 6, 33],\n",
+        " 26: [16, 19, 28],\n",
+        " 27: [16, 24, 28, 30, 32],\n",
+        " 28: [16, 32, 26, 27],\n",
+        " 29: [34, 35, 5, 39, 14, 22, 31],\n",
+        " 30: [32, 33, 37, 20, 24, 25, 27],\n",
+        " 31: [33, 35, 29, 14],\n",
+        " 32: [36, 27, 28, 37, 30],\n",
+        " 33: [35, 37, 44, 12, 14, 25, 30, 31],\n",
+        " 34: [39, 29, 21, 22, 23],\n",
+        " 35: [33, 38, 39, 44, 29, 31],\n",
+        " 36: [32, 42, 43, 37],\n",
+        " 37: [32, 33, 36, 40, 41, 42, 44, 30],\n",
+        " 38: [35, 44, 45, 39],\n",
+        " 39: [38, 34, 35, 29, 22],\n",
+        " 40: [41, 42, 37, 46],\n",
+        " 41: [40, 44, 37, 45],\n",
+        " 42: [40, 43, 36, 37, 46],\n",
+        " 43: [42, 36],\n",
+        " 44: [33, 35, 37, 38, 41, 45],\n",
+        " 45: [41, 44, 38],\n",
+        " 46: [40, 42],\n",
+        " 47: [24, 20, 6]}"
+       ]
+      }
+     ],
+     "prompt_number": 31
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "us.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 32,
+       "text": [
+        "['ID',\n",
+        " 'AREA',\n",
+        " 'PERIMETER',\n",
+        " 'STATE_',\n",
+        " 'STATE_ID',\n",
+        " 'STATE_NAME',\n",
+        " 'STATE_FIPS',\n",
+        " 'SUB_REGION',\n",
+        " 'STATE_ABBR']"
+       ]
+      }
+     ],
+     "prompt_number": 32
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "uscsv = ps.examples.get_path(\"usjoin.csv\")"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 33
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "f = ps.open(uscsv)\n",
+      "pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)]).T"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 34
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "pci"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 35,
+       "text": [
+        "array([[  323,   267,   224, ..., 31988, 32819, 32274],\n",
+        "       [  600,   520,   429, ..., 33470, 33445, 32077],\n",
+        "       [  310,   228,   215, ..., 31070, 31800, 31493],\n",
+        "       ..., \n",
+        "       [  460,   408,   356, ..., 29769, 31265, 31843],\n",
+        "       [  673,   588,   469, ..., 35839, 36594, 35676],\n",
+        "       [  675,   585,   476, ..., 43453, 45177, 42504]])"
+       ]
+      }
+     ],
+     "prompt_number": 35
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy = cp.Layer()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 36
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cp.addQueen2Layer(ps.examples.get_path('states48.gal'), usy)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 37
+    },
+    {
+     "cell_type": "code",
+     "collapsed": true,
+     "input": [
+      "names = [\"Y_%d\"%v for v in range(1929,2010)]\n",
+      "cp.addArray2Layer(pci, usy, names)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 38
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "names"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 39,
+       "text": [
+        "['Y_1929',\n",
+        " 'Y_1930',\n",
+        " 'Y_1931',\n",
+        " 'Y_1932',\n",
+        " 'Y_1933',\n",
+        " 'Y_1934',\n",
+        " 'Y_1935',\n",
+        " 'Y_1936',\n",
+        " 'Y_1937',\n",
+        " 'Y_1938',\n",
+        " 'Y_1939',\n",
+        " 'Y_1940',\n",
+        " 'Y_1941',\n",
+        " 'Y_1942',\n",
+        " 'Y_1943',\n",
+        " 'Y_1944',\n",
+        " 'Y_1945',\n",
+        " 'Y_1946',\n",
+        " 'Y_1947',\n",
+        " 'Y_1948',\n",
+        " 'Y_1949',\n",
+        " 'Y_1950',\n",
+        " 'Y_1951',\n",
+        " 'Y_1952',\n",
+        " 'Y_1953',\n",
+        " 'Y_1954',\n",
+        " 'Y_1955',\n",
+        " 'Y_1956',\n",
+        " 'Y_1957',\n",
+        " 'Y_1958',\n",
+        " 'Y_1959',\n",
+        " 'Y_1960',\n",
+        " 'Y_1961',\n",
+        " 'Y_1962',\n",
+        " 'Y_1963',\n",
+        " 'Y_1964',\n",
+        " 'Y_1965',\n",
+        " 'Y_1966',\n",
+        " 'Y_1967',\n",
+        " 'Y_1968',\n",
+        " 'Y_1969',\n",
+        " 'Y_1970',\n",
+        " 'Y_1971',\n",
+        " 'Y_1972',\n",
+        " 'Y_1973',\n",
+        " 'Y_1974',\n",
+        " 'Y_1975',\n",
+        " 'Y_1976',\n",
+        " 'Y_1977',\n",
+        " 'Y_1978',\n",
+        " 'Y_1979',\n",
+        " 'Y_1980',\n",
+        " 'Y_1981',\n",
+        " 'Y_1982',\n",
+        " 'Y_1983',\n",
+        " 'Y_1984',\n",
+        " 'Y_1985',\n",
+        " 'Y_1986',\n",
+        " 'Y_1987',\n",
+        " 'Y_1988',\n",
+        " 'Y_1989',\n",
+        " 'Y_1990',\n",
+        " 'Y_1991',\n",
+        " 'Y_1992',\n",
+        " 'Y_1993',\n",
+        " 'Y_1994',\n",
+        " 'Y_1995',\n",
+        " 'Y_1996',\n",
+        " 'Y_1997',\n",
+        " 'Y_1998',\n",
+        " 'Y_1999',\n",
+        " 'Y_2000',\n",
+        " 'Y_2001',\n",
+        " 'Y_2002',\n",
+        " 'Y_2003',\n",
+        " 'Y_2004',\n",
+        " 'Y_2005',\n",
+        " 'Y_2006',\n",
+        " 'Y_2007',\n",
+        " 'Y_2008',\n",
+        " 'Y_2009']"
+       ]
+      }
+     ],
+     "prompt_number": 39
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 40,
+       "text": [
+        "['Y_1929',\n",
+        " 'Y_1930',\n",
+        " 'Y_1931',\n",
+        " 'Y_1932',\n",
+        " 'Y_1933',\n",
+        " 'Y_1934',\n",
+        " 'Y_1935',\n",
+        " 'Y_1936',\n",
+        " 'Y_1937',\n",
+        " 'Y_1938',\n",
+        " 'Y_1939',\n",
+        " 'Y_1940',\n",
+        " 'Y_1941',\n",
+        " 'Y_1942',\n",
+        " 'Y_1943',\n",
+        " 'Y_1944',\n",
+        " 'Y_1945',\n",
+        " 'Y_1946',\n",
+        " 'Y_1947',\n",
+        " 'Y_1948',\n",
+        " 'Y_1949',\n",
+        " 'Y_1950',\n",
+        " 'Y_1951',\n",
+        " 'Y_1952',\n",
+        " 'Y_1953',\n",
+        " 'Y_1954',\n",
+        " 'Y_1955',\n",
+        " 'Y_1956',\n",
+        " 'Y_1957',\n",
+        " 'Y_1958',\n",
+        " 'Y_1959',\n",
+        " 'Y_1960',\n",
+        " 'Y_1961',\n",
+        " 'Y_1962',\n",
+        " 'Y_1963',\n",
+        " 'Y_1964',\n",
+        " 'Y_1965',\n",
+        " 'Y_1966',\n",
+        " 'Y_1967',\n",
+        " 'Y_1968',\n",
+        " 'Y_1969',\n",
+        " 'Y_1970',\n",
+        " 'Y_1971',\n",
+        " 'Y_1972',\n",
+        " 'Y_1973',\n",
+        " 'Y_1974',\n",
+        " 'Y_1975',\n",
+        " 'Y_1976',\n",
+        " 'Y_1977',\n",
+        " 'Y_1978',\n",
+        " 'Y_1979',\n",
+        " 'Y_1980',\n",
+        " 'Y_1981',\n",
+        " 'Y_1982',\n",
+        " 'Y_1983',\n",
+        " 'Y_1984',\n",
+        " 'Y_1985',\n",
+        " 'Y_1986',\n",
+        " 'Y_1987',\n",
+        " 'Y_1988',\n",
+        " 'Y_1989',\n",
+        " 'Y_1990',\n",
+        " 'Y_1991',\n",
+        " 'Y_1992',\n",
+        " 'Y_1993',\n",
+        " 'Y_1994',\n",
+        " 'Y_1995',\n",
+        " 'Y_1996',\n",
+        " 'Y_1997',\n",
+        " 'Y_1998',\n",
+        " 'Y_1999',\n",
+        " 'Y_2000',\n",
+        " 'Y_2001',\n",
+        " 'Y_2002',\n",
+        " 'Y_2003',\n",
+        " 'Y_2004',\n",
+        " 'Y_2005',\n",
+        " 'Y_2006',\n",
+        " 'Y_2007',\n",
+        " 'Y_2008',\n",
+        " 'Y_2009']"
+       ]
+      }
+     ],
+     "prompt_number": 40
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.getVars('Y_1929')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 41,
+       "text": [
+        "{0: [323],\n",
+        " 1: [600],\n",
+        " 2: [310],\n",
+        " 3: [991],\n",
+        " 4: [634],\n",
+        " 5: [1024],\n",
+        " 6: [1032],\n",
+        " 7: [518],\n",
+        " 8: [347],\n",
+        " 9: [507],\n",
+        " 10: [948],\n",
+        " 11: [607],\n",
+        " 12: [581],\n",
+        " 13: [532],\n",
+        " 14: [393],\n",
+        " 15: [414],\n",
+        " 16: [601],\n",
+        " 17: [768],\n",
+        " 18: [906],\n",
+        " 19: [790],\n",
+        " 20: [599],\n",
+        " 21: [286],\n",
+        " 22: [621],\n",
+        " 23: [592],\n",
+        " 24: [596],\n",
+        " 25: [868],\n",
+        " 26: [686],\n",
+        " 27: [918],\n",
+        " 28: [410],\n",
+        " 29: [1152],\n",
+        " 30: [332],\n",
+        " 31: [382],\n",
+        " 32: [771],\n",
+        " 33: [455],\n",
+        " 34: [668],\n",
+        " 35: [772],\n",
+        " 36: [874],\n",
+        " 37: [271],\n",
+        " 38: [426],\n",
+        " 39: [378],\n",
+        " 40: [479],\n",
+        " 41: [551],\n",
+        " 42: [634],\n",
+        " 43: [434],\n",
+        " 44: [741],\n",
+        " 45: [460],\n",
+        " 46: [673],\n",
+        " 47: [675]}"
+       ]
+      }
+     ],
+     "prompt_number": 41
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.Wrook"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 42,
+       "text": [
+        "{}"
+       ]
+      }
+     ],
+     "prompt_number": 42
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', ['Y_1980'], 8, wType='queen', inits=10, dissolve=0)\n",
+      "#mexico.cluster('arisel', ['pcgdp1940'], 5, wType='rook', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  48\n",
+        "Number of regions:  8\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [4, 0, 4, 2, 0, 6, 0, 4, 4, 1, 0, 0, 0, 0, 4, 4, 5, 0, 7, 0, 0, 4, 0, 1, 0, 2, 5, 7, 1, 7, 4, 1, 0, 0, 1, 0, 3, 4, 1, 4, 0, 1, 5, 0, 1, 4, 0, 0]\n",
+        "initial O.F:  23338316.0\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [4, 1, 4, 2, 0, 6, 7, 4, 4, 1, 0, 0, 1, 0, 4, 1, 5, 0, 7, 0, 0, 4, 0, 1, 1, 2, 5, 7, 1, 7, 4, 1, 0, 1, 1, 0, 3, 4, 1, 4, 1, 1, 5, 0, 1, 4, 0, 0]\n",
+        "FINAL OF:  21804253.0\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 43
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 43
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "us = cp.Layer()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 44
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cp.addQueen2Layer(ps.examples.get_path('states48.gal'), us)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 45
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "uscsv = ps.examples.get_path(\"usjoin.csv\")"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 46
+    },
+    {
+     "cell_type": "code",
+     "collapsed": true,
+     "input": [
+      "f = ps.open(uscsv)\n",
+      "pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)]).T\n",
+      "names = [\"Y_%d\"%v for v in range(1929,2010)]\n",
+      "cp.addArray2Layer(pci, us, names)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 47
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', ['Y_1980'], 8, wType='queen', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  48\n",
+        "Number of regions:  8\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [0, 6, 0, 2, 6, 5, 6, 0, 0, 7, 6, 6, 6, 6, 0, 0, 4, 6, 4, 6, 6, 0, 6, 7, 6, 2, 4, 1, 7, 4, 0, 7, 6, 6, 2, 6, 4, 0, 7, 0, 6, 7, 3, 6, 2, 0, 6, 6]\n",
+        "initial O.F:  21166215.0\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [0, 6, 0, 2, 6, 5, 1, 0, 0, 7, 6, 6, 6, 6, 0, 0, 4, 6, 4, 6, 6, 0, 6, 7, 7, 2, 4, 1, 7, 1, 0, 7, 6, 6, 2, 6, 4, 0, 7, 0, 6, 7, 3, 6, 2, 0, 6, 6]\n",
+        "FINAL OF:  19355956.0\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 48
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "us_alpha = cp.importCsvData(ps.examples.get_path('usjoin.csv'))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 49
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "alpha_fips = us_alpha.getVars('STATE_FIPS')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n"
+       ]
+      }
+     ],
+     "prompt_number": 50
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "alpha_fips"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 51,
+       "text": [
+        "{0: [1],\n",
+        " 1: [4],\n",
+        " 2: [5],\n",
+        " 3: [6],\n",
+        " 4: [8],\n",
+        " 5: [9],\n",
+        " 6: [10],\n",
+        " 7: [12],\n",
+        " 8: [13],\n",
+        " 9: [16],\n",
+        " 10: [17],\n",
+        " 11: [18],\n",
+        " 12: [19],\n",
+        " 13: [20],\n",
+        " 14: [21],\n",
+        " 15: [22],\n",
+        " 16: [23],\n",
+        " 17: [24],\n",
+        " 18: [25],\n",
+        " 19: [26],\n",
+        " 20: [27],\n",
+        " 21: [28],\n",
+        " 22: [29],\n",
+        " 23: [30],\n",
+        " 24: [31],\n",
+        " 25: [32],\n",
+        " 26: [33],\n",
+        " 27: [34],\n",
+        " 28: [35],\n",
+        " 29: [36],\n",
+        " 30: [37],\n",
+        " 31: [38],\n",
+        " 32: [39],\n",
+        " 33: [40],\n",
+        " 34: [41],\n",
+        " 35: [42],\n",
+        " 36: [44],\n",
+        " 37: [45],\n",
+        " 38: [46],\n",
+        " 39: [47],\n",
+        " 40: [48],\n",
+        " 41: [49],\n",
+        " 42: [50],\n",
+        " 43: [51],\n",
+        " 44: [53],\n",
+        " 45: [54],\n",
+        " 46: [55],\n",
+        " 47: [56]}"
+       ]
+      }
+     ],
+     "prompt_number": 51
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "dbf = ps.open(ps.examples.get_path('us48.dbf'))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 52
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "dbf.header"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 53,
+       "text": [
+        "['AREA',\n",
+        " 'PERIMETER',\n",
+        " 'STATE_',\n",
+        " 'STATE_ID',\n",
+        " 'STATE_NAME',\n",
+        " 'STATE_FIPS',\n",
+        " 'SUB_REGION',\n",
+        " 'STATE_ABBR']"
+       ]
+      }
+     ],
+     "prompt_number": 53
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "state_fips = dbf.by_col('STATE_FIPS')\n",
+      "names = dbf.by_col('STATE_NAME')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 54
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "names"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 55,
+       "text": [
+        "['Washington',\n",
+        " 'Montana',\n",
+        " 'Maine',\n",
+        " 'North Dakota',\n",
+        " 'South Dakota',\n",
+        " 'Wyoming',\n",
+        " 'Wisconsin',\n",
+        " 'Idaho',\n",
+        " 'Vermont',\n",
+        " 'Minnesota',\n",
+        " 'Oregon',\n",
+        " 'New Hampshire',\n",
+        " 'Iowa',\n",
+        " 'Massachusetts',\n",
+        " 'Nebraska',\n",
+        " 'New York',\n",
+        " 'Pennsylvania',\n",
+        " 'Connecticut',\n",
+        " 'Rhode Island',\n",
+        " 'New Jersey',\n",
+        " 'Indiana',\n",
+        " 'Nevada',\n",
+        " 'Utah',\n",
+        " 'California',\n",
+        " 'Ohio',\n",
+        " 'Illinois',\n",
+        " 'Delaware',\n",
+        " 'West Virginia',\n",
+        " 'Maryland',\n",
+        " 'Colorado',\n",
+        " 'Kentucky',\n",
+        " 'Kansas',\n",
+        " 'Virginia',\n",
+        " 'Missouri',\n",
+        " 'Arizona',\n",
+        " 'Oklahoma',\n",
+        " 'North Carolina',\n",
+        " 'Tennessee',\n",
+        " 'Texas',\n",
+        " 'New Mexico',\n",
+        " 'Alabama',\n",
+        " 'Mississippi',\n",
+        " 'Georgia',\n",
+        " 'South Carolina',\n",
+        " 'Arkansas',\n",
+        " 'Louisiana',\n",
+        " 'Florida',\n",
+        " 'Michigan']"
+       ]
+      }
+     ],
+     "prompt_number": 55
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "state_fips = map(int, state_fips)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 56
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "state_fips"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 57,
+       "text": [
+        "[53,\n",
+        " 30,\n",
+        " 23,\n",
+        " 38,\n",
+        " 46,\n",
+        " 56,\n",
+        " 55,\n",
+        " 16,\n",
+        " 50,\n",
+        " 27,\n",
+        " 41,\n",
+        " 33,\n",
+        " 19,\n",
+        " 25,\n",
+        " 31,\n",
+        " 36,\n",
+        " 42,\n",
+        " 9,\n",
+        " 44,\n",
+        " 34,\n",
+        " 18,\n",
+        " 32,\n",
+        " 49,\n",
+        " 6,\n",
+        " 39,\n",
+        " 17,\n",
+        " 10,\n",
+        " 54,\n",
+        " 24,\n",
+        " 8,\n",
+        " 21,\n",
+        " 20,\n",
+        " 51,\n",
+        " 29,\n",
+        " 4,\n",
+        " 40,\n",
+        " 37,\n",
+        " 47,\n",
+        " 48,\n",
+        " 35,\n",
+        " 1,\n",
+        " 28,\n",
+        " 13,\n",
+        " 45,\n",
+        " 5,\n",
+        " 22,\n",
+        " 12,\n",
+        " 26]"
+       ]
+      }
+     ],
+     "prompt_number": 57
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# the csv file has the states ordered alphabetically, but this isn't the case for the order in the shapefile so we have to reorder before any choropleths are drawn\n",
+      "alpha_fips = [i[0] for i in alpha_fips.values()]\n",
+      "reorder = [ alpha_fips.index(s) for s in state_fips]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 58
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions = usy.region2areas"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 59
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 60,
+       "text": [
+        "[0,\n",
+        " 6,\n",
+        " 0,\n",
+        " 2,\n",
+        " 6,\n",
+        " 5,\n",
+        " 1,\n",
+        " 0,\n",
+        " 0,\n",
+        " 7,\n",
+        " 6,\n",
+        " 6,\n",
+        " 6,\n",
+        " 6,\n",
+        " 0,\n",
+        " 0,\n",
+        " 4,\n",
+        " 6,\n",
+        " 4,\n",
+        " 6,\n",
+        " 6,\n",
+        " 0,\n",
+        " 6,\n",
+        " 7,\n",
+        " 7,\n",
+        " 2,\n",
+        " 4,\n",
+        " 1,\n",
+        " 7,\n",
+        " 1,\n",
+        " 0,\n",
+        " 7,\n",
+        " 6,\n",
+        " 6,\n",
+        " 2,\n",
+        " 6,\n",
+        " 4,\n",
+        " 0,\n",
+        " 7,\n",
+        " 0,\n",
+        " 6,\n",
+        " 7,\n",
+        " 3,\n",
+        " 6,\n",
+        " 2,\n",
+        " 0,\n",
+        " 6,\n",
+        " 6]"
+       ]
+      }
+     ],
+     "prompt_number": 60
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "from pysal.contrib.viz import mapping as maps"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 61
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "shp = ps.examples.get_path('us48.shp')\n",
+      "regions = np.array(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 62
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADtCAYAAAAcNaZ2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8FNXax3/nzMz29ISQCiGUAKGXSJeOFKWKysUCCBaw\n36tXvXZFUHxt6LWCCqKiICgIUqT3GnpvIYUAaZttM3PO+8duQoCU3WSTbLz7/egH2J0588zsmWee\nec5TAD9+/Pjx48ePHz9+/Pjx48ePHz9+/Pjx48ePHz9+/Pjx48ePHz9+/Pjx48ePn7pDgEaT+fGQ\nITxAo7ECSKhtefz4KQta2wL48eMNOCD0TkhA59hYB4BOtS2PHz9lIXpxrMkA4gCoXhzTUxoCuAKg\n4IbPkwEcrHFpfJtI159ZtSpF2bQAcAxuziezwxEuEIIeDRoEbLlwoZtVln+qXvH8+Kkc3lK6kQA+\nG9g0Eu1jQ [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x149ca390>"
+       ]
+      }
+     ],
+     "prompt_number": 63
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', ['Y_1929'], 8, wType='queen', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Getting variables\n",
+        "Variables successfully extracted\n",
+        "Running original Arisel algorithm\n",
+        "Number of areas:  48\n",
+        "Number of regions:  8\n",
+        "initial Solution: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [0, 4, 0, 1, 4, 6, 6, 0, 0, 4, 5, 5, 4, 4, 2, 0, 3, 5, 6, 5, 4, 0, 4, 7, 4, 1, 3, 6, 0, 6, 0, 4, 5, 0, 4, 5, 6, 0, 4, 2, 0, 4, 3, 2, 4, 2, 4, 7]\n",
+        "initial O.F:  320792.0\n",
+        "FINAL SOLUTION: "
+       ]
+      },
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        " [0, 4, 0, 1, 7, 6, 6, 0, 0, 4, 5, 5, 4, 4, 0, 0, 3, 5, 6, 5, 4, 0, 4, 4, 4, 1, 3, 6, 0, 6, 0, 4, 5, 0, 4, 5, 6, 0, 4, 0, 0, 4, 3, 2, 4, 2, 5, 7]\n",
+        "FINAL OF:  311057.0\n",
+        "Done\n",
+        "Adding variables\n",
+        "Done\n"
+       ]
+      }
+     ],
+     "prompt_number": 64
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions = usy.region2areas"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 65
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions = np.array(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 66
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADtCAYAAAAcNaZ2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8FMX7xz8zW66nkZCEltBLgNCrgBTp+EXFhuWriKAi\nKijqzwKKBUEFFQVBBTtWRAREEBSQ3os06TWUkH5ly8zvj01CgJS75C65+L33C15J7nZnn92dfXbm\nmacAIUKECBEiRIgQIUKECBEiRIgQIUKECBEiRIgQIUKECBEiRIgQIUKECBEiRIgQlQeLzZ4y/IXJ\n3GKzuwDUrmh5QoQoClrRAoQI4Q8450JSu86o16ylAqBtRcsTIkRRiH5sawSAmgB0P7bpK4kAUgFk\nXfV5UwB7yl2a4CY29+e5CpWiaJoAOAAv+5PbmRNNqYDGrTs6DuzY0llxu74LrHghQpQOfyndWACz\nWlzXA3WaJ [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x1495def0>"
+       ]
+      }
+     ],
+     "prompt_number": 67
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "names = [\"Y_%d\"%i for i in range(1929, 2010)]\n",
+      "#usy.cluster('arisel', ['Y_1929'], 8, wType='queen', inits=10, dissolve=0)\n",
+      "usy.cluster('arisel', names, 8, wType='queen', inits=10, dissolve=0)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='All Years')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ps.version"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', names[:40], 8, wType='queen', inits=10, dissolve=0)\n",
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='1929-68')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', names[40:], 8, wType='queen', inits=10, dissolve=0)\n",
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='1969-2009')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('arisel', names[40:], 8, wType='queen', inits=10, dissolve=0)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.dataOperation(\"CONSTANT = 1\")\n",
+      "usy.Wrook = usy.Wqueen\n",
+      "usy.cluster('maxpTabu', ['Y_1929', 'Y_1929'], threshold=1000, dissolve=0)\n",
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='maxp 1929')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.getVars('Y_1929')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.Wrook"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('maxpTabu', ['Y_1929', 'CONSTANT'], threshold=8, dissolve=0)\n",
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='maxp 1929')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "regions"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "vars = names"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "vars.append('CONSTANT')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "vars"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "usy.cluster('maxpTabu', vars, threshold=8, dissolve=0)\n",
+      "regions = usy.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions[reorder], 'unique_values', title='maxp 1929-2009')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "south = cp.loadArcData(ps.examples.get_path(\"south.shp\"))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "south.fieldNames"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# uncomment if you have some time ;->\n",
+      "#south.cluster('arisel', ['HR70'], 20, wType='queen', inits=10, dissolve=0)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "#regions = south.region2areas"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "shp = ps.examples.get_path('south.shp')\n",
+      "#maps.plot_choropleth(shp, np.array(regions), 'unique_values')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "south.dataOperation(\"CONSTANT = 1\")\n",
+      "south.cluster('maxpTabu', ['HR70', 'CONSTANT'], threshold=70, dissolve=0)\n",
+      "regions = south.region2areas\n",
+      "regions = np.array(regions)\n",
+      "maps.plot_choropleth(shp, regions, 'unique_values', title='maxp HR70 threshold=70')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "Counter(regions)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/pysal/contrib/clusterpy/clusterpy_ext.py b/pysal/contrib/clusterpy/clusterpy_ext.py
new file mode 100644
index 0000000..e48b202
--- /dev/null
+++ b/pysal/contrib/clusterpy/clusterpy_ext.py
@@ -0,0 +1,284 @@
+import clusterpy as _clusterpy
+import pysal as ps
+import struct
+
+__author__ = "Sergio Rey <sjsrey at gmail.com>"
+
+__ALL__= ['Layer', 'loadArcData', 'importCsvData', 'addRook2Layer', 'addQueen2Layer', 'addArray2Layer' ]
+
+
+def _importArcData(filename):
+    """Creates a new Layer from a shapefile (<file>.shp)
+
+    This function wraps and extends a core clusterPy function to utilize PySAL
+    W constructors and dbf readers.
+
+
+    Parameters
+    ==========
+
+    filename: string
+              suffix of shapefile (fileName not fileName.shp)
+
+
+    Returns
+    =======
+    layer: clusterpy layer instance
+
+
+
+    """
+    layer = _clusterpy.Layer()
+    layer.name = filename.split('/')[-1]
+    #print "Loading " + filename + ".dbf"
+    dbf = ps.open(filename+".dbf")
+    fields = dbf.header
+    #data, fields, specs = importDBF(filename + '.dbf')
+    data = {}
+    #print "Loading " + filename + ".shp"
+    if fields[0] != "ID":
+        fields = ["ID"] + fields
+        for y in range(dbf.n_records):
+            data[y] = [y] + dbf.by_row(y)
+    else:
+        for y in range(dbf.n_records):
+            data[y] = dbf.by_row_(y)
+
+    layer.fieldNames = fields
+    layer.Y = data
+    shpf = filename+".shp"
+    layer.shpType = 5
+    #print 'pysal reader'
+    layer.Wrook = ps.rook_from_shapefile(filename+".shp").neighbors
+    layer.Wqueen = ps.queen_from_shapefile(filename+".shp").neighbors
+    #print "Done"
+    return layer
+
+_clusterpy.importArcData = _importArcData
+
+################# Public functions #######################
+
+def Layer():
+    """Provide a clusterpy Layer instance
+
+    Parameters
+    ==========
+
+    none
+
+    Returns
+    =======
+
+    layer: clusterpy.Layer instance
+
+    Examples
+    ========
+    >>> import pysal.contrib.clusterpy as cp
+    >>> l = cp.Layer()
+    >>> type(l)
+    <type 'instance'>
+    >>> l.Wrook
+    {}
+
+    """
+    return _clusterpy.Layer()
+
+def loadArcData(shapeFileName):
+    """
+    Handler to use PySAL W and dbf readers in place of clusterpy's
+
+    Parameters
+    ==========
+    shapeFileName: string
+                   filename including .shp extension
+
+    Returns
+    =======
+    layer: clusterpy layer instance
+
+    
+
+    Examples
+    ========
+    >>> import pysal.contrib.clusterpy as cp
+    >>> import pysal as ps
+    >>> shpFile = ps.examples.get_path('columbus.shp')
+    >>> columbus = cp.loadArcData(shpFile)
+    >>> columbus.Wrook[0]
+    [1, 2]
+    >>> columbus.Wrook[1]
+    [0, 2, 3]
+    >>> columbus.fieldNames[0:10]
+    ['ID', 'AREA', 'PERIMETER', 'COLUMBUS_', 'COLUMBUS_I', 'POLYID', 'NEIG', 'HOVAL', 'INC', 'CRIME']
+    """
+    base = shapeFileName.split(".")[0]
+    return _clusterpy.importArcData(base)
+
+def importCsvData(filename, layer=None):
+    """
+    Read a csv file of attributes into a layer
+
+    Parameters
+    ==========
+
+    filename: string
+              csv file to load
+
+    layer: clusterpy layer instance (default: None)
+           if a layer is passed, new attributes, Ws are attached to the layer.
+               Otherwise a new layer is created and returned
+           
+
+
+    Returns
+    =======
+    layer: clusterpy layer instance
+
+
+    Examples
+    ========
+    >>> import pysal.contrib.clusterpy as cp
+    >>> l = cp.Layer()
+    >>> mexico = cp.importCsvData(ps.examples.get_path('mexico.csv'))
+    >>> mexico.fieldNames
+    ['ID', 'State', 'pcgdp1940', 'pcgdp1950', 'pcgdp1960', 'pcgdp1970', 'pcgdp1980', 'pcgdp1990', 'pcgdp2000', 'hanson03', 'hanson98', 'esquivel99', 'inegi', 'inegi2']
+
+    Notes
+    =====
+
+    This assumes the csv file is organized with records on the rows and attributes on the columns
+
+
+    """
+
+    if not layer:
+        layer = Layer()
+    csv = ps.open(filename,'r')
+    fields = csv.header
+    data = {}
+    if fields[0] != "ID":
+        fields = ["ID"] + fields
+        for i, rec in enumerate(csv.data):
+            data[i] = [i] + csv.by_row(i)
+    else:
+        for i, rec in enumerate(csv.data):
+            data[i] = csv.by_row(i) 
+    layer.Y = data
+    layer.fieldNames = fields
+    return layer
+
+def addGal2Layer(galfile, layer, contiguity='rook'):
+    """
+    Attach an adjacency object to a layer
+
+    Parameters
+    ==========
+    galfile: string
+             galfile
+
+    layer: clusterpy layer
+
+    contiguity: type of contiguity ['rook'|'queen']
+
+
+    Returns
+    =======
+    None
+
+    Examples
+    ========
+    >>> import pysal as ps
+    >>> import pysal.contrib.clusterpy as cp
+    >>> csvfile = ps.examples.get_path('mexico.csv')
+    >>> galfile = ps.examples.get_path('mexico.gal')
+    >>> mexico = cp.importCsvData(csvfile)
+    >>> cp.addRook2Layer(galfile, mexico)
+    >>> mexico.Wrook[0]
+    [31, 13]
+
+
+    """
+    gal = ps.open(galfile).read().neighbors
+    w = {}
+    for key in gal:
+        w[int(key)] =  map(int, gal[key]) 
+    
+    if contiguity.upper()== "ROOK":
+        layer.Wrook = w
+    elif contiguity.upper() == "QUEEN":
+        layer.Wqueen = w
+    else:
+        print 'Unsupported contiguity type: ', contiguity
+
+def addRook2Layer(galfile, layer):
+    """
+    User function for adding rook to layer
+
+    See addGal2Layer
+    """
+    addGal2Layer(galfile, layer)
+
+def addQueen2Layer(galfile, layer):
+    """
+    User function for adding queen to layer
+
+    See addGal2Layer
+    """
+    addGal2Layer(galfile, layer, contiguity='QUEEN')
+
+def addArray2Layer(array, layer, names=None):
+    """
+    Add a numpy array to a clusterpy layer
+
+
+    Parameters
+    ==========
+    array: nd-array
+           nxk with n observations on k attributes
+
+    layer: clusterpy layer object
+
+    names: list
+           k strings for attribute names
+
+    Returns
+    =======
+    None
+
+    Examples
+    ========
+    # Note this will report as fail since clusterpy prints 'Adding variables
+    # for each variable added. But the variables will be correctly added
+    >>> #import pysal as ps
+    >>> #import pysal.contrib.clusterpy as cp
+    >>> #import numpy as np
+    >>> #uscsv = ps.examples.get_path("usjoin.csv")
+    >>> #f = ps.open(uscsv)
+    >>> #pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)]).T 
+    >>> #usy = cp.Layer()
+    >>> #names = ["Y_%d"%v for v in range(1929,2010)]
+    >>> #cp.addArray2Layer(pci, usy, names)
+
+    """
+    n,k = array.shape
+    if not names:
+        names = ["X_%d"% v for v in range(k)]
+        
+    for j,name in enumerate(names):
+        v = {}
+        for i in xrange(n):
+            v[i] = array[i,j]
+        layer.addVariable([name], v)
+
+if __name__ == '__main__':
+
+    import numpy as np
+
+    columbus = loadArcData(ps.examples.get_path('columbus.shp'))
+    n = len(columbus.Wqueen)
+    columbus.dataOperation("CONSTANT = 1")
+    np.random.seed(12345)
+    columbus.cluster('maxpTabu', ['CRIME',  'CONSTANT'], threshold=4, dissolve=0, std=0)
+    #np.random.seed(12345)
+    #columbus.cluster('arisel', ['CRIME'], 5, wType='rook', inits=10, dissolve=0)
+
diff --git a/pysal/contrib/db/PGDump.py b/pysal/contrib/db/PGDump.py
new file mode 100644
index 0000000..7f192d0
--- /dev/null
+++ b/pysal/contrib/db/PGDump.py
@@ -0,0 +1,104 @@
+#!/usr/bin/python
+__author__ = "Philip Stephens <philip.stephens at asu.edu "
+
+__all__ = ['db2shape', 'db_table2gal']
+
+from django.contrib.gis.gdal.geomtype import OGRGeomType
+from osgeo import ogr
+import pysal
+import os
+
+def db2shape(connstring, input, output):
+    """
+    dumps a postgis database table to shapefile 
+
+    Arguments
+    ---------
+    connstring = A connection string with 4 parameters. Example is 
+    "PG: host='localhost' dbname='pysaldb' user='myusername'
+    password='my_password'"
+
+    input : the db table
+
+    output : a filename
+
+    Examples
+    --------
+    TBD 
+    TODO: make pysal db tables read-only and available on network
+
+    Note
+    ----
+
+    If a file exists with the same name as 'output', it will be deleted
+    before being overwritten.
+    """
+    conn = ogr.Open(connstring)
+    layer = conn.GetLayerByName(input)
+    type = layer.GetGeomType()  # returns an int
+    geom_type = OGRGeomType._types[type]  # map int to string description
+
+    # Schema definition of SHP file
+    out_driver = ogr.GetDriverByName( 'ESRI Shapefile' )
+    if os.path.exists(output):
+        out_driver.DeleteDataSource(output)
+    out_ds = out_driver.CreateDataSource(output)
+    out_srs = None
+    out_layer = out_ds.CreateLayer(geom_type, out_srs, type)
+    fd = ogr.FieldDefn('name',ogr.OFTString)
+    out_layer.CreateField(fd)
+
+    #layer = conn.ExecuteSQL(sql)
+
+    feat = layer.GetNextFeature()
+    while feat is not None:
+        featDef = ogr.Feature(out_layer.GetLayerDefn())
+        featDef.SetGeometry(feat.GetGeometryRef())
+        #featDef.SetField('name',feat.TITLE)
+        out_layer.CreateFeature(featDef)
+        feat.Destroy()
+        feat = layer.GetNextFeature()
+    conn.Destroy()
+    out_ds.Destroy()
+
+def db_table2gal(connstring, input, weights_type=Contiguity):
+    """
+    generates a GAL file from a postgis database table 
+
+    Arguments
+    ---------
+    connstring = A connection string with 4 parameters. Example is 
+    "PG: host='localhost' dbname='pysaldb' user='myusername'
+    password='my_password'"
+
+    input : the db table
+
+    weights_type : type of spatial weights calculation, defaults to
+    Contiguity
+
+    Examples
+    --------
+    TBD 
+
+    Note
+    ----
+    TBD
+
+    pseudo
+    ------
+    connect to db
+    simplify
+    create topo
+    query topogeom
+    return aswkt or asgeojson
+    convert returned to pysal w 
+    write w to GAL
+
+
+    """
+    pass
+
+
+if __name__ == '__main__':
+    import nose
+    nose.runmodule()
diff --git a/pysal/contrib/db/README b/pysal/contrib/db/README
new file mode 100644
index 0000000..8ecc211
--- /dev/null
+++ b/pysal/contrib/db/README
@@ -0,0 +1,125 @@
+Pysal.Contrib.db
+================
+
+This module enables simplified connection and query on spatial databases. 
+It enables developers to return the query results in a 
+Pysal-digestible format such as geojson or Well Known Text (WKT).
+It explicitly supports the following spatial databases:
+
+ * postgis (October, 2012)
+
+Caveat: This is experimental, use with Caution!
+
+Depends
+=======
+
+ * psycopg2
+ * pysal
+ * osgeo
+ * django.contrib.gis
+ * any of the supported databases and associated CRUD privileges 
+
+Suggests
+========
+
+ * PL/PYTHON
+ * Shapely
+ * sqlalchemy
+
+Usage
+=====
+
+The first function PySAL supports is dumping data from a PostGIS table into
+shapefile format. We'll access the db and dump data using
+the ogr module. 
+
+The secondary utility to be developed is a cursor manager. Developers may use this cursor to write
+their own database functions for their particular spatial analysis
+applications.
+
+For instance::
+ 
+ >>> import psycopg2 as pg
+ >>> conn = pg.connect("dbname=pysaldb user=stephens")
+ >>> cur = conn.cursor()
+ >>> cur.execute(<your SQL>)
+
+
+Installation
+============
+
+TBD
+
+Configuration
+=============
+
+If you have PostGres environment variables set in your userspace, you can
+instruct Pysal to use them rather than passing in database credentials each
+time. Look at the __init__.py for more information.
+
+I'm leaving the details of setting up a PostGIS database to the user. 
+I developed this module on Ubuntu 12.04. I used the psql console to create the
+spatial indices for the pysal example data, and used the SPIT plugin in QGIS
+to upload the pysal example data into the PostGIS db. There is a bug that
+affects only some of the example data when using SPIT to upload. To remedy it, use the ftools
+plugin to convert multipart polygons to single parts. Vector -> Geometry
+tools -> multi...
+
+
+Notes
+-----
+
+When choosing between psycopg2 and sqlalchemy, sqlalchemy is db
+agnostic, meaning your code will work with any db. Also, 
+sqlalchemy ships with Enthought Python so the dependencies are handled by
+Pysal's new (as of 1.5) dependency policy.
+
+However you connect and query the db, return results from
+postgis as geojson or wkt so python/pysal can deal with them directly.
+
+Or, as an alternative, SQLalchemy offers the ORM path, which is more
+abstract and maps database objects to python constructs.
+
+What I want to write here is basically a cursor object manager that other
+functions or classes in pysal can use to query a database object. For example,
+in the weights class, an alternative distance-based calculator function can post
+a hard-coded query into a db using a cursor enabled by this module. This does
+not necessarily have to be a complicated module: accept a database name, a user
+name, a password, and a table; receive a cursor for that table. Not even sure if
+this module is necessary? Perhaps just so we do not repeat ourselves in the
+potentially many instances of using postgis functionality. Be sure to check out
+pandas.io for tips.
+
+
+
+Dataset Notes
+-------------
+As noted in the README, I used the psql console, pgAdmin3 and QGIS to set up
+the data. I was able to create a spatial index on almost all of the example data
+sets except Chicago77, which the console insisted did not exist, though it
+plainly appeared in pgAdmin3.
+
+From IRC with strk:
+11:32 peas am I right in thinking that the ability to easily create a topo
+representation of a postgis table is not yet available?
+looks like in future opportunities
+peas: the support was improved since the Paris presentation
+strk
+the toTopoGeom (maybe presented as a funding opportunity in those slides) it
+is now available
+strk
+should make it pretty simple to build the topology
+11:37 peas
+strk: i'll check it out, thanks again
+strk
+my blog has some recipes too
+
+
+TODO
+====
+
+I did not set the SRID when I imported the pysal example data into the
+postgis database. That may have to be remedied
+before making any of that data public, if it comes to that. 
+
+Update: I did figure out how to set the SRID on import.
diff --git a/pysal/contrib/db/README_PLPYTHON b/pysal/contrib/db/README_PLPYTHON
new file mode 100644
index 0000000..05e0e0e
--- /dev/null
+++ b/pysal/contrib/db/README_PLPYTHON
@@ -0,0 +1,10 @@
+Pl/Python
+---------
+
+An alternative approach.
+
+Installation
+------------
+sudo apt-get install postgresql-plpython-9.1
+createlang plpythonu <dbname> (in this case, pysaldb)
+
diff --git a/pysal/contrib/network/__init__.py b/pysal/contrib/network/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/network/access.py b/pysal/contrib/network/access.py
new file mode 100644
index 0000000..9fae70f
--- /dev/null
+++ b/pysal/contrib/network/access.py
@@ -0,0 +1,56 @@
+"""
+A library of spatial network accessibility functions.
+Not to be used without permission.
+
+Contact: 
+
+Andrew Winslow
+GeoDa Center for Geospatial Analysis
+Arizona State University
+Tempe, AZ
+Andrew.Winslow at asu.edu
+"""
+
+import math
+import unittest
+
+import test
+
+
+def coverage(dists, bandwidth):
+    """
+    Takes a list of numeric distances and a numeric bandwidth and returns the 
+    number of distances less than or equal to the bandwidth.
+    """
+    return len(filter(lambda d: d <= bandwidth, dists))
+
+def equity(dists):
+    """
+    Takes a list of numeric distances and returns the smallest of them.
+    """
+    return min(dists)
+
+def potential_entropy(dists, power=1):
+    """
+    Takes a list of numeric distances and returns the sum of the values
+    of a function of the distances. The function is e^(-power*distance).
+    """
+    return sum([math.e**(-power*d) for d in dists])
+
+def potential_gravity(dists, power=2):
+    """
+    Takes a list of numeric distances and returns the sum of the values
+    of a function of the distances. The function is 1/(d^power).
+    """
+    return sum([1.0/(d**power) for d in filter(lambda d: d > 0, dists)])
+
+def travel_cost(dists):
+    """
+    Takes a list of distances and computes the sum.
+    """
+    return sum(dists)
+   
+    
+
+
+
diff --git a/pysal/contrib/network/crimes.dbf b/pysal/contrib/network/crimes.dbf
new file mode 100644
index 0000000..523c2c8
Binary files /dev/null and b/pysal/contrib/network/crimes.dbf differ
diff --git a/pysal/contrib/network/crimes.shp b/pysal/contrib/network/crimes.shp
new file mode 100644
index 0000000..efc7d25
Binary files /dev/null and b/pysal/contrib/network/crimes.shp differ
diff --git a/pysal/contrib/network/crimes.shx b/pysal/contrib/network/crimes.shx
new file mode 100644
index 0000000..8a794bc
Binary files /dev/null and b/pysal/contrib/network/crimes.shx differ
diff --git a/pysal/contrib/network/distances.csv b/pysal/contrib/network/distances.csv
new file mode 100644
index 0000000..f9223b3
--- /dev/null
+++ b/pysal/contrib/network/distances.csv
@@ -0,0 +1,65 @@
+source id,destination id,distance
+1,1,0.0
+1,2,1625.91349431
+1,3,3295.50605182
+1,4,3825.24715194
+1,5,2053.92436941
+1,6,237.789536126
+1,7,1006.385016
+1,8,930.053249685
+3,1,3295.50605182
+3,2,3302.80671651
+3,3,0.0
+3,4,4464.60749293
+3,5,1807.7941268
+3,6,3277.17175715
+3,7,2880.68312579
+3,8,2641.72564757
+5,1,2053.92436941
+5,2,1511.33307526
+5,3,1807.7941268
+5,4,2887.51768661
+5,5,0.0
+5,6,1924.72832003
+5,7,2233.68249843
+5,8,1149.4089984
+7,1,1006.385016
+7,2,2429.77883642
+7,3,2880.68312579
+7,4,4588.68703106
+7,5,2233.68249843
+7,6,1200.33087099
+7,7,0.0
+7,8,1447.20469755
+8,1,930.053249685
+8,2,1041.45388498
+8,3,2641.72564757
+8,4,3144.97039345
+8,5,1149.4089984
+8,6,777.660767315
+8,7,1447.20469755
+8,8,0.0
+2,1,1625.91349431
+2,2,0.0
+2,3,3302.80671651
+2,4,2200.03426545
+2,5,1511.33307526
+2,6,1390.52348765
+2,7,2429.77883642
+2,8,1041.45388498
+4,1,3825.24715194
+4,2,2200.03426545
+4,3,4464.60749293
+4,4,0.0
+4,5,2887.51768661
+4,6,3588.96627183
+4,7,4588.68703106
+4,8,3144.97039345
+6,1,237.789536126
+6,2,1390.52348765
+6,3,3277.17175715
+6,4,3588.96627183
+6,5,1924.72832003
+6,6,0.0
+6,7,1200.33087099
+6,8,777.660767315
diff --git a/pysal/contrib/network/kernel.py b/pysal/contrib/network/kernel.py
new file mode 100644
index 0000000..8aca468
--- /dev/null
+++ b/pysal/contrib/network/kernel.py
@@ -0,0 +1,184 @@
+"""
+A library of spatial network kernel density functions.
+Not to be used without permission.
+
+Contact: 
+
+Andrew Winslow
+GeoDa Center for Geospatial Analysis
+Arizona State University
+Tempe, AZ
+Andrew.Winslow at asu.edu
+"""
+
+import operator
+import unittest
+import test
+import priordict as priordict
+import network as pynet
+from math import exp, sqrt, pi
+import time
+
+def triangular(z):
+    return 1 - abs(z)
+
+def uniform(z):
+    return abs(z)
+
+def quadratic(z):
+    return 0.75*(1 - z*z)
+
+def quartic(z):
+    return (3.0/pi)*(1-z*z)*(1-z*z)
+    #return (15*1.0/16)*(1-z*z)*(1-z*z)
+
+def gaussian(z):
+    return sqrt(2*pi)*exp(-0.5*z*z)
+
+def dijkstras_w_prev(G, start, r=1e600):
+    D = {}  # dictionary of final distances
+    P = {}  # dictionary of previous nodes
+    Q = priordict.PriorityDictionary()   # est.dist. of non-final vert.
+    Q[start] = 0
+    P[start] = None
+    for v in Q:
+        D[v] = Q[v]
+        if v == None or D[v] > r:
+            break
+        for w in G[v]:
+            vwLength = D[v] + G[v][w]
+            if w in D:
+                pass
+            elif w not in Q or vwLength < Q[w]:
+                Q[w] = vwLength
+                P[w] = v
+    return (D, P)
+
+def kernel_density(network, events, bandwidth, orig_nodes, kernel='quadratic'):
+    """
+    This function estimates Kernel densities on a planar undirected network. 
+    It implements the equal-split discontinuous Kernel function developed by Okabe et al. (2009).
+    Particularly, it computes Kernel densities by using equation 19 and 20 
+    in the paper of Okabe et al. (2009). 
+
+    Parameters
+    ----------
+    network: A dictionary of dictionaries like {n1:{n2:d12,...},...}
+             A planar undirected network
+             It is assumed that this network is divided by a certain cell size 
+             and is restructured to incorporate the new nodes resulting from the division as well as 
+             events. Therefore, nodes in the network can be classified into three groups:
+             i) original nodes, 2) event points, and 3) cell points.  
+    events: a list of tuples
+            a tuple is the network-projected coordinate of an event
+            that takes the form of (x,y)
+    bandwidth: a float
+            Kernel bandwidth
+    orig_nodes: a list of tuples
+            a tuple is the coordinate of a node that is part of the original base network
+            each tuple takes the form of (x,y)
+    kernel: string
+            the type of Kernel function
+            allowed values: 'quadratic', 'gaussian', 'quartic', 'uniform', 'triangular'
+
+    Returns
+    -------   
+    A dictionary where keys are nodes and values are their densities
+    Example: {n1:d1,n2:d2,...}
+
+    <tc>#is#kernel_density</tc>
+    """
+
+    # beginning of step i
+    density = {}
+    for n in network:
+        density[n] = []
+    # end of step i
+
+    # beginning of step ii
+    def compute_split_multiplier(prev_D, n):
+        '''
+        computes the demoninator of the formula 19
+
+        Parameters
+        ----------
+        prev_D: a dictionary storing paths from n to other nodes in the network
+                its form is like: {n1:prev_node_of_n1(=n2), n2:prev_node_of_n2(=n3),...}
+        n: a tuple containing the geographic coordinate of a starting point 
+           its form is like: (x,y)
+
+        Returns
+        -------
+        An integer
+
+        '''
+        split_multiplier = 1 
+        p = prev_D[n] 
+        while p != None:
+            if len(network[p]) > 1:
+                split_multiplier *= (len(network[p]) - 1)
+            if p not in prev_D: 
+                p = None
+            else:
+                p = prev_D[p]
+        return split_multiplier
+    # end of step ii
+
+    kernel_funcs = {'triangular':triangular, 'uniform': uniform, 
+                    'quadratic': quadratic, 'quartic':quartic, 'gaussian':gaussian}
+    #t1 = time.time()
+    # beginning of step iii
+    kernel_func = kernel_funcs[kernel]
+    for e in events:
+        # beginning of step a
+        src_D = pynet.dijkstras(network, e, bandwidth, True)
+        # end of step a
+        # beginning of step b
+        density[e].append(kernel_func(0))
+        # end of step b
+        # beginning of step c
+        for n in src_D[0]: # src_D[0] - a dictionary of nodes whose distance from e is smaller than e 
+            if src_D[0][n] == 0: continue
+            # src_D[1] - a dictionary from which a path from e to n can be traced
+            d = src_D[0][n]
+            if d <= bandwidth:
+                n_degree = 2.0
+                if n in events and n in orig_nodes and len(network[n]) > 0:
+                    n_degree = len(network[n])
+                unsplit_density = kernel_func(d*1.0/bandwidth*1.0) 
+                # src_D[1] - a dictionary from which a path from e to n can be traced
+                split_multiplier = compute_split_multiplier(src_D[1], n) 
+                density[n].append((1.0/split_multiplier)*(2.0/n_degree)*unsplit_density)
+                #if str(n[0]) == '724900.335127' and str(n[1]) == '872127.948935':
+                #    print 'event', e
+                #    print 'distance', d
+                #    print 'unsplit_density', unsplit_density
+                #    print 'n_degree', n_degree
+                #    print 'split_multiplier', split_multiplier
+                #    print 'density', (1.0/split_multiplier)*(2.0/n_degree)*unsplit_density
+        # end of step c
+
+    # beginning of step iv 
+    #t1 = time.time()
+    no_events = len(events)
+    for node in density:
+        if len(density[node]) > 0:
+            #if str(node[0]) == '724900.335127' and str(node[1]) == '872127.948935':
+            #    print density[node]
+            density[node] = sum(density[node])/no_events
+            #density[node] = sum(density[node])*1.0/len(density[node])
+        else:
+            density[node] = 0.0
+    # end of step iv
+
+    #for node in events:
+    #    del density[node]
+
+    #print 'normalizing density: %s' % (str(time.time() - t1))
+
+    return density
+
+    
+
+
+
diff --git a/pysal/contrib/network/kfuncs.py b/pysal/contrib/network/kfuncs.py
new file mode 100644
index 0000000..58cc327
--- /dev/null
+++ b/pysal/contrib/network/kfuncs.py
@@ -0,0 +1,75 @@
+"""
+A library of spatial network k-function functions.
+Not to be used without permission.
+
+Contact: 
+
+Andrew Winslow
+GeoDa Center for Geospatial Analysis
+Arizona State University
+Tempe, AZ
+Andrew.Winslow at asu.edu
+"""
+
+import unittest
+import test 
+
+def _fxrange(start, end, incr):
+    """
+    A float version of the xrange() built-in function.
+
+    _fxrange(number, number, number) -> iterator
+
+    Arguments:
+    start -- the lower end of the range (inclusive)
+    end -- the upper end of the range (exclusive)
+    incr -- the step size. must be positive.
+    """
+    i = 0
+    while True:
+        t = start + i*incr
+        if t >= end:
+            break
+        yield t
+        i += 1    
+
+def _binary_search(list, q):
+    """
+    Returns the index in a list where an item should be found.
+     
+    Arguments:
+    list -- a list of items
+    q -- a value to be searched for
+    """
+    l = 0
+    r = len(list)
+    while l < r:
+        m = (l + r)/2
+        if list[m] > q:
+            r = m
+        else:
+            l = m + 1
+    return l
+
+def kt_values(t_specs, distances, scaling_const):
+    """
+    Returns a dictionary of t numerics to k(t) numerics.
+
+    kt_values(number list, number list, number) -> number to number dictionary
+
+    Arguments:
+    t_specs -- a 3-tuple of (t_min, t_max, t_delta) specifying the t-values to compute k(t) for
+    distances -- a list of distances to compute k(t) from
+    scaling_const -- a constant to multiply k(t) by for each t
+    """
+    ks = {}
+    distances.sort()
+    if type(t_specs) == tuple:
+        t_specs = [t for t in _fxrange(t_specs[0], t_specs[1], t_specs[2])]
+        
+    for t in t_specs:
+        ks[t] = scaling_const*_binary_search(distances, t)
+    return ks     
+
+
+
diff --git a/pysal/contrib/network/klincs.py b/pysal/contrib/network/klincs.py
new file mode 100644
index 0000000..3bbdd4b
--- /dev/null
+++ b/pysal/contrib/network/klincs.py
@@ -0,0 +1,233 @@
+#!/usr/env python
+
+"""
+A library for computing local K function for network-constrained data
+
+Author:
+Andrew Winslow Andrew.Winslow at asu.edu
+Myunghwa Hwang mhwang4 at gmail.com
+
+"""
+import unittest
+import numpy as np
+import geodanet.network as pynet
+import geodanet.kfuncs as pykfuncs
+import geodanet.simulator as pysim
+import time
+import random
+import platform                                                                                                                 
+try:
+    if platform.system() == 'Darwin':
+        import multiprocessing
+    else:
+        multiprocessing = None
+except ImportError:
+    multiprocessing = None 
+
+class WeightedRandomSampleGenerator(object):
+    """
+    A generator for randomly sampling n elements from 
+    a population group with consideration to a given set of weights
+    """
+
+    def __init__(self, weights, population, n):
+	"""
+	weights: an iterable with m numeric elements
+	population: a numpy array with m elements
+	n: an integer representing sample size
+	"""
+        self.totals = np.cumsum(weights)
+        self.population = population
+        self.n = n
+        self.norm = self.totals[-1]
+
+    def next(self):
+        sample = []
+        for i in xrange(self.n):
+            throw = np.random.rand()*self.norm
+            sample.append(self.population[np.searchsorted(self.totals, throw)])
+        return sample
+
+    def __call__(self):
+        return self.next()
+
+class RandomSampleGenerator(object):
+    """
+    A generator for randomly sampling n elements 
+    from a population group
+    """
+    def __init__(self, population, n):
+	"""
+	population: a numpy array with m elements
+	n: an integer representing sample size
+	"""
+        self.population = population
+        self.n = n
+
+    def next(self):
+        return random.sample(self.population, self.n)
+
+    def __call__(self):
+        return self.next()
+
+def local_k(network, events, refs, scale_set, cache=None):
+    """
+    Computes local K function
+
+    network: an undirected network data to which reference points are injected
+    refs: a set of reference points on the given network
+          points unprojected into the network
+    events: a set of event points on the given network
+            points projected into the network
+    scale_set: a tuple defining spatial scales to be examined
+               (min, max, interval)
+    """
+
+    node2localK = {}
+    net_distances = {}
+    if cache: net_distances = cache
+    for node in refs:
+        node = node[1][0]
+        a_dest = network[node].keys()[0]
+        node_proj = (node, a_dest, 0, network[node][a_dest])
+        if node not in net_distances:
+            net_distances[node] = pynet.dijkstras(network, node, scale_set[1])
+        if a_dest not in net_distances:
+            net_distances[a_dest] = pynet.dijkstras(network, node, scale_set[1])
+        distances = pynet.proj_distances_undirected(network, node_proj, events, scale_set[1], cache=net_distances).values()
+        node2localK[node] = pykfuncs.kt_values(scale_set, distances, 1)
+    return node2localK, net_distances
+
+def cluster_type(obs, lower, upper):
+    if obs < lower: return -1
+    if obs > upper: return 1
+    return 0
+
+def simulate_local_k_01(args):
+    sims = args[0]
+    n = args[1]
+    net_file = args[2]
+    network = args[3]
+    events = args[4]
+    refs = args[5]
+    scale_set = args[6]
+    cache = args[7]
+
+    #print 'simulated_local_k_01'
+    simulator = pysim.Simulation(net_file)
+    sims_outcomes = []
+    for sim in xrange(sims):
+        points = simulator.getRandomPoints(n, projected=True)
+        sim_events = []
+        for edge in points:
+            for point in points[edge]:
+                sim_events.append(point)
+        res, dists = local_k(network, sim_events, refs, scale_set, cache=cache)
+        sims_outcomes.append(res)
+
+    return sims_outcomes
+
+def simulate_local_k_02(args):
+    sims = args[0]
+    n = args[1]
+    refs = args[2]
+    scale_set = args[3]
+    cache = args[4]
+
+    #print 'simulated_local_k_02'
+    sims_outcomes = []
+    sampler = RandomSampleGenerator(refs, n).next
+    for sim in xrange(sims):
+        sim_events = sampler()
+        sim_localk = {}
+        for node in refs:
+            all_distances = cache[node[1][0]]
+            distances = []
+            for event in sim_events:
+                event = event[1][0]
+                if event in all_distances:
+                    distances.append(all_distances[event]) 
+            sim_localk[node[1][0]] = pykfuncs.kt_values(scale_set, distances, 1)
+        sims_outcomes.append(sim_localk)
+    
+    return sims_outcomes
+
+def k_cluster(network, events, refs, scale_set, sims, sig=0.1, sim_network=None, cpus=1):
+
+    """
+    Detects local clusters of network-constrained events by comparing
+    observed local K function values against simulation envelopes.
+
+    Parameters:
+    network: a network to which reference points are injected
+    events: a set of event points projected into the network
+    refs: a set of reference points unprojected into the network
+    scale_set: tuple same as (min, max, resolution)
+    sims: integer; the number of simulations
+    sig: float; significance level (assumed to be in (0, 1]; see note below)
+    sim_network: the source shape file containing the network data
+                 this is used to simulate point patterns for inference
+    cpus: integer: the number of cpus
+          multiprocessing can be used for inference
+
+    Returns:
+    a dictionary mapping each reference node to {scale: [observed K(t),
+    lower envelope, upper envelope, cluster type]}, where cluster type is
+    -1 (dispersion), 0 (not significant), or 1 (clustering)
+    """
+
+    """
+    1. For an observed set of n events on the network, calculate local K function 
+    values for all m reference points
+    """
+    node2localK, net_dists = local_k(network, events, refs, scale_set)
+    """
+    When n < m (simulator == None):
+    2. Select n out of m reference points randomly and 
+    calculate local K function values for these randomly sampled points
+    When n >= m (simulator != None):
+    2. Randomly simulate n points on network edges and
+    calculate local K function values for these randomly simulated points
+    3. Repeat 2 as many as the number of simulations
+    Note: on Darwin systems, simulation will be parallelized
+    """
+    # Each simulation draws as many points as there are observed events.
+    n = len(events)
+    sims_outcomes = []
+    if not multiprocessing or cpus == 1:
+        # Serial path: run all sims in this process, reusing net_dists.
+        if sim_network:
+            sims_outcomes = simulate_local_k_01((sims, n, sim_network, network, events, refs, scale_set, net_dists))
+        else:
+            sims_outcomes = simulate_local_k_02((sims, n, refs, scale_set, net_dists))
+    elif multiprocessing and cpus >= 2:
+        pool = multiprocessing.Pool(cpus)
+        # Partition the simulation count across workers; each entry of
+        # sims_list is the number of simulations one worker will run.
+        sims_list = range(sims)
+        sims_list = map(len, [sims_list[i::cpus] for i in xrange(cpus)])
+        partial_outcomes = None
+        if sim_network:
+             partial_outcomes = pool.map(simulate_local_k_01, 
+                         [(sim, n, sim_network, network, events, refs, scale_set, net_dists) for sim in sims_list])
+        else:
+             partial_outcomes = pool.map(simulate_local_k_02, 
+                         [(sim, n, refs, scale_set, net_dists) for sim in sims_list])
+        # Concatenate the per-worker outcome lists into one flat list.
+        sims_outcomes = partial_outcomes[0]
+        for partial in partial_outcomes[1:]:
+             sims_outcomes.extend(partial)
+
+    """
+    4. Determine lower and upper envelopes for the observed K function values 
+       as well as the type of cluster (dispersion or clustering)
+    """
+    # 4. P-value evaluation
+    lower_envelope = {}
+    upper_envelope = {}
+    # Envelope positions in the sorted simulated values (two-sided).
+    # NOTE(review): upper_p equals sims when sig == 0.0, which would index
+    # past the end of local_outcomes — assumed sig > 0. Confirm.
+    lower_p = int(sims*sig/2)
+    upper_p = int(sims*(1-sig/2))
+    localKs = {}
+    for node in refs:
+        node = node[1][0]
+        lower_envelope[node] = {}
+        upper_envelope[node] = {}
+        localKs[node] = {}
+        for scale in node2localK[node].keys():
+            # Simulated K(t) values for this node and scale, sorted so the
+            # envelope can be read off by rank.
+            local_outcomes = [sim[node][scale] for sim in sims_outcomes]
+            local_outcomes.sort()
+            obs = node2localK[node][scale]
+            lower = local_outcomes[lower_p]
+            upper = local_outcomes[upper_p]
+            cluster = cluster_type(obs, lower, upper)
+            localKs[node][scale] = [obs, lower, upper, cluster]
+
+    return localKs
+
diff --git a/pysal/contrib/network/lincs.py b/pysal/contrib/network/lincs.py
new file mode 100644
index 0000000..d3ee5b0
--- /dev/null
+++ b/pysal/contrib/network/lincs.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+
+"""
+A library for computing local indicators of network-constrained clusters
+
+Author:
+Myunghwa Hwang mhwang4 at gmail.com
+
+"""
+import unittest
+import numpy as np
+import scipy.stats as stats
+import geodanet.network as pynet
+import pysal, copy
+import time
+
+def unconditional_sim(event, base, s): 
+    """ 
+    Parameters:
+        event: n*1 numpy array with integer values
+              observed values for an event variable
+        base: n*1 numpy array with integer values
+              observed values for a population variable
+        s: integer
+              the number of simulations
+
+    Returns:
+            : n*s numpy array
+    """
+    mean_risk = event.sum()*1.0/base.sum()
+    if base.dtype != int:
+        base = np.array([int(v) for v in base])
+    base_zeros = (base == 0.0)
+    base[base_zeros] += 1.0
+    sims = np.random.binomial(base, mean_risk, (s, len(event))).transpose()
+    sims[base_zeros, :] = 0.0
+    return sims
+
+def unconditional_sim_poisson(event, base, s): 
+    """ 
+    Parameters:
+        event: n*1 numpy array with integer values
+              observed values for an event variable
+        base: n*1 numpy array with integer values
+              observed values for a population variable
+        s: integer
+              the number of simulations
+
+    Returns:
+            : n*s numpy array
+    """
+    mean_risk = event.sum()*1.0/base.sum()
+    E = base*mean_risk
+    return np.random.poisson(E, (s, len(event))).transpose()
+
+def conditional_multinomial(event, base, s): 
+    """ 
+    Parameters:
+        event: n*1 numpy array with integer values
+              observed values for an event variable
+        base: n*1 numpy array with integer values
+              observed values for a population variable
+        s: integer
+              the number of simulations
+
+    Returns:
+            : n*s numpy array
+    """
+    m = int(event.sum())
+    props = base*1.0/base.sum()
+    return np.random.multinomial(m, props, s).transpose()
+
+def pseudo_pvalues(obs, sims):
+    """
+    Get pseudo p-values from a set of observed indices and their simulated ones.
+
+    Parameters:
+        obs: n*1 numpy array for observed values
+        sims: n*sims numpy array; sims is the number of simulations
+
+    Returns:
+        p_sim : n*1 numpy array for pseudo p-values
+        E_sim : mean of p_sim
+        SE_sim: standard deviation of p_sim
+        V_sim: variance of p_sim
+        z_sim: standardarized observed values
+        p_z_sim: p-value of z_sim based on normal distribution   
+    """
+
+    sims = np.transpose(sims)
+    permutations = sims.shape[0]
+    above = sims >= obs
+    larger = sum(above)
+    low_extreme = (permutations - larger) < larger
+    larger[low_extreme] = permutations - larger[low_extreme]
+    p_sim = (larger + 1.0)/(permutations + 1.0)
+    E_sim = sims.mean()
+    SE_sim = sims.std()
+    V_sim = SE_sim*SE_sim
+    z_sim = (obs - E_sim)/SE_sim
+    p_z_sim = 1 - stats.norm.cdf(np.abs(z_sim))
+    return p_sim, E_sim, SE_sim, V_sim, z_sim, p_z_sim 
+
+def node_weights(network, attribute=False):
+    """
+    Obtains a spatial weights matrix of edges in a network
+    if two edges share a node, they are neighbors
+
+    Parameters:
+        network: a network with/without attributes
+        attribute: boolean
+                   if true, attributes of edges are added to a dictionary of edges,
+                   which is a return value
+
+    Returns:
+        w: a spatial weights instance
+        id2link: an associative dictionary that connects a sequential id to a unique 
+                 edge on the network
+                 if attribute is true, each item in the dictionary includes the attributes
+
+    """
+    link2id, id2link = {}, {}
+    counter = 0 
+    neighbors, weights = {},{}
+    for n1 in network:
+        for n2 in network[n1]:
+            # Process an edge when it has no id yet, or when it has an id
+            # but its neighbor list has not been recorded.
+            if (n1,n2) not in link2id or link2id[(n1,n2)] not in neighbors:
+                if (n1,n2) not in link2id:
+                    # Register the edge under both orientations with one id.
+                    link2id[(n1,n2)] = counter
+                    link2id[(n2,n1)] = counter
+                    if not attribute:
+                        id2link[counter] = (n1, n2) 
+                    else:
+                        # Attach the edge's non-length attributes after the
+                        # endpoint pair (edge value assumed [length, attrs...]).
+                        id2link[counter] = tuple([(n1,n2)] + list(network[n1][n2][1:]))
+                    counter += 1
+                # Neighbors of (n1, n2) are all other edges incident to
+                # either endpoint.
+                neighbors_from_n1 = [(n1, n) for n in network[n1] if n != n2] 
+                neighbors_from_n2 = [(n2, n) for n in network[n2] if n != n1] 
+                neighbors_all = neighbors_from_n1 + neighbors_from_n2
+                neighbor_ids = []
+                for edge in neighbors_all:
+                    if edge not in link2id:
+                        # First sighting of this neighbor edge: assign an id.
+                        link2id[edge] = counter
+                        link2id[(edge[-1], edge[0])] = counter
+                        if not attribute:
+                            id2link[counter] = edge
+                        else:
+                            id2link[counter] = tuple([edge] + list(network[edge[0]][edge[1]][1:]))
+                        neighbor_ids.append(counter)    
+                        counter += 1
+                    else:
+                        neighbor_ids.append(link2id[edge])
+                neighbors[link2id[(n1,n2)]] = neighbor_ids
+                # Binary contiguity: every shared-node neighbor weighs 1.0.
+                weights[link2id[(n1,n2)]] = [1.0]*(len(neighbors_from_n1) + len(neighbors_from_n2))
+    return pysal.weights.W(neighbors, weights), id2link 
+
+def edgepoints_from_network(network, attribute=False):
+    """
+    Obtains a list of projected points which are midpoints of edges
+    
+    Parameters:
+        network: a network with/without attributes
+        attribute: boolean
+                   if true, one of return values includes attributes for each edge
+
+    Returns:
+        id2linkpoints: a dictionary that associates a sequential id to a projected, midpoint of each edge
+        id2attr: a dictionary that associates a sequential id to the attributes of each edge
+        link2id: a dictionary that associates each edge to its id
+    """
+    link2id, id2linkpoints, id2attr = {}, {}, {}
+    counter = 0
+    for n1 in network:
+        for n2 in network[n1]:
+            if (n1,n2) not in link2id or (n2,n1) not in link2id:
+                link2id[(n1,n2)] = counter
+                link2id[(n2,n1)] = counter
+                if type(network[n1][n2]) != list:
+                    half_dist = network[n1][n2]/2 
+                else:
+                    half_dist = network[n1][n2][0]/2 
+                if n1[0] < n2[0] or (n1[0] == n2[0] and n1[1] < n2[1]):
+                    id2linkpoints[counter] = (n1,n2,half_dist,half_dist)
+                else:
+                    id2linkpoints[counter] = (n2,n1,half_dist,half_dist)
+                if attribute:
+                    id2attr[counter] = network[n1][n2][1:]
+                counter += 1
+    return id2linkpoints, id2attr, link2id
+
+def dist_weights(network, id2linkpoints, link2id, bandwidth):
+    """
+    Obtains a distance-based spatial weights matrix using network distance
+
+    Parameters:
+        network: an undirected network without additional attributes 
+        id2linkpoints: a dictionary that includes a list of network-projected, midpoints of edges in the network
+        link2id: a dictionary that associates each edge to a unique id
+        bandwidth: a threshold distance for creating a spatial weights matrix
+
+    Returns:
+        w : a distance-based, binary spatial weights matrix
+        id2link: a dictionary that associates a unique id to each edge of the network
+    """
+    linkpoints = id2linkpoints.values()
+    neighbors, id2link = {}, {}
+    net_distances = {}
+    for linkpoint in id2linkpoints:
+        if linkpoints[linkpoint] not in net_distances:
+            net_distances[linkpoints[linkpoint][0]] = pynet.dijkstras(network, linkpoints[linkpoint][0], r=bandwidth)
+            net_distances[linkpoints[linkpoint][1]] = pynet.dijkstras(network, linkpoints[linkpoint][1], r=bandwidth)
+        ngh = pynet.proj_distances_undirected(network, linkpoints[linkpoint], linkpoints, r=bandwidth, cache=net_distances)
+        #ngh = pynet.proj_distances_undirected(network, linkpoints[linkpoint], linkpoints, r=bandwidth)
+        if linkpoints[linkpoint] in ngh:
+            del ngh[linkpoints[linkpoint]]
+        if linkpoint not in neighbors:
+            neighbors[linkpoint] = []
+        for k in ngh.keys():
+            neighbor = link2id[k[:2]]
+            if neighbor not in neighbors[linkpoint]:
+                neighbors[linkpoint].append(neighbor)
+            if neighbor not in neighbors:
+                neighbors[neighbor] = []
+            if linkpoint not in neighbors[neighbor]:
+                neighbors[neighbor].append(linkpoint)
+        id2link[linkpoint] = id2linkpoints[linkpoint][:2]
+    weights = copy.copy(neighbors)
+    for ngh in weights:
+        weights[ngh] = [1.0]*len(weights[ngh])
+    return pysal.weights.W(neighbors, weights), id2link
+
+
+def lincs(network, event, base, weight, dist=None, lisa_func='moran', sim_method="permutations", sim_num=99):
+    """
+    Compute local Moran's I for edges in the network
+
+    Parameters:
+        network: a clean network where each edge has up to three attributes:
+                 Its length, an event variable, and a base variable
+        event: integer
+               an index for the event variable 
+        base: integer 
+              an index for the base variable
+        weight: string
+                type of binary spatial weights
+                two options are allowed: Node-based, Distance-based
+        dist: float
+              threshold distance value for the distance-based weight
+        lisa_func: string
+                   type of LISA functions
+                   three options allowed: moran, g, and g_star
+        sim_method: string
+                    type of simulation methods
+                    four options allowed: permutations, binomial (unconditional),
+                    poisson (unconditional), multinomial (conditional)
+        sim_num: integer
+                 the number of simulations
+
+    Returns:
+               : a dictionary of edges
+                 an edge and its moran's I are the key and item
+               : a Weights object
+                 PySAL spatial weights object
+
+    """
+    if lisa_func in ['g', 'g_star'] and weight == 'Node-based':
+        print 'Local G statistics can work only with distance-based weights matrix'
+        raise 
+
+    if lisa_func == 'moran':
+        lisa_func = pysal.esda.moran.Moran_Local
+    else:
+        lisa_func = pysal.esda.getisord.G_Local
+
+    star = False
+    if lisa_func == 'g_star':
+        star = True    
+
+    if base:
+        def getBase(edges, edge, base):
+            return edges[edge][base]
+    else:
+        def getBase(edges, edge, base):
+            return 1.0
+    w, edges, e, b, edges_geom = None, None, None, None, []
+    if weight == 'Node-based':
+        w, edges = node_weights(network, attribute=True)
+	n = len(edges)
+	e, b = np.zeros(n), np.zeros(n)
+	for edge in edges:
+            edges_geom.append(edges[edge][0])
+	    e[edge] = edges[edge][event]
+            b[edge] = getBase(edges, edge, base)
+        w.id_order = edges.keys()
+    elif dist is not None:
+        id2edgepoints, id2attr, edge2id = edgepoints_from_network(network, attribute=True)
+        for n1 in network:
+            for n2 in network[n1]:
+                network[n1][n2] = network[n1][n2][0]
+        w, edges = dist_weights(network, id2edgepoints, edge2id, dist)
+        n = len(id2attr)
+	e, b = np.zeros(n), np.zeros(n)
+        if base:
+            base -= 1
+	for edge in id2attr:
+            edges_geom.append(edges[edge])
+	    e[edge] = id2attr[edge][event - 1]
+            b[edge] = getBase(id2attr, edge, base)
+        w.id_order = id2attr.keys()
+
+    Is, p_sim, Zs = None,None, None
+    if sim_method == 'permutation':
+        if lisa_func == pysal.esda.moran.Moran_Local:
+	    lisa_i = lisa_func(e*1.0/b,w,transformation="r",permutations=sim_num)
+            Is = lisa_i.Is
+            Zs = lisa_i.q
+        else:
+	    lisa_i = lisa_func(e*1.0/b,w,transform="R",permutations=sim_num,star=star)
+            Is = lisa_i.Gs
+            Zs = lisa_i.Zs
+        p_sim = lisa_i.p_sim
+    else:
+	sims = None
+        if lisa_func == pysal.esda.moran.Moran_Local:
+	    lisa_i = lisa_func(e*1.0/b,w,transformation="r",permutations=0)
+            Is = lisa_i.Is
+            Zs = lisa_i.q
+        else:
+	    lisa_i = lisa_func(e*1.0/b,w,transform="R",permutations=0,star=star)
+	    Is = lisa_i.Gs
+	    Zs = lisa_i.Zs
+	if sim_method == 'binomial':
+	    sims = unconditional_sim(e, b, sim_num)
+	elif sim_method == 'poisson':
+	    sims = unconditional_sim_poisson(e, b, sim_num)
+	else:
+	    sims = conditional_multinomial(e, b, sim_num)
+        if lisa_func == pysal.esda.moran.Moran_Local:
+	    for i in range(sim_num):
+		sims[:,i] = lisa_func(sims[:,i]*1.0/b,w,transformation="r",permutations=0).Is
+        else:
+	    for i in range(sim_num):
+		sims[:,i] = lisa_func(sims[:,i]*1.0/b,w,permutations=0,star=star).Gs
+	sim_res = pseudo_pvalues(Is, sims)
+	p_sim = sim_res[0]
+
+    w.transform = 'O'
+    return zip(edges_geom, e, b, Is, Zs, p_sim), w
+
+        
diff --git a/pysal/contrib/network/network.py b/pysal/contrib/network/network.py
new file mode 100644
index 0000000..5e0e8b2
--- /dev/null
+++ b/pysal/contrib/network/network.py
@@ -0,0 +1,599 @@
+import math
+import pysal
+from pysal.cg.shapes import Point, Chain, LineSegment, Rectangle
+from pysal.cg.locators import Grid
+import random, copy
+from heapq import heappush, heappop
+import time
+
+def no_nodes(G):
+    """
+    returns the number of nodes in a undirected network
+    """
+    return len(G)
+
+def no_edges(G):
+    """
+    returns the number of edges in a undirected network
+    """
+    e = 0.0
+    for n in G:
+        e += len(G[n])
+    return e/2.0 
+
+def tot_net_length(G):
+    """
+    returns the total length of a undirected network
+    """
+    l = 0.0
+    done = set()
+    for n in G:
+        for m in G[n]:
+            if m in done: continue
+            l += G[n][m]
+        done.add(n)
+    return l/2.0
+
+def walk(G, s, S=set()):
+    """ 
+    Returns a traversal path from s on G
+    source: Python Algorithms Mastering Basic Algorithms in the Python Language, 2010, p.104
+    """
+    P, Q, SG = dict(), set(), dict()
+    P[s] = None
+    SG[s] = G[s]
+    Q.add(s)
+    while Q:
+        u = Q.pop()
+        for v in set(G[u].keys()).difference(P, S):
+            Q.add(v)
+            P[v] = u
+            SG[v] = G[v]
+    return SG
+
+def components(G):
+    """ 
+    Returns connected components of G
+    source: Python Algorithms Mastering Basic Algorithms in the Python Language, 2010, p.105
+    Complexity: O(E+V) where E is the number of edges and V is the number of nodes in a graph
+    """
+    comp, seen = [], set()
+    for u in G:
+        if u in seen: continue
+        C = walk(G, u)
+        seen.update(set(C.keys()))
+        comp.append(C)
+    return comp
+
+def no_components(G):
+    # Number of connected components in the undirected network G.
+    return len(components(G))
+
+def net_global_stats(G, boundary=None, detour=True):
+    """
+    Computes global descriptive statistics for an undirected network.
+
+    Parameters:
+        G: an undirected network (node -> {neighbor: edge length})
+        boundary: optional path to a polygon shape file; when given, the
+                  area of its first record is read (see note below)
+        detour: boolean; if True, also compute the detour ratio of
+                Euclidean to network distances over all node pairs
+
+    Returns a 12-tuple:
+        (v, e, L, p, u, alpha, beta, emax, gamma, eta, net_den, det)
+
+    NOTE(review): for very small networks several ratios divide by zero
+    or go negative (e.g. 2*v - 5 for v <= 2, emax for v == 2, eta for
+    e == 0) — assumed v >= 3 and e >= 1; confirm with callers.
+    """
+    v = no_nodes(G)
+    e = no_edges(G)
+    L = tot_net_length(G)
+    p = no_components(G)
+    u = e - v + p # cyclomatic number
+    # Classic graph-theoretic indices of network structure.
+    alpha = u*1.0/(2*v - 5)
+    beta = e*1.0/v
+    emax = 3*(v-2)
+    gamma = e*1.0/emax
+    # Average edge length.
+    eta = L*1.0/e
+    net_den = None
+    if boundary:
+        s = pysal.open(boundary)
+        if s.type != pysal.cg.shapes.Polygon: 
+            raise ValueError, 'File is not of type POLYGON'
+        # NOTE(review): this stores the area of the FIRST polygon record,
+        # not a density; despite the name, L is never divided by it here.
+        net_den = s.next().area
+    net_dist, eucl_dist = 0.0, 0.0
+    det = None
+    if detour:
+        # Sum network and Euclidean distances over all node pairs; each
+        # unordered pair is counted twice, hence the later halving.
+        nodes = G.keys()
+        for n in nodes:
+            net_D = dijkstras(G, n)
+            net_dist += sum(net_D.values())
+            eucl_D = [ math.sqrt((n[0] - m[0])**2 + (n[1] - m[1])**2) for m in nodes]
+            eucl_dist += sum(eucl_D)
+        net_dist /= 2.0
+        eucl_dist /= 2.0
+        if net_dist > 0.0:
+            det = eucl_dist*1.0/net_dist
+    return v, e, L, p, u, alpha, beta, emax, gamma, eta, net_den, det
+
+
+def random_projs(G, n):
+    """
+    Returns a list of random locations on the network as projections
+    with the form (src, dest, dist_from, dist_from_src, dist_from_dest)
+    """
+
+    def binary_search(list, q):
+        l = 0
+        r = len(list)
+        while l < r:
+            m = (l + r)/2
+            if list[m][0] > q:
+                r = m
+            else:
+                l = m + 1
+        return list[l][1]
+
+    total_net_len = 0
+    for src in G:
+        for dest in G[src]:
+            total_net_len += G[src][dest]
+
+    lengthogram = [(0, (None, None))]
+    for src in G:
+        for dest in G[src]:
+            lengthogram.append((lengthogram[-1][0] + G[src][dest], (src, dest)))
+
+    projs = []
+    for i in xrange(n):
+        e = binary_search(lengthogram, random.random() * total_net_len)
+        wgt = G[e[0]][e[1]]
+        along = wgt * random.random()
+        # (src, dest, dist_from_src, dist_from_dest)
+        projs.append((e[0], e[1], along, wgt - along))
+
+    return projs
+
+def proj_distances_undirected(G, src, dests, r=1e600, cache=None):
+    if cache and src[0] in cache:
+        SND = cache[src[0]]
+    else:
+        SND = dijkstras(G, src[0], r) # Distance from edge start node to other nodes
+    if cache and src[1] in cache:
+        DND = cache[src[1]]
+    else:
+        DND = dijkstras(G, src[1], r) # Distance from edge end node to other nodes
+    D = {}
+    for d in dests:
+        # If the dest lies on the same edge as the src (or its inverse)
+        if (d[0] == src[0] and d[1] == src[1]) or (d[0] == src[1] and d[1] == src[0]):
+                dist = abs(src[2] - d[2])
+        else:
+            # get the four path distances
+            # src edge start to dest edge start
+            src2src, src2dest, dest2src, dest2dest = 1e600, 1e600, 1e600, 1e600
+            if d[0] in SND: 
+                src2src = src[2] + SND[d[0]] + d[2] 
+            # src edge start to dest edge end
+            if d[1] in SND:
+                src2dest = src[2] + SND[d[1]] + d[3] 
+            # src edge end to dest edge start
+            if d[0] in DND:
+                dest2src = src[3] + DND[d[0]] + d[2]
+             # src edge end to dest edge end
+            if d[1] in DND:
+                dest2dest = src[3] + DND[d[1]] + d[3]
+            dist = min(src2src, src2dest, dest2src, dest2dest)
+
+        if dist <= r:
+            D[d] = dist
+
+    return D
+
+def proj_distances_directed(G, src, dests, r=1e600):
+    ND = dijkstras(G, src[1], r) # Distance from edge destination node to other nodes
+    D = {}
+    for d in dests:
+        if d[0] in ND:
+            if d[0] == src[0] and d[1] == src[1]: # Same edge and dest further along
+                dist = abs(src[2] - d[2])
+                if dist <= r:
+                    D[d] = dist
+            else:
+                # dist from edge proj to end of src edge + 
+                # dist from src edge dest node to dest edge src node +
+                # dist from start of dest edge to edge proj
+                dist = src[3] + ND[d[0]] + d[2] 
+                #print dist
+                if dist <= r:
+                    D[d] = dist
+    return D
+
+def relax(G, u, v, D, P, r=1e600): 
+    """ 
+    Update the distance to v 
+    if the route to v through u is shorter than the existing route to v
+    Code from Hetland 2010 
+    Python Algorithms Mastering Basic Algorithms in the Python Language, p.200
+    """
+    d = D.get(u, r) + G[u][v]
+    if d <= D.get(v, r):
+        D[v], P[v] = d, u
+        return True
+
+def dijkstras(G, start, r=1e600, p=False): 
+    """ 
+    Find a shortest path from s to all nodes in the network G
+    Code from Hetland 2010 
+    Python Algorithms Mastering Basic Algorithms in the Python Language, p.205
+    Complexity: O(M*lgN) where M is the number of edges and N is the number of nodes
+    """
+    D, P, Q, S = {start:0}, {}, [(0,start)], set()  # Distance estimates, tree (path), queue, visited
+    while Q:
+        _, u = heappop(Q)
+        if u in S: 
+            continue 
+        S.add(u)
+        for v in G[u]:
+            relaxed = relax(G, u, v, D, P, r=r)
+            if relaxed: 
+                heappush(Q, (D[v], v)) 
+    if p: 
+        return D, P
+    return D
+
+class Snapper:
+    """
+    Snaps points to their nearest location on the network.
+
+    Uses a novel algorithm which relies on two properties of the input network:
+    1.  Most of the edges are very short relative to the total area 
+        encompassed by the network.
+    2.  The edges have a relatively constant density throughout this area.
+
+    The algorithm works by creating a binning of the midpoints of all the edges.
+    When a query point is given, all the edges in a region around the query
+    point (located by their binned midpoints) are compared to the query point. 
+    If none are found, the neighborhood is enlarged. When a closest edge is found, 
+    the neighborhood is enlarged slightly and checked again. The enlargement is 
+    such that if the closest edge found remains the closest edge, then it will
+    always be the closest edge.
+    """
+
+    def __init__(self, network):
+        """
+        Test tag <tc>#is#Snapper.__init__</tc>
+        """
+
+        """
+        Generate a list of the edge lengths and pick a maximum length
+        allowed based on the median length. This maximum length will be
+        used to use multiple midpoints to represent edges which are 
+        exceptionally long, lest they ruin the efficiency of the algorithm.
+        """
+
+        # Generate list of lengths
+        self.network = network
+        edge_lens = []
+        for n in network:
+            for m in network[n]:
+                if n != m: 
+                    edge_lens.append(pysal.cg.get_points_dist(Point(n), Point(m))) # it can be optional
+        if edge_lens == []:
+            raise ValueError, 'Network has no positive-length edges'
+        edge_lens.sort()
+        max_allowed_edge_len = 5 * edge_lens[len(edge_lens)/2]
+
+        """ 
+        Create a bin structures with proper range to hold all of the edges.
+        The size of the bin is on the order of the length of the longest edge (and
+        of the neighborhoods searched around each query point.
+        """
+        endpoints = network.keys()
+        endpoints_start, endpoints_end = [ep[0] for ep in endpoints], [ep[1] for ep in endpoints]
+        bounds = Rectangle(min(endpoints_start),min(endpoints_end),max(endpoints_start),max(endpoints_end))
+        self.grid = Grid(bounds, max_allowed_edge_len)
+
+        """
+        Insert the midpoint of each edge into the grid. If an edge is too long, 
+        break it into edges of length less than the maximum allowed length and
+        add the midpoint of each.
+        """
+        self.search_rad = max_allowed_edge_len*0.55
+        for n in network:
+            for m in network[n]:
+                edge_len = pysal.cg.get_points_dist(Point(n), Point(m)) # it can be a direct extraction
+                if edge_len > max_allowed_edge_len:
+                    mid_edge = []
+                    num_parts = int(math.ceil(edge_len/max_allowed_edge_len))
+                    part_step = 1.0/num_parts
+                    dx = m[0] - n[0]
+                    dy = m[1] - n[1]
+                    midpoint = (n[0] + dx*part_step/2, n[1] + dy*part_step/2)
+                    for r in [part_step*t for t in xrange(num_parts)]:
+                        mid_edge.append(((n, m), midpoint))
+                        midpoint = (midpoint[0] + dx*part_step, midpoint[1] + dy*part_step)
+                    for me in mid_edge:
+                        self.grid.add(me[0], Point(me[1]))
+                else:
+                    self.grid.add((n, m), Point(((n[0] + m[0])/2, (n[1] + m[1])/2)))
+
+        """
+        During the snapping of a query point we will initialize the closest point on the network
+        to be a dummy location known to be invalid. This must be done in case the neighborhood
+        search does not find any edge midpoints and it must be grown repeatedly. In this case
+        we want to make sure we don't give up having not found a valid closest edge.
+        """
+        self.dummy_proj = (None, None, 0, 0) # Src, dest, dist_from_src, dist_from_dest)
+
+    def snap(self, p):
+        """
+        Test tag <tc>#is#Snapper.snap</tc>
+        """
+
+        """
+        Initialize the closest location found so far to be infinitely far away.
+        Then begin with a neighborhood on the order of the maximum edge allowed and
+        repeatedly growing it. When a closest edge is found, grow once more and check again.
+        """
+        
+        cur_s_rad = self.search_rad
+        found_something = False
+        # Whle neighborhood is empty, enlarge and check again    
+        while not found_something: 
+            if self.grid.proximity(Point(p), cur_s_rad) != []:
+                found_something = True
+            cur_s_rad *= 2
+        # Expand to include any edges whose endpoints might lie just outside
+        # the search radius
+        cur_s_rad += self.search_rad
+        # Now find closest in this neighborhood
+        best_seg_dist = 1e600
+        for e in self.grid.proximity(Point(p), cur_s_rad):
+            seg = LineSegment(Point(e[0]), Point(e[1]))
+            p2seg = pysal.cg.get_segment_point_dist(seg, Point(p))
+            dist = p2seg[0]
+            if p2seg[0] < best_seg_dist:
+                # (src, dest, dist_from_src, dist_from_dest)
+                best_proj = (e[0], e[1], dist*p2seg[1], dist*(1-p2seg[1]))
+                best_seg_dist = p2seg[0]
+        return best_proj
+
def network_from_endnodes(s, d, wgt, undirected=True):
    """
    Build a network dictionary using only the first and last vertex of
    each chain geometry.

    Parameters
    ----------
    s          : iterable of chain geometries (each exposing ``vertices``)
    d          : iterable of the matching attribute records
    wgt        : callable(geometry, record) -> edge weight
    undirected : bool
                 when True, store each edge in both directions

    Returns
    -------
    dict
        {start_node: {end_node: weight, ...}, ...}.  Both inputs are
        closed before returning.
    """
    graph = {}
    for geom, rec in zip(s, d):
        head = geom.vertices[0]
        tail = geom.vertices[-1]
        weight = wgt(geom, rec)
        graph.setdefault(head, {})[tail] = weight
        tail_nbrs = graph.setdefault(tail, {})
        if undirected:
            tail_nbrs[head] = weight
    s.close()
    d.close()
    return graph
+
def network_from_allvertices(s, d):
    """
    Build an undirected network dictionary using every vertex of every
    chain; each edge is weighted by its euclidean segment length.

    Parameters
    ----------
    s : iterable of chain geometries (each exposing ``vertices``)
    d : iterable of the matching attribute records (only iterated)

    Returns
    -------
    dict
        {node: {neighbor: distance, ...}, ...}.  Both inputs are closed
        before returning.
    """
    graph = {}
    for geom, rec in zip(s, d):
        verts = geom.vertices
        # walk consecutive vertex pairs along the chain
        for a, b in zip(verts[:-1], verts[1:]):
            seg_len = pysal.cg.get_points_dist(Point(a), Point(b))
            graph.setdefault(a, {})[b] = seg_len
            graph.setdefault(b, {})[a] = seg_len
    s.close()
    d.close()
    return graph
+
def read_hierarchical_network(s, d):
    """
    Build a two-level network representation from chain geometries.

    Returns
    -------
    G       : dict
              detailed network over every vertex ({node: {neighbor: dist}})
    Gj      : dict
              junction-level network connecting only chain endpoints,
              weighted by the total chain length
    G_to_Gj : dict
              maps a detailed directed segment (n1, n2) to
              [(chain_start, chain_end), distance_along_chain_to_n1]

    Both inputs are closed before returning.
    """
    G, Gj, G_to_Gj = {}, {}, {}
    for geom, rec in zip(s, d):
        verts = geom.vertices
        first, last = verts[0], verts[-1]
        Gj.setdefault(first, {})
        Gj.setdefault(last, {})
        run_len = 0.0  # distance along the chain up to the current segment
        for a, b in zip(verts[:-1], verts[1:]):
            seg_len = pysal.cg.get_points_dist(Point(a), Point(b))
            G.setdefault(a, {})[b] = seg_len
            G.setdefault(b, {})[a] = seg_len
            # info for the opposite direction
            G_to_Gj[(a, b)] = [(first, last), run_len]
            run_len += seg_len
        Gj[first][last] = run_len
        Gj[last][first] = run_len
    s.close()
    d.close()
    return G, Gj, G_to_Gj
+
+def read_network(filename, wgt_field=None, undirected=True, endnodes=False, hierarchical=False, attrs=None):
+    s = pysal.open(filename)
+    dbf = pysal.open(filename[:-3] + 'dbf')
+    if s.type != pysal.cg.shapes.Chain:
+        raise ValueError, 'File is not of type ARC'
+    if not endnodes and not undirected:
+        raise ValueError, 'Network using all vertices should be undirected'
+    if hierarchical and not (undirected and not endnodes):
+        raise ValueError, 'Hierarchial network should be undirected and use all vertices'
+    if endnodes:
+        if wgt_field and attrs == None:
+            w = dbf.header.index(wgt_field)
+            def wgt(g, r):
+                return r[w]
+        elif wgt_field and attrs:
+            attrs = [wgt_field] + attrs
+            w_indices = [dbf.header.index(field) for field in attrs]
+            def wgt(g, r):
+                return [r[w] for w in w_indices]
+        elif wgt_field is None and attrs:
+            w_indices = [dbf.header.index(field) for field in attrs]
+            def wgt(g, r):
+                d = pysal.cg.get_points_dist(Point(g.vertices[0]), Point(g.vertices[-1]))
+                return [d] + [r[w] for w in w_indices] 
+        else:
+            def wgt(g, r):
+                return pysal.cg.get_points_dist(Point(g.vertices[0]), Point(g.vertices[-1]))
+        return network_from_endnodes(s, dbf, wgt, undirected)
+    if not endnodes and not hierarchical:
+        return network_from_allvertices(s, dbf)
+    if hierarchical:
+        return read_hierarchial_netowrk(s, dbf)
+
def proj_pnt_coor(proj_pnt):
    """
    Convert a projected point (src, dest, dist_from_src, dist_from_dest)
    into its (x, y) coordinate by linear interpolation along the edge.
    """
    src, dest = proj_pnt[0], proj_pnt[1]
    edge_len = pysal.cg.get_points_dist(Point(src), Point(dest))
    # fraction of the edge travelled from src toward dest
    t = proj_pnt[2]*1.0/edge_len
    return (src[0] + (dest[0] - src[0])*t,
            src[1] + (dest[1] - src[1])*t)
+
+def inject_points(network, proj_pnts):
+
+    pnts_by_seg = {}
+    proj_pnt_coors = []
+    for pnt in proj_pnts:
+        target_edge = None
+        if (pnt[0], pnt[1]) not in pnts_by_seg and (pnt[1], pnt[0]) not in pnts_by_seg:
+            target_edge = (pnt[0], pnt[1])
+            pnts_by_seg[target_edge] = set()
+        elif (pnt[0], pnt[1]) in pnts_by_seg:
+            target_edge = (pnt[0], pnt[1])
+        elif (pnt[1], pnt[0]) in pnts_by_seg:
+            target_edge = (pnt[1], pnt[0])
+        coor = proj_pnt_coor(pnt)
+        pnts_by_seg[target_edge].add(coor)
+        proj_pnt_coors.append(coor)
+
+    new_network = copy.deepcopy(network)
+    
+    for seg in pnts_by_seg:
+        proj_nodes = set(list(pnts_by_seg[seg]) + [seg[0], seg[1]])
+        proj_nodes = list(proj_nodes)
+        if seg[0][0] == seg[1][0]:
+            proj_nodes.sort(key=lambda coords: coords[1])
+        else:
+            proj_nodes.sort()
+        proj_nodes_len = len(proj_nodes)
+        prev_seg_d, next_seg_d = 0.0, 0.0
+        for i in range(proj_nodes_len - 1):
+            start, end = proj_nodes[i], proj_nodes[i+1]
+            if start not in new_network:
+                new_network[start] = {}
+            if end not in new_network:
+                new_network[end] = {}
+            d = pysal.cg.get_points_dist(Point(start), Point(end))
+            new_network[start][end] = d
+            new_network[end][start] = d
+        if new_network.has_key(seg[0]) and new_network[seg[0]].has_key(seg[1]):
+            del new_network[seg[0]][seg[1]]
+            del new_network[seg[1]][seg[0]]
+        else:
+            print seg, network.has_key(seg[0]), network[seg[0]], network.has_key(seg[1]), network[seg[1]]
+
+    return new_network, proj_pnt_coors
+
def mesh_network(network, cellwidth, at_center=False):
    """
    Subdivide every edge of a network into segments of (at most)
    cellwidth, returning a new {node: {neighbor: distance}} network.

    When at_center is True, the first mesh node after an edge's start is
    placed half a cell along the edge so that subsequent nodes sit at
    cell centers rather than cell boundaries.
    """
    mesh_net = {}
    done = {}
    #done = set()
    for n1 in network:
        for n2 in network[n1]:
            #if n2 in done: continue
            # process each undirected edge exactly once
            if (n1,n2) in done or (n2,n1) in done:
                continue
            # fraction of the edge's length covered by one cell
            len_ratio = cellwidth*1.0/network[n1][n2]
            start, end = n1, n2
            # The order for reading a network edge is slightly different from SANET. 
            # SANET does not seem to have a set of consistent rules. 
            if n1[0] < n2[0] or (n1[0] == n2[0] and n1[1] < n2[1]):
                start, end = n2, n1
            # NOTE: these locals shadow the builtin xrange within this scope
            xrange, yrange = end[0] - start[0], end[1] - start[1]
            dx, dy = xrange*len_ratio, yrange*len_ratio
            no_segments = int(math.floor(1.0/len_ratio))
            if at_center:
                # first step is half a cell; xs[-1]/ys[-1] are captured once
                # (the half-cell point) before the comprehensions extend them
                xs = [start[0], start[0] + dx/2.0]
                ys = [start[1], start[1] + dy/2.0]
                xs = xs + [xs[-1] + i*dx for i in range(1, no_segments + 1)]
                ys = ys + [ys[-1] + i*dy for i in range(1, no_segments + 1)] 
            else:
                xs = [start[0] + i*dx for i in range(no_segments + 1)]
                ys = [start[1] + i*dy for i in range(no_segments + 1)]
            # ensure the far endpoint itself is part of the mesh
            if xs[-1] != end[0] or ys[-1] != end[1]:
                xs.append(end[0])
                ys.append(end[1])
            new_nodes = zip(xs, ys)
            # wire up consecutive mesh nodes with their euclidean length
            for i in range(len(new_nodes) - 1):
                n, m = new_nodes[i], new_nodes[i+1]
                d = pysal.cg.get_points_dist(Point(n), Point(m))
                if n not in mesh_net: mesh_net[n] = {}
                if m not in mesh_net: mesh_net[m] = {}
                mesh_net[n][m] = d
                mesh_net[m][n] = d
            done[(n1,n2)] = True
        #done.add(n1)
    return mesh_net
+
+def write_network_to_pysalshp(network, filename, header=None, field_spec=None):
+
+    if not filename.endswith('shp') and not filename.endswith('SHP'):
+        print 'filename would end with shp or SHP'
+        return
+
+    shp = pysal.open(filename, 'w')
+    dbf = pysal.open(filename[:-3] + 'dbf', 'w')
+    if not header:
+        dbf.header = ['ID', 'VALUE']
+    else:
+        dbf.header = ['ID'] + header
+    if not field_spec:
+        dbf.field_spec = [('N', 9, 0), ('N', 15, 8)]
+        def getValue(G, n, m):
+            return [G[n][m]]
+    else:
+        dbf.field_spec = [('N', 9, 0)] + field_spec
+        v = network[network.keys()[0]] 
+        if type(v) == dict:
+            v = v.values()[0]
+        if type(v) == list:
+            wrap_func = list
+        else:
+            def wrap_func(value):
+                return [value]
+        def getValue(G, n, m):
+            return wrap_func(G[n][m])
+             
+    used, counter = set(), 0
+    for n1 in network:
+        for n2 in network[n1]:
+            if n2 in used: continue
+            shp.write(Chain([Point(n1), Point(n2)]))                           
+            dbf.write([counter] + getValue(network,n1,n2))
+            counter += 1
+        used.add(n1)
+
+    shp.close()
+    dbf.close()
+
def write_valued_network_to_shp(filename, fields, types, net, values, valFunc):
    """
    Write a network as a shapefile of two-point chains, attaching to each
    edge the values of its two endpoints as computed by valFunc.

    Parameters
    ----------
    filename : str
               output shapefile path (the matching .dbf is also written)
    fields   : list
               dbf column names
    types    : list
               dbf field specs matching fields
    net      : dict
               {node: {neighbor: ...}} network
    values   : mapping passed through to valFunc
    valFunc  : callable(values, node) -> dbf value for a node
    """
    oShp = pysal.open(filename, 'w')
    oDbf = pysal.open(filename[:-3] + 'dbf', 'w')
    oDbf.header = fields
    oDbf.field_spec = types
    # Write each undirected edge exactly once.
    # (Removed the dead commented-out loop and the unused counter of the
    # original.)
    used = set()
    for n in net:
        for m in net[n]:
            if m in used: continue
            oShp.write(Chain([Point(n), Point(m)]))
            oDbf.write([valFunc(values, n), valFunc(values, m)])
        used.add(n)
    oShp.close()
    oDbf.close()
+
def write_list_network_to_shp(filename, fields, types, net):
    """
    Write a list-form network (records of ((p1, p2), attr1, attr2, ...))
    as a shapefile of two-point chains with an auto-generated ID column.
    """
    out_shp = pysal.open(filename, 'w')
    out_dbf = pysal.open(filename[:-3] + 'dbf', 'w')
    out_dbf.header = ['ID'] + fields
    out_dbf.field_spec = [('N', 9, 0)] + types
    for rec_id, rec in enumerate(net):
        geom = rec[0]
        out_shp.write(Chain([Point(geom[0]), Point(geom[1])]))
        out_dbf.write([rec_id] + list(rec[1:]))
    out_shp.close()
    out_dbf.close()
+
diff --git a/pysal/contrib/network/priordict.py b/pysal/contrib/network/priordict.py
new file mode 100644
index 0000000..744d945
--- /dev/null
+++ b/pysal/contrib/network/priordict.py
@@ -0,0 +1,80 @@
+"""
+Code adapted from source from:
+David Eppstein, UC Irvine, 8 Mar 2002
+"""
+
+from __future__ import generators
+
class PriorityDictionary(dict):
    """
    Dictionary whose keys can be retrieved in order of increasing value.

    A binary heap of (value, key) pairs shadows the dict; stale heap
    entries are lazily discarded when the smallest item is requested.
    """

    def __init__(self):
        """
        Initialize PriorityDictionary by creating binary heap
        of pairs (value,key).  Note that changing or removing a dict entry will
        not remove the old pair from the heap until it is found by smallest() or
        until the heap is rebuilt.
        """
        self.__heap = []
        dict.__init__(self)

    def smallest(self):
        """
        Find smallest item after removing deleted items from heap.
        """
        if len(self) == 0:
            raise IndexError, "smallest of empty PriorityDictionary"
        heap = self.__heap
        # Pop stale heap entries (deleted keys or outdated values) until the
        # root reflects a live dict entry; each pop is repaired by sifting
        # the last heap element down from the root.
        while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
            lastItem = heap.pop()
            insertionPoint = 0
            while 1:
                smallChild = 2*insertionPoint+1
                # pick the smaller of the two children
                if smallChild+1 < len(heap) and \
                        heap[smallChild] > heap[smallChild+1]:
                    smallChild += 1
                if smallChild >= len(heap) or lastItem <= heap[smallChild]:
                    heap[insertionPoint] = lastItem
                    break
                heap[insertionPoint] = heap[smallChild]
                insertionPoint = smallChild
        return heap[0][1]

    def __iter__(self):
        """
        Create destructive sorted iterator of PriorityDictionary.
        """
        def iterfn():
            while len(self) > 0:
                x = self.smallest()
                yield x
                del self[x]
        return iterfn()

    def __setitem__(self,key,val):
        """
        Change value stored in dictionary and add corresponding
        pair to heap.  Rebuilds the heap if the number of deleted items grows
        too large, to avoid memory leakage.
        """
        dict.__setitem__(self,key,val)
        heap = self.__heap
        if len(heap) > 2 * len(self):
            # more than half the heap is stale -- rebuild it from scratch
            self.__heap = [(v,k) for k,v in self.iteritems()]
            self.__heap.sort()  # builtin sort likely faster than O(n) heapify
        else:
            # sift the new pair up from the bottom of the heap
            newPair = (val,key)
            insertionPoint = len(heap)
            heap.append(None)
            while insertionPoint > 0 and \
                    newPair < heap[(insertionPoint-1)//2]:
                heap[insertionPoint] = heap[(insertionPoint-1)//2]
                insertionPoint = (insertionPoint-1)//2
            heap[insertionPoint] = newPair

    def setdefault(self,key,val):
        """
        Reimplement setdefault to call our customized __setitem__.
        """
        if key not in self:
            self[key] = val
        return self[key]
diff --git a/pysal/contrib/network/simulator.py b/pysal/contrib/network/simulator.py
new file mode 100644
index 0000000..d9cfcd4
--- /dev/null
+++ b/pysal/contrib/network/simulator.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+
+"""
+Author: Ran Wei, Myunghwa Hwang
+"""
+
+import pysal
+import numpy as np
+
class Simulation(object):
    """
    Monte-Carlo simulation of point patterns on a network.

    The network is read from an arc shapefile and flattened into a list of
    straight links; random points are generated by mapping uniform draws
    on [0, 1] onto an "imaginary line" built by laying all links end to
    end.

    Authors: Ran Wei, Myunghwa Hwang
    """

    def __init__(self, src_filename):
        "create a bidirectional network with its total length and total number of links"
        self.nw = pysal.open(src_filename, 'r')
        self.G = {}     # {edge_index: (length, (n1, n2))}
        self.GNet = {}  # {n1: {n2: edge_index}} (the original comment claimed
                        # distances, but edge indices are stored)
        self.total_length = 0.0
        self.nwNum = 0  # running link count; doubles as the next edge index
        for line in self.nw:
            vertices = line.vertices
            for i, vertex in enumerate(vertices[:-1]):
                n1, n2 = vertex, vertices[i+1]
                self.G.setdefault(self.nwNum, ())
                self.GNet.setdefault(n1, {})
                self.GNet.setdefault(n2, {})
                d = pysal.cg.get_points_dist(pysal.cg.Point(n1), pysal.cg.Point(n2))
                self.G[self.nwNum] = (d, (n1, n2))
                self.GNet[n1][n2] = self.nwNum
                self.GNet[n2][n1] = self.nwNum
                self.total_length += d
                self.nwNum += 1
        self.nw.close()
        self.imaginaryLineGenerated = False

    def generateImaginaryLine(self):
        '''
        Create an imaginary line that starts from 0 and ends at 1
        and mark the locations of end points of each link
        '''
        self.nwCumPro = [0.0]
        for e in self.G.keys():
            self.nwCumPro.append(self.nwCumPro[-1] + (self.G[e][0]/self.total_length))
        # self.nwCumProDict --> {edge_index: right_side_end_point_of_the_link_on_the_imaginary_line}
        self.nwCumProDict = dict(zip(self.G.keys(), self.nwCumPro[1:]))
        self.imaginaryLineGenerated = True

    def getRandomPoints(self, n, projected=False, toShp=False):
        '''
        Create a random point pattern data set on the given network.

        Parameters
        ----------
        n         : int
                    number of points to generate
        projected : bool
                    when True, return (n1, n2, dist_from_n1, dist_from_n2)
                    projections instead of raw (x, y) coordinates
        toShp     : bool
                    when True (with projected), append the (x, y)
                    coordinate to each projection tuple

        Returns
        -------
        dict
            {edge_index: [point, ...]} grouping points by the link they
            fall on.
        '''
        if not self.imaginaryLineGenerated:
            self.generateImaginaryLine()

        # generate n random numbers between 0 and 1 (duplicates allowed)
        randSet = np.random.random_sample(n)

        # Assign the random numbers to the links on the network.
        # Think of nwCumPro as bins; get bin numbers for all random numbers.
        randSet_to_bins = np.digitize(randSet, self.nwCumPro)
        randSet_to_bins = zip(randSet_to_bins, randSet)
        randSet_to_bins.sort()
        # Determine geographic coordinates for each random number
        nwPtDict = {}
        for bin_id, rand_number in randSet_to_bins:
            bid = bin_id - 1
            # n1 and n2 are the geographic coordinates of the link endpoints
            n1, n2 = self.G[bid][1]
            origin = 0 if bid <= 0 else self.nwCumProDict[bid-1]
            length = self.nwCumProDict[bid] - origin
            # prop locates the random number on the link (n1, n2):
            # (self.nwCumProDict[bid] - rand_number) is the distance between
            # the random point and n2 on the imaginary line
            prop = (self.nwCumProDict[bid] - rand_number)*1.0/length
            nwPtDict.setdefault(bid, [])
            if not projected:
                x = n2[0] - (n2[0] - n1[0])*prop
                y = n2[1] - (n2[1] - n1[1])*prop
                nwPtDict[bid].append((x, y))
            else:
                dist = self.G[bid][0]
                proj_pnt = (n1, n2, dist*(1-prop), dist*prop)
                if toShp:
                    x = n2[0] - (n2[0] - n1[0])*prop
                    y = n2[1] - (n2[1] - n1[1])*prop
                    proj_pnt = tuple(list(proj_pnt) + [x, y])
                nwPtDict[bid].append(proj_pnt)

        return nwPtDict

    def countPointsOnNetwork(self, points, defaultBase=True):
        """
        Aggregate simulated points per link into a node-keyed network.

        Parameters
        ----------
        points      : dict
                      {edge_index: [point, ...]} as returned by
                      getRandomPoints
        defaultBase : bool
                      when True, append a base value of 1.0 to each edge
                      attribute list

        Returns
        -------
        dict
            {n1: {n2: [link_length, point_count(, 1.0)]}} where both
            directions of a link share the same attribute list.
        """
        G = {}
        for k in self.G:
            length, (n1, n2) = self.G[k]
            G.setdefault(n1, {})
            G.setdefault(n2, {})
            if n2 in G[n1]:
                attr = G[n1][n2]
            else:
                # BUG FIX: the original read self.G[n1][n2] (and the possibly
                # unbound local attr), but self.G is keyed by integer edge
                # index, so the lookup always raised KeyError; it also
                # re-appended the 1.0 base on every iteration.
                attr = [length, 0]
                if defaultBase:
                    attr.append(1.0)
                G[n1][n2] = attr
                G[n2][n1] = attr
            if k in points:
                # index 1 is the point count (the last slot may be the base)
                attr[1] += len(points[k])
        return G

    def createProjRandomPointsShp(self, n, out_filename):
        """
        Generate n random projected points and write them to a point
        shapefile with their source/destination nodes and distances.
        """
        points = self.getRandomPoints(n, projected=True, toShp=True)
        shp = pysal.open(out_filename, 'w')
        dbf = pysal.open(out_filename[:-3] + 'dbf', 'w')
        dbf.header = ['ID', 'FROM_P1', 'FROM_P2', 'TO_P1', 'TO_P2', 'D_FROM', 'D_TO']
        dbf.field_spec = [('N',9,0)] + [('N',18,7)]*6
        counter = 0
        for k in points:
            for p in points[k]:
                p = list(p)
                shp.write(pysal.cg.Point(tuple(p[-2:])))
                dbf.write([counter, p[0][0], p[0][1], p[1][0], p[1][1], p[2], p[3]])
                counter += 1
        shp.close()
        dbf.close()

    def createRandomPointsShp(self, n, out_filename):
        """Generate n random (x, y) points and write them to a shapefile."""
        nwPtDict = self.getRandomPoints(n)
        self.writePoints(nwPtDict, out_filename)

    def writePoints(self, points, out_filename):
        """Write a {edge_index: [(x, y), ...]} point dict to a shapefile."""
        shp = pysal.open(out_filename, 'w')
        dbf = pysal.open(out_filename[:-3] + 'dbf', 'w')
        dbf.header = ['ID']
        dbf.field_spec = [('N',9,0)]
        counter = 0
        for k in points:
            for p in points[k]:
                shp.write(pysal.cg.Point(tuple(p)))
                dbf.write([counter])
                counter += 1
        shp.close()
        dbf.close()

    def getClusteredPoints(self, centerNum, ptNum, percent, clusterMeta=False):
        """
        Generate a clustered point pattern: a share (percent) of ptNum
        points is placed on centerNum randomly chosen center links (plus
        their immediate neighbor links), the rest on the remaining links.

        Returns
        -------
        (points, meta) where points is {cluster_index: [(x, y), ...]} and
        meta maps each center link to [length, point_count, is_initial_center]
        (meta is empty unless clusterMeta is True).
        """
        # split network into center- and non-center sub-networks
        centerIDs = np.random.randint(0, self.nwNum, centerNum)
        centers = set(centerIDs)
        centerG, centerG_length = {}, 0
        counter, counter2ID = 0, {}
        for center in centerIDs:
            centerG[counter] = self.G[center]
            centerG_length += self.G[center][0]
            counter2ID[counter] = center
            counter += 1
            n1, n2 = self.G[center][1]
            for neighbor in self.GNet[n1]:
                if neighbor != n2:
                    nghLink = self.GNet[n1][neighbor]
                    centerG[counter] = self.G[nghLink]
                    centerG_length += self.G[nghLink][0]
                    counter2ID[counter] = nghLink
                    counter += 1
                    centers.add(nghLink)
            for neighbor in self.GNet[n2]:
                if neighbor != n1:
                    nghLink = self.GNet[n2][neighbor]
                    centerG[counter] = self.G[nghLink]
                    centerG_length += self.G[nghLink][0]
                    counter2ID[counter] = nghLink
                    counter += 1
                    centers.add(nghLink)
        nonCenterIDs = set(self.G.keys()).difference(centers)
        nonCenterG, nonCenterG_length = {}, 0
        for i, nonCenter in enumerate(nonCenterIDs):
            nonCenterG[i] = self.G[nonCenter]
            nonCenterG_length += self.G[nonCenter][0]

        # temporarily swap in the center sub-network
        self.oldG, self.old_total_length = self.G, self.total_length
        self.G, self.total_length = centerG, centerG_length
        n_centerPoints = int(percent*ptNum*1.0)
        self.imaginaryLineGenerated = False
        pointsInCenter = self.getRandomPoints(n_centerPoints)
        meta = {}
        if clusterMeta:
            for cluster in pointsInCenter:
                num_points = len(pointsInCenter[cluster])
                centerID = counter2ID[cluster]
                centerLink = self.oldG[centerID][1]
                meta[centerLink] = [self.oldG[centerID][0], num_points, centerID in centerIDs]

        # now sample the remaining points on the non-center sub-network
        self.G, self.total_length = nonCenterG, nonCenterG_length
        self.imaginaryLineGenerated = False
        pointsInNonCenter = self.getRandomPoints(ptNum - n_centerPoints)
        # BUG FIX: offset non-center keys by the number of center links
        # (len(centerG), not len(centers), which is smaller whenever chosen
        # centers share neighbor links), and do NOT merge the un-offset keys
        # a second time -- the original's extra update() duplicated every
        # non-center point under colliding low keys.
        offset = len(centerG)
        for k in pointsInNonCenter:
            pointsInCenter[k + offset] = pointsInNonCenter[k]

        # restore the full network
        self.G, self.total_length = self.oldG, self.old_total_length
        self.imaginaryLineGenerated = False

        return pointsInCenter, meta

    def writeMeta(self, metaData, out_file):
        """
        Write cluster metadata ({link: [length, n_points, is_center]}) to
        a line shapefile.
        """
        shp = pysal.open(out_file, 'w')
        dbf = pysal.open(out_file[:-3] + 'dbf', 'w')
        dbf.header = ['LENGTH', 'NO_PNTS', 'INITIAL_CENTER']
        # NOTE(review): LENGTH is a float but declared with 0 decimals
        # ('N', 9, 0) -- confirm whether truncation is intended.
        dbf.field_spec = [('N',9,0)]*2 + [('L',1,0)]
        for link in metaData:
            vertices = [pysal.cg.Point(v) for v in list(link)]
            shp.write(pysal.cg.Chain(vertices))
            dbf.write(metaData[link])
        shp.close()
        dbf.close()

    def createClusteredPointsShp(self, centerNum, n, percent, out_filename, clusterMetaFile=None):
        """
        Generate a clustered point pattern and write it (and, optionally,
        its cluster metadata) to shapefiles.
        """
        nwPtDict, meta = self.getClusteredPoints(centerNum, n, percent, clusterMetaFile != None)
        self.writePoints(nwPtDict, out_filename)
        if clusterMetaFile:
            self.writeMeta(meta, clusterMetaFile)
+
if __name__ == '__main__':
    # Demo: 100 random projected points and a clustered pattern
    # (2 centers, 10% of points clustered) on the sample street network.
    sim=Simulation("streets.shp")
    sim.createProjRandomPointsShp(100, "random_100.shp")
    sim.createClusteredPointsShp(2, 100, 0.1, "clustered_100_10p.shp", "clustered_100_10p_meta.shp")
diff --git a/pysal/contrib/network/streets.dbf b/pysal/contrib/network/streets.dbf
new file mode 100644
index 0000000..ce6d200
Binary files /dev/null and b/pysal/contrib/network/streets.dbf differ
diff --git a/pysal/contrib/network/streets.shp b/pysal/contrib/network/streets.shp
new file mode 100644
index 0000000..b5809a7
Binary files /dev/null and b/pysal/contrib/network/streets.shp differ
diff --git a/pysal/contrib/network/streets.shx b/pysal/contrib/network/streets.shx
new file mode 100644
index 0000000..4733484
Binary files /dev/null and b/pysal/contrib/network/streets.shx differ
diff --git a/pysal/contrib/network/test_access.py b/pysal/contrib/network/test_access.py
new file mode 100644
index 0000000..c20e9d0
--- /dev/null
+++ b/pysal/contrib/network/test_access.py
@@ -0,0 +1,53 @@
+"""access unittest"""
+import unittest
+import access as pyacc
+
class Access_Tester(unittest.TestCase):
    """Unit tests for the accessibility measures in the access module."""

    def setUp(self):
        # distances from each of five locations to four facilities
        self.distances = {1: [1, 2, 3, 4], 2: [1, 1, 2, 3], 3: [2, 1, 1, 2],
                          4: [3, 2, 1, 1], 5: [4, 3, 2, 1]}

    def test_coverage(self):
        observed = [pyacc.coverage(d, 2.5) for d in self.distances.values()]
        self.assertEqual(observed, [2, 3, 4, 3, 2])

    def test_equity(self):
        observed = [pyacc.equity(d) for d in self.distances.values()]
        self.assertEqual(observed, [1, 1, 1, 1, 1])

    def test_potential_entropy(self):
        observed = [pyacc.potential_entropy(d) for d in self.distances.values()]
        expected = [0.57131743166465321, 0.92088123394736132,
                    1.0064294488161101, 0.92088123394736132, 0.57131743166465321]
        self.assertEqual(observed, expected)

    def test_potential_gravity(self):
        observed = [pyacc.potential_gravity(d) for d in self.distances.values()]
        expected = [1.4236111111111112, 2.3611111111111112, 2.5,
                    2.3611111111111112, 1.4236111111111112]
        self.assertEqual(observed, expected)

    def test_travel_cost(self):
        observed = [pyacc.travel_cost(d) for d in self.distances.values()]
        self.assertEqual(observed, [10, 7, 6, 7, 10])
+
# Collect the module's test cases into a suite for direct execution.
test_classes = [Access_Tester]
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for cls in test_classes:
    suite.addTest(loader.loadTestsFromTestCase(cls))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/contrib/network/test_kernel.py b/pysal/contrib/network/test_kernel.py
new file mode 100644
index 0000000..d9c2b73
--- /dev/null
+++ b/pysal/contrib/network/test_kernel.py
@@ -0,0 +1,44 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import kernel as pykernel
+
class Kernel_Tester(unittest.TestCase):
    """Unit tests for network kernel density estimation."""

    def setUp(self):
        # small toy network: a 5-node component plus a separate 2-node one
        self.G = {(1,1): {(2,2): 0.125, (3,3): 0.75}, (2,2): {(1,1): 0.125, (4,4): 1.2},
                   (3,3): {(1,1): 0.75, (4,4): 0.375},
                   (4,4): {(2,2): 1.2, (3,3): 0.375, (5,5): 0.5},
                   (5,5): {(4,4): 0.5}, (6,6):{(7,7):1.0}, (7,7):{(6,6):1.0}}
        self.G_meshed = pynet.mesh_network(self.G, 0.1)
        # snapped event points: (src, dest, dist_from_src, dist_from_dest)
        self.points = [((3.6666666666666665, 3.6666666666666665), (3.5, 3.5), 
                         1.8011569244041523e-18, 8.0119209423694433e-18), 
                       ((4.0, 4.0), (3.8333333333333335, 3.8333333333333335), 
                         6.4354219496947843e-18, 1.3190733783852405e-17), 
                       ((6.5999999999999996, 6.5999999999999996), (6.5, 6.5), 
                         8.6525456558003033e-19, 4.0412843678067672e-18)]
        self.proj_points = [pynet.proj_pnt_coor(p) for p in self.points]

    def test_dijkstras_w_prev(self):
        distances, previous = pykernel.dijkstras_w_prev(self.G, (1,1))
        self.assertEqual(distances, {(5, 5): 1.625, (3, 3): 0.75, (4, 4): 1.125, (1, 1): 0, (2, 2): 0.125})
        self.assertEqual(previous, {(5, 5): (4, 4), (2, 2): (1, 1), (1, 1): None, (4, 4): (3, 3), (3, 3): (1, 1)})

    def test_kernel_density(self):
        density = pykernel.kernel_density(self.G_meshed, self.proj_points, 0.3, self.G.keys())
        self.assertEqual(density[(4.0, 4.0)], 0.25)
+
+
# Collect the module's test cases into a suite for direct execution.
test_classes = [Kernel_Tester]
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for cls in test_classes:
    suite.addTest(loader.loadTestsFromTestCase(cls))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/contrib/network/test_kfuncs.py b/pysal/contrib/network/test_kfuncs.py
new file mode 100644
index 0000000..ec37c06
--- /dev/null
+++ b/pysal/contrib/network/test_kfuncs.py
@@ -0,0 +1,40 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import kfuncs
+
class Kfuncs_Tester(unittest.TestCase):
    """Unit tests for network K-function helpers."""

    def setUp(self):
        # distances from each of five locations to four others
        self.distances = {1: [1, 2, 3, 4], 2: [1, 1, 2, 3], 3: [2, 1, 1, 2],
                          4: [3, 2, 1, 1], 5: [4, 3, 2, 1]}

    def test__fxrange(self):
        observed = kfuncs._fxrange(0.0, 1.0, 0.2)
        for o, e in zip(observed, [0.0, 0.2, 0.4, 0.6, 0.8, 1.0]):
            self.assertAlmostEqual(o, e)

    def test__binary_search(self):
        self.assertEqual(kfuncs._binary_search([0.0, 0.2, 0.4, 0.6, 0.8, 1.0], 0.9), 5)

    def test_kt_values(self):
        expected = {1: {0.5: 0, 1.5: 10, 2.5: 20},
                    2: {0.5: 0, 1.5: 20, 2.5: 30},
                    3: {0.5: 0, 1.5: 20, 2.5: 40},
                    4: {0.5: 0, 1.5: 20, 2.5: 30},
                    5: {0.5: 0, 1.5: 10, 2.5: 20}}
        observed = dict((k, kfuncs.kt_values((0.5, 3.5, 1.0), v, 10))
                        for k, v in self.distances.items())
        self.assertEqual(observed, expected)
+
# Collect the module's test cases into a suite for direct execution.
test_classes = [Kfuncs_Tester]
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for cls in test_classes:
    suite.addTest(loader.loadTestsFromTestCase(cls))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/contrib/network/test_klincs.py b/pysal/contrib/network/test_klincs.py
new file mode 100644
index 0000000..2204716
--- /dev/null
+++ b/pysal/contrib/network/test_klincs.py
@@ -0,0 +1,136 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import klincs 
+import random
+random.seed(10)
+import pysal
+import numpy as np
+
+class KLINCS_Tester(unittest.TestCase):
+
+    def setUp(self):
+        self.population = range(5)
+        self.weights = [0.1, 0.25, 0.1, 0.2, 0.35]
+        np.random.seed(10)
+        self.network_file = 'streets.shp'
+        self.G = pynet.read_network(self.network_file)
+        self.references = [[i, [n]] for i, n in enumerate(self.G.keys())]
+        self.scale_set = (0, 1500, 500) 
+        self.network_distances_cache = {}
+        search_radius = self.scale_set[1]
+        for i, node in self.references:
+            n = node[0]
+            self.network_distances_cache[n] = pynet.dijkstras(self.G, n, search_radius) 
+        self.snapper = pynet.Snapper(self.G)
+        self.events_file = 'crimes.shp' 
+        points = self.get_points_from_shapefile(self.events_file)
+        self.events = []
+        for p in points:
+            self.events.append(self.snapper.snap(p[0]))
+        self.test_node = (724587.78057580709, 877802.4281426128)
+
+    def get_points_from_shapefile(self, src_filename, src_uid=None):
+        src_file = pysal.open(src_filename)
+        dbf = pysal.open(src_filename[:-3] + 'dbf')
+        src_uid_index = dbf.header.index(src_uid) if src_uid else None 
+        if src_uid_index != None:
+            def get_index(index, record):
+                return record[src_uid_index]
+        else:
+            def get_index(index, record):
+                return index
+        if src_file.type == pysal.cg.shapes.Polygon:
+            def get_geom(g):
+                return g.centroid
+        elif src_file.type == pysal.cg.shapes.Point:
+            def get_geom(g):
+                return (g[0], g[1])
+        srcs = [] 
+        for i, rec in enumerate(src_file):
+            srcs.append([get_geom(rec), get_index(i, dbf.next())]) 
+        src_file.close()
+        return srcs 
+
+    def test_WeightedRandomSampleGenerator(self):
+        generator = klincs.WeightedRandomSampleGenerator(self.weights, self.population, 3)
+        sample = generator.next()
+        self.assertEqual(sample,[4, 0, 3])
+
+    def test_RandomSampleGenerator(self):
+        generator = klincs.RandomSampleGenerator(self.population, 3)
+        sample = generator.next()
+        self.assertEqual(sample,[2, 1, 3])
+
+    def test_local_k(self):
+        network = self.G
+        references = self.references
+        events = self.events
+        scale_set = self.scale_set 
+        cache = self.network_distances_cache
+        node2localK, net_distances = klincs.local_k(network, events, references, scale_set, cache)
+        # node2localK
+        # for each reference node,
+        # local_k returns the number of events within a distance 
+        # the distance is determined by scale_set
+        # example: (724587.78057580709, 877802.4281426128): {0: 0, 1000: 22, 500: 9}
+        # 
+        # net_distances - a dictionary containing network distances 
+        # between nodes in the input network
+        test_node = self.test_node
+        self.assertEqual(node2localK[test_node][500], 9)
+
+    def test_cluster_type(self):
+        cluster1 = klincs.cluster_type(0.25, 0.01, 0.33)
+        cluster2 = klincs.cluster_type(0.45, 0.01, 0.33)
+        self.assertEqual(cluster1, 0)
+        self.assertEqual(cluster2, 1)
+
+    def test_simulate_local_k_01(self):
+        sims = 1
+        n = len(self.events)
+        net_file = self.network_file
+        network = self.G
+        events = self.events
+        refs = self.references
+        scale_set = self.scale_set
+        cache = self.network_distances_cache
+        args = (sims, n, net_file, network, events, refs, scale_set, cache)
+        sim_outcomes = klincs.simulate_local_k_01(args)
+        self.assertEqual(sim_outcomes[0][self.test_node], {0: 0, 1000: 9, 500: 4})
+
+    def test_simulate_local_k_02(self):
+        sims = 1
+        n = 50
+        refs = self.references
+        scale_set = self.scale_set
+        cache = self.network_distances_cache
+        args = (sims, n, refs, scale_set, cache)
+        sim_outcomes = klincs.simulate_local_k_02(args)
+        self.assertEqual(sim_outcomes[0][self.test_node], {0: 1, 1000: 2, 500: 2})
+          
+    def test_k_cluster(self):
+        network = self.G
+        events = self.events
+        refs = self.references
+        scale_set = self.scale_set
+        sims = 1
+        sim_network = self.network_file
+        localKs = klincs.k_cluster(network, events, refs, scale_set, sims, sim_network=sim_network)
+        test_node = self.test_node
+        # in [9,4,4,1]
+        # 9 - the number of events observed at the search radius of 500
+        # 4 - the lowest number of simulated points observed at the search radius of 500
+        # 4 - the highest number of simulated points observed at the search radius of 500
+        # 1 - the type of cluster
+        self.assertEqual(localKs[test_node][500],[9,4,4,1])
+
+suite = unittest.TestSuite()
+test_classes = [KLINCS_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/contrib/network/test_lincs.py b/pysal/contrib/network/test_lincs.py
new file mode 100644
index 0000000..33f39cd
--- /dev/null
+++ b/pysal/contrib/network/test_lincs.py
@@ -0,0 +1,89 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import lincs 
+import random
+random.seed(10)
+import pysal
+import numpy as np
+import copy
+
+class LINCS_Tester(unittest.TestCase):
+
+    def setUp(self):
+        self.base = np.array([100, 80, 50, 120, 90])
+        self.events = np.array([20, 20, 5, 10, 25])
+        np.random.seed(10)
+        self.observed = np.array([0.1,0.15,0.2])
+        self.simulated = np.array([[0.05,0.10,0.25],[0.12,0.11,0.3],[0.11,0.09,0.27]])
+        self.network_file = 'streets.shp'
+        self.G = pynet.read_network(self.network_file)
+        self.test_link = ((724432.38723173144, 877800.08747069736), 
+                          (724587.78057580709, 877802.4281426128))
+        self.G2 = copy.deepcopy(self.G)
+        done = set()
+        for n1 in self.G2:
+            for n2 in self.G2[n1]:
+                if (n1, n2) in done:
+                    continue
+                dist = self.G2[n1][n2]
+                base = int(random.random()*1000)
+                event = int(random.random()*base)
+                self.G2[n1][n2] = [dist, base, event]
+                self.G2[n2][n1] = [dist, base, event]
+                done.add((n1,n2))
+                done.add((n2,n1))
+
+    def test_unconditional_sim(self):
+        simulated_events = lincs.unconditional_sim(self.events, self.base, 2)
+        self.assertEqual(list(simulated_events[0]),[21,15])
+
+    def test_unconditional_sim_poisson(self):
+        simulated_events = lincs.unconditional_sim_poisson(self.events, self.base, 2)
+        self.assertEqual(list(simulated_events[0]),[22,21])
+
+    def test_conditional_multinomial(self):
+        simulated_events = lincs.conditional_multinomial(self.events, self.base, 2)
+        self.assertEqual(list(simulated_events[0]),[21,18])
+
+    def test_pseudo_pvalues(self):
+        pseudo_pvalues = lincs.pseudo_pvalues(self.observed, self.simulated)
+        self.assertEqual(list(pseudo_pvalues[0]),[ 0.5,  0.5,  0.5])
+
+    def test_node_weights(self):
+        w, id2link = lincs.node_weights(self.G)
+        self.assertEqual(w.neighbors[0],[1, 2, 3, 4])
+
+    def test_edgepoints_from_network(self):
+        id2linkpoints, id2attr, link2id = lincs.edgepoints_from_network(self.G)
+        link = id2linkpoints[0]
+        self.assertEqual(link[:2], self.test_link)
+        self.assertEqual(link2id[link[:2]], 0)
+
+    def test_dist_weights(self):
+        id2linkpoints, id2attr, link2id = lincs.edgepoints_from_network(self.G)
+        w, id2link = lincs.dist_weights(self.G, id2linkpoints, link2id, 500)
+        self.assertEqual(w.neighbors[0],[1,154,153,155])
+        self.assertEqual(id2link[0], self.test_link)
+
+    def test_lincs(self):
+        network = self.G2
+        event_index = 2
+        base_index = 1
+        weight = 'Distance-based'
+        dist = 500
+        lisa_func = 'moran'
+        sim_method = 'permutations'
+        sim_num = 2
+        lisa, w = lincs.lincs(network, event_index, base_index, weight, dist, lisa_func, sim_method, sim_num)
+        self.assertEqual(lisa[0][3], -0.64342055427251854)
+
+suite = unittest.TestSuite()
+test_classes = [LINCS_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/contrib/network/test_network.py b/pysal/contrib/network/test_network.py
new file mode 100644
index 0000000..e99e1ca
--- /dev/null
+++ b/pysal/contrib/network/test_network.py
@@ -0,0 +1,186 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import random
+random.seed(10)
+import pysal
+
+class Network_Tester(unittest.TestCase):
+
+    def setUp(self):
+        self.net = 'streets.shp'
+        self.G = pynet.read_network(self.net)
+        self.G2 = {(1,1): {(2,2): 0.125, (3,3): 0.75}, (2,2): {(1,1): 0.125, (4,4): 1.2},
+                   (3,3): {(1,1): 0.75, (4,4): 0.375},
+                   (4,4): {(2,2): 1.2, (3,3): 0.375, (5,5): 0.5},
+                   (5,5): {(4,4): 0.5}, (6,6):{(7,7):1.0}, (7,7):{(6,6):1.0}}
+        self.GDirected = {(1,1): {(2,2): 0.125, (3,3): 0.75}, (2,2): {(1,1): 0.125},
+                   (3,3): {(1,1): 0.75, (4,4): 0.375},
+                   (4,4): {(2,2): 1.2, (3,3): 0.375, (5,5): 0.5},
+                   (5,5): {(4,4): 0.5}, (6,6):{(7,7):1.0}, (7,7):{(6,6):1.0}}
+        self.points = [((4,4), (2,2), 0.51466686561013752, 0.68533313438986243), 
+                       ((4,4), (3,3), 0.077286837052313151, 0.29771316294768685), 
+                       ((6,6), (7,7), 0.82358887253344548, 0.17641112746655452)]
+
+    def test_no_nodes(self):
+        self.assertEqual(pynet.no_nodes(self.G), 230)
+
+    def test_no_edges(self):
+        self.assertEqual(pynet.no_edges(self.G), 303)
+
+    def test_tot_net_length(self):
+        self.assertAlmostEqual(pynet.tot_net_length(self.G), 52207.04600797734, places=1)
+
+    def test_walk(self):
+        correct_path = {(5, 5): {(4, 4): 0.5}, (2, 2): {(4, 4): 1.2, (1, 1): 0.125}, 
+                        (1, 1): {(3, 3): 0.75, (2, 2): 0.125}, 
+                        (4, 4): {(5, 5): 0.5, (3, 3): 0.375, (2, 2): 1.2}, 
+                        (3, 3): {(4, 4): 0.375, (1, 1): 0.75}}
+        traversal_path = pynet.walk(self.G2, (1,1))
+        self.assertEqual(traversal_path, correct_path)        
+
+    def test_components(self):
+        components = pynet.components(self.G)
+        self.assertEqual(pynet.no_nodes(components[0]), 230)        
+        components = pynet.components(self.G2)
+        self.assertEqual(len(components), 2)        
+
+    def test_no_components(self):
+        no_components = pynet.no_components(self.G2)
+        self.assertEqual(no_components, 2)        
+
+    def test_net_global_stats(self):
+        stats = ['v', 'e', 'L', 'p', 'u', 'alpha', 'beta', 'emax', 'gamma', 'eta', 
+                 'net_den', 'detour']
+        values = pynet.net_global_stats(self.G, detour=True)
+        values = dict(zip(stats, values))
+        self.assertEqual(values['v'], 230)
+        self.assertEqual(values['e'], 303)
+        self.assertAlmostEqual(values['L'], 52207.04600797734, places=1)
+        self.assertAlmostEqual(values['eta'], 172.30048187451268)
+        self.assertAlmostEqual(values['beta'], 1.317391304347826, places=2)
+        self.assertAlmostEqual(values['emax'], 684)
+        self.assertAlmostEqual(values['gamma'], 0.44298245614035087, places=2)
+        self.assertAlmostEqual(values['detour'], 0.78002937059822186, places=4)
+        for k in values:
+            print k, values[k]
+
+    def test_random_projs(self):
+        random.seed(10)
+        random_points = pynet.random_projs(self.G2, 3)
+        points = [((7, 7), (6, 6), 0.42888905467511462, 0.57111094532488538), 
+                  ((7, 7), (6, 6), 0.20609823213950174, 0.79390176786049826), 
+                  ((1, 1), (3, 3), 0.61769165440008411, 0.13230834559991589)] 
+        self.assertEqual(random_points, points)
+        
+    def test_proj_distances_undirected(self):
+        source = self.points[0]
+        destinations = self.points[1:]
+        distances = pynet.proj_distances_undirected(self.G2, source, destinations, r=1.0)
+        self.assertAlmostEqual(distances.values()[0], 0.59195370266245062)
+
+    def test_proj_distances_directed(self):
+        source = self.points[0]
+        destinations = self.points
+        distances = pynet.proj_distances_directed(self.G2, source, destinations)
+        distance_values = [0.0, 1.9626199714421757]
+        self.assertEqual(distances.values(), distance_values)
+
+    def test_dijkstras(self):
+        distances = pynet.dijkstras(self.G2, (1,1))
+        distance_values = {(5, 5): 1.625, (2, 2): 0.125, (1, 1): 0, (4, 4): 1.125, (3, 3): 0.75}
+        self.assertEqual(distances, distance_values)
+
+    def test_snap(self):
+        snapper = pynet.Snapper(self.G2)
+        projected_point = snapper.snap((2.5,2.5))
+        self.assertEqual(projected_point, ((2, 2),(4, 4),3.9252311467094367e-17,1.1775693440128314e-16))
+
+    def test_network_from_endnodes(self):
+        shape = pysal.open(self.net)
+        dbf = pysal.open(self.net[:-3] + 'dbf')
+        def weight(geo_object, record):
+            return 1
+        graph = pynet.network_from_endnodes(shape, dbf, weight)
+        neighbors = {(724432.38723173144, 877800.08747069736): 1, 
+                     (725247.70571468933, 877812.36851842562): 1}
+        start_point = (724587.78057580709, 877802.4281426128)
+        self.assertEqual(graph[start_point], neighbors)
+
+    def test_network_from_allvertices(self):
+        shape = pysal.open(self.net)
+        dbf = pysal.open(self.net[:-3] + 'dbf')
+        graph = pynet.network_from_allvertices(shape, dbf)
+        neighbors = {(724432.38723173144, 877800.08747069736): 155.41097171058956, 
+                     (725247.70571468933, 877812.36851842562): 660.00000000003809}
+        start_point = (724587.78057580709, 877802.4281426128)
+        self.assertEqual(graph[start_point], neighbors)
+
+    def test_read_hierarchical_network(self):
+        shape = pysal.open(self.net)
+        dbf = pysal.open(self.net[:-3] + 'dbf')
+        graph_detail, graph_endnode, link = pynet.read_hierarchical_network(shape, dbf)
+        neighbors = {(725220.77363919443, 880985.09708712087): 659.99999999994725, 
+                     (724400.64597190416, 880984.45593258401): 160.12791790928244}
+        vertex1 = (724560.77384088072, 880984.58111639845)
+        vertex2 = (724400.64597190416, 880984.45593258401)
+        self.assertEqual(graph_detail[vertex1], neighbors)
+        self.assertEqual(graph_endnode[vertex1], neighbors)
+        self.assertEqual(link[(vertex1, vertex2)][0], (vertex1, vertex2))
+
+    def test_read_network(self):
+        graph = pynet.read_network(self.net)
+        self.assertEqual(graph, self.G) 
+
+    def test_proj_pnt_coor(self):
+        point = pynet.proj_pnt_coor(self.points[0])
+        point_value = (3.6360755692750462, 3.6360755692750462)
+        self.assertEqual(point, point_value)
+
+    def test_inject_points(self):
+        graph, point_coors = pynet.inject_points(self.G2, self.points)
+        self.assertEqual(len(graph), len(self.G2) + 3)
+
+    def test_mesh_network(self):
+        graph = pynet.mesh_network(self.G2, 0.1)
+        self.assertEqual(len(graph), 41)
+
+    def test_write_network_to_pysalshp(self):
+        out = 'output_network.shp'
+        pynet.write_network_to_pysalshp(self.G, out)
+        graph = pynet.read_network(out)
+        self.assertEqual(graph, self.G)    
+
+    def test_write_valued_network_to_shp(self):
+        out = 'output_network.shp'
+        fields = ['WEIGHT1','WEIGHT2']
+        types = [('N',7,3),('N',7,3)]
+        values = {(1,1):1,(2,2):2,(3,3):3,(4,4):4,(5,5):5,(6,6):6,(7,7):7}
+        def doubleX(values, node):
+            return values[node]*2.0
+        pynet.write_valued_network_to_shp(out,fields,types,self.G2,values,doubleX)
+        graph = pynet.read_network(out)
+        self.assertEqual(len(graph), len(self.G2))    
+
+    def test_list_network_to_shp(self):
+        out = 'output_network.shp'
+        fields = ['WEIGHT']
+        types = [('N',7,3)]
+        list_network = []
+        for node1 in self.G2:
+            for node2 in self.G2[node1]:
+                list_network.append(((node1, node2),random.random()))
+        pynet.write_list_network_to_shp(out,fields,types,list_network)
+        graph = pynet.read_network(out)
+        self.assertEqual(len(graph), len(self.G2))    
+
+
+suite = unittest.TestSuite()
+test_classes = [Network_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/contrib/network/test_weights.py b/pysal/contrib/network/test_weights.py
new file mode 100644
index 0000000..4c970b2
--- /dev/null
+++ b/pysal/contrib/network/test_weights.py
@@ -0,0 +1,22 @@
+"""network unittest"""
+import unittest
+import network as pynet
+import numpy as np
+import weights 
+
+class Weights_Tester(unittest.TestCase):
+
+    def test_dist_weights(self):
+        ids = np.array(map(str,range(1,9)))
+        w = weights.dist_weights('distances.csv','knn',ids,3)
+        self.assertEqual(w.neighbors['1'],['6','8','7'])
+
+suite = unittest.TestSuite()
+test_classes = [Weights_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/contrib/network/weights.py b/pysal/contrib/network/weights.py
new file mode 100644
index 0000000..b30fe42
--- /dev/null
+++ b/pysal/contrib/network/weights.py
@@ -0,0 +1,80 @@
+"""
+A library of spatial network functions.
+Not to be used without permission.
+
+Contact: 
+
+Andrew Winslow
+GeoDa Center for Geospatial Analysis
+Arizona State University
+Tempe, AZ
+Andrew.Winslow at asu.edu
+"""
+
+import csv
+import numpy as np
+from pysal import W
+import unittest
+import test
+
+def dist_weights(distfile, weight_type, ids, cutoff, inverse=False):
+    """
+    Returns a distance-based weights object using user-defined options
+    
+    Parameters
+    ----------
+    distfile: string, a path to distance csv file
+    weight_type: string, either 'threshold' or 'knn'
+    ids: a numpy array of id values
+    cutoff: float or integer; float for 'threshold' weight type and integer for knn type
+    inverse: boolean; true if inversed weights required
+
+    """
+    try:
+        data_csv = csv.reader(open(distfile))        
+        if csv.Sniffer().has_header(distfile):
+            data_csv.next()
+    except:        
+        data_csv = None
+    
+    if weight_type == 'threshold':
+        def neighbor_func(dists, threshold):
+            dists = filter(lambda x: x[0] <= threshold, dists)
+            return dists
+    else:
+        def neighbor_func(dists, k):
+            dists.sort()
+            return dists[:k]
+
+    if inverse:
+        def weight_func(dists, alpha=-1.0):
+            return list((np.array(dists)**alpha).round(decimals=6))
+    else:
+        def weight_func(dists, binary=False):
+            return [1]*len(dists)
+
+    dist_src = {}
+    for row in data_csv:
+        des = dist_src.setdefault(row[0], {})
+        if row[0] != row[1]:
+            des[row[1]] = float(row[2])
+
+    neighbors, weights = {}, {}
+    for id_val in ids:
+        if id_val not in dist_src:
+            raise ValueError, 'An ID value doest not exist in distance file'
+        else:
+            dists = zip(dist_src[id_val].values(), dist_src[id_val].keys())
+        ngh, wgt = [], []
+        if len(dists) > 0:
+            nghs = neighbor_func(dists, cutoff)
+            for d, i in nghs:
+                ngh.append(i)
+                wgt.append(d)
+        neighbors[id_val] = ngh
+        weights[id_val] = weight_func(wgt)
+    w = W(neighbors, weights)
+    w.id_order = ids
+    return w
+
+
diff --git a/pysal/contrib/opendata/__init__.py b/pysal/contrib/opendata/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/opendata/google.py b/pysal/contrib/opendata/google.py
new file mode 100644
index 0000000..51c24f7
--- /dev/null
+++ b/pysal/contrib/opendata/google.py
@@ -0,0 +1,287 @@
+"""
+google: Tools for extracting information through Google Places API
+
+Author(s):
+    Luc Anselin luc.anselin at asu.edu
+    Xun Li xun.li at asu.edu
+
+"""
+
+__author__ = "Luc Anselin <luc.anselin at asu.edu, Xun Li <xun.li at asu.edu"
+
+import urllib2
+import json
+from pysal.cg import lonlat,geogrid,harcdist
+
+__all__ = ['querypoints','queryradius','googlepoints','ptdict2geojson']
+
+def querypoints(p,bbox=True,lonx=False,k=5):
+    """
+    Utility function to generate a list of grid query points from a bounding box
+    essentially a wrapper around geogrid with a check for bounding box dimension
+    
+    Parameters
+    ----------
+    p      : list of points in lat-lon or lon-lat
+    bbox   : flag for bounding box (upper left, lower right), default = True
+    lonx   : flag for lat-lon order, default is False for lat-lon (True is lon-lat)
+    k      : grid size (points grid will be k+1 by k+1)
+    
+    Returns
+    -------
+    grid   : list of lat-lon of a grid of query points, row by row, from top to bottom
+    
+    """
+    if bbox:
+        if not len(p)==2:
+            raise Exception, "invalid format for bounding box"
+        # create grid points from bounding box
+        grid = geogrid(p[0],p[1],k=k,lonx=lonx)
+        # the list must be in lat-lon format
+        if lonx:
+            grid = lonlat(grid)
+    else:
+        return p
+    return grid
+    
+def queryradius(grid,k,lonx=False):
+    """
+    Utility function to find a good query range distance for grid points
+    
+    Parameters
+    ----------
+    grid    : list of lat-lon tuplets for regular grid points
+              (created with querypoints)
+    k       : dimension of the grid, the list will consist of k+1 x k+1 tuplets
+    lonx    : flag for lat-lon order, default is False for lat-lon (lon-lat is True)
+    
+    Returns
+    -------
+            : maximum distance from the horizontal and vertical sides of
+              the lower right grid cell in meters
+              (to be used as the sradius argument in googlepoints)
+    
+    """
+    # compute distances in horizontal and vertical direction
+    # use the lower right grid cell
+    # along longitude
+    p0 = grid[-2]
+    p1 = grid[-1]
+    # along latitude
+    p3 = grid[-(k+2)]
+    # horizontal distance
+    dist1 = harcdist(p0,p1,lonx=lonx)
+    # vertical distance
+    dist2 = harcdist(p3,p1,lonx=lonx)
+    return round(max([dist1,dist2])*1000.0)
+
+def googlepoints(querypoints,apikey,sradius,stype,newid=True,verbose=True):
+    """
+    Queries the Google Places API for locations (lat-lon) of a 
+    given type of facility
+    
+    Parameters
+    ----------
+    querypoints  : a list of tuples with point coordinates in lat-lon format 
+                   (not lon-lat!)
+    apikey       : the user's Google Places API key
+    sradius      : search radius in meters
+    stype        : facility type from list supported by Google Places API
+    newid        : flag for sequential integer as new key
+                   default = True
+                   should be set to False when result dictionary will
+                   be used as input to googlevalues
+    verbose      : flag for amount of detail in output
+                   default = True, for each point query, the status and
+                   number of points found is printed
+                   False: only a warning is printed when search is truncated
+                   at 200
+    
+    Returns
+    -------
+    findings     : a dictionary with the facility Google ID as the key
+                   contains lat and lng
+                   
+    Example
+    -------
+    
+    It is not possible to give an actual example, since that requires the
+    user's unique API key. However, to illustrate the type of call and
+    return consider:
+    
+    liquorlist = googlepoints([(41.981417, -87.893517)],'apikeyhere',5000,'liquor_store')
+    
+    which will query a radius of 5000m (5km) around the specified query point for the
+    locations of all liquor stores in the Google data base
+    
+    This returns a dictionary:
+    
+    {u'ChIJ-etp1PLJD4gRLYGl7Jm1oG4': {u'lat': 41.986742, u'lng': -87.836312},
+     u'ChIJ0fyhh9awD4gR6-vaMEbGNBE': {u'lat': 42.022235, u'lng': -87.941302},
+    ...
+     u'ChIJxfyRaki2D4gRiaHLJANgJns': {u'lat': 42.010285, u'lng': -87.875896}}
+     
+    The key in the dictionary is the Google ID of the facility
+    """
+    base_url = 'https://maps.googleapis.com/maps/api/place/radarsearch/json?location=%s,%s&radius=%s&types=%s&key=%s'
+    # list of urls to query
+    query_urls = [base_url%(lat,lon,sradius,stype,apikey) for lat,lon in querypoints]
+    findings = {}
+    
+    for url in query_urls:
+        rsp = urllib2.urlopen(url)
+        content = rsp.read()
+        data = json.loads(content)
+        if 'results' in data:         # avoid empty records
+            results = data['results']
+            if verbose:
+                print data['status'], len(results)
+            if len(results) == 200:
+                print "WARNING: query truncated at 200"
+            for item in results:
+                place_id = item['place_id']
+                loc = item['geometry']['location']
+                # use place id as key in the dictionary
+                findings[place_id] = loc
+            
+    # replace key with integer sequence number
+    if newid:
+        ii = 0
+        newdict = {}
+        for placeid in findings.keys():
+            dd = findings[placeid]
+            dd['placeid']=placeid  
+            newdict[ii]=dd
+            ii = ii + 1
+    else:
+        return findings
+    
+    return newdict
+    
+def ptdict2geojson(d,location=["lng","lat"],values=[],ofile="output.geojson"):
+    """
+    Turns the dictionary output from googlepoints into a geojson point file
+    
+    Parameters
+    ----------
+    d          : dict object created by googlepoints
+    location   : list with long, lat coordinates in decimal degrees
+                 (no check for order)
+    values     : list of keys for data items other than the id
+                 (do not include the id)
+                 default: no values (id only)
+    ofile      : file name for the output file (include geojson extension)
+                 default: "output.geojson"
+    
+    Returns
+    -------
+               : geojson point file
+    
+    Remarks
+    -------
+    Assumes that the dictionary is complete, i.e., every key has a value associated
+    with it. No checks are made for missing keys.
+    
+    """
+    # extract location info
+    lng = location[0]
+    lat = location[1]
+    # initialize geo dictionary for point features
+    geo = {"type": "FeatureCollection","features":[]}
+    # loop over dictionary
+    for id,loc in d.iteritems():
+        feature = { "type" : "Feature", 
+        "geometry": { "type": "Point", "coordinates": [ loc['lng'],loc['lat']]}}
+        properties = {"id": id}
+        for info in values:
+            properties[info] = loc[info]
+        feature["properties"] = properties
+        geo["features"].append(feature)
+    # output file
+    with open(ofile,'w') as outfile:
+        json.dump(geo,outfile,sort_keys=True,indent=4,ensure_ascii=False)
+    outfile.close()
+    return
+    
+def googlept(pointlist,stype,apikey,lonx=False,bbox=True,k=5,sradius=5000,verbose=True,ofile="output.geojson"):
+    """
+    User function to query google points
+    
+    Parameters
+    ----------
+    pointlist   : a list of tuples with lat-lon (or lon-lat) for query points
+                  or a list with the upper left and lower right points of a bounding box
+    stype       : string with the type of query from the list of supported queries
+                  by the Google Places API
+    apikey      : user's Google Places API key
+    lonx        : flag for order or lat-lon points in tuple
+                  default = False for lat-lon order (lon-lat is True)
+    bbox        : flag for use of bounding box as input
+                  default = True for bounding box; False is list of points
+                  when using a single point, must be in a list [(lat,lon)]
+    k           : default size for the search grid (for bbox=True)
+    sradius     : default search radius, for point list only
+                  with bbox=True search radius is determined internally
+    verbose     : flag for details in google query
+                  default is True for query status and number of points returned
+    ofile       : output file name
+    
+    Returns
+    -------
+                : geojson point file
+    
+    """
+    # create the grid search box
+    grid = querypoints(pointlist,bbox=bbox,lonx=lonx,k=k)
+    # compute search radius (only when bounding box specified)
+    if bbox:
+        sradius = queryradius(grid,k=k,lonx=lonx)
+    # query google places
+    ptdict = googlepoints(grid,apikey,sradius,stype,newid=True,verbose=verbose)
+    # create output file
+    ptdict2geojson(ptdict,location=["lng","lat"],values=["placeid"],ofile=ofile)
+    return
+    
+if __name__ == '__main__':
+    print "Welcome to PySAL Google Points Query"
+    apikey = raw_input("Enter your API key: ")
+    stype = raw_input("Enter the type of query: ")
+    print "For each query point, enter the lat,lon separated by a comma"
+    print "For a bounding box, enter the upper left corner first,\nthen the lower right"
+    plist = []
+    while True:
+        p = raw_input("Enter lat,lon: ")
+        if p:
+            pp = tuple(map(float,p.split(",")))
+            print pp
+            plist.append(pp)
+        else:
+            break
+    if len(plist) == 2:
+        bb = raw_input("Is this a bounding box (enter Yes or No): ")
+        if bb.upper() == 'NO' or bb.upper() == 'N':
+            bbox = False
+        else:
+            bbox = True
+    else:
+        bbox = False
+    k = 5
+    sr = 5000.0
+    if bbox:
+        gp = raw_input("Enter the number of grid points or return for default: ")
+        if gp:
+            k = int(gp)
+    else:
+        sr = raw_input("Enter the search radius in meters\nor return for default: ")
+        if sr:
+            sradius = float(sr)
+    
+    outfile = raw_input("Enter the name for the output file : ")
+    googlept(plist,stype,apikey,lonx=False,bbox=bbox,k=k,sradius=sr,
+          verbose=True,ofile=outfile)
+    print "Output is in file %s " % outfile
+    
+    
+        
+    
+    
diff --git a/pysal/contrib/shapely_ext.py b/pysal/contrib/shapely_ext.py
new file mode 100644
index 0000000..c2a6847
--- /dev/null
+++ b/pysal/contrib/shapely_ext.py
@@ -0,0 +1,301 @@
+import shapely
+import shapely.geometry
+import shapely.ops
+_basegeom = shapely.geometry.base.BaseGeometry
+import pysal
+__all__ = ["to_wkb", "to_wkt", "area", "distance", "length", "boundary", "bounds", "centroid", "representative_point", "convex_hull", "envelope", "buffer", "simplify", "difference", "intersection", "symmetric_difference", "union", "unary_union", "cascaded_union", "has_z", "is_empty", "is_ring", "is_simple", "is_valid", "relate", "contains", "crosses", "disjoint", "equals", "intersects", "overlaps", "touches", "within", "equals_exact", "almost_equals", "project", "interpolate"]
+
+
+def to_wkb(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.to_wkb()
+
+def to_wkt(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.to_wkt()
+
+# Real-valued properties and methods
+# ----------------------------------
+def area(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.area
+
+def distance(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.distance(o2)
+
+def length(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.length
+
+# Topological properties
+# ----------------------
+def boundary(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.bondary
+    return pysal.cg.shapes.asShape(res)
+
+def bounds(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.bounds
+
+def centroid(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.centroid
+    return pysal.cg.shapes.asShape(res)
+
+def representative_point(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.representative_point
+    return pysal.cg.shapes.asShape(res)
+
+def convex_hull(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.convex_hull
+    return pysal.cg.shapes.asShape(res)
+
+def envelope(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.envelope
+    return pysal.cg.shapes.asShape(res)
+
+def buffer(shape, radius, resolution=16):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.buffer(radius, resolution)
+    return pysal.cg.shapes.asShape(res)
+
+def simplify(shape, tolerance, preserve_topology=True):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.simplify(tolerance, preserve_topology)
+    return pysal.cg.shapes.asShape(res)
+    
+# Binary operations
+# -----------------
+def difference(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    res = o.difference(o2)
+    return pysal.cg.shapes.asShape(res)
+
+def intersection(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    res = o.intersection(o2)
+    return pysal.cg.shapes.asShape(res)
+
+def symmetric_difference(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    res = o.symmetric_difference(o2)
+    return pysal.cg.shapes.asShape(res)
+
+def union(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    res = o.union(o2)
+    return pysal.cg.shapes.asShape(res)
+
+def cascaded_union(shapes):
+    o = []
+    for shape in shapes:
+        if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+        o.append(shapely.geometry.asShape(shape))
+    res = shapely.ops.cascaded_union(o)
+    return pysal.cg.shapes.asShape(res)
+
+def unary_union(shapes):
+    # seems to be the same as cascade_union except that it handles multipart polygons
+    if shapely.__version__ < '1.2.16':
+        raise Exception, "shapely 1.2.16 or higher needed for unary_union; upgrade shapely or try cascade_union instead"
+    o = []
+    for shape in shapes:
+        if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+        o.append(shapely.geometry.asShape(shape))
+    res = shapely.ops.unary_union(o)
+    return pysal.cg.shapes.asShape(res)
+
+# Unary predicates
+# ----------------
+def has_z(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.has_z
+
+def is_empty(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.is_empty
+
+def is_ring(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.is_ring
+
+def is_simple(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.is_simple
+
+def is_valid(shape):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    return o.is_valid
+
+# Binary predicates
+# -----------------
+def relate(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.relate(o2)
+
+def contains(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.contains(o2)
+
+def crosses(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.crosses(o2)
+
+def disjoint(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.disjoint(o2)
+
+def equals(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.equals(o2)
+
+def intersects(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.intersects(o2)
+
+def overlaps(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.overlaps(o2)
+
+def touches(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.touches(o2)
+
+def within(shape, other):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.within(o2)
+
+def equals_exact(shape, other, tolerance):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.equals_exact(o2, tolerance)
+
+def almost_equals(shape, other, decimal=6):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.almost_equals(o2, decimal)
+
+# Linear referencing
+# ------------------
+
+def project(shape, other, normalized=False):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    if not hasattr(other,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    o2 = shapely.geometry.asShape(other)
+    return o.project(o2, normalized)
+
+def interpolate(shape, distance, normalized=False):
+    if not hasattr(shape,'__geo_interface__'): raise TypeError, "%r does not appear to be a shape"%shape
+    o = shapely.geometry.asShape(shape)
+    res = o.interpolate(distance, normalized)
+    return pysal.cg.shapes.asShape(res)
+    
+
+# Copy doc strings from shapely
+for method in __all__:
+    if hasattr(_basegeom, method):
+        locals()[method].__doc__ = getattr(_basegeom,method).__doc__
+
+if __name__=='__main__':
+    #step 0, create 2 points
+    pt1 = pysal.cg.shapes.Point((0,0))
+    pt2 = pysal.cg.shapes.Point((10,10))
+    o = pysal.open('step0.shp','w')
+    o.write(pt1)
+    o.write(pt2)
+    o.close()
+
+    #step 1, buffer 2 points
+    b1 = buffer(pt1,10)
+    b2 = buffer(pt2,10)
+    o = pysal.open('step1.shp','w')
+    o.write(b1)
+    o.write(b2)
+    o.close()
+
+    #step 2, intersect 2 buffers
+    i = intersection(b1,b2)
+    o = pysal.open('step2.shp','w')
+    o.write(i)
+    o.close()
+    
+    #step 3, union 2 buffers
+    u = union(b1, b2)
+    o = pysal.open('step3.shp','w')
+    o.write(u)
+    o.close()
+    
+    #step 4, find convex_hull of union
+    c = convex_hull(u)
+    o = pysal.open('step4.shp','w')
+    o.write(c)
+    o.close()
diff --git a/pysal/contrib/shared_perimeter_weights.py b/pysal/contrib/shared_perimeter_weights.py
new file mode 100644
index 0000000..0827e2d
--- /dev/null
+++ b/pysal/contrib/shared_perimeter_weights.py
@@ -0,0 +1,33 @@
+"""
+shared_perimeter_weights -- calculate shared-perimeter weights.
+
+wij = l_ij/P_i
+wji = l_ij/P_j
+
+l_ij = length of shared border i and j
+P_j = perimeter of j
+
+"""
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ["spw_from_shapefile"]
+
+
+import pysal
+import shapely.geometry
+
+def spw_from_shapefile(shapefile, idVariable=None):
+    polygons = pysal.open(shapefile,'r').read()
+    polygons = map(shapely.geometry.asShape,polygons)
+    perimeters = [p.length for p in polygons]
+    Wsrc = pysal.rook_from_shapefile(shapefile)
+    new_weights = {}
+    for i in Wsrc.neighbors:
+        a = polygons[i]
+        p = perimeters[i]
+        new_weights[i] = [a.intersection(polygons[j]).length/p for j in Wsrc.neighbors[i]]
+    return pysal.W(Wsrc.neighbors,new_weights)
+
+if __name__=='__main__':
+    fname = pysal.examples.get_path('stl_hom.shp')
+    W = spw_from_shapefile(fname)
+
diff --git a/pysal/contrib/spatialnet/__init__.py b/pysal/contrib/spatialnet/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/spatialnet/beth_roads.shp b/pysal/contrib/spatialnet/beth_roads.shp
new file mode 100644
index 0000000..7014882
Binary files /dev/null and b/pysal/contrib/spatialnet/beth_roads.shp differ
diff --git a/pysal/contrib/spatialnet/beth_roads.shx b/pysal/contrib/spatialnet/beth_roads.shx
new file mode 100644
index 0000000..53fd135
Binary files /dev/null and b/pysal/contrib/spatialnet/beth_roads.shx differ
diff --git a/pysal/contrib/spatialnet/cleanNetShp.py b/pysal/contrib/spatialnet/cleanNetShp.py
new file mode 100644
index 0000000..0606fe4
--- /dev/null
+++ b/pysal/contrib/spatialnet/cleanNetShp.py
@@ -0,0 +1,141 @@
+"""
+cleanNetShp -- Tools to clean spatial Network Shapefiles.
+"""
+import pysal
+import numpy
+
+__author__ = "Charles R. Schmidt <schmidtc at gmail.com>"
+__all__ = ['snap_verts', 'find_nodes', 'split_at_nodes']
+
+
+def snap_verts(shp,tolerance=0.001,arc=True):
+    """
+    snap_verts -- Snap verts that are within tolerance meters of each other.
+
+    Description -- Snapping should be performed with a very small tolerance.
+                   The goal is not to change the network, but to ensure rounding
+                   errors don't prevent edges from being split at proper intersections.
+                   The default of 1mm should be adequate if the input is of decent quality.
+                   Higher snapping values can be used to correct digitizing errors, but care
+                   should be taken.
+
+    Arguments
+    ---------
+    tolerance -- float -- snapping tolerance in meters
+    arc -- bool -- If true, Arc Distance will be used instead of Euclidean
+
+    Returns
+    -------
+    generator -- each element is a new pysal.cg.Chain with corrected vertices.
+    """
+    kmtol = tolerance/1000.
+
+    data = numpy.concatenate([rec.vertices for rec in shp])
+    
+    if arc:
+        kd = pysal.cg.KDTree(data,distance_metric="Arc",radius = pysal.cg.sphere.RADIUS_EARTH_KM)
+    else:
+        kd = pysal.cg.KDTree(data)
+    q = kd.query_ball_tree(kd,kmtol)
+    ### Next three lines assert that snappings are mutual... if 1 snaps to 8, 8 must snap to 1.
+    for r,a in enumerate(q):
+        for o in a:
+            assert a==q[o]
+    ### non-mutual snapping can happen.
+    ### consider the three points, A (-1,0), B (0,0), C (1,0) and a snapping tolerance of 1.
+    ### A-> B
+    ### B-> A,C
+    ### C-> B
+    ### For now, try lowering adjusting the tolerance to avoid this.
+
+    data2 = numpy.empty_like(data)
+    for i,r in enumerate(q):
+        data2[i] = data[r].mean(0)
+    pos=0
+    for rec in shp:
+        vrts = rec.vertices
+        n = len(vrts)
+        nrec = pysal.cg.Chain(map(tuple,data2[pos:pos+n]))
+        pos+=n
+        yield nrec
+    
+def find_nodes(shp):
+    """
+    find_nodes -- Finds vertices in a line type shapefile that appear more than once and/or are end points of a line
+
+    Arguments
+    ---------
+    shp -- Shapefile Object -- Should be of type Line.
+
+    Returns
+    -------
+    set
+    """
+    node_count = {}
+    for road in shp:
+        vrts = road.vertices
+        for node in vrts:
+            if node not in node_count:
+                node_count[node] = 0
+            node_count[node] += 1
+        node_count[vrts[0]] += 1
+        node_count[vrts[-1]] += 1
+    return set([node for node,c in node_count.iteritems() if c > 1])
+
+def split_at_nodes(shp):
+    """
+    split_at_nodes -- Split line features at nodes
+
+    Arguments
+    ---------
+    shp -- list or shapefile -- Chain features to be split at common nodes.
+
+    Returns
+    -------
+    generator -- yields pysal.cg.Chain objects
+    """
+    nodes = find_nodes(shp)
+    nodeIds = list(nodes)
+    nodeIds.sort()
+    nodeIds = dict([(node,i) for i,node in enumerate(nodeIds)])
+    
+    for road in shp:
+        vrts = road.vertices
+        midVrts = set(road.vertices[1:-1]) #we know end points are nodes
+        midNodes = midVrts.intersection(nodes) # find any nodes in the middle of the feature.
+        midIdx = [vrts.index(node) for node in midNodes] # Get their indices
+        midIdx.sort()
+        if midIdx:
+            #print vrts
+            starts = [0]+midIdx
+            stops = [x+1 for x in midIdx]+[None]
+            for start,stop in zip(starts,stops):
+                feat = pysal.cg.Chain(vrts[start:stop])
+                rec = (nodeIds[feat.vertices[0]],nodeIds[feat.vertices[-1]],False)
+                yield feat,rec
+        else:
+            rec = (nodeIds[road.vertices[0]],nodeIds[road.vertices[-1]],False)
+            yield road,rec
+
+
+def createSpatialNetworkShapefile(inshp,outshp):
+    assert inshp.lower().endswith('.shp')
+    assert outshp.lower().endswith('.shp')
+    shp = pysal.open(inshp,'r')
+    snapped = list(snap_verts(shp,.001))
+    o = pysal.open(outshp,'w')
+    odb = pysal.open(outshp[:-4]+'.dbf','w')
+    odb.header = ["FNODE","TNODE","ONEWAY"]
+    odb.field_spec = [('N',20,0),('N',20,0),('L',1,0)]
+
+    new = list(split_at_nodes(snapped))
+    for feat,rec in new:
+        o.write(feat)
+        odb.write(rec)
+    o.close()
+    odb.close()
+    print "Split %d roads in %d network edges"%(len(shp),len(new))
+
+if __name__=='__main__':
+    createSpatialNetworkShapefile('beth_roads.shp','beth_network.shp')
+
diff --git a/pysal/contrib/spatialnet/eberly.shp b/pysal/contrib/spatialnet/eberly.shp
new file mode 100644
index 0000000..a6e289f
Binary files /dev/null and b/pysal/contrib/spatialnet/eberly.shp differ
diff --git a/pysal/contrib/spatialnet/eberly.shx b/pysal/contrib/spatialnet/eberly.shx
new file mode 100644
index 0000000..cc5763d
Binary files /dev/null and b/pysal/contrib/spatialnet/eberly.shx differ
diff --git a/pysal/contrib/spatialnet/spatialnet.py b/pysal/contrib/spatialnet/spatialnet.py
new file mode 100644
index 0000000..55ec17f
--- /dev/null
+++ b/pysal/contrib/spatialnet/spatialnet.py
@@ -0,0 +1,107 @@
+import pysal
+from pysal.cg.segmentLocator import Polyline_Shapefile_SegmentLocator
+import networkx
+
+EUCLIDEAN_DISTANCE = "Euclidean"
+ARC_DISTANCE = "Arc"
+
+class SpatialNetwork(object):
+    """
+    SpatialNetwork -- Represents a Spatial Network. 
+
+    A Spatial Network in PySAL is a graph whose nodes and edges are represented by geographic features 
+    such as Points for nodes and Lines for edges.
+    An example of a spatial network is a road network.
+
+    Arguments
+    ---------
+    shapefile -- Shapefile contains the geographic represention of the network.
+                 The shapefile must be a polyline type and the associated DBF MUST contain the following fields:
+                    FNODE -- source node -- ID of the source node, the first vertex in the polyline feature.
+                    TNODE -- destination node -- ID of the destination node, the last vertex in the polyline feature.
+                    ONEWAY -- bool -- If True, the edge will be marked oneway starting at FNODE and ending at TNODE
+    distance_metric -- EUCLIDEAN_DISTANCE or ARC_DISTANCE
+    
+    """
+    def __init__(self,shapefile,distance_metric=EUCLIDEAN_DISTANCE):
+        if issubclass(type(shapefile),basestring): #Path
+            self.shp = shp = pysal.open(shapefile,'r')
+        else:
+            raise TypeError,"Expecting a string, shapefile should the path to shapefile"
+        if shp.type != pysal.cg.shapes.Chain:
+            raise ValueError,"Shapefile must contain polyline features"
+        self.dbf = dbf = pysal.open(shapefile[:-4]+'.dbf','r')
+        header = dbf.header
+        if (('FNODE' not in header) or ('TNODE' not in header) or ('ONEWAY' not in header)):
+            raise ValueError,"DBF must contain: FNODE,TNODE,ONEWAY"
+        
+        oneway = [{'F':False,'T':True}[x] for x in dbf.by_col('ONEWAY')]
+        fnode = dbf.by_col('FNODE')
+        tnode = dbf.by_col('TNODE')
+        if distance_metric == EUCLIDEAN_DISTANCE:
+            lengths = [x.len for x in shp]
+        elif distance_metric == ARC_DISTANCE:
+            lengths = [x.arclen for x in shp]
+        else:
+            raise ValueError,"distance_metric must be either EUCLIDEAN_DISTANCE or ARC_DISTANCE"
+        self.lengths = lengths
+        if any(oneway):
+            G = networkx.MultiDiGraph()
+            #def isoneway(x):
+            #    return x[-1]
+            #def isnotoneway(x):
+            #    if(x[-1]):
+            #        return False
+            #    return True
+            #    
+            #A = filter(isoneway,zip(fnode,tnode,oneway))
+            #B = filter(isnotoneway,zip(fnode,tnode,oneway))
+            #C = filter(isnotoneway,zip(tnode,fnode,oneway))
+            #G.add_edges_from(A)
+            #G.add_edges_from(B)
+            #G.add_edges_from(C)
+        else:
+            G = networkx.MultiGraph()
+        #zip(fnode,tnode))
+        self.G = G
+        shp.seek(0)
+        self._locator = Polyline_Shapefile_SegmentLocator(shp)
+    def snap(self,pt):
+        i,p,j = self._locator.nearest(pt) #shpID,partID,segmentID
+        segment = self.shp[i].segments[p][j] #grab segment
+        d,pct = pysal.cg.get_segment_point_dist(segment,pt) #find pct along segment
+        x0,x1 = segment.p1[0],segment.p2[0]
+        x2 = x0 + (x1-x0)*pct # find x location of snap
+        y2 = segment.line.y(x2) # find y location of snap
+
+        #dbf = self.dbf
+        #rec = dict(zip(dbf.header,dbf[i][0]))
+        #edge = (rec['FNODE'],rec['TNODE'])
+        #TODO: Calculate location along edge and distance to edge"
+        #return edge
+        return x2,y2
+        
+    
+if __name__=='__main__':
+    import random
+    net = SpatialNetwork('beth_network.shp',ARC_DISTANCE)
+
+    n = 1000
+    minX,minY,maxX,maxY = net.shp.bbox
+    xRange = maxX-minX
+    yRange = maxY-minY
+    qpts = [(random.random(), random.random()) for i in xrange(n)]
+    qpts = [pysal.cg.Point((minX+(xRange*x),minY+(yRange*y))) for x,y in qpts]
+    o = pysal.open('random_qpts.shp','w')
+    for p in qpts:
+        o.write(p)
+    o.close()
+    o = pysal.open('random_qpts_snapped.shp','w')
+    for qpt in qpts:
+        spt = net.snap(qpt)
+        o.write(pysal.cg.Chain([qpt,spt]))
+    o.close()
+    
+
+
+
diff --git a/pysal/contrib/spatialnet/util.py b/pysal/contrib/spatialnet/util.py
new file mode 100644
index 0000000..c016329
--- /dev/null
+++ b/pysal/contrib/spatialnet/util.py
@@ -0,0 +1,221 @@
+"""
+Utility module for network contrib 
+"""
+
+import pysal as ps
+import networkx as nx
+import numpy as np
+
+__author__ = "Serge Rey <sjsrey at gmail.com>"
+
+def w2dg(w):
+    """
+    Return a networkx directed graph from a PySAL W object
+
+
+    Parameters
+    ----------
+
+    w: Weights 
+
+
+    Returns
+    -------
+    G: A networkx directed graph
+
+
+    Example
+    ------
+
+    >>> import networkx as nx
+    >>> import pysal as ps
+    >>> w = ps.lat2W()
+    >>> guw = w2dg(w)
+    >>> guw.in_degree()
+    {0: 2, 1: 3, 2: 3, 3: 3, 4: 2, 5: 3, 6: 4, 7: 4, 8: 4, 9: 3, 10: 3, 11: 4, 12: 4, 13: 4, 14: 3, 15: 3, 16: 4, 17: 4, 18: 4, 19: 3, 20: 2, 21: 3, 22: 3, 23: 3, 24: 2}
+    >>> dict([(k,len(w.neighbors[k])) for k in w.neighbors])
+    {0: 2, 1: 3, 2: 3, 3: 3, 4: 2, 5: 3, 6: 4, 7: 4, 8: 4, 9: 3, 10: 3, 11: 4, 12: 4, 13: 4, 14: 3, 15: 3, 16: 4, 17: 4, 18: 4, 19: 3, 20: 2, 21: 3, 22: 3, 23: 3, 24: 2}
+    >>> 
+    """
+
+    w_l = [(i,j) for i in w.neighbors for j in w[i]]
+    G = nx.DiGraph()
+    G.add_edges_from(w_l)
+    return G
+
+def w2dwg(w):
+    """
+    Return a directed, weighted graph from a PySAL W object
+
+
+    Parameters
+    ----------
+
+    w: Weights 
+
+
+    Returns
+    -------
+    G: A networkx directed, weighted graph
+
+
+    Example
+    -------
+    >>> import networkx as nx
+    >>> import pysal as ps
+    >>> w = ps.lat2W()
+    >>> w.transform = 'r'
+    >>> gw = w2dwg(w)
+    >>> gw.get_edge_data(0,1)
+    {'weight': 0.5}
+    >>> gw.get_edge_data(1,0)
+    {'weight': 0.33333333333333331}
+    """
+
+    w_l = [(i,j,w[i][j]) for i in w.neighbors for j in w[i]]
+    G = nx.DiGraph() # allow for asymmetries in weights
+    G.add_weighted_edges_from(w_l)
+    return G
+
+
+def dwg2w(g, weight_name = 'weight'):
+    """
+    Returns a PySAL W object from a directed-weighted graph
+
+    Parameters
+    ----------
+
+    g: networkx digraph
+
+    weight_name: name of weight attribute of g
+
+    Returns
+    -------
+    w: PySAL W 
+
+    Example
+    -------
+    >>> w = ps.lat2W()
+    >>> w.transform = 'r'
+    >>> g = w2dwg(w)
+    >>> w1 = dwg2w(g)
+    >>> w1.n
+    25
+    >>> w1.neighbors[0]
+    [1, 5]
+    >>> w1.neighbors[1]
+    [0, 2, 6]
+    >>> w1.weights[0]
+    [0.5, 0.5]
+    >>> w1.weights[1]
+    [0.33333333333333331, 0.33333333333333331, 0.33333333333333331]
+    """
+
+    neighbors = {}
+    weights = {}
+    for node in g.nodes_iter():
+        neighbors[node] = []
+        weights[node] = []
+        for neighbor in g.neighbors_iter(node):
+            neighbors[node].append(neighbor)
+            weight = g.get_edge_data(node,neighbor)
+            if weight:
+                weights[node].append(weight[weight_name])
+            else:
+                weights[node].append(1)
+    return ps.W(neighbors=neighbors, weights=weights)
+
+
+
+
+def edge2w(edgelist, nodetype=str):
+    """
+    Create a PySAL W object from an edgelist
+
+    Parameters
+    ----------
+
+    edge_file: file with edgelist
+
+    nodetype: type for node (str, int, float)
+
+
+    Returns
+    -------
+    W: PySAL W
+
+
+    Example
+    -------
+    >>> lines = ["1 2", "2 3", "3 4", "4 5"]
+    >>> w = edge2w(lines)
+    >>> w.n
+    5
+    >>> w.neighbors["2"]
+    ['1', '3']
+
+    >>> w = edge2w(lines, nodetype=int)
+    >>> w.neighbors[2]
+    [1, 3]
+    >>> lines = ["1 2 {'weight':1.0}", "2 3 {'weight':0.5}", "3 4 {'weight':3.0}"] 
+    >>> w = edge2w(lines, nodetype=int)
+    >>> w.neighbors[2]
+    [1, 3]
+    >>> w.weights[2]
+    [1.0, 0.5]
+
+    """
+    G = nx.parse_edgelist(edgelist, nodetype=nodetype)
+    return dwg2w(G)
+
+    
+
+def adjl2w(adjacency_list, nodetype=str):
+    """
+    Create a PySAL W object from an adjacency list file
+
+    Parameters
+    ----------
+
+    adjacency_list: list of adjacencies
+                    for directed graphs list only outgoing adjacencies
+
+    nodetype: type for node (str, int, float)
+
+
+    Returns
+    -------
+    W: PySAL W
+
+
+    Example
+    -------
+    >>> al = [[1], [0,2], [1,3], [2]]
+    >>> w = adjl2w(al)
+    >>> w.n
+    4
+    >>> w.neighbors['0']
+    ['1']
+    >>> w = adjl2w(al, nodetype=int)
+    >>> w.n
+    4
+    >>> w.neighbors[0]
+    [1]
+
+
+    """
+
+    adjacency_list = [ map(nodetype, neighs) for neighs in adjacency_list]
+    return ps.W(dict([(nodetype(i),neighs) for i,neighs in enumerate(adjacency_list)]))
+
+                       
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod()
+
+
+
+
+
+
diff --git a/pysal/contrib/viz/__init__.py b/pysal/contrib/viz/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/viz/mapping.py b/pysal/contrib/viz/mapping.py
new file mode 100644
index 0000000..9503ec1
--- /dev/null
+++ b/pysal/contrib/viz/mapping.py
@@ -0,0 +1,598 @@
+"""
+Choropleth mapping using PySAL and Matplotlib
+
+ToDo:
+    * map_line_shp, map_point_shp should take a shp object not a shp_link
+    * Same for map_poly_shp(_lonlat)
+
+"""
+
+__author__ = "Sergio Rey <sjsrey at gmail.com>", "Dani Arribas-Bel <daniel.arribas.bel at gmail.com"
+
+
+import pandas as pd
+import pysal as ps
+import numpy as np
+import  matplotlib.pyplot as plt
+from matplotlib import colors as clrs
+import matplotlib as mpl
+from matplotlib.pyplot import fill, text
+from matplotlib import cm
+from matplotlib.patches import Polygon
+from matplotlib.path import Path
+from matplotlib.collections import LineCollection, PathCollection, PolyCollection, PathCollection, PatchCollection
+
+def map_point_shp(shp, which='all'):
+    '''
+    Create a map object from a point shape
+    ...
+
+    Arguments
+    ---------
+
+    shp             : iterable
+                      PySAL point iterable with the attribute `bbox` (e.g.
+                      shape object from `ps.open` a poly shapefile)
+    which           : str/list
+                      'all' (default) to plot every point, or a list of
+                      booleans selecting which points to include
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the points from the shape
+
+    '''
+    pts = []
+    if which == 'all':
+        # Include every point in the shapefile.
+        for pt in shp:
+                pts.append(pt)
+    else:
+        # Filter points by the boolean mask in `which` (aligned by order).
+        for inwhich, pt in zip(which, shp):
+            if inwhich:
+                    pts.append(pt)
+    pts = np.array(pts)
+    # Scatter plot of x, y columns; returns a PathCollection.
+    sc = plt.scatter(pts[:, 0], pts[:, 1])
+    # Attach axes scaled to the shapefile bounding box (side effect only).
+    _ = _add_axes2col(sc, shp.bbox)
+    return sc
+
+def map_line_shp(shp, which='all'):
+    '''
+    Create a map object from a line shape
+    ...
+
+    Arguments
+    ---------
+
+    shp             : iterable
+                      PySAL line iterable with the attribute `bbox` (e.g.
+                      shape object from `ps.open` a poly shapefile)
+    which           : str/list
+                      'all' (default) to plot every line, or a list of
+                      booleans selecting which lines to include
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the lines from the shape
+                      This includes the attribute `shp2dbf_row` with the
+                      cardinality of every line to its row in the dbf
+                      (zero-offset)
+
+    '''
+    patches = []
+    rows = []
+    i = 0
+    if which == 'all':
+        # One entry per part; multi-part lines share the same row index i.
+        for shape in shp:
+            for xy in shape.parts:
+                patches.append(xy)
+                rows.append(i)
+            i += 1
+    else:
+        for inwhich, shape in zip(which, shp):
+            if inwhich:
+                for xy in shape.parts:
+                    patches.append(xy)
+                    rows.append(i)
+                # NOTE(review): i is only incremented for included shapes,
+                # so `rows` indexes the filtered subset rather than the
+                # original dbf row — confirm this is intended.
+                i += 1
+    lc = LineCollection(patches)
+    # Attach axes scaled to the shapefile bounding box (side effect only).
+    _ = _add_axes2col(lc, shp.bbox)
+    # Map each collection element back to its source row (see note above).
+    lc.shp2dbf_row = rows
+    return lc
+
+def map_poly_shp(shp, which='all'):
+    '''
+    Create a map object from a polygon shape
+    ...
+
+    Arguments
+    ---------
+
+    shp             : iterable
+                      PySAL polygon iterable with the attribute `bbox` (e.g.
+                      shape object from `ps.open` a poly shapefile)
+    which           : str/list
+                      List of booleans for which polygons of the shapefile to
+                      be included (True) or excluded (False)
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the polygons from the shape
+                      This includes the attribute `shp2dbf_row` with the
+                      cardinality of every polygon to its row in the dbf
+                      (zero-offset)
+
+    '''
+    patches = []
+    rows = []
+    i = 0
+    if which == 'all':
+        # One patch per ring; multi-ring polygons share the same row index i.
+        for shape in shp:
+            for ring in shape.parts:
+                xy = np.array(ring)
+                patches.append(xy)
+                rows.append(i)
+            i += 1
+    else:
+        for inwhich, shape in zip(which, shp):
+            if inwhich:
+                for ring in shape.parts:
+                    xy = np.array(ring)
+                    patches.append(xy)
+                    rows.append(i)
+                # NOTE(review): i is only incremented for included shapes,
+                # so `rows` indexes the filtered subset rather than the
+                # original dbf row — confirm this is intended.
+                i += 1
+    pc = PolyCollection(patches)
+    # Attach axes scaled to the shapefile bounding box (side effect only).
+    _ = _add_axes2col(pc, shp.bbox)
+    # Map each patch back to its source row (see note above).
+    pc.shp2dbf_row = rows
+    return pc
+
+def setup_ax(polyCos_list, ax=None):
+    '''
+    Generate an Axes object for a list of collections
+    ...
+
+    Arguments
+    ---------
+    polyCos_list: list
+                  List of Matplotlib collections (e.g. an object from
+                  map_poly_shp)
+    ax          : AxesSubplot
+                  (Optional) Pre-existing axes to which append the collections
+                  and setup
+
+    Returns
+    -------
+    ax          : AxesSubplot
+                  Rescaled axes object with the collection and without frame
+                  or X/Yaxis
+    '''
+    if not ax:
+        ax = plt.axes()
+    # Determine bboxes of new axes
+    # Grow [min, max] limits to the union of every collection's own axes
+    # (set previously by _add_axes2col from each shapefile's bbox).
+    xlim = [np.inf, -np.inf]
+    ylim = [np.inf, -np.inf]
+    for polyCo in polyCos_list:
+        axs = polyCo.get_axes()
+        xmin, xmax = axs.get_xlim()
+        ymin, ymax = axs.get_ylim()
+        if xmin < xlim[0]:
+            xlim[0] = xmin
+        if xmax > xlim[1]:
+            xlim[1] = xmax
+        if ymin < ylim[0]:
+            ylim[0] = ymin
+        if ymax > ylim[1]:
+            ylim[1] = ymax
+    ax.set_xlim(xlim)
+    ax.set_ylim(ylim)
+    # Resize bbox of each coll and add it to axes
+    for polyCo in polyCos_list:
+        polyCo.get_axes().set_xlim(ax.get_xlim())
+        polyCo.get_axes().set_ylim(ax.get_ylim())
+        ax.add_collection(polyCo)
+    # Hide frame and both axes so only the map geometry is visible.
+    ax.set_frame_on(False)
+    ax.axes.get_yaxis().set_visible(False)
+    ax.axes.get_xaxis().set_visible(False)
+    return ax
+
+def _add_axes2col(col, bbox):
+    """
+    Adds (inplace) axes with proper limits to a poly/line collection. This is
+    still pretty much a hack! Ideally, you don't have to setup a new figure
+    for this
+    ...
+
+    Arguments
+    ---------
+    col     : Collection
+    bbox    : list
+              Bounding box as [xmin, ymin, xmax, ymax]
+
+    Returns
+    -------
+    None (the collection is modified in place)
+    """
+    # Temporary throwaway figure: we only need it to create an Axes whose
+    # limits match the bbox, which is then attached to the collection.
+    tf = plt.figure()
+    ax = plt.axes()
+    minx, miny, maxx, maxy = bbox
+    ax.set_xlim((minx, maxx))
+    ax.set_ylim((miny, maxy))
+    col.set_axes(ax)
+    # Close the scratch figure so it never gets displayed.
+    plt.close(tf)
+    return None
+
+def plot_poly_lines(shp_link,  savein=None, poly_col='none'):
+    '''
+    Quick plotting of shapefiles
+    ...
+
+    Arguments
+    ---------
+    shp_link        : str
+                      Path to shapefile
+    savein          : str
+                      Path to png file where to dump the plot. Optional,
+                      defaults to None
+    poly_col        : str
+                      Face color of polygons
+    '''
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+    shp = ps.open(shp_link)
+    patchco = map_poly_shp(shp)
+    # NOTE(review): `poly_col` is currently ignored — the face color is
+    # hard-coded to 'none' here. Presumably this should be
+    # set_facecolor(poly_col); confirm intended behavior.
+    patchco.set_facecolor('none')
+    patchco.set_edgecolor('0.8')
+    ax = setup_ax([patchco], ax)
+    # Either save to disk or show interactively, never both.
+    if savein:
+        plt.savefig(savein)
+    else:
+        plt.show()
+    return None
+
+def plot_choropleth(shp_link, values, type, k=5, cmap=None, \
+        shp_type='poly', sample_fisher=True, title='', \
+        savein=None, figsize=None, dpi=300):
+    '''
+    Wrapper to quickly create and plot from a lat/lon shapefile
+    ...
+
+    Arguments
+    ---------
+
+    shp_link        : str
+                      Path to shapefile
+    values          : array
+                      Numpy array with values to map
+    type            : str
+                      Type of choropleth. Supported methods:
+                        * 'classless'
+                        * 'unique_values'
+                        * 'quantiles'
+                        * 'fisher_jenks'
+                        * 'equal_interval'
+    k               : int
+                      Number of bins to classify values in and assign a color
+                      to (defaults to 5)
+    cmap            : str
+                      Matplotlib coloring scheme. If None (default), uses:
+                        * 'classless': 'Greys'
+                        * 'unique_values': 'Paired'
+                        * 'quantiles': 'hot_r'
+                        * 'fisher_jenks': 'hot_r'
+                        * 'equal_interval': 'hot_r'
+    shp_type        : str
+                      'poly' (default) or 'line', for the kind of shapefile
+                      passed
+    sample_fisher   : Boolean
+                      Defaults to True, controls whether Fisher-Jenks
+                      classification uses a sample (faster) or the entire
+                      array of values. Ignored unless type='fisher_jenks'
+    title           : str
+                      Optional string for the title
+    savein          : str
+                      Path to png file where to dump the plot. Optional,
+                      defaults to None
+    figsize         : tuple
+                      Figure dimensions
+    dpi             : int
+                      resolution of graphic file
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the polygons from the shapefile and
+                      unique value coloring
+
+    '''
+    # NOTE(review): the parameter `type` shadows the builtin of the same
+    # name; kept for interface compatibility.
+    shp = ps.open(shp_link)
+    # NOTE(review): if shp_type is neither 'poly' nor 'line', map_obj is
+    # never assigned and the code below raises NameError — confirm whether
+    # an explicit error is preferable.
+    if shp_type == 'poly':
+        map_obj = map_poly_shp(shp)
+    if shp_type == 'line':
+        map_obj = map_line_shp(shp)
+
+    # Dispatch to the appropriate medium-level coloring routine, filling
+    # in the per-method default colormap when none was supplied.
+    if type == 'classless':
+        if not cmap:
+            cmap = 'Greys'
+        map_obj = base_choropleth_classless(map_obj, values, cmap=cmap)
+    if type == 'unique_values':
+        if not cmap:
+            cmap = 'Paired'
+        map_obj = base_choropleth_unique(map_obj, values, cmap=cmap)
+    if type == 'quantiles':
+        if not cmap:
+            cmap = 'hot_r'
+        map_obj = base_choropleth_classif(map_obj, values, k=k, \
+                classification='quantiles', cmap=cmap)
+    if type == 'fisher_jenks':
+        if not cmap:
+            cmap = 'hot_r'
+        map_obj = base_choropleth_classif(map_obj, values, k=k, \
+                classification='fisher_jenks', cmap=cmap, \
+                sample_fisher=sample_fisher)
+    if type == 'equal_interval':
+        if not cmap:
+            cmap = 'hot_r'
+        map_obj = base_choropleth_classif(map_obj, values, k=k, \
+                classification='equal_interval', cmap=cmap)
+
+    fig = plt.figure(figsize=figsize)
+    ax = fig.add_subplot(111)
+    ax = setup_ax([map_obj], ax)
+    if title:
+        ax.set_title(title)
+    # Classified maps get a horizontal colorbar labeled with the class
+    # boundaries (rounded for readability).
+    if type=='quantiles' or type=='fisher_jenks' or type=='equal_interval':
+        cmap = map_obj.get_cmap()
+        norm = map_obj.norm
+        boundaries = np.round(map_obj.norm.boundaries, decimals=3)
+        cbar = plt.colorbar(map_obj, cmap=cmap, norm=norm, boundaries=boundaries, \
+                ticks=boundaries, orientation='horizontal', shrink=0.5)
+    # Either save to disk or show interactively, never both.
+    if savein:
+        plt.savefig(savein, dpi=dpi)
+    else:
+        plt.show()
+    return None
+
+
+def base_choropleth_classless(map_obj, values, cmap='Greys' ):
+    '''
+    Set classless coloring from a map object
+    ...
+
+    Arguments
+    ---------
+
+    map_obj         : Poly/Line collection
+                      Output from map_X_shp
+    values          : array
+                      Numpy array with values to map
+    cmap            : str
+                      Matplotlib coloring scheme
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the polygons from the shapefile and
+                      classless coloring
+
+    '''
+    cmap = cm.get_cmap(cmap)
+    map_obj.set_cmap(cmap)
+    if isinstance(map_obj, mpl.collections.PolyCollection):
+        # Repeat each dbf value for every ring of its (multi-part) polygon.
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_array(pvalues)
+        map_obj.set_edgecolor('k')
+    elif isinstance(map_obj, mpl.collections.LineCollection):
+        # Same expansion for multi-part lines.
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_array(pvalues)
+    elif isinstance(map_obj, mpl.collections.PathCollection):
+        # Points are assumed to map 1:1 to values; synthesize the row
+        # mapping if the collection did not come from map_point_shp.
+        if not hasattr(map_obj, 'shp2dbf_row'):
+            map_obj.shp2dbf_row = np.arange(values.shape[0])
+        map_obj.set_array(values)
+    return map_obj
+
+def base_choropleth_unique(map_obj, values,  cmap='hot_r'):
+    '''
+    Set coloring based on unique values from a map object
+    ...
+
+    Arguments
+    ---------
+
+    map_obj         : Poly/Line collection
+                      Output from map_X_shp
+    values          : array
+                      Numpy array with values to map
+    cmap            : str
+                      Matplotlib coloring scheme
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the polygons from the shapefile and
+                      unique value coloring
+
+    '''
+    uvals = np.unique(values)
+    colormap = getattr(plt.cm, cmap)
+    # Sample evenly spaced colors, one per unique value.
+    colors = [colormap(i) for i in np.linspace(0, 0.9, len(uvals))]
+    # NOTE(review): the shuffle makes the value->color assignment
+    # nondeterministic across runs — confirm this is desired.
+    colors = np.random.permutation(colors)
+    colormatch = {val: col for val, col in zip(uvals, colors)}
+
+    if isinstance(map_obj, mpl.collections.PolyCollection):
+        # Repeat each dbf value for every ring of its (multi-part) polygon.
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_color([colormatch[i] for i in pvalues])
+        map_obj.set_edgecolor('k')
+    elif isinstance(map_obj, mpl.collections.LineCollection):
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_color([colormatch[i] for i in pvalues])
+    elif isinstance(map_obj, mpl.collections.PathCollection):
+        # NOTE(review): points use set_array (colormap scaling) instead of
+        # the colormatch lookup used for polys/lines — verify consistency.
+        if not hasattr(map_obj, 'shp2dbf_row'):
+            map_obj.shp2dbf_row = np.arange(values.shape[0])
+        map_obj.set_array(values)
+    return map_obj
+
+def base_choropleth_classif(map_obj, values, classification='quantiles', \
+        k=5, cmap='hot_r', sample_fisher=True):
+    '''
+    Set coloring based on different classification
+    methods
+    ...
+
+    Arguments
+    ---------
+
+    map_obj         : Poly/Line collection
+                      Output from map_X_shp
+    values          : array
+                      Numpy array with values to map
+    classification  : str
+                      Classification method to use. Options supported:
+                        * 'quantiles' (default)
+                        * 'fisher_jenks'
+                        * 'equal_interval'
+
+    k               : int
+                      Number of bins to classify values in and assign a color
+                      to
+    cmap            : str
+                      Matplotlib coloring scheme
+    sample_fisher   : Boolean
+                      Defaults to True, controls whether Fisher-Jenks
+                      classification uses a sample (faster) or the entire
+                      array of values. Ignored if 'classification'!='fisher_jenks'
+
+    Returns
+    -------
+
+    map             : PatchCollection
+                      Map object with the polygons from the shapefile and
+                      unique value coloring
+
+    '''
+    # NOTE(review): `classification` is rebound from a string to a
+    # classifier object inside the matching branch; the later string
+    # comparisons only work because the object never equals the strings.
+    # If an unknown method name is passed, `boundaries` is never assigned
+    # and the code below raises NameError — confirm intended behavior.
+    if classification == 'quantiles':
+        classification = ps.Quantiles(values, k)
+        boundaries = classification.bins.tolist()
+
+    if classification == 'equal_interval':
+        classification = ps.Equal_Interval(values, k)
+        boundaries = classification.bins.tolist()
+
+    if classification == 'fisher_jenks':
+        if sample_fisher:
+            # Sampled variant trades exactness for speed on large arrays.
+            classification = ps.esda.mapclassify.Fisher_Jenks_Sampled(values,k)
+        else:
+            classification = ps.Fisher_Jenks(values,k)
+        boundaries = classification.bins[:]
+
+    map_obj.set_alpha(0.4)
+
+    # Discrete colormap with one color per class (k+1 levels).
+    cmap = cm.get_cmap(cmap, k+1)
+    map_obj.set_cmap(cmap)
+
+    # Prepend the minimum so boundaries bracket the full value range.
+    boundaries.insert(0, values.min())
+    norm = clrs.BoundaryNorm(boundaries, cmap.N)
+    map_obj.set_norm(norm)
+
+    if isinstance(map_obj, mpl.collections.PolyCollection):
+        # Repeat each dbf value for every ring of its (multi-part) polygon.
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_array(pvalues)
+        map_obj.set_edgecolor('k')
+    elif isinstance(map_obj, mpl.collections.LineCollection):
+        pvalues = _expand_values(values, map_obj.shp2dbf_row)
+        map_obj.set_array(pvalues)
+    elif isinstance(map_obj, mpl.collections.PathCollection):
+        # Points are assumed to map 1:1 to values.
+        if not hasattr(map_obj, 'shp2dbf_row'):
+            map_obj.shp2dbf_row = np.arange(values.shape[0])
+        map_obj.set_array(values)
+    return map_obj
+
+def _expand_values(values, shp2dbf_row):
+    '''
+    Expand series of values based on dbf order to polygons (to allow plotting
+    of multi-part polygons).
+    ...
+
+    NOTE: this is done externally so it's easy to drop dependency on Pandas
+    when necessary/time is available.
+
+    Arguments
+    ---------
+    values          : ndarray
+                      Values aligned with dbf rows to be plotted (e.g.
+                      choropleth)
+    shp2dbf_row    : list/sequence
+                      Cardinality list of polygon to dbf row as provided by
+                      map_poly_shp
+
+    Returns
+    -------
+    pvalues         : ndarray
+                      Values repeated enough times in the right order to be
+                      passed from dbf to polygons
+    '''
+    # Reindexing by shp2dbf_row repeats each row's value once per
+    # polygon part (duplicate indices are allowed in reindex).
+    pvalues = pd.Series(values, index=np.arange(values.shape[0]))\
+            .reindex(shp2dbf_row)#Expand values to every poly
+    return pvalues.values
+
+    
+
+if __name__ == '__main__':
+
+    # Ad-hoc manual test harness: pick a demo dataset by setting `data`
+    # to 'poly', 'point' or 'line'; 'none' skips the shapefile demos and
+    # only runs the PathCollection experiment at the bottom.
+    data = 'none'
+    if data == 'poly':
+        shp_link = ps.examples.get_path("sids2.shp")
+        shp_link = ps.examples.get_path("Polygon.shp")
+        dbf = ps.open(shp_link.replace('.shp', '.dbf'))
+        '''
+        values = np.array(dbf.by_col("SIDR74"))
+        #values[: values.shape[0]/2] = 1
+        #values[values.shape[0]/2: ] = 0
+        '''
+        patchco = map_poly_shp(ps.open(shp_link))
+        #patchco = base_choropleth_classif(shp_link, np.random.random(3))
+        #patchco = plot_choropleth(shp_link, np.random.random(3), 'quantiles')
+
+    if data == 'point':
+        shp_link = ps.examples.get_path("burkitt.shp")
+        dbf = ps.open(shp_link.replace('.shp', '.dbf'))
+        patchco = map_point_shp(ps.open(shp_link))
+
+    if data == 'line':
+        shp_link = ps.examples.get_path("eberly_net.shp")
+        dbf = ps.open(shp_link.replace('.shp', '.dbf'))
+        values = np.array(dbf.by_col('TNODE'))
+        mobj = map_line_shp(ps.open(shp_link))
+        patchco = base_choropleth_unique(mobj, values)
+
+    '''
+    which = values > 1.
+
+    for shp_link in [shp_link]:
+
+        fig = plt.figure()
+        patchco = map_poly_shp(shp_link)
+        patchcoB = map_poly_shp(shp_link, which=which)
+        patchco.set_facecolor('none')
+        ax = setup_ax([patchco, patchcoB])
+        fig.add_axes(ax)
+        plt.show()
+        break
+    '''
+
+    # Experiment: build a PathCollection for point markers by hand (see
+    # the "Caution note on plotting points" in mapping_guide.ipynb).
+    xy = (((0, 0), (0, 0)), ((2, 1), (2, 1)), ((3, 1), (3, 1)), ((2, 5), (2, 5)))
+    xy = np.array([[10, 30], [20, 20]])
+    markerobj = mpl.markers.MarkerStyle('o')
+    path = markerobj.get_path().transformed(
+            markerobj.get_transform())
+    scales = np.array([2, 2])
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+    pc = PathCollection((path,), scales, offsets=xy, \
+            facecolors='r', transOffset=mpl.transforms.IdentityTransform())
+    #pc.set_transform(mpl.transforms.IdentityTransform())
+    #_ = _add_axes2col(pc, [0, 0, 5, 5])
+    ax.add_collection(pc)
+    fig.add_axes(ax)
+    #ax = setup_ax([pc], ax)
+    plt.show()
+
diff --git a/pysal/contrib/viz/mapping_guide.ipynb b/pysal/contrib/viz/mapping_guide.ipynb
new file mode 100644
index 0000000..07ec8cf
--- /dev/null
+++ b/pysal/contrib/viz/mapping_guide.ipynb
@@ -0,0 +1,335 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:1ec0d41be57d08ae696d2345210c6409a6c89d84f6beeea54563187008aa78d0"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "import pysal as ps\n",
+      "import random as rdm\n",
+      "from pysal.contrib.viz import mapping as maps\n",
+      "%matplotlib inline\n",
+      "from pylab import *"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 6
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "# Guide for the `mapping` module in `PySAL`\n",
+      "\n",
+      "Contributors:\n",
+      "\n",
+      "* Dani Arribas-Bel `<daniel.arribas.bel at gmail.com>`\n",
+      "\n",
+      "\n",
+      "This document describes the main structure, components and usage of the mapping module in `PySAL`. It is organized around three main layers:\n",
+      "\n",
+      "* A lower-level layer that reads polygon, line and point shapefiles and returns a Matplotlib collection.\n",
+      "* A medium-level layer that performs some usual transformations on a Matplotlib object (e.g. color code polygons according to a vector of values).\n",
+      "* A higher-level layer intended for end-users for particularly useful cases and style preferences pre-defined (e.g. Create a choropleth)."
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Lower-level component\n",
+      "\n",
+      "This includes basic functionality to read spatial data from a file (currently only shapefiles supported) and produce rudimentary Matplotlib objects. The main methods are:"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `map_poly_shape`: to read in polygon shapefiles"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `map_line_shape`: to read in line shapefiles"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `map_point_shape`: to read in point shapefiles"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "These methods all support an option to subset the observations to be plotted (very useful when missing values are present). They can also be overlaid and combined by using the `setup_ax` function. The resulting object is very basic but also very flexible so, for minds used to matplotlib, this should be good news as it allows you to modify pretty much any property and attribute."
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "### Example"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "shp_link = ps.examples.get_path('columbus.shp')\n",
+      "shp = ps.open(shp_link)\n",
+      "some = [bool(rdm.getrandbits(1)) for i in ps.open(shp_link)]\n",
+      "\n",
+      "fig = figure()\n",
+      "\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_edgecolor('0.8')\n",
+      "some = maps.map_poly_shp(shp, which=some)\n",
+      "some.set_alpha(0.5)\n",
+      "some.set_linewidth(0.)\n",
+      "cents = np.array([poly.centroid for poly in ps.open(shp_link)])\n",
+      "pts = scatter(cents[:, 0], cents[:, 1])\n",
+      "pts.set_color('red')\n",
+      "\n",
+      "ax = maps.setup_ax([base, some, pts])\n",
+      "fig.add_axes(ax)\n",
+      "show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADtCAYAAAAcNaZ2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXd8ZHd57/8+dZpGfaVVWa12tdXdBhscG2IbsDHBYGN6\nMwTCvYSQcBNI7g0k/G5Iwu/+SEgIkFxKggkl9GYwxsa9G9zX9mqrVrvqZXo79ffH0Yw0qxlpRpqR\nVrvf9+s1L2mOTpvRzOd8z/N9ns8jua7rIhAIBII1QV7vExAIBIIzCSG6AoFAsIYI0RUIBII1RIiu\nQCAQrCFCdAUCgWANEaIrEAgEa4gQXYFAIFhDhOgKBALBGiJEVyAQCNYQIboCgUCwhgjRFQgEgjVE\niK5AIBCsIUJ0BQKBYA0RoisQCARriBBdgUAgWEOE6AoEAsEaIkRXIBAI1hAhugKBQLCGqOt9AgLB\nannuuedQ1 [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e381a90>"
+       ]
+      }
+     ],
+     "prompt_number": 7
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Medium-level component\n",
+      "\n",
+      "This layer comprises functions that perform usual transformations on matplotlib objects, such as color coding objects (points, polygons, etc.) according to a series of values. This includes the following methods:"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `base_choropleth_classless`"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `base_choropleth_unique`\n",
+      "\n",
+      "### Example"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "net_link = ps.examples.get_path('eberly_net.shp')\n",
+      "net = ps.open(net_link)\n",
+      "values = np.array(ps.open(net_link.replace('.shp', '.dbf')).by_col('TNODE'))\n",
+      "\n",
+      "pts_link = ps.examples.get_path('eberly_net_pts_onnetwork.shp')\n",
+      "pts = ps.open(pts_link)\n",
+      "\n",
+      "fig = figure()\n",
+      "\n",
+      "netm = maps.map_line_shp(net)\n",
+      "netc = maps.base_choropleth_unique(netm, values)\n",
+      "\n",
+      "ptsm = maps.map_point_shp(pts)\n",
+      "ptsm = maps.base_choropleth_classif(ptsm, values)\n",
+      "ptsm.set_alpha(0.5)\n",
+      "ptsm.set_linewidth(0.)\n",
+      "\n",
+      "ax = maps.setup_ax([netc, ptsm])\n",
+      "fig.add_axes(ax)\n",
+      "show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADtCAYAAAAcNaZ2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VGX2wPHvTCrpgZBQA4EAoRMgdBt2xbKi2BDL4i4q\n/iwIFhSxsHbFlVWxwLqyrqBiV3YtQUqQ3kLHBAgtISGTPkkmM78/bkJNMpmZe+femZzP8/AYMnfu\nPWA4uTnvuec1ORwOB0IIIbzCrHcAQgjRnEjSFUIIL5KkK4QQXiRJVwghvEiSrhBCeJEkXSGE8CJJ\nuj5mw7p1vPNEN46s7c3SX67mlrFj9Q5JCOGCQL0DEK4Z2DOE3pfn8uriC3noqRA6dpqhd0hCCBeY\n5OEIH1JTAVvSoP1UiL8dmAlcBQzSNy4hRJNJecGX7J8GYX2g9YTaTwwFVusZkRDCRVJe8BXHv4fC\nb6H/JjCZa [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e08da50>"
+       ]
+      }
+     ],
+     "prompt_number": 8
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "\n",
+      "* `base_choropleth_classif`"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Higher-level component\n",
+      "\n",
+      "This currently includes the following end-user functions:"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `plot_poly_lines`: very quick shapefile plotting."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "maps.plot_poly_lines(ps.examples.get_path('columbus.shp'))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADtCAYAAAAcNaZ2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXlX4tjWxp/MIcyK4DyWljV09+33fv+P0LfqdlVXlRMq\nCorMUwhkev9wnVxQVIYEgp7fWixHIEDyZGefvZ/N2LZtg0KhUChTgZ31BlAoFMpbgoouhUKhTBEq\nuhQKhTJFqOhSKBTKFKGiS6FQKFOEii6FQqFMEX7WG0ChjEK328WPHz/w4cMHWJYF27ZH+losFrG3\nt4dQKDTrl0J5o1DRpcwVgiDAsizwPA+O40a+f71eB8MwHmwZhTIcNL1AmSsYhoEkSeh0OmPd37Zt\nKrqUmUJFlzJ3TCq6FMosoekFytwxiegCeBTpEiEmUTCNhCleQkWXMnfIsgxVVce6L8uy+PXrF4B7\nke2NfBmGg [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e0b7bd0>"
+       ]
+      }
+     ],
+     "prompt_number": 9
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "* `plot_choropleth`: for quick plotting of several types of choropleths."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "shp_link = ps.examples.get_path('columbus.shp')\n",
+      "values = np.array(ps.open(ps.examples.get_path('columbus.dbf')).by_col('HOVAL'))\n",
+      "\n",
+      "types = ['classless', 'unique_values', 'quantiles', 'fisher_jenks', 'equal_interval']\n",
+      "for typ in types:\n",
+      "    maps.plot_choropleth(shp_link, values, typ, title=typ)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAD8CAYAAADUv3dIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8U9X//1+52UnTNG0pbZPuRQeUloKAggwLKAooQ0AQ\nhI/AF+GD/chQhqgoKoJ+UFFQEVRwAR8Q2aCMiuxNS4ECpbSlpTu7GTe/P/rI/VE6s1s5z8fjPpLc\ncc47afrKuee8B8tisVhAIBAIBLdAedoAAoFAeJggoksgEAhuhIgugUAguBEiugQCgeBGiOgSCASC\nGyGiSyAQCG6EiC7BLaxfvx69evVyWft9+vTB2rVrXdY+geAsiOgS/hGwWCywWCxPm0EgNAsRXQKB\nQHAjRHQJTufOnTt47rnnEBAQAH9/f8ycObPeKHTWrFkIDQ2FVCpFWloa/vrrL+bYyZMnkZaWBqlU\nisDAQLz22 [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e06e750>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAD8CAYAAADUv3dIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8VfX9/5/nnLtvdkL2TiCEMGUoKooobmvditIqWuuq\no61a9evo9Nc66qq21EEVcRUcVUEFFREVlSmbyF4h++5xxu+PQABJQsYdiXyeD6/n3LM+7xtuXnmf\nz3kPyTAMA4FAIBDEBDneBggEAsGRhBBdgUAgiCFCdAUCgSCGCNEVCASCGCJEVyAQCGKIEF2BQCCI\nIUJ0BRHj+uuv509/+lO8zegWsiyzcePGeJshOAKQRJyuQNAiutXV1ZSWlsbbFMGPHOHpCgQCQQwR\nois4iB/eZl955ZXce++9AHz66afk5+fz6KOPkpWVRW5uLtOmTWvzWICHHnqI3Nxc8vPzef755w+6\n9vjx43nuu [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e36afd0>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADxCAYAAABoIWSWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXeYXOWd5/s5qXJ1hc5BHdQttaRWRkIiiyCDAIMsjAEb\nD+MJO/bYntlnl7l7PXvHa4/veHav7dkdB2xjG3tswCQThAVIWEgCSUhq5dxKrdStzqFyOOH+Ud1F\nN0odqjqg83meo1N16pz3fbtU9a33/N5fEAzDMDAxMTExGRPE8R6AiYmJybWEKbomJiYmY4gpuiYm\nJiZjiCm6JiYmJmOIKbomJiYmY4gpuiYmJiZjiCm6Jp9I7r33Xn73u98B8Jvf/IZbbrllnEdkYpJC\nHu8BmJiMlm9961ucPHkyLbIAb7311jiOyMTk8pgzXRMTE5MxxBRdk4yyZ88eFi5cSE5ODo8++iiP\nPvoo//RP/ [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e0b3090>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADxCAYAAABoIWSWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXeYW/d55/s5BTjoGMwMpnMKexdFsag3qhdKsuzIthRl\nHW/WztqJb+7G1/tsudnN3exuvHHu3bjEJVEc7yqWJcXqhbZYRLEXkRTJIWdIDocz5PQZDHo7OOf+\ngaIZ1gEGmCKez/PgAQbAOecHDPDF77y/9/2+gq7rOgYGBgYGU4I43QMwMDAwuJ4wRNfAwMBgCjFE\n18DAwGAKMUTXwMDAYAoxRNfAwMBgCjFE18DAwGAKMUTXIC/a2tpYtWoVLpcLSZL4i7/4i2tu09zc\nzObNm0s+tuXLl7N9+/ZJ7+c//af/xO/+7u8WYUQGBpciT/cADGYX3/3ud9mwYQOHDx+e8DaCICAI\nQglHlebYs [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x102652150>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADxCAYAAABoIWSWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXl8W9WZ//+WdLVasmVZ8hLvSxbb2fcASYEAIYGyFiht\n6TCdab9tp+10fkNnWjqdTtvfTKelnV9nOsN0g7bfUiBACySEkISsJCELZE8cJ87meF9ka9/u8vtD\ntrATJ/EieUnu++X7utK5955zJEsfnfuc5zyPRlEUBRUVFRWVUUE71h1QUVFRuZFQRVdFRUVlFFFF\nV0VFRWUUUUVXRUVFZRRRRVdFRUVlFFFFV0VFRWUUUUVXZdxw66238txzz131nD/+8Y+sWLFilHo0\nMp588km+853vjHU3VMYZquiqjBs0Gg0ajeaq53z6059mw4YNg6rvd7/7HUuXLk1G14bFYF6Pyo2H\nKroqKldAF [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e60ced0>"
+       ]
+      }
+     ],
+     "prompt_number": 10
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "# To-Do list\n",
+      "\n",
+      "General concepts and specific ideas to implement over time, with enough description so they can be brought to life.\n",
+      "\n",
+      "* Support for points in medium and higher layer\n",
+      "* LISA cluster maps"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Caution note on plotting points\n",
+      "\n",
+      "Support for points (dots) is still not quite polished. Ideally, one would like to create a `PathCollection` from scratch so it is analogue to the creation of a `PolyCollection` or `LineCollection`. However, for the time being, we are relying on the wrapper `plt.scatter`, which makes it harder to extract the collection and plug it in a different figure. For that reason, it is recommended that, for the time being, one creates the line and/or polygon map as shown in this notebook and [...]
+      "\n",
+      "**NOTE**: the `PathCollection` created by `plt.scatter` is detailed on line 3142 of [`_axes.py`](https://github.com/matplotlib/matplotlib/blob/master/lib/matplotlib/axes/_axes.py). Maybe we can take some inspiration from there to create our own `PathCollection` for points so they live at the same level as polygons."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/pysal/contrib/viz/taz_example.ipynb b/pysal/contrib/viz/taz_example.ipynb
new file mode 100644
index 0000000..dca4b1f
--- /dev/null
+++ b/pysal/contrib/viz/taz_example.ipynb
@@ -0,0 +1,454 @@
+{
+ "metadata": {
+  "name": ""
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "import pysal as ps\n",
+      "import random as rdm\n",
+      "from pysal.contrib.viz import mapping as maps\n",
+      "from matplotlib.collections import LineCollection"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 28
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "shp = ps.open(ps.examples.get_path(\"taz.shp\"))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 29
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "dbf = ps.open(ps.examples.get_path(\"taz.dbf\"))\n",
+      "              "
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 30
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "dbf.header"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 31,
+       "text": [
+        "['AREA',\n",
+        " 'PERIMETER',\n",
+        " 'CNTY',\n",
+        " 'RSA',\n",
+        " 'AIRDB',\n",
+        " 'TAZ2K',\n",
+        " 'SQ_MILE',\n",
+        " 'ACRE',\n",
+        " 'NEWSEQ',\n",
+        " 'CSA',\n",
+        " 'CSA_NEW',\n",
+        " 'SQMI_TAZ',\n",
+        " 'TAZ_NUM',\n",
+        " 'CountyFIPS']"
+       ]
+      }
+     ],
+     "prompt_number": 31
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "fig = figure(figsize=(9,9))\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_edgecolor('0.8')\n",
+      "ax = maps.setup_ax([base])\n",
+      "fig.add_axes(ax)\n",
+      "show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAAIECAYAAABmAjaWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXlb4trytp8MhHkWHEARZ227e++zv/9H+J3ere0ss8zz\nDCEk7x+e5EVFBQQCYd3X5eUESQHJWrVqVT1FSZIkgUAgEAgEwkpDq20AgUAgEAgE9SEOAYFAIBAI\nBOIQEAgEAoFAIA4BgUAgEAgEEIeAQCAQCAQCiENAIBAIBAIBxCEgEAiEhUCSJNzc3KDX66HX60EQ\nBAiCgH6/D1EUIUkS6vU6Hh8fQarFCbOAVdsAAoFA0CLypC1J0puf3/tiGAY6ne7dY1qtVrRaLaTT\naWxtbc3ldRBWB+IQEDTHxcUF9Hr9u/+naRoURYGiKAAARVHKgD34t2FfNP02qDb4nGHfh63mBv82\n+PPgOUZZB [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x11951afd0>"
+       ]
+      }
+     ],
+     "prompt_number": 32
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## County as unique values"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cnty = np.array(dbf.by_col(\"CNTY\"))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 33
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 33
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "\n",
+      "fig = figure(figsize=(9,9))\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_edgecolor('0.8')\n",
+      "counties = maps.base_choropleth_unique(maps.map_poly_shp(shp), cnty)\n",
+      "counties.set_linewidth(0)\n",
+      "counties.set_alpha(.5)\n",
+      "ax = maps.setup_ax([base, counties])\n",
+      "fig.add_axes(ax)\n",
+      "show()\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAAIECAYAAABmAjaWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvduPa8l13/+tqn3hbZPs6zlzZiTHimNLkGEDMvCTAySA\ngziAEQR5CWAgD/FbEMBA/pnkKQHyZgSRzoxGlkaKMJYljSwJsq14rJvHks7czznT9+a+X6vW74Gb\nHHY3u5tks3nZrA/QOH3YTbLI5q761qq1vosREUGj0Wg0Gs1Gw5c9AI1Go9FoNMtHCwKNRqPRaDRa\nEGg0Go1Go9GCQKPRaDQaDbQg0Gg0Go1GAy0INBqNRqPRADCWPQCNRqPRAESEn/z3/47O8XH/Bsb6\n/5TfMwBZnkP+4R/iN//VvwIrf67RzAstCDQajeYeGFi8ENGV76/74pyDC3HtY1qWhejb38azR4/w\n0qc/vZDXo [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x119518f50>"
+       ]
+      }
+     ],
+     "prompt_number": 34
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cents = np.array([poly.centroid for poly in shp])\n",
+      "cents[0]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 35,
+       "text": [
+        "array([  601741.78690918,  3939798.30153461])"
+       ]
+      }
+     ],
+     "prompt_number": 35
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wrook = ps.rook_from_shapefile(ps.examples.get_path(\"taz.shp\"))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 36
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "w = wrook\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 37
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "cents.min(axis=0)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 38,
+       "text": [
+        "array([  282150.8269443 ,  3615409.10372805])"
+       ]
+      }
+     ],
+     "prompt_number": 38
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "def w2line_graph(w, centroids):\n",
+      "    \n",
+      "    \n",
+      "    \n",
+      "    segments = []\n",
+      "    for i in w.id2i:\n",
+      "        origin = cents[i]\n",
+      "        for j in w.neighbors[i]:\n",
+      "            dest = cents[j]\n",
+      "            ij = [i,j]\n",
+      "            ij.sort()\n",
+      "            segments.append([origin, dest])\n",
+      "    #segs = LineCollection(segments)\n",
+      "    \n",
+      "    #maps._add_axes2col(segs, [minx, miny, maxx, maxy])\n",
+      "    return segments    \n",
+      "\n",
+      "        "
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 39
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "segs = w2line_graph(wrook, cents)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 40
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "fig = figure(figsize=(9,9))\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_edgecolor('0.8')\n",
+      "segs = LineCollection(segs)\n",
+      "maps._add_axes2col(segs, shp.bbox)\n",
+      "segs.set_linewidth(0.20)\n",
+      "ax = maps.setup_ax([base, segs])\n",
+      "fig.add_axes(ax)\n",
+      "show()\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAAIECAYAAABmAjaWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXlwY9W957+62mXt8m7Z8tput+1uaCCdpDvsSyCQDp0O\n0IRACB0I2cib1LyZmqmpysu8WWqmkspL8oZAGggEQiAQ0kAghNBhDRAC3W237XZ7k7xJlmVJlmRJ\nlq+u5o/DlTfJlmRJV5bOp8plW8u9R7J8z/f8zu/3/YlisVgMFAqFQqFQShpG6AFQKBQKhUIRHioI\nKBQKhUKhUEFAoVAoFAqFCgIKhUKhUCiggoBCoVAoFAqoIKBQKBQKhQIqCCgUCqUgiMViGBgYwNLS\nEpaWlsCyLFiWRTQaBcdxiMVi8Pv9GB4eBq0Wp+QCidADoFAolGKEn7Rjsdi6n5N9icViSKXSpMfU\naDQIBoOw2 [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10eab9d50>"
+       ]
+      }
+     ],
+     "prompt_number": 41
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Intersection weights"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wb = ps.regime_weights(np.array(dbf.by_col(\"CNTY\")))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 42
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wb.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 43,
+       "text": [
+        "4109"
+       ]
+      }
+     ],
+     "prompt_number": 43
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wint = ps.weights.Wsets.w_intersection(wb, wrook)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 44
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "segs = w2line_graph(wint, cents)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 45
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 45
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "fig = figure(figsize=(9,9))\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_edgecolor('0.8')\n",
+      "segs = LineCollection(segs)\n",
+      "maps._add_axes2col(segs, shp.bbox)\n",
+      "segs.set_linewidth(0.20)\n",
+      "ax = maps.setup_ax([base, segs])\n",
+      "fig.add_axes(ax)\n",
+      "show()\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAAIECAYAAABmAjaWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXl0W9XZ7//V0SxrlmfLlsc4ju0EAjRtkzIPhUJT0hQI\npVBKCqUTvbfrDuveddfq2997h3XvalfftvdSaIBCobQUSgMUSikpY4FSCLHjOI4nyZYtWZYlWZIl\nWT46+v2xOfIk2ZIs6cjS/qzlZVvDOVuyfPZ3P/t5vo8oHo/HQaFQKBQKpaxhhB4AhUKhUCgU4aGC\ngEKhUCgUChUEFAqFQqFQqCCgUCgUCoUCKggoFAqFQqGACgIKhUKhUCiggoBCoVCKgng8jsHBQSwt\nLWFpaQksy4JlWcRiMXAch3g8jkAggJGREdBqcUo+kAg9AAqFQilF+Ek7Ho+v+znVl1gshlQqTXlM\njUaDUCgEh [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10e210910>"
+       ]
+      }
+     ],
+     "prompt_number": 46
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "segments = w2line_graph(wint, cents)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 47
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "fig = figure(figsize=(9,9))\n",
+      "base = maps.map_poly_shp(shp)\n",
+      "base.set_linewidth(0.75)\n",
+      "base.set_facecolor('none')\n",
+      "base.set_edgecolor('0.8')\n",
+      "counties = maps.base_choropleth_unique(maps.map_poly_shp(shp), cnty)\n",
+      "counties.set_linewidth(0)\n",
+      "counties.set_alpha(.5)\n",
+      "segs = LineCollection(segments)\n",
+      "maps._add_axes2col(segs, shp.bbox)\n",
+      "segs.set_linewidth(0.20)\n",
+      "segs.set_color('0.1')\n",
+      "ax = maps.setup_ax([base, counties, segs])\n",
+      "fig.add_axes(ax)\n",
+      "show()\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAgQAAAIECAYAAABmAjaWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnVl0E1e67/9VqirNoyVb8oTDPI9xDAQ6IdBJSEMg44GY\n5KZv0ufhvp6n+3JXr/N2Xs7DebjnntXd6T4JbkjoJgRCQtM0CTEEzGwMBEJwGCzLg6x5lqrqPhQy\nNrZsSZZUsrx/a3khpKpdn2S59n9/+xsoURRFEAgEAoFAmNHQchtAIBAIBAJBfoggIBAIBAKBQAQB\ngUAgEAgEIggIBAKBQCCACAICgUAgEAgggoBAIBAIBAIARm4DCAQCgQCIogin8wAcjhgAgKKo4dfS\njxOJJFyuRZg1q2XU6wRCISCCgEAgEIpAusSLKIpjHmf6oWkaCoUi45hKJYeqqlvo67PC4ZhbkvdB\nmDkQQUCoO [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x1195fe3d0>"
+       ]
+      }
+     ],
+     "prompt_number": 48
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/pysal/contrib/weights_viewer/__init__.py b/pysal/contrib/weights_viewer/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/contrib/weights_viewer/transforms.py b/pysal/contrib/weights_viewer/transforms.py
new file mode 100644
index 0000000..a5e1b0b
--- /dev/null
+++ b/pysal/contrib/weights_viewer/transforms.py
@@ -0,0 +1,208 @@
+import pysal
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+
+class WorldToViewTransform(object):
+    """
+    An abstract class modeling a View window.
+    Supports Panning, Zooming, Resizing.
+
+    Is observable.
+
+    Parameters:
+    worldExtent -- Extent,List -- Extent of the world, left,lower,right,upper in world coords
+    pixel_width -- int -- initial width of the view in pixels
+    pixel_height -- int -- initial height of the view in pixels
+
+    Notes:
+    World coordinates are expected to increase in the X and Y direction.
+    Pixel coordinates are inverted in the Y direction.
+
+    This class helps transform world coordinates to screen coordinates.
+    To transform a GraphicsMatrix,
+    matrix.Scale(1.0/model.scale,-1.0/model.scale)
+    matrix.Translate(*model.offset)
+
+    The transforms will be applied in reverse order,
+    The coordinates will first be translated to the origin (of the current view).
+    The coordinates will then be scaled.
+
+    Eg.
+    >>> view = WorldToViewTransform([-180,-90,180,90],500,500)
+    """
+    def __init__(self,worldExtent,pixel_width,pixel_height):
+        """ Initialize the view to the extent of the world """
+        self.__pixel_width = float(pixel_width)
+        self.__pixel_height = float(pixel_height)
+        self.__world = worldExtent
+        self.extent = worldExtent
+        # The extent setter above derives self.__scale and self.__offset
+        # (both expressed in world coordinates).
+    def __copy__(self):
+        # The copy starts from this view's CURRENT extent, not the full world.
+        return WorldToViewTransform(self.extent,self.__pixel_width,self.__pixel_height)
+    copy = __copy__
+    def __get_offset(self):
+        """
+        Returns the offset of the top left corner of the current view in world coords.
+        Move the world this many units to align it with the view.
+        """
+        return self.__offset
+    def __set_offset(self,value):
+        """
+        Set the Offset of the top left corner in world coords.
+        """
+        assert len(value) == 2
+        self.__offset = value
+    offset = property(fget=__get_offset,fset=__set_offset)
+    def __get_scale(self):
+        """ Returns the current scale in units/pixel """
+        return self.__scale
+    def __set_scale(self,value):
+        """ Sets the current scale in units/pixel """
+        self.__scale = value
+    scale = property(fget=__get_scale,fset=__set_scale)
+    def __get_extent(self):
+        """Returns the extent of the current view in World Coordinates."""
+        left,upper = self.pixel_to_world(0,0)
+        right,lower = self.pixel_to_world(self.__pixel_width,self.__pixel_height)
+        return pysal.cg.Rectangle(left,lower,right,upper)
+    def __set_extent(self,value):
+        """ Set the extent of the current view in World Coordinates.
+            Preserve fixed scale, take the max of (sx,sy).
+
+            Use this to zoom to a specific region when you know the region's
+            bbox in world coords.
+        """
+        left,lower,right,upper = value
+        width = abs(right-left)
+        height = abs(upper-lower)
+        # Choose the scale that fits the whole requested extent on screen.
+        sx = width/self.__pixel_width
+        sy = height/self.__pixel_height
+        self.__scale = max(sx,sy)
+
+        #The offset translates the world to the origin.
+        #The X offset + world.left == 0
+        #The Y offset + world.upper == 0
+
+        # Move the offset a little, so that the center of the extent is in the center of the view.
+        oleft = (left+(width/2.0)) - (self.__pixel_width*self.__scale/2.0)
+        oupper = (upper-height/2.0) + (self.__pixel_height*self.__scale/2.0)
+
+        #self.__offset = (-left,-upper) # in world coords
+        self.__offset = (-oleft,-oupper) # in world coords
+    extent = property(fget=__get_extent,fset=__set_extent)
+    def __get_width(self):
+        """ Returns the width of the current view in world coords """
+        return self.__pixel_width*self.scale
+    def __set_width(self, value):
+        """
+        Sets the width of the current view, value in pixels
+
+        Eg.
+        >>> view = WorldToViewTransform([0,0,100,100],500,500)
+        >>> view.extent[:]
+        [0.0, 0.0, 100.0, 100.0]
+        >>> view.width = 250
+        >>> view.extent[:]
+        [0.0, 0.0, 50.0, 100.0]
+        """
+        # Scale is left untouched, so shrinking the view narrows the
+        # visible world instead of rescaling it (see doctest above).
+        if self.__pixel_width != value:
+            self.__pixel_width = value
+    width = property(fget=__get_width,fset=__set_width)
+    def __get_height(self):
+        """ Returns the height of the current view in world coords """
+        return self.__pixel_height*self.scale
+    def __set_height(self, value):
+        """
+        Sets the height of the current view, value in pixels
+
+        Eg.
+        >>> view = WorldToViewTransform([0,0,100,100],500,500)
+        >>> view.extent[:]
+        [0.0, 0.0, 100.0, 100.0]
+        >>> view.height = 250
+        >>> view.extent[:]
+        [0.0, 50.0, 100.0, 100.0]
+        """
+        if self.__pixel_height != value:
+            self.__pixel_height = value
+    height = property(fget=__get_height,fset=__set_height)
+    def __get_pixel_size(self):
+        """
+        Set and Return the current size of the view in pixels.
+        """
+        return self.__pixel_width,self.__pixel_height
+    def __set_pixel_size(self,value):
+        w,h = value
+        if self.__pixel_width != w:
+            self.__pixel_width = w
+        if self.__pixel_height != h:
+            self.__pixel_height = h
+    pixel_size = property(fget=__get_pixel_size,fset=__set_pixel_size)
+
+    def pan(self,dpx,dpy):
+        """
+        Pan the view by (dpx,dpy) pixel coordinates.
+
+        Positive deltas move the world right and down.
+        Negative deltas move the world left and up.
+
+        Eg.
+        >>> view = WorldToViewTransform([0,0,100,100],500,500)
+        >>> view.pan(500,0)
+        >>> view.extent[:]
+        [-100.0, 0.0, 0.0, 100.0]
+        >>> view.pan(-500,500)
+        >>> view.extent[:]
+        [0.0, 100.0, 100.0, 200.0]
+        >>> view.pan(0,-500)
+        >>> view.extent[:]
+        [0.0, 0.0, 100.0, 100.0]
+        >>> view.pan(490,490)
+        >>> view.extent[:]
+        [-98.0, 98.0, 2.0, 198.0]
+        >>> view.pan(-490,-490)
+        >>> view.extent[:]
+        [0.0, 0.0, 100.0, 100.0]
+        """
+        ogx,ogy = self.__offset
+        s = self.scale
+        # Convert pixel deltas to world units; Y is negated because pixel Y
+        # grows downward while world Y grows upward.
+        self.__offset = ogx+(dpx*s),ogy-(dpy*s)
+    def pan_to(self,extent):
+        # Center the view on `extent` while keeping the current zoom level:
+        # the extent setter recomputes scale, so restore it afterwards.
+        initScale = self.scale
+        self.extent = extent
+        self.scale = initScale
+    def pixel_to_world(self,px,py):
+        """
+        Returns the world coordinates of the Pixel (px,py).
+
+        Eg.
+        >>> view = WorldToViewTransform([0,0,100,100],500,500)
+        >>> view.pixel_to_world(0,0)
+        (0.0, 100.0)
+        >>> view.pixel_to_world(500,500)
+        (100.0, 0.0)
+        """
+        sx = self.scale
+        sy = -sx
+        ogx,ogy = self.__offset
+        return px*sx - ogx, py*sy - ogy
+    def world_to_pixel(self,x,y):
+        """
+        Returns the pixel of the world coordinate (x,y).
+
+        Eg.
+        >>> view = WorldToViewTransform([0,0,100,100],500,500)
+        >>> view.world_to_pixel(0,0)
+        (0.0, 500.0)
+        >>> view.world_to_pixel(100,100)
+        (500.0, -0.0)
+        """
+        sx = self.scale
+        sy = -sx
+        ogx,ogy = self.__offset
+        return (x+ogx)/sx, (y+ogy)/sy
+
+if __name__=="__main__":
+    # Run the embedded doctests, then leave a sample transform around for
+    # interactive experimentation.
+    import doctest
+    doctest.testmod()
+    view = WorldToViewTransform([0,0,100,100],500,500)
diff --git a/pysal/contrib/weights_viewer/weights_viewer.py b/pysal/contrib/weights_viewer/weights_viewer.py
new file mode 100644
index 0000000..f9aebc4
--- /dev/null
+++ b/pysal/contrib/weights_viewer/weights_viewer.py
@@ -0,0 +1,179 @@
+import wx
+import pysal
+from transforms import WorldToViewTransform
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+
+POINT_RADIUS = 5
+BORDER_COLOR = wx.Colour(0,0,0,255)
+SELECTION_COLOR = wx.Colour(255,128,0,255)
+NEIGHBORS_COLOR = wx.Colour(128,255,0,255)
+BACKGROUND_COLOR = wx.Colour(0,0,0,0)
+
+class WeightsMapFrame(wx.Frame):
+    def __init__(self,parent=None,size=(600,600), style=wx.DEFAULT_FRAME_STYLE, geo=None, w=None):
+        wx.Frame.__init__(self,parent,size=size,style=style)
+        self.Bind
+        self.SetTitle("Weights Inspector")
+        if issubclass(type(geo),basestring):
+            geo = pysal.open(geo,'r')
+        self.geo = geo
+        if issubclass(type(w),basestring):
+            w = pysal.open(w,'r').read()
+        self.w = w
+        self.wm = WeightsMap(self,self.geo,self.w)
+
+class WeightsMap(wx.Panel):
+    """ Display a Weights Inspection Map.
+
+    Draws the shapes in `geo` and, on mouse-over, highlights the hovered
+    observation together with its neighbors taken from the spatial weights
+    object `w_obj`.
+    """
+    def __init__(self, parent, geo, w_obj):
+        wx.Panel.__init__(self,parent,size=(600,600))
+        # Three status fields: selection id, neighbor ids, weight values.
+        self.status = parent.CreateStatusBar(3)
+        self.status.SetStatusWidths([-1,-2,-2])
+        self.status.SetStatusText('No Selection',0)
+        self.Bind(wx.EVT_SIZE, self.onSize)
+        self.Bind(wx.EVT_PAINT, self.onPaint)
+        self.Bind(wx.EVT_MOUSE_EVENTS, self.onMouse)
+        self.trns = 0  # alpha used when creating the off-screen buffer
+        self.background = (255,255,255,255)
+        w,h = self.GetSize()
+        self.buffer = wx.EmptyBitmapRGBA(w,h,alpha=self.trns)
+        self.geo = geo
+        # Dispatch on geometry type: polygons and points are drawn and
+        # hit-tested with different strategies.
+        if geo.type == pysal.cg.shapes.Polygon:
+            self.drawfunc = self.drawpoly
+            self.locator = pysal.cg.PolygonLocator(geo)
+        elif geo.type == pysal.cg.shapes.Point:
+            self.drawfunc = self.drawpt
+            self.locator = pysal.cg.PointLocator(geo)
+        else:
+            raise TypeError, "Unsupported Type: %r"%(geo.type)
+        self.w = w_obj
+        self._ids = range(self.w.n)
+        self.transform = WorldToViewTransform(geo.bbox,w,h)
+        self.selection = None
+    def onMouse(self, evt):
+        # Map the cursor position back into world coordinates for hit testing.
+        x,y = evt.X,evt.Y
+        X,Y = self.transform.pixel_to_world(x,y)
+        if self.geo.type == pysal.cg.shapes.Polygon:
+            rs = self.locator.contains_point((X,Y))
+            if rs:
+                # NOTE(review): assumes locator results carry 1-based ids,
+                # hence the -1 -- confirm against PolygonLocator.
+                selection = rs[0].id-1
+                self.set_selection(selection)
+            else:
+                self.set_selection(None)
+        else:
+            print self.locator.nearest((X,Y))
+    def onSize(self, evt):
+        # Rebuild the buffer and transform at the new pixel size, then redraw.
+        w,h = self.GetSize()
+        self.buffer = wx.EmptyBitmapRGBA(w,h,alpha=self.trns)
+        self.transform = WorldToViewTransform(self.geo.bbox,w,h)
+        self.draw()
+    def onPaint(self, evt):
+        pdc = wx.PaintDC(self)
+        pdc.Clear()
+        self.draw()
+    def draw(self):
+        # Paint the background into the off-screen buffer, render all shapes
+        # into it, then blit the result onto the panel.
+        dc = wx.MemoryDC()
+        dc.SelectObject(self.buffer)
+        dc.SetBackground(wx.Brush(wx.Colour(*self.background)))
+        dc.Clear()
+        dc.SelectObject(wx.NullBitmap)
+        self.draw_shps(self.buffer)
+        cdc = wx.ClientDC(self)
+        cdc.DrawBitmap(self.buffer,0,0)
+    def drawpoly(self,gc,matrix,fill=False,ids=None):
+        """Append the polygons selected by `ids` (default: all) to one path,
+        transform it by `matrix`, then optionally fill and always stroke it."""
+        geo = self.geo
+        pth = gc.CreatePath()
+        if not ids:
+            ids = xrange(len(geo))
+        for i in ids:
+            poly = geo.get(i)
+            parts = poly.parts
+            # Holes are drawn as extra subpaths of the same polygon.
+            if poly.holes[0]:
+                parts = parts+poly.holes
+            for part in parts:
+                x,y = part[0]
+                pth.MoveToPoint(x,y)
+                for x,y in part[1:]:
+                    pth.AddLineToPoint(x,y)
+                pth.CloseSubpath()
+        pth.Transform(matrix)
+        if fill:
+            gc.FillPath(pth)
+        gc.StrokePath(pth)
+        return gc
+    def drawpt(self,gc,matrix,fill=False,ids=None):
+        # NOTE(review): `fill` and `ids` are accepted for interface parity
+        # with drawpoly but are currently ignored -- all points are drawn.
+        r = POINT_RADIUS
+        radius = r/2.0
+        geo = self.geo
+        for pt in geo:
+            x,y = matrix.TransformPoint(*pt)
+            gc.DrawEllipse(x-radius,y-radius,r,r)
+        return gc
+    def draw_shps(self, buff, fill_color=None, ids=None, fill_style=wx.SOLID):
+        """Render shapes into the bitmap `buff`; a non-None `fill_color`
+        enables filling with the given `fill_style`."""
+        transform = self.transform
+        dc = wx.MemoryDC()
+        dc.SelectObject(buff)
+        gc = wx.GraphicsContext.Create(dc)
+        gc.SetPen( gc.CreatePen(wx.Pen(BORDER_COLOR,1)) )
+        if fill_color != None:
+            gc.SetBrush( gc.CreateBrush(wx.Brush(fill_color,fill_style)) )
+            fill = True
+        else:
+            fill = False
+        # Negative Y scale mirrors the axis: world Y grows up, pixel Y down.
+        matrix = gc.CreateMatrix()
+        matrix.Scale(1./transform.scale,1./-transform.scale)
+        matrix.Translate(*transform.offset)
+        self.drawfunc(gc,matrix,fill,ids)
+    def set_selection(self,sel):
+        """Highlight observation `sel` (index into w.id_order) and its
+        neighbors; None clears the highlight and the status bar."""
+        if self.selection == sel:
+            return
+        self.selection = sel
+
+        cdc = wx.ClientDC(self)
+        if sel != None:
+            # Work on a copy of the base map so the cached buffer stays clean.
+            w,h = self.transform.pixel_size
+            buff = self.buffer.GetSubBitmap((0,0,w,h))
+            id = self.w.id_order[sel]
+            neighbors = map(self.w.id_order.index,self.w.neighbors[id])
+            self.draw_shps(buff, NEIGHBORS_COLOR, neighbors)
+            if sel in neighbors:
+                # Self-neighbor: hatch the fill so both colors stay visible.
+                self.draw_shps(buff, SELECTION_COLOR, [sel], wx.CROSSDIAG_HATCH)
+            else:
+                self.draw_shps(buff, SELECTION_COLOR, [sel])
+            print sel,":",neighbors
+            cdc.DrawBitmap(buff,0,0)
+            stat0 = "Selection:%s"%id
+            stat1 = "Neighbors:%s"%(','.join(map(str,self.w.neighbors[id])))
+            stat2 = "Weights:%s"%(','.join(map(str,self.w.weights[id])))
+            self.status.SetStatusText(stat0,0)
+            self.status.SetStatusText(stat1,1)
+            self.status.SetStatusText(stat2,2)
+        else:
+            self.status.SetStatusText('No Selection',0)
+            self.status.SetStatusText('',1)
+            self.status.SetStatusText('',2)
+            cdc.DrawBitmap(self.buffer,0,0)
+
+class WeightsMapApp(wx.App):
+    def __init__(self, geo=None, w=None, redirect=False):
+        self.geo = geo
+        self.w = w
+        wx.App.__init__(self, redirect)
+    def OnInit(self):
+        self.SetAppName("Weights Inspector")
+        self.frame = WeightsMapFrame(None,size=(600,600),geo=self.geo,w=self.w)
+        self.SetTopWindow(self.frame)
+        self.frame.Show()
+        return True
+
+
+if __name__=='__main__':
+    # Demo: inspect queen contiguity weights for the columbus sample data.
+    # (The commented block shows the file-based alternative using sids2.)
+    #shp = pysal.examples.get_path('sids2.shp')
+    #w = pysal.examples.get_path('sids2.gal')
+    #app = WeightsMapApp(shp,w)
+    #app.MainLoop()
+
+    shp = pysal.examples.get_path('columbus.shp')
+    w = pysal.queen_from_shapefile(shp)
+    app = WeightsMapApp(shp,w)
+    app.MainLoop()
diff --git a/pysal/core/FileIO.py b/pysal/core/FileIO.py
new file mode 100644
index 0000000..2707fea
--- /dev/null
+++ b/pysal/core/FileIO.py
@@ -0,0 +1,346 @@
+"""
+FileIO: Module for reading and writing various file types in a Pythonic way.
+This module should not be used directly, instead...
+import pysal.core.FileIO as FileIO
+Readers and Writers will mimic python file objects.
+.seek(n) seeks to the n'th object
+.read(n) reads n objects, default == all
+.next() reads the next object
+"""
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+
+__all__ = ['FileIO']
+import os.path
+import struct
+from warnings import warn
+import pysal
+
+
class FileIO_MetaCls(type):
    """
    Metaclass instantiated once for every class definition that uses it.
    All subclasses of FileIO inherit this metaclass, which registers their
    abilities with the FileIO registry as they are defined.
    Subclasses must define FORMATS and MODES (both lists).
    """
    def __new__(mcs, name, bases, namespace):
        new_cls = type.__new__(mcs, name, bases, namespace)
        # The abstract bases themselves ('FileIO' and 'DataTable') are
        # never registered; everything else must declare its abilities.
        if name not in ('FileIO', 'DataTable'):
            if "FORMATS" not in namespace or "MODES" not in namespace:
                raise TypeError("FileIO subclasses must have FORMATS and MODES defined")
            FileIO._register(new_cls, namespace['FORMATS'], namespace['MODES'])
        return new_cls
+
+
+class FileIO(object):  # should be a type?
+    """
+    How this works:
+    FileIO.open(\*args) == FileIO(\*args)
+    When creating a new instance of FileIO the .__new__ method intercepts
+    .__new__ parses the filename to determine the fileType
+    next, .__registry and checked for that type.
+    Each type supports one or more modes ['r','w','a',etc]
+    If we support the type and mode, an instance of the appropriate handler
+    is created and returned.
+    All handlers must inherit from this class, and by doing so are automatically
+    added to the .__registry and are forced to conform to the prescribed API.
+    The metaclass takes cares of the registration by parsing the class definition.
+    It doesn't make much sense to treat weights in the same way as shapefiles and dbfs,
+    ....for now we'll just return an instance of W on mode='r'
+    .... on mode='w', .write will expect an instance of W
+    """
+    __metaclass__ = FileIO_MetaCls
+    __registry = {}  # {'shp':{'r':[OGRshpReader,pysalShpReader]}}
+
+    def __new__(cls, dataPath='', mode='r', dataFormat=None):
+        """
+        Intercepts the instantiation of FileIO and dispatches to the correct handler
+        If no suitable handler is found a python file object is returned.
+        """
+        if cls is FileIO:
+            try:
+                newCls = object.__new__(cls.__registry[cls.getType(dataPath,
+                                                                   mode, dataFormat)][mode][0])
+            except KeyError:
+                return open(dataPath, mode)
+            return newCls
+        else:
+            return object.__new__(cls)
+
+    @staticmethod
+    def getType(dataPath, mode, dataFormat=None):
+        """Parse the dataPath and return the data type"""
+        if dataFormat:
+            ext = dataFormat
+        else:
+            ext = os.path.splitext(dataPath)[1]
+            ext = ext.replace('.', '')
+            ext = ext.lower()
+        if ext == 'txt':
+            f = open(dataPath, 'r')
+            l1 = f.readline()
+            l2 = f.readline()
+            if ext == 'txt':
+                try:
+                    n, k = l1.split(',')
+                    n, k = int(n), int(k)
+                    fields = l2.split(',')
+                    assert len(fields) == k
+                    return 'geoda_txt'
+                except:
+                    return ext
+        return ext
+
+    @classmethod
+    def _register(cls, parser, formats, modes):
+        """ This method is called automatically via the MetaClass of FileIO subclasses
+        This should be private, but that hides it from the MetaClass
+        """
+        assert cls is FileIO
+        for format in formats:
+            if not format in cls.__registry:
+                cls.__registry[format] = {}
+            for mode in modes:
+                if not mode in cls.__registry[format]:
+                    cls.__registry[format][mode] = []
+                cls.__registry[format][mode].append(parser)
+        #cls.check()
+
+    @classmethod
+    def check(cls):
+        """ Prints the contents of the registry """
+        print "PySAL File I/O understands the following file extensions:"
+        for key, val in cls.__registry.iteritems():
+            print "Ext: '.%s', Modes: %r" % (key, val.keys())
+
+    @classmethod
+    def open(cls, *args, **kwargs):
+        """ Alias for FileIO() """
+        return cls(*args, **kwargs)
+
+    class _By_Row:
+        def __init__(self, parent):
+            self.p = parent
+
+        def __repr__(self):
+            if not self.p.ids:
+                return "keys: range(0,n)"
+            else:
+                return "keys: " + self.p.ids.keys().__repr__()
+
+        def __getitem__(self, key):
+            if type(key) == list:
+                r = []
+                if self.p.ids:
+                    for k in key:
+                        r.append(self.p.get(self.p.ids[k]))
+                else:
+                    for k in key:
+                        r.append(self.p.get(k))
+                return r
+            if self.p.ids:
+                return self.p.get(self.p.ids[key])
+            else:
+                return self.p.get(key)
+        __call__ = __getitem__
+
+    def __init__(self, dataPath='', mode='r', dataFormat=None):
+        self.dataPath = dataPath
+        self.dataObj = ''
+        self.mode = mode
+        #pos Should ALWAYS be in the range 0,...,n
+        #for custom IDs set the ids property.
+        self.pos = 0
+        self.__ids = None  # {'id':n}
+        self.__rIds = None
+        self.closed = False
+        self._spec = []
+        self.header = []
+
+    def __getitem__(self, key):
+        return self.by_row.__getitem__(key)
+
+    @property
+    def by_row(self):
+        return self._By_Row(self)
+
+    def __getIds(self):
+        return self.__ids
+
+    def __setIds(self, ids):
+        """ Property Method for .ids
+        Takes a list of ids and maps then to a 0 based index
+        Need to provide a method to set ID's based on a fieldName
+        preferably without reading the whole file.
+        """
+        if isinstance(ids, list):
+            try:
+                assert len(ids) == len(set(ids))
+            except AssertionError:
+                raise KeyError("IDs must be unique")
+            # keys: ID values: i
+            self.__ids = {}
+            # keys: i values: ID
+            self.__rIds = {}
+            for i, id in enumerate(ids):
+                self.__ids[id] = i
+                self.__rIds[i] = id
+        elif isinstance(ids, dict):
+            self.__ids = ids
+            self.__rIds = {}
+            for id, n in ids.iteritems():
+                self.__rIds[n] = id
+        elif not ids:
+            self.__ids = None
+            self.__rIds = None
+    ids = property(fget=__getIds, fset=__setIds)
+
+    @property
+    def rIds(self):
+        return self.__rIds
+
+    def __iter__(self):
+        self.seek(0)
+        return self
+
+    @staticmethod
+    def _complain_ifclosed(closed):
+        """ from StringIO """
+        if closed:
+            raise ValueError("I/O operation on closed file")
+
+    def cast(self, key, typ):
+        """cast key as typ"""
+        if key in self.header:
+            if not self._spec:
+                self._spec = [lambda x:x for key in self.header]
+            if typ is None:
+                self._spec[self.header.index(key)] = lambda x: x
+            else:
+                try:
+                    assert hasattr(typ, '__call__')
+                    self._spec[self.header.index(key)] = typ
+                except AssertionError:
+                    raise TypeError('Cast Objects must be callable')
+        else:
+            raise KeyError("%s" % key)
+
+    def _cast(self, row):
+        if self._spec and row:
+            try:
+                return [f(v) for f, v in zip(self._spec, row)]
+            except ValueError:
+                r = []
+                for f, v in zip(self._spec, row):
+                    try:
+                        if not v and f != str:
+                            raise ValueError
+                        r.append(f(v))
+                    except ValueError:
+                        warn("Value '%r' could not be cast to %s, value set to pysal.MISSINGVALUE" % (v, str(f)), RuntimeWarning)
+                        r.append(pysal.MISSINGVALUE)
+                return r
+
+        else:
+            return row
+
+    def next(self):
+        """A FileIO object is its own iterator, see StringIO"""
+        self._complain_ifclosed(self.closed)
+        r = self.__read()
+        if r is None:
+            raise StopIteration
+        return r
+
+    def close(self):
+        """ subclasses should clean themselves up and then call this method """
+        if not self.closed:
+            self.closed = True
+            del self.dataObj, self.pos
+
+    def get(self, n):
+        """ Seeks the file to n and returns n
+        If .ids is set n should be an id,
+        else, n should be an offset
+        """
+        prevPos = self.tell()
+        self.seek(n)
+        obj = self.__read()
+        self.seek(prevPos)
+        return obj
+
+    def seek(self, n):
+        """ Seek the FileObj to the beginning of the n'th record,
+            if ids are set, seeks to the beginning of the record at id, n"""
+        self._complain_ifclosed(self.closed)
+        self.pos = n
+
+    def tell(self):
+        """ Return id (or offset) of next object """
+        self._complain_ifclosed(self.closed)
+        return self.pos
+
+    def read(self, n=-1):
+        """ Read at most n objects, less if read hits EOF
+        if size is negative or omitted read all objects until EOF
+        returns None if EOF is reached before any objects.
+        """
+        self._complain_ifclosed(self.closed)
+        if n < 0:
+            #return list(self)
+            result = []
+            while 1:
+                try:
+                    result.append(self.__read())
+                except StopIteration:
+                    break
+            return result
+        elif n == 0:
+            return None
+        else:
+            result = []
+            for i in range(0, n):
+                try:
+                    result.append(self.__read())
+                except StopIteration:
+                    break
+            return result
+
+    def __read(self):
+        """ Gets one row from the file handler, and if necessary casts it's objects """
+        row = self._read()
+        if row is None:
+            raise StopIteration
+        row = self._cast(row)
+        return row
+
+    def _read(self):
+        """ Must be implemented by subclasses that support 'r'
+        subclasses should increment .pos
+        and redefine this doc string
+        """
+        self._complain_ifclosed(self.closed)
+        raise NotImplementedError
+
+    def truncate(self, size=None):
+        """ Should be implemented by subclasses
+        and redefine this doc string
+        """
+        self._complain_ifclosed(self.closed)
+        raise NotImplementedError
+
+    def write(self, obj):
+        """ Must be implemented by subclasses that support 'w'
+        subclasses should increment .pos
+        subclasses should also check if obj is an instance of type(list)
+        and redefine this doc string
+        """
+        self._complain_ifclosed(self.closed)
+        "Write obj to dataObj"
+        raise NotImplementedError
+
+    def flush(self):
+        self._complain_ifclosed(self.closed)
+        raise NotImplementedError
diff --git a/pysal/core/IOHandlers/__init__.py b/pysal/core/IOHandlers/__init__.py
new file mode 100644
index 0000000..b9361cf
--- /dev/null
+++ b/pysal/core/IOHandlers/__init__.py
@@ -0,0 +1,20 @@
+import warnings
+warnings.filterwarnings(
+    action='ignore', message=".*__builtin__.file size changed.*")
+import gwt
+import gal
+import dat
+import pyShpIO
+import wkt
+import geoda_txt
+import csvWrapper
+import pyDbfIO
+import arcgis_dbf
+import arcgis_swm
+import arcgis_txt
+import dat
+import geobugs_txt
+import mat
+import mtx
+import stata_txt
+import wk1
diff --git a/pysal/core/IOHandlers/arcgis_dbf.py b/pysal/core/IOHandlers/arcgis_dbf.py
new file mode 100644
index 0000000..d6047ce
--- /dev/null
+++ b/pysal/core/IOHandlers/arcgis_dbf.py
@@ -0,0 +1,231 @@
+import pysal
+import os.path
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from pysal.weights.util import remap_ids
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["ArcGISDbfIO"]
+
+
class ArcGISDbfIO(FileIO.FileIO):
    """
    Opens, reads, and writes weights file objects in ArcGIS dbf format.

    Spatial weights objects in the ArcGIS dbf format are used in
    ArcGIS Spatial Statistics tools.
    This format is the same as the general dbf format,
    but the structure of the weights dbf file is fixed unlike other dbf files.
    This dbf format can be used with the "Generate Spatial Weights Matrix" tool,
    but not with the tools under the "Mapping Clusters" category.

    The ArcGIS dbf file is assumed to have three or four data columns.
    When the file has four columns,
    the first column is meaningless and will be ignored in PySAL
    during both file reading and file writing.
    The next three columns hold origin IDs, destinations IDs, and weight values.
    When the file has three columns,
    it is assumed that only these data columns exist in the stated order.
    The name for the orgin IDs column should be the name of
    ID variable in the original source data table.
    The names for the destination IDs and weight values columns are NID
    and WEIGHT, respectively.
    ArcGIS Spatial Statistics tools support only unique integer IDs.
    Therefore, the values for origin and destination ID columns should
    be integer.
    For the case where the IDs of a weights object are not integers,
    ArcGISDbfIO allows users to use internal id values corresponding to
    record numbers, instead of original ids.

    An exemplary structure of an ArcGIS dbf file is as follows:
    [Line 1]    Field1    RECORD_ID    NID    WEIGHT
    [Line 2]    0         72           76     1
    [Line 3]    0         72           79     1
    [Line 4]    0         72           78     1
    ...

    Unlike the ArcGIS text format, this format does not seem to include self-neighbors.

    References
    ----------
    http://webhelp.esri.com/arcgisdesktop/9.3/index.cfm?TopicName=Convert_Spatial_Weights_Matrix_to_Table_(Spatial_Statistics)

    """

    FORMATS = ['arcgis_dbf']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        self._varName = 'Unknown'
        # Drop any explicit dataFormat argument; the underlying generic
        # dbf handler is selected purely from the .dbf extension.
        args = args[:2]
        FileIO.FileIO.__init__(self, *args, **kwargs)
        self.file = pysal.open(self.dataPath, self.mode)

    def _set_varName(self, val):
        # Only accept string-like names; silently ignore anything else.
        if issubclass(type(val), basestring):
            self._varName = val

    def _get_varName(self):
        return self._varName
    varName = property(fget=_get_varName, fset=_set_varName)

    def read(self, n=-1):
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # Delegate to the underlying dbf handler and mirror its position.
        self.file.seek(pos)
        self.pos = self.file.pos

    def _read(self):
        """Reads ArcGIS dbf file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open an ArcGIS dbf file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('arcgis_ohio.dbf'),'r','arcgis_dbf').read()

        Get the number of observations from the header

        >>> w.n
        88

        Get the mean number of neighbors

        >>> w.mean_neighbors
        5.25

        Get neighbor distances for a single observation

        >>> w[1]
        {2: 1.0, 11: 1.0, 6: 1.0, 7: 1.0}

        """
        # The whole file is consumed on the first read; subsequent reads
        # signal exhaustion.
        if self.pos > 0:
            raise StopIteration

        id_var = self.file.header[1]
        startPos = len(self.file.header)

        # A 3-column file holds [origin, NID, WEIGHT]; a 4-column file has
        # a meaningless leading column that is skipped via startPos.
        if startPos == 3:
            startPos = 0
        elif startPos == 4:
            startPos = 1
        else:
            raise ValueError("Wrong structure, a weights dbf file requires three or four data columns")

        self.varName = id_var
        id_type = int
        id_spec = self.file.field_spec[startPos]
        if id_spec[0] != 'N':
            raise TypeError('The data type for ids should be integer.')
        self.id_var = id_var

        weights = {}
        neighbors = {}
        for row in self.file:
            i, j, w = tuple(row)[startPos:]
            i = id_type(i)
            j = id_type(j)
            w = float(w)
            if i not in weights:
                weights[i] = []
                neighbors[i] = []
            weights[i].append(w)
            neighbors[i].append(j)
            self.pos = self.file.pos

        return W(neighbors, weights)

    def write(self, obj, useIdIndex=False):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        an ArcGIS dbf file
        write a weights object to the opened dbf file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('arcgis_ohio.dbf'),'r','arcgis_dbf')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.dbf')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w','arcgis_dbf')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created text file

        >>> wnew =  pysal.open(fname,'r','arcgis_dbf').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)

        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            self.file.header = [self.varName, 'NID', 'WEIGHT']

            # ArcGIS requires integer ids; optionally remap arbitrary ids
            # to their 0-based order index.
            id_type = type(obj.id_order[0])
            if id_type is not int and not useIdIndex:
                raise TypeError("ArcGIS DBF weight files support only integer IDs")
            if useIdIndex:
                id2i = obj.id2i
                obj = remap_ids(obj, id2i)

            # Width of the id columns is driven by the largest id value.
            id_spec = ('N', len(str(max(obj.id_order))), 0)
            self.file.field_spec = [id_spec, id_spec, ('N', 13, 6)]

            for id in obj.id_order:
                neighbors = zip(obj.neighbors[id], obj.weights[id])
                for neighbor, weight in neighbors:
                    self.file.write([id, neighbor, weight])
                    self.pos = self.file.pos

        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def flush(self):
        self._complain_ifclosed(self.closed)
        self.file.flush()

    def close(self):
        # Close the underlying dbf handler, then run the base-class
        # bookkeeping so self.closed becomes True and subsequent calls to
        # _complain_ifclosed fire (mirrors ArcGISSwmIO.close; the original
        # left .closed False forever).
        self.file.close()
        FileIO.FileIO.close(self)
+
diff --git a/pysal/core/IOHandlers/arcgis_swm.py b/pysal/core/IOHandlers/arcgis_swm.py
new file mode 100644
index 0000000..488b2d7
--- /dev/null
+++ b/pysal/core/IOHandlers/arcgis_swm.py
@@ -0,0 +1,212 @@
+import pysal
+import os.path
+import numpy as np
+from struct import pack, unpack
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from pysal.weights.util import remap_ids
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["ArcGISSwmIO"]
+
+
class ArcGISSwmIO(FileIO.FileIO):
    """
    Opens, reads, and writes weights file objects in ArcGIS swm format.

    Spatial weights objects in the ArcGIS swm format are used in
    ArcGIS Spatial Statistics tools.
    Particularly, this format can be directly used with the tools under
    the category of Mapping Clusters.

    The values for [ORG_i] and [DST_i] should be integers,
    as ArcGIS Spatial Statistics tools support only unique integer IDs.
    For the case where a weights object uses non-integer IDs,
    ArcGISSwmIO allows users to use internal ids corresponding to record numbers,
    instead of original ids.

    The specifics of each part of the above structure is as follows.

  .. table:: ArcGIS SWM Components

    ============ ============ ==================================== ================================
        Part      Data type           Description                   Length                        
    ============ ============ ==================================== ================================
     ID_VAR_NAME  ASCII TEXT  ID variable name                     Flexible (Up to the 1st ;)     
     ESRI_SRS     ASCII TEXT  ESRI spatial reference system        Flexible (Btw the 1st ; and \\n)  
     NO_OBS       l.e. int    Number of observations               4                         
     ROW_STD      l.e. int    Whether or not row-standardized      4                         
     WGT_i                                                                                   
     ORG_i        l.e. int    ID of observaiton i                  4                         
     NO_NGH_i     l.e. int    Number of neighbors for obs. i (m)   4                         
     NGHS_i                                                                                  
     DSTS_i       l.e. int    IDs of all neighbors of obs. i       4*m                       
     WS_i         l.e. float  Weights for obs. i and its neighbors 8*m                       
     W_SUM_i      l.e. float  Sum of weights for "                 8                         
    ============ ============ ==================================== ================================

    """

    FORMATS = ['swm']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        self._varName = 'Unknown'
        FileIO.FileIO.__init__(self, *args, **kwargs)
        # swm is a binary format: always open the file in binary mode.
        self.file = open(self.dataPath, self.mode + 'b')

    def _set_varName(self, val):
        # Only accept string-like names; silently ignore anything else.
        if issubclass(type(val), basestring):
            self._varName = val

    def _get_varName(self):
        return self._varName
    varName = property(fget=_get_varName, fset=_set_varName)

    def read(self, n=-1):
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # Only rewinding to the start is supported for this binary format.
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _read(self):
        """
        Reads ArcGIS swm file.
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open an ArcGIS swm file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('ohio.swm'),'r').read()

        Get the number of observations from the header

        >>> w.n
        88

        Get the mean number of neighbors

        >>> w.mean_neighbors
        5.25

        Get neighbor distances for a single observation

        >>> w[1]
        {2: 1.0, 11: 1.0, 6: 1.0, 7: 1.0}

        """

        # The whole file is consumed on the first read; subsequent reads
        # signal exhaustion.
        if self.pos > 0:
            raise StopIteration

        # Header line: "<id var name>;<ESRI SRS>\n", followed by two
        # little-endian 4-byte ints (observation count, row-standardized).
        header01 = self.file.readline()
        id_var, srs = header01[:-1].split(';')
        self.varName = id_var
        self.header_len = len(header01) + 8
        no_obs, row_std = tuple(unpack('<2l', self.file.read(8)))

        neighbors = {}
        weights = {}
        for i in xrange(no_obs):
            origin, no_nghs = tuple(unpack('<2l', self.file.read(8)))
            neighbors[origin] = []
            weights[origin] = []
            if no_nghs > 0:
                neighbors[origin] = list(unpack('<%il' %
                                                no_nghs, self.file.read(4 * no_nghs)))
                weights[origin] = list(unpack('<%id' %
                                              no_nghs, self.file.read(8 * no_nghs)))
            # The weight sum (W_SUM_i) is written for every observation,
            # including islands (see .write below, which always packs it).
            # Consume it unconditionally to keep the stream aligned; the
            # original only read it when no_nghs > 0, which misparsed any
            # file containing an observation with zero neighbors.
            w_sum = list(unpack('<d', self.file.read(8)))[0]

        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj, useIdIndex=False):
        """
        Writes a spatial weights matrix data file in swm format.

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        -------

        an ArcGIS swm file
        write a weights object to the opened swm file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('ohio.swm'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.swm')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created text file

        >>> wnew = pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname) """

        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            # ArcGIS requires integer ids; optionally remap arbitrary ids
            # to their 0-based order index.
            if not (type(obj.id_order[0]) in (np.int32, np.int64, int)) and not useIdIndex:
                raise TypeError("ArcGIS SWM files support only integer IDs")
            if useIdIndex:
                id2i = obj.id2i
                obj = remap_ids(obj, id2i)
            self.file.write('%s;Unknown\n' % self.varName)
            self.file.write(pack('<l', obj.n))
            # ROW_STD flag: 1 when the transform is row-standardized.
            self.file.write(pack('<l', obj.transform.upper() == 'R'))
            for obs in obj.weights:
                self.file.write(pack('<l', obs))
                no_nghs = len(obj.weights[obs])
                self.file.write(pack('<l', no_nghs))
                self.file.write(pack('<%il' % no_nghs, *obj.neighbors[obs]))
                self.file.write(pack('<%id' % no_nghs, *obj.weights[obs]))
                # W_SUM_i is emitted for every observation, islands included.
                self.file.write(pack('<d', sum(obj.weights[obs])))
            self.pos += 1

        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def close(self):
        # Close the binary file, then run base-class bookkeeping so
        # self.closed becomes True.
        self.file.close()
        FileIO.FileIO.close(self)
diff --git a/pysal/core/IOHandlers/arcgis_txt.py b/pysal/core/IOHandlers/arcgis_txt.py
new file mode 100644
index 0000000..77491c4
--- /dev/null
+++ b/pysal/core/IOHandlers/arcgis_txt.py
@@ -0,0 +1,208 @@
+import pysal
+import os.path
+import gwt
+from pysal.weights import W
+from pysal.weights.util import remap_ids
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["ArcGISTextIO"]
+
+
class ArcGISTextIO(gwt.GwtIO):
    """
    Opens, reads, and writes weights file objects in ArcGIS ASCII text format.

    Spatial weights objects in the ArcGIS text format are used in
    ArcGIS Spatial Statistics tools.
    This format is a simple text file with ASCII encoding.
    This format can be directly used with the tools under
    the category of "Mapping Clusters." But, it cannot be used with
    the "Generate Spatial Weights Matrix" tool.

    The first line of the ArcGIS text file is a header including the name of
    a data column that holded the ID variable in the original source data table.
    After this header line, it includes three data columns
    for origin id, destination id, and weight values.
    ArcGIS Spatial Statistics tools support only unique integer ids.
    Thus, the values in the first two columns should be integers.
    For the case where a weights object uses non-integer IDs,
    ArcGISTextIO allows users to use internal ids corresponding to record numbers,
    instead of original ids.

    An exemplary structure of an ArcGIS text file is as follows:
    [Line 1]    StationID
    [Line 2]    1    1    0.0
    [Line 3]    1    2    0.1
    [Line 4]    1    3    0.14286
    [Line 5]    2    1    0.1
    [Line 6]    2    3    0.05
    [Line 7]    3    1    0.16667
    [Line 8]    3    2    0.06667
    [Line 9]    3    3    0.0
    ...

    As shown in the above example, this file format allows explicit specification
    of weights for self-neighbors.
    When no entry is available for self-neighbors,
    ArcGIS spatial statistics tools consider they have zero weights.
    PySAL ArcGISTextIO class ignores self-neighbors if their weights are zero.

    References
    ----------
    http://webhelp.esri.com/arcgisdesktop/9.3/index.cfm?TopicName=Modeling_spatial_relationships

    Notes
    -----
    When there are an dbf file whose name is identical to the name of the source text file,
    ArcGISTextIO checks the data type of the ID data column and uses it for reading and
    writing the text file. Otherwise, it considers IDs are strings.

    """

    FORMATS = ['arcgis_text']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        # Drop any explicit dataFormat argument before delegating to the
        # generic GWT handler.
        args = args[:2]
        gwt.GwtIO.__init__(self, *args, **kwargs)

    def _read(self):
        """Reads ArcGIS Text file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open a text file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('arcgis_txt.txt'),'r','arcgis_text').read()

        Get the number of observations from the header

        >>> w.n
        3

        Get the mean number of neighbors

        >>> w.mean_neighbors
        2.0

        Get neighbor distances for a single observation

        >>> w[1]
        {2: 0.1, 3: 0.14286}

        """
        # The whole file is consumed on the first read; subsequent reads
        # signal exhaustion.
        if self.pos > 0:
            raise StopIteration

        id_var = self.file.readline().strip()
        self.varName = id_var
        id_order = None
        id_type = int
        # Best effort: if a sibling dbf exists, use its ID column to fix
        # the id order and type; otherwise fall back to unordered ids.
        try:
            dbf = os.path.join(self.dataPath + '.dbf')
            if os.path.exists(dbf):
                db = pysal.open(dbf, 'r')
                if id_var in db.header:
                    id_order = db.by_col(id_var)
                    id_type = type(id_order[0])
                else:
                    warn("ID_VAR:'%s' was not in the DBF header, proceeding with unordered string ids." % (id_var), RuntimeWarning)
            else:
                warn("DBF relating to ArcGIS TEXT was not found, proceeding with unordered string ids.", RuntimeWarning)
        except:
            # Deliberately broad: any dbf problem degrades to unordered ids.
            warn("Exception occurred while reading DBF, proceeding with unordered string ids.", RuntimeWarning)

        # NOTE: original code read `type(id_order)[0]`, which indexes the
        # list *type* and raised TypeError whenever id_order was set; the
        # intent is to check the type of the first id.
        if (id_type is not int) or (id_order and type(id_order[0]) is not int):
            raise TypeError("The data type for ids should be integer.")

        if id_order:
            self.n = len(id_order)
            self.shp = os.path.split(self.dataPath)[1].split('.')[0]
        self.id_var = id_var

        weights, neighbors = self._readlines(id_type)
        # Drop explicit zero-weight self-neighbors (ArcGIS treats missing
        # self entries as zero anyway).
        for k in neighbors:
            if k in neighbors[k]:
                k_index = neighbors[k].index(k)
                if weights[k][k_index] == 0.0:
                    del neighbors[k][k_index]
                    del weights[k][k_index]

        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj, useIdIndex=False):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        an ArcGIS text file
        write a weights object to the opened text file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('arcgis_txt.txt'),'r','arcgis_text')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.txt')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w','arcgis_text')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created text file

        >>> wnew =  pysal.open(fname,'r','arcgis_text').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            # ArcGIS requires integer ids; optionally remap arbitrary ids
            # to their 0-based order index.
            id_type = type(obj.id_order[0])
            if id_type is not int and not useIdIndex:
                raise TypeError("ArcGIS TEXT weight files support only integer IDs")
            if useIdIndex:
                id2i = obj.id2i
                obj = remap_ids(obj, id2i)

            header = '%s\n' % self.varName
            self.file.write(header)
            self._writelines(obj)
        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))
+
diff --git a/pysal/core/IOHandlers/csvWrapper.py b/pysal/core/IOHandlers/csvWrapper.py
new file mode 100644
index 0000000..2ef0e0b
--- /dev/null
+++ b/pysal/core/IOHandlers/csvWrapper.py
@@ -0,0 +1,97 @@
+import pysal.core.Tables as Tables
+import csv
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['csvWrapper']
+
+
class csvWrapper(Tables.DataTable):

    __doc__ = Tables.DataTable.__doc__

    FORMATS = ['csv']
    READ_MODES = ['r', 'Ur', 'rU', 'U']
    MODES = list(READ_MODES)

    def __init__(self, *args, **kwargs):
        """

        Examples
        --------
        >>> import pysal
        >>> file_name = pysal.examples.get_path('stl_hom.csv')
        >>> f = pysal.open(file_name,'r')
        >>> y = f.read()
        >>> f.header
        ['WKT', 'NAME', 'STATE_NAME', 'STATE_FIPS', 'CNTY_FIPS', 'FIPS', 'FIPSNO', 'HR7984', 'HR8488', 'HR8893', 'HC7984', 'HC8488', 'HC8893', 'PO7984', 'PO8488', 'PO8893', 'PE77', 'PE82', 'PE87', 'RDAC80', 'RDAC85', 'RDAC90']
        >>> f._spec
        [<type 'str'>, <type 'str'>, <type 'str'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'float'>, <type 'float'>, <type 'float'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'int'>, <type 'float'>, <type 'float'>, <type 'float'>, <type 'float'>, <type 'float'>, <type 'float'>]


        """
        Tables.DataTable.__init__(self, *args, **kwargs)
        self.__idx = {}
        self.__len = None
        self._open()

    def __len__(self):
        # Number of data rows, header excluded; populated by _open().
        return self.__len

    def _open(self):
        # Eagerly slurp the whole CSV into memory; the file handle is
        # closed again before _open() returns.
        self.fileObj = open(self.dataPath, self.mode)
        if self.mode not in self.READ_MODES:
            return
        self.dataObj = csv.reader(self.fileObj)
        rows = [record for record in self.dataObj]
        if self._determineHeader(rows):
            self.header = rows.pop(0)
        else:
            # No header detected: synthesize field names.
            self.header = ['field_%d' % i for i in range(len(rows[0]))]
        self._spec = self._determineSpec(rows)
        self.data = rows
        self.fileObj.close()
        self.__len = len(rows)

    def _determineHeader(self, data):
        # A header row exists when the column types inferred from the
        # first row differ from those inferred from the remaining rows.
        return self._determineSpec([data[0]]) != self._determineSpec(data[1:])

    @staticmethod
    def _determineSpec(data):
        """Infer a per-column type (int, float, or str) from string cells."""
        ncols = len(data[0])
        spec = []
        for j in range(ncols):
            canInt = True
            canFloat = True
            for record in data:
                cell = record[j]
                # Crude numeric test: strip sign and decimal point and
                # require the remainder to be all digits.
                if not cell.strip().replace('-', '').replace('.', '').isdigit():
                    canInt = False
                    canFloat = False
                    break
                if canInt and '.' in cell:
                    canInt = False
            if canInt:
                spec.append(int)
            elif canFloat:
                spec.append(float)
            else:
                spec.append(str)
        return spec

    def _read(self):
        # Sequential row access; None signals exhaustion.
        if self.pos >= len(self):
            return None
        record = self.data[self.pos]
        self.pos += 1
        return record
+
diff --git a/pysal/core/IOHandlers/dat.py b/pysal/core/IOHandlers/dat.py
new file mode 100644
index 0000000..7f668d9
--- /dev/null
+++ b/pysal/core/IOHandlers/dat.py
@@ -0,0 +1,132 @@
+import pysal
+import os.path
+import gwt
+from pysal.weights import W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["DatIO"]
+
+
class DatIO(gwt.GwtIO):
    """
    Opens, reads, and writes file objects in DAT format.

    Spatial weights objects in DAT format are used in
    Dr. LeSage's MatLab Econ library.
    This DAT format is a simple text file with DAT or dat extension.
    It has no header line, and contains three data columns
    for origin id, destination id, and weight value:

    [Line 1]    2    1    0.25
    [Line 2]    5    1    0.50
    ...

    Origin/destination IDs in this file format are simply record
    numbers starting with 1. IDs are not necessarily integers.
    Data values for all columns should be numeric.

    """

    FORMATS = ['dat']
    MODES = ['r', 'w']

    def _read(self):
        """Reads .dat file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open .dat file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('wmat.dat'),'r').read()

        Get the number of observations from the header

        >>> w.n
        49

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.7346938775510203

        Get neighbor distances for a single observation

        >>> w[1]
        {2.0: 0.3333, 5.0: 0.3333, 6.0: 0.3333}

        """
        # A DAT handler is single-shot: a second read signals exhaustion.
        if self.pos > 0:
            raise StopIteration

        # DAT ids are plain numbers with no header metadata, so they are
        # parsed as floats by the shared gwt-style body reader.
        weights, neighbors = self._readlines(float)

        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a DAT file
        write a weights object to the opened DAT file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('wmat.dat'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.dat')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created dat file

        >>> wnew =  pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        self._complain_ifclosed(self.closed)
        # DAT files have no header, so writing is just the shared
        # gwt-style body writer applied to a validated weights object.
        if not issubclass(type(obj), W):
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))
        self._writelines(obj)
+
diff --git a/pysal/core/IOHandlers/gal.py b/pysal/core/IOHandlers/gal.py
new file mode 100644
index 0000000..a8fcd47
--- /dev/null
+++ b/pysal/core/IOHandlers/gal.py
@@ -0,0 +1,218 @@
+import pysal.core.FileIO as FileIO
+from pysal.weights import W, WSP
+from scipy import sparse
+import numpy as np
+
+__author__ = 'Charles R Schmidt <schmidtc at gmail.com>'
+__all__ = ['GalIO']
+
+
class GalIO(FileIO.FileIO):
    """
    Opens, reads, and writes file objects in GAL format.

    A GAL file is a text adjacency list: a header line containing the
    number of observations (possibly preceded by a flag value), followed
    by one pair of lines per observation -- an "id n_neighbors" line and
    a line listing that observation's neighbor ids.
    """
    FORMATS = ['gal']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        # ids are read back as strings unless data_type is set to
        # another callable before reading
        self._typ = str
        FileIO.FileIO.__init__(self, *args, **kwargs)
        self.file = open(self.dataPath, self.mode)

    def read(self, n=-1, sparse=False):
        """
        Read the opened GAL file into a weights object.

        sparse: boolean
               If true return scipy sparse object
               If false return pysal w object
        """
        self._sparse = sparse
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # only rewinding to the start of the file is supported
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _get_data_type(self):
        return self._typ

    def _set_data_type(self, typ):
        # typ must be a callable (e.g. int, str) used to coerce ids
        if callable(typ):
            self._typ = typ
        else:
            raise TypeError("Expecting a callable")
    data_type = property(fset=_set_data_type, fget=_get_data_type)

    def _read(self):
        """
        Parameters
        ----------
        reads in a GalIO object

        Returns
        -------
        returns a W object

        Examples
        --------

        >>> import tempfile, pysal, os

        Read in a file GAL file

        >>> testfile = pysal.open(pysal.examples.get_path('sids2.gal'),'r')

        Return a W object

        >>> w = testfile.read()
        >>> w.n == 100
        True
        >>> w.sd == 1.5151237573214935
        True
        >>> testfile = pysal.open(pysal.examples.get_path('sids2.gal'),'r')

        Return a sparse matrix for the w information

        >>> wsp = testfile.read(sparse=True)
        >>> wsp.sparse.nnz
        462

        """
        if self._sparse:
            # single-shot read: a second call signals exhaustion
            if self.pos > 0:
                raise StopIteration

            # header may be "n" or "flag n"; the last value wins
            header = self.file.readline().strip().split()
            header_n = len(header)
            n = int(header[0])
            if header_n > 1:
                n = int(header[1])
            ids = []
            idsappend = ids.append
            row = []
            extend = row.extend    # avoid dot in loops
            col = []
            append = col.append
            counter = 0
            typ = self.data_type
            for i in xrange(n):
                # each observation is two lines: "id n_neighbors" then
                # the neighbor id list
                id, n_neighbors = self.file.readline().strip().split()
                id = typ(id)
                n_neighbors = int(n_neighbors)
                neighbors_i = map(typ, self.file.readline().strip().split())
                nn = len(neighbors_i)
                # one (row, col) entry per neighbor pair, weight 1.0
                extend([id] * nn)
                counter += nn
                for id_neigh in neighbors_i:
                    append(id_neigh)
                idsappend(id)
            self.pos += 1
            row = np.array(row)
            col = np.array(col)
            data = np.ones(counter)
            # remap raw ids to 0..n-1 positions in sorted-unique order
            # (np.where lookup per element; assumes every col id also
            # appears as a row id)
            ids = np.unique(row)
            row = np.array([np.where(ids == j)[0] for j in row]).flatten()
            col = np.array([np.where(ids == j)[0] for j in col]).flatten()
            spmat = sparse.csr_matrix((data, (row, col)), shape=(n, n))
            return WSP(spmat)

        else:
            # single-shot read: a second call signals exhaustion
            if self.pos > 0:
                raise StopIteration
            neighbors = {}
            ids = []
            # handle case where more than n is specified in first line
            header = self.file.readline().strip().split()
            header_n = len(header)
            n = int(header[0])
            if header_n > 1:
                n = int(header[1])
            w = {}
            typ = self.data_type
            for i in range(n):
                # each observation is two lines: "id n_neighbors" then
                # the neighbor id list
                id, n_neighbors = self.file.readline().strip().split()
                id = typ(id)
                n_neighbors = int(n_neighbors)
                neighbors_i = map(typ, self.file.readline().strip().split())
                neighbors[id] = neighbors_i
                ids.append(id)
            self.pos += 1
            # id_order preserves the file's observation ordering
            return W(neighbors, id_order=ids)

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a GAL file
        write a weights object to the opened GAL file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('sids2.gal'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.gal')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created gal file

        >>> wnew =  pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            IDS = obj.id_order
            # header is just the observation count; weights values are
            # not representable in GAL (binary contiguity only)
            self.file.write('%d\n' % (obj.n))
            for id in IDS:
                neighbors = obj.neighbors[id]
                self.file.write('%s %d\n' % (str(id), len(neighbors)))
                self.file.write(' '.join(map(str, neighbors)) + '\n')
            self.pos += 1
        else:
            raise TypeError("Expected a pysal weights object, got: %s" %
                            (type(obj)))

    def close(self):
        self.file.close()
        FileIO.FileIO.close(self)
+
+
diff --git a/pysal/core/IOHandlers/geobugs_txt.py b/pysal/core/IOHandlers/geobugs_txt.py
new file mode 100644
index 0000000..3743fa0
--- /dev/null
+++ b/pysal/core/IOHandlers/geobugs_txt.py
@@ -0,0 +1,254 @@
+import pysal
+import os.path
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["GeoBUGSTextIO"]
+
+
class GeoBUGSTextIO(FileIO.FileIO):
    """
    Opens, reads, and writes weights file objects in the text format
    used in GeoBUGS. GeoBUGS generates a spatial weights matrix
    as an R object and writes it out as an ASCII text representation of
    the R object.

    An exemplary GeoBUGS text file is as follows.
    list([CARD],[ADJ],[WGT],[SUMNUMNEIGH])
    where [CARD] and [ADJ] are required but the others are optional.
    PySAL assumes [CARD] and [ADJ] always exist in an input text file.
    It can read a GeoBUGS text file, even when its content is not written
    in the order of [CARD], [ADJ], [WGT], and [SUMNUMNEIGH].
    It always writes all of [CARD], [ADJ], [WGT], and [SUMNUMNEIGH].
    PySAL does not apply text wrapping during file writing.

    In the above example,

    [CARD]:
        num=c([a list of comma-splitted neighbor cardinalities])

    [ADJ]:
        adj=c([a list of comma-splitted neighbor IDs])
        if cardinality is zero, neighbor IDs are skipped.
        The ordering of observations is the same in both [CARD] and
        [ADJ].
        Neighbor IDs are record numbers starting from one.

    [WGT]:
        weights=c([a list of comma-splitted weights])
        The restrictions for [ADJ] also apply to [WGT].

    [SUMNUMNEIGH]:
        sumNumNeigh=[The total number of neighbor pairs]
        the total number of neighbor pairs  is an integer
        value and the same as the sum of neighbor cardinalities.

    Notes
    -----
    For the files generated from R spdep nb2WB and dput function,
    it is assumed that the value for the control parameter of dput function
    is NULL. Please refer to R spdep nb2WB function help file.

    References
    ----------
    Thomas, A., Best, N., Lunn, D., Arnold, R., and Spiegelhalter, D.

    (2004) GeoBUGS User Manual.

    R spdep nb2WB function help file.

    """

    FORMATS = ['geobugs_text']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        # FileIO accepts at most (dataPath, mode); drop any extra
        # positional arguments (such as the format specifier).
        args = args[:2]
        FileIO.FileIO.__init__(self, *args, **kwargs)
        self.file = open(self.dataPath, self.mode)

    def read(self, n=-1):
        """
        Reads GeoBUGS text file

        Returns
        -------
        a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open a GeoBUGS text file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('geobugs_scot'),'r','geobugs_text').read()
        WARNING: there are 3 disconnected observations
        Island ids:  [6, 8, 11]

        Get the number of observations from the header

        >>> w.n
        56

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.1785714285714288

        Get neighbor distances for a single observation

        >>> w[1]
        {9: 1.0, 19: 1.0, 5: 1.0}

        """
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # Only rewinding to the start of the file is supported.
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _read(self):
        """Parse the whole file into a W object (single-shot read)."""
        if self.pos > 0:
            raise StopIteration

        fbody = self.file.read()
        # Locate each section by the offset of its keyword; sections may
        # appear in any order in the file.
        body_structure = {}
        for i in ['num', 'adj', 'weights', 'sumNumNeigh']:
            i_loc = fbody.find(i)
            if i_loc != -1:
                body_structure[i] = (i_loc, i)
        body_sequence = sorted(body_structure.values())
        # Sentinel so the last real section is sliced up to end-of-file
        # (fbody[start:-1] drops the trailing character, typically ')').
        body_sequence.append((-1, 'eof'))

        for i in range(len(body_sequence) - 1):
            part, next_part = body_sequence[i], body_sequence[i + 1]
            start, end = part[0], next_part[0]
            part_text = fbody[start:end]

            # Trim the section text down to its first..last digit, then
            # drop embedded newlines so it splits cleanly on commas.
            part_length, start, end = len(part_text), 0, -1
            for c in xrange(part_length):
                if part_text[c].isdigit():
                    start = c
                    break

            for c in xrange(part_length - 1, 0, -1):
                if part_text[c].isdigit():
                    end = c + 1
                    break
            part_text = part_text[start: end]
            part_text = part_text.replace('\n', '')
            # Only the weights section carries non-integer values.
            value_type = int
            if part[1] == 'weights':
                value_type = float
            body_structure[part[1]] = [value_type(v)
                                       for v in part_text.split(',')]

        cardinalities = body_structure['num']
        adjacency = body_structure['adj']
        # Default to binary weights when no [WGT] section was present.
        raw_weights = [1.0] * int(sum(cardinalities))
        if 'weights' in body_structure and isinstance(body_structure['weights'], list):
            raw_weights = body_structure['weights']

        # Observation ids are 1-based record numbers; slice the flat
        # adjacency/weights lists by each observation's cardinality.
        no_obs = len(cardinalities)
        neighbors = {}
        weights = {}
        pos = 0
        for i in xrange(no_obs):
            neighbors[i + 1] = []
            weights[i + 1] = []
            no_nghs = cardinalities[i]
            if no_nghs > 0:
                neighbors[i + 1] = adjacency[pos: pos + no_nghs]
                weights[i + 1] = raw_weights[pos: pos + no_nghs]
            pos += no_nghs

        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj):
        """
        Writes a weights object to the opened text file.

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a GeoBUGS text file

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('geobugs_scot'),'r','geobugs_text')
        >>> w = testfile.read()
        WARNING: there are 3 disconnected observations
        Island ids:  [6, 8, 11]

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w','geobugs_text')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created text file

        >>> wnew =  pysal.open(fname,'r','geobugs_text').read()
        WARNING: there are 3 disconnected observations
        Island ids:  [6, 8, 11]

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)

        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):

            # Flatten the weights object in id_order into the three
            # parallel lists the GeoBUGS format uses.
            cardinalities, neighbors, weights = [], [], []
            for i in obj.id_order:
                cardinalities.append(obj.cardinalities[i])
                neighbors.extend(obj.neighbors[i])
                weights.extend(obj.weights[i])

            self.file.write('list(')
            self.file.write('num=c(%s),' % ','.join(map(str, cardinalities)))
            self.file.write('adj=c(%s),' % ','.join(map(str, neighbors)))
            # Bug fix: the weights section was collected but never
            # written, contradicting the documented [WGT] output and
            # silently dropping non-binary weight values.
            self.file.write('weights=c(%s),' % ','.join(map(str, weights)))
            self.file.write('sumNumNeigh=%i)' % sum(cardinalities))
            self.pos += 1

        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def close(self):
        self.file.close()
        FileIO.FileIO.close(self)
+
diff --git a/pysal/core/IOHandlers/geoda_txt.py b/pysal/core/IOHandlers/geoda_txt.py
new file mode 100644
index 0000000..406c8a9
--- /dev/null
+++ b/pysal/core/IOHandlers/geoda_txt.py
@@ -0,0 +1,91 @@
+import pysal.core.Tables as Tables
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['GeoDaTxtReader']
+
+
class GeoDaTxtReader(Tables.DataTable):
    """GeoDa Text File Export Format
    """
    __doc__ = Tables.DataTable.__doc__
    FORMATS = ['geoda_txt']
    MODES = ['r']

    def __init__(self, *args, **kwargs):
        """
        Examples
        --------
        >>> import pysal
        >>> f = pysal.open(pysal.examples.get_path('stl_hom.txt'),'r')
        >>> f.header
        ['FIPSNO', 'HR8488', 'HR8893', 'HC8488']
        >>> len(f)
        78
        >>> f.dat[0]
        ['17107', '1.290722', '1.624458', '2']
        >>> f.dat[-1]
        ['29223', '0', '8.451537', '0']
        >>> f._spec
        [<type 'int'>, <type 'float'>, <type 'float'>, <type 'int'>]

        """
        Tables.DataTable.__init__(self, *args, **kwargs)
        self.__idx = {}
        self.__len = None
        self.pos = 0
        self._open()

    def _open(self):
        # Parse the whole file eagerly: the first line is "n,k", the
        # second the quoted column names, the rest the data rows.
        if self.mode == 'r':
            self.fileObj = open(self.dataPath, 'r')
            n, k = self.fileObj.readline().strip().split(',')
            n, k = int(n), int(k)
            header = self.fileObj.readline().strip().split(',')
            self.header = [f.replace('"', '') for f in header]
            try:
                # the declared column count must match the header
                assert len(self.header) == k
            except AssertionError:
                raise TypeError("This is not a valid geoda_txt file.")
            dat = self.fileObj.readlines()
            self.dat = [line.strip().split(',') for line in dat]
            self._spec = self._determineSpec(self.dat)
            self.__len = len(dat)

    def __len__(self):
        # Number of data rows (header and count line excluded).
        return self.__len

    def _read(self):
        # Sequential row access; returns None when exhausted.
        # Bug fix: the original ended with `raise None`, which raises a
        # TypeError at end-of-data instead of signalling exhaustion the
        # way the sibling csvWrapper._read does (`return None`).
        if self.pos < len(self):
            row = self.dat[self.pos]
            self.pos += 1
            return row
        else:
            return None

    def close(self):
        self.fileObj.close()
        Tables.DataTable.close(self)

    @staticmethod
    def _determineSpec(data):
        """Infer a per-column type (int, float, or str) from string cells."""
        cols = len(data[0])
        spec = []
        for j in range(cols):
            isInt = True
            isFloat = True
            for row in data:
                val = row[j]
                # crude numeric test: strip sign/decimal point and check
                # that only digits remain
                if not val.strip().replace('-', '').replace('.', '').isdigit():
                    isInt = False
                    isFloat = False
                    break
                else:
                    if isInt and '.' in val:
                        isInt = False
            if isInt:
                spec.append(int)
            elif isFloat:
                spec.append(float)
            else:
                spec.append(str)
        return spec
diff --git a/pysal/core/IOHandlers/gwt.py b/pysal/core/IOHandlers/gwt.py
new file mode 100644
index 0000000..858afc2
--- /dev/null
+++ b/pysal/core/IOHandlers/gwt.py
@@ -0,0 +1,286 @@
+import pysal
+import os.path
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from warnings import warn
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ["GwtIO"]
+
+
class unique_filter(object):
    """
    Util predicate for the builtin filter:
    a fresh instance remembers every value it has seen and accepts only
    first occurrences, so duplicates are removed without changing the
    order of the list.

    Be sure to create a new instance every time, unless you want a
    global filter.

    Example:
    >>> l = ['a','a','b','a','c','v','d','a','v','d']
    >>> filter(unique_filter(),l)
    ['a', 'b', 'c', 'v', 'd']
    """
    def __init__(self):
        # values already encountered
        self.exclude = set()

    def __call__(self, x):
        # accept x only on first sight, then remember it
        seen = x in self.exclude
        self.exclude.add(x)
        return not seen
+
+
class GwtIO(FileIO.FileIO):
    """
    Opens, reads, and writes file objects in GWT (and KWT) format: a
    header line "flag n shapefile id_variable" followed by one
    "origin destination weight" triple per line. Also serves as the
    base class for other triple-per-line formats (e.g. DAT).
    """

    FORMATS = ['kwt','gwt']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        # header metadata; overwritten when a file is read or when a
        # weights object carrying _shpName/_varName is written
        self._varName = 'Unknown'
        self._shpName = 'Unknown'
        FileIO.FileIO.__init__(self, *args, **kwargs)
        self.file = open(self.dataPath, self.mode)

    def _set_varName(self, val):
        # non-string assignments are silently ignored
        if issubclass(type(val), basestring):
            self._varName = val

    def _get_varName(self):
        return self._varName
    varName = property(fget=_get_varName, fset=_set_varName)

    def _set_shpName(self, val):
        # non-string assignments are silently ignored
        if issubclass(type(val), basestring):
            self._shpName = val

    def _get_shpName(self):
        return self._shpName
    shpName = property(fget=_get_shpName, fset=_set_shpName)

    def read(self, n=-1):
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # only rewinding to the start of the file is supported
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _readlines(self, id_type, ret_ids=False):
        """
        Reads the main body of gwt-like weights files
        into two dictionaries containing weights and neighbors.
        This code part is repeatedly used for many weight file formats.
        Header lines, however, are different from format to format.
        So, for code reusability, this part is separated out from
        _read function by Myunghwa Hwang.
        """
        data = [row.strip().split() for row in self.file.readlines()]
        # origin ids in first-seen order, duplicates removed
        ids = filter(unique_filter(), [x[0] for x in data])
        ids = map(id_type, ids)
        WN = {}
        for id in ids:  # note: fromkeys is no good here, all keys end up sharing the same dict value
            WN[id] = {}
        for i, j, v in data:
            i = id_type(i)
            j = id_type(j)
            WN[i][j] = float(v)
        weights = {}
        neighbors = {}
        for i in WN:
            weights[i] = WN[i].values()
            neighbors[i] = WN[i].keys()
        if ret_ids:
            return weights, neighbors, ids
        else:
            return weights, neighbors

    def _read(self):
        """Reads .gwt file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(f)' at the interpreter to see what methods are supported.
        Open .gwt file and read it into a pysal weights object

        >>> f = pysal.open(pysal.examples.get_path('juvenile.gwt'),'r').read()

        Get the number of observations from the header

        >>> f.n
        168

        Get the mean number of neighbors

        >>> f.mean_neighbors
        16.678571428571427

        Get neighbor distances for a single observation

        >>> f[1]
        {2: 14.1421356}


        """
        # single-shot read: a second call signals exhaustion
        if self.pos > 0:
            raise StopIteration

        # header: "flag n shapefile id_variable"
        flag, n, shp, id_var = self.file.readline().strip().split()
        self.shpName = shp
        self.varName = id_var
        id_order = None
        id_type = str
        # try to recover a typed id ordering from the sibling DBF file
        # named in the header; fall back to unordered string ids on any
        # failure
        try:
            base = os.path.split(self.dataPath)[0]
            dbf = os.path.join(base, self.shpName.replace('.shp', '') + '.dbf')
            if os.path.exists(dbf):
                db = pysal.open(dbf, 'r')
                if id_var in db.header:
                    id_order = db.by_col(id_var)
                    id_type = type(id_order[0])
                else:
                    warn("ID_VAR:'%s' was not in the DBF header, proceeding with unordered string ids." % (id_var), RuntimeWarning)
            else:
                warn("DBF relating to GWT was not found, proceeding with unordered string ids.", RuntimeWarning)
        except:
            warn("Exception occurred will reading DBF, proceeding with unordered string ids.", RuntimeWarning)
        self.flag = flag
        self.n = n
        self.shp = shp
        self.id_var = id_var
        if id_order is None:
            # no DBF ordering available: take ids from the body itself
            weights, neighbors, id_order = self._readlines(id_type, True)
        else:
            weights, neighbors = self._readlines(id_type)

        self.pos += 1
        w = W(neighbors, weights, id_order)
        #w.transform = 'b'
        #set meta data
        w._shpName = self.shpName
        w._varName = self.varName
        #warn("Weights have been converted to binary. To retrieve original values use w.transform='o'", RuntimeWarning)
        return w

    def _writelines(self, obj):
        """
        Writes  the main body of gwt-like weights files.
        This code part is repeatedly used for many weight file formats.
        Header lines, however, are different from format to format.
        So, for code reusability, this part is separated out from
        write function by Myunghwa Hwang.
        """
        for id in obj.id_order:
            neighbors = zip(obj.neighbors[id], obj.weights[id])
            # ids may contain whitespace; collapse it so each output
            # line still splits into exactly three fields on read
            str_id = "_".join(str(id).split())
            for neighbor, weight in neighbors:
                neighbor = "_".join(str(neighbor).split())

                self.file.write('%s %s %6G\n' % (str_id,
                                                 neighbor, weight))
                self.pos += 1

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a GWT file
        write a weights object to the opened GWT file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('juvenile.gwt'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.gwt')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created gwt file

        >>> wnew =  pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            #transform = obj.transform
            #obj.transform = 'o'
            # carry over header metadata if the weights object has it
            if hasattr(obj, '_shpName'):
                self.shpName = obj._shpName
            if hasattr(obj, '_varName'):
                self.varName = obj._varName
            header = '%s %i %s %s\n' % ('0', obj.n, self.shpName, self.varName)
            self.file.write(header)
            self._writelines(obj)
            #obj.transform = transform

        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def close(self):
        self.file.close()
        FileIO.FileIO.close(self)

    @staticmethod
    def __zero_offset(neighbors, weights, original_ids=None):
        # Remaps arbitrary ids to 0-based positional ids and back;
        # NOTE(review): not referenced anywhere in this class -- appears
        # to be retained legacy code.
        if not original_ids:
            original_ids = neighbors.keys()
        old_weights = weights
        new_weights = {}
        new_ids = {}
        old_ids = {}
        new_neighbors = {}
        for i in original_ids:
            new_i = original_ids.index(i)
            new_ids[new_i] = i
            old_ids[i] = new_i
            neighbors_i = neighbors[i]
            new_neighbors_i = [original_ids.index(j) for j in neighbors_i]
            new_neighbors[new_i] = new_neighbors_i
            new_weights[new_i] = weights[i]
        info = {}
        info['new_ids'] = new_ids
        info['old_ids'] = old_ids
        info['new_neighbors'] = new_neighbors
        info['new_weights'] = new_weights
        return info
+
diff --git a/pysal/core/IOHandlers/mat.py b/pysal/core/IOHandlers/mat.py
new file mode 100644
index 0000000..e79da9d
--- /dev/null
+++ b/pysal/core/IOHandlers/mat.py
@@ -0,0 +1,175 @@
+import pysal
+import os.path
+import scipy.io as sio
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from pysal.weights.util import full, full2W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["MatIO"]
+
+
class MatIO(FileIO.FileIO):
    """
    Opens, reads, and writes weights file objects in MATLAB Level 4-5 MAT format.

    MAT files are used in Dr. LeSage's MATLAB Econometrics library.
    The MAT file format can handle both full and sparse matrices,
    and it allows for a matrix dimension greater than 256.
    In PySAL, row and column headers of a MATLAB array are ignored.

    PySAL uses matlab io tools in scipy.
    Thus, it is subject to all limits that loadmat and savemat in scipy have.

    Notes
    -----
    If a given weights object contains too many observations to
    write it out as a full matrix,
    PySAL writes out the object as a sparse matrix.

    References
    ----------
    MathWorks (2011) "MATLAB 7 MAT-File Format" at
    http://www.mathworks.com/help/pdf_doc/matlab/matfile_format.pdf.

    scipy matlab io
    http://docs.scipy.org/doc/scipy/reference/tutorial/io.html

    """

    FORMATS = ['mat']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        # Default MATLAB variable name used when the caller never sets one.
        self._varName = 'Unknown'
        FileIO.FileIO.__init__(self, *args, **kwargs)
        # MAT files are binary; always append 'b' to the requested mode.
        self.file = open(self.dataPath, self.mode + 'b')

    def _set_varName(self, val):
        # Non-string values are silently ignored rather than raising.
        if issubclass(type(val), basestring):
            self._varName = val

    def _get_varName(self):
        return self._varName
    # Name of the MATLAB variable holding the weights matrix.
    varName = property(fget=_get_varName, fset=_set_varName)

    def read(self, n=-1):
        # n is accepted for FileIO API compatibility but ignored; the whole
        # matrix is always read in one shot.
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # Only rewinding to the start of the file is supported.
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _read(self):
        """Reads MATLAB mat file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open a MATLAB mat file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('spat-sym-us.mat'),'r').read()

        Get the number of observations from the header

        >>> w.n
        46

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.0869565217391308

        Get neighbor distances for a single observation

        >>> w[1]
        {25: 1, 3: 1, 28: 1, 39: 1}

        """
        # This handler yields a single object; a second read stops iteration.
        if self.pos > 0:
            raise StopIteration

        mat = sio.loadmat(self.file)
        # loadmat returns metadata under keys starting with '_' (e.g.
        # '__header__'); the first remaining key is taken as the weights
        # matrix.  Assumes the file contains exactly one data variable.
        mat_keys = [k for k in mat if not k.startswith("_")]
        full_w = mat[mat_keys[0]]

        self.pos += 1
        return full2W(full_w)

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        -------

        a MATLAB mat file
        write a weights object to the opened mat file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('spat-sym-us.mat'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.mat')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created mat file

        >>> wnew =  pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)

        """
        self._complain_ifclosed(self.closed)
        if issubclass(type(obj), W):
            # Prefer a dense array; full() raises ValueError when the matrix
            # is too large, in which case fall back to the sparse form.
            try:
                w = full(obj)[0]
            except ValueError:
                w = obj.sparse
            sio.savemat(self.file, {'WEIGHT': w})
            self.pos += 1
        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def close(self):
        # Close the underlying binary file before the base-class bookkeeping.
        self.file.close()
        FileIO.FileIO.close(self)
+
diff --git a/pysal/core/IOHandlers/mtx.py b/pysal/core/IOHandlers/mtx.py
new file mode 100644
index 0000000..1974f18
--- /dev/null
+++ b/pysal/core/IOHandlers/mtx.py
@@ -0,0 +1,241 @@
+import pysal
+import os.path
+import scipy.io as sio
+import pysal.core.FileIO as FileIO
+from pysal.weights import W, WSP
+from pysal.weights.util import full, full2W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["MtxIO"]
+
+
class MtxIO(FileIO.FileIO):
    """
    Opens, reads, and writes weights file objects in Matrix Market MTX format.

    The Matrix Market MTX format is used to facilitate the exchange of matrix data.
    In PySAL, it is being tested as a new file format for delivering
    the weights information of a spatial weights matrix.
    Although the MTX format supports both full and sparse matrices with different
    data types, it is assumed that spatial weights files in the mtx format always
    use the sparse (or coordinate) format with real data values.
    For now, no additional assumption (e.g., symmetry) is made of the structure
    of a weights matrix.

    With the above assumptions,
    the structure of a MTX file containing a spatial weights matrix
    can be defined as follows:
    %%MatrixMarket matrix coordinate real general <--- header 1 (constant)
    % Comments starts                             <---
    % ....                                           | 0 or more comment lines
    % Comments ends                               <---
    M    N    L                                   <--- header 2, rows, columns, entries
    I1   J1   A(I1,J1)                            <---
    ...                                              | L entry lines
    IL   JL   A(IL,JL)                            <---

    In the MTX format, the index for rows or columns starts with 1.

    PySAL uses mtx io tools in scipy.
    Thus, it is subject to all limits that scipy currently has.
    Reengineering might be required, since scipy currently reads in
    the entire entry into memory.

    References
    ----------
    MTX format specification
    http://math.nist.gov/MatrixMarket/formats.html

    scipy matlab io
    http://docs.scipy.org/doc/scipy/reference/tutorial/io.html

    """

    FORMATS = ['mtx']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        FileIO.FileIO.__init__(self, *args, **kwargs)
        # MTX content is handled as a binary stream by scipy's mm* readers.
        self.file = open(self.dataPath, self.mode + 'b')

    def read(self, n=-1, sparse=False):
        """
        sparse: boolean
                if true, return pysal WSP object
                if false, return pysal W object
        """
        # n is accepted for FileIO API compatibility but ignored by _read.
        self._sparse = sparse
        self._complain_ifclosed(self.closed)
        return self._read()

    def seek(self, pos):
        # Only rewinding to the start of the file is supported.
        if pos == 0:
            self.file.seek(0)
            self.pos = 0

    def _read(self):
        """Reads MatrixMarket mtx file
        Returns a pysal.weights.weights.W or pysal.weights.weights.WSP object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open a MatrixMarket mtx file and read it into a pysal weights object

        >>> f = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')

        >>> w = f.read()

        Get the number of observations from the header

        >>> w.n
        49

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.7346938775510203

        Get neighbor weights for a single observation

        >>> w[1]
        {2: 0.33329999999999999, 5: 0.33329999999999999, 6: 0.33329999999999999}

        >>> f.close()

        >>> f = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')

        >>> wsp = f.read(sparse=True)

        Get the number of observations from the header

        >>> wsp.n
        49

        Get row from the weights matrix. Note that the first row in the sparse
        matrix (the 0th row) corresponds to ID 1 from the original mtx file
        read in.

        >>> print wsp.sparse[0].todense()
        [[ 0.      0.3333  0.      0.      0.3333  0.3333  0.      0.      0.      0.
           0.      0.      0.      0.      0.      0.      0.      0.      0.      0.
           0.      0.      0.      0.      0.      0.      0.      0.      0.      0.
           0.      0.      0.      0.      0.      0.      0.      0.      0.      0.
           0.      0.      0.      0.      0.      0.      0.      0.      0.    ]]

        """
        # Single-object handler: a second read stops iteration.
        if self.pos > 0:
            raise StopIteration
        mtx = sio.mmread(self.file)
        ids = range(1, mtx.shape[0] + 1)  # matrix market indexes start at one
        wsp = WSP(mtx, ids)
        if self._sparse:
            w = wsp
        else:
            # Densify the sparse representation into a full W object.
            w = pysal.weights.WSP2W(wsp)
        self.pos += 1
        return w

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        -------

        a MatrixMarket mtx file
        write a weights object to the opened mtx file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('wmat.mtx'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.mtx')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created mtx file

        >>> wnew =  pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)

        Go to the beginning of the test file

        >>> testfile.seek(0)

        Create a sparse weights instance from the test file

        >>> wsp = testfile.read(sparse=True)

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the sparse weights object into the open file

        >>> o.write(wsp)
        >>> o.close()

        Read in the newly created mtx file

        >>> wsp_new =  pysal.open(fname,'r').read(sparse=True)

        Compare values from old to new

        >>> wsp_new.s0 == wsp.s0
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)

        """
        self._complain_ifclosed(self.closed)
        # Both dense (W) and sparse (WSP) weights expose .sparse, so either
        # can be serialized through the same code path.
        if issubclass(type(obj), W) or issubclass(type(obj), WSP):
            w = obj.sparse
            sio.mmwrite(self.file, w, comment='Generated by PySAL',
                        field='real', precision=7)
            self.pos += 1
        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))

    def close(self):
        # Close the underlying file before the base-class bookkeeping.
        self.file.close()
        FileIO.FileIO.close(self)
+
+
diff --git a/pysal/core/IOHandlers/pyDbfIO.py b/pysal/core/IOHandlers/pyDbfIO.py
new file mode 100644
index 0000000..0677b7d
--- /dev/null
+++ b/pysal/core/IOHandlers/pyDbfIO.py
@@ -0,0 +1,306 @@
+import pysal.core.Tables
+import datetime
+import struct
+import itertools
+from warnings import warn
+import pysal
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['DBF']
+
+
class DBF(pysal.core.Tables.DataTable):
    """
    PySAL DBF Reader/Writer

    This DBF handler implements the PySAL DataTable interface.

    Attributes
    ----------

    header      : list
                  A list of field names. The header is a python list of
                  strings.  Each string is a field name and field name must
                  not be longer than 10 characters.
    field_spec  : list
                  A list describing the data types of each field. It is
                  comprised of a list of tuples, each tuple describing a
                  field. The format for the tuples is ("Type",len,precision).
                  Valid Types are 'C' for characters, 'L' for bool, 'D' for
                  date, 'N' or 'F' for number.

    Examples
    --------

    >>> import pysal
    >>> dbf = pysal.open(pysal.examples.get_path('juvenile.dbf'), 'r')
    >>> dbf.header
    ['ID', 'X', 'Y']
    >>> dbf.field_spec
    [('N', 9, 0), ('N', 9, 0), ('N', 9, 0)]

    """
    FORMATS = ['dbf']
    MODES = ['r', 'w']

    def __init__(self, *args, **kwargs):
        """
        Initializes an instance of the pysal's DBF handler.

        Arguments:
        dataPath -- str -- Path to file, including file.
        mode -- str -- 'r' or 'w'
        """
        pysal.core.Tables.DataTable.__init__(self, *args, **kwargs)
        if self.mode == 'r':
            self.f = f = open(self.dataPath, 'rb')
            # DBF header: bytes 4-7 record count, bytes 8-9 header length.
            numrec, lenheader = struct.unpack('<xxxxLH22x', f.read(32))
            # Each field descriptor is 32 bytes; 33 accounts for the fixed
            # 32-byte header plus the 1-byte terminator.
            numfields = (lenheader - 33) // 32
            self.n_records = numrec
            self.n_fields = numfields
            # Every record starts with a 1-byte deletion flag, modeled here
            # as a pseudo-field.
            self.field_info = [('DeletionFlag', 'C', 1, 0)]
            record_size = 1
            fmt = 's'
            self._col_index = {}
            idx = 0
            for fieldno in xrange(numfields):
                name, typ, size, deci = struct.unpack(
                    '<11sc4xBB14x', f.read(32))
                name = name.replace('\0', '')
                    # eliminate NULs from string
                self._col_index[name] = (idx, record_size)
                idx += 1
                # Accumulate a struct format that slices one record into
                # per-field byte strings.
                fmt += '%ds' % size
                record_size += size
                self.field_info.append((name, typ, size, deci))
            terminator = f.read(1)
            assert terminator == '\r'
            self.header_size = self.f.tell()
            self.record_size = record_size
            self.record_fmt = fmt
            self.pos = 0
            # Public header/spec exclude the internal DeletionFlag entry.
            self.header = [fInfo[0] for fInfo in self.field_info[1:]]
            field_spec = []
            for fname, ftype, flen, fpre in self.field_info[1:]:
                field_spec.append((ftype, flen, fpre))
            self.field_spec = field_spec

            #self.spec = [types[fInfo[0]] for fInfo in self.field_info]
        elif self.mode == 'w':
            # Header and field_spec must be assigned by the caller before
            # the first write (see _firstWrite).
            self.f = open(self.dataPath, 'wb')
            self.header = None
            self.field_spec = None
            self.numrec = 0
            self.FIRST_WRITE = True

    def __len__(self):
        if self.mode != 'r':
            raise IOError("Invalid operation, Cannot read from a file opened in 'w' mode.")
        return self.n_records

    def seek(self, i):
        # Position the file at the start of record i (0-based).
        self.f.seek(self.header_size + (self.record_size * i))
        self.pos = i

    def _get_col(self, key):
        """return the column vector"""
        if key not in self._col_index:
            raise AttributeError('Field: % s does not exist in header' % key)
        # Remember the current record position so it can be restored after
        # the column scan.
        prevPos = self.tell()
        idx, offset = self._col_index[key]
        typ, size, deci = self.field_spec[idx]
        # Bytes to skip between consecutive values of this column.
        gap = (self.record_size - size)
        f = self.f
        f.seek(self.header_size + offset)
        col = [0] * self.n_records
        for i in xrange(self.n_records):
            value = f.read(size)
            f.seek(gap, 1)
            if typ == 'N':
                value = value.replace('\0', '').lstrip()
                if value == '':
                    value = pysal.MISSINGVALUE
                elif deci:
                    try:
                        value = float(value)
                    except ValueError:
                        value = pysal.MISSINGVALUE
                else:
                    try:
                        value = int(value)
                    except ValueError:
                        value = pysal.MISSINGVALUE
            elif typ == 'D':
                # Dates are stored as YYYYMMDD text.
                try:
                    y, m, d = int(value[:4]), int(value[4:6]), int(value[6:8])
                    value = datetime.date(y, m, d)
                except ValueError:
                    value = pysal.MISSINGVALUE
            elif typ == 'L':
                # Logical: normalize to 'T', 'F', or '?' for unknown.
                value = (value in 'YyTt' and 'T') or (
                    value in 'NnFf' and 'F') or '?'
            elif typ == 'F':
                value = value.replace('\0', '').lstrip()
                if value == '':
                    value = pysal.MISSINGVALUE
                else:
                    value = float(value)
            if isinstance(value, str):
                value = value.rstrip()
            col[i] = value
        self.seek(prevPos)
        return col

    def read_record(self, i):
        # Read record i; if it is flagged as deleted (flag byte != ' '),
        # recurse onto the next record.
        # NOTE(review): recursing past the last record will read past EOF
        # rather than raising IndexError — confirm intended behavior.
        self.seek(i)
        rec = list(struct.unpack(
            self.record_fmt, self.f.read(self.record_size)))
        if rec[0] != ' ':
            return self.read_record(i + 1)
        result = []
        for (name, typ, size, deci), value in itertools.izip(self.field_info, rec):
            if name == 'DeletionFlag':
                continue
            if typ == 'N':
                value = value.replace('\0', '').lstrip()
                if value == '':
                    value = pysal.MISSINGVALUE
                elif deci:
                    try:
                        value = float(value)
                    except ValueError:
                        value = pysal.MISSINGVALUE
                else:
                    try:
                        value = int(value)
                    except ValueError:
                        value = pysal.MISSINGVALUE
            elif typ == 'D':
                try:
                    y, m, d = int(value[:4]), int(value[4:6]), int(value[6:8])
                    value = datetime.date(y, m, d)
                except ValueError:
                    #value = datetime.date.min#NULL Date: See issue 114
                    value = pysal.MISSINGVALUE
            elif typ == 'L':
                value = (value in 'YyTt' and 'T') or (
                    value in 'NnFf' and 'F') or '?'
            elif typ == 'F':
                value = value.replace('\0', '').lstrip()
                if value == '':
                    value = pysal.MISSINGVALUE
                else:
                    value = float(value)
            if isinstance(value, str):
                value = value.rstrip()
            result.append(value)
        return result

    def _read(self):
        # Sequential read of the next record; returns None at EOF.
        if self.mode != 'r':
            raise IOError("Invalid operation, Cannot read from a file opened in 'w' mode.")
        if self.pos < len(self):
            rec = self.read_record(self.pos)
            self.pos += 1
            return rec
        else:
            return None

    def write(self, obj):
        # Serialize one row (a list matching self.header) as a fixed-width
        # DBF record.
        self._complain_ifclosed(self.closed)
        if self.mode != 'w':
            raise IOError("Invalid operation, Cannot write to a file opened in 'r' mode.")
        if self.FIRST_WRITE:
            self._firstWrite(obj)
        if len(obj) != len(self.header):
            raise TypeError("Rows must contains %d fields" % len(self.header))
        self.numrec += 1
        self.f.write(' ')                        # deletion flag
        for (typ, size, deci), value in itertools.izip(self.field_spec, obj):
            if value is None:
                if typ == 'C':
                    value = ' ' * size
                else:
                    value = '\0' * size
            elif typ == "N" or typ == "F":
                v = str(value).rjust(size, ' ')
                #if len(v) == size:
                #    value = v
                #else:
                value = (("%" + "%d.%d" % (size, deci) + "f") % (value))[:size]
            elif typ == 'D':
                value = value.strftime('%Y%m%d')
            elif typ == 'L':
                value = str(value)[0].upper()
            else:
                value = str(value)[:size].ljust(size, ' ')
            try:
                assert len(value) == size
            except:
                print value, len(value), size
                raise
            self.f.write(value)
            # NOTE(review): pos is incremented once per *field* here, not
            # once per record — verify against FileIO's use of pos in write
            # mode.
            self.pos += 1

    def flush(self):
        # Rewrite the header (record count may have grown) and flush the OS
        # buffer.
        self._complain_ifclosed(self.closed)
        self._writeHeader()
        self.f.flush()

    def close(self):
        if self.mode == 'w':
            self.flush()
            # End of file
            self.f.write('\x1A')
        self.f.close()
        pysal.core.Tables.DataTable.close(self)

    def _firstWrite(self, obj):
        # Deferred header emission: header and field_spec must be set on the
        # handler before the first row is written.
        if not self.header:
            raise IOError("No header, DBF files require a header.")
        if not self.field_spec:
            raise IOError("No field_spec, DBF files require a specification.")
        self._writeHeader()
        self.FIRST_WRITE = False

    def _writeHeader(self):
        """ Modified from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/362715 """
        # Seek to the start, rewrite the 32-byte header plus field
        # descriptors, then restore the previous position (except on the
        # very first write, where the position is already correct).
        POS = self.f.tell()
        self.f.seek(0)
        ver = 3
        now = datetime.datetime.now()
        # DBF stores the year as an offset from 1900.
        yr, mon, day = now.year - 1900, now.month, now.day
        numrec = self.numrec
        numfields = len(self.header)
        lenheader = numfields * 32 + 33
        lenrecord = sum(field[1] for field in self.field_spec) + 1
        hdr = struct.pack('<BBBBLHH20x', ver, yr, mon, day, numrec,
                          lenheader, lenrecord)
        self.f.write(hdr)
        # field specs
        for name, (typ, size, deci) in itertools.izip(self.header, self.field_spec):
            name = name.ljust(11, '\x00')
            fld = struct.pack('<11sc4xBB14x', name, typ, size, deci)
            self.f.write(fld)
        # terminator
        self.f.write('\r')
        if self.f.tell() != POS and not self.FIRST_WRITE:
            self.f.seek(POS)
+
if __name__ == '__main__':
    # Manual smoke test: round-trip an example DBF through the writer and
    # verify header, field spec, and data survive unchanged.
    import pysal
    file_name = pysal.examples.get_path("10740.dbf")
    f = pysal.open(file_name, 'r')
    newDB = pysal.open('copy.dbf', 'w')
    newDB.header = f.header
    newDB.field_spec = f.field_spec
    print f.header
    for row in f:
        print row
        newDB.write(row)
    newDB.close()
    copy = pysal.open('copy.dbf', 'r')
    f.seek(0)
    print "HEADER: ", copy.header == f.header
    print "SPEC: ", copy.field_spec == f.field_spec
    print "DATA: ", list(copy) == list(f)
diff --git a/pysal/core/IOHandlers/pyShpIO.py b/pysal/core/IOHandlers/pyShpIO.py
new file mode 100644
index 0000000..d1a2c55
--- /dev/null
+++ b/pysal/core/IOHandlers/pyShpIO.py
@@ -0,0 +1,189 @@
+"""
+PySAL ShapeFile Reader and Writer based on pure python shapefile module.
+
+"""
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__credits__ = "Copyright (c) 2009 Charles R. Schmidt"
+__all__ = ['PurePyShpWrapper']
+
+#import pysal
+import pysal.core.FileIO  # as FileIO
+from pysal.core.util import shp_file
+import pysal.cg as cg
+from warnings import warn
+import unittest
+
# Map shapefile type names to pysal geometry classes.  M/Z variants collapse
# onto the base geometry class.
STRING_TO_TYPE = {'POLYGON': cg.Polygon, 'POINT': cg.Point, 'POINTM':
                  cg.Point, 'POINTZ': cg.Point, 'ARC': cg.Chain, 'POLYGONZ': cg.Polygon}
# Reverse map; intentionally many-to-one, so geometries default to the base
# (non-M/Z) type name when writing.
TYPE_TO_STRING = {cg.Polygon: 'POLYGON', cg.Point: 'POINT', cg.Chain:
                  'ARC'}  # build the reverse map
#for key,value in STRING_TO_TYPE.iteritems():
#    TYPE_TO_STRING[value] = key
+
+
class PurePyShpWrapper(pysal.core.FileIO.FileIO):
    """
    FileIO handler for ESRI ShapeFiles.

    Notes
    -----
    This class wraps _pyShpIO's shp_file class with the PySAL FileIO API.
    shp_file can be used without PySAL.

    Attributes
    ----------

    Formats     : list
                  A list of support file extensions
    Modes       : list
                  A list of support file modes

    Examples
    --------

    >>> import tempfile
    >>> f = tempfile.NamedTemporaryFile(suffix='.shp'); fname = f.name; f.close()
    >>> import pysal
    >>> i = pysal.open(pysal.examples.get_path('10740.shp'),'r')
    >>> o = pysal.open(fname,'w')
    >>> for shp in i:
    ...     o.write(shp)
    >>> o.close()
    >>> open(pysal.examples.get_path('10740.shp'),'rb').read() == open(fname,'rb').read()
    True
    >>> open(pysal.examples.get_path('10740.shx'),'rb').read() == open(fname[:-1]+'x','rb').read()
    True
    >>> import os
    >>> os.remove(fname); os.remove(fname.replace('.shp','.shx'))

    """
    FORMATS = ['shp', 'shx']
    MODES = ['w', 'r', 'wb', 'rb']

    def __init__(self, *args, **kwargs):
        pysal.core.FileIO.FileIO.__init__(self, *args, **kwargs)
        self.dataObj = None
        if self.mode == 'r' or self.mode == 'rb':
            self.__open()
        elif self.mode == 'w' or self.mode == 'wb':
            self.__create()

    def __len__(self):
        # Number of shapes, or 0 before the underlying shp_file exists.
        if self.dataObj:
            return len(self.dataObj)
        else:
            return 0

    def __open(self):
        # Open an existing shapefile and resolve its shape type to a pysal
        # geometry class.
        self.dataObj = shp_file(self.dataPath)
        self.header = self.dataObj.header
        self.bbox = self.dataObj.bbox
        try:
            self.type = STRING_TO_TYPE[self.dataObj.type()]
        except KeyError:
            raise TypeError('%s does not support shapes of type: %s'
                            % (self.__class__.__name__, self.dataObj.type()))

    def __create(self):
        # Creation of the output shp_file is deferred until the first write,
        # because the shapefile type depends on the first shape seen.
        self.write = self.__firstWrite

    def __firstWrite(self, shape):
        # Determine the output type from the first shape; points with 3 or 4
        # coordinates become POINTM / POINTZ respectively.
        self.type = TYPE_TO_STRING[type(shape)]
        if self.type == 'POINT':
            if len(shape) == 3:
                self.type = 'POINTM'
            if len(shape) == 4:
                self.type = 'POINTZ'
        self.dataObj = shp_file(self.dataPath, 'w', self.type)
        # Swap in the real writer for all subsequent shapes.
        self.write = self.__writer
        self.write(shape)

    def __writer(self, shape):
        if TYPE_TO_STRING[type(shape)] != self.type:
            raise TypeError("This file only supports %s type shapes" %
                            self.type)
        rec = {}
        rec['Shape Type'] = shp_file.SHAPE_TYPES[self.type]
        if self.type == 'POINT':
            rec['X'] = shape[0]
            rec['Y'] = shape[1]
            if len(shape) > 2:
                rec['M'] = shape[2]
            if len(shape) > 3:
                rec['Z'] = shape[3]
            shape = rec
        else:
            rec['BBOX Xmin'] = shape.bounding_box.left
            rec['BBOX Ymin'] = shape.bounding_box.lower
            rec['BBOX Xmax'] = shape.bounding_box.right
            rec['BBOX Ymax'] = shape.bounding_box.upper
            if self.type == 'POLYGON':
                holes = [hole[::-1] for hole in shape.holes if hole]
                    #holes should be in CCW order
                rec['NumParts'] = len(shape.parts) + len(holes)
                all_parts = shape.parts + holes
            else:
                rec['NumParts'] = len(shape.parts)
                all_parts = shape.parts
            # Offsets of each part within the flattened vertex list.
            partsIndex = [0]
            for l in map(len, all_parts)[:-1]:
                partsIndex.append(partsIndex[-1] + l)
            rec['Parts Index'] = partsIndex
            verts = sum(all_parts, [])
            verts = [(x, y) for x, y in verts]
            rec['NumPoints'] = len(verts)
            rec['Vertices'] = verts
        self.dataObj.add_shape(rec)
        self.pos += 1

    def _read(self):
        # Read the next shape record and convert it to a pysal geometry.
        # NOTE(review): only 'POINT' and 'POINTZ' have explicit branches;
        # 'POINTM' would fall into the multi-part branch below — confirm
        # whether POINTM files are expected here.
        try:
            rec = self.dataObj.get_shape(self.pos)
        except IndexError:
            return None
        self.pos += 1
        if self.dataObj.type() == 'POINT':
            shp = self.type((rec['X'], rec['Y']))
        elif self.dataObj.type() == 'POINTZ':
            shp = self.type((rec['X'], rec['Y']))
            shp.Z = rec['Z']
            shp.M = rec['M']
        else:
            if rec['NumParts'] > 1:
                partsIndex = list(rec['Parts Index'])
                partsIndex.append(None)
                parts = [rec['Vertices'][partsIndex[i]:partsIndex[
                    i + 1]] for i in xrange(rec['NumParts'])]
                if self.dataObj.type() == 'POLYGON':
                    # Clockwise parts are rings; counter-clockwise are holes.
                    is_cw = map(pysal.cg.is_clockwise, parts)
                    vertices = [part for part, cw in zip(parts, is_cw) if cw]
                    holes = [part for part, cw in zip(parts, is_cw) if not cw]
                    if not holes:
                        holes = None
                    shp = self.type(vertices, holes)
                else:
                    vertices = parts
                    shp = self.type(vertices)
            elif rec['NumParts'] == 1:
                vertices = rec['Vertices']
                if self.dataObj.type() == 'POLYGON' and not pysal.cg.is_clockwise(vertices):
                    ### SHAPEFILE WARNING: Polygon %d topology has been fixed. (ccw -> cw)
                    warn("SHAPEFILE WARNING: Polygon %d topology has been fixed. (ccw -> cw)" % (self.pos), RuntimeWarning)
                    print "SHAPEFILE WARNING: Polygon %d topology has been fixed. (ccw -> cw)" % (self.pos)

                shp = self.type(vertices)
            else:
                warn("Polygon %d has zero parts" % self.pos, RuntimeWarning)
                shp = self.type([[]])
                #raise ValueError, "Polygon %d has zero parts"%self.pos
        if self.ids:
            shp.id = self.rIds[self.pos - 1]  # shp IDs start at 1.
        else:
            shp.id = self.pos  # shp IDs start at 1.
        return shp

    def close(self):
        # Close the underlying shp_file, then the FileIO base.
        self.dataObj.close()
        pysal.core.FileIO.FileIO.close(self)
+
diff --git a/pysal/core/IOHandlers/stata_txt.py b/pysal/core/IOHandlers/stata_txt.py
new file mode 100644
index 0000000..b4d1cb4
--- /dev/null
+++ b/pysal/core/IOHandlers/stata_txt.py
@@ -0,0 +1,236 @@
+import pysal
+import os.path
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["StataTextIO"]
+
+
+class StataTextIO(FileIO.FileIO):
+    """
+    Opens, reads, and writes weights file objects in STATA text format.
+
+    Spatial weights objects in the STATA text format are used in
+    STATA sppack library through the spmat command.
+    This format is a simple text file delimited by a whitespace.
+    The spmat command does not specify which file extension to use.
+    But, txt seems the default file extension, which is assumed in PySAL.
+
+    The first line of the STATA text file  is
+    a header including the number of observations.
+    After this header line, it includes at least one data column that contains
+    unique ids or record numbers of observations.
+    When an id variable is not specified for the original spatial weights
+    matrix in STATA, record numbers are used to identify individual observations,
+    and the record numbers start with one.
+    The spmat command seems to allow only integer IDs,
+    which is also assumed in PySAL.
+
+    A STATA text file can have one of the following structures according to
+    its export options in STATA.
+
+    Structure 1: encoding using the list of neighbor ids
+    [Line 1]    [Number_of_Observations]
+    [Line 2]    [ID_of_Obs_1] [ID_of_Neighbor_1_of_Obs_1] [ID_of_Neighbor_2_of_Obs_1] .... [ID_of_Neighbor_m_of_Obs_1]
+    [Line 3]    [ID_of_Obs_2]
+    [Line 4]    [ID_of_Obs_3] [ID_of_Neighbor_1_of_Obs_3] [ID_of_Neighbor_2_of_Obs_3]
+    ...
+    Note that for island observations their IDs are still recorded.
+
+    Structure 2: encoding using a full matrix format
+    [Line 1]    [Number_of_Observations]
+    [Line 2]    [ID_of_Obs_1] [w_11] [w_12] ... [w_1n]
+    [Line 3]    [ID_of_Obs_2] [w_21] [w_22] ... [w_2n]
+    [Line 4]    [ID_of_Obs_3] [w_31] [w_32] ... [w_3n]
+    ...
+    [Line n+1]  [ID_of_Obs_n] [w_n1] [w_n2] ... [w_nn]
+    where w_ij can be a form of general weight.
+    That is, w_ij can be both a binary value or a general numeric value.
+    If an observation is an island, all of its w columns contains 0.
+
+    References
+    ----------
+    Drukker D.M., Peng H., Prucha I.R., and Raciborski R. (2011)
+    "Creating and managing spatial-weighting matrices using the spmat command"
+
+    Notes
+    -----
+    The spmat command allows users to add any note to a spatial weights matrix object in STATA.
+    However, all those notes are lost when the matrix is exported.
+    PySAL also does not take care of those notes.
+
+    """
+
+    FORMATS = ['stata_text']
+    MODES = ['r', 'w']
+
+    def __init__(self, *args, **kwargs):
+        args = args[:2]
+        FileIO.FileIO.__init__(self, *args, **kwargs)
+        self.file = open(self.dataPath, self.mode)
+
+    def read(self, n=-1):
+        self._complain_ifclosed(self.closed)
+        return self._read()
+
+    def seek(self, pos):
+        if pos == 0:
+            self.file.seek(0)
+            self.pos = 0
+
+    def _read(self):
+        """Reads STATA Text file
+        Returns a pysal.weights.weights.W object
+
+        Examples
+        --------
+
+        Type 'dir(w)' at the interpreter to see what methods are supported.
+        Open a text file and read it into a pysal weights object
+
+        >>> w = pysal.open(pysal.examples.get_path('stata_sparse.txt'),'r','stata_text').read()
+        WARNING: there are 7 disconnected observations
+        Island ids:  [5, 9, 10, 11, 12, 14, 15]
+
+        Get the number of observations from the header
+
+        >>> w.n
+        56
+
+        Get the mean number of neighbors
+
+        >>> w.mean_neighbors
+        4.0
+
+        Get neighbor distances for a single observation
+
+        >>> w[1]
+        {53: 1.0, 51: 1.0, 45: 1.0, 54: 1.0, 7: 1.0}
+
+        """
+        if self.pos > 0:
+            raise StopIteration
+
+        n = int(self.file.readline().strip())
+        line1 = self.file.readline().strip()
+        obs_01 = line1.split(' ')
+        matrix_form = False
+        if len(obs_01) == 1 or float(obs_01[1]) != 0.0:
+            def line2wgt(line):
+                row = [int(i) for i in line.strip().split(' ')]
+                return row[0], row[1:], [1.0] * len(row[1:])
+        else:
+            matrix_form = True
+
+            def line2wgt(line):
+                row = line.strip().split(' ')
+                obs = int(float(row[0]))
+                ngh, wgt = [], []
+                for i in range(n):
+                    w = float(row[i + 1])
+                    if w > 0:
+                        ngh.append(i)
+                        wgt.append(w)
+                return obs, ngh, wgt
+
+        id_order = []
+        weights, neighbors = {}, {}
+        l = line1
+        for i in range(n):
+            obs, ngh, wgt = line2wgt(l)
+            id_order.append(obs)
+            neighbors[obs] = ngh
+            weights[obs] = wgt
+            l = self.file.readline()
+        if matrix_form:
+            for obs in neighbors:
+                neighbors[obs] = [id_order[ngh] for ngh in neighbors[obs]]
+
+        self.pos += 1
+        return W(neighbors, weights)
+
+    def write(self, obj, matrix_form=False):
+        """
+
+        Parameters
+        ----------
+        .write(weightsObject)
+        accepts a weights object
+
+        Returns
+        ------
+
+        a STATA text file
+        write a weights object to the opened text file.
+
+        Examples
+        --------
+
+        >>> import tempfile, pysal, os
+        >>> testfile = pysal.open(pysal.examples.get_path('stata_sparse.txt'),'r','stata_text')
+        >>> w = testfile.read()
+        WARNING: there are 7 disconnected observations
+        Island ids:  [5, 9, 10, 11, 12, 14, 15]
+
+        Create a temporary file for this example
+
+        >>> f = tempfile.NamedTemporaryFile(suffix='.txt')
+
+        Reassign to new var
+
+        >>> fname = f.name
+
+        Close the temporary named file
+
+        >>> f.close()
+
+        Open the new file in write mode
+
+        >>> o = pysal.open(fname,'w','stata_text')
+
+        Write the Weights object into the open file
+
+        >>> o.write(w)
+        >>> o.close()
+
+        Read in the newly created text file
+
+        >>> wnew =  pysal.open(fname,'r','stata_text').read()
+        WARNING: there are 7 disconnected observations
+        Island ids:  [5, 9, 10, 11, 12, 14, 15]
+
+        Compare values from old to new
+
+        >>> wnew.pct_nonzero == w.pct_nonzero
+        True
+
+        Clean up temporary file created for this example
+
+        >>> os.remove(fname)
+        """
+        self._complain_ifclosed(self.closed)
+        if issubclass(type(obj), W):
+            header = '%s\n' % obj.n
+            self.file.write(header)
+            if matrix_form:
+                def wgt2line(obs_id, neighbor, weight):
+                    w = ['0.0'] * obj.n
+                    for ngh, wgt in zip(neighbor, weight):
+                        w[obj.id2i[ngh]] = str(wgt)
+                    return [str(obs_id)] + w
+            else:
+                def wgt2line(obs_id, neighbor, weight):
+                    return [str(obs_id)] + [str(ngh) for ngh in neighbor]
+            for id in obj.id_order:
+                line = wgt2line(id, obj.neighbors[id], obj.weights[id])
+                self.file.write('%s\n' % ' '.join(line))
+        else:
+            raise TypeError("Expected a pysal weights object, got: %s" % (
+                type(obj)))
+
+    def close(self):
+        self.file.close()
+        FileIO.FileIO.close(self)
+
diff --git a/pysal/core/IOHandlers/template.py b/pysal/core/IOHandlers/template.py
new file mode 100644
index 0000000..03adf57
--- /dev/null
+++ b/pysal/core/IOHandlers/template.py
@@ -0,0 +1,145 @@
+""" Example Reader and Writer
+
+    These are working readers/writers that parse '.foo' and '.bar' files
+
+"""
+
+import pysal
+from pysal.core.FileIO import FileIO
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['TemplateWriter', 'TemplateReaderWriter']
+
+
+# Always subclass FileIO
+class TemplateWriter(FileIO):
+    #REQUIRED, List the formats this class supports.
+    FORMATS = ['foo']
+    #REQUIRED, List the modes supported by this class.
+    # One class can support both reading and writing.
+    # For simplicity this class will only support one.
+    # You could support custom modes, but these could be hard to document.
+    MODES = ['w']
+
+    # use .__init__ to open any need file handlers
+    def __init__(self, *args, **kwargs):
+        # initialize the parent class...
+        FileIO.__init__(self, *args, **kwargs)
+        # this gives you:
+        # self.dataPath == the connection string or path to file
+        # self.mode == the mode the file should be opened in
+
+        self.fileObj = open(self.dataPath, self.mode)
+
+    #writers must subclass .write
+    def write(self, obj):
+        """ .write method of the 'foobar' template, receives an obj """
+
+        # GOOD TO HAVE, this will prevent invalid operations on closed files.
+        self._complain_ifclosed(self.closed)
+
+        # it's up to the writer to understand the object, you should check that object is of the type you expect and raise a TypeError is its now.
+        # we will support writing string objects in this example, all string are derived from basestring...
+        if issubclass(type(obj), basestring):
+
+            #Non-essential ...
+            def foobar(c):
+                if c in 'foobar':
+                    return True
+                else:
+                    return False
+            result = filter(foobar, obj)  # e.g.   'foobara' == filter(foobar,'my little foobar example')
+
+            #do the actual writing...
+            self.fileObj.write(result + '\n')
+            #REQUIRED, increment the internal pos pointer.
+            self.pos += 1
+
+        else:
+            raise TypeError("Expected a string, got: %s" % (type(obj)))
+
+    #default is to raise "NotImplementedError"
+    def flush(self):
+        self._complain_ifclosed(self.closed)
+        self.fileObj.flush()
+
+    #REQUIRED
+    def close(self):
+        self.fileObj.close()
+        #clean up the parent class too....
+        FileIO.close(self)
+
+
+class TemplateReaderWriter(FileIO):
+    FORMATS = ['bar']
+    MODES = ['r', 'w']
+
+    def __init__(self, *args, **kwargs):
+        FileIO.__init__(self, *args, **kwargs)
+        self.fileObj = open(self.dataPath, self.mode)
+    #Notice reading is a bit different
+
+    def _filter(self, st):
+        def foobar(c):
+            if c in 'foobar':
+                return True
+            else:
+                return False
+        return filter(foobar, st)  # e.g.   'foobara' == filter(foobar,'my little foobar example')
+
+    def _read(self):
+        """ the _read method should return only ONE object and raise StopIteration at the EOF."""
+        line = self.fileObj.readline()
+        obj = self._filter(line)
+        self.pos += 1  # REQUIRED
+        if line:
+            return obj + '\n'
+        else:
+            raise StopIteration  # REQUIRED
+
+    def write(self, obj):
+        """ .write method of the 'foobar' template, receives an obj """
+        self._complain_ifclosed(self.closed)
+        if issubclass(type(obj), basestring):
+            result = self._filter(obj)
+            self.fileObj.write(result + '\n')
+            self.pos += 1
+        else:
+            raise TypeError("Expected a string, got: %s" % (type(obj)))
+
+    def flush(self):
+        self._complain_ifclosed(self.closed)
+        self.fileObj.flush()
+
+    def close(self):
+        self.fileObj.close()
+        FileIO.close(self)
+
+
+if __name__ == '__main__':
+    "note, by running OR importing this module it's automatically added to the pysal fileIO registry."
+    # Confirm the template classes registered themselves with pysal.open.
+    pysal.open.check()
+
+    lines = ['This is an example of template FileIO classes',
+             'Each call to write expects a string object',
+             'that string is filtered and only letters "f,o,b,a,r" are kept',
+             'these kept letters are written to the file and a new line char is appends to each line',
+             'likewise the reader filters each line from a file']
+    # Write the sample lines through the '.foo' writer (filters on write).
+    f = pysal.open('test.foo', 'w')
+    for line in lines:
+        f.write(line)
+    f.close()
+
+    # Write the same lines through the '.bar' writer (also filters on write).
+    f = pysal.open('test.bar', 'w')
+    for line in lines:
+        f.write(line)
+    f.close()
+
+    # Read the '.bar' file back through the filtering reader.
+    f = pysal.open('test.bar', 'r')
+    s = ''.join(f.read())
+    f.close()
+    print s
+
+    # Compare with the raw '.foo' contents: both paths should yield the
+    # same filtered text, so this should print True.
+    f = open('test.foo', 'r')
+    s2 = f.read()
+    f.close()
+    print s == s2
diff --git a/pysal/core/IOHandlers/tests/__init__.py b/pysal/core/IOHandlers/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/core/IOHandlers/tests/test_arcgis_dbf.py b/pysal/core/IOHandlers/tests/test_arcgis_dbf.py
new file mode 100644
index 0000000..849243f
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_arcgis_dbf.py
@@ -0,0 +1,57 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.arcgis_dbf import ArcGISDbfIO
+import tempfile
+import os
+import warnings
+
+
+class test_ArcGISDbfIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('arcgis_ohio.dbf')
+        self.obj = ArcGISDbfIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            w = self.obj.read()
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, RuntimeWarning)
+                assert "Missing Value Found, setting value to pysal.MISSINGVALUE" in str(warn[0].message)
+        self.assertEqual(88, w.n)
+        self.assertEqual(5.25, w.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            w = self.obj.read()
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, RuntimeWarning)
+                assert "Missing Value Found, setting value to pysal.MISSINGVALUE" in str(warn[0].message)
+        f = tempfile.NamedTemporaryFile(
+            suffix='.dbf', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w', 'arcgis_dbf')
+        o.write(w)
+        o.close()
+        f = pysal.open(fname, 'r', 'arcgis_dbf')
+        wnew = f.read()
+        f.close()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_arcgis_swm.py b/pysal/core/IOHandlers/tests/test_arcgis_swm.py
new file mode 100644
index 0000000..4e09f0e
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_arcgis_swm.py
@@ -0,0 +1,44 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.arcgis_swm import ArcGISSwmIO
+import tempfile
+import os
+
+
+class test_ArcGISSwmIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('ohio.swm')
+        self.obj = ArcGISSwmIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(88, w.n)
+        self.assertEqual(5.25, w.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(
+            suffix='.swm', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        o.write(w)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_arcgis_txt.py b/pysal/core/IOHandlers/tests/test_arcgis_txt.py
new file mode 100644
index 0000000..ec93172
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_arcgis_txt.py
@@ -0,0 +1,60 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.arcgis_txt import ArcGISTextIO
+import tempfile
+import os
+import warnings
+
+
+class test_ArcGISTextIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('arcgis_txt.txt')
+        self.obj = ArcGISTextIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            w = self.obj.read()
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, RuntimeWarning)
+                assert "DBF relating to ArcGIS TEXT was not found, proceeding with unordered string ids." in str(warn[0].message)
+        self.assertEqual(3, w.n)
+        self.assertEqual(2.0, w.mean_neighbors)
+        self.assertEqual([0.1, 0.05], w[2].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            w = self.obj.read()
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, RuntimeWarning)
+                assert "DBF relating to ArcGIS TEXT was not found, proceeding with unordered string ids." in str(warn[0].message)
+        f = tempfile.NamedTemporaryFile(
+            suffix='.txt', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w', 'arcgis_text')
+        o.write(w)
+        o.close()
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            wnew = pysal.open(fname, 'r', 'arcgis_text').read()
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, RuntimeWarning)
+                assert "DBF relating to ArcGIS TEXT was not found, proceeding with unordered string ids." in str(warn[0].message)
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_csvWrapper.py b/pysal/core/IOHandlers/tests/test_csvWrapper.py
new file mode 100644
index 0000000..70d0d52
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_csvWrapper.py
@@ -0,0 +1,61 @@
+import unittest
+import pysal
+import tempfile
+import os
+
+
+class test_csvWrapper(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('stl_hom.csv')
+        self.obj = pysal.core.IOHandlers.csvWrapper.csvWrapper(test_file, 'r')
+
+    def test_len(self):
+        self.assertEquals(len(self.obj), 78)
+
+    def test_tell(self):
+        self.assertEquals(self.obj.tell(), 0)
+        self.obj.read(1)
+        self.assertEquals(self.obj.tell(), 1)
+        self.obj.read(50)
+        self.assertEquals(self.obj.tell(), 51)
+        self.obj.read()
+        self.assertEquals(self.obj.tell(), 78)
+
+    def test_seek(self):
+        self.obj.seek(0)
+        self.assertEquals(self.obj.tell(), 0)
+        self.obj.seek(55)
+        self.assertEquals(self.obj.tell(), 55)
+        self.obj.read(1)
+        self.assertEquals(self.obj.tell(), 56)
+
+    def test_read(self):
+        self.obj.seek(0)
+        objs = self.obj.read()
+        self.assertEquals(len(objs), 78)
+        self.obj.seek(0)
+        objsB = list(self.obj)
+        self.assertEquals(len(objsB), 78)
+        for rowA, rowB in zip(objs, objsB):
+            self.assertEquals(rowA, rowB)
+
+    def test_casting(self):
+        self.obj.cast('WKT', pysal.core.util.WKTParser())
+        verts = [(-89.585220336914062, 39.978794097900391), (-89.581146240234375, 40.094867706298828), (-89.603988647460938, 40.095306396484375), (-89.60589599609375, 40.136119842529297), (-89.6103515625, 40.3251953125), (-89.269027709960938, 40.329566955566406), (-89.268562316894531, 40.285579681396484), (-89.154655456542969, 40.285774230957031), (-89.152763366699219, 40.054969787597656), (-89.151618957519531, 39.919403076171875), (-89.224777221679688, 39.918678283691406), (-89.41185760 [...]
+        for i, pt in enumerate(self.obj.next()[0].vertices):
+            self.assertEquals(pt[:], verts[i])
+
+    def test_by_col(self):
+        for field in self.obj.header:
+            self.assertEquals(len(self.obj.by_col[field]), 78)
+
+    def test_slicing(self):
+        chunk = self.obj[50:55, 1:3]
+        self.assertEquals(chunk[0], ['Jefferson', 'Missouri'])
+        self.assertEquals(chunk[1], ['Jefferson', 'Illinois'])
+        self.assertEquals(chunk[2], ['Miller', 'Missouri'])
+        self.assertEquals(chunk[3], ['Maries', 'Missouri'])
+        self.assertEquals(chunk[4], ['White', 'Illinois'])
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_dat.py b/pysal/core/IOHandlers/tests/test_dat.py
new file mode 100644
index 0000000..69935f6
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_dat.py
@@ -0,0 +1,44 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.dat import DatIO
+import tempfile
+import os
+
+
+class test_DatIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('wmat.dat')
+        self.obj = DatIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(49, w.n)
+        self.assertEqual(4.7346938775510203, w.mean_neighbors)
+        self.assertEqual([0.5, 0.5], w[5.0].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(
+            suffix='.dat', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        o.write(w)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_gal.py b/pysal/core/IOHandlers/tests/test_gal.py
new file mode 100644
index 0000000..83f12ff
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_gal.py
@@ -0,0 +1,49 @@
+"""Unit tests for gal.py"""
+import unittest
+import pysal
+import tempfile
+import os
+from pysal.core.IOHandlers.gal import GalIO
+
+
+class test_GalIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('sids2.gal')
+        self.obj = GalIO(test_file, 'r')
+
+    def test___init__(self):
+        self.assertEqual(self.obj._typ, str)
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        # reading a GAL returns a W
+        w = self.obj.read()
+        self.assertEqual(w.n, 100)
+        self.assertAlmostEqual(w.sd, 1.5151237573214935)
+        self.assertEqual(w.s0, 462.0)
+        self.assertEqual(w.s1, 924.0)
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(suffix='.gal')
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        o.write(w)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_geobugs_txt.py b/pysal/core/IOHandlers/tests/test_geobugs_txt.py
new file mode 100644
index 0000000..f49e489
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_geobugs_txt.py
@@ -0,0 +1,57 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.geobugs_txt import GeoBUGSTextIO
+import tempfile
+import os
+
+
+class test_GeoBUGSTextIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file_scot = test_file_scot = pysal.examples.get_path(
+            'geobugs_scot')
+        self.test_file_col = test_file_col = pysal.examples.get_path(
+            'spdep_listw2WB_columbus')
+        self.obj_scot = GeoBUGSTextIO(test_file_scot, 'r')
+        self.obj_col = GeoBUGSTextIO(test_file_col, 'r')
+
+    def test_close(self):
+        for obj in [self.obj_scot, self.obj_col]:
+            f = obj
+            f.close()
+            self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w_scot = self.obj_scot.read()
+        self.assertEqual(56, w_scot.n)
+        self.assertEqual(4.1785714285714288, w_scot.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0], w_scot[1].values())
+
+        w_col = self.obj_col.read()
+        self.assertEqual(49, w_col.n)
+        self.assertEqual(4.6938775510204085, w_col.mean_neighbors)
+        self.assertEqual([0.5, 0.5], w_col[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj_scot.read)
+        self.failUnlessRaises(StopIteration, self.obj_col.read)
+        self.obj_scot.seek(0)
+        self.obj_col.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        for obj in [self.obj_scot, self.obj_col]:
+            w = obj.read()
+            f = tempfile.NamedTemporaryFile(
+                suffix='', dir=pysal.examples.get_path(''))
+            fname = f.name
+            f.close()
+            o = pysal.open(fname, 'w', 'geobugs_text')
+            o.write(w)
+            o.close()
+            wnew = pysal.open(fname, 'r', 'geobugs_text').read()
+            self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+            os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_geoda_txt.py b/pysal/core/IOHandlers/tests/test_geoda_txt.py
new file mode 100644
index 0000000..2402309
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_geoda_txt.py
@@ -0,0 +1,26 @@
+'''GeoDa Text File Reader Unit Tests'''
+import unittest
+import pysal
+from pysal.core.IOHandlers.geoda_txt import GeoDaTxtReader as GTR
+
+
+class test_GeoDaTxtReader(unittest.TestCase):
+    def setUp(self):
+        test_file = pysal.examples.get_path('stl_hom.txt')
+        self.obj = GTR(test_file, 'r')
+
+    def test___init__(self):
+        self.assertEqual(
+            self.obj.header, ['FIPSNO', 'HR8488', 'HR8893', 'HC8488'])
+
+    def test___len__(self):
+        expected = 78
+        self.assertEqual(expected, len(self.obj))
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_gwt.py b/pysal/core/IOHandlers/tests/test_gwt.py
new file mode 100644
index 0000000..b6f74da
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_gwt.py
@@ -0,0 +1,54 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.gwt import GwtIO
+import tempfile
+import os
+import warnings
+
+
+class test_GwtIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('juvenile.gwt')
+        self.obj = GwtIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(168, w.n)
+        self.assertEqual(16.678571428571427, w.mean_neighbors)
+        w.transform = 'B'
+        self.assertEqual([1.0], w[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    # Commented out by CRS, GWT 'w' mode removed until we can find a good solution for retaining distances.
+    # see issue #153.
+    # Added back by CRS,
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(
+            suffix='.gwt', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        #copy the shapefile and ID variable names from the old gwt.
+        # this is only available after the read() method has been called.
+        #o.shpName = self.obj.shpName
+        #o.varName = self.obj.varName
+        o.write(w)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_mat.py b/pysal/core/IOHandlers/tests/test_mat.py
new file mode 100644
index 0000000..49f97e4
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_mat.py
@@ -0,0 +1,49 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.mat import MatIO
+import tempfile
+import os
+import warnings
+
+
+class test_MatIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('spat-sym-us.mat')
+        self.obj = MatIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(46, w.n)
+        self.assertEqual(4.0869565217391308, w.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(
+            suffix='.mat', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        with warnings.catch_warnings(record=True) as warn:
+            warnings.simplefilter("always")
+            o.write(w)
+            if len(warn) > 0:
+                assert issubclass(warn[0].category, FutureWarning)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_mtx.py b/pysal/core/IOHandlers/tests/test_mtx.py
new file mode 100644
index 0000000..40df7f0
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_mtx.py
@@ -0,0 +1,57 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.mtx import MtxIO
+import tempfile
+import os
+import warnings
+import scipy.sparse as SP
+
+
+class test_MtxIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('wmat.mtx')
+        self.obj = MtxIO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(49, w.n)
+        self.assertEqual(4.7346938775510203, w.mean_neighbors)
+        self.assertEqual([0.33329999999999999, 0.33329999999999999,
+                          0.33329999999999999], w[1].values())
+        s0 = w.s0
+        self.obj.seek(0)
+        wsp = self.obj.read(sparse=True)
+        self.assertEqual(49, wsp.n)
+        self.assertEqual(s0, wsp.s0)
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        for i in [False, True]:
+            self.obj.seek(0)
+            w = self.obj.read(sparse=i)
+            f = tempfile.NamedTemporaryFile(
+                suffix='.mtx', dir=pysal.examples.get_path(''))
+            fname = f.name
+            f.close()
+            o = pysal.open(fname, 'w')
+            o.write(w)
+            o.close()
+            wnew = pysal.open(fname, 'r').read(sparse=i)
+            if i:
+                self.assertEqual(wnew.s0, w.s0)
+            else:
+                self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+            os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_pyDbfIO.py b/pysal/core/IOHandlers/tests/test_pyDbfIO.py
new file mode 100644
index 0000000..1b232f9
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_pyDbfIO.py
@@ -0,0 +1,111 @@
+import unittest
+import pysal
+import tempfile
+import os
+
+
+class test_DBF(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('10740.dbf')
+        self.dbObj = pysal.core.IOHandlers.pyDbfIO.DBF(test_file, 'r')
+
+    def test_len(self):
+        self.assertEquals(len(self.dbObj), 195)
+
+    def test_tell(self):
+        self.assertEquals(self.dbObj.tell(), 0)
+        self.dbObj.read(1)
+        self.assertEquals(self.dbObj.tell(), 1)
+        self.dbObj.read(50)
+        self.assertEquals(self.dbObj.tell(), 51)
+        self.dbObj.read()
+        self.assertEquals(self.dbObj.tell(), 195)
+
+    def test_seek(self):
+        self.dbObj.seek(0)
+        self.assertEquals(self.dbObj.tell(), 0)
+        self.dbObj.seek(55)
+        self.assertEquals(self.dbObj.tell(), 55)
+        self.dbObj.read(1)
+        self.assertEquals(self.dbObj.tell(), 56)
+
+    def test_read(self):
+        self.dbObj.seek(0)
+        objs = self.dbObj.read()
+        self.assertEquals(len(objs), 195)
+        self.dbObj.seek(0)
+        objsB = list(self.dbObj)
+        self.assertEquals(len(objsB), 195)
+        for rowA, rowB in zip(objs, objsB):
+            self.assertEquals(rowA, rowB)
+
+    def test_random_access(self):
+        self.dbObj.seek(0)
+        db0 = self.dbObj.read(1)[0]
+        self.assertEquals(db0, [1, '35001', '000107', '35001000107', '1.07'])
+        self.dbObj.seek(57)
+        db57 = self.dbObj.read(1)[0]
+        self.assertEquals(db57, [58, '35001', '001900', '35001001900', '19'])
+        self.dbObj.seek(32)
+        db32 = self.dbObj.read(1)[0]
+        self.assertEquals(db32, [33, '35001', '000500', '35001000500', '5'])
+        self.dbObj.seek(0)
+        self.assertEquals(self.dbObj.next(), db0)
+        self.dbObj.seek(57)
+        self.assertEquals(self.dbObj.next(), db57)
+        self.dbObj.seek(32)
+        self.assertEquals(self.dbObj.next(), db32)
+
+    def test_write(self):
+        f = tempfile.NamedTemporaryFile(suffix='.dbf')
+        fname = f.name
+        f.close()
+        self.dbfcopy = fname
+        self.out = pysal.core.IOHandlers.pyDbfIO.DBF(fname, 'w')
+        self.dbObj.seek(0)
+        self.out.header = self.dbObj.header
+        self.out.field_spec = self.dbObj.field_spec
+        for row in self.dbObj:
+            self.out.write(row)
+        self.out.close()
+
+        orig = open(self.test_file, 'rb')
+        copy = open(self.dbfcopy, 'rb')
+        orig.seek(32)  # self.dbObj.header_size) #skip the header, file date has changed
+        copy.seek(32)  # self.dbObj.header_size) #skip the header, file date has changed
+
+        #PySAL writes proper DBF files with a terminator at the end, not everyone does.
+        n = self.dbObj.record_size * self.dbObj.n_records  # bytes to read.
+        self.assertEquals(orig.read(n), copy.read(n))
+        #self.assertEquals(orig.read(1), copy.read(1)) # last byte may fail
+        orig.close()
+        copy.close()
+        os.remove(self.dbfcopy)
+
+    def test_writeNones(self):
+        import datetime
+        import time
+        f = tempfile.NamedTemporaryFile(
+            suffix='.dbf')
+        fname = f.name
+        f.close()
+        db = pysal.core.IOHandlers.pyDbfIO.DBF(fname, 'w')
+        db.header = ["recID", "date", "strID", "aFloat"]
+        db.field_spec = [('N', 10, 0), ('D', 8, 0), ('C', 10, 0), ('N', 5, 5)]
+        records = []
+        for i in range(10):
+            d = datetime.date(*time.localtime()[:3])
+            rec = [i + 1, d, str(i + 1), (i + 1) / 2.0]
+            records.append(rec)
+        records.append([None, None, '', None])
+        records.append(rec)
+        for rec in records:
+            db.write(rec)
+        db.close()
+        db2 = pysal.core.IOHandlers.pyDbfIO.DBF(fname, 'r')
+        self.assertEquals(records, db2.read())
+
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_pyShpIO.py b/pysal/core/IOHandlers/tests/test_pyShpIO.py
new file mode 100644
index 0000000..ac48681
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_pyShpIO.py
@@ -0,0 +1,85 @@
+import unittest
+import pysal
+import tempfile
+import os
+
+
+class test_PurePyShpWrapper(unittest.TestCase):
+    def setUp(self):
+        test_file = pysal.examples.get_path('10740.shp')
+        self.test_file = test_file
+        self.shpObj = pysal.core.IOHandlers.pyShpIO.PurePyShpWrapper(
+            test_file, 'r')
+        f = tempfile.NamedTemporaryFile(suffix='.shp')
+        shpcopy = f.name
+        f.close()
+        self.shpcopy = shpcopy
+        self.shxcopy = shpcopy.replace('.shp', '.shx')
+
+    def test_len(self):
+        self.assertEquals(len(self.shpObj), 195)
+
+    def test_tell(self):
+        self.assertEquals(self.shpObj.tell(), 0)
+        self.shpObj.read(1)
+        self.assertEquals(self.shpObj.tell(), 1)
+        self.shpObj.read(50)
+        self.assertEquals(self.shpObj.tell(), 51)
+        self.shpObj.read()
+        self.assertEquals(self.shpObj.tell(), 195)
+
+    def test_seek(self):
+        self.shpObj.seek(0)
+        self.assertEquals(self.shpObj.tell(), 0)
+        self.shpObj.seek(55)
+        self.assertEquals(self.shpObj.tell(), 55)
+        self.shpObj.read(1)
+        self.assertEquals(self.shpObj.tell(), 56)
+
+    def test_read(self):
+        self.shpObj.seek(0)
+        objs = self.shpObj.read()
+        self.assertEquals(len(objs), 195)
+
+        self.shpObj.seek(0)
+        objsB = list(self.shpObj)
+        self.assertEquals(len(objsB), 195)
+
+        for shpA, shpB in zip(objs, objsB):
+            self.assertEquals(shpA.vertices, shpB.vertices)
+
+    def test_random_access(self):
+        self.shpObj.seek(57)
+        shp57 = self.shpObj.read(1)[0]
+        self.shpObj.seek(32)
+        shp32 = self.shpObj.read(1)[0]
+
+        self.shpObj.seek(57)
+        self.assertEquals(self.shpObj.read(1)[0].vertices, shp57.vertices)
+        self.shpObj.seek(32)
+        self.assertEquals(self.shpObj.read(1)[0].vertices, shp32.vertices)
+
+    def test_write(self):
+        out = pysal.core.IOHandlers.pyShpIO.PurePyShpWrapper(self.shpcopy, 'w')
+        self.shpObj.seek(0)
+        for shp in self.shpObj:
+            out.write(shp)
+        out.close()
+
+        orig = open(self.test_file, 'rb')
+        copy = open(self.shpcopy, 'rb')
+        self.assertEquals(orig.read(), copy.read())
+        orig.close()
+        copy.close()
+
+        oshx = open(self.test_file.replace('.shp', '.shx'), 'rb')
+        cshx = open(self.shxcopy, 'rb')
+        self.assertEquals(oshx.read(), cshx.read())
+        oshx.close()
+        cshx.close()
+
+        os.remove(self.shpcopy)
+        os.remove(self.shxcopy)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_stata_txt.py b/pysal/core/IOHandlers/tests/test_stata_txt.py
new file mode 100644
index 0000000..81a9a70
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_stata_txt.py
@@ -0,0 +1,61 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.stata_txt import StataTextIO
+import tempfile
+import os
+
+
+class test_StataTextIO(unittest.TestCase):
+    def setUp(self):
+        self.test_file_sparse = test_file_sparse = pysal.examples.get_path(
+            'stata_sparse.txt')
+        self.test_file_full = test_file_full = pysal.examples.get_path(
+            'stata_full.txt')
+        self.obj_sparse = StataTextIO(test_file_sparse, 'r')
+        self.obj_full = StataTextIO(test_file_full, 'r')
+
+    def test_close(self):
+        for obj in [self.obj_sparse, self.obj_full]:
+            f = obj
+            f.close()
+            self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w_sparse = self.obj_sparse.read()
+        self.assertEqual(56, w_sparse.n)
+        self.assertEqual(4.0, w_sparse.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0, 1.0, 1.0], w_sparse[1].values())
+
+        w_full = self.obj_full.read()
+        self.assertEqual(56, w_full.n)
+        self.assertEqual(4.0, w_full.mean_neighbors)
+        self.assertEqual(
+            [0.125, 0.125, 0.125, 0.125, 0.125], w_full[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj_sparse.read)
+        self.failUnlessRaises(StopIteration, self.obj_full.read)
+        self.obj_sparse.seek(0)
+        self.obj_full.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        for obj in [self.obj_sparse, self.obj_full]:
+            w = obj.read()
+            f = tempfile.NamedTemporaryFile(
+                suffix='.txt', dir=pysal.examples.get_path(''))
+            fname = f.name
+            f.close()
+            o = pysal.open(fname, 'w', 'stata_text')
+            if obj == self.obj_sparse:
+                o.write(w)
+            else:
+                o.write(w, matrix_form=True)
+            o.close()
+            wnew = pysal.open(fname, 'r', 'stata_text').read()
+            self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+            os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_wk1.py b/pysal/core/IOHandlers/tests/test_wk1.py
new file mode 100644
index 0000000..2a32f20
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_wk1.py
@@ -0,0 +1,44 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.wk1 import Wk1IO
+import tempfile
+import os
+
+
+class test_Wk1IO(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('spat-sym-us.wk1')
+        self.obj = Wk1IO(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+
+    def test_read(self):
+        w = self.obj.read()
+        self.assertEqual(46, w.n)
+        self.assertEqual(4.0869565217391308, w.mean_neighbors)
+        self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
+
+    def test_seek(self):
+        self.test_read()
+        self.failUnlessRaises(StopIteration, self.obj.read)
+        self.obj.seek(0)
+        self.test_read()
+
+    def test_write(self):
+        w = self.obj.read()
+        f = tempfile.NamedTemporaryFile(
+            suffix='.wk1', dir=pysal.examples.get_path(''))
+        fname = f.name
+        f.close()
+        o = pysal.open(fname, 'w')
+        o.write(w)
+        o.close()
+        wnew = pysal.open(fname, 'r').read()
+        self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
+        os.remove(fname)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/tests/test_wkt.py b/pysal/core/IOHandlers/tests/test_wkt.py
new file mode 100644
index 0000000..caeb639
--- /dev/null
+++ b/pysal/core/IOHandlers/tests/test_wkt.py
@@ -0,0 +1,31 @@
+import unittest
+import pysal
+from pysal.core.IOHandlers.wkt import WKTReader
+
+
+class test_WKTReader(unittest.TestCase):
+    def setUp(self):
+        self.test_file = test_file = pysal.examples.get_path('stl_hom.wkt')
+        self.obj = WKTReader(test_file, 'r')
+
+    def test_close(self):
+        f = self.obj
+        f.close()
+        self.failUnlessRaises(ValueError, f.read)
+        # w_kt_reader = WKTReader(*args, **kwargs)
+        # self.assertEqual(expected, w_kt_reader.close())
+
+    def test_open(self):
+        f = self.obj
+        expected = ['wkt']
+        self.assertEqual(expected, f.FORMATS)
+
+    def test__read(self):
+        polys = self.obj.read()
+        self.assertEqual(78, len(polys))
+        self.assertEqual((-91.195784694307383, 39.990883050220845),
+                         polys[1].centroid)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/IOHandlers/wk1.py b/pysal/core/IOHandlers/wk1.py
new file mode 100644
index 0000000..fccca98
--- /dev/null
+++ b/pysal/core/IOHandlers/wk1.py
@@ -0,0 +1,330 @@
+import pysal
+import os.path
+import struct
+import pysal.core.FileIO as FileIO
+from pysal.weights import W
+from warnings import warn
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["Wk1IO"]
+
+
+class Wk1IO(FileIO.FileIO):
+    """
+    Based on MATLAB's wk1read.m and wk1write.m, written by Brian M. Bourgault on 10/22/93.
+
+    Opens, reads, and writes weights file objects in Lotus Wk1 format.
+
+    Lotus Wk1 file is used in Dr. LeSage's MATLAB Econometrics library.
+
+    A Wk1 file holds a spatial weights object in a full matrix form
+    without any row and column headers.
+    The maximum number of columns supported in a Wk1 file is 256.
+    Wk1 starts the row (column) number from 0 and
+    uses little endian binary encoding.
+    In PySAL, when the number of observations is n,
+    it is assumed that each cell of a n\*n(=m) matrix either is a blank or
+    have a number.
+
+    The internal structure of a Wk1 file written by PySAL is as follows:
+    [BOF][DIM][CPI][CAL][CMODE][CORD][SPLIT][SYNC][CURS][WIN]
+    [HCOL][MRG][LBL][CELL_1]...[CELL_m][EOF]
+    where [CELL_k] equals to [DTYPE][DLEN][DFORMAT][CINDEX][CVALUE].
+    The parts between [BOF] and [CELL_1] are variable according to the software
+    program used to write a wk1 file. While reading a wk1 file,
+    PySAL ignores them.
+    Each part of this structure is detailed below.
+
+ .. table:: Lotus WK1 fields
+
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |Part         |Description          |Data Type                |Length |Value                        |
+   +=============+=====================+=========================+=======+=============================+
+   |[BOF]        |Beginning of field   |unsigned character       |6      |0,0,2,0,6,4                  |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[DIM]        |Matrix dimension                                                                     |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [DIMDTYPE] |Type of dim. rec     |unsigned short           |2      |6                            |
+   |  [DIMLEN]   |Length of dim. rec   |unsigned short           |2      |8                            |
+   |  [DIMVAL]   |Value of dim. rec    |unsigned short           |8      |0,0,n,n                      |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CPI]        |CPI                                                                                  |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [CPITYPE]  |Type of cpi rec      |unsigned short           |2      |150                          |
+   |  [CPILEN]   |Length of cpi rec    |unsigned short           |2      |6                            |
+   |  [CPIVAL]   |Value of cpi rec     |unsigned char            |6      |0,0,0,0,0,0                  |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CAL]        |calcount                                                                             |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [CALTYPE]  |Type of calcount rec |unsigned short           |2      |47                           |
+   |  [CALLEN]   |Length calcount rec  |unsigned short           |2      |1                            |
+   |  [CALVAL]   |Value of calcount rec|unsigned char            |1      |0                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CMODE]      |calmode                                                                              |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [CMODETYP] |Type of calmode rec  |unsigned short           |2      |2                            |
+   |  [CMODELEN] |Length of calmode rec|unsigned short           |2      |1                            |
+   |  [CMODEVAL] |Value of calmode rec |signed char              |1      |0                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CORD]       |calorder                                                                             |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [CORDTYPE] |Type of calorder rec |unsigned short           |2      |3                            |
+   |  [CORDLEN]  |Length calorder rec  |unsigned short           |2      |1                            |
+   |  [CORDVAL]  |Value of calorder rec|signed char              |1      |0                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[SPLIT]      |split                                                                                |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [SPLTYPE]  |Type of split rec    |unsigned short           |2      |4                            |
+   |  [SPLLEN]   |Length of split rec  |unsigned short           |2      |1                            |
+   |  [SPLVAL]   |Value of split rec   |signed char              |1      |0                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[SYNC]       |sync                                                                                 |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [SYNCTYP]  |Type of sync rec     |unsigned short           |2      |5                            |
+   |  [SYNCLEN]  |Length of sync rec   |unsigned short           |2      |1                            |
+   |  [SYNCVAL]  |Value of sync rec    |signed char              |1      |0                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CURS]       |cursor                                                                               |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [CURSTYP]  |Type of cursor rec   |unsigned short           |2      |49                           |
+   |  [CURSLEN]  |Length of cursor rec |unsigned short           |2      |1                            |
+   |  [CURSVAL]  |Value of cursor rec  |signed char              |1      |1                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[WIN]        |window                                                                               |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [WINTYPE]  |Type of window rec   |unsigned short           |2      |7                            |
+   |  [WINLEN]   |Length of window rec |unsigned short           |2      |32                           |
+   |  [WINVAL1]  |Value 1 of window rec|unsigned short           |4      |0,0                          |
+   |  [WINVAL2]  |Value 2 of window rec|signed char              |2      |113,0                        |
+   |  [WINVAL3]  |Value 3 of window rec|unsigned short           |26     |10,n,n,0,0,0,0,0,0,0,0,72,0  |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[HCOL]       |hidcol                                                                               |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [HCOLTYP]  |Type of hidcol rec   |unsigned short           |2      |100                          |
+   |  [HCOLLEN]  |Length of hidcol rec |unsigned short           |2      |32                           |
+   |  [HCOLVAL]  |Value of hidcol rec  |signed char              |32     |0*32                         |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[MRG]        |margins                                                                              |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [MRGTYPE]  |Type of margins rec  |unsigned short           |2      |40                           |
+   |  [MRGLEN]   |Length of margins rec|unsigned short           |2      |10                           |
+   |  [MRGVAL]   |Value of margins rec |unsigned short           |10     |4,76,66,2,2                  |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[LBL]        |labels                                                                               |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [LBLTYPE]  |Type of labels rec   |unsigned short           |2      |41                           |
+   |  [LBLLEN]   |Length of labels rec |unsigned short           |2      |1                            |
+   |  [LBLVAL]   |Value of labels rec  |char                     |1      |'                            |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |[CELL_k]                                                                                           |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+   |  [DTYPE]    |Type of cell data    |unsigned short           |2      |[DTYPE][0]==0: end of file   |
+   |             |                     |                         |       |          ==14: number       |
+   |             |                     |                         |       |          ==16: formula      |
+   |             |                     |                         |       |          ==13: integer      |
+   |             |                     |                         |       |          ==11: nrange       |
+   |             |                     |                         |       |          ==else: unknown    |
+   |  [DLEN]     |Length of cell data  |unsigned short           |2      |                             |
+   |  [DFORMAT]  |Format of cell data  |not sure                 |1      |                             |
+   |  [CINDEX]   |Row, column of cell  |unsigned short           |4      |                             |
+   |  [CVALUE]   |Value of cell        |double, [DTYPE][0]==14   |8      |                             |
+   |             |                     |formula,[DTYPE][0]==16   |8 +    |[DTYPE][1] - 13              |
+   |             |                     |integer,[DTYPE][0]==13   |2      |                             |
+   |             |                     |nrange, [DTYPE][0]==11   |24     |                             |
+   |             |                     |else,   [DTYPE][0]==else |       |[DTYPE][1]                   |
+   |  [EOF]      |End of file          |unsigned short           |4      |1,0,0,0                      |
+   +-------------+---------------------+-------------------------+-------+-----------------------------+
+
+
+    """
+
+    FORMATS = ['wk1']
+    MODES = ['r', 'w']
+
+    def __init__(self, *args, **kwargs):
+        self._varName = 'Unknown'
+        FileIO.FileIO.__init__(self, *args, **kwargs)
+        self.file = open(self.dataPath, self.mode + 'b')
+
+    def _set_varName(self, val):
+        if issubclass(type(val), basestring):
+            self._varName = val
+
+    def _get_varName(self):
+        return self._varName
+    varName = property(fget=_get_varName, fset=_set_varName)
+
+    def read(self, n=-1):
+        self._complain_ifclosed(self.closed)
+        return self._read()
+
+    def seek(self, pos):
+        if pos == 0:
+            self.file.seek(0)
+            self.pos = 0
+
+    def _read(self):
+        """
+        Reads Lotus Wk1 file
+
+        Returns
+        -------
+        A pysal.weights.weights.W object
+
+        Examples
+        --------
+
+        Type 'dir(w)' at the interpreter to see what methods are supported.
+        Open a Lotus Wk1 file and read it into a pysal weights object
+
+        >>> w = pysal.open(pysal.examples.get_path('spat-sym-us.wk1'),'r').read()
+
+        Get the number of observations from the header
+
+        >>> w.n
+        46
+
+        Get the mean number of neighbors
+
+        >>> w.mean_neighbors
+        4.0869565217391308
+
+        Get neighbor distances for a single observation
+
+        >>> w[1]
+        {25: 1.0, 3: 1.0, 28: 1.0, 39: 1.0}
+
+        """
+        if self.pos > 0:
+            raise StopIteration
+
+        bof = struct.unpack('<6B', self.file.read(6))
+        if bof != (0, 0, 2, 0, 6, 4):
+            raise ValueError('The header of your file is wrong!')
+
+        neighbors = {}
+        weights = {}
+        dtype, dlen = struct.unpack('<2H', self.file.read(4))
+        while(dtype != 1):
+            if dtype in [13, 14, 16]:
+                self.file.read(1)
+                row, column = struct.unpack('2H', self.file.read(4))
+                format, length = '<d', 8
+                if dtype == 13:
+                    format, length = '<h', 2
+                value = float(struct.unpack(format, self.file.read(length))[0])
+                if value > 0:
+                    ngh = neighbors.setdefault(row, [])
+                    ngh.append(column)
+                    wgt = weights.setdefault(row, [])
+                    wgt.append(value)
+                if dtype == 16:
+                    self.file.read(dlen - 13)
+            elif dtype == 11:
+                self.file.read(24)
+            else:
+                self.file.read(dlen)
+            dtype, dlen = struct.unpack('<2H', self.file.read(4))
+
+        self.pos += 1
+        return W(neighbors, weights)
+
+    def write(self, obj):
+        """
+
+        Parameters
+        ----------
+        .write(weightsObject)
+        accepts a weights object
+
+        Returns
+        -------
+
+        a Lotus wk1 file
+        write a weights object to the opened wk1 file.
+
+        Examples
+        --------
+
+        >>> import tempfile, pysal, os
+        >>> testfile = pysal.open(pysal.examples.get_path('spat-sym-us.wk1'),'r')
+        >>> w = testfile.read()
+
+        Create a temporary file for this example
+
+        >>> f = tempfile.NamedTemporaryFile(suffix='.wk1')
+
+        Reassign to new var
+
+        >>> fname = f.name
+
+        Close the temporary named file
+
+        >>> f.close()
+
+        Open the new file in write mode
+
+        >>> o = pysal.open(fname,'w')
+
+        Write the Weights object into the open file
+
+        >>> o.write(w)
+        >>> o.close()
+
+        Read in the newly created text file
+
+        >>> wnew =  pysal.open(fname,'r').read()
+
+        Compare values from old to new
+
+        >>> wnew.pct_nonzero == w.pct_nonzero
+        True
+
+        Clean up temporary file created for this example
+
+        >>> os.remove(fname)
+
+        """
+        self._complain_ifclosed(self.closed)
+        if issubclass(type(obj), W):
+            f = self.file
+            n = obj.n
+            if n > 256:
+                raise ValueError('WK1 file format supports only up to 256 observations.')
+            pack = struct.pack
+            f.write(pack('<6B', 0, 0, 2, 0, 6, 4))
+            f.write(pack('<6H', 6, 8, 0, 0, n, n))
+            f.write(pack('<2H6B', 150, 6, 0, 0, 0, 0, 0, 0))
+            f.write(pack('<2H1B', 47, 1, 0))
+            f.write(pack('<2H1b', 2, 1, 0))
+            f.write(pack('<2H1b', 3, 1, 0))
+            f.write(pack('<2H1b', 4, 1, 0))
+            f.write(pack('<2H1b', 5, 1, 0))
+            f.write(pack('<2H1b', 49, 1, 1))
+            f.write(pack('<4H2b13H', 7, 32, 0, 0, 113, 0, 10,
+                         n, n, 0, 0, 0, 0, 0, 0, 0, 0, 72, 0))
+            hidcol = tuple(['<2H32b', 100, 32] + [0] * 32)
+            f.write(pack(*hidcol))
+            f.write(pack('<7H', 40, 10, 4, 76, 66, 2, 2))
+            f.write(pack('<2H1c', 41, 1, "'"))
+
+            id2i = obj.id2i
+            for i, w_i in enumerate(obj):
+                row = [0.0] * n
+                for k in w_i[1]:
+                    row[id2i[k]] = w_i[1][k]
+                for c, v in enumerate(row):
+                    cell = tuple(['<2H1b2H1d', 14, 13, 113, i, c, v])
+                    f.write(pack(*cell))
+            f.write(pack('<4B', 1, 0, 0, 0))
+            self.pos += 1
+
+        else:
+            raise TypeError("Expected a pysal weights object, got: %s" % (
+                type(obj)))
+
+    def close(self):
+        self.file.close()
+        FileIO.FileIO.close(self)
+
+
diff --git a/pysal/core/IOHandlers/wkt.py b/pysal/core/IOHandlers/wkt.py
new file mode 100644
index 0000000..320afc2
--- /dev/null
+++ b/pysal/core/IOHandlers/wkt.py
@@ -0,0 +1,98 @@
+import pysal.core.FileIO as FileIO
+from pysal.core.util import WKTParser
+from pysal import cg
+import re
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['WKTReader']
+#####################################################################
+## ToDo: Add Well-Known-Binary support...
+##       * WKB spec:
+##  http://webhelp.esri.com/arcgisserver/9.3/dotNet/index.htm#geodatabases/the_ogc_103951442.htm
+##
+##
+#####################################################################
+
+
class WKTReader(FileIO.FileIO):
    """
    Reads a Well-Known-Text file (one geometry per line) and returns
    PySAL geometry objects.

    Examples
    --------
    Read in WKT-formatted file

    >>> import pysal
    >>> f = pysal.open(pysal.examples.get_path('stl_hom.wkt'), 'r')

    Convert wkt to pysal polygons

    >>> polys = f.read()

    Check length

    >>> len(polys)
    78

    Return centroid of polygon at index 1

    >>> polys[1].centroid
    (-91.19578469430738, 39.990883050220845)

    Type dir(polys[1]) at the python interpreter to get a list of supported methods

    """
    MODES = ['r']
    FORMATS = ['wkt']

    def __init__(self, *args, **kwargs):
        FileIO.FileIO.__init__(self, *args, **kwargs)
        # Record index -> byte offset of that record's line.  Filled
        # lazily as lines are read so seek() can jump back to any
        # previously visited record.
        self.__idx = {}
        # Local read cursor, tracked alongside the base class's self.pos.
        self.__pos = 0
        self.__open()

    def open(self):
        self.__open()

    def __open(self):
        # Open the raw text file and prepare the WKT parser.
        self.dataObj = open(self.dataPath, self.mode)
        self.wkt = WKTParser()

    def _read(self):
        # Read and parse the next WKT line; returns None at end of file
        # (after rewinding to record 0 so iteration can restart).
        FileIO.FileIO._complain_ifclosed(self.closed)
        if self.__pos not in self.__idx:
            # Remember where this record starts before consuming it.
            self.__idx[self.__pos] = self.dataObj.tell()
        line = self.dataObj.readline()
        if line:
            shape = self.wkt.fromWKT(line)
            # Tag the shape with the cursor value before advancing it.
            shape.id = self.pos
            self.__pos += 1
            self.pos += 1
            return shape
        else:
            self.seek(0)
            return None

    def seek(self, n):
        # Position the reader at record n.  If n has not been visited yet,
        # scan forward (caching offsets as _read() goes) until it has.
        FileIO.FileIO.seek(self, n)
        pos = self.pos
        if pos in self.__idx:
            self.dataObj.seek(self.__idx[pos])
            self.__pos = pos
        else:
            while pos not in self.__idx:
                s = self._read()
                if not s:
                    # Reached EOF without finding record n.
                    raise IndexError("%d not in range(0,%d)" % (
                        pos, max(self.__idx.keys())))
            self.pos = pos
            self.__pos = pos
            self.dataObj.seek(self.__idx[pos])

    def close(self):
        self.dataObj.close()
        FileIO.FileIO.close(self)
+
diff --git a/pysal/core/Tables.py b/pysal/core/Tables.py
new file mode 100644
index 0000000..04c8a26
--- /dev/null
+++ b/pysal/core/Tables.py
@@ -0,0 +1,167 @@
+__all__ = ['DataTable']
+import FileIO
+import numpy as np
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+
+
class DataTable(FileIO.FileIO):
    """ DataTable provides additional functionality to FileIO for data table file tables
        FileIO Handlers that provide data tables should subclass this instead of FileIO """
    class _By_Col:
        # Thin proxy returned by the by_col property; forwards column
        # lookups/assignments to the parent DataTable.
        def __init__(self, parent):
            self.p = parent

        def __repr__(self):
            return "keys: " + self.p.header.__repr__()

        def __getitem__(self, key):
            return self.p._get_col(key)

        def __setitem__(self, key, val):
            # NOTE(review): delegates to parent.cast, which subclasses are
            # expected to provide -- confirm against the dbf handler.
            self.p.cast(key, val)

        def __call__(self, key):
            return self.p._get_col(key)

    def __init__(self, *args, **kwargs):
        FileIO.FileIO.__init__(self, *args, **kwargs)

    def __repr__(self):
        return 'DataTable: % s' % self.dataPath

    def __len__(self):
        """ __len__ should be implemented by DataTable Subclasses """
        raise NotImplementedError

    @property
    def by_col(self):
        # Column accessor: table.by_col['NAME'] or table.by_col('NAME').
        return self._By_Col(self)

    def _get_col(self, key):
        """ returns the column vector
        """
        if not self.header:
            raise AttributeError('Please set the header')
        if key in self.header:
            # Full-row slice restricted to the named column's index.
            return self[:, self.header.index(key)]
        else:
            raise AttributeError('Field: % s does not exist in header' % key)

    def by_col_array(self, variable_names):
        """
        Return columns of table as a numpy array

        Parameters
        ----------

        variable_names:  list of strings of length k
                         names of variables to extract

        Returns
        -------
        implicit:    numpy array of shape (n,k)


        Notes
        -----

        If the variables are not all of the same data type, then numpy rules
        for casting will result in a uniform type applied to all variables.

        Examples
        --------

        >>> import pysal as ps
        >>> dbf = ps.open(ps.examples.get_path('NAT.dbf'))
        >>> hr = dbf.by_col_array(['HR70', 'HR80'])
        >>> hr[0:5]
        array([[  0.        ,   8.85582713],
               [  0.        ,  17.20874204],
               [  1.91515848,   3.4507747 ],
               [  1.28864319,   3.26381409],
               [  0.        ,   7.77000777]])
        >>> hr = dbf.by_col_array(['HR80', 'HR70'])
        >>> hr[0:5]
        array([[  8.85582713,   0.        ],
               [ 17.20874204,   0.        ],
               [  3.4507747 ,   1.91515848],
               [  3.26381409,   1.28864319],
               [  7.77000777,   0.        ]])
        >>> hr = dbf.by_col_array(['HR80'])
        >>> hr[0:5]
        array([[  8.85582713],
               [ 17.20874204],
               [  3.4507747 ],
               [  3.26381409],
               [  7.77000777]])
        
        Numpy only supports homogeneous arrays. See Notes above.

        >>> hr = dbf.by_col_array(['STATE_NAME', 'HR80'])
        >>> hr[0:5]
        array([['Minnesota', '8.8558271343'],
               ['Washington', '17.208742041'],
               ['Washington', '3.4507746989'],
               ['Washington', '3.2638140931'],
               ['Washington', '7.77000777']], 
              dtype='|S20')


        """
        # Columns are gathered row-major then transposed to (n, k).
        lst = [self._get_col(variable) for variable in variable_names]
        return np.array(lst).T

    def __getitem__(self, key):
        """ DataTables fully support slicing in 2D,
            To provide slicing,  handlers must provide __len__
            Slicing accepts up to two arguments.
            Syntax,
            table[row]
            table[row, col]
            table[row_start:row_stop]
            table[row_start:row_stop:row_step]
            table[:, col]
            table[:, col_start:col_stop]
            etc.

            ALL indices are Zero-Offsets,
            i.e.
            #>>> assert index in range(0, len(table))
        """
        # The read cursor is restored before returning, so slicing does
        # not disturb sequential iteration.
        prevPos = self.tell()
        if issubclass(type(key), basestring):
            raise TypeError("index should be int or slice")
        if issubclass(type(key), int) or isinstance(key, slice):
            rows = key
            cols = None
        elif len(key) > 2:
            raise TypeError("DataTables support two dimmensional slicing,  % d slices provided" % len(key))
        elif len(key) == 2:
            rows, cols = key
        else:
            raise TypeError("Key: % r,  is confusing me.  I don't know what to do" % key)
        if isinstance(rows, slice):
            row_start, row_stop, row_step = rows.indices(len(self))
            self.seek(row_start)
            data = [self.next() for i in range(row_start, row_stop, row_step)]
        else:
            # Single row: slice(rows).indices(len(self))[1] clips the index
            # into [0, len(self)] and resolves negative indices.
            self.seek(slice(rows).indices(len(self))[1])
            data = [self.next()]
        if cols is not None:
            if isinstance(cols, slice):
                col_start, col_stop, col_step = cols.indices(len(data[0]))
                data = [r[col_start:col_stop:col_step] for r in data]
            else:
                #col_start, col_stop, col_step = cols, cols+1, 1
                data = [r[cols] for r in data]
        self.seek(prevPos)
        return data
+
+
def _test():
    # Run this module's doctests verbosely when executed as a script.
    import doctest
    doctest.testmod(verbose=True)

if __name__ == '__main__':
    _test()
diff --git a/pysal/core/__init__.py b/pysal/core/__init__.py
new file mode 100644
index 0000000..d18cc4f
--- /dev/null
+++ b/pysal/core/__init__.py
@@ -0,0 +1 @@
+import FileIO
diff --git a/pysal/core/tests/__init__.py b/pysal/core/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/core/tests/test_FileIO.py b/pysal/core/tests/test_FileIO.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/core/util/__init__.py b/pysal/core/util/__init__.py
new file mode 100644
index 0000000..2a87f35
--- /dev/null
+++ b/pysal/core/util/__init__.py
@@ -0,0 +1,2 @@
+from wkt import *
+from shapefile import *
diff --git a/pysal/core/util/shapefile.py b/pysal/core/util/shapefile.py
new file mode 100644
index 0000000..973b152
--- /dev/null
+++ b/pysal/core/util/shapefile.py
@@ -0,0 +1,758 @@
+"""
+A Pure Python ShapeFile Reader and Writer
+This module is selfcontained and does not require pysal.
+This module returns and expects dictionary based data strucutres.
+This module should be wrapped into your native data strcutures.
+
+Contact:
+Charles Schmidt
+GeoDa Center
+Arizona State University
+Tempe, AZ
+http://geodacenter.asu.edu
+"""
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+
+from struct import calcsize, unpack, pack
+from cStringIO import StringIO
+from itertools import izip, islice
+import array
+import sys
# Byte-order character of the host platform, used to decide whether
# arrays read from disk need byteswapping.
if sys.byteorder == 'little':
    SYS_BYTE_ORDER = '<'
else:
    SYS_BYTE_ORDER = '>'
# Native sizes of the two struct codes used by the header/record tables
# ('i' -> 4 bytes, 'd' -> 8 bytes on common platforms).
STRUCT_ITEMSIZE = {}
STRUCT_ITEMSIZE['i'] = calcsize('i')
STRUCT_ITEMSIZE['d'] = calcsize('d')
+
+__all__ = ['shp_file', 'shx_file']
+
+#SHAPEFILE Globals
+
+
def struct2arrayinfo(struct):
    """Collapse a field-structure description into grouped unpack info.

    Parameters
    ----------
    struct : sequence of (name, fmt_char, byte_order) tuples describing
             consecutive binary fields (e.g. HEADERSTRUCT).

    Returns
    -------
    list of dicts, one per run of consecutive fields sharing a byte
    order, each with keys:
        'names' -- field names in the run, in order
        'fmt'   -- concatenated struct format characters
        'size'  -- total size of the run in bytes
        'order' -- the shared byte-order character ('<' or '>')

    Grouping lets callers issue one file read and one struct.unpack per
    run instead of one per field.

    Notes
    -----
    Uses struct.calcsize for item sizes, so any struct format character
    is supported (the original table only covered 'i' and 'd'), and the
    builtin name `type` is no longer shadowed.  An empty description
    yields an empty list.
    """
    groups = []
    for name, fmt, order in struct:
        # Start a new group whenever the byte order changes.
        if not groups or groups[-1]['order'] != order:
            groups.append({'names': [name], 'size': calcsize(fmt),
                           'fmt': fmt, 'order': order})
        else:
            last = groups[-1]
            last['names'].append(name)
            last['size'] += calcsize(fmt)
            last['fmt'] += fmt
    return groups
+
# Field layout of the 100-byte main header shared by SHP and SHX files.
# Each entry is (field name, struct format char, byte order); the ESRI
# spec mixes big-endian (file code/length) and little-endian fields.
HEADERSTRUCT = (
    ('File Code', 'i', '>'),
    ('Unused0', 'i', '>'),
    ('Unused1', 'i', '>'),
    ('Unused2', 'i', '>'),
    ('Unused3', 'i', '>'),
    ('Unused4', 'i', '>'),
    ('File Length', 'i', '>'),
    ('Version', 'i', '<'),
    ('Shape Type', 'i', '<'),
    ('BBOX Xmin', 'd', '<'),
    ('BBOX Ymin', 'd', '<'),
    ('BBOX Xmax', 'd', '<'),
    ('BBOX Ymax', 'd', '<'),
    ('BBOX Zmin', 'd', '<'),
    ('BBOX Zmax', 'd', '<'),
    ('BBOX Mmin', 'd', '<'),
    ('BBOX Mmax', 'd', '<'))
# Grouped form consumed by _unpackDict.
UHEADERSTRUCT = struct2arrayinfo(HEADERSTRUCT)
# Per-record header: record number and content length, both big-endian.
RHEADERSTRUCT = (
    ('Record Number', 'i', '>'),
    ('Content Length', 'i', '>'))
URHEADERSTRUCT = struct2arrayinfo(RHEADERSTRUCT)
+
+
def noneMax(a, b):
    """max() that treats None as "no value": a None operand yields the
    other operand, and None is returned only when both are None."""
    if a is None or b is None:
        return a if b is None else b
    return max(a, b)
+
+
def noneMin(a, b):
    """min() that treats None as "no value": a None operand yields the
    other operand, and None is returned only when both are None."""
    if a is None or b is None:
        return a if b is None else b
    return min(a, b)
+
+
+def _unpackDict(structure, fileObj):
+    """Utility Function, Requires a Tuple of tuples that desribes the element structure...
+
+    _unpackDict(structure tuple, fileObj file) -> dict
+
+    Arguments:
+        structure -- tuple of tuples -- (('FieldName 1','type','byteOrder'),('FieldName 2','type','byteOrder'))
+        fileObj -- file -- an open file at the correct position!
+    Returns:
+        {'FieldName 1': value, 'FieldName 2': value}
+    Side Effects:
+        #file at new position
+
+    Example:
+    >>> import pysal
+    >>> _unpackDict(UHEADERSTRUCT,open(pysal.examples.get_path('10740.shx'),'rb')) == {'BBOX Xmax': -105.29012, 'BBOX Ymax': 36.219799000000002, 'BBOX Mmax': 0.0, 'BBOX Zmin': 0.0, 'BBOX Mmin': 0.0, 'File Code': 9994, 'BBOX Ymin': 34.259672000000002, 'BBOX Xmin': -107.62651, 'Unused0': 0, 'Unused1': 0, 'Unused2': 0, 'Unused3': 0, 'Unused4': 0, 'Version': 1000, 'BBOX Zmax': 0.0, 'Shape Type': 5, 'File Length': 830}
+    True
+    """
+    d = {}
+    for struct in structure:
+        items = unpack(struct['order'] + struct['fmt'],
+                       fileObj.read(struct['size']))
+        for i, name in enumerate(struct['names']):
+            d[name] = items[i]
+    return d
+
+
def _unpackDict2(d, structure, fileObj):
    """Read homogeneous binary arrays into d using the array module.

    Arguments:
        d -- dict -- Dictionary to be updated in place.
        structure -- tuple of tuples -- (('FieldName', ('typecode', n), 'byteOrder'), ...)
            where n is the number of items of that array typecode to read.
        fileObj -- file -- an open binary file positioned at the data.

    Returns d (the same dict, updated); also advances fileObj.
    """
    for name, dtype, order in structure:
        # Each dtype is a (typecode, count) pair.
        dtype, n = dtype
        result = array.array(dtype)
        # NOTE: fromstring is the Python 2 spelling (renamed frombytes in 3.x).
        result.fromstring(fileObj.read(result.itemsize * n))
        if order != SYS_BYTE_ORDER:
            # File byte order differs from the host's; swap in place.
            result.byteswap()
        d[name] = result.tolist()
    return d
+
+
+def _packDict(structure, d):
+    """Utility Function
+
+    _packDict(structure tuple, d dict) -> str
+
+    Arguments:
+        structure -- tuple of tuples -- (('FieldName 1','type','byteOrder'),('FieldName 2','type','byteOrder'))
+        d -- dict -- {'FieldName 1': value, 'FieldName 2': value}
+
+    Example:
+    >>> s = _packDict( (('FieldName 1','i','<'),('FieldName 2','i','<')), {'FieldName 1': 1, 'FieldName 2': 2} )
+    >>> s==pack('<ii',1,2)
+    True
+    >>> unpack('<ii',s)
+    (1, 2)
+    """
+    string = b''
+    for name, dtype, order in structure:
+        if len(dtype) > 1:
+            string += pack(order + dtype, *d[name])
+        else:
+            string += pack(order + dtype, d[name])
+    return string
+
+
class shp_file:
    """
    Reads and Writes the SHP component of a ShapeFile

    Attributes:
    header -- dict -- Contents of the SHP header. #For contents see: HEADERSTRUCT
    shape -- the shape handler class for this file's shape type (an entry
             of TYPE_DISPATCH, e.g. Point or PolyLine).

    Notes: The header of both the SHP and SHX files are identical.
    """
    # String names accepted by the constructor, mapped to the integer type
    # codes stored in the file header (ESRI shapefile specification).
    SHAPE_TYPES = {'POINT': 1, 'ARC': 3, 'POLYGON': 5, 'MULTIPOINT': 8, 'POINTZ': 11, 'ARCZ': 13, 'POLYGONZ': 15, 'MULTIPOINTZ': 18, 'POINTM': 21, 'ARCM': 23, 'POLYGONM': 25, 'MULTIPOINTM': 28, 'MULTIPATCH': 31}

    def __iswritable(self):
        # Raise unless opened in write mode.  An explicit test is used
        # instead of `assert` so the guard survives `python -O`.
        if self.__mode != 'w':
            raise IOError("[Errno 9] Bad file descriptor")
        return True

    def __isreadable(self):
        # Raise unless opened in read mode (see note in __iswritable).
        if self.__mode != 'r':
            raise IOError("[Errno 9] Bad file descriptor")
        return True

    def __init__(self, fileName, mode='r', shape_type=None):
        """
        Arguments:
        fileName -- base name of the shp/shx pair; a trailing '.shp',
                    '.shx' or '.dbf' extension is stripped if present.
        mode -- 'r' to read an existing file, 'w' to create a new one.
        shape_type -- required in 'w' mode; one of the SHAPE_TYPES keys.
        """
        self.__mode = mode
        if fileName.lower().endswith('.shp') or fileName.lower().endswith('.shx') or fileName.lower().endswith('.dbf'):
            fileName = fileName[:-4]
        self.fileName = fileName

        if mode == 'r':
            self._open_shp_file()
        elif mode == 'w':
            if shape_type not in self.SHAPE_TYPES:
                raise Exception('Attempt to create shp/shx file of invalid type')
            self._create_shp_file(shape_type)
        else:
            raise Exception('Only "w" and "r" modes are supported')

    def _open_shp_file(self):
        """
        Opens a shp/shx file.

        shp_file(fileName string, 'r') -> Shpfile

        Arguments:
        filename -- the name of the file to create
        mode -- string -- 'r'
        shape_type -- None

        Example:
        >>> import pysal
        >>> shp = shp_file(pysal.examples.get_path('10740.shp'))
        >>> shp.header == {'BBOX Xmax': -105.29012, 'BBOX Ymax': 36.219799000000002, 'BBOX Mmax': 0.0, 'BBOX Zmin': 0.0, 'BBOX Mmin': 0.0, 'File Code': 9994, 'BBOX Ymin': 34.259672000000002, 'BBOX Xmin': -107.62651, 'Unused0': 0, 'Unused1': 0, 'Unused2': 0, 'Unused3': 0, 'Unused4': 0, 'Version': 1000, 'BBOX Zmax': 0.0, 'Shape Type': 5, 'File Length': 260534}
        True
        >>> len(shp)
        195
        """
        self.__isreadable()
        fileName = self.fileName
        self.fileObj = open(fileName + '.shp', 'rb')
        # The companion index file supplies record offsets and lengths.
        self._shx = shx_file(fileName)
        self.header = _unpackDict(UHEADERSTRUCT, self.fileObj)
        # Dispatch to the pack/unpack handler for this file's shape type.
        self.shape = TYPE_DISPATCH[self.header['Shape Type']]
        self.__lastShape = 0
        # localizing for convenience
        self.__numRecords = self._shx.numRecords
        # constructing bounding box from header
        h = self.header
        self.bbox = [h['BBOX Xmin'], h['BBOX Ymin'],
                     h['BBOX Xmax'], h['BBOX Ymax']]
        self.shapeType = self.header['Shape Type']

    def _create_shp_file(self, shape_type):
        """
        Creates a shp/shx file.

        shp_file(fileName string, 'w', shape_type string) -> Shpfile

        Arguments:
        filename -- the name of the file to create
        mode -- string -- must be 'w'
        shape_type -- string -- the type of shp/shx file to create. must be one of
                the following: 'POINT', 'POINTZ', 'POINTM',
                'ARC', 'ARCZ', 'ARCM', 'POLYGON', 'POLYGONZ', 'POLYGONM',
                'MULTIPOINT', 'MULTIPOINTZ', 'MULTIPOINTM', 'MULTIPATCH'

        Example:
        >>> import pysal,os
        >>> shp = shp_file('test','w','POINT')
        >>> p = shp_file(pysal.examples.get_path('Point.shp'))
        >>> for pt in p:
        ...   shp.add_shape(pt)
        ...
        >>> shp.close()
        >>> open('test.shp','rb').read() == open(pysal.examples.get_path('Point.shp'),'rb').read()
        True
        >>> open('test.shx','rb').read() == open(pysal.examples.get_path('Point.shx'),'rb').read()
        True
        >>> os.remove('test.shx')
        >>> os.remove('test.shp')
        """
        self.__iswritable()
        fileName = self.fileName
        self.fileObj = open(fileName + '.shp', 'wb')
        self._shx = shx_file(fileName, 'w')
        self.header = {}
        self.header['Shape Type'] = self.SHAPE_TYPES[shape_type]
        self.header['Version'] = 1000
        self.header['Unused0'] = 0
        self.header['Unused1'] = 0
        self.header['Unused2'] = 0
        self.header['Unused3'] = 0
        self.header['Unused4'] = 0
        self.header['File Code'] = 9994
        # Running byte count; the header itself is 100 bytes.
        self.__file_Length = 100
        self.header['File Length'] = 0
        # The bbox starts empty (None) and grows as shapes are added;
        # see __update_bbox.
        self.header['BBOX Xmax'] = None
        self.header['BBOX Ymax'] = None
        self.header['BBOX Mmax'] = None
        self.header['BBOX Zmax'] = None
        self.header['BBOX Xmin'] = None
        self.header['BBOX Ymin'] = None
        self.header['BBOX Mmin'] = None
        self.header['BBOX Zmin'] = None
        self.shape = TYPE_DISPATCH[self.header['Shape Type']]
        #self.__numRecords = self._shx.numRecords

    def __len__(self):
        # NOTE(review): __numRecords is only set in read mode; calling
        # len() on a file opened for writing raises AttributeError.
        return self.__numRecords

    def __iter__(self):
        return self

    def type(self):
        # The string name ('POINT', 'ARC', ...) of this file's shape type.
        return self.shape.String_Type

    def next(self):
        """Return the next shape record as a dict; raises StopIteration
        (after resetting to the first record) at end of file."""
        self.__isreadable()
        nextShape = self.__lastShape
        if nextShape == self._shx.numRecords:
            self.__lastShape = 0
            raise StopIteration
        else:
            self.__lastShape = nextShape + 1
            return self.get_shape(nextShape)

    def __seek(self, pos):
        # Avoid redundant seeks when already at the target offset.
        if pos != self.fileObj.tell():
            self.fileObj.seek(pos)

    def __read(self, pos, size):
        self.__isreadable()
        if pos != self.fileObj.tell():
            self.fileObj.seek(pos)
        return self.fileObj.read(size)

    def get_shape(self, shpId):
        """Return the parsed record at 0-based index shpId as a dict."""
        self.__isreadable()
        if shpId + 1 > self.__numRecords:
            raise IndexError
        fPosition, content_length = self._shx.index[shpId]
        self.__seek(fPosition)
        # Consume the 8-byte record header (Record Number, Content Length)
        # that precedes the content; its values are not needed here.
        # (The original bound the returned dict to two names, which
        # unpacked the dict's KEYS, not its values.)
        _unpackDict(URHEADERSTRUCT, self.fileObj)
        return self.shape.unpack(StringIO(self.fileObj.read(content_length)))
        #return self.shape.unpack(self.fileObj.read(content_length))

    def __update_bbox(self, s):
        # Grow the header bounding box to include shape s.  Point records
        # carry bare X/Y(/Z/M); all other types carry their own bbox.
        h = self.header
        if s.get('Shape Type') == 1:
            h['BBOX Xmax'] = noneMax(h['BBOX Xmax'], s.get('X'))
            h['BBOX Ymax'] = noneMax(h['BBOX Ymax'], s.get('Y'))
            h['BBOX Mmax'] = noneMax(h['BBOX Mmax'], s.get('M'))
            h['BBOX Zmax'] = noneMax(h['BBOX Zmax'], s.get('Z'))
            h['BBOX Xmin'] = noneMin(h['BBOX Xmin'], s.get('X'))
            h['BBOX Ymin'] = noneMin(h['BBOX Ymin'], s.get('Y'))
            h['BBOX Mmin'] = noneMin(h['BBOX Mmin'], s.get('M'))
            h['BBOX Zmin'] = noneMin(h['BBOX Zmin'], s.get('Z'))
        else:
            h['BBOX Xmax'] = noneMax(h['BBOX Xmax'], s.get('BBOX Xmax'))
            h['BBOX Ymax'] = noneMax(h['BBOX Ymax'], s.get('BBOX Ymax'))
            h['BBOX Mmax'] = noneMax(h['BBOX Mmax'], s.get('BBOX Mmax'))
            h['BBOX Zmax'] = noneMax(h['BBOX Zmax'], s.get('BBOX Zmax'))
            h['BBOX Xmin'] = noneMin(h['BBOX Xmin'], s.get('BBOX Xmin'))
            h['BBOX Ymin'] = noneMin(h['BBOX Ymin'], s.get('BBOX Ymin'))
            h['BBOX Mmin'] = noneMin(h['BBOX Mmin'], s.get('BBOX Mmin'))
            h['BBOX Zmin'] = noneMin(h['BBOX Zmin'], s.get('BBOX Zmin'))
        if not self.shape.HASM:
            self.header['BBOX Mmax'] = 0.0
            self.header['BBOX Mmin'] = 0.0
        if not self.shape.HASZ:
            self.header['BBOX Zmax'] = 0.0
            self.header['BBOX Zmin'] = 0.0

    def add_shape(self, s):
        """Append shape dict s to the file, updating the header bbox."""
        self.__iswritable()
        self.__update_bbox(s)
        rec = self.shape.pack(s)
        con_len = len(rec)
        self.__file_Length += con_len + 8
        rec_id, pos = self._shx.add_record(con_len)
        self.__seek(pos)
        # The record header stores the content length in 16-bit words;
        # // keeps this integer arithmetic under Python 3 as well.
        self.fileObj.write(pack('>ii', rec_id, con_len // 2))
        self.fileObj.write(rec)

    def close(self):
        """Finalize the shx index and, in write mode, rewrite the header
        with the final file length and bounding box; then close the file."""
        self._shx.close(self.header)
        if self.__mode == 'w':
            # File Length is expressed in 16-bit words.
            self.header['File Length'] = self.__file_Length // 2
            self.__seek(0)
            self.fileObj.write(_packDict(HEADERSTRUCT, self.header))
        self.fileObj.close()
+
+
class shx_file:
    """
    Reads and Writes the SHX component of a ShapeFile

    Attributes:
    index -- list -- (file offset, content length) in BYTES of each record in the SHP component
    numRecords -- int -- Number of records

    """
    def __iswritable(self):
        # Explicit test instead of `assert` so the guard survives
        # `python -O`.
        if self.__mode != 'w':
            raise IOError("[Errno 9] Bad file descriptor")
        return True

    def __isreadable(self):
        if self.__mode != 'r':
            raise IOError("[Errno 9] Bad file descriptor")
        return True

    def __init__(self, fileName=None, mode='r'):
        self.__mode = mode
        # Accept the base name or any member of the shp/shx/dbf trio.
        if fileName.endswith('.shp') or fileName.endswith('.shx') or fileName.endswith('.dbf'):
            fileName = fileName[:-4]
        self.fileName = fileName

        if mode == 'r':
            self._open_shx_file()
        elif mode == 'w':
            self._create_shx_file()

    def _open_shx_file(self):
        """ Opens the SHX file.

        shx_file(filename,'r') --> shx_file

        Arguments:
        filename -- string -- extension is optional, will remove '.dbf','.shx','.shp' and append '.shx'
        mode -- string -- Must be 'r'

        Example:
        >>> import pysal
        >>> shx = shx_file(pysal.examples.get_path('10740'))
        >>> shx._header == {'BBOX Xmax': -105.29012, 'BBOX Ymax': 36.219799000000002, 'BBOX Mmax': 0.0, 'BBOX Zmin': 0.0, 'BBOX Mmin': 0.0, 'File Code': 9994, 'BBOX Ymin': 34.259672000000002, 'BBOX Xmin': -107.62651, 'Unused0': 0, 'Unused1': 0, 'Unused2': 0, 'Unused3': 0, 'Unused4': 0, 'Version': 1000, 'BBOX Zmax': 0.0, 'Shape Type': 5, 'File Length': 830}
        True
        >>> len(shx.index)
        195
        """
        self.__isreadable()
        self.fileObj = open(self.fileName + '.shx', 'rb')
        self._header = _unpackDict(UHEADERSTRUCT, self.fileObj)
        # File Length is in 16-bit words: subtract the 50-word header and
        # divide by the 4-word record size.  Floor division keeps this an
        # int (identical in Python 2, correct under Python 3).
        self.numRecords = numRecords = (self._header['File Length'] - 50) // 4
        fmt = '>%di' % (2 * numRecords)
        size = calcsize(fmt)
        dat = unpack(fmt, self.fileObj.read(size))
        # Stored offsets/lengths are in 16-bit words; convert to bytes.
        self.index = [(dat[i] * 2, dat[i + 1] * 2) for i in range(
            0, len(dat), 2)]

    def _create_shx_file(self):
        """ Creates the SHX file.

        shx_file(filename,'w') --> shx_file

        Arguments:
        filename -- string -- extension is optional, will remove '.dbf','.shx','.shp' and append '.shx'
        mode -- string -- Must be 'w'

        Example:
        >>> import pysal
        >>> shx = shx_file(pysal.examples.get_path('Point'))
        >>> isinstance(shx,shx_file)
        True
        """
        self.__iswritable()
        self.fileObj = open(self.fileName + '.shx', 'wb')
        self.numRecords = 0
        self.index = []
        self.__offset = 100  # length of header
        self.__next_rid = 1  # record IDs start at 1

    def add_record(self, size):
        """ Add a record to the shx index.

        add_record(size int) --> (rec_id int, pos int)

        Arguments:
        size -- int -- the length of the record in bytes NOT including the 8byte record header

        Returns:
        (rec_id, pos) -- the sequential 1-based record ID and the byte
        offset at which the record (header included) will be written.

        Note: the SHX records store (Offset, Length) in 16-bit words.

        Example:
        >>> import pysal,os
        >>> shx = shx_file(pysal.examples.get_path('Point'))
        >>> shx.index
        [(100, 20), (128, 20), (156, 20), (184, 20), (212, 20), (240, 20), (268, 20), (296, 20), (324, 20)]
        >>> shx2 = shx_file('test','w')
        >>> [shx2.add_record(rec[1]) for rec in shx.index]
        [(1, 100), (2, 128), (3, 156), (4, 184), (5, 212), (6, 240), (7, 268), (8, 296), (9, 324)]
        >>> shx2.index == shx.index
        True
        >>> shx2.close(shx._header)
        >>> open('test.shx','rb').read() == open(pysal.examples.get_path('Point.shx'),'rb').read()
        True
        >>> os.remove('test.shx')
        """
        self.__iswritable()
        pos = self.__offset
        rec_id = self.__next_rid
        self.index.append((self.__offset, size))
        self.__offset += size + 8  # the 8byte record Header.
        self.numRecords += 1
        self.__next_rid += 1
        return rec_id, pos

    def close(self, header):
        """In write mode, write the header and the offset/length index
        (both expressed in 16-bit words) before closing the file."""
        if self.__mode == 'w':
            self.__iswritable()
            header['File Length'] = (
                self.numRecords * calcsize('>ii') + 100) // 2
            self.fileObj.seek(0)
            self.fileObj.write(_packDict(HEADERSTRUCT, header))
            fmt = '>%di' % (2 * self.numRecords)
            values = []
            for off, size in self.index:
                values.extend([off // 2, size // 2])
            self.fileObj.write(pack(fmt, *values))
        self.fileObj.close()
+
+
class NullShape:
    """Handler for shapefile Null Shape records (shape type 0).

    A null record carries no geometry -- only the shape-type integer.
    """
    Shape_Type = 0
    # Field layout of the record body.  The original read
    # (('Shape Type', 'i', '<')) -- a single 3-tuple, not a tuple of
    # field tuples -- because the trailing comma was missing, which made
    # STRUCT unusable with _packDict, unlike every other handler.
    STRUCT = (('Shape Type', 'i', '<'),)

    def unpack(self):
        # Null shapes decode to no geometry.
        return None

    def pack(self, x=None):
        # The record body is just the 4-byte little-endian type code 0.
        return pack('<i', 0)
+
+
class Point(object):
    """ Packs and Unpacks a ShapeFile Point Type
    Example:
    >>> import pysal
    >>> shp = shp_file(pysal.examples.get_path('Point.shp'))
    >>> rec = shp.get_shape(0)
    >>> rec == {'Y': -0.25904661905760773, 'X': -0.00068176617532103578, 'Shape Type': 1}
    True
    >>> pos = shp.fileObj.seek(shp._shx.index[0][0]+8) #+8 byte record header
    >>> dat = shp.fileObj.read(shp._shx.index[0][1])
    >>> dat == Point.pack(rec)
    True
    """
    Shape_Type = 1
    String_Type = 'POINT'
    HASZ = False
    HASM = False
    # Field layout of a POINT record body (all little-endian).
    STRUCT = (('Shape Type', 'i', '<'),
              ('X', 'd', '<'),
              ('Y', 'd', '<'))
    # Pre-grouped form of STRUCT: one read/unpack covers the whole
    # 20-byte body (4-byte type code + two doubles).
    USTRUCT = [{'fmt': 'idd', 'order': '<', 'names': ['Shape Type',
                                                      'X', 'Y'], 'size': 20}]

    @classmethod
    def unpack(cls, dat):
        # Decode one record body from the open file-like object dat.
        return _unpackDict(cls.USTRUCT, dat)

    @classmethod
    def pack(cls, record):
        # Serialize the record dict into its binary representation.
        return _packDict(cls.STRUCT, record)
+
+
class PointZ(Point):
    # POINTZ records extend POINT with Z (elevation) and M (measure)
    # values; pack/unpack are inherited and driven by these tables.
    Shape_Type = 11
    String_Type = "POINTZ"
    HASZ = True
    HASM = True
    STRUCT = (('Shape Type', 'i', '<'),
              ('X', 'd', '<'),
              ('Y', 'd', '<'),
              ('Z', 'd', '<'),
              ('M', 'd', '<'))
    # 4-byte type code + four doubles = 36-byte record body.
    USTRUCT = [{'fmt': 'idddd', 'order': '<', 'names': ['Shape Type',
                                                        'X', 'Y', 'Z', 'M'], 'size': 36}]
+
+
class PolyLine:
    """ Packs and Unpacks a ShapeFile PolyLine Type
    Example:
    >>> import pysal
    >>> shp = shp_file(pysal.examples.get_path('Line.shp'))
    >>> rec = shp.get_shape(0)
    >>> rec == {'BBOX Ymax': -0.25832280562918325, 'NumPoints': 3, 'BBOX Ymin': -0.25895877033237352, 'NumParts': 1, 'Vertices': [(-0.0090539248870159517, -0.25832280562918325), (0.0074811573959305822, -0.25895877033237352), (0.0074811573959305822, -0.25895877033237352)], 'BBOX Xmax': 0.0074811573959305822, 'BBOX Xmin': -0.0090539248870159517, 'Shape Type': 3, 'Parts Index': [0]}
    True
    >>> pos = shp.fileObj.seek(shp._shx.index[0][0]+8) #+8 byte record header
    >>> dat = shp.fileObj.read(shp._shx.index[0][1])
    >>> dat == PolyLine.pack(rec)
    True
    """
    HASZ = False
    HASM = False
    String_Type = 'ARC'
    # Fixed-size prefix of an ARC record body (all little-endian).
    STRUCT = (('Shape Type', 'i', '<'),
              ('BBOX Xmin', 'd', '<'),
              ('BBOX Ymin', 'd', '<'),
              ('BBOX Xmax', 'd', '<'),
              ('BBOX Ymax', 'd', '<'),
              ('NumParts', 'i', '<'),
              ('NumPoints', 'i', '<'))
    # Pre-grouped form of STRUCT: one 44-byte read covers the prefix.
    USTRUCT = [{'fmt': 'iddddii', 'order': '<', 'names': ['Shape Type', 'BBOX Xmin', 'BBOX Ymin', 'BBOX Xmax', 'BBOX Ymax', 'NumParts', 'NumPoints'], 'size': 44}]

    @classmethod
    def unpack(cls, dat):
        # Decode the fixed-size prefix, then the variable-length tail
        # (part offsets and interleaved x,y coordinate doubles).
        record = _unpackDict(cls.USTRUCT, dat)
        contentStruct = (('Parts Index', ('i', record['NumParts']), '<'),
                         ('Vertices', ('d', 2 * record['NumPoints']), '<'))
        _unpackDict2(record, contentStruct, dat)
        #record['Vertices'] = [(record['Vertices'][i],record['Vertices'][i+1]) for i in xrange(0,record['NumPoints']*2,2)]
        verts = record['Vertices']
        #Next line is equivalent to: zip(verts[::2],verts[1::2])
        record['Vertices'] = list(izip(
            islice(verts, 0, None, 2), islice(verts, 1, None, 2)))
        if not record['Parts Index']:
            # A record with no parts is treated as one part starting at 0.
            record['Parts Index'] = [0]
        return record
        #partsIndex = list(partsIndex)
        #partsIndex.append(None)
        #parts = [vertices[partsIndex[i]:partsIndex[i+1]] for i in xrange(header['NumParts'])]

    @classmethod
    def pack(cls, record):
        # Serialize the fixed prefix, then the variable-length tail with
        # the vertex pairs flattened back into an interleaved double list.
        rheader = _packDict(cls.STRUCT, record)
        contentStruct = (('Parts Index', '%di' % record['NumParts'], '<'),
                         ('Vertices', '%dd' % (2 * record['NumPoints']), '<'))
        content = {}
        content['Parts Index'] = record['Parts Index']
        verts = []
        [verts.extend(vert) for vert in record['Vertices']]
        content['Vertices'] = verts
        content = _packDict(contentStruct, content)
        return rheader + content
+
+
class PolyLineZ(object):
    """Packs and Unpacks a ShapeFile PolyLineZ Type (shape type 13).

    Same layout as PolyLine, followed by the Z range and array and the
    measure (M) range and array.
    """
    HASZ = True
    HASM = True
    String_Type = 'ARC'
    # Record header layout: (field name, struct format char, byte order).
    STRUCT = (('Shape Type', 'i', '<'),
              ('BBOX Xmin', 'd', '<'),
              ('BBOX Ymin', 'd', '<'),
              ('BBOX Xmax', 'd', '<'),
              ('BBOX Ymax', 'd', '<'),
              ('NumParts', 'i', '<'),
              ('NumPoints', 'i', '<'))
    # Pre-combined unpack description for the same 44-byte header.
    USTRUCT = [{'fmt': 'iddddii', 'order': '<',
                'names': ['Shape Type', 'BBOX Xmin', 'BBOX Ymin',
                          'BBOX Xmax', 'BBOX Ymax', 'NumParts',
                          'NumPoints'], 'size': 44}]

    @classmethod
    def unpack(cls, dat):
        """Read one PolyLineZ record from the file-like object ``dat``
        and return it as a dict keyed by field name."""
        record = _unpackDict(cls.USTRUCT, dat)
        contentStruct = (('Parts Index', ('i', record['NumParts']), '<'),
                         ('Vertices', ('d', 2 * record['NumPoints']), '<'),
                         ('Zmin', ('d', 1), '<'),
                         ('Zmax', ('d', 1), '<'),
                         ('Zarray', ('d', record['NumPoints']), '<'),
                         ('Mmin', ('d', 1), '<'),
                         ('Mmax', ('d', 1), '<'),
                         ('Marray', ('d', record['NumPoints']), '<'),)
        _unpackDict2(record, contentStruct, dat)
        # Pair the flat coordinate list into (x, y) tuples; zip()
        # replaces the Python-2-only itertools.izip for py3 compatibility.
        verts = record['Vertices']
        record['Vertices'] = list(zip(
            islice(verts, 0, None, 2), islice(verts, 1, None, 2)))
        if not record['Parts Index']:
            record['Parts Index'] = [0]
        # The min/max fields unpack as length-1 lists; store plain scalars.
        record['Zmin'] = record['Zmin'][0]
        record['Zmax'] = record['Zmax'][0]
        record['Mmin'] = record['Mmin'][0]
        record['Mmax'] = record['Mmax'][0]
        return record

    @classmethod
    def pack(cls, record):
        """Serialize ``record`` (a dict as produced by unpack) back to
        its on-disk byte representation."""
        rheader = _packDict(cls.STRUCT, record)
        contentStruct = (('Parts Index', '%di' % record['NumParts'], '<'),
                         ('Vertices', '%dd' % (2 * record['NumPoints']), '<'),
                         ('Zmin', 'd', '<'),
                         ('Zmax', 'd', '<'),
                         ('Zarray', '%dd' % (record['NumPoints']), '<'),
                         ('Mmin', 'd', '<'),
                         ('Mmax', 'd', '<'),
                         ('Marray', '%dd' % (record['NumPoints']), '<'))
        # Shallow-copy the record so the caller's dict is not mutated,
        # then replace 'Vertices' with the flattened coordinate list.
        # (The copy already includes 'Parts Index'.)
        content = {}
        content.update(record)
        verts = []
        for vert in record['Vertices']:
            verts.extend(vert)
        content['Vertices'] = verts
        content = _packDict(contentStruct, content)
        return rheader + content
+
+
class Polygon(PolyLine):
    """ Packs and Unpacks a ShapeFile Polygon Type
    Identical to PolyLine except for the type string.

    Example:
    >>> import pysal
    >>> shp = shp_file(pysal.examples.get_path('Polygon.shp'))
    >>> rec = shp.get_shape(1)
    >>> rec == {'BBOX Ymax': -0.3126531125455273, 'NumPoints': 7, 'BBOX Ymin': -0.35957259110238166, 'NumParts': 1, 'Vertices': [(0.05396439570183631, -0.3126531125455273), (0.051473095955454629, -0.35251390848763364), (0.059777428443393454, -0.34254870950210703), (0.063099161438568974, -0.34462479262409174), (0.048981796209073003, -0.35957259110238166), (0.046905713087088297, -0.3126531125455273), (0.05396439570183631, -0.3126531125455273)], 'BBOX Xmax': 0.063099161438568974, 'BBOX Xmin': 0.046905713087088297, 'Shape Type': 5, 'Parts Index': [0]}
    True
    >>> pos = shp.fileObj.seek(shp._shx.index[1][0]+8) #+8 byte record header
    >>> dat = shp.fileObj.read(shp._shx.index[1][1])
    >>> dat == Polygon.pack(rec)
    True
    """
    String_Type = 'POLYGON'
+
+
class MultiPoint:
    """Placeholder for the shapefile MultiPoint type (shape type 8);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No MultiPoint Support at this time.")
+
+
class PolygonZ(PolyLineZ):
    """Packs and Unpacks a ShapeFile PolygonZ Type (shape type 15);
    identical to PolyLineZ except for the type string."""
    String_Type = 'POLYGONZ'
+
+
class MultiPointZ:
    """Placeholder for the shapefile MultiPointZ type (shape type 18);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No MultiPointZ Support at this time.")
+
+
class PointM:
    """Placeholder for the shapefile PointM type (shape type 21);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No PointM Support at this time.")
+
+
class PolyLineM:
    """Placeholder for the shapefile PolyLineM type (shape type 23);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No PolyLineM Support at this time.")
+
+
class PolygonM:
    """Placeholder for the shapefile PolygonM type (shape type 25);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No PolygonM Support at this time.")
+
+
class MultiPointM:
    """Placeholder for the shapefile MultiPointM type (shape type 28);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No MultiPointM Support at this time.")
+
+
class MultiPatch:
    """Placeholder for the shapefile MultiPatch type (shape type 31);
    instantiation always raises NotImplementedError."""
    def __init__(self):
        raise NotImplementedError("No MultiPatch Support at this time.")
+
# Maps both the numeric shape-type codes from the shapefile spec and
# their string names to the class that packs/unpacks that record type.
TYPE_DISPATCH = {0: NullShape, 1: Point, 3: PolyLine, 5: Polygon, 8: MultiPoint, 11: PointZ, 13: PolyLineZ, 15: PolygonZ, 18: MultiPointZ, 21: PointM, 23: PolyLineM, 25: PolygonM, 28: MultiPointM, 31: MultiPatch, 'POINT': Point, 'POINTZ': PointZ, 'POINTM': PointM, 'ARC': PolyLine, 'ARCZ': PolyLineZ, 'ARCM': PolyLineM, 'POLYGON': Polygon, 'POLYGONZ': PolygonZ, 'POLYGONM': PolygonM, 'MULTIPOINT': MultiPoint, 'MULTIPOINTZ': MultiPointZ, 'MULTIPOINTM': MultiPointM, 'MULTIPATCH': MultiPatch}
+
diff --git a/pysal/core/util/tests/test_shapefile.py b/pysal/core/util/tests/test_shapefile.py
new file mode 100644
index 0000000..8959ba1
--- /dev/null
+++ b/pysal/core/util/tests/test_shapefile.py
@@ -0,0 +1,362 @@
+import unittest
+from cStringIO import StringIO
+from pysal.core.util.shapefile import noneMax, noneMin, shp_file, shx_file, NullShape, Point, PolyLine, MultiPoint, PointZ, PolyLineZ, PolygonZ, MultiPointZ, PointM, PolyLineM, PolygonM, MultiPointM, MultiPatch
+import os
+import pysal
+
+
class TestNoneMax(unittest.TestCase):
    """noneMax: None never wins over a real value."""

    def test_none_max(self):
        cases = ((5, 5, None), (1, None, 1), (None, None, None))
        for expected, left, right in cases:
            self.assertEqual(expected, noneMax(left, right))
+
+
class TestNoneMin(unittest.TestCase):
    """noneMin: None never wins over a real value."""

    def test_none_min(self):
        cases = ((5, 5, None), (1, None, 1), (None, None, None))
        for expected, left, right in cases:
            self.assertEqual(expected, noneMin(left, right))
+
+
class test_shp_file(unittest.TestCase):
    """Read/write tests for shapefile.shp_file against the bundled
    example shapefiles (10740, Point, Line, Polygon)."""

    def test___init__(self):
        # Header of the 10740 polygon shapefile, parsed on open.
        shp = shp_file(pysal.examples.get_path('10740.shp'))
        self.assertEqual(shp.header, {'BBOX Xmax': -105.29012, 'BBOX Ymax': 36.219799000000002, 'BBOX Mmax': 0.0, 'BBOX Zmin': 0.0, 'BBOX Mmin': 0.0, 'File Code': 9994, 'BBOX Ymin': 34.259672000000002, 'BBOX Xmin': -107.62651, 'Unused0': 0, 'Unused1': 0, 'Unused2': 0, 'Unused3': 0, 'Unused4': 0, 'Version': 1000, 'BBOX Zmax': 0.0, 'Shape Type': 5, 'File Length': 260534})

    def test___iter__(self):
        shp = shp_file(pysal.examples.get_path('Point.shp'))
        points = [pt for pt in shp]
        expected = [
            {'Y': -0.25904661905760773, 'X': -0.00068176617532103578, 'Shape Type': 1},
            {'Y': -0.25630328607387354, 'X': 0.11697145363360706, 'Shape Type': 1},
            {'Y': -0.33930131004366804, 'X': 0.05043668122270728, 'Shape Type': 1},
            {'Y': -0.41266375545851519, 'X': -0.041266375545851552, 'Shape Type': 1},
            {'Y': -0.44017467248908293, 'X': -0.011462882096069604, 'Shape Type': 1},
            {'Y': -0.46080786026200882, 'X': 0.027510917030567628, 'Shape Type': 1},
            {'Y': -0.45851528384279472, 'X': 0.075655021834060809, 'Shape Type': 1},
            {'Y': -0.43558951965065495, 'X': 0.11233624454148461, 'Shape Type': 1},
            {'Y': -0.40578602620087334, 'X': 0.13984716157205224, 'Shape Type': 1}]
        self.assertEqual(points, expected)

    def test___len__(self):
        shp = shp_file(pysal.examples.get_path('10740.shp'))
        self.assertEqual(len(shp), 195)

    def test_add_shape(self):
        # Write five points, then read them back and compare.
        shp = shp_file('test_point', 'w', 'POINT')
        points = [{'Shape Type': 1, 'X': 0, 'Y': 0},
                  {'Shape Type': 1, 'X': 1, 'Y': 1},
                  {'Shape Type': 1, 'X': 2, 'Y': 2},
                  {'Shape Type': 1, 'X': 3, 'Y': 3},
                  {'Shape Type': 1, 'X': 4, 'Y': 4}]
        for pt in points:
            shp.add_shape(pt)
        shp.close()

        for a, b in zip(points, shp_file('test_point')):
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(a, b)
        os.remove('test_point.shp')
        os.remove('test_point.shx')

    def test_close(self):
        shp = shp_file(pysal.examples.get_path('10740.shp'))
        shp.close()
        self.assertEqual(shp.fileObj.closed, True)

    def test_get_shape(self):
        shp = shp_file(pysal.examples.get_path('Line.shp'))
        expected = {'BBOX Ymax': -0.25832280562918325,
                    'NumPoints': 3,
                    'BBOX Ymin': -0.25895877033237352,
                    'NumParts': 1,
                    'Vertices': [(-0.0090539248870159517, -0.25832280562918325),
                                 (0.0074811573959305822, -0.25895877033237352),
                                 (0.0074811573959305822, -0.25895877033237352)],
                    'BBOX Xmax': 0.0074811573959305822,
                    'BBOX Xmin': -0.0090539248870159517,
                    'Shape Type': 3,
                    'Parts Index': [0]}
        self.assertEqual(expected, shp.get_shape(0))

    def test_next(self):
        shp = shp_file(pysal.examples.get_path('Point.shp'))
        # NOTE(review): this comprehension fully consumes the reader, yet
        # the next() calls below still return the first two records, so
        # iteration apparently rewinds — kept as-is pending confirmation.
        points = [pt for pt in shp]
        expected = {'Y': -0.25904661905760773,
                    'X': -0.00068176617532103578, 'Shape Type': 1}
        self.assertEqual(expected, shp.next())
        expected = {'Y': -0.25630328607387354,
                    'X': 0.11697145363360706, 'Shape Type': 1}
        self.assertEqual(expected, shp.next())

    def test_type(self):
        shp = shp_file(pysal.examples.get_path('Point.shp'))
        self.assertEqual("POINT", shp.type())
        shp = shp_file(pysal.examples.get_path('Polygon.shp'))
        self.assertEqual("POLYGON", shp.type())
        shp = shp_file(pysal.examples.get_path('Line.shp'))
        self.assertEqual("ARC", shp.type())
+
+
class test_shx_file(unittest.TestCase):
    """Tests for the .shx (shape index) reader/writer."""

    def test___init__(self):
        shx = shx_file(pysal.examples.get_path('Point'))
        self.assertTrue(isinstance(shx, shx_file))

    def test_add_record(self):
        shx = shx_file(pysal.examples.get_path('Point'))
        # (offset, content length) pairs for the nine example points.
        expectedIndex = [(100, 20), (128, 20), (156, 20),
                         (184, 20), (212, 20), (240, 20),
                         (268, 20), (296, 20), (324, 20)]
        self.assertEqual(shx.index, expectedIndex)
        # Rebuild the index record-by-record and verify ids and offsets.
        shx2 = shx_file('test', 'w')
        for i, rec in enumerate(shx.index):
            # 'rec_id' avoids shadowing the builtin id().
            rec_id, location = shx2.add_record(rec[1])
            self.assertEqual(rec_id, i + 1)  # record ids are 1-based
            self.assertEqual(location, rec[0])
        self.assertEqual(shx2.index, shx.index)
        shx2.close(shx._header)
        # Context managers close the handles (the originals leaked them).
        with open('test.shx', 'rb') as f:
            new_shx = f.read()
        with open(pysal.examples.get_path('Point.shx'), 'rb') as f:
            expected_shx = f.read()
        self.assertEqual(new_shx, expected_shx)
        os.remove('test.shx')

    def test_close(self):
        shx = shx_file(pysal.examples.get_path('Point'))
        shx.close(None)
        self.assertEqual(shx.fileObj.closed, True)
+
+
class TestNullShape(unittest.TestCase):
    """A null shape packs to four zero bytes and unpacks to None."""

    def test_pack(self):
        self.assertEqual(b"\x00\x00\x00\x00", NullShape().pack())

    def test_unpack(self):
        self.assertEqual(None, NullShape().unpack())
+
+
class TestPoint(unittest.TestCase):
    """Point records round-trip through Point.pack / Point.unpack."""

    # Shape type 1 (int32) followed by two little-endian doubles (5.0, 5.0).
    PACKED = b"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x40\x00\x00\x00\x00\x00\x00\x14\x40"
    RECORD = {"X": 5, "Y": 5, "Shape Type": 1}

    def test_pack(self):
        self.assertEqual(self.PACKED, Point.pack(self.RECORD))

    def test_unpack(self):
        self.assertEqual(self.RECORD, Point.unpack(StringIO(self.PACKED)))
+
+
class TestPolyLine(unittest.TestCase):
    """Pack/unpack tests for PolyLine using a known 3-point, 1-part arc
    (the first record of the Line.shp example)."""

    def test_pack(self):
        record = {'BBOX Ymax': -0.25832280562918325, 'NumPoints': 3, 'BBOX Ymin': -0.25895877033237352, 'NumParts': 1, 'Vertices': [(-0.0090539248870159517, -0.25832280562918325), (0.0074811573959305822, -0.25895877033237352), (0.0074811573959305822, -0.25895877033237352)], 'BBOX Xmax': 0.0074811573959305822, 'BBOX Xmin': -0.0090539248870159517, 'Shape Type': 3, 'Parts Index': [0]}
        # Expected wire bytes: header + bbox + counts + part index array
        # + flattened vertex doubles, all little-endian.
        expected = b"""\x03\x00\x00\x00\xc0\x46\x52\x3a\xdd\x8a\x82\
\xbf\x3d\xc1\x65\xce\xc7\x92\xd0\xbf\x00\xc5\
\xa0\xe5\x8f\xa4\x7e\x3f\x6b\x40\x7f\x60\x5c\
\x88\xd0\xbf\x01\x00\x00\x00\x03\x00\x00\x00\
\x00\x00\x00\x00\xc0\x46\x52\x3a\xdd\x8a\x82\
\xbf\x6b\x40\x7f\x60\x5c\x88\xd0\xbf\x00\xc5\
\xa0\xe5\x8f\xa4\x7e\x3f\x3d\xc1\x65\xce\xc7\
\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4\x7e\x3f\
\x3d\xc1\x65\xce\xc7\x92\xd0\xbf"""
        self.assertEqual(expected, PolyLine.pack(record))

    def test_unpack(self):
        # Same byte string as test_pack, fed back through unpack.
        dat = StringIO(b"""\x03\x00\x00\x00\xc0\x46\x52\x3a\xdd\x8a\x82\
\xbf\x3d\xc1\x65\xce\xc7\x92\xd0\xbf\x00\xc5\
\xa0\xe5\x8f\xa4\x7e\x3f\x6b\x40\x7f\x60\x5c\
\x88\xd0\xbf\x01\x00\x00\x00\x03\x00\x00\x00\
\x00\x00\x00\x00\xc0\x46\x52\x3a\xdd\x8a\x82\
\xbf\x6b\x40\x7f\x60\x5c\x88\xd0\xbf\x00\xc5\
\xa0\xe5\x8f\xa4\x7e\x3f\x3d\xc1\x65\xce\xc7\
\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4\x7e\x3f\
\x3d\xc1\x65\xce\xc7\x92\xd0\xbf""")
        expected = {'BBOX Ymax': -0.25832280562918325, 'NumPoints': 3, 'BBOX Ymin': -0.25895877033237352, 'NumParts': 1, 'Vertices': [(-0.0090539248870159517, -0.25832280562918325), (0.0074811573959305822, -0.25895877033237352), (0.0074811573959305822, -0.25895877033237352)], 'BBOX Xmax': 0.0074811573959305822, 'BBOX Xmin': -0.0090539248870159517, 'Shape Type': 3, 'Parts Index': [0]}
        self.assertEqual(expected, PolyLine.unpack(dat))
+
+
class TestMultiPoint(unittest.TestCase):
    """MultiPoint is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias
        # (removed in Python 3.12).
        self.assertRaises(NotImplementedError, MultiPoint)
+
+
class TestPointZ(unittest.TestCase):
    """PointZ records round-trip through PointZ.pack / PointZ.unpack."""

    # Shape type 11 (int32) followed by four little-endian doubles
    # (X, Y, Z, M), each 5.0.
    PACKED = b"\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x14@"
    RECORD = {"X": 5, "Y": 5, "Z": 5, "M": 5, "Shape Type": 11}

    def test_pack(self):
        self.assertEqual(self.PACKED, PointZ.pack(self.RECORD))

    def test_unpack(self):
        self.assertEqual(self.RECORD, PointZ.unpack(StringIO(self.PACKED)))
+
+
class TestPolyLineZ(unittest.TestCase):
    # NOTE(review): this class is immediately shadowed by the second
    # TestPolyLineZ defined just below, so this test never runs.  If it
    # did run it would fail: PolyLineZ is a fully implemented class
    # whose constructor does not raise NotImplementedError.
    def test___init__(self):
        self.failUnlessRaises(NotImplementedError, PolyLineZ)
+
+
class TestPolyLineZ(unittest.TestCase):
    """Pack/unpack tests for PolyLineZ with Z and M arrays attached."""
    # NOTE(review): the packed byte-string literals below were truncated
    # by the mail archive (the trailing " [...]" is not valid Python);
    # restore them from the upstream pysal 1.9.1 source before running.

    def test_pack(self):
        record = {'BBOX Ymax': -0.25832280562918325, 'NumPoints': 3, 'BBOX Ymin': -0.25895877033237352, 'NumParts': 1, 'Vertices': [(-0.0090539248870159517, -0.25832280562918325), (0.0074811573959305822, -0.25895877033237352), (0.0074811573959305822, -0.25895877033237352)], 'BBOX Xmax': 0.0074811573959305822, 'BBOX Xmin': -0.0090539248870159517, 'Shape Type': 13, 'Parts Index': [0], 'Zmin': 0, 'Zmax': 10, 'Zarray': [0, 5, 10], 'Mmin': 2, 'Mmax': 4, 'Marray': [2, 3, 4]}
        expected = b"""\r\x00\x00\x00\xc0FR:\xdd\x8a\x82\xbf=\xc1e\xce\xc7\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?k@\x7f`\\\x88\xd0\xbf\x01\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\xc0FR:\xdd\x8a\x82\xbfk@\x7f`\\\x88\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?=\xc1e\xce\xc7\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?=\xc1e\xce\xc7\x92\xd0\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00$@\x00\x00\x00\x00\x [...]
        self.assertEqual(expected, PolyLineZ.pack(record))

    def test_unpack(self):
        dat = StringIO(b"""\r\x00\x00\x00\xc0FR:\xdd\x8a\x82\xbf=\xc1e\xce\xc7\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?k@\x7f`\\\x88\xd0\xbf\x01\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\xc0FR:\xdd\x8a\x82\xbfk@\x7f`\\\x88\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?=\xc1e\xce\xc7\x92\xd0\xbf\x00\xc5\xa0\xe5\x8f\xa4~?=\xc1e\xce\xc7\x92\xd0\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00$@\x00\x00\x00\x [...]
        expected = {'BBOX Ymax': -0.25832280562918325, 'NumPoints': 3, 'BBOX Ymin': -0.25895877033237352, 'NumParts': 1, 'Vertices': [(-0.0090539248870159517, -0.25832280562918325), (0.0074811573959305822, -0.25895877033237352), (0.0074811573959305822, -0.25895877033237352)], 'BBOX Xmax': 0.0074811573959305822, 'BBOX Xmin': -0.0090539248870159517, 'Shape Type': 13, 'Parts Index': [0], 'Zmin': 0, 'Zmax': 10, 'Zarray': [0, 5, 10], 'Mmin': 2, 'Mmax': 4, 'Marray': [2, 3, 4]}
        self.assertEqual(expected, PolyLineZ.unpack(dat))
+
+
class TestPolygonZ(unittest.TestCase):
    """PolygonZ records survive a pack/unpack round trip."""

    def test_pack(self):
        # A single-ring triangle (closed, 4 points) with Z and M arrays.
        record = {'Shape Type': 15,
                  'BBOX Xmin': 0.0, 'BBOX Xmax': 10.0,
                  'BBOX Ymin': 0.0, 'BBOX Ymax': 10.0,
                  'NumParts': 1, 'NumPoints': 4,
                  'Parts Index': [0],
                  'Vertices': [(0.0, 0.0),
                               (10.0, 10.0),
                               (10.0, 0.0),
                               (0.0, 0.0)],
                  'Zmin': 0, 'Zmax': 10, 'Zarray': [0, 10, 0, 0],
                  'Mmin': 2, 'Mmax': 4, 'Marray': [2, 4, 2, 2]}
        packed = StringIO(PolygonZ.pack(record))
        self.assertEqual(record, PolygonZ.unpack(packed))
+
+
class TestMultiPointZ(unittest.TestCase):
    """MultiPointZ is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, MultiPointZ)
+
+
class TestPointM(unittest.TestCase):
    """PointM is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, PointM)
+
+
class TestPolyLineM(unittest.TestCase):
    """PolyLineM is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, PolyLineM)
+
+
class TestPolygonM(unittest.TestCase):
    """PolygonM is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, PolygonM)
+
+
class TestMultiPointM(unittest.TestCase):
    """MultiPointM is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, MultiPointM)
+
+
class TestMultiPatch(unittest.TestCase):
    """MultiPatch is an unimplemented stub; constructing one must raise."""

    def test___init__(self):
        # assertRaises replaces the deprecated failUnlessRaises alias.
        self.assertRaises(NotImplementedError, MultiPatch)
+
+
class _TestPoints(unittest.TestCase):
    def test1(self):
        """ Test creating and reading Point Shape Files """
        shp = shp_file('test_point', 'w', 'POINT')
        points = [{'Shape Type': 1, 'X': 0, 'Y': 0}, {'Shape Type': 1, 'X': 1, 'Y': 1}, {'Shape Type': 1, 'X': 2, 'Y': 2}, {'Shape Type': 1, 'X': 3, 'Y': 3}, {'Shape Type': 1, 'X': 4, 'Y': 4}]
        for pt in points:
            shp.add_shape(pt)
        shp.close()

        # Read the file back and compare record-by-record.
        shp = list(shp_file('test_point'))
        for a, b in zip(points, shp):
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(a, b)
        os.remove('test_point.shp')
        os.remove('test_point.shx')
+
+
class _TestPolyLines(unittest.TestCase):
    def test1(self):
        """ Test creating and reading PolyLine Shape Files """
        lines = [[(0, 0), (4, 4)], [(1, 0), (5, 4)], [(2, 0), (6, 4)]]
        shapes = []
        for line in lines:
            # Build one single-part ARC record per line segment.
            x = [v[0] for v in line]
            y = [v[1] for v in line]
            rec = {}
            rec['BBOX Xmin'] = min(x)
            rec['BBOX Ymin'] = min(y)
            rec['BBOX Xmax'] = max(x)
            rec['BBOX Ymax'] = max(y)
            rec['NumPoints'] = len(line)
            rec['NumParts'] = 1
            rec['Vertices'] = line
            rec['Shape Type'] = 3
            rec['Parts Index'] = [0]
            shapes.append(rec)
        shp = shp_file('test_line', 'w', 'ARC')
        for line in shapes:
            shp.add_shape(line)
        shp.close()
        # Read the file back and compare record-by-record.
        shp = list(shp_file('test_line'))
        for a, b in zip(shapes, shp):
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(a, b)
        os.remove('test_line.shp')
        os.remove('test_line.shx')
+
+
class _TestPolygons(unittest.TestCase):
    def test1(self):
        """ Test creating and reading Polygon Shape Files """
        lines = [[(0, 0), (4, 4), (5, 4), (1, 0), (0, 0)],
                 [(1, 0), (5, 4), (6, 4), (2, 0), (1, 0)]]
        shapes = []
        for line in lines:
            # Build one single-ring POLYGON record per closed ring.
            x = [v[0] for v in line]
            y = [v[1] for v in line]
            rec = {}
            rec['BBOX Xmin'] = min(x)
            rec['BBOX Ymin'] = min(y)
            rec['BBOX Xmax'] = max(x)
            rec['BBOX Ymax'] = max(y)
            rec['NumPoints'] = len(line)
            rec['NumParts'] = 1
            rec['Vertices'] = line
            rec['Shape Type'] = 5
            rec['Parts Index'] = [0]
            shapes.append(rec)
        shp = shp_file('test_poly', 'w', 'POLYGON')
        for line in shapes:
            shp.add_shape(line)
        shp.close()
        # Read the file back and compare record-by-record.
        shp = list(shp_file('test_poly'))
        for a, b in zip(shapes, shp):
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(a, b)
        os.remove('test_poly.shp')
        os.remove('test_poly.shx')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/util/tests/test_weight_converter.py b/pysal/core/util/tests/test_weight_converter.py
new file mode 100644
index 0000000..4514ea3
--- /dev/null
+++ b/pysal/core/util/tests/test_weight_converter.py
@@ -0,0 +1,137 @@
+import unittest
+import pysal
+from pysal.core.util.weight_converter import WeightConverter
+from pysal.core.util.weight_converter import weight_convert
+import tempfile
+import os
+import warnings
+
+
class test_WeightConverter(unittest.TestCase):
    """Exercises WeightConverter/weight_convert across every weights
    format PySAL reads: each example file is loaded, written out in
    every other format, re-read, and its observation count checked."""

    def setUp(self):
        self.base_dir = pysal.examples.get_path('')
        # One example file per supported weights format.
        self.test_files = ['arcgis_ohio.dbf', 'arcgis_txt.txt', 'ohio.swm',
                           'wmat.dat', 'wmat.mtx', 'sids2.gal', 'juvenile.gwt',
                           'geobugs_scot', 'stata_full.txt', 'stata_sparse.txt',
                           'spat-sym-us.mat', 'spat-sym-us.wk1']
        # None means the reader infers the format from the extension.
        dataformats = ['arcgis_dbf', 'arcgis_text', None, None, None, None, None,
                       'geobugs_text', 'stata_text', 'stata_text', None, None]
        # Expected number of observations (w.n) per test file.
        ns = [88, 3, 88, 49, 49, 100, 168, 56, 56, 56, 46, 46]
        self.dataformats = dict(zip(self.test_files, dataformats))
        self.ns = dict(zip(self.test_files, ns))
        # (extension, explicit output dataFormat) pairs to write as.
        self.fileformats = [('dbf', 'arcgis_dbf'), ('txt', 'arcgis_text'), ('swm', None),
                            ('dat', None), ('mtx', None), ('gal', None), ('',
                                                                          'geobugs_text'),
                            ('gwt', None), ('txt', 'stata_text'), ('mat', None), ('wk1', None)]

    def test__setW(self):
        # Reading any supported file should populate wc.w with the
        # expected number of observations.
        for f in self.test_files:
            with warnings.catch_warnings(record=True) as warn:
                # note: we are just suppressing the warnings here; individual warnings
                #       are tested in their specific readers
                warnings.simplefilter("always")
                wc = WeightConverter(self.base_dir + f,
                                     dataFormat=self.dataformats[f])
            self.assertEqual(wc.w_set(), True)
            self.assertEqual(wc.w.n, self.ns[f])

    def test_write(self):
        for f in self.test_files:
            with warnings.catch_warnings(record=True) as warn:
                # note: we are just suppressing the warnings here; individual warnings
                #       are tested in their specific readers
                warnings.simplefilter("always")
                wc = WeightConverter(self.base_dir + f,
                                     dataFormat=self.dataformats[f])

            for ext, dataformat in self.fileformats:
                # Skip converting a file to its own format.
                if f.lower().endswith(ext):
                    continue
                temp_f = tempfile.NamedTemporaryFile(
                    suffix='.%s' % ext, dir=self.base_dir)
                temp_fname = temp_f.name
                temp_f.close()

                with warnings.catch_warnings(record=True) as warn:
                    # note: we are just suppressing the warnings here; individual warnings
                    #       are tested in their specific readers
                    warnings.simplefilter("always")
                    # Each output format needs slightly different writer
                    # arguments (id indexing, matrix form, ...).
                    if ext == 'swm':
                        wc.write(temp_fname, useIdIndex=True)
                    elif dataformat is None:
                        wc.write(temp_fname)
                    elif dataformat in ['arcgis_dbf', 'arcgis_text']:
                        wc.write(temp_fname, dataFormat=dataformat,
                                 useIdIndex=True)
                    elif dataformat == 'stata_text':
                        wc.write(temp_fname, dataFormat=dataformat,
                                 matrix_form=True)
                    else:
                        wc.write(temp_fname, dataFormat=dataformat)

                with warnings.catch_warnings(record=True) as warn:
                    # note: we are just suppressing the warnings here; individual warnings
                    #       are tested in their specific readers
                    warnings.simplefilter("always")
                    if dataformat is None:
                        wnew = pysal.open(temp_fname, 'r').read()
                    else:
                        wnew = pysal.open(temp_fname, 'r', dataformat).read()

                # Formats that ignore islands drop those observations
                # (see the WeightConverter class docstring).
                if (ext in ['dbf', 'swm', 'dat', 'wk1', 'gwt'] or dataformat == 'arcgis_text'):
                    self.assertEqual(wnew.n, wc.w.n - len(wc.w.islands))
                else:
                    self.assertEqual(wnew.n, wc.w.n)
                os.remove(temp_fname)

    def test_weight_convert(self):
        # Same matrix of conversions as test_write, but driven through
        # the one-shot weight_convert() convenience function.
        for f in self.test_files:
            inFile = self.base_dir + f
            inDataFormat = self.dataformats[f]
            with warnings.catch_warnings(record=True) as warn:
                # note: we are just suppressing the warnings here; individual warnings
                #       are tested in their specific readers
                warnings.simplefilter("always")
                if inDataFormat is None:
                    in_file = pysal.open(inFile, 'r')
                else:
                    in_file = pysal.open(inFile, 'r', inDataFormat)
                wold = in_file.read()
                in_file.close()

            for ext, dataformat in self.fileformats:
                if f.lower().endswith(ext):
                    continue
                temp_f = tempfile.NamedTemporaryFile(
                    suffix='.%s' % ext, dir=self.base_dir)
                outFile = temp_f.name
                temp_f.close()
                outDataFormat, useIdIndex, matrix_form = dataformat, False, False
                if ext == 'swm' or dataformat in ['arcgis_dbf', 'arcgis_text']:
                    useIdIndex = True
                elif dataformat == 'stata_text':
                    matrix_form = True

                with warnings.catch_warnings(record=True) as warn:
                    # note: we are just suppressing the warnings here; individual warnings
                    #       are tested in their specific readers
                    warnings.simplefilter("always")
                    weight_convert(inFile, outFile, inDataFormat, outDataFormat, useIdIndex, matrix_form)

                with warnings.catch_warnings(record=True) as warn:
                    # note: we are just suppressing the warnings here; individual warnings
                    #       are tested in their specific readers
                    warnings.simplefilter("always")
                    if dataformat is None:
                        wnew = pysal.open(outFile, 'r').read()
                    else:
                        wnew = pysal.open(outFile, 'r', dataformat).read()

                # Island-dropping formats, as in test_write above.
                if (ext in ['dbf', 'swm', 'dat', 'wk1', 'gwt'] or dataformat == 'arcgis_text'):
                    self.assertEqual(wnew.n, wold.n - len(wold.islands))
                else:
                    self.assertEqual(wnew.n, wold.n)
                os.remove(outFile)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/util/tests/test_wkt.py b/pysal/core/util/tests/test_wkt.py
new file mode 100644
index 0000000..6821310
--- /dev/null
+++ b/pysal/core/util/tests/test_wkt.py
@@ -0,0 +1,52 @@
+import unittest
+import pysal
+
+
class test_WKTParser(unittest.TestCase):
    """Tests for pysal.core.util.WKTParser on basic WKT geometries.

    Uses assertTrue/assertEqual/assertRaises in place of the deprecated
    assert_/assertEquals/failUnlessRaises aliases (removed in 3.12).
    """

    def setUp(self):
        # Create some Well-Known Text objects
        self.wktPOINT = 'POINT(6 10)'
        self.wktLINESTRING = 'LINESTRING(3 4,10 50,20 25)'
        self.wktPOLYGON = 'POLYGON((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2))'
        # Geometry types the parser is expected to reject.
        self.unsupported = ['MULTIPOINT(3.5 5.6,4.8 10.5)',
                            'MULTILINESTRING((3 4,10 50,20 25),(-5 -8,-10 -8,-15 -4))',
                            'MULTIPOLYGON(((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2)),((3 3,6 2,6 4,3 3)))',
                            'GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))',
                            'POINT ZM (1 1 5 60)',
                            'POINT M (1 1 80)']
        self.empty = ['POINT EMPTY', 'MULTIPOLYGON EMPTY']
        self.parser = pysal.core.util.WKTParser()

    def test_Point(self):
        pt = self.parser(self.wktPOINT)
        self.assertTrue(issubclass(type(pt), pysal.cg.Point))
        self.assertEqual(pt[:], (6.0, 10.0))

    def test_LineString(self):
        line = self.parser(self.wktLINESTRING)
        self.assertTrue(issubclass(type(line), pysal.cg.Chain))
        parts = [[pt[:] for pt in part] for part in line.parts]
        self.assertEqual(parts, [[(3.0, 4.0), (10.0, 50.0), (20.0, 25.0)]])
        # NOTE(review): exact float equality on a computed length is
        # fragile; assertAlmostEqual would be more robust.
        self.assertEqual(line.len, 73.455384532199886)

    def test_Polygon(self):
        poly = self.parser(self.wktPOLYGON)
        self.assertTrue(issubclass(type(poly), pysal.cg.Polygon))
        parts = [[pt[:] for pt in part] for part in poly.parts]
        self.assertEqual(parts, [[(1.0, 1.0), (1.0, 5.0), (5.0, 5.0),
                                  (5.0, 1.0), (1.0, 1.0)],
                                 [(2.0, 2.0), (2.0, 3.0), (3.0, 3.0),
                                  (3.0, 2.0), (2.0, 2.0)]])
        self.assertEqual(
            poly.centroid, (2.9705882352941178, 2.9705882352941178))
        self.assertEqual(poly.area, 17.0)

    def test_fromWKT(self):
        for wkt in self.unsupported:
            self.assertRaises(
                NotImplementedError, self.parser.fromWKT, wkt)
        # Empty geometries parse to None rather than raising.
        for wkt in self.empty:
            self.assertEqual(self.parser.fromWKT(wkt), None)
        # __call__ is an alias for fromWKT.
        self.assertEqual(self.parser.__call__, self.parser.fromWKT)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/core/util/weight_converter.py b/pysal/core/util/weight_converter.py
new file mode 100644
index 0000000..5f8068d
--- /dev/null
+++ b/pysal/core/util/weight_converter.py
@@ -0,0 +1,243 @@
+import os
+import pysal
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>"
+__all__ = ["weight_convert"]
+
+
class WeightConverter(object):

    """
    Opens and reads a weights file in one format, then
    writes the weights out in other formats.

    WeightConverter can read a weights file in the following formats:
    GAL, GWT, ArcGIS DBF/SWM/Text, DAT, MAT, MTX, WK1, GeoBUGS Text, and STATA Text.
    It can convert the input file into all of the formats listed above, except GWT.
    Currently, PySAL does not support writing a weights object in the GWT format.

    When an input weight file includes multiple islands and
    the format of an output weight file is ArcGIS DBF/SWM/TEXT, DAT, or WK1,
    the number of observations in the new weights file will be
    the original number of observations subtracted by the number of islands.
    This is because ArcGIS DBF/SWM/TEXT, DAT, WK1 formats ignore islands.

    """

    def __init__(self, inputPath, dataFormat=None):
        # Path to the weights file to read.
        self.inputPath = inputPath
        # Explicit input format; when None, pysal.open infers the format
        # from the file extension.
        self.inputDataFormat = dataFormat
        self._setW()

    def _setW(self):
        """
        Reads a weights file and sets a pysal.weights.weights.W object as an attribute

        Examples
        --------

        Create a WeightConvert object

        >>> wc = WeightConverter(pysal.examples.get_path('arcgis_ohio.dbf'),dataFormat='arcgis_dbf')

        Check whether or not the W object is set as an attribute

        >>> wc.w_set()
        True

        Get the number of observations included in the W object

        >>> wc.w.n
        88

        """
        try:
            if self.inputDataFormat:
                f = pysal.open(self.inputPath, 'r', self.inputDataFormat)
            else:
                f = pysal.open(self.inputPath, 'r')
        except Exception:
            # "except Exception" (not a bare "except:") so SystemExit and
            # KeyboardInterrupt are not swallowed.
            raise IOError('A problem occurred while reading the input file.')
        else:
            try:
                self.w = f.read()
            except Exception:
                raise RuntimeError('A problem occurred while creating a weights object.')
            finally:
                f.close()

    def w_set(self):
        """
        Checks if a source w object is set
        """
        return hasattr(self, 'w')

    def write(self, outputPath, dataFormat=None, useIdIndex=True, matrix_form=True):
        """
        Parameters
        ----------
        outputPath: string
                    path to the output weights file
        dataFormat: string
                    'arcgis_dbf' for ArcGIS DBF format
                    'arcgis_text' for ArcGIS Text format
                    'geobugs_text' for GeoBUGS Text format
                    'stata_text' for STATA Text format
        useIdIndex: boolean
                    True or False
                    Applies only to ArcGIS DBF/SWM/Text formats
        matrix_form: boolean
                     True or False
                     STATA Text format

        Returns
        -------
        A weights file is created

        Examples
        --------
        >>> import tempfile, os, pysal

        Create a WeightConverter object

        >>> wc = WeightConverter(pysal.examples.get_path('sids2.gal'))

        Check whether or not the W object is set as an attribute

        >>> wc.w_set()
        True

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.dbf')

        Reassign to new variable

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Write the input gal file in the ArcGIS dbf format

        >>> wc.write(fname, dataFormat='arcgis_dbf', useIdIndex=True)

        Create a new weights object from the converted dbf file

        >>> wnew = pysal.open(fname, 'r', 'arcgis_dbf').read()

        Compare the number of observations in two W objects

        >>> wc.w.n == wnew.n
        True

        Clean up the temporary file

        >>> os.remove(fname)

        """
        ext = os.path.splitext(outputPath)[1]
        ext = ext.replace('.', '')
        #if ext.lower() == 'gwt':
        #    raise TypeError, 'Currently, PySAL does not support writing a weights object into a gwt file.'

        if not self.w_set():
            raise RuntimeError('There is no weights object to write out.')

        try:
            if dataFormat:
                o = pysal.open(outputPath, 'w', dataFormat)
            else:
                o = pysal.open(outputPath, 'w')
        except Exception:
            raise IOError('A problem occurred while creating the output file.')
        else:
            try:
                # ArcGIS writers and SWM files need the id/index mapping,
                # STATA text needs the matrix_form flag; every other writer
                # takes only the weights object.
                if dataFormat in ['arcgis_text', 'arcgis_dbf'] or ext == 'swm':
                    o.write(self.w, useIdIndex=useIdIndex)
                elif dataFormat == 'stata_text':
                    o.write(self.w, matrix_form=matrix_form)
                else:
                    o.write(self.w)
            except Exception:
                raise RuntimeError('A problem occurred while writing out the weights object')
            finally:
                o.close()
+
+
def weight_convert(inPath, outPath, inDataFormat=None, outDataFormat=None, useIdIndex=True, matrix_form=True):
    """
    A utility function for directly converting a given weight
    file into the format specified in outPath

    Parameters
    ----------
    inPath: string
            path to the input weights file
    outPath: string
             path to the output weights file
    inDataFormat: string
                  'arcgis_dbf' for ArcGIS DBF format
                  'arcgis_text' for ArcGIS Text format
                  'geobugs_text' for GeoBUGS Text format
                  'stata_text' for STATA Text format
    outDataFormat: string
                   'arcgis_dbf' for ArcGIS DBF format
                   'arcgis_text' for ArcGIS Text format
                   'geobugs_text' for GeoBUGS Text format
                   'stata_text' for STATA Text format
    useIdIndex: boolean
                True or False
                Applies only to ArcGIS DBF/SWM/Text formats
    matrix_form: boolean
                 True or False
                 STATA Text format

    Returns
    -------
    A weights file is created

    Examples
    --------
    >>> import tempfile, os, pysal

    Create a temporary file for this example

    >>> f = tempfile.NamedTemporaryFile(suffix='.dbf')

    Reassign to new variable

    >>> fname = f.name

    Close the temporary named file

    >>> f.close()

    Create a WeightConverter object

    >>> weight_convert(pysal.examples.get_path('sids2.gal'), fname, outDataFormat='arcgis_dbf', useIdIndex=True)

    Create a new weights object from the gal file

    >>> wold = pysal.open(pysal.examples.get_path('sids2.gal'), 'r').read()

    Create a new weights object from the converted dbf file

    >>> wnew = pysal.open(fname, 'r', 'arcgis_dbf').read()

    Compare the number of observations in two W objects

    >>> wold.n == wnew.n
    True

    Clean up the temporary file

    >>> os.remove(fname)

    """

    # Read with the (optional) input format, then delegate writing to
    # WeightConverter.write with the requested output format/options.
    converter = WeightConverter(inPath, dataFormat=inDataFormat)
    converter.write(outPath, dataFormat=outDataFormat,
                    useIdIndex=useIdIndex, matrix_form=matrix_form)
+
diff --git a/pysal/core/util/wkt.py b/pysal/core/util/wkt.py
new file mode 100644
index 0000000..0907629
--- /dev/null
+++ b/pysal/core/util/wkt.py
@@ -0,0 +1,125 @@
+from pysal import cg
+import re
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ['WKTParser']
+#####################################################################
+## ToDo: Add Well-Known-Binary support...
+##       * WKB spec:
+##  http://webhelp.esri.com/arcgisserver/9.3/dotNet/index.htm#geodatabases/the_ogc_103951442.htm
+##
+##
+#####################################################################
+
+
class WKTParser:
    """ Class to represent OGC WKT, supports reading and writing
        Modified from...
        # URL: http://dev.openlayers.org/releases/OpenLayers-2.7/lib/OpenLayers/Format/WKT.js
        #Reg Ex Strings copied from OpenLayers.Format.WKT

    Example
    -------
    >>> from pysal.core.IOHandlers import wkt
    >>> import pysal

    Create some Well-Known Text objects

    >>> p = 'POLYGON((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2))'
    >>> pt = 'POINT(6 10)'
    >>> l = 'LINESTRING(3 4,10 50,20 25)'

    Instantiate the parser

    >>> wkt = WKTParser()

    Inspect our WKT polygon

    >>> wkt(p).parts
    [[(1.0, 1.0), (1.0, 5.0), (5.0, 5.0), (5.0, 1.0), (1.0, 1.0)], [(2.0, 2.0), (2.0, 3.0), (3.0, 3.0), (3.0, 2.0), (2.0, 2.0)]]
    >>> wkt(p).centroid
    (2.9705882352941178, 2.9705882352941178)
    >>> wkt(p).area
    17.0

    Inspect pt, our WKT point object

    >>> wkt(pt)
    (6.0, 10.0)

    Inspect our WKT linestring

    >>> wkt(l).len
    73.45538453219989
    >>> wkt(l).parts
    [[(3.0, 4.0), (10.0, 50.0), (20.0, 25.0)]]

    Read in WKT from a file

    >>> f = pysal.open(pysal.examples.get_path('stl_hom.wkt'))
    >>> f.mode
    'r'
    >>> f.header
    []

    See local doctest output for the items not tested...

    """
    # Raw strings keep escape sequences such as \s and \( from being
    # interpreted by the string-literal parser before re sees them.
    regExes = {'typeStr': re.compile(r'^\s*([\w\s]+)\s*\(\s*(.*)\s*\)\s*$'),
               'spaces': re.compile(r'\s+'),
               'parenComma': re.compile(r'\)\s*,\s*\('),
               'doubleParenComma': re.compile(r'\)\s*\)\s*,\s*\(\s*\('),  # can't use {2} here
               'trimParens': re.compile(r'^\s*\(?(.*?)\)?\s*$')}

    def __init__(self):
        # Dispatch table from lower-cased WKT type name to parser method.
        self.parsers = p = {}
        p['point'] = self.Point
        p['linestring'] = self.LineString
        p['polygon'] = self.Polygon

    def Point(self, geoStr):
        """Parse the coordinate body of a POINT into a pysal.cg.Point."""
        coords = self.regExes['spaces'].split(geoStr.strip())
        return cg.Point((coords[0], coords[1]))

    def LineString(self, geoStr):
        """Parse the coordinate body of a LINESTRING into a pysal.cg.Chain."""
        points = geoStr.strip().split(',')
        # Build an explicit list (not map) so cg.Chain receives a list on
        # Python 3 as well, where map returns a lazy iterator.
        points = [self.Point(pt) for pt in points]
        return cg.Chain(points)

    def Polygon(self, geoStr):
        """Parse the coordinate body of a POLYGON into a pysal.cg.Polygon."""
        rings = self.regExes['parenComma'].split(geoStr.strip())
        for i, ring in enumerate(rings):
            # Strip the surrounding parentheses, then reuse the LineString
            # parser to turn the ring into a vertex list.
            ring = self.regExes['trimParens'].match(ring).groups()[0]
            ring = self.LineString(ring).vertices
            rings[i] = ring
        return cg.Polygon(rings)

    def fromWKT(self, wkt):
        """Parse a WKT string into a pysal.cg geometry.

        Returns None for strings that do not match the TYPE(...) pattern
        (e.g. 'POINT EMPTY') and raises NotImplementedError for geometry
        types with no registered parser.
        """
        matches = self.regExes['typeStr'].match(wkt)
        if matches:
            geoType, geoStr = matches.groups()
            geoType = geoType.lower().strip()
            try:
                return self.parsers[geoType](geoStr)
            except KeyError:
                raise NotImplementedError("Unsupported WKT Type: %s" % geoType)
        else:
            return None
    __call__ = fromWKT
if __name__ == '__main__':
    # Ad-hoc sample geometries for interactive experimentation when the
    # module is run directly; nothing below asserts anything.
    p = 'POLYGON((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2))'
    pt = 'POINT(6 10)'
    l = 'LINESTRING(3 4,10 50,20 25)'
    # Reuse the three simple geometries above and add the remaining WKT
    # types, including ones the parser does not support yet.
    wktExamples = [pt,
                   l,
                   p,
                   'MULTIPOINT(3.5 5.6,4.8 10.5)',
                   'MULTILINESTRING((3 4,10 50,20 25),(-5 -8,-10 -8,-15 -4))',
                   'MULTIPOLYGON(((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2)),((3 3,6 2,6 4,3 3)))',
                   'GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))',
                   'POINT ZM (1 1 5 60)',
                   'POINT M (1 1 80)',
                   'POINT EMPTY',
                   'MULTIPOLYGON EMPTY']
    wkt = WKTParser()
diff --git a/pysal/esda/__init__.py b/pysal/esda/__init__.py
new file mode 100644
index 0000000..cee82b1
--- /dev/null
+++ b/pysal/esda/__init__.py
@@ -0,0 +1,12 @@
+"""
+:mod:`esda` --- Exploratory Spatial Data Analysis
+=================================================
+
+"""
+import mapclassify
+import moran
+import smoothing
+import getisord
+import geary
+import join_counts
+import gamma
diff --git a/pysal/esda/gamma.py b/pysal/esda/gamma.py
new file mode 100644
index 0000000..810f98b
--- /dev/null
+++ b/pysal/esda/gamma.py
@@ -0,0 +1,204 @@
+"""
+Gamma index for spatial autocorrelation
+
+
+"""
+__author__ = "Luc Anselin <luc.anselin at asu.edu>"
+
+import pysal
+import numpy as np
+
+__all__ = ['Gamma']
+
+PERMUTATIONS = 999
+
+
class Gamma:
    """Gamma index for spatial autocorrelation


    Parameters
    ----------

    y               : array
                      variable measured across n spatial units
    w               : W
                      spatial weights instance
                      can be binary or row-standardized
    operation       : attribute similarity function
                      'c' cross product (default)
                      's' squared difference
                      'a' absolute difference
    standardize     : standardize variables first
                      'no' keep as is (default)
                      'yes' or 'y' standardize to mean zero and variance one
    permutations    : int
                      number of random permutations for calculation of pseudo-p_values

    Attributes
    ----------
    y            : array
                   original variable
    w            : W
                   original w object
    op           : attribute similarity function
    stand        : standardization
    permutations : int
                   number of permutations
    gamma        : float
                   value of Gamma index
    sim_g        : array (if permutations>0)
                   vector of Gamma index values for permuted samples
    p_sim_g      : array (if permutations>0)
                   p-value based on permutations (one-sided)
                   null: spatial randomness
                   alternative: the observed Gamma is more extreme than under randomness
                   implemented as a two-sided test
    mean_g       : average of permuted Gamma values
    min_g        : minimum of permuted Gamma values
    max_g        : maximum of permuted Gamma values


    Examples
    --------

    use same example as for join counts to show similarity

    >>> import numpy as np
    >>> w=pysal.lat2W(4,4)
    >>> y=np.ones(16)
    >>> y[0:8]=0
    >>> np.random.seed(12345)
    >>> g = pysal.Gamma(y,w)
    >>> g.g
    20.0
    >>> g.g_z
    3.1879280354548638
    >>> g.p_sim_g
    0.0030000000000000001
    >>> g.min_g
    0.0
    >>> g.max_g
    20.0
    >>> g.mean_g
    11.093093093093094
    >>> np.random.seed(12345)
    >>> g1 = pysal.Gamma(y,w,operation='s')
    >>> g1.g
    8.0
    >>> g1.g_z
    -3.7057554345954791
    >>> g1.p_sim_g
    0.001
    >>> g1.min_g
    14.0
    >>> g1.max_g
    48.0
    >>> g1.mean_g
    25.623623623623622
    >>> np.random.seed(12345)
    >>> g2 = pysal.Gamma(y,w,operation='a')
    >>> g2.g
    8.0
    >>> g2.g_z
    -3.7057554345954791
    >>> g2.p_sim_g
    0.001
    >>> g2.min_g
    14.0
    >>> g2.max_g
    48.0
    >>> g2.mean_g
    25.623623623623622
    >>> np.random.seed(12345)
    >>> g3 = pysal.Gamma(y,w,standardize='y')
    >>> g3.g
    32.0
    >>> g3.g_z
    3.7057554345954791
    >>> g3.p_sim_g
    0.001
    >>> g3.min_g
    -48.0
    >>> g3.max_g
    20.0
    >>> g3.mean_g
    -3.2472472472472473
    >>> np.random.seed(12345)
    >>> def func(z,i,j):
    ...     q = z[i]*z[j]
    ...     return q
    ...
    >>> g4 = pysal.Gamma(y,w,operation=func)
    >>> g4.g
    20.0
    >>> g4.g_z
    3.1879280354548638
    >>> g4.p_sim_g
    0.0030000000000000001

    """
    def __init__(self, y, w, operation='c', standardize='no', permutations=PERMUTATIONS):
        self.w = w
        self.y = y
        self.op = operation
        self.stand = standardize.lower()
        self.permutations = permutations
        if self.stand in ('yes', 'y'):
            # Center and scale to unit (population) variance before
            # computing the index.
            self.y = (self.y - np.mean(self.y)) / np.std(self.y)
        # Observed Gamma index for the original spatial arrangement.
        self.g = self.__calc(self.y, self.op)

        if permutations:
            # Reference distribution of the index under random relabeling
            # of the observations.
            reference = np.array([self.__calc(np.random.permutation(self.y), self.op)
                                  for _ in xrange(permutations)])
            self.sim_g = reference
            self.min_g = np.min(reference)
            self.mean_g = np.mean(reference)
            self.max_g = np.max(reference)
            self.p_sim_g = self.__pseudop(reference, self.g)
            self.g_z = (self.g - self.mean_g) / np.std(reference)

    def __calc(self, z, op):
        """Return the Gamma index of ``z`` under the similarity measure ``op``."""
        if op == 'c':     # cross-product with the spatial lag of z
            zl = pysal.lag_spatial(self.w, z)
            return (z * zl).sum()
        # The remaining measures are all weighted sums over the neighbor
        # pairs of each observation; only the per-pair similarity differs.
        if op == 's':     # squared difference
            z2 = z ** 2
            pair = lambda i, j: z2[i] - 2.0 * z[i] * z[j] + z2[j]
        elif op == 'a':   # absolute difference
            pair = lambda i, j: abs(z[i] - z[j])
        else:             # user-supplied similarity function op(z, i, j)
            pair = lambda i, j: op(z, i, j)
        totals = np.zeros(z.shape)
        for i, ego in enumerate(self.w.id_order):
            neighbors = self.w.neighbor_offsets[ego]
            wijs = self.w.weights[ego]
            totals[i] = sum(wij * pair(i, j) for j, wij in zip(neighbors, wijs))
        return totals.sum()

    def __pseudop(self, sim, g):
        """Two-sided pseudo p-value of ``g`` against the permutation values."""
        larger = (sim >= g).sum()
        total = self.permutations + 1.
        psim = (larger + 1.) / total
        if psim > 0.5:
            # Flip to the other tail when g sits in the lower half of the
            # reference distribution.
            psim = (self.permutations - larger + 1.) / total
        return psim
+
diff --git a/pysal/esda/geary.py b/pysal/esda/geary.py
new file mode 100644
index 0000000..238c58d
--- /dev/null
+++ b/pysal/esda/geary.py
@@ -0,0 +1,161 @@
+"""
+Geary's C statistic for spatial autocorrelation
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+import numpy as np
+import scipy.stats as stats
+
+__all__ = ['Geary']
+
+
class Geary:
    """
    Global Geary C Autocorrelation statistic

    Parameters
    ----------
    y              : array
    w              : W
                     spatial weights
    transformation : string
                     weights transformation, default is row-standardized ("r").
                     Other options include "B": binary, "D":
                     doubly-standardized, "U": untransformed (general
                     weights), "V": variance-stabilizing.
    permutations   : int
                     number of random permutations for calculation of
                     pseudo-p_values

    Attributes
    ----------
    y              : array
                     original variable
    w              : W
                     spatial weights
    permutations   : int
                     number of permutations
    C              : float
                     value of statistic
    EC             : float
                     expected value of C
    VC_norm        : float
                     variance of C under normality assumption
    VC_rand        : float
                     variance of C under randomization assumption
    z_norm         : float
                     z-statistic for C under normality assumption
    z_rand         : float
                     z-statistic for C under randomization assumption
    p_norm         : float
                     p-value under normality assumption (one-tailed)
    p_rand         : float
                     p-value under randomization assumption (one-tailed)
    sim            : array (if permutations!=0)
                     vector of C values for permutated samples
    p_sim          : float (if permutations!=0)
                     p-value based on permutations (one-tailed)
                     null: spatial randomness
                     alternative: the observed C is extreme
                     it is either extremely high or extremely low
    EC_sim         : float (if permutations!=0)
                     average value of C from permutations
    VC_sim         : float (if permutations!=0)
                     variance of C from permutations
    seC_sim        : float (if permutations!=0)
                     standard deviation of C under permutations.
    z_sim          : float (if permutations!=0)
                     standardized C based on permutations
    p_z_sim        : float (if permutations!=0)
                     p-value based on standard normal approximation from
                     permutations (one-tailed)

    Examples
    --------
    >>> import pysal
    >>> w = pysal.open(pysal.examples.get_path("book.gal")).read()
    >>> f = pysal.open(pysal.examples.get_path("book.txt"))
    >>> y = np.array(f.by_col['y'])
    >>> c = Geary(y,w,permutations=0)
    >>> print round(c.C,7)
    0.3330108
    >>> print round(c.p_norm,7)
    9.2e-05
    >>>
    """
    def __init__(self, y, w, transformation="r", permutations=999):
        self.n = len(y)
        self.y = y
        w.transform = transformation
        self.w = w
        self.permutations = permutations
        # Closed-form moments (variances under the normality and
        # randomization assumptions) are needed before computing z-scores.
        self.__moments()
        # NOTE(review): xn and y2 below do not appear to be used anywhere
        # else in this class — confirm before removing.
        xn = xrange(len(y))
        self.xn = xn
        self.y2 = y * y
        yd = y - y.mean()
        yss = sum(yd * yd)
        # Denominator of Geary's C: 2 * S0 * sum of squared deviations.
        self.den = yss * self.w.s0 * 2.0
        self.C = self.__calc(y)
        # de is the deviation from the expected value E[C] = 1.
        de = self.C - 1.0
        self.EC = 1.0
        self.z_norm = de / self.seC_norm
        self.z_rand = de / self.seC_rand
        # One-tailed p-value in the direction of the observed deviation.
        if de > 0:
            self.p_norm = 1 - stats.norm.cdf(self.z_norm)
            self.p_rand = 1 - stats.norm.cdf(self.z_rand)
        else:
            self.p_norm = stats.norm.cdf(self.z_norm)
            self.p_rand = stats.norm.cdf(self.z_rand)


        if permutations:
            # Reference distribution of C under random relabeling.
            sim = [self.__calc(np.random.permutation(self.y))
                   for i in xrange(permutations)]
            self.sim = sim = np.array(sim)
            above = sim >= self.C
            larger = sum(above)
            # Use the smaller tail so the pseudo p-value is one-tailed in
            # the direction of the observed statistic.
            if (permutations - larger) < larger:
                larger = permutations - larger
            self.p_sim = (larger + 1.) / (permutations + 1.)
            self.EC_sim = sum(sim) / permutations
            self.seC_sim = np.array(sim).std()
            self.VC_sim = self.seC_sim ** 2
            self.z_sim = (self.C - self.EC_sim) / self.seC_sim
            self.p_z_sim = 1 - stats.norm.cdf(np.abs(self.z_sim))

    def __moments(self):
        # Variance of C under the randomization and normality assumptions,
        # expressed in terms of the weights sums S0, S1, S2 and (for the
        # randomization case) the sample kurtosis term k.
        y = self.y
        n = self.n
        w = self.w
        s0 = w.s0
        s1 = w.s1
        s2 = w.s2
        s02 = s0 * s0

        yd = y - y.mean()
        # k is (sum yd^2)^2 / sum yd^4 — the inverse of the usual kurtosis
        # ratio; it enters only the randomization variance.
        k = (1 / (sum(yd ** 4)) * ((sum(yd ** 2)) ** 2))
        vc_rand = (1 / (n * ((n - 2) ** 2) * s02)) * \
            ((((n - 1) * s1) * (n * n - 3 * n + 3 - (n - 1) * k))
             - ((.25 * (n - 1) * s2) * (n * n + 3 * n - 6 -
                (n * n - n + 2) * k))
                + (s02 * (n * n - 3 - ((n - 1) ** 2) * k)))
        vc_norm = ((1 / (2 * (n + 1) * s02)) *
                   ((2 * s1 + s2) * (n - 1) - 4 * s02))

        self.VC_rand = vc_rand
        self.VC_norm = vc_norm
        self.seC_rand = vc_rand ** (0.5)
        self.seC_norm = vc_norm ** (0.5)

    def __calc(self, y):
        # Numerator of C: (n-1) * sum_i sum_j w_ij * (y_i - y_j)^2,
        # with the squared difference expanded as y_i^2 - 2 y_i y_j + y_j^2.
        ys = np.zeros(y.shape)
        y2 = y ** 2
        for i, i0 in enumerate(self.w.id_order):
            neighbors = self.w.neighbor_offsets[i0]
            wijs = self.w.weights[i0]
            z = zip(neighbors, wijs)
            ys[i] = sum([wij * (y2[i] - 2 * y[i] * y[j] + y2[j])
                         for j, wij in z])
        a = (self.n - 1) * sum(ys)
        return a / self.den
+
+
diff --git a/pysal/esda/getisord.py b/pysal/esda/getisord.py
new file mode 100644
index 0000000..30c1076
--- /dev/null
+++ b/pysal/esda/getisord.py
@@ -0,0 +1,394 @@
+"""
+Getis and Ord G statistic for spatial autocorrelation
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu>, Myunghwa Hwang <mhwang4 at gmail.com> "
+__all__ = ['G', 'G_Local']
+
+from pysal.common import np, stats, math
+from pysal.weights.spatial_lag import lag_spatial as slag
+
+PERMUTATIONS = 999
+
+
class G:
    """
    Global G Autocorrelation Statistic

    Parameters
    ----------
    y             : array
    w             : DistanceBand W spatial weights based on distance band
    permutations  : int
                    the number of random permutations for calculating pseudo p_values

    Attributes
    ----------
    y             : array
                    original variable
    w             : DistanceBand W spatial weights based on distance band
    permutations  : int
                    the number of permutations
    G             : float
                    the value of statistic
    EG            : float
                    the expected value of statistic
    VG            : float
                    the variance of G under normality assumption
    z_norm        : float
                    standard normal test statistic
    p_norm        : float
                    p-value under normality assumption (one-sided)
    sim           : array (if permutations > 0)
                    vector of G values for permutated samples
    p_sim         : float
                    p-value based on permutations (one-sided)
                    null: spatial randomness
                    alternative: the observed G is extreme it is either extremely high or extremely low
    EG_sim        : float
                    average value of G from permutations
    VG_sim        : float
                    variance of G from permutations
    seG_sim       : float
                    standard deviation of G under permutations.
    z_sim         : float
                    standardized G based on permutations
    p_z_sim       : float
                    p-value based on standard normal approximation from
                    permutations (one-sided)

    Notes
    -----
    Moments are based on normality assumption.

    Examples
    --------
    >>> from pysal.weights.Distance import DistanceBand
    >>> import numpy
    >>> numpy.random.seed(10)

    Preparing a point data set
    >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]

    Creating a weights object from points
    >>> w = DistanceBand(points,threshold=15)
    >>> w.transform = "B"

    Preparing a variable
    >>> y = numpy.array([2, 3, 3.2, 5, 8, 7])

    Applying Getis and Ord G test
    >>> g = G(y,w)

    Examining the results
    >>> print "%.8f" % g.G
    0.55709779

    >>> print "%.4f" % g.p_norm
    0.1729

    """

    def __init__(self, y, w, permutations=PERMUTATIONS):
        self.n = len(y)
        self.y = y
        # The global G statistic is defined for binary weights.
        w.transform = "B"
        self.w = w
        self.permutations = permutations
        # Compute EG, EG2, and VG (normality assumption) up front.
        self.__moments()
        self.y2 = y * y
        y = y.reshape(len(y), 1)  # Ensure that y is an n by 1 vector, otherwise y*y.T == y*y
        # Denominator: sum over all pairs i != j of y_i * y_j.
        self.den_sum = (y * y.T).sum() - (y * y).sum()
        self.G = self.__calc(self.y)
        self.z_norm = (self.G - self.EG) / math.sqrt(self.VG)
        self.p_norm = 1.0 - stats.norm.cdf(np.abs(self.z_norm))

        if permutations:
            # Reference distribution of G under random relabeling.
            sim = [self.__calc(np.random.permutation(self.y))
                   for i in xrange(permutations)]
            self.sim = sim = np.array(sim)
            above = sim >= self.G
            larger = sum(above)
            # Use the smaller tail so p_sim is one-sided in the direction
            # of the observed statistic.
            if (self.permutations - larger) < larger:
                larger = self.permutations - larger
            self.p_sim = (larger + 1.0) / (permutations + 1.)
            self.EG_sim = sum(sim) / permutations
            self.seG_sim = sim.std()
            self.VG_sim = self.seG_sim ** 2
            self.z_sim = (self.G - self.EG_sim) / self.seG_sim
            self.p_z_sim = 1. - stats.norm.cdf(np.abs(self.z_sim))

    def __moments(self):
        # Expected value and variance of G under the normality assumption,
        # via the b0..b4 coefficients of E[G^2] expressed in the weights
        # sums S0, S1, S2 and the power sums of y.
        y = self.y
        n = self.n
        w = self.w
        n2 = n * n
        s0 = w.s0
        self.EG = s0 / (n * (n - 1))
        s02 = s0 * s0
        s1 = w.s1
        s2 = w.s2
        b0 = (n2 - 3 * n + 3) * s1 - n * s2 + 3 * s02
        b1 = (-1.) * ((n2 - n) * s1 - 2 * n * s2 + 6 * s02)
        b2 = (-1.) * (2 * n * s1 - (n + 3) * s2 + 6 * s02)
        b3 = 4 * (n - 1) * s1 - 2 * (n + 1) * s2 + 8 * s02
        b4 = s1 - s2 + s02
        self.b0 = b0
        self.b1 = b1
        self.b2 = b2
        self.b3 = b3
        self.b4 = b4
        # Power sums of y combine with b0..b4 to give E[G^2].
        y2 = y * y
        y3 = y * y2
        y4 = y2 * y2
        EG2 = (b0 * (sum(
            y2) ** 2) + b1 * sum(y4) + b2 * (sum(y) ** 2) * sum(y2))
        EG2 += b3 * sum(y) * sum(y3) + b4 * (sum(y) ** 4)
        EG2NUM = EG2
        EG2DEN = (((sum(y) ** 2 - sum(y2)) ** 2) * n * (n - 1) * (
            n - 2) * (n - 3))
        self.EG2 = EG2NUM / EG2DEN
        # VG = E[G^2] - E[G]^2.
        self.VG = self.EG2 - self.EG ** 2

    def __calc(self, y):
        # G = sum_i y_i * (W y)_i over the all-pairs denominator.
        yl = slag(self.w, y)
        self.num = y * yl
        return self.num.sum() / self.den_sum
+
+
+class G_Local:
+    """
+    Generalized Local G Autocorrelation Statistic
+
+    Parameters
+    ----------
+    y: array
+       variable
+    w: DistanceBand W
+       weights instance that is based on threshold distance
+       and is assumed to be aligned with y
+    transform: string
+       the type of w, either 'B' (binary) or 'R' (row-standardized)
+    permutations: int
+                  the number of random permutations for calculating
+                  pseudo p values
+    star: boolean
+          whether or not to include focal observation in sums
+          default is False
+
+    Attributes
+    ----------
+    y: array
+       original variable
+    w: DistanceBand W
+       original weights object
+    permutations: int
+                 the number of permutations
+    Gs: array of floats
+        the value of the orginal G statistic in Getis & Ord (1992)
+    EGs: float
+         expected value of Gs under normality assumption
+         the values is scalar, since the expectation is identical
+         across all observations
+    VGs: array of floats
+         variance values of Gs under normality assumption
+    Zs: array of floats
+        standardized Gs
+    p_norm: array of floats
+            p-value under normality assumption (one-sided)
+            for two-sided tests, this value should be multiplied by 2
+    sim: array of arrays of floats (if permutations>0)
+         vector of I values for permutated samples
+    p_sim: array of floats
+           p-value based on permutations (one-sided)
+           null: spatial randomness
+           alternative: the observed G is extreme
+                        it is either extremely high or extremely low
+    EG_sim: array of floats
+            average value of G from permutations
+    VG_sim: array of floats
+            variance of G from permutations
+    seG_sim: array of floats
+             standard deviation of G under permutations.
+    z_sim: array of floats
+           standardized G based on permutations
+    p_z_sim: array of floats
+             p-value based on standard normal approximation from
+             permutations (one-sided)
+
+    Notes
+    -----
+    To compute moments of Gs under normality assumption,
+    PySAL considers w is either binary or row-standardized.
+    For binary weights object, the weight value for self is 1
+    For row-standardized weights object, the weight value for self is
+    1/(the number of its neighbors + 1).
+
+    References
+    ----------
+    Getis, A. and Ord., J.K. (1992) The analysis of spatial association by use of
+    distance statistics. Geographical Analysis, 24(3):189-206
+    Ord, J.K. and Getis, A. (1995) Local spatial autocorrelation statistics:
+    distributional issues and an application. Geographical Analysis, 27(4):286-306
+    Getis, A. and Ord, J. K. (1996) Local spatial statistics: an overview,
+    in Spatial Analysis: Modelling in a GIS Environment, edited by Longley, P.
+    and Batty, M.
+
+    Examples
+    --------
+    >>> from pysal.weights.Distance import DistanceBand
+    >>> import numpy
+    >>> numpy.random.seed(10)
+
+    Preparing a point data set
+
+    >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+
+    Creating a weights object from points
+
+    >>> w = DistanceBand(points,threshold=15)
+
+    Preparing a variable
+
+    >>> y = numpy.array([2, 3, 3.2, 5, 8, 7])
+
+    Applying Getis and Ord local G test using a binary weights object
+    >>> lg = G_Local(y,w,transform='B')
+
+    Examining the results
+    >>> lg.Zs
+    array([-1.0136729 , -0.04361589,  1.31558703, -0.31412676,  1.15373986,
+            1.77833941])
+    >>> lg.p_sim[0]
+    0.10100000000000001
+
+    >>> numpy.random.seed(10)
+
+    Applying Getis and Ord local G* test using a binary weights object
+    >>> lg_star = G_Local(y,w,transform='B',star=True)
+
+    Examining the results
+    >>> lg_star.Zs
+    array([-1.39727626, -0.28917762,  0.65064964, -0.28917762,  1.23452088,
+            2.02424331])
+    >>> lg_star.p_sim[0]
+    0.10100000000000001
+
+    >>> numpy.random.seed(10)
+
+    Applying Getis and Ord local G test using a row-standardized weights object
+    >>> lg = G_Local(y,w,transform='R')
+
+    Examining the results
+    >>> lg.Zs
+    array([-0.62074534, -0.01780611,  1.31558703, -0.12824171,  0.28843496,
+            1.77833941])
+    >>> lg.p_sim[0]
+    0.10100000000000001
+
+    >>> numpy.random.seed(10)
+
+    Applying Getis and Ord local G* test using a row-standardized weights object
+    >>> lg_star = G_Local(y,w,transform='R',star=True)
+
+    Examining the results
+    >>> lg_star.Zs
+    array([-0.62488094, -0.09144599,  0.41150696, -0.09144599,  0.24690418,
+            1.28024388])
+    >>> lg_star.p_sim[0]
+    0.10100000000000001
+
+    """
+    def __init__(self, y, w, transform='R', permutations=PERMUTATIONS, star=False):
+        # See the class docstring for parameter details. `transform` selects
+        # 'B' (binary) or 'R' (row-standardized) weights; `star` switches
+        # between the local G (False) and G* (True) statistics.
+        self.n = len(y)
+        self.y = y
+        self.w = w
+        # remember the caller's transform so calc() can restore it when done
+        self.w_original = w.transform
+        self.w.transform = self.w_transform = transform.lower()
+        self.permutations = permutations
+        self.star = star
+        self.calc()
+        # one-sided p-values under the normality assumption
+        self.p_norm = np.array(
+            [1 - stats.norm.cdf(np.abs(i)) for i in self.Zs])
+        if permutations:
+            self.__crand()
+            sim = np.transpose(self.rGs)  # (permutations, n)
+            above = sim >= self.Gs
+            larger = sum(above)
+            # use the smaller tail so extreme-low G values are also flagged
+            low_extreme = (self.permutations - larger) < larger
+            larger[low_extreme] = self.permutations - larger[low_extreme]
+            self.p_sim = (larger + 1.0) / (permutations + 1)
+            self.sim = sim
+            self.EG_sim = sim.mean()
+            self.seG_sim = sim.std()
+            self.VG_sim = self.seG_sim * self.seG_sim
+            self.z_sim = (self.Gs - self.EG_sim) / self.seG_sim
+            self.p_z_sim = 1 - stats.norm.cdf(np.abs(self.z_sim))
+
+    def __crand(self):
+        # Conditional randomization: for each observation i, draw its
+        # neighbors' values from a permutation of the other n-1 observations
+        # and recompute G_i, `permutations` times.
+        y = self.y
+        rGs = np.zeros((self.n, self.permutations))
+        n_1 = self.n - 1
+        rid = range(n_1)
+        prange = range(self.permutations)
+        # k: one more than the largest neighbor set we ever need to draw
+        k = self.w.max_neighbors + 1
+        # pre-drawn index permutations, one row per replicate
+        rids = np.array([np.random.permutation(rid)[0:k] for i in prange])
+        ids = np.arange(self.w.n)
+        ido = self.w.id_order  # NOTE(review): unused in this method
+        wc = self.__getCardinalities()
+        if self.w_transform == 'r':
+            # row-standardized: each sum is divided by the row count
+            # (+1 for the focal unit when computing G*)
+            den = np.array(wc) + self.star
+        else:
+            den = np.ones(self.w.n)
+        for i in range(self.w.n):
+            idsi = ids[ids != i]  # candidate donors: everyone except i
+            np.random.shuffle(idsi)
+            yi_star = y[i] * self.star  # own value only enters for G*
+            wci = wc[i]
+            rGs[i] = (y[idsi[rids[:, 0:wci]]]).sum(1) + yi_star
+            # denominator excludes y[i] for G (star=False), includes it for G*
+            rGs[i] = (np.array(rGs[i]) / den[i]) / (
+                self.y_sum - (1 - self.star) * y[i])
+        self.rGs = rGs
+
+    def __getCardinalities(self):
+        ido = self.w.id_order
+        self.wc = np.array(
+            [self.w.cardinalities[ido[i]] for i in range(self.n)])
+        return self.wc
+
+    def calc(self):
+        """Compute Gs, EGs, VGs and Zs under the normality assumption."""
+        y = self.y
+        y2 = y * y
+        self.y_sum = y_sum = sum(y)
+        y2_sum = sum(y2)
+
+        if not self.star:
+            # local G: the focal observation is excluded from numerator
+            # (spatial lag) and denominator
+            yl = 1.0 * slag(self.w, y)
+            ydi = y_sum - y  # total of all values except y[i]
+            self.Gs = yl / ydi
+            N = self.n - 1
+            yl_mean = ydi / N
+            s2 = (y2_sum - y2) / N - (yl_mean) ** 2
+        else:
+            # local G*: the focal observation is included in the sums
+            self.w.transform = 'B'
+            yl = 1.0 * slag(self.w, y)
+            yl += y  # self weight is 1 in the binary case
+            if self.w_transform == 'r':
+                # row-standardized: self weight is 1/(number of neighbors + 1)
+                yl = yl / (self.__getCardinalities() + 1.0)
+            self.Gs = yl / y_sum
+            N = self.n
+            yl_mean = y.mean()
+            s2 = y.var()
+
+        EGs_num, VGs_num = 1.0, 1.0
+        if self.w_transform == 'b':
+            # binary weights: moments scale with the row sums W_i
+            W = self.__getCardinalities()
+            # NOTE(review): += mutates the cached self.wc array in place
+            W += self.star
+            EGs_num = W * 1.0
+            VGs_num = (W * (1.0 * N - W)) / (1.0 * N - 1)
+
+        self.EGs = (EGs_num * 1.0) / N
+        self.VGs = (VGs_num) * (1.0 / (N ** 2)) * ((s2 * 1.0) / (yl_mean ** 2))
+        self.Zs = (self.Gs - self.EGs) / np.sqrt(self.VGs)
+
+        # restore the transform the caller's weights object originally had
+        self.w.transform = self.w_original
+
diff --git a/pysal/esda/join_counts.py b/pysal/esda/join_counts.py
new file mode 100644
index 0000000..a16e291
--- /dev/null
+++ b/pysal/esda/join_counts.py
@@ -0,0 +1,144 @@
+"""
+Spatial autocorrelation for binary attributes
+
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu> , Luc Anselin <luc.anselin at asu.edu>"
+
+import pysal
+import numpy as np
+
+__all__ = ['Join_Counts']
+
+PERMUTATIONS = 999
+
+
+class Join_Counts:
+    """Binary Join Counts
+
+
+    Parameters
+    ----------
+
+    y               : array
+                      binary variable measured across n spatial units
+    w               : W
+                      spatial weights instance
+    permutations    : int
+                      number of random permutations for calculation of pseudo-p_values
+
+    Attributes
+    ----------
+    y            : array
+                   original variable
+    w            : W
+                   original w object
+    permutations : int
+                   number of permutations
+    bb           : float
+                   number of black-black joins
+    ww           : float
+                   number of white-white joins
+    bw           : float
+                   number of black-white joins
+    J            : float
+                   number of joins
+    sim_bb       : array (if permutations>0)
+                   vector of bb values for permuted samples
+    p_sim_bb     : array (if permutations>0)
+                   p-value based on permutations (one-sided)
+                   null: spatial randomness
+                   alternative: the observed bb is greater than under randomness
+    mean_bb      : average of permuted bb values
+    min_bb       : minimum of permuted bb values
+    max_bb       : maximum of permuted bb values
+    sim_bw       : array (if permutations>0)
+                   vector of bw values for permuted samples
+    p_sim_bw     : array (if permutations>0)
+                   p-value based on permutations (one-sided)
+                   null: spatial randomness
+                   alternative: the observed bw is greater than under randomness
+    mean_bw      : average of permuted bw values
+    min_bw       : minimum of permuted bw values
+    max_bw       : maximum of permuted bw values
+
+
+    Examples
+    --------
+
+    Replicate example from anselin and rey
+
+    >>> import numpy as np
+    >>> w = pysal.lat2W(4, 4)
+    >>> y = np.ones(16)
+    >>> y[0:8] = 0
+    >>> np.random.seed(12345)
+    >>> jc = pysal.Join_Counts(y, w)
+    >>> jc.bb
+    10.0
+    >>> jc.bw
+    4.0
+    >>> jc.ww
+    10.0
+    >>> jc.J
+    24.0
+    >>> len(jc.sim_bb)
+    999
+    >>> jc.p_sim_bb
+    0.0030000000000000001
+    >>> np.mean(jc.sim_bb)
+    5.5465465465465469
+    >>> np.max(jc.sim_bb)
+    10.0
+    >>> np.min(jc.sim_bb)
+    0.0
+    >>> len(jc.sim_bw)
+    999
+    >>> jc.p_sim_bw
+    1.0
+    >>> np.mean(jc.sim_bw)
+    12.811811811811811
+    >>> np.max(jc.sim_bw)
+    24.0
+    >>> np.min(jc.sim_bw)
+    7.0
+    >>>
+    """
+    def __init__(self, y, w, permutations=PERMUTATIONS):
+        w.transformation = 'b'  # ensure we have binary weights
+        self.w = w
+        self.y = y
+        self.permutations = permutations
+        self.J = w.s0 / 2.
+        self.bb, self.ww, self.bw = self.__calc(self.y)
+
+        if permutations:
+            sim = [self.__calc(np.random.permutation(self.y))
+                   for i in xrange(permutations)]
+            sim_jc = np.array(sim)
+            self.sim_bb = sim_jc[:, 0]
+            self.min_bb = np.min(self.sim_bb)
+            self.mean_bb = np.mean(self.sim_bb)
+            self.max_bb = np.max(self.sim_bb)
+            self.sim_bw = sim_jc[:, 2]
+            self.min_bw = np.min(self.sim_bw)
+            self.mean_bw = np.mean(self.sim_bw)
+            self.max_bw = np.max(self.sim_bw)
+            p_sim_bb = self.__pseudop(self.sim_bb, self.bb)
+            p_sim_bw = self.__pseudop(self.sim_bw, self.bw)
+            self.p_sim_bb = p_sim_bb
+            self.p_sim_bw = p_sim_bw
+
+    def __calc(self, z):
+        zl = pysal.lag_spatial(self.w, z)
+        bb = sum(z * zl) / 2.0
+        zw = 1 - z
+        zl = pysal.lag_spatial(self.w, zw)
+        ww = sum(zw * zl) / 2.0
+        bw = self.J - (bb + ww)
+        return (bb, ww, bw)
+
+    def __pseudop(self, sim, jc):
+        above = sim >= jc
+        larger = sum(above)
+        psim = (larger + 1.) / (self.permutations + 1.)
+        return psim
diff --git a/pysal/esda/mapclassify.py b/pysal/esda/mapclassify.py
new file mode 100644
index 0000000..36e37f7
--- /dev/null
+++ b/pysal/esda/mapclassify.py
@@ -0,0 +1,1867 @@
+"""
+A module of classification schemes for choropleth mapping.
+"""
+__author__ = "Sergio J. Rey"
+
+__all__ = ['Map_Classifier', 'quantile', 'Box_Plot', 'Equal_Interval',
+           'Fisher_Jenks', 'Fisher_Jenks_Sampled', 'Jenks_Caspall',
+           'Jenks_Caspall_Forced', 'Jenks_Caspall_Sampled', 
+           'Max_P_Classifier', 'Maximum_Breaks', 'Natural_Breaks',
+           'Quantiles', 'Percentiles', 'Std_Mean', 'User_Defined',
+           'gadf', 'K_classifiers']
+
+from pysal.common import *
+
+K = 5  # default number of classes in any map scheme with this as an argument
+
+
+def quantile(y, k=4):
+    """
+    Calculates the quantiles for an array
+
+    Parameters
+    ----------
+    y : array (n,1)
+        values to classify
+    k : int
+        number of quantiles
+
+    Returns
+    -------
+    implicit  : array (n,1)
+                quantile values
+
+    Examples
+    --------
+    >>> x = np.arange(1000)
+    >>> quantile(x)
+    array([ 249.75,  499.5 ,  749.25,  999.  ])
+    >>> quantile(x, k = 3)
+    array([ 333.,  666.,  999.])
+    >>>
+
+    Note that if there are enough ties that the quantile values repeat, we
+    collapse to pseudo quantiles in which case the number of classes will be less than k
+
+    >>> x = [1.0] * 100
+    >>> x.extend([3.0] * 40)
+    >>> len(x)
+    140
+    >>> y = np.array(x)
+    >>> quantile(y)
+    array([ 1.,  3.])
+    """
+    w = 100. / k
+    p = np.arange(w, 100 + w, w)
+    if p[-1] > 100.0:
+        p[-1] = 100.0
+    q = np.array([stats.scoreatpercentile(y, pct) for pct in p])
+    return np.unique(q)
+
+
+def binC(y, bins):
+    """
+    Bin categorical/qualitative data
+
+    Parameters
+    ----------
+    y : array (n,q)
+        categorical values
+    bins :  array (k,1)
+        unique values associated with each bin
+
+    Return
+    ------
+    b : array (n,q)
+        bin membership, values between 0 and k-1
+
+    Examples
+    --------
+    >>> np.random.seed(1)
+    >>> x = np.random.randint(2, 8, (10, 3))
+    >>> bins = range(2, 8)
+    >>> x
+    array([[7, 5, 6],
+           [2, 3, 5],
+           [7, 2, 2],
+           [3, 6, 7],
+           [6, 3, 4],
+           [6, 7, 4],
+           [6, 5, 6],
+           [4, 6, 7],
+           [4, 6, 3],
+           [3, 2, 7]])
+    >>> y = binC(x, bins)
+    >>> y
+    array([[5, 3, 4],
+           [0, 1, 3],
+           [5, 0, 0],
+           [1, 4, 5],
+           [4, 1, 2],
+           [4, 5, 2],
+           [4, 3, 4],
+           [2, 4, 5],
+           [2, 4, 1],
+           [1, 0, 5]])
+    >>>
+    """
+
+    # NOTE(review): np.rank was deprecated in favor of np.ndim and is removed
+    # in modern NumPy -- confirm the pinned NumPy version before upgrading.
+    if np.rank(y) == 1:
+        k = 1
+        n = np.shape(y)[0]
+    else:
+        n, k = np.shape(y)
+    b = np.zeros((n, k), dtype='int')
+    for i, bin in enumerate(bins):
+        b[np.nonzero(y == bin)] = i
+
+    # check for non-binned items and print a warning if needed
+    # NOTE(review): values not in any bin keep id 0, which is also the id of
+    # the first bin -- callers only get the printed warning to tell them apart
+    vals = set(y.flatten())
+    for val in vals:
+        if val not in bins:
+            print 'warning: value not in bin: ', val
+            print 'bins: ', bins
+
+    return b
+
+
+def bin(y, bins):
+    """
+    bin interval/ratio data
+
+    Parameters
+    ----------
+    y : array (n,q)
+        values to bin
+    bins : array (k,1)
+        upper bounds of each bin (monotonic)
+
+    Returns
+    -------
+    b : array (n,q)
+        values of values between 0 and k-1
+
+    Examples
+    --------
+    >>> np.random.seed(1)
+    >>> x = np.random.randint(2, 20, (10, 3))
+    >>> bins = [10, 15, 20]
+    >>> b = bin(x, bins)
+    >>> x
+    array([[ 7, 13, 14],
+           [10, 11, 13],
+           [ 7, 17,  2],
+           [18,  3, 14],
+           [ 9, 15,  8],
+           [ 7, 13, 12],
+           [16,  6, 11],
+           [19,  2, 15],
+           [11, 11,  9],
+           [ 3,  2, 19]])
+    >>> b
+    array([[0, 1, 1],
+           [0, 1, 1],
+           [0, 2, 0],
+           [2, 0, 1],
+           [0, 1, 0],
+           [0, 1, 1],
+           [2, 0, 1],
+           [2, 0, 1],
+           [1, 1, 0],
+           [0, 0, 2]])
+    >>>
+    """
+    if np.rank(y) == 1:
+        k = 1
+        n = np.shape(y)[0]
+    else:
+        n, k = np.shape(y)
+    b = np.zeros((n, k), dtype='int')
+    i = len(bins)
+    if type(bins) != list:
+        bins = bins.tolist()
+    binsc = copy.copy(bins)
+    while binsc:
+        i -= 1
+        c = binsc.pop(-1)
+        b[np.nonzero(y <= c)] = i
+    return b
+
+
+def bin1d(x, bins):
+    """
+    place values of a 1-d array into bins and determine counts of values in
+    each bin
+
+    Parameters
+    ----------
+    y : 1-d array
+        values to bin
+    bins : array (k,1)
+        upper bounds of each bin (monotonic)
+
+    Returns
+    -------
+    tuple(binIds,counts)
+
+    binIds: 1-d array of integer bin Ids
+
+    counts: number of elements of x falling in each bin
+
+    Examples
+    --------
+    >>> x = np.arange(100, dtype = 'float')
+    >>> bins = [25, 74, 100]
+    >>> binIds, counts = bin1d(x, bins)
+    >>> binIds
+    array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+           1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+           2, 2, 2, 2, 2, 2, 2, 2])
+    >>> counts
+    array([26, 49, 25])
+    """
+    left = [-sys.maxint]
+    left.extend(bins[0:-1])
+    right = bins
+    cuts = zip(left, right)
+    k = len(bins)
+    binIds = np.zeros(x.shape, dtype='int')
+    while cuts:
+        k -= 1
+        l, r = cuts.pop(-1)
+        binIds += (x > l) * (x <= r) * k
+    counts = np.bincount(binIds)
+    return (binIds, counts)
+
+
+def load_example():
+    """
+    Helper function for doc tests
+
+    Loads the 'calempdensity.csv' example shipped with PySAL and returns the
+    last column of each record as a numpy array.  Seeds numpy's RNG so the
+    doctests that call this are reproducible.
+    """
+    import pysal
+    np.random.seed(10)
+    dat = pysal.open(pysal.examples.get_path('calempdensity.csv'))
+    cal = np.array([record[-1] for record in dat])
+    return cal
+
+
+def natural_breaks(values, k=5, itmax=100):
+    """
+    natural breaks helper function
+
+    Iterative 1-d clustering: seeds k class centers from unique values,
+    then alternates (median-of-class recentering, nearest-center
+    reassignment) until assignments stop changing or itmax is reached.
+
+    Returns the tuple (sids, seeds, diffs, class_ids, solved, it, cuts)
+    where `solved` is False when itmax was hit before convergence and
+    `cuts` holds the maximum value in each class.
+    """
+    values = np.array(values)
+    n = len(values)  # NOTE(review): unused
+    uv = np.unique(values)
+    uvk = len(uv)
+    if  uvk < k:
+        # k cannot exceed the number of distinct values
+        print 'Warning: Not enough unique values in array to form k classes'
+        print "Warning: setting k to %d" % uvk
+        k = uvk
+    # random initial seeds drawn from the unique values
+    sids = np.random.permutation(range(len(uv)))[0:k]
+    seeds = uv[sids]
+    seeds.sort()
+    # NOTE(review): np.matrix is deprecated in modern NumPy; plain arrays
+    # with broadcasting would be the replacement
+    diffs = abs(np.matrix([values - seed for seed in seeds]))
+    c0 = diffs.argmin(axis=0)
+    c0 = np.array(c0)[0]
+    solving = True
+    solved = False
+    rk = range(k)
+    it = 0
+    while solving:
+        # get centroids of clusters
+        seeds = [np.median(values[c0 == c]) for c in rk]
+        seeds.sort()
+        # for each value find closest centroid
+        diffs = abs(np.matrix([values - seed for seed in seeds]))
+        # assign value to that centroid
+        c1 = diffs.argmin(axis=0)
+        c1 = np.array(c1)[0]
+        #compare new classids to previous
+        d = abs(c1 - c0)
+        if d.sum() == 0:
+            solving = False
+            solved = True
+        else:
+            c0 = c1
+        it += 1
+        if it == itmax:
+            solving = False
+    class_ids = c1
+    cuts = [max(values[c1 == c]) for c in rk]
+    return sids, seeds, diffs, class_ids, solved, it, cuts
+
+
+
+def _fisher_jenks_means(values, classes=5, sort=True):
+    """
+    Jenks Optimal (Natural Breaks) algorithm implemented in Python.
+    The original Python code comes from here:
+    http://danieljlewis.org/2010/06/07/jenks-natural-breaks-algorithm-in-python/
+    and is based on a JAVA and Fortran code available here:
+    https://stat.ethz.ch/pipermail/r-sig-geo/2006-March/000811.html
+
+    Returns class breaks such that classes are internally homogeneous while
+    assuring heterogeneity among classes.
+
+    Note: mutates `values` in place when sort=True (the default).
+    """
+
+    if sort:
+        values.sort()
+    # mat1[l][j]: index of the optimal lower class limit for the first l
+    # values partitioned into j classes
+    mat1 = []
+    for i in range(0, len(values) + 1):
+        temp = []
+        for j in range(0, classes + 1):
+            temp.append(0)
+        mat1.append(temp)
+    # mat2[l][j]: minimal within-class variance for that subproblem
+    mat2 = []
+    for i in range(0, len(values) + 1):
+        temp = []
+        for j in range(0, classes + 1):
+            temp.append(0)
+        mat2.append(temp)
+    for i in range(1, classes + 1):
+        mat1[1][i] = 1
+        mat2[1][i] = 0
+        for j in range(2, len(values) + 1):
+            mat2[j][i] = float('inf')
+    v = 0.0
+    # dynamic program: grow the prefix l one value at a time
+    for l in range(2, len(values) + 1):
+        s1 = 0.0  # running sum of the trailing segment
+        s2 = 0.0  # running sum of squares of the trailing segment
+        w = 0.0   # trailing segment length
+        for m in range(1, l + 1):
+            i3 = l - m + 1
+            val = float(values[i3 - 1])
+            s2 += val * val
+            s1 += val
+            w += 1
+            v = s2 - (s1 * s1) / w  # variance of the trailing segment
+            i4 = i3 - 1
+            if i4 != 0:
+                for j in range(2, classes + 1):
+                    # keep this break if it does not worsen the total variance
+                    if mat2[l][j] >= (v + mat2[i4][j - 1]):
+                        mat1[l][j] = i3
+                        mat2[l][j] = v + mat2[i4][j - 1]
+        mat1[l][1] = 1
+        mat2[l][1] = v
+
+    k = len(values)
+
+    # backtrack through mat1 to recover the class bounds
+    kclass = []
+    for i in range(0, classes + 1):
+        kclass.append(0)
+    kclass[classes] = float(values[len(values) - 1])
+    kclass[0] = float(values[0])
+    countNum = classes
+    while countNum >= 2:
+        pivot = mat1[k][countNum]
+        id = int(pivot - 2)
+        kclass[countNum - 1] = values[id]
+        k = int(pivot - 1)
+        countNum -= 1
+    return kclass
+
+
+class Map_Classifier:
+    """
+    Abstract class for all map classifications
+    For an array :math:`y` of :math:`n` values, a map classifier places each value
+    :math:`y_i` into one of :math:`k` mutually exclusive and exhaustive classes.
+    Each classifier defines the classes based on different criteria, but in all
+    cases the following hold for the classifiers in PySAL:
+
+    .. math::
+
+              C_j^l < y_i \le C_j^u \  forall  i \in C_j
+
+    where :math:`C_j` denotes class :math:`j` which has lower bound :math:`C_j^l` and upper bound :math:`C_j^u`.
+
+
+        
+
+    Map Classifiers Supported
+
+    * :class:`~pysal.esda.mapclassify.Box_Plot`
+    * :class:`~pysal.esda.mapclassify.Equal_Interval`
+    * :class:`~pysal.esda.mapclassify.Fisher_Jenks`
+    * :class:`~pysal.esda.mapclassify.Fisher_Jenks_Sampled`
+    * :class:`~pysal.esda.mapclassify.Jenks_Caspall`
+    * :class:`~pysal.esda.mapclassify.Jenks_Caspall_Forced`
+    * :class:`~pysal.esda.mapclassify.Jenks_Caspall_Sampled`
+    * :class:`~pysal.esda.mapclassify.Max_P_Classifier`
+    * :class:`~pysal.esda.mapclassify.Maximum_Breaks`
+    * :class:`~pysal.esda.mapclassify.Natural_Breaks`
+    * :class:`~pysal.esda.mapclassify.Quantiles`
+    * :class:`~pysal.esda.mapclassify.Percentiles`
+    * :class:`~pysal.esda.mapclassify.Std_Mean`
+    * :class:`~pysal.esda.mapclassify.User_Defined`
+
+    Utilities:
+
+    In addition to the classifiers, there are several utility functions that can be used to evaluate the properties of a specific classifier for different parameter values, or for automatic selection of a classifier and number of classes.
+
+    * :func:`~pysal.esda.mapclassify.gadf`
+    * :class:`~pysal.esda.mapclassify.K_classifiers`
+
+    References
+    ----------
+
+    Slocum, T.A., R.B. McMaster, F.C. Kessler and H.H. Howard (2009) *Thematic Cartography and Geovisualization*. Pearson Prentice Hall, Upper Saddle River.
+
+    """
+
+    def __init__(self, y):
+        # Subclasses set self.k (and any scheme parameters) before calling
+        # this; _classify() then relies on the subclass' _set_bins().
+        self.name = 'Map Classifier'
+        if hasattr(y, 'values'):
+            y = y.values # fix for pandas
+        self.y = y
+        self._classify()
+        self._summary()
+
+    def _summary(self):
+        # derived membership lists and goodness-of-fit measures
+        yb = self.yb
+        self.classes = [np.nonzero(yb == c)[0].tolist() for c in range(self.k)]
+        self.tss = self.get_tss()
+        self.adcm = self.get_adcm()
+        self.gadf = self.get_gadf()
+
+    def _classify(self):
+        # subclass chooses the bins; bin1d assigns each y to a bin
+        self._set_bins()
+        self.yb, self.counts = bin1d(self.y, self.bins)
+
+    def __str__(self):
+        st = self._table_string()
+        return st
+
+    def __repr__(self):
+        return self._table_string()
+
+    def get_tss(self):
+        """
+        Total sum of squares around class means
+
+        Returns sum of squares over all class means
+        """
+        tss = 0
+        for class_def in self.classes:
+            if len(class_def) > 0:
+                yc = self.y[class_def]
+                css = yc - yc.mean()
+                css *= css
+                tss += sum(css)
+        return tss
+
+    def _set_bins(self):
+        # abstract: each concrete classifier overrides this to set self.bins
+        pass
+
+    def get_adcm(self):
+        """
+        Absolute deviation around class median (ADCM).
+
+        Calculates the absolute deviations of each observation about its class
+        median as a measure of fit for the classification method.
+
+        Returns sum of ADCM over all classes
+        """
+        adcm = 0
+        for class_def in self.classes:
+            if len(class_def) > 0:
+                yc = self.y[class_def]
+                yc_med = np.median(yc)
+                ycd = np.abs(yc - yc_med)
+                adcm += sum(ycd)
+        return adcm
+
+    def get_gadf(self):
+        """
+        Goodness of absolute deviation of fit
+        """
+        # NOTE(review): adam == 0 (every value equal to the median) would
+        # divide by zero here
+        adam = (np.abs(self.y - np.median(self.y))).sum()
+        gadf = 1 - self.adcm / adam
+        return gadf
+
+    def _table_string(self, width=12, decimal=3):
+        # Build a fixed-width text table of class intervals and counts.
+        # `width` is recomputed from the widest formatted bound below.
+        fmt = ".%df" % decimal
+        fmt = "%" + fmt
+        largest = max([len(fmt % i) for i in self.bins])
+        width = largest
+        fmt = "%d.%df" % (width, decimal)
+        fmt = "%" + fmt
+        k1 = self.k - 1
+        h1 = "Lower"
+        h1 = h1.center(largest)
+        h2 = " "
+        h2 = h2.center(10)
+        h3 = "Upper"
+        h3 = h3.center(largest + 1)
+
+        largest = "%d" % max(self.counts)
+        largest = len(largest) + 15
+        h4 = "Count"
+
+        h4 = h4.rjust(largest)
+        table = []
+        header = h1 + h2 + h3 + h4
+        table.append(header)
+        table.append("=" * len(header))
+
+        rows = []
+        for i, up in enumerate(self.bins):
+            if i == 0:
+                left = " " * width
+                left += "   x[i] <= "
+            else:
+                left = fmt % self.bins[i - 1]
+                left += " < x[i] <= "
+            right = fmt % self.bins[i]
+            row = left + right
+            cnt = "%d" % self.counts[i]
+            cnt = cnt.rjust(largest)
+            row += cnt
+            table.append(row)
+        name = self.name
+        # NOTE(review): `row` is read after the loop; empty self.bins would
+        # raise a NameError here
+        top = name.center(len(row))
+        table.insert(0, top)
+        table.insert(1, " ")
+        table = "\n".join(table)
+        return table
+
+
+class Equal_Interval(Map_Classifier):
+    """
+    Equal Interval Classification
+
+    Parameters
+    ----------
+    y : array (n,1)
+        values to classify
+    k : int
+        number of classes required
+
+    Attributes
+    ----------
+
+    yb      : array (n,1)
+              bin ids for observations,
+              each value is the id of the class the observation belongs to
+              yb[i] = j  for j>=1  if bins[j-1] < y[i] <= bins[j], yb[i] = 0  otherwise
+    bins    : array (k,1)
+              the upper bounds of each class
+    k       : int
+              the number of classes
+    counts  : array (k,1)
+              the number of observations falling in each class
+
+    Examples
+    --------
+    >>> cal = load_example()
+    >>> ei = Equal_Interval(cal, k = 5)
+    >>> ei.k
+    5
+    >>> ei.counts
+    array([57,  0,  0,  0,  1])
+    >>> ei.bins
+    array([  822.394,  1644.658,  2466.922,  3289.186,  4111.45 ])
+    >>>
+
+
+    Notes
+    -----
+    Intervals defined to have equal width:
+
+    .. math::
+
+        bins_j = min(y)+w*(j+1)
+
+    with :math:`w=\\frac{max(y)-min(j)}{k}`
+    """
+
+    def __init__(self, y, k=K):
+        """
+        see class docstring
+
+        """
+
+        self.k = k
+        Map_Classifier.__init__(self, y)
+        self.name = 'Equal Interval'
+
+    def _set_bins(self):
+        y = self.y
+        k = self.k
+        max_y = max(y)
+        min_y = min(y)
+        rg = max_y - min_y
+        width = rg * 1. / k
+        cuts = np.arange(min_y + width, max_y + width, width)
+        if len(cuts) > self.k:  # handle overshooting
+            cuts = cuts[0:k]
+        cuts[-1] = max_y
+        bins = cuts.copy()
+        self.bins = bins
+
+
+class Percentiles(Map_Classifier):
+    """
+    Percentiles Map Classification
+
+    Parameters
+    ----------
+
+    y    : array
+           attribute to classify
+    pct  : array
+           percentiles default=[1,10,50,90,99,100]
+
+    Attributes
+    ----------
+    yb     : array
+             bin ids for observations (numpy array n x 1)
+
+    bins   : array
+             the upper bounds of each class (numpy array k x 1)
+
+    k      : int
+             the number of classes
+
+    counts : int
+             the number of observations falling in each class (numpy array k x 1)
+
+    Examples
+    --------
+    >>> cal = load_example()
+    >>> p = Percentiles(cal)
+    >>> p.bins
+    array([  1.35700000e-01,   5.53000000e-01,   9.36500000e+00,
+             2.13914000e+02,   2.17994800e+03,   4.11145000e+03])
+    >>> p.counts
+    array([ 1,  5, 23, 23,  5,  1])
+    >>> p2 = Percentiles(cal, pct = [50, 100])
+    >>> p2.bins
+    array([    9.365,  4111.45 ])
+    >>> p2.counts
+    array([29, 29])
+    >>> p2.k
+    2
+    """
+
+    def __init__(self, y, pct=[1, 10, 50, 90, 99, 100]):
+        self.pct = pct
+        Map_Classifier.__init__(self, y)
+        self.name = 'Percentiles'
+
+    def _set_bins(self):
+        y = self.y
+        pct = self.pct
+        self.bins = np.array([stats.scoreatpercentile(y, p) for p in pct])
+        self.k = len(self.bins)
+
+
+class Box_Plot(Map_Classifier):
+    """
+    Box_Plot Map Classification
+
+
+    Parameters
+    ----------
+    y     : array
+            attribute to classify
+    hinge : float
+            multiplier for IQR
+
+    Attributes
+    ----------
+    yb : array (n,1)
+        bin ids for observations
+    bins : array (n,1)
+        the upper bounds of each class  (monotonic)
+    k : int
+        the number of classes
+    counts : array (k,1)
+        the number of observations falling in each class
+    low_outlier_ids : array
+        indices of observations that are low outliers
+    high_outlier_ids : array
+        indices of observations that are high outliers
+
+    Notes
+    -----
+
+    The bins are set as follows::
+
+        bins[0] = q[0]-hinge*IQR
+        bins[1] = q[0]
+        bins[2] = q[1]
+        bins[3] = q[2]
+        bins[4] = q[2]+hinge*IQR
+        bins[5] = inf  (see Notes)
+
+    where q is an array of the first three quartiles of y and
+    IQR=q[2]-q[0]
+
+
+    If q[2]+hinge*IQR > max(y) there will only be 5 classes and no high outliers,
+        otherwise, there will be 6 classes and at least one high outlier.
+
+    Examples
+    --------
+    >>> cal = load_example()
+    >>> bp = Box_Plot(cal)
+    >>> bp.bins
+    array([ -5.28762500e+01,   2.56750000e+00,   9.36500000e+00,
+             3.95300000e+01,   9.49737500e+01,   4.11145000e+03])
+    >>> bp.counts
+    array([ 0, 15, 14, 14,  6,  9])
+    >>> bp.high_outlier_ids
+    array([ 0,  6, 18, 29, 33, 36, 37, 40, 42])
+    >>> cal[bp.high_outlier_ids]
+    array([  329.92,   181.27,   370.5 ,   722.85,   192.05,   110.74,
+            4111.45,   317.11,   264.93])
+    >>> bx = Box_Plot(np.arange(100))
+    >>> bx.bins
+    array([ -49.5 ,   24.75,   49.5 ,   74.25,  148.5 ])
+
+    """
+
+    def __init__(self, y, hinge=1.5):
+        """
+        Parameters
+        ----------
+        y : array (n,1)
+            attribute to classify
+        hinge : float
+            multiple of inter-quartile range (default=1.5)
+        """
+        self.hinge = hinge
+        Map_Classifier.__init__(self, y)
+        self.name = 'Box Plot'
+
+    def _set_bins(self):
+        y = self.y
+        # quartiles plus the maximum as the top bound
+        pct = [25, 50, 75, 100]
+        bins = [stats.scoreatpercentile(y, p) for p in pct]
+        iqr = bins[-2] - bins[0]
+        self.iqr = iqr
+        pivot = self.hinge * iqr
+        left_fence = bins[0] - pivot
+        right_fence = bins[-2] + pivot
+        # only keep a separate top class when the upper fence falls inside
+        # the data range (i.e. there is at least one high outlier)
+        if right_fence < bins[-1]:
+            bins.insert(-1, right_fence)
+        else:
+            bins[-1] = right_fence
+        bins.insert(0, left_fence)
+        self.bins = np.array(bins)
+        self.k = len(pct)
+
+    def _classify(self):
+        Map_Classifier._classify(self)
+        self.low_outlier_ids = np.nonzero(self.yb == 0)[0]
+        # bin id 5 only exists in the 6-class case; otherwise this is empty
+        self.high_outlier_ids = np.nonzero(self.yb == 5)[0]
+
+
class Quantiles(Map_Classifier):
    """Quantile Map Classification

    Parameters
    ----------
    y : array (n,1)
        values to classify
    k : int
        number of classes required

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
              each value is the id of the class the observation belongs to
              yb[i] = j  for j>=1  if bins[j-1] < y[i] <= bins[j], yb[i] = 0  otherwise
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------
    >>> cal = load_example()
    >>> q = Quantiles(cal, k = 5)
    >>> q.bins
    array([  1.46400000e+00,   5.79800000e+00,   1.32780000e+01,
             5.46160000e+01,   4.11145000e+03])
    >>> q.counts
    array([12, 11, 12, 11, 12])
    >>>
    """

    def __init__(self, y, k=K):
        # Record the requested class count before the base constructor
        # drives the binning through _set_bins.
        self.k = k
        Map_Classifier.__init__(self, y)
        self.name = 'Quantiles'

    def _set_bins(self):
        # Class upper bounds are simply the k quantiles of y.
        self.bins = quantile(self.y, k=self.k)
+
+
class Std_Mean(Map_Classifier):
    """
    Standard Deviation and Mean Map Classification

    Parameters
    ----------
    y         : array (n,1)
                values to classify
    multiples : array
                the multiples of the standard deviation to add/subtract from
                the sample mean to define the bins, default=[-2,-1,1,2]

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------
    >>> cal = load_example()
    >>> st = Std_Mean(cal)
    >>> st.k
    5
    >>> st.bins
    array([ -967.36235382,  -420.71712519,   672.57333208,  1219.21856072,
            4111.45      ])
    >>> st.counts
    array([ 0,  0, 56,  1,  1])
    >>>
    >>> st3 = Std_Mean(cal, multiples = [-3, -1.5, 1.5, 3])
    >>> st3.bins
    array([-1514.00758246,  -694.03973951,   945.8959464 ,  1765.86378936,
            4111.45      ])
    >>> st3.counts
    array([ 0,  0, 57,  0,  1])
    >>>

    """
    def __init__(self, y, multiples=[-2, -1, 1, 2]):
        # multiples must be stored before the base constructor calls
        # _set_bins, which reads it.
        self.multiples = multiples
        Map_Classifier.__init__(self, y)
        self.name = 'Std_Mean'

    def _set_bins(self):
        # Cut points sit at mean + m * (sample) standard deviation for each
        # requested multiple m.
        sd = self.y.std(ddof=1)
        mu = self.y.mean()
        cuts = [mu + sd * m for m in self.multiples]
        top = self.y.max()
        # Append the data maximum only when it exceeds the outermost cut,
        # so the final bin always covers the data.
        if cuts[-1] < top:
            cuts.append(top)
        self.bins = np.array(cuts)
        self.k = len(cuts)
+
+
class Maximum_Breaks(Map_Classifier):
    """
    Maximum Breaks Map Classification

    Parameters
    ----------
    y  : array (n x 1)
         values to classify

    k  : int
         number of classes required

    Attributes
    ----------
    yb : array (nx1)
         bin ids for observations

    bins : array (kx1)
           the upper bounds of each class

    k    : int
           the number of classes

    counts : array (kx1)
             the number of observations falling in each class (numpy array k x 1)

    Examples
    --------
    >>> cal = load_example()
    >>> mb = Maximum_Breaks(cal, k = 5)
    >>> mb.k
    5
    >>> mb.bins
    array([  146.005,   228.49 ,   546.675,  2417.15 ,  4111.45 ])
    >>> mb.counts
    array([50,  2,  4,  1,  1])
    >>>

    """
    def __init__(self, y, k=K, mindiff=0):
        # k and mindiff must be stored before the base constructor calls
        # _set_bins, which reads both.
        self.k = k
        self.mindiff = mindiff
        Map_Classifier.__init__(self, y)
        self.name = 'Maximum_Breaks'

    def _set_bins(self):
        k = self.k
        ordered = self.y.copy()
        ordered.sort()
        # Gaps between consecutive sorted values.
        gaps = ordered[1:] - ordered[:-1]
        # Unique gap sizes that exceed the minimum difference; keep only
        # the k-1 largest, which define the class boundaries.
        eligible = sp.unique(gaps[np.nonzero(gaps > self.mindiff)])
        top = k - 1
        if len(eligible) > top:
            eligible = eligible[-top:]
        bounds = []
        self.cids = []
        for gap in eligible:
            locations = np.nonzero(gaps == gap)
            for loc in locations:
                # Record the position of the break and place the cut point
                # midway across the gap (first occurrence on ties).
                self.cids.append(loc[0])
                midpoint = ((ordered[loc] + ordered[loc + 1]) / 2.)
                bounds.append(midpoint[0])
        # The data maximum closes the final class.
        bounds.append(ordered[-1])
        bounds.sort()
        self.bins = np.array(bounds)
+
+
class Natural_Breaks(Map_Classifier):
    """
    Natural Breaks Map Classification

    Parameters
    ----------
    y       : array (n,1)
              values to classify
    k       : int
              number of classes required
    initial : int (default=100)
              number of initial solutions to generate

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class

    Examples
    --------
    >>> import numpy as np
    >>> np.random.seed(10)
    >>> cal = load_example()
    >>> nb = Natural_Breaks(cal, k = 5)
    >>> nb.k
    5
    >>> nb.counts
    array([14, 13, 14, 10,  7])
    >>> nb.bins
    array([  1.81000000e+00,   7.60000000e+00,   2.98200000e+01,
             1.81270000e+02,   4.11145000e+03])
    >>> x = np.array([1] * 50)
    >>> x[-1] = 20
    >>> nb = Natural_Breaks(x, k = 5, initial = 0)
    Warning: Not enough unique values in array to form k classes
    Warning: setting k to 2
    >>> nb.bins
    array([ 1, 20])
    >>> nb.counts
    array([49,  1])


    Notes
    -----
    There is a tradeoff here between speed and consistency of the
    classification
    If you want more speed, set initial to a smaller value (0
    would result in the best speed, if you want more consistent classes in
    multiple runs of Natural_Breaks on the same data, set initial to a
    higher value.


    """
    def __init__(self, y, k=K, initial=100):
        # k and initial must be stored before the base constructor calls
        # _set_bins, which reads both.
        self.k = k
        self.initial = initial
        Map_Classifier.__init__(self, y)
        self.name = 'Natural_Breaks'

    def _set_bins(self):
        x = self.y.copy()
        k = self.k
        # Generate one solution, then search over additional random
        # restarts for the partition with the lowest total fit.
        res0 = natural_breaks(x, k)
        fit = res0[2].sum()
        for i in xrange(self.initial):
            res = natural_breaks(x, k)
            fit_i = res[2].sum()
            if fit_i < fit:
                res0 = res
                # Bug fix: track the best fit seen so far. The original
                # never updated `fit`, so later candidates were compared
                # against the *first* solution rather than the best one,
                # allowing a worse solution to overwrite the current best.
                fit = fit_i
        self.bins = np.array(res0[-1])
        # natural_breaks may have reduced k (too few unique values).
        self.k = len(self.bins)
        self.iterations = res0[-2]
+
+
class Fisher_Jenks(Map_Classifier):
    """
    Fisher Jenks optimal classifier - mean based

    Parameters
    ----------
    y : array (n,1)
        values to classify
    k : int
        number of classes required

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------

    >>> cal = load_example()
    >>> fj = Fisher_Jenks(cal)
    >>> fj.adcm
    799.24000000000001
    >>> fj.bins
    array([   75.29,   192.05,   370.5 ,   722.85,  4111.45])
    >>> fj.counts
    array([49,  3,  4,  1,  1])
    >>>
    """

    def __init__(self, y, k=K):
        # The exact optimizer cannot form k classes from fewer than k
        # distinct values.
        if len(np.unique(y)) < k:
            raise ValueError("Fewer unique values than specified classes.")
        self.k = k
        Map_Classifier.__init__(self, y)
        self.name = "Fisher_Jenks"

    def _set_bins(self):
        # The first value returned by _fisher_jenks_means is dropped
        # (presumably the lower bound) -- preserved from the original.
        breaks = _fisher_jenks_means(self.y.copy(), classes=self.k)[1:]
        self.bins = np.array(breaks)
+
+
class Fisher_Jenks_Sampled(Map_Classifier):
    """
    Fisher Jenks optimal classifier - mean based using random sample

    Parameters
    ----------
    y      : array (n,1)
             values to classify
    k      : int
             number of classes required
    pct    : float
             The percentage of n that should form the sample
             If pct is specified such that n*pct > 1000, then 
             pct = 1000./n, unless truncate is False
    truncate : binary (Default True)
             truncate pct in cases where pct * n > 1000.

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class

    Examples
    --------

    (Turned off due to timing being different across hardware)

    """

    def __init__(self, y, k=K, pct=0.10, truncate=True):
        self.k = k
        n = y.size

        # Cap the sample at roughly 1000 observations unless truncation
        # is disabled.
        if truncate and (pct * n > 1000):
            pct = 1000. / n
        ids = np.random.random_integers(0, n - 1, n * pct)
        yr = y[ids]
        # Pin both extremes so the sampled bins span the full range of y.
        yr[-1] = max(y)  # make sure we have the upper bound
        yr[0] = min(y)  # make sure we have the min
        self.original_y = y
        self.pct = pct
        self.yr = yr
        self.yr_n = yr.size
        Map_Classifier.__init__(self, yr)
        # Re-bin the complete vector on the bins derived from the sample.
        self.yb, self.counts = bin1d(y, self.bins)
        self.name = "Fisher_Jenks_Sampled"
        self.y = y
        self._summary()  # summary statistics must reflect the full y

    def _set_bins(self):
        # At this point self.y holds the sample passed to the base class.
        self.bins = Fisher_Jenks(self.y, self.k).bins
+
+
class Jenks_Caspall(Map_Classifier):
    """
    Jenks Caspall  Map Classification

    Parameters
    ----------
    y : array (n,1)
        values to classify
    k : int
        number of classes required

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------
    >>> cal = load_example()
    >>> jc = Jenks_Caspall(cal, k = 5)
    >>> jc.bins
    array([  1.81000000e+00,   7.60000000e+00,   2.98200000e+01,
             1.81270000e+02,   4.11145000e+03])
    >>> jc.counts
    array([14, 13, 14, 10,  7])

    """
    def __init__(self, y, k=K):
        self.k = k
        Map_Classifier.__init__(self, y)
        self.name = "Jenks_Caspall"

    def _set_bins(self):
        x = self.y.copy()
        # Seed the iteration with quantile breaks.
        q = quantile(x, self.k)
        xb, cnts = bin1d(x, q)
        if x.ndim == 1:
            x.shape = (x.size, 1)
        # Class representatives start as the medians of the seed classes.
        q = [np.median(x[xb == i]) for i in np.unique(xb)]
        xb0 = xb.copy()
        it = 0
        rk = range(self.k)
        converged = False
        while not converged:
            # Reassign every observation to its nearest representative.
            xb = abs(x - q).argmin(axis=1)
            if (xb0 == xb).all():
                converged = True
            else:
                xb0 = xb
            it += 1
            # Refresh the representatives from the current assignment.
            q = np.array([np.median(x[xb == i]) for i in rk])
        # Upper bound of each class is its largest member.
        cuts = np.array([max(x[xb == i]) for i in sp.unique(xb)])
        cuts.shape = (len(cuts),)
        self.bins = cuts
        self.iterations = it
+
+
class Jenks_Caspall_Sampled(Map_Classifier):
    """
    Jenks Caspall Map Classification using a random sample

    Parameters
    ----------

    y       : array (n,1)
              values to classify
    k       : int
              number of classes required
    pct     : float
              The percentage of n that should form the sample
              If pct is specified such that n*pct > 1000, then pct = 1000./n

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------

    >>> cal = load_example()
    >>> x = np.random.random(100000)
    >>> jc = Jenks_Caspall(x)
    >>> jcs = Jenks_Caspall_Sampled(x)
    >>> jc.bins
    array([ 0.19770952,  0.39695769,  0.59588617,  0.79716865,  0.99999425])
    >>> jcs.bins
    array([ 0.18877882,  0.39341638,  0.6028286 ,  0.80070925,  0.99999425])
    >>> jc.counts
    array([19804, 20005, 19925, 20178, 20088])
    >>> jcs.counts
    array([18922, 20521, 20980, 19826, 19751])
    >>>

    # not for testing since we get different times on different hardware
    # just included for documentation of likely speed gains
    #>>> t1 = time.time(); jc = Jenks_Caspall(x); t2 = time.time()
    #>>> t1s = time.time(); jcs = Jenks_Caspall_Sampled(x); t2s = time.time()
    #>>> t2 - t1; t2s - t1s
    #1.8292930126190186
    #0.061631917953491211

    Notes
    -----
    This is intended for large n problems. The logic is to apply
    Jenks_Caspall to a random subset of the y space and then bin the
    complete vector y on the bins obtained from the subset. This would
    trade off some "accuracy" for a gain in speed.

    """

    def __init__(self, y, k=K, pct=0.10):
        self.k = k
        n = y.size
        # Cap the sample at roughly 1000 observations.
        if n * pct > 1000:
            pct = 1000. / n
        sample_ids = np.random.random_integers(0, n - 1, n * pct)
        yr = y[sample_ids]
        # NOTE(review): only the maximum is pinned, and at position 0,
        # unlike Fisher_Jenks_Sampled which pins both extremes at the
        # ends -- confirm this asymmetry is intended.
        yr[0] = max(y)  # make sure we have the upper bound
        self.original_y = y
        self.pct = pct
        self.yr = yr
        self.yr_n = yr.size
        Map_Classifier.__init__(self, yr)
        # Re-bin the full vector on the bins derived from the sample.
        self.yb, self.counts = bin1d(y, self.bins)
        self.name = "Jenks_Caspall_Sampled"
        self.y = y
        self._summary()  # summary statistics must reflect the full y

    def _set_bins(self):
        # At this point self.y holds the sample passed to the base class.
        jc = Jenks_Caspall(self.y, self.k)
        self.bins = jc.bins
        self.iterations = jc.iterations
+
+
class Jenks_Caspall_Forced(Map_Classifier):
    """

    Jenks Caspall  Map Classification with forced movements

    Parameters
    ----------
    y : array (n,1)
        values to classify
    k : int
        number of classes required

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------
    >>> cal = load_example()
    >>> jcf = Jenks_Caspall_Forced(cal, k = 5)
    >>> jcf.k
    5
    >>> jcf.bins
    array([[  1.34000000e+00],
           [  5.90000000e+00],
           [  1.67000000e+01],
           [  5.06500000e+01],
           [  4.11145000e+03]])
    >>> jcf.counts
    array([12, 12, 13,  9, 12])
    >>> jcf4 = Jenks_Caspall_Forced(cal, k = 4)
    >>> jcf4.k
    4
    >>> jcf4.bins
    array([[  2.51000000e+00],
           [  8.70000000e+00],
           [  3.66800000e+01],
           [  4.11145000e+03]])
    >>> jcf4.counts
    array([15, 14, 14, 15])
    >>>
    """
    def __init__(self, y, k=K):
        # k must be stored before the base constructor calls _set_bins.
        self.k = k
        Map_Classifier.__init__(self, y)
        self.name = "Jenks_Caspall_Forced"

    def _set_bins(self):
        # Hill-climbing search: start from quantile classes, then force
        # single observations across adjacent class boundaries (upward,
        # then downward) whenever the move lowers the total within-class
        # sum of squares; repeat until no move in either direction helps.
        x = self.y.copy()
        k = self.k
        q = quantile(x, k)
        solving = True
        xb, cnt = bin1d(x, q)
        #class means
        if x.ndim == 1:
            x.shape = (x.size, 1)
        n, tmp = x.shape
        xm = [x[xb == i].mean() for i in np.unique(xb)]
        xb0 = xb.copy()
        q = xm
        # Baseline objective: total within-class sum of squared deviations
        # from the class means under the initial quantile assignment.
        xbar = np.array([xm[xbi] for xbi in xb])
        xbar.shape = (n, 1)
        ss = x - xbar
        ss *= ss
        ss = sum(ss)
        maxk = k - 1
        down_moves = up_moves = 0
        solving = True
        it = 0
        while solving:
            # try upward moves first
            moving_up = True
            while moving_up:
                class_ids = sp.unique(xb)
                # Current class sizes; a class must keep at least one
                # member, so only classes with >1 members donate upward.
                nk = [sum(xb == j) for j in class_ids]
                candidates = nk[:-1]
                i = 0
                up_moves = 0
                while candidates:
                    nki = candidates.pop(0)
                    if nki > 1:
                        # Move the largest member of class i up one class
                        # and accept the move only if it lowers the
                        # objective; restart scanning after an accepted move.
                        ids = np.nonzero(xb == class_ids[i])
                        mover = max(ids[0])
                        tmp = xb.copy()
                        tmp[mover] = xb[mover] + 1
                        tm = [x[tmp == j].mean() for j in sp.unique(tmp)]
                        txbar = np.array([tm[xbi] for xbi in tmp])
                        txbar.shape = (n, 1)
                        tss = x - txbar
                        tss *= tss
                        tss = sum(tss)
                        if tss < ss:
                            xb = tmp
                            ss = tss
                            candidates = []
                            up_moves += 1
                    i += 1
                if not up_moves:
                    moving_up = False
            moving_down = True
            while moving_down:
                class_ids = sp.unique(xb)
                nk = [sum(xb == j) for j in class_ids]
                candidates = nk[1:]
                i = 1
                down_moves = 0
                while candidates:
                    nki = candidates.pop(0)
                    if nki > 1:
                        # Symmetric to the upward pass: move the smallest
                        # member of class i down one class if it helps.
                        ids = np.nonzero(xb == class_ids[i])
                        mover = min(ids[0])
                        mover_class = xb[mover]
                        target_class = mover_class - 1
                        tmp = xb.copy()
                        tmp[mover] = target_class
                        tm = [x[tmp == j].mean() for j in sp.unique(tmp)]
                        txbar = np.array([tm[xbi] for xbi in tmp])
                        txbar.shape = (n, 1)
                        tss = x - txbar
                        tss *= tss
                        tss = sum(tss)
                        if tss < ss:
                            xb = tmp
                            ss = tss
                            candidates = []
                            down_moves += 1
                    i += 1
                if not down_moves:
                    moving_down = False
            # Converged when a full up-pass and down-pass make no moves.
            if not up_moves and not down_moves:
                solving = False
            it += 1
        # Class upper bounds are the largest member of each final class.
        cuts = [max(x[xb == i]) for i in sp.unique(xb)]
        self.bins = np.array(cuts)
        self.iterations = it
+
+
class User_Defined(Map_Classifier):
    """
    User Specified Binning


    Parameters
    ----------
    y    : array (n,1)
           values to classify
    bins : array (k,1)
           upper bounds of classes (have to be monotically increasing)

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class


    Examples
    --------
    >>> cal = load_example()
    >>> bins = [20, max(cal)]
    >>> bins
    [20, 4111.4499999999998]
    >>> ud = User_Defined(cal, bins)
    >>> ud.bins
    array([   20.  ,  4111.45])
    >>> ud.counts
    array([37, 21])
    >>> bins = [20, 30]
    >>> ud = User_Defined(cal, bins)
    >>> ud.bins
    array([   20.  ,    30.  ,  4111.45])
    >>> ud.counts
    array([37,  4, 17])
    >>>


    Notes
    -----
    If upper bound of user bins does not exceed max(y) we append an
    additional bin.

    """

    def __init__(self, y, bins):
        # Work on a copy so the caller's bins sequence is never mutated
        # (the original appended max(y) to the caller's own list). This
        # also generalizes bins to any sequence, not just a list.
        bins = list(bins)
        if bins[-1] < max(y):
            # Ensure the final bin covers the maximum of y.
            bins.append(max(y))
        self.k = len(bins)
        self.bins = np.array(bins)
        self.y = y
        Map_Classifier.__init__(self, y)
        self.name = 'User Defined'

    def _set_bins(self):
        # Bins were supplied by the user in __init__; nothing to compute.
        pass
+
+
class Max_P_Classifier(Map_Classifier):
    """
    Max_P Map Classification

    Based on Max_p regionalization algorithm

    Parameters
    ----------
    y       : array (n,1)
              values to classify
    k       : int
              number of classes required
    initial : int
              number of initial solutions to use prior to swapping

    Attributes
    ----------

    yb      : array (n,1)
              bin ids for observations,
    bins    : array (k,1)
              the upper bounds of each class
    k       : int
              the number of classes
    counts  : array (k,1)
              the number of observations falling in each class

    Examples
    --------
    >>> import pysal
    >>> cal = pysal.esda.mapclassify.load_example()
    >>> mp = pysal.Max_P_Classifier(cal)
    >>> mp.bins
    array([    8.7 ,    16.7 ,    20.47,    66.26,  4111.45])
    >>> mp.counts
    array([29,  8,  1, 10, 10])

    """
    def __init__(self, y, k=K, initial=1000):
        # k and initial must be stored before the base constructor calls
        # _set_bins, which reads both.
        self.k = k
        self.initial = initial
        Map_Classifier.__init__(self, y)
        self.name = "Max_P"

    def _set_bins(self):
        # Stochastic search in two phases: (1) grow `initial` random
        # contiguous partitions of the sorted values from quantile-based
        # seeds and keep the one with the lowest total sum of squares;
        # (2) greedily swap boundary observations between adjacent classes
        # while any swap lowers the objective.
        x = self.y.copy()
        k = self.k
        q = quantile(x, k)
        if x.ndim == 1:
            x.shape = (x.size, 1)
        n, tmp = x.shape
        x.sort(axis=0)
        # find best of initial solutions
        solution = 0
        # Baseline objective: total sum of squares of x as a single class.
        # NOTE(review): if no generated partition scores strictly below
        # this baseline, best_solution is never assigned and the
        # `classes = best_solution` line below raises NameError -- confirm.
        best_tss = x.var() * x.shape[0]
        tss_all = np.zeros((self.initial, 1))
        while solution < self.initial:
            remaining = range(n)
            # One seed index per class: the observation closest to each
            # quantile cut.
            seeds = [np.nonzero(di == min(
                di))[0][0] for di in [np.abs(x - qi) for qi in q]]
            rseeds = np.random.permutation(range(k)).tolist()
            tmp = [remaining.remove(seed) for seed in seeds]
            self.classes = classes = []
            tmp = [classes.append([seed]) for seed in seeds]
            # Grow each class (in random order) by absorbing unassigned
            # neighbors on either side until it can no longer expand.
            while rseeds:
                seed_id = rseeds.pop()
                current = classes[seed_id]
                growing = True
                while growing:
                    current = classes[seed_id]
                    low = current[0]
                    high = current[-1]
                    left = low - 1
                    right = high + 1
                    move_made = False
                    if left in remaining:
                        current.insert(0, left)
                        remaining.remove(left)
                        move_made = True
                    if right in remaining:
                        current.append(right)
                        remaining.remove(right)
                        move_made = True
                    if move_made:
                        classes[seed_id] = current
                    else:
                        growing = False
            tss = _fit(self.y, classes)
            tss_all[solution] = tss
            if tss < best_tss:
                best_solution = classes
                best_it = solution
                best_tss = tss
            solution += 1
        classes = best_solution
        self.best_it = best_it
        self.tss = best_tss
        # a2c maps an observation index to the id of its current class.
        self.a2c = a2c = {}
        self.tss_all = tss_all
        for r, cl in enumerate(classes):
            for a in cl:
                a2c[a] = r
        # Phase 2: boundary swapping between adjacent classes.
        swapping = True
        it = 0
        while swapping:
            rseeds = np.random.permutation(range(k)).tolist()
            total_moves = 0
            while rseeds:
                id = rseeds.pop()
                growing = True
                # NOTE(review): total_moves is reset here inside the class
                # loop and only accumulated after it, so the outer
                # convergence test effectively sees the last class's
                # n_moves -- confirm this is the intended stopping rule.
                total_moves = 0
                while growing:
                    target = classes[id]
                    left = target[0] - 1
                    right = target[-1] + 1
                    n_moves = 0
                    if left in a2c:
                        # Try pulling the left neighbor's largest member
                        # into this class (donor must keep >=1 member).
                        left_class = classes[a2c[left]]
                        if len(left_class) > 1:
                            a = left_class[-1]
                            if self._swap(left_class, target, a):
                                target.insert(0, a)
                                left_class.remove(a)
                                a2c[a] = id
                                n_moves += 1
                    if right in a2c:
                        # Symmetric: pull the right neighbor's smallest
                        # member into this class.
                        right_class = classes[a2c[right]]
                        if len(right_class) > 1:
                            a = right_class[0]
                            if self._swap(right_class, target, a):
                                target.append(a)
                                right_class.remove(a)
                                n_moves += 1
                                a2c[a] = id
                    if not n_moves:
                        growing = False
                total_moves += n_moves
            if not total_moves:
                swapping = False
        xs = self.y.copy()
        xs.sort()
        # Class upper bounds are the largest sorted value in each class.
        self.bins = np.array([xs[cl][-1] for cl in classes])

    def _ss(self, class_def):
        """calculates sum of squares for a class"""
        yc = self.y[class_def]
        css = yc - yc.mean()
        css *= css
        return sum(css)

    def _swap(self, class1, class2, a):
        """evaluate cost of moving a from class1 to class2"""
        # Returns True when moving observation a out of class1 into
        # class2 does not increase the combined sum of squares.
        ss1 = self._ss(class1)
        ss2 = self._ss(class2)
        tss1 = ss1 + ss2
        class1c = copy.copy(class1)
        class2c = copy.copy(class2)
        class1c.remove(a)
        class2c.append(a)
        ss1 = self._ss(class1c)
        ss2 = self._ss(class2c)
        tss2 = ss1 + ss2
        if tss1 < tss2:
            return False
        else:
            return True
+
+
+def _fit(y, classes):
+    """Calculate the total sum of squares for a vector y classified into
+    classes
+
+    Parameters
+    ----------
+    y : array, variable to be classified
+
+    classes : array, integer values denoting class membership
+
+    """
+    tss = 0
+    for class_def in classes:
+        yc = y[class_def]
+        css = yc - yc.mean()
+        css *= css
+        tss += sum(css)
+    return tss
+
# Registry of k-parameterized classifiers evaluated by gadf().
kmethods = {
    "Quantiles": Quantiles,
    "Fisher_Jenks": Fisher_Jenks,
    "Natural_Breaks": Natural_Breaks,
    "Maximum_Breaks": Maximum_Breaks,
}
+
+
def gadf(y, method="Quantiles", maxk=15, pct=0.8):
    """
    Evaluate the Goodness of Absolute Deviation Fit of a Classifier
    Finds the minimum value of k for which gadf>pct

    Parameters
    ----------

    y      : array (nx1)
             values to be classified
    method : string
             Name of classifier ["Quantiles,"Fisher_Jenks","Maximum_Breaks",
             "Natural_Breaks"]
    maxk   : int
             maximum value of k to evaluate
    pct    : float
             The percentage of GADF to exceed

    Returns
    -------

    implicit : tuple
               first value is k, second value is instance of classifier at k,
               third is the pct obtained

    Examples
    --------
    >>> cal = load_example()
    >>> qgadf = gadf(cal)
    >>> qgadf[0]
    15
    >>> qgadf[-1]
    0.37402575909092828

    Quantiles fail to exceed 0.80 before 15 classes. If we lower the bar to
    0.2 we see quintiles as a result

    >>> qgadf2 = gadf(cal, pct = 0.2)
    >>> qgadf2[0]
    5
    >>> qgadf2[-1]
    0.21710231966462412
    >>>

    Notes
    -----

    The GADF is defined as:

        .. math::

            GADF = 1 - \sum_c \sum_{i \in c} |y_i - y_{c,med}|  / \sum_i |y_i - y_{med}|

        where :math:`y_{med}` is the global median and :math:`y_{c,med}` is
        the median for class :math:`c`.

    See Also
    --------
    K_classifiers
    """

    y = np.array(y)
    # Total absolute deviation around the global median (denominator).
    adam = (np.abs(y - np.median(y))).sum()
    classifier = kmethods[method]
    # Walk k upward until the GADF threshold is exceeded; if it never is,
    # the values at maxk are returned.
    for k in range(2, maxk + 1):
        cl = classifier(y, k)
        gadf = 1 - cl.adcm / adam
        if gadf > pct:
            break
    return (k, cl, gadf)
+
+
class K_classifiers:
    """
    Evaluate all k-classifers and pick optimal based on k and GADF

    Parameters
    ----------
    y      : array (nx1)
             values to be classified
    pct    : float
             The percentage of GADF to exceed

    Attributes
    ----------
    best   :  instance of Map_Classifier
              the optimal classifer
    results : dictionary
              keys are classifier names, values are the Map_Classifier instances with the best pct for each classifer

    Examples
    --------

    >>> cal = load_example()
    >>> ks = K_classifiers(cal)
    >>> ks.best.name
    'Fisher_Jenks'
    >>> ks.best.k
    4
    >>> ks.best.gadf
    0.84810327199081048
    >>>

    Notes
    -----
    This can be used to suggest a classification scheme.

    See Also
    --------
    gadf

    """
    def __init__(self, y, pct=0.8):
        results = {}
        best = gadf(y, "Fisher_Jenks", maxk=len(y) - 1, pct=pct)
        # gadf returns (k, classifier, gadf_pct). Bug fix: the original
        # assigned pct0 = best[0] and k0 = best[-1] -- i.e. swapped --
        # so the comparison below compared class counts against GADF
        # percentages.
        k0 = best[0]
        pct0 = best[-1]
        keys = kmethods.keys()
        keys.remove("Fisher_Jenks")
        results["Fisher_Jenks"] = best
        for method in keys:
            results[method] = gadf(y, method, maxk=len(y) - 1, pct=pct)
            k1 = results[method][0]
            pct1 = results[method][-1]
            # Prefer the classifier needing fewer classes; break ties in
            # favor of the higher GADF.
            if (k1 < k0) or (k1 == k0 and pct0 < pct1):
                best = results[method]
                k0 = k1
                pct0 = pct1
        self.results = results
        self.best = best[1]
+
+
def fj(x, k=5):
    """Prototype Fisher-Jenks partitioner built on opt_part.

    Parameters
    ----------
    x : array
        values to partition (a sorted copy is used internally)
    k : int
        target number of groups (currently unused beyond the 2-group case)

    Returns
    -------
    d : dict
        keyed by number of groups; each value is
        [list of split ids, within-group tss values, group sizes]
    """
    y = x.copy()
    y.sort()
    d = {}
    initial = opt_part(y)
    # d has key = number of groups
    # value: list of ids, list of group tss, group size
    split_id = initial[0]
    tss = initial[1:]  # total, left and right within tss
    # Bug fix: the original stored split_id as a one-element list and then
    # computed `split_id - 1`, which raises TypeError (list minus int).
    # A split at index i puts i elements in the left group, so the sizes
    # are [split_id, len(y) - split_id].
    sizes = [split_id, len(y) - split_id]
    d[2] = [[split_id], tss, sizes]
    return d
+
+
def opt_part(x):
    """
    Find optimal bi-partition of x values

    Parameters
    ----------
    x : array
        values to split into two contiguous groups x[:i] and x[i:]

    Returns
    -------
    tuple
        (i, tss, left_tss, right_tss): the split index minimizing the
        total within-group sum of squares, that total, and the left and
        right group contributions

    Raises
    ------
    ValueError
        if x has fewer than two values (no split exists; the original
        code failed here with an UnboundLocalError instead)
    """

    n = len(x)
    if n < 2:
        raise ValueError("opt_part requires at least two values")
    tss = np.inf
    opt_i = -999
    left_min = right_min = np.inf
    for i in range(1, n):
        # Within-group sums of squares for the candidate split at i
        # (population variance times group size). The original printed
        # every index here -- debug output removed.
        left = x[:i].var() * i
        right = x[i:].var() * (n - i)
        tss_i = left + right
        if tss_i < tss:
            opt_i = i
            tss = tss_i
            left_min = left
            right_min = right
    return (opt_i, tss, left_min, right_min)
diff --git a/pysal/esda/mixture_smoothing.py b/pysal/esda/mixture_smoothing.py
new file mode 100644
index 0000000..6d8b70d
--- /dev/null
+++ b/pysal/esda/mixture_smoothing.py
@@ -0,0 +1,311 @@
+"""
+Empirical Bayesian smoother using non-parametric mixture models
+to specify the prior distribution of risks
+
+This module is a python translation of mixlag function
+in CAMAN R package that is originally written by Peter Schlattmann.
+"""
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>, Luc Anselin <luc.anselin at asu.edu>, Serge Rey <srey at asu.edu>"
+
+import numpy as np
+from scipy.stats import poisson
+import math
+__all__ = ['NP_Mixture_Smoother']
+
+
class NP_Mixture_Smoother(object):
    """Empirical Bayesian Rate Smoother Using Mixture Prior Distributions
    It goes through 1) defining an initial set of subpopulations,
    2) VEM algorithm to determine the number of major subpopulations,
    3) EM algorithm, 4) combining similar subpopulations, and 5) estimating
    EB rates from a mixture of prior distributions from subpopulation
    models.

    Parameters
    ----------
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    k           : integer
                  a seed number to specify the number of subpopulations
    acc         : float
                  convergence criterion; VEM and EM loops stop
                  when the increase of log likelihood is less than acc
    numiter     : integer
                  the maximum number of iterations for VEM and EM loops
    limit       : float
                  a parameter to control the limit for combining subpopulation
                  models

    Attributes
    ----------
    e           : same as e in parameters
    b           : same as b in parameters
    n           : integer
                  the number of observations
    w           : float
                  a global weight value, 1 divided by n
    k           : integer
                  the number of subpopulations
    acc         : same as acc in parameters
    numiter     : same as numiter in parameters
    limit       : same as limit in parameters
    p           : array (k, 1)
                  the proportions of individual subpopulations
    t           : array (k, 1)
                  prior risks of individual subpopulations
    r           : array (n, 1)
                  estimated rate values
    category    : array (n, 1)
                  indices of subpopulations to which each observation belongs

    Examples
    --------

    importing pysal, numpy, and NP_Mixture_Smoother

    >>> import pysal
    >>> import numpy as np
    >>> from pysal.esda.mixture_smoothing import NP_Mixture_Smoother

    creating an array including event values

    >>> e = np.array([10, 5, 12, 20])

    creating an array including population-at-risk values

    >>> b = np.array([100, 150, 80, 200])

    applying non-parametric mixture smoothing to e and b

    >>> mixture = NP_Mixture_Smoother(e,b)

    extracting the smoothed rates through the property r of the NP_Mixture_Smoother instance

    >>> mixture.r
    array([ 0.10982278,  0.03445531,  0.11018404,  0.11018604])

    Checking the subpopulations to which each observation belongs

    >>> mixture.category
    array([1, 0, 1, 1])

    computing an initial set of prior distributions for the subpopulations

    >>> mixture.getSeed()
    (array([ 0.5,  0.5]), array([ 0.03333333,  0.15      ]))


    applying the mixture algorithm

    >>> mixture.mixalg()
    {'mix_den': array([ 0.,  0.,  0.,  0.]), 'gradient': array([ 0.]), 'k': 1, 'p': array([ 1.]), 'grid': array([ 11.27659574]), 'accuracy': 1.0}

    estimating empirical Bayesian smoothed rates

    >>> mixture.getRateEstimates()
    (array([ 0.0911574,  0.0911574,  0.0911574,  0.0911574]), array([1, 1, 1, 1]))

    """

    def __init__(self, e, b, k=50, acc=1.E-7, numiter=5000, limit=0.01):
        self.e = e
        self.b = b
        self.n = len(e)
        # global weight: each observation contributes 1/n
        self.w = 1. / self.n
        self.k = k
        self.acc = acc
        self.numiter = numiter
        self.limit = limit
        r = self.mixalg()
        self.p = r['p']
        self.t = r['grid']
        self.r, self.category = self.getRateEstimates()

    def getSeed(self):
        """Return initial subpopulation weights and an evenly spaced risk grid."""
        self.raw_r = self.e * 1.0 / self.b
        r_max, r_min = self.raw_r.max(), self.raw_r.min()
        # Bug fix: the original built the grid with np.arange and a float
        # step, which can produce k + 1 points and later trips a
        # boolean-mask length mismatch in update(); linspace guarantees
        # exactly k evenly spaced points from r_min to r_max.
        grid = np.linspace(r_min, r_max, self.k)
        p = np.ones(self.k) * 1. / self.k
        return p, grid

    def getMixedProb(self, grid):
        """Poisson likelihood of each observation under each grid risk."""
        mix = np.zeros((self.n, self.k))
        for i in range(self.n):
            for j in range(self.k):
                mix[i, j] = poisson.pmf(self.e[i], self.b[i] * grid[j])
        return mix

    def getGradient(self, mix, p):
        """Return the VEM gradient vector and the mixture densities."""
        mix_p = mix * p
        mix_den = mix_p.sum(axis=1)
        obs_id = mix_den > 1.E-13
        for i in range(self.k):
            mix_den_len = len(mix_den)
            # posterior weight only when every density is numerically nonzero
            if (mix_den > 1.E-13).sum() == mix_den_len:
                mix_p[:, i] = (1. / mix_den_len) * mix[:, i] / mix_den
        gradient = []
        for i in range(self.k):
            gradient.append(mix_p[:, i][obs_id].sum())
        return np.array(gradient), mix_den

    def getMaxGradient(self, gradient):
        """Return (value, index) of the largest gradient entry."""
        grad_max = gradient.max()
        grad_max_inx = gradient.argmax()
        if grad_max <= 0:
            return (0, 1)
        return (grad_max, grad_max_inx)

    def getMinGradient(self, gradient, p):
        """Return (value, index) of the smallest gradient among active components."""
        p_fil = p > 1.E-8
        grad_fil = gradient[p_fil]
        grad_min = grad_fil.min()
        grad_min_inx = np.where(p_fil)[0][grad_fil.argmin()]
        if grad_min >= 1.E+7:
            return (1.E+7, 1)
        return (grad_min, grad_min_inx)

    def getStepsize(self, mix_den, ht):
        """Newton line search for the VEM step length, clamped to [0, 1]."""
        w = self.w
        step, oldstep = 0., 0.
        for i in range(50):
            grad1, grad2 = 0., 0.
            for j in range(self.n):
                a = mix_den[j] + step * ht[j]
            # NOTE(review): this update block sits *outside* the ``j`` loop,
            # so only the last observation contributes to grad1/grad2.  This
            # mirrors the original translation of CAMAN's mixlag and is
            # preserved to keep results identical -- confirm against CAMAN.
            if math.fabs(a) > 1.E-7:
                b = ht[j] / a
                grad1 = grad1 + w * b
                grad2 = grad2 - w * b * b
            if math.fabs(grad2) > 1.E-10:
                step = step - grad1 / grad2
            if oldstep > 1.0 and step > oldstep:
                step = 1.
                break
            if grad1 < 1.E-7:
                break
            oldstep = step
        if step > 1.0:
            return 1.0
        return step

    def vem(self, mix, p, grid):
        """VEM loop: shift weight from the worst to the best component."""
        res = {}
        for it in range(self.numiter):
            grad, mix_den = self.getGradient(mix, p)
            grad_max, grad_max_inx = self.getMaxGradient(grad)
            grad_min, grad_min_inx = self.getMinGradient(grad, p)
            ht = (mix[:, grad_max_inx] - mix[:, grad_min_inx]
                  ) * p[grad_min_inx]
            st = self.getStepsize(mix_den, ht)
            xs = st * p[grad_min_inx]
            p[grad_min_inx] = p[grad_min_inx] - xs
            p[grad_max_inx] = p[grad_max_inx] + xs
            if (grad_max - 1.0) < self.acc or it == (self.numiter - 1):
                res = {'k': self.k, 'accuracy': grad_max - 1.0, 'p': p, 'grid': grid, 'gradient': grad, 'mix_den': mix_den}
                break
        return res

    def update(self, p, grid):
        """Drop components whose weight fell below 1e-3 and shrink k."""
        p_inx = p > 1.E-3
        new_p = p[p_inx]
        new_grid = grid[p_inx]
        self.k = len(new_p)
        return new_p, new_grid

    def em(self, nstep, grid, p):
        """EM refinement of component weights and risks for at most nstep steps."""
        l = self.k - 1
        w, n, e, b = self.w, self.n, self.e, self.b
        if self.k == 1:
            # NOTE(review): this computes sum(b)/sum(e), i.e. the *inverse*
            # of the pooled rate; it reproduces the original (and its
            # doctest) but looks semantically suspect -- verify vs. CAMAN.
            s11 = (w * b / np.ones(n)).sum()
            s12 = (w * e / np.ones(n)).sum()
            grid[l] = s11 / s12
            p[l] = 1.
            mix = self.getMixedProb(grid)
            grad, mix_den = self.getGradient(mix, p)
            grad_max, grad_max_inx = self.getMaxGradient(grad)
            return {'accuracy': math.fabs(grad_max - 1), 'k': self.k, 'p': p, 'grid': grid, 'gradient': grad, 'mix_den': mix_den}
        else:
            res = {}
            for counter in range(nstep):
                mix = self.getMixedProb(grid)
                grad, mix_den = self.getGradient(mix, p)
                p = p * grad
                su = p[:-1].sum()
                p[l] = 1. - su
                for j in range(self.k):
                    mix_den_fil = mix_den > 1.E-10
                    # NOTE(review): f_len is the length of the boolean mask
                    # (= n), not of the filtered arrays; shapes only agree
                    # when the mask is all-True -- confirm intended filter
                    f_len = len(mix_den_fil)
                    s11 = (w * e[mix_den_fil] / np.ones(f_len) * mix[mix_den_fil, j] / mix_den[mix_den_fil]).sum()
                    s12 = (w * b[mix_den_fil] * (mix[mix_den_fil, j] / np.ones(f_len)) / mix_den[mix_den_fil]).sum()
                    if s12 > 1.E-12:
                        grid[j] = s11 / s12
                grad_max, grad_max_inx = self.getMaxGradient(grad)
                res = {'accuracy': math.fabs(grad_max - 1.), 'step': counter + 1, 'k': self.k, 'p': p, 'grid': grid, 'gradient': grad, 'mix_den': mix_den}
                if res['accuracy'] < self.acc and counter > 10:
                    break
        return res

    def getLikelihood(self, mix_den):
        """Log-likelihood over observations with positive mixture density."""
        mix_den_fil = mix_den > 0
        r = np.log(mix_den[mix_den_fil]).sum()
        return r

    def combine(self, res):
        """Merge adjacent components whose risks differ by less than limit."""
        p, grid, k = res['p'], res['grid'], self.k
        diff = np.fabs(grid[:-1] - grid[1:])
        bp_seeds = (diff >= self.limit).nonzero()[0] + 1
        if k - len(bp_seeds) > 1:
            bp = [0]
            if len(bp_seeds) == 1:
                bp.append(bp_seeds[0])
                bp.append(k - 1)
            else:
                if bp_seeds[1] - bp_seeds[0] > 1:
                    bp.append(bp_seeds[0])
                for i in range(1, len(bp_seeds)):
                    if bp_seeds[i] - bp_seeds[i - 1] > 1:
                        # Bug fix: the original appended ``a[i]`` where ``a``
                        # is undefined (NameError); the intended value is
                        # the current break-point seed.
                        bp.append(bp_seeds[i])
            new_grid, new_p = [], []
            for i in range(len(bp) - 1):
                new_grid.append(grid[bp[i]])
                new_p.append(p[bp[i]:bp[i + 1]].sum())
            self.k = new_k = len(new_p)
            new_grid, new_p = np.array(new_grid), np.array(new_p)
            mix = self.getMixedProb(new_grid)
            grad, mix_den = self.getGradient(mix, new_p)
            res = self.em(1, new_grid, new_p)
            if res is not None:
                res['likelihood'] = self.getLikelihood(mix_den)
        return res

    def mixalg(self):
        """Run the full seed -> VEM -> update -> EM -> combine pipeline."""
        e, b, k, n = self.e, self.b, self.k, self.n
        p, grid = self.getSeed()
        mix = self.getMixedProb(grid)
        vem_res = self.vem(mix, p, grid)
        p, grid, k = vem_res['p'], vem_res['grid'], vem_res['k']
        n_p, n_g = self.update(p, grid)
        em_res = self.em(self.numiter, n_g, n_p)
        com_res = self.combine(em_res)
        return com_res

    def getRateEstimates(self):
        """Posterior-weighted EB rates and the most likely component per unit."""
        mix = self.getMixedProb(self.t)
        mix_p = mix * self.p
        denom = mix_p.sum(axis=1)
        categ = (mix_p / denom.reshape((self.n, 1))).argmax(axis=1)
        r = (self.t * mix_p).sum(axis=1) / denom
        return r, categ
+
diff --git a/pysal/esda/moran.py b/pysal/esda/moran.py
new file mode 100644
index 0000000..681d98f
--- /dev/null
+++ b/pysal/esda/moran.py
@@ -0,0 +1,793 @@
+"""
+Moran's I Spatial Autocorrelation Statistics
+
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu>"
+from pysal.weights.spatial_lag import lag_spatial as slag
+from pysal.esda.smoothing import assuncao_rate
+import scipy.stats as stats
+import numpy as np
+
+__all__ = ["Moran", "Moran_Local", "Moran_BV", "Moran_BV_matrix",
+           "Moran_Rate", "Moran_Local_Rate"]
+
+
+PERMUTATIONS = 999
+
+
class Moran:
    """Moran's I Global Autocorrelation Statistic

    Parameters
    ----------

    y               : array
                      variable measured across n spatial units
    w               : W
                      spatial weights instance
    transformation  : string
                      weights transformation,  default is row-standardized "r".
                      Other options include "B": binary,  "D":
                      doubly-standardized,  "U": untransformed
                      (general weights), "V": variance-stabilizing.
    permutations    : int
                      number of random permutations for calculation of
                      pseudo-p_values
    two_tailed      : boolean
                      If True (default) analytical p-values for Moran are two
                      tailed, otherwise if False, they are one-tailed.

    Attributes
    ----------
    y            : array
                   original variable
    w            : W
                   original w object
    permutations : int
                   number of permutations
    I            : float
                   value of Moran's I
    EI           : float
                   expected value under normality assumption
    VI_norm      : float
                   variance of I under normality assumption
    seI_norm     : float
                   standard deviation of I under normality assumption
    z_norm       : float
                   z-value of I under normality assumption
    p_norm       : float
                   p-value of I under normality assumption
    VI_rand      : float
                   variance of I under randomization assumption
    seI_rand     : float
                   standard deviation of I under randomization assumption
    z_rand       : float
                   z-value of I under randomization assumption
    p_rand       : float
                   p-value of I under randomization assumption
    two_tailed   : Boolean
                   If True p_norm and p_rand are two-tailed, otherwise they
                   are one-tailed.
    sim          : array (if permutations>0)
                   vector of I values for permuted samples
    p_sim        : array (if permutations>0)
                   p-value based on permutations (one-tailed)
                   null: spatial randomness
                   alternative: the observed I is extreme if
                   it is either extremely greater or extremely lower
                   than the values obtained based on permutations
    EI_sim       : float (if permutations>0)
                   average value of I from permutations
    VI_sim       : float (if permutations>0)
                   variance of I from permutations
    seI_sim      : float (if permutations>0)
                   standard deviation of I under permutations.
    z_sim        : float (if permutations>0)
                   standardized I based on permutations
    p_z_sim      : float (if permutations>0)
                   p-value based on standard normal approximation from
                   permutations

    Examples
    --------
    >>> import pysal
    >>> w = pysal.open(pysal.examples.get_path("stl.gal")).read()
    >>> f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
    >>> y = np.array(f.by_col['HR8893'])
    >>> mi = Moran(y,  w)
    >>> "%7.5f" % mi.I
    '0.24366'
    >>> mi.EI
    -0.012987012987012988
    >>> mi.p_norm
    0.00027147862770937614

    SIDS example replicating OpenGeoda

    >>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
    >>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))
    >>> SIDR = np.array(f.by_col("SIDR74"))
    >>> mi = pysal.Moran(SIDR,  w)
    >>> "%6.4f" % mi.I
    '0.2477'
    >>> mi.p_norm
    0.0001158330781489969

    One-tailed

    >>> mi_1 = pysal.Moran(SIDR,  w, two_tailed=False)
    >>> "%6.4f" % mi_1.I
    '0.2477'
    >>> mi_1.p_norm
    5.7916539074498452e-05

    """
    def __init__(self, y, w, transformation="r", permutations=PERMUTATIONS,
                 two_tailed=True):
        self.y = y
        w.transform = transformation
        self.w = w
        self.permutations = permutations
        self.__moments()
        self.I = self.__calc(self.z)
        # z-scores under the normality and randomization assumptions
        self.z_norm = (self.I - self.EI) / self.seI_norm
        self.z_rand = (self.I - self.EI) / self.seI_rand

        # one-tailed analytical p-values on the observed side of the mean
        if self.z_norm > 0:
            self.p_norm = 1 - stats.norm.cdf(self.z_norm)
            self.p_rand = 1 - stats.norm.cdf(self.z_rand)
        else:
            self.p_norm = stats.norm.cdf(self.z_norm)
            self.p_rand = stats.norm.cdf(self.z_rand)

        if two_tailed:
            self.p_norm *= 2.
            self.p_rand *= 2.

        if permutations:
            # permutation inference: recompute I for random reorderings of z
            # (range rather than Py2-only xrange, works on Py2 and Py3)
            sim = [self.__calc(np.random.permutation(self.z))
                   for i in range(permutations)]
            self.sim = sim = np.array(sim)
            above = sim >= self.I
            larger = above.sum()
            # fold to the smaller tail for a one-tailed pseudo p-value
            if (self.permutations - larger) < larger:
                larger = self.permutations - larger
            # +1 in numerator and denominator accounts for the observed I
            self.p_sim = (larger + 1.) / (permutations + 1.)
            self.EI_sim = sim.sum() / permutations
            self.seI_sim = sim.std()
            self.VI_sim = self.seI_sim ** 2
            self.z_sim = (self.I - self.EI_sim) / self.seI_sim
            if self.z_sim > 0:
                self.p_z_sim = 1 - stats.norm.cdf(self.z_sim)
            else:
                self.p_z_sim = stats.norm.cdf(self.z_sim)

    def __moments(self):
        """Compute E[I] and Var[I] under normality and randomization."""
        self.n = len(self.y)
        y = self.y
        z = y - y.mean()  # deviations from the mean
        self.z = z
        self.z2ss = sum(z * z)
        self.EI = -1. / (self.n - 1)
        n = self.n
        s1 = self.w.s1
        s0 = self.w.s0
        s2 = self.w.s2
        s02 = s0 * s0
        # variance under the normality assumption
        v_num = n * n * s1 - n * s2 + 3 * s0 * s0
        v_den = (n - 1) * (n + 1) * s0 * s0
        self.VI_norm = v_num / v_den - (1.0 / (n - 1)) ** 2
        self.seI_norm = self.VI_norm ** (1 / 2.)

        # variance under the randomization assumption; k is the sample
        # kurtosis term of z
        k = (1 / (sum(z ** 4)) * ((sum(z ** 2)) ** 2))
        vi = (1 / (((n - 1) ** 3) * s02)) * ((n * ((n * n - 3 * n + 3)
                                                   * s1 - n * s2 + 3 * s02))
                                             - (k * ((n * n - n) * s1 - 2 * n *
                                                     s2 + 6 * s02)))
        self.VI_rand = vi
        self.seI_rand = vi ** (1 / 2.)

    def __calc(self, z):
        """Moran's I for deviations z: (n / S0) * sum(z * Wz) / sum(z^2)."""
        zl = slag(self.w, z)
        inum = sum(z * zl)
        return self.n / self.w.s0 * inum / self.z2ss
+
+
class Moran_BV:
    """
    Bivariate Moran's I

    Parameters
    ----------
    x : array
        x-axis variable
    y : array
        (wy will be on y axis)
    w : W
        weight instance assumed to be aligned with y
    transformation  : string
                      weights transformation, default is row-standardized "r".
                      Other options include
                      "B": binary,
                      "D": doubly-standardized,
                      "U": untransformed (general weights),
                      "V": variance-stabilizing.
    permutations    : int
                      number of random permutations for calculation of pseudo
                      p_values

    Attributes
    ----------
    zx            : array
                    original x variable standardized by mean and std
    zy            : array
                    original y variable standardized by mean and std
    w             : W
                    original w object
    permutation   : int
                    number of permutations
    I             : float
                    value of bivariate Moran's I
    sim           : array (if permutations>0)
                    vector of I values for permuted samples
    p_sim         : float (if permutations>0)
                    p-value based on permutations (one-sided)
                    null: spatial randomness
                    alternative: the observed I is extreme
                    it is either extremely high or extremely low
    EI_sim        : array (if permutations>0)
                    average value of I from permutations
    VI_sim        : array (if permutations>0)
                    variance of I from permutations
    seI_sim       : array (if permutations>0)
                    standard deviation of I under permutations.
    z_sim         : array (if permutations>0)
                    standardized I based on permutations
    p_z_sim       : float  (if permutations>0)
                    p-value based on standard normal approximation from
                    permutations

    Notes
    -----

    Inference is only based on permutations as analytical results are none too
    reliable.

    Examples
    --------
    >>> import pysal
    >>> import numpy as np

    Set random number generator seed so we can replicate the example

    >>> np.random.seed(10)

    Open the sudden infant death dbf file and read in rates for 74 and 79
    converting each to a numpy array

    >>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))
    >>> SIDR74 = np.array(f.by_col['SIDR74'])
    >>> SIDR79 = np.array(f.by_col['SIDR79'])

    Read a GAL file and construct our spatial weights object

    >>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()

    Create an instance of Moran_BV

    >>> mbi = Moran_BV(SIDR79,  SIDR74,  w)

    What is the bivariate Moran's I value

    >>> print mbi.I
    0.156131961696

    Based on 999 permutations, what is the p-value of our statistic

    >>> mbi.p_z_sim
    0.0014186617421765302


    """
    def __init__(self, x, y, w, transformation="r", permutations=PERMUTATIONS):
        # standardize both variables (sample std, ddof=1)
        zy = (y - y.mean()) / y.std(ddof=1)
        zx = (x - x.mean()) / x.std(ddof=1)
        self.zx = zx
        self.zy = zy
        w.transform = transformation
        self.w = w
        self.I = self.__calc(zy)
        if permutations:
            # permute only y; x stays fixed under the null
            # (range rather than Py2-only xrange, works on Py2 and Py3)
            nrp = np.random.permutation
            sim = [self.__calc(nrp(zy)) for i in range(permutations)]
            self.sim = sim = np.array(sim)
            above = sim >= self.I
            larger = above.sum()
            # fold to the smaller tail for a one-tailed pseudo p-value
            if (permutations - larger) < larger:
                larger = permutations - larger
            self.p_sim = (larger + 1.) / (permutations + 1.)
            self.EI_sim = sim.sum() / permutations
            self.seI_sim = sim.std()
            self.VI_sim = self.seI_sim ** 2
            self.z_sim = (self.I - self.EI_sim) / self.seI_sim
            if self.z_sim > 0:
                self.p_z_sim = 1 - stats.norm.cdf(self.z_sim)
            else:
                self.p_z_sim = stats.norm.cdf(self.z_sim)

    def __calc(self, zy):
        """Bivariate I: sum(zx * W zy) / sum(zy^2)."""
        wzy = slag(self.w, zy)
        self.num = sum(self.zx * wzy)
        self.den = sum(zy * zy)
        return self.num / self.den
+
+
def Moran_BV_matrix(variables, w, permutations=0, varnames=None):
    """Bivariate Moran Matrix

    Calculates bivariate Moran between all pairs of a set of variables.

    Parameters
    ----------
    variables    : list
                   sequence of variables
    w            : W
                   a spatial weights object
    permutations : int
                   number of permutations
    varnames     : list
                   strings for variable names. Currently unused and accepted
                   only for backward compatibility; this implementation
                   never reads it (no runtime summary is printed).

    Returns
    -------
    results      : dictionary
                   (i,  j) is the key for the pair of variables, values are
                   the Moran_BV objects.

    Examples
    --------
    >>> import pysal

    open dbf

    >>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))

    pull of selected variables from dbf and create numpy arrays for each

    >>> varnames = ['SIDR74',  'SIDR79',  'NWR74',  'NWR79']
    >>> vars = [np.array(f.by_col[var]) for var in varnames]

    create a contiguity matrix from an external gal file

    >>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()

    create an instance of Moran_BV_matrix

    >>> res = Moran_BV_matrix(vars,  w,  varnames = varnames)

    check values

    >>> print round(res[(0,  1)].I,7)
    0.1936261
    >>> print round(res[(3,  0)].I,7)
    0.3770138


    """
    k = len(variables)
    results = {}
    # both orderings are computed for every unordered pair: results[i, j]
    # relates variable i to the spatial lag of j, and vice versa
    for i in range(k - 1):
        for j in range(i + 1, k):
            y1 = variables[i]
            y2 = variables[j]
            results[i, j] = Moran_BV(y1, y2, w, permutations=permutations)
            results[j, i] = Moran_BV(y2, y1, w, permutations=permutations)
    return results
+
+
class Moran_Rate(Moran):
    """
    Adjusted Moran's I Global Autocorrelation Statistic for Rate Variables

    Parameters
    ----------

    e               : array
                      an event variable measured across n spatial units
    b               : array
                      a population-at-risk variable measured across n spatial
                      units
    w               : W
                      spatial weights instance
    adjusted        : boolean
                      whether or not Moran's I needs to be adjusted for rate
                      variable
    transformation  : string
                      weights transformation, default is row-standardized "r".
                      Other options include
                      "B": binary,
                      "D": doubly-standardized,
                      "U": untransformed (general weights),
                      "V": variance-stabilizing.
    two_tailed      : Boolean
                      If True (default), analytical p-values for Moran's I are
                      two-tailed, otherwise they are one tailed.
    permutations    : int
                      number of random permutations for calculation of pseudo
                      p_values

    Attributes
    ----------
    y            : array
                   rate variable computed from parameters e and b
                   if adjusted is True, y is standardized rates
                   otherwise, y is raw rates
    w            : W
                   original w object
    permutations : int
                   number of permutations
    I            : float
                   value of Moran's I
    EI           : float
                   expected value under normality assumption
    VI_norm      : float
                   variance of I under normality assumption
    seI_norm     : float
                   standard deviation of I under normality assumption
    z_norm       : float
                   z-value of I under normality assumption
    p_norm       : float
                   p-value of I under normality assumption
    VI_rand      : float
                   variance of I under randomization assumption
    seI_rand     : float
                   standard deviation of I under randomization assumption
    z_rand       : float
                   z-value of I under randomization assumption
    p_rand       : float
                   p-value of I under randomization assumption
    two_tailed   : Boolean
                   If True, p_norm and p_rand are two-tailed p-values,
                   otherwise they are one-tailed.
    sim          : array (if permutations>0)
                   vector of I values for permuted samples
    p_sim        : array (if permutations>0)
                   p-value based on permutations (one-sided)
                   null: spatial randomness
                   alternative: the observed I is extreme if it is
                   either extremely greater or extremely lower than the values
                   obtained from permutations
    EI_sim       : float (if permutations>0)
                   average value of I from permutations
    VI_sim       : float (if permutations>0)
                   variance of I from permutations
    seI_sim      : float (if permutations>0)
                   standard deviation of I under permutations.
    z_sim        : float (if permutations>0)
                   standardized I based on permutations
    p_z_sim      : float (if permutations>0)
                   p-value based on standard normal approximation from
                   permutations

    References
    ----------
    Assuncao, R. E. and Reis, E. A. 1999. A new proposal to adjust Moran's I
    for population density. Statistics in Medicine. 18, 2147-2162

    Examples
    --------
    >>> import pysal
    >>> w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
    >>> f = pysal.open(pysal.examples.get_path("sids2.dbf"))
    >>> e = np.array(f.by_col('SID79'))
    >>> b = np.array(f.by_col('BIR79'))
    >>> mi = pysal.esda.moran.Moran_Rate(e, b,  w, two_tailed=False)
    >>> "%6.4f" % mi.I
    '0.1662'
    >>> "%6.4f" % mi.p_norm
    '0.0042'
    """

    def __init__(self, e, b, w, adjusted=True, transformation="r",
                 permutations=PERMUTATIONS, two_tailed=True):
        # Either EB-standardize the rates (Assuncao & Reis adjustment) or
        # use the raw rates, then delegate everything else to Moran.
        rate = assuncao_rate(e, b) if adjusted else e * 1.0 / b
        Moran.__init__(self, rate, w, transformation=transformation,
                       permutations=permutations, two_tailed=two_tailed)
+
+
+class Moran_Local:
+    """Local Moran Statistics
+
+
+    Parameters
+    ----------
+    y : n*1 array
+
+    w : weight instance assumed to be aligned with y
+
+    transformation : string
+                     weights transformation,  default is row-standardized "r".
+                     Other options include
+                     "B": binary,
+                     "D": doubly-standardized,
+                     "U": untransformed (general weights),
+                     "V": variance-stabilizing.
+
+    permutations   : number of random permutations for calculation of pseudo
+                     p_values
+    geoda_quads    : boolean (default=False)
+                     If True use GeoDa scheme: HH=1, LL=2, LH=3, HL=4
+                     If False use PySAL Scheme: HH=1, LH=2, LL=3, HL=4
+
+    Attributes
+    ----------
+
+    y            : array
+                   original variable
+    w            : W
+                   original w object
+    permutations : int
+                   number of random permutations for calculation of pseudo
+                   p_values
+    Is           : float
+                   value of Moran's I
+    q            : array (if permutations>0)
+                   values indicate quadrat location 1 HH,  2 LH,  3 LL,  4 HL
+    sim          : array (if permutations>0)
+                   vector of I values for permuted samples
+    p_sim        : array (if permutations>0)
+                   p-value based on permutations (one-sided)
+                   null: spatial randomness
+                   alternative: the observed Ii is further away or extreme
+                   from the median of simulated values. It is either extremelyi
+                   high or extremely low in the distribution of simulated Is.
+    EI_sim       : float (if permutations>0)
+                   average value of I from permutations
+    VI_sim       : float (if permutations>0)
+                   variance of I from permutations
+    seI_sim      : float (if permutations>0)
+                   standard deviation of I under permutations.
+    z_sim        : float (if permutations>0)
+                   standardized I based on permutations
+    p_z_sim      : float (if permutations>0)
+                   p-value based on standard normal approximation from
+                   permutations (one-sided)
+                   for two-sided tests, these values should be multiplied by 2
+
+    Examples
+    --------
+    >>> import pysal as ps
+    >>> import numpy as np
+    >>> np.random.seed(10)
+    >>> w = ps.open(ps.examples.get_path("desmith.gal")).read()
+    >>> f = ps.open(ps.examples.get_path("desmith.txt"))
+    >>> y = np.array(f.by_col['z'])
+    >>> lm = ps.Moran_Local(y, w, transformation = "r", permutations = 99)
+    >>> lm.q
+    array([4, 4, 4, 2, 3, 3, 1, 4, 3, 3])
+    >>> lm.p_z_sim[0]
+    0.46756830387716064
+    >>> lm = ps.Moran_Local(y, w, transformation = "r", permutations = 99, geoda_quads=True)
+    >>> lm.q
+    array([4, 4, 4, 3, 2, 2, 1, 4, 2, 2])
+
+    Note random components result is slightly different values across
+    architectures so the results have been removed from doctests and will be
+    moved into unittests that are conditional on architectures
+    """
+    def __init__(self, y, w, transformation="r", permutations=PERMUTATIONS,
+        geoda_quads=False):
+        """Standardize y, compute local Moran's I and, if requested,
+        conditional-permutation based inference. See the class docstring
+        for parameter and attribute descriptions."""
+        self.y = y
+        n = len(y)
+        self.n = n
+        self.n_1 = n - 1
+        # center y; the standardized z drives both the statistic and quadrants
+        z = y - y.mean()
+        # setting for floating point noise
+        # (silence divide/invalid warnings while standardizing, then restore
+        # the caller's numpy error state afterwards)
+        orig_settings = np.seterr()
+        np.seterr(all="ignore")
+        sy = y.std()
+        z /= sy
+        np.seterr(**orig_settings)
+        self.z = z
+        # NOTE: mutates the caller's W object by applying the transformation
+        w.transform = transformation
+        self.w = w
+        self.permutations = permutations
+        self.den = sum(z * z)
+        self.Is = self.calc(self.w, self.z)
+        self.geoda_quads = geoda_quads
+        # quadrant codes, positions = (HH, LH, LL, HL); GeoDa swaps LH and LL
+        quads = [1, 2, 3, 4]
+        if geoda_quads:
+            quads = [1, 3, 2, 4]
+        self.quads = quads
+        self.__quads()
+        if permutations:
+            self.__crand()
+            # rlisas is (n, permutations); transpose so axis 0 is permutations
+            sim = np.transpose(self.rlisas)
+            above = sim >= self.Is
+            larger = np.sum(above, axis=0)
+            # fold to whichever tail the observed value falls in (two-sided
+            # folding of the one-sided count)
+            low_extreme = (self.permutations - larger) < larger
+            larger[low_extreme] = self.permutations - larger[low_extreme]
+            # pseudo p-value with the +1 correction for the observed value
+            self.p_sim = (larger + 1.0) / (permutations + 1.0)
+            self.sim = sim
+            self.EI_sim = sim.mean()
+            self.seI_sim = sim.std()
+            self.VI_sim = self.seI_sim * self.seI_sim
+            self.z_sim = (self.Is - self.EI_sim) / self.seI_sim
+            # one-sided normal-approximation p-value from the permutations
+            self.p_z_sim = 1 - stats.norm.cdf(np.abs(self.z_sim))
+
+    def calc(self, w, z):
+        zl = slag(w, z)
+        return self.n_1 * self.z * zl / self.den
+
+    def __crand(self):
+        """
+        conditional randomization
+
+        for observation i with ni neighbors,  the candidate set cannot include
+        i (we don't want i being a neighbor of i). we have to sample without
+        replacement from a set of ids that doesn't include i. numpy doesn't
+        directly support sampling wo replacement and it is expensive to
+        implement this. instead we omit i from the original ids,  permute the
+        ids and take the first ni elements of the permuted ids as the
+        neighbors to i in each randomization.
+
+        Produces self.rlisas, an (n, permutations) array of simulated local
+        statistics.
+        """
+        z = self.z
+        lisas = np.zeros((self.n, self.permutations))
+        n_1 = self.n - 1
+        prange = range(self.permutations)
+        # draw enough candidate positions per permutation to cover the
+        # observation with the most neighbors
+        k = self.w.max_neighbors + 1
+        nn = self.n - 1
+        rids = np.array([np.random.permutation(nn)[0:k] for i in prange])
+        ids = np.arange(self.w.n)
+        ido = self.w.id_order
+        # align weights and cardinalities with the id order of w
+        w = [self.w.weights[ido[i]] for i in ids]
+        wc = [self.w.cardinalities[ido[i]] for i in ids]
+
+        for i in xrange(self.w.n):
+            # candidate ids exclude i itself; shuffle so rids indexes a fresh
+            # random ordering for each observation
+            idsi = ids[ids != i]
+            np.random.shuffle(idsi)
+            # take only as many candidates as i has neighbors (wc[i])
+            tmp = z[idsi[rids[:, 0:wc[i]]]]
+            # one simulated local statistic per permutation for observation i
+            lisas[i] = z[i] * (w[i] * tmp).sum(1)
+        self.rlisas = (n_1 / self.den) * lisas
+
+    def __quads(self):
+        zl = slag(self.w, self.z)
+        zp = self.z > 0
+        lp = zl > 0
+        pp = zp * lp
+        np = (1 - zp) * lp
+        nn = (1 - zp) * (1 - lp)
+        pn = zp * (1 - lp)
+        self.q = self.quads[0] * pp + self.quads[1] * np + self.quads[2] * nn + self.quads[3] * pn
+
+
+
+class Moran_Local_Rate(Moran_Local):
+    """
+    Adjusted Local Moran Statistics for Rate Variables
+
+    Parameters
+    ----------
+    e : n*1 array
+        an event variable across n spatial units
+    b : n*1 array
+        a population-at-risk variable across n spatial units
+    w : weight instance assumed to be aligned with e and b
+    adjusted: boolean
+              whether or not local Moran statistics need to be adjusted for
+              rate variable
+    transformation : string
+                     weights transformation,  default is row-standardized "r".
+                     Other options include
+                     "B": binary,
+                     "D": doubly-standardized,
+                     "U": untransformed (general weights),
+                     "V": variance-stabilizing.
+    permutations   : number of random permutations for calculation of pseudo
+                     p_values
+    geoda_quads    : boolean (default=False)
+                     If True use GeoDa scheme: HH=1, LL=2, LH=3, HL=4
+                     If False use PySAL Scheme: HH=1, LH=2, LL=3, HL=4
+    Attributes
+    ----------
+    y            : array
+                   rate variables computed from parameters e and b
+                   if adjusted is True, y is standardized rates
+                   otherwise, y is raw rates
+    w            : W
+                   original w object
+    permutations : int
+                   number of random permutations for calculation of pseudo
+                   p_values
+    Is           : float
+                   value of Moran's I
+    q            : array (if permutations>0)
+                   values indicate quadrat location 1 HH,  2 LH,  3 LL,  4 HL
+    sim          : array (if permutations>0)
+                   vector of I values for permuted samples
+    p_sim        : array (if permutations>0)
+                   p-value based on permutations (one-sided)
+                   null: spatial randomness
+                   alternative: the observed Ii is extreme relative to the
+                   median of simulated Iis; it is either extremely
+                   high or extremely low in the distribution of simulated Is
+    EI_sim       : float (if permutations>0)
+                   average value of I from permutations
+    VI_sim       : float (if permutations>0)
+                   variance of I from permutations
+    seI_sim      : float (if permutations>0)
+                   standard deviation of I under permutations.
+    z_sim        : float (if permutations>0)
+                   standardized I based on permutations
+    p_z_sim      : float (if permutations>0)
+                   p-value based on standard normal approximation from
+                   permutations (one-sided)
+                   for two-sided tests, these values should be multiplied by 2
+
+    References
+    ----------
+    Assuncao, R. E. and Reis, E. A. 1999. A new proposal to adjust Moran's I
+    for population density. Statistics in Medicine. 18, 2147-2162
+
+    Examples
+    --------
+    >>> import pysal as ps
+    >>> import numpy as np
+    >>> np.random.seed(10)
+    >>> w = ps.open(ps.examples.get_path("sids2.gal")).read()
+    >>> f = ps.open(ps.examples.get_path("sids2.dbf"))
+    >>> e = np.array(f.by_col('SID79'))
+    >>> b = np.array(f.by_col('BIR79'))
+    >>> lm = ps.esda.moran.Moran_Local_Rate(e, b, w, \
+                                               transformation = "r", \
+                                               permutations = 99)
+    >>> lm.q[:10]
+    array([2, 4, 3, 1, 2, 1, 1, 4, 2, 4])
+    >>> lm.p_z_sim[0]
+    0.39319552026912641
+    >>> lm = ps.esda.moran.Moran_Local_Rate(e, b, w, \
+                                               transformation = "r", \
+                                               permutations = 99, \
+                                               geoda_quads=True)
+    >>> lm.q[:10]
+    array([3, 4, 2, 1, 3, 1, 1, 4, 3, 4])
+
+    Note random components result is slightly different values across
+    architectures so the results have been removed from doctests and will be
+    moved into unittests that are conditional on architectures
+    """
+
+    def __init__(self, e, b, w, adjusted=True, transformation="r",
+                 permutations=PERMUTATIONS, geoda_quads=False):
+        # compute standardized (Assuncao-Reis) or raw rates, then delegate
+        # the local Moran computation to the parent class
+        if adjusted:
+            y = assuncao_rate(e, b)
+        else:
+            y = e * 1.0 / b
+        Moran_Local.__init__(self, y, w,
+                             transformation=transformation,
+                             permutations=permutations,
+                             geoda_quads=geoda_quads)
+
+
+def _test():
+    import doctest
+    # the following line could be used to define an alternative to the
+    # '<BLANKLINE>' flag
+    # doctest.BLANKLINE_MARKER = 'something better than <BLANKLINE>'
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/esda/smoothing.py b/pysal/esda/smoothing.py
new file mode 100644
index 0000000..e250a96
--- /dev/null
+++ b/pysal/esda/smoothing.py
@@ -0,0 +1,1573 @@
+"""
+Apply smoothing to rate computation
+
+[Longer Description]
+
+Author(s):
+    Myunghwa Hwang mhwang4 at gmail.com
+    David Folch dfolch at asu.edu
+    Luc Anselin luc.anselin at asu.edu
+    Serge Rey srey at asu.edu
+
+"""
+
+__author__ = "Myunghwa Hwang <mhwang4 at gmail.com>, David Folch <dfolch at asu.edu>, Luc Anselin <luc.anselin at asu.edu>, Serge Rey <srey at asu.edu"
+
+import pysal
+from pysal.weights import comb, Kernel
+from pysal.cg import Point, Ray, LineSegment
+from pysal.cg import get_angle_between, get_points_dist, get_segment_point_dist
+from pysal.cg import get_point_at_angle_and_dist, convex_hull
+from pysal.common import np, KDTree
+from pysal.weights.spatial_lag import lag_spatial as slag
+from scipy.stats import gamma, norm, chi2, poisson
+
+__all__ = ['Excess_Risk', 'Empirical_Bayes', 'Spatial_Empirical_Bayes', 'Spatial_Rate', 'Kernel_Smoother', 'Age_Adjusted_Smoother', 'Disk_Smoother', 'Spatial_Median_Rate', 'Spatial_Filtering', 'Headbanging_Triples', 'Headbanging_Median_Rate', 'flatten', 'weighted_median', 'sum_by_n', 'crude_age_standardization', 'direct_age_standardization', 'indirect_age_standardization', 'standardized_mortality_ratio', 'choynowski', 'assuncao_rate']
+
+
+def flatten(l, unique=True):
+    """flatten a list of lists
+
+    Parameters
+    ----------
+    l          : list of lists
+    unique     : boolean
+                 whether or not only unique items are wanted
+
+    Returns
+    -------
+               : list of single items
+
+    Examples
+    --------
+
+    Creating a sample list whose elements are lists of integers
+
+    >>> l = [[1, 2], [3, 4, ], [5, 6]]
+
+    Applying flatten function
+
+    >>> flatten(l)
+    [1, 2, 3, 4, 5, 6]
+
+    """
+    l = reduce(lambda x, y: x + y, l)
+    if not unique:
+        return list(l)
+    return list(set(l))
+
+
+def weighted_median(d, w):
+    """A utility function to find a median of d based on w
+
+    Parameters
+    ----------
+    d          : array (n, 1)
+                 variable for which median will be found
+    w          : array (n, 1)
+                 variable on which d's median will be decided
+
+    Notes
+    -----
+    d and w are arranged in the same order
+
+    Returns
+    -------
+               : numeric
+                 median of d
+
+    Examples
+    --------
+
+    Creating an array including five integers.
+    We will get the median of these integers.
+
+    >>> d = np.array([5,4,3,1,2])
+
+    Creating another array including weight values for the above integers.
+    The median of d will be decided with a consideration to these weight
+    values.
+
+    >>> w = np.array([10, 22, 9, 2, 5])
+
+    Applying weighted_median function
+
+    >>> weighted_median(d, w)
+    4
+
+    """
+    # pair each weight with its value in a structured array so the weights
+    # can be reordered by ascending value
+    dtype = [('w', '%s' % w.dtype), ('v', '%s' % d.dtype)]
+    d_w = np.array(zip(w, d), dtype=dtype)
+    d_w.sort(order='v')
+    # cumulative weight along the value-sorted order
+    reordered_w = d_w['w'].cumsum()
+    cumsum_threshold = reordered_w[-1] * 1.0 / 2
+    # first position where the cumulative weight reaches half the total
+    median_inx = (reordered_w >= cumsum_threshold).nonzero()[0][0]
+    # exact tie with half the total weight: average the two middle values
+    if reordered_w[median_inx] == cumsum_threshold and len(d) - 1 > median_inx:
+        return np.sort(d)[median_inx:median_inx + 2].mean()
+    return np.sort(d)[median_inx]
+
+
+def sum_by_n(d, w, n):
+    """A utility function to summarize a data array into n values
+       after weighting the array with another weight array w
+
+    Parameters
+    ----------
+    d          : array(t, 1)
+                 numerical values
+    w          : array(t, 1)
+                 numerical values for weighting
+    n          : integer
+                 the number of groups
+                 t = c*n (c is a constant)
+
+    Returns
+    -------
+               : array(n, 1)
+                 an array with summarized values
+
+    Examples
+    --------
+
+    Creating an array including four integers.
+    We will compute weighted means for every two elements.
+
+    >>> d = np.array([10, 9, 20, 30])
+
+    Here is another array with the weight values for d's elements.
+
+    >>> w = np.array([0.5, 0.1, 0.3, 0.8])
+
+    We specify the number of groups for which the weighted mean is computed.
+
+    >>> n = 2
+
+    Applying sum_by_n function
+
+    >>> sum_by_n(d, w, n)
+    array([  5.9,  30. ])
+
+    """
+    t = len(d)
+    h = t / n
+    d = d * w
+    return np.array([sum(d[i: i + h]) for i in range(0, t, h)])
+
+
+def crude_age_standardization(e, b, n):
+    """A utility function to compute rate through crude age standardization
+
+    Parameters
+    ----------
+    e          : array(n*h, 1)
+                 event variable measured for each age group across n spatial units
+    b          : array(n*h, 1)
+                 population at risk variable measured for each age group across n spatial units
+    n          : integer
+                 the number of spatial units
+
+    Notes
+    -----
+    e and b are arranged in the same order
+
+    Returns
+    -------
+               : array(n, 1)
+                 age standardized rate
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 2 regions in each of which 4 age groups are available.
+    The first 4 values are event values for 4 age groups in the region 1,
+    and the next 4 values are for 4 age groups in the region 2.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same two regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+    Specifying the number of regions.
+
+    >>> n = 2
+
+    Applying crude_age_standardization function to e and b
+
+    >>> crude_age_standardization(e, b, n)
+    array([ 0.2375    ,  0.26666667])
+
+    """
+    r = e * 1.0 / b
+    b_by_n = sum_by_n(b, 1.0, n)
+    age_weight = b * 1.0 / b_by_n.repeat(len(e) / n)
+    return sum_by_n(r, age_weight, n)
+
+
+def direct_age_standardization(e, b, s, n, alpha=0.05):
+    """A utility function to compute rate through direct age standardization
+
+    Parameters
+    ----------
+    e          : array(n*h, 1)
+                 event variable measured for each age group across n spatial units
+    b          : array(n*h, 1)
+                 population at risk variable measured for each age group across n spatial units
+    s          : array(n*h, 1)
+                 standard population for each age group across n spatial units
+    n          : integer
+                 the number of spatial units
+    alpha      : float
+                 significance level for confidence interval
+
+    Notes
+    -----
+    e, b, and s are arranged in the same order
+
+    Returns
+    -------
+               : a list of n tuples; a tuple has a rate and its lower and upper limits
+                 age standardized rates and confidence intervals
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 2 regions in each of which 4 age groups are available.
+    The first 4 values are event values for 4 age groups in the region 1,
+    and the next 4 values are for 4 age groups in the region 2.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same two regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([1000, 1000, 1100, 900, 1000, 900, 1100, 900])
+
+    For direct age standardization, we also need the data for standard population.
+    Standard population is a reference population-at-risk (e.g., population distribution for the U.S.)
+    whose age distribution can be used as a benchmarking point for comparing age distributions
+    across regions (e.g., popoulation distribution for Arizona and California).
+    Another array including standard population is created.
+
+    >>> s = np.array([1000, 900, 1000, 900, 1000, 900, 1000, 900])
+
+    Specifying the number of regions.
+
+    >>> n = 2
+
+    Applying direct_age_standardization function to e and b
+
+    >>> [i[0] for i in direct_age_standardization(e, b, s, n)]
+    [0.023744019138755977, 0.026650717703349279]
+
+    """
+    age_weight = (1.0 / b) * (s * 1.0 / sum_by_n(s, 1.0, n).repeat(len(s) / n))
+    adjusted_r = sum_by_n(e, age_weight, n)
+    var_estimate = sum_by_n(e, np.square(age_weight), n)
+    g_a = np.square(adjusted_r) / var_estimate
+    g_b = var_estimate / adjusted_r
+    k = [age_weight[i:i + len(b) / n].max() for i in range(0, len(b),
+                                                           len(b) / n)]
+    g_a_k = np.square(adjusted_r + k) / (var_estimate + np.square(k))
+    g_b_k = (var_estimate + np.square(k)) / (adjusted_r + k)
+    summed_b = sum_by_n(b, 1.0, n)
+    res = []
+    for i in range(len(adjusted_r)):
+        if adjusted_r[i] == 0:
+            upper = 0.5 * chi2(1 - 0.5 * alpha)
+            lower = 0.0
+        else:
+            lower = gamma.ppf(0.5 * alpha, g_a[i], scale=g_b[i])
+            upper = gamma.ppf(1 - 0.5 * alpha, g_a_k[i], scale=g_b_k[i])
+        res.append((adjusted_r[i], lower, upper))
+    return res
+
+
+def indirect_age_standardization(e, b, s_e, s_b, n, alpha=0.05):
+    """A utility function to compute rate through indirect age standardization
+
+    Parameters
+    ----------
+    e          : array(n*h, 1)
+                 event variable measured for each age group across n spatial units
+    b          : array(n*h, 1)
+                 population at risk variable measured for each age group across n spatial units
+    s_e        : array(n*h, 1)
+                 event variable measured for each age group across n spatial units in a standard population
+    s_b        : array(n*h, 1)
+                 population variable measured for each age group across n spatial units in a standard population
+    n          : integer
+                 the number of spatial units
+    alpha      : float
+                 significance level for confidence interval
+
+    Notes
+    -----
+    e, b, s_e, and s_b are arranged in the same order
+
+    Returns
+    -------
+               : a list of n tuples; a tuple has a rate and its lower and upper limits
+                 age standardized rate
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 2 regions in each of which 4 age groups are available.
+    The first 4 values are event values for 4 age groups in the region 1,
+    and the next 4 values are for 4 age groups in the region 2.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same two regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+    For indirect age standardization, we also need the data for standard population and event.
+    Standard population is a reference population-at-risk (e.g., population distribution for the U.S.)
+    whose age distribution can be used as a benchmarking point for comparing age distributions
+    across regions (e.g., popoulation distribution for Arizona and California).
+    When the same concept is applied to the event variable,
+    we call it standard event (e.g., the number of cancer patients in the U.S.).
+    Two additional arrays including standard population and event are created.
+
+    >>> s_e = np.array([100, 45, 120, 100, 50, 30, 200, 80])
+    >>> s_b = np.array([1000, 900, 1000, 900, 1000, 900, 1000, 900])
+
+    Specifying the number of regions.
+
+    >>> n = 2
+
+    Applying indirect_age_standardization function to e and b
+
+    >>> [i[0] for i in indirect_age_standardization(e, b, s_e, s_b, n)]
+    [0.23723821989528798, 0.2610803324099723]
+
+    """
+    smr = standardized_mortality_ratio(e, b, s_e, s_b, n)
+    s_r_all = sum(s_e * 1.0) / sum(s_b * 1.0)
+    adjusted_r = s_r_all * smr
+
+    e_by_n = sum_by_n(e, 1.0, n)
+    log_smr = np.log(smr)
+    log_smr_sd = 1.0 / np.sqrt(e_by_n)
+    norm_thres = norm.ppf(1 - 0.5 * alpha)
+    log_smr_lower = log_smr - norm_thres * log_smr_sd
+    log_smr_upper = log_smr + norm_thres * log_smr_sd
+    smr_lower = np.exp(log_smr_lower) * s_r_all
+    smr_upper = np.exp(log_smr_upper) * s_r_all
+    res = zip(adjusted_r, smr_lower, smr_upper)
+    return res
+
+
+def standardized_mortality_ratio(e, b, s_e, s_b, n):
+    """A utility function to compute standardized mortality ratio (SMR).
+
+    Parameters
+    ----------
+    e          : array(n*h, 1)
+                 event variable measured for each age group across n spatial units
+    b          : array(n*h, 1)
+                 population at risk variable measured for each age group across n spatial units
+    s_e        : array(n*h, 1)
+                 event variable measured for each age group across n spatial units in a standard population
+    s_b        : array(n*h, 1)
+                 population variable measured for each age group across n spatial units in a standard population
+    n          : integer
+                 the number of spatial units
+
+    Notes
+    -----
+    e, b, s_e, and s_b are arranged in the same order
+
+    Returns
+    -------
+               : array (nx1)
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 2 regions in each of which 4 age groups are available.
+    The first 4 values are event values for 4 age groups in the region 1,
+    and the next 4 values are for 4 age groups in the region 2.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same two regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+    To compute standardized mortality ratio (SMR),
+    we need two additional arrays for standard population and event.
+    Creating s_e and s_b for standard event and population, respectively.
+
+    >>> s_e = np.array([100, 45, 120, 100, 50, 30, 200, 80])
+    >>> s_b = np.array([1000, 900, 1000, 900, 1000, 900, 1000, 900])
+
+    Specifying the number of regions.
+
+    >>> n = 2
+
+    Applying indirect_age_standardization function to e and b
+
+    >>> standardized_mortality_ratio(e, b, s_e, s_b, n)
+    array([ 2.48691099,  2.73684211])
+
+    """
+    s_r = s_e * 1.0 / s_b
+    e_by_n = sum_by_n(e, 1.0, n)
+    expected = sum_by_n(b, s_r, n)
+    smr = e_by_n * 1.0 / expected
+    return smr
+
+
+def choynowski(e, b, n, threshold=None):
+    """Choynowski map probabilities.
+
+    Parameters
+    ----------
+    e          : array(n*h, 1)
+                 event variable measured for each age group across n spatial units
+    b          : array(n*h, 1)
+                 population at risk variable measured for each age group across n spatial units
+    n          : integer
+                 the number of spatial units
+    threshold  : float
+                 Returns zero for any p-value greater than threshold
+
+    Notes
+    -----
+    e and b are arranged in the same order
+
+    Returns
+    -------
+               : array (nx1)
+
+    References
+    ----------
+    [1] M. Choynowski. 1959. Maps based on probabilities. Journal of the
+        American Statistical Association, 54, 385-388.
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 2 regions in each of which 4 age groups are available.
+    The first 4 values are event values for 4 age groups in the region 1,
+    and the next 4 values are for 4 age groups in the region 2.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same two regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+    Specifying the number of regions.
+
+    >>> n = 2
+
+    Applying choynowski function to e and b
+
+    >>> print choynowski(e, b, n)
+    [ 0.30437751  0.29367033]
+
+    """
+    e_by_n = sum_by_n(e, 1.0, n)
+    b_by_n = sum_by_n(b, 1.0, n)
+    # overall rate across all units; expected counts assume every unit
+    # experiences this common rate
+    r_by_n = sum(e_by_n) * 1.0 / sum(b_by_n)
+    expected = r_by_n * b_by_n
+    p = []
+    for index, i in enumerate(e_by_n):
+        # tail probability under a Poisson model with the expected count:
+        # lower tail for counts at or below expectation, upper tail otherwise
+        if i <= expected[index]:
+            p.append(poisson.cdf(i, expected[index]))
+        else:
+            p.append(1 - poisson.cdf(i - 1, expected[index]))
+    if threshold:
+        # NOTE(review): truthiness means threshold=0 disables filtering (same
+        # as None), and p-values equal to the threshold are zeroed even though
+        # the docstring says "greater than" -- confirm intended semantics
+        p = [i if i < threshold else 0.0 for i in p]
+    return np.array(p)
+
+
+def assuncao_rate(e, b):
+    """The standardized rates where the mean and stadard deviation used for
+    the standardization are those of Empirical Bayes rate estimates
+    The standardized rates resulting from this function are used to compute
+    Moran's I corrected for rate variables.
+
+    Parameters
+    ----------
+    e          : array(n, 1)
+                 event variable measured at n spatial units
+    b          : array(n, 1)
+                 population at risk variable measured at n spatial units
+
+    Notes
+    -----
+    e and b are arranged in the same order
+
+    Returns
+    -------
+               : array (nx1)
+
+    References
+    ----------
+    [1] Assuncao R. M. and Reis E. A., 1999, A new proposal to adjust Moran's I
+    for population density. Statistics in Medicine, 18, 2147-2162.
+
+    Examples
+    --------
+
+    Creating an array of an event variable (e.g., the number of cancer patients)
+    for 8 regions.
+
+    >>> e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+
+    Creating another array of a population-at-risk variable (e.g., total population)
+    for the same 8 regions.
+    The order for entering values is the same as the case of e.
+
+    >>> b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+
+    Computing the rates
+
+    >>> print assuncao_rate(e, b)[:4]
+    [ 1.04319254 -0.04117865 -0.56539054 -1.73762547]
+
+    """
+
+    y = e * 1.0 / b
+    e_sum, b_sum = sum(e), sum(b)
+    ebi_b = e_sum * 1.0 / b_sum
+    s2 = sum(b * ((y - ebi_b) ** 2)) / b_sum
+    ebi_a = s2 - ebi_b / (b_sum / len(e))
+    ebi_v = ebi_a + ebi_b / b
+    return (y - ebi_b) / np.sqrt(ebi_v)
+
+
+class Excess_Risk:
+    """Excess Risk
+
+    Parameters
+    ----------
+    e           : array (n, 1)
+                  event variable measured across n spatial units
+    b           : array (n, 1)
+                  population at risk variable measured across n spatial units
+
+    Attributes
+    ----------
+    r           : array (n, 1)
+                  execess risk values
+
+    Examples
+    --------
+
+    Reading data in stl_hom.csv into stl to extract values
+    for event and population-at-risk variables
+
+    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
+
+    The 11th and 14th columns in stl_hom.csv includes the number of homocides and population.
+    Creating two arrays from these columns.
+
+    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])
+
+    Creating an instance of Excess_Risk class using stl_e and stl_b
+
+    >>> er = Excess_Risk(stl_e, stl_b)
+
+    Extracting the excess risk values through the property r of the Excess_Risk instance, er
+
+    >>> er.r[:10]
+    array([ 0.20665681,  0.43613787,  0.42078261,  0.22066928,  0.57981596,
+            0.35301709,  0.56407549,  0.17020994,  0.3052372 ,  0.25821905])
+
+    """
+    def __init__(self, e, b):
+        r_mean = e.sum() * 1.0 / b.sum()
+        self.r = e * 1.0 / (b * r_mean)
+
+
+class Empirical_Bayes:
+    """Aspatial Empirical Bayes Smoothing
+
+    Parameters
+    ----------
+    e           : array (n, 1)
+                  event variable measured across n spatial units
+    b           : array (n, 1)
+                  population at risk variable measured across n spatial units
+
+    Attributes
+    ----------
+    r           : array (n, 1)
+                  rate values from Empirical Bayes Smoothing
+
+    Examples
+    --------
+
+    Reading data in stl_hom.csv into stl to extract values
+    for event and population-at-risk variables
+
+    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
+
+    The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
+    Creating two arrays from these columns.
+
+    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])
+
+    Creating an instance of Empirical_Bayes class using stl_e and stl_b
+
+    >>> eb = Empirical_Bayes(stl_e, stl_b)
+
+    Extracting the risk values through the property r of the Empirical_Bayes instance, eb
+
+    >>> eb.r[:10]
+    array([  2.36718950e-05,   4.54539167e-05,   4.78114019e-05,
+             2.76907146e-05,   6.58989323e-05,   3.66494122e-05,
+             5.79952721e-05,   2.03064590e-05,   3.31152999e-05,
+             3.02748380e-05])
+
+    """
+    def __init__(self, e, b):
+        # overall (population-weighted) mean rate is the Empirical Bayes prior
+        e_sum, b_sum = e.sum() * 1.0, b.sum() * 1.0
+        r_mean = e_sum / b_sum
+        rate = e * 1.0 / b
+        # population-weighted variance of the raw rates around the prior mean
+        r_variat = rate - r_mean
+        r_var_left = (b * r_variat * r_variat).sum() * 1.0 / b_sum
+        r_var_right = r_mean * 1.0 / b.mean()
+        # method-of-moments estimate of the prior variance (can be negative
+        # for small samples; left as-is to preserve original behavior)
+        r_var = r_var_left - r_var_right
+        # shrinkage weight: rates from larger populations keep more of their
+        # raw value, smaller populations are pulled toward the prior mean
+        weight = r_var / (r_var + r_mean / b)
+        self.r = weight * rate + (1.0 - weight) * r_mean
+
+
class Spatial_Empirical_Bayes:
    """Spatial Empirical Bayes Smoothing

    Shrinks each unit's raw rate toward a local (spatially lagged)
    reference rate instead of the single global mean rate.

    Parameters
    ----------
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    w           : spatial weights instance

    Attributes
    ----------
    r           : array (n, 1)
                  rate values from Empirical Bayes Smoothing

    Examples
    --------

    Reading data in stl_hom.csv into stl to extract values
    for event and population-at-risk variables

    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')

    The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
    Creating two arrays from these columns.

    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])

    Creating a spatial weights instance by reading in stl.gal file.

    >>> stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()

    Ensuring that the elements in the spatial weights instance are ordered
    by the given sequential numbers from 1 to the number of observations in stl_hom.csv

    >>> if not stl_w.id_order_set: stl_w.id_order = range(1,len(stl) + 1)

    Creating an instance of Spatial_Empirical_Bayes class using stl_e, stl_b, and stl_w

    >>> s_eb = Spatial_Empirical_Bayes(stl_e, stl_b, stl_w)

    Extracting the risk values through the property r of s_eb

    >>> s_eb.r[:10]
    array([  4.01485749e-05,   3.62437513e-05,   4.93034844e-05,
             5.09387329e-05,   3.72735210e-05,   3.69333797e-05,
             5.40245456e-05,   2.99806055e-05,   3.73034109e-05,
             3.47270722e-05])
    """
    def __init__(self, e, b, w):
        if not w.id_order_set:
            # Message fixed: previously read "the order of e an b".
            raise ValueError("w id_order must be set to align with the order of e and b")
        # Local reference rate for each unit from spatial rate smoothing.
        r_mean = Spatial_Rate(e, b, w).r
        rate = e * 1.0 / b
        r_var_left = np.ones(len(e)) * 1.
        ngh_num = np.ones(len(e))
        # Population at risk of each unit's neighborhood (neighbors + self).
        bi = slag(w, b) + b
        for i, idv in enumerate(w.id_order):
            # list(...) keeps this working under Python 3, where dict.keys()
            # is a view and cannot be concatenated with a list.
            ngh = list(w[idv].keys()) + [idv]
            nghi = [w.id2i[k] for k in ngh]
            ngh_num[i] = len(nghi)
            # Population-weighted squared deviation of neighborhood rates
            # from the local reference rate.
            v = sum(np.square(rate[nghi] - r_mean[i]) * b[nghi])
            r_var_left[i] = v
        r_var_left = r_var_left / bi
        r_var_right = r_mean / (bi / ngh_num)
        r_var = r_var_left - r_var_right
        # Clamp negative variance estimates so the shrinkage factor below
        # stays within [0, 1].
        r_var[r_var < 0] = 0.0
        self.r = r_mean + (rate - r_mean) * (r_var / (r_var + (r_mean / b)))
+
+
class Spatial_Rate:
    """Spatial Rate Smoothing

    The rate for each unit is recomputed over its neighborhood: events
    summed over the unit and its neighbors, divided by the corresponding
    sum of populations at risk.

    Parameters
    ----------
    e           : array (n, 1)
                  event counts for n spatial units
    b           : array (n, 1)
                  populations at risk for n spatial units
    w           : spatial weights instance

    Attributes
    ----------
    r           : array (n, 1)
                  spatially smoothed rate for each unit

    Examples
    --------

    Read stl_hom.csv; the 11th and 14th columns hold homicide counts
    and population, which serve as the event and population-at-risk
    variables:

    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])

    Build a spatial weights instance from stl.gal and make sure its
    elements follow the sequential ids 1..n used by stl_hom.csv:

    >>> stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()
    >>> if not stl_w.id_order_set: stl_w.id_order = range(1,len(stl) + 1)

    Smooth the rates and inspect the first ten values:

    >>> sr = Spatial_Rate(stl_e,stl_b,stl_w)
    >>> sr.r[:10]
    array([  4.59326407e-05,   3.62437513e-05,   4.98677081e-05,
             5.09387329e-05,   3.72735210e-05,   4.01073093e-05,
             3.79372794e-05,   3.27019246e-05,   4.26204928e-05,
             3.47270722e-05])
    """
    def __init__(self, e, b, w):
        if not w.id_order_set:
            raise ValueError("w id_order must be set to align with the order of e and b")
        # Binary weights make the spatial lag a plain neighborhood sum.
        w.transform = 'b'
        lag_e = slag(w, e)
        lag_b = slag(w, b)
        self.r = (e + lag_e) / (b + lag_b)
        # Restore the caller's original transform.
        w.transform = 'o'
+
+
class Kernel_Smoother:
    """Kernel smoothing

    Rates are estimated at each location as the kernel-weighted sum of
    events divided by the kernel-weighted sum of populations at risk.

    Parameters
    ----------
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    w           : Kernel weights instance

    Attributes
    ----------
    r           : array (n, 1)
                  rate values from kernel smoothing

    Raises
    ------
    ValueError
        If w is not a Kernel weights instance, or its id_order is unset.

    Examples
    --------

    Creating an array including event values for 6 regions

    >>> e = np.array([10, 1, 3, 4, 2, 5])

    Creating another array including population-at-risk values for the 6 regions

    >>> b = np.array([100, 15, 20, 20, 80, 90])

    Creating a list containing geographic coordinates of the 6 regions' centroids

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]

    Creating a kernel-based spatial weights instance by using the above points

    >>> kw=Kernel(points)

    Ensuring that the elements in the kernel-based weights are ordered
    by the given sequential numbers from 0 to 5

    >>> if not kw.id_order_set: kw.id_order = range(0,len(points))

    Applying kernel smoothing to e and b

    >>> kr = Kernel_Smoother(e, b, kw)

    Extracting the smoothed rates through the property r of the Kernel_Smoother instance

    >>> kr.r
    array([ 0.10543301,  0.0858573 ,  0.08256196,  0.09884584,  0.04756872,
            0.04845298])
    """
    def __init__(self, e, b, w):
        # isinstance (rather than exact type comparison) also accepts Kernel
        # subclasses.  The original `raise Error(...)` referenced an
        # undefined name and itself raised NameError instead of reporting
        # the real problem.
        if not isinstance(w, Kernel):
            raise ValueError('w must be an instance of Kernel weights')
        if not w.id_order_set:
            raise ValueError("w id_order must be set to align with the order of e and b")
        # Kernel-weighted event total over kernel-weighted population total.
        w_e, w_b = slag(w, e), slag(w, b)
        self.r = w_e / w_b
+
+
class Age_Adjusted_Smoother:
    """Age-adjusted rate smoothing

    Parameters
    ----------
    e           : array (n*h, 1)
                  event variable measured for each age group across n spatial units
    b           : array (n*h, 1)
                  population at risk variable measured for each age group across n spatial units
    w           : spatial weights instance
    s           : array (n*h, 1)
                  standard population for each age group across n spatial units
    alpha       : float
                  significance level passed through to direct age standardization

    Attributes
    ----------
    r           : array (n, 1)
                  age-adjusted, spatially smoothed rate values

    Notes
    -----
    Weights used to smooth age-specific events and populations are simple binary weights

    Examples
    --------

    Creating an array including 12 values for the 6 regions with 2 age groups

    >>> e = np.array([10, 8, 1, 4, 3, 5, 4, 3, 2, 1, 5, 3])

    Creating another array including 12 population-at-risk values for the 6 regions

    >>> b = np.array([100, 90, 15, 30, 25, 20, 30, 20, 80, 80, 90, 60])

    For age adjustment, we need another array of values containing standard population
    s includes standard population data for the 6 regions

    >>> s = np.array([98, 88, 15, 29, 20, 23, 33, 25, 76, 80, 89, 66])

    Creating a list containing geographic coordinates of the 6 regions' centroids

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]

    Creating a kernel-based spatial weights instance by using the above points

    >>> kw=Kernel(points)

    Ensuring that the elements in the kernel-based weights are ordered
    by the given sequential numbers from 0 to 5

    >>> if not kw.id_order_set: kw.id_order = range(0,len(points))

    Applying age-adjusted smoothing to e and b

    >>> ar = Age_Adjusted_Smoother(e, b, kw, s)

    Extracting the smoothed rates through the property r of the Age_Adjusted_Smoother instance

    >>> ar.r
    array([ 0.10519625,  0.08494318,  0.06440072,  0.06898604,  0.06952076,
            0.05020968])
    """
    def __init__(self, e, b, w, s, alpha=0.05):
        # t: total number of (unit, age-group) observations;
        # h: number of age groups per spatial unit.
        # NOTE(review): integer division assumes Python 2 semantics and that
        # len(e) is an exact multiple of w.n — confirm for py3 ports.
        t = len(e)
        h = t / w.n
        # Binary weights so the spatial lag is a neighborhood sum.
        w.transform = 'b'
        e_n, b_n = [], []
        # Smooth each age group separately; e[i::h] selects the i-th age
        # group across all spatial units.
        for i in range(h):
            e_n.append(slag(w, e[i::h]).tolist())
            b_n.append(slag(w, b[i::h]).tolist())
        # Column-major reshape interleaves the per-age-group results back
        # into the original (unit, age group) ordering.
        e_n = np.array(e_n).reshape((1, t), order='F')[0]
        b_n = np.array(b_n).reshape((1, t), order='F')[0]
        # direct_age_standardization returns per-unit tuples whose first
        # element is the point estimate; keep only those.
        r = direct_age_standardization(e_n, b_n, s, w.n, alpha=alpha)
        self.r = np.array([i[0] for i in r])
        # Restore the caller's original transform.
        w.transform = 'o'
+
+
class Disk_Smoother:
    """Locally weighted averages (disk smoothing)

    Each unit's smoothed value is the weighted average of the raw rates
    over its neighborhood, using the weights stored in w.

    Parameters
    ----------
    e           : array (n, 1)
                  event counts for n spatial units
    b           : array (n, 1)
                  populations at risk for n spatial units
    w           : spatial weights matrix

    Attributes
    ----------
    r           : array (n, 1)
                  rate values from disk smoothing

    Examples
    --------

    Read stl_hom.csv; the 11th and 14th columns hold homicide counts
    and population:

    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])

    Build a spatial weights instance from stl.gal and make sure its
    elements follow the sequential ids 1..n used by stl_hom.csv:

    >>> stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()
    >>> if not stl_w.id_order_set: stl_w.id_order = range(1,len(stl) + 1)

    Apply disk smoothing and inspect the first ten rates:

    >>> sr = Disk_Smoother(stl_e,stl_b,stl_w)
    >>> sr.r[:10]
    array([  4.56502262e-05,   3.44027685e-05,   3.38280487e-05,
             4.78530468e-05,   3.12278573e-05,   2.22596997e-05,
             2.67074856e-05,   2.36924573e-05,   3.48801587e-05,
             3.09511832e-05])
    """

    def __init__(self, e, b, w):
        if not w.id_order_set:
            raise ValueError("w id_order must be set to align with the order of e and b")
        raw = e * 1.0 / b
        # Total weight of each unit's neighborhood, in id_order.
        totals = np.array([sum(w.weights[i]) for i in w.id_order])
        self.r = slag(w, raw) / totals
+
+
class Spatial_Median_Rate:
    """Spatial Median Rate Smoothing

    Parameters
    ----------
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    w           : spatial weights instance
    aw          : array (n, 1)
                  auxiliary weight variable measured across n spatial units
    iteration   : integer
                  the number of iterations

    Attributes
    ----------
    r           : array (n, 1)
                  rate values from spatial median rate smoothing
    w           : spatial weights instance
    aw          : array (n, 1)
                  auxiliary weight variable measured across n spatial units

    Examples
    --------

    Reading data in stl_hom.csv into stl to extract values
    for event and population-at-risk variables

    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')

    The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
    Creating two arrays from these columns.

    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])

    Creating a spatial weights instance by reading in stl.gal file.

    >>> stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()

    Ensuring that the elements in the spatial weights instance are ordered
    by the given sequential numbers from 1 to the number of observations in stl_hom.csv

    >>> if not stl_w.id_order_set: stl_w.id_order = range(1,len(stl) + 1)

    Computing spatial median rates without iteration

    >>> smr0 = Spatial_Median_Rate(stl_e,stl_b,stl_w)

    Extracting the computed rates through the property r of the Spatial_Median_Rate instance

    >>> smr0.r[:10]
    array([  3.96047383e-05,   3.55386859e-05,   3.28308921e-05,
             4.30731238e-05,   3.12453969e-05,   1.97300409e-05,
             3.10159267e-05,   2.19279204e-05,   2.93763432e-05,
             2.93763432e-05])

    Recomputing spatial median rates with 5 iterations

    >>> smr1 = Spatial_Median_Rate(stl_e,stl_b,stl_w,iteration=5)

    Extracting the computed rates through the property r of the Spatial_Median_Rate instance

    >>> smr1.r[:10]
    array([  3.11293620e-05,   2.95956330e-05,   3.11293620e-05,
             3.10159267e-05,   2.98436066e-05,   2.76406686e-05,
             3.10159267e-05,   2.94788171e-05,   2.99460806e-05,
             2.96981070e-05])

    Computing spatial median rates by using the base variable as auxiliary weights
    without iteration

    >>> smr2 = Spatial_Median_Rate(stl_e,stl_b,stl_w,aw=stl_b)

    Extracting the computed rates through the property r of the Spatial_Median_Rate instance

    >>> smr2.r[:10]
    array([  5.77412020e-05,   4.46449551e-05,   5.77412020e-05,
             5.77412020e-05,   4.46449551e-05,   3.61363528e-05,
             3.61363528e-05,   4.46449551e-05,   5.77412020e-05,
             4.03987355e-05])

    Recomputing spatial median rates by using the base variable as auxiliary weights
    with 5 iterations

    >>> smr3 = Spatial_Median_Rate(stl_e,stl_b,stl_w,aw=stl_b,iteration=5)

    Extracting the computed rates through the property r of the Spatial_Median_Rate instance

    >>> smr3.r[:10]
    array([  3.61363528e-05,   4.46449551e-05,   3.61363528e-05,
             3.61363528e-05,   4.46449551e-05,   3.61363528e-05,
             3.61363528e-05,   4.46449551e-05,   3.61363528e-05,
             4.46449551e-05])
    """
    def __init__(self, e, b, w, aw=None, iteration=1):
        if not w.id_order_set:
            raise ValueError("w id_order must be set to align with the order of e and b")
        # Start from the raw rates; each pass replaces them with
        # neighborhood (weighted) medians.
        self.r = e * 1.0 / b
        self.aw, self.w = aw, w
        while iteration:
            self.__search_median()
            iteration -= 1

    def __search_median(self):
        # One smoothing pass: replace every rate with the (weighted) median
        # over the unit itself plus its neighbors.
        r, aw, w = self.r, self.aw, self.w
        new_r = []
        if self.aw is None:
            # Unweighted: plain median of the unit and its neighbors.
            for i, id in enumerate(w.id_order):
                r_disk = np.append(r[i], r[w.neighbor_offsets[id]])
                new_r.append(np.median(r_disk))
        else:
            # Weighted: weighted median using the auxiliary weights.
            for i, id in enumerate(w.id_order):
                id_d = [i] + list(w.neighbor_offsets[id])
                aw_d, r_d = aw[id_d], r[id_d]
                new_r.append(weighted_median(r_d, aw_d))
        self.r = np.array(new_r)
+
+
class Spatial_Filtering:
    """Spatial Filtering

    Rates are estimated on a regular grid of points, each from either a
    fixed-radius window or an adaptive window grown to a population
    threshold.

    Parameters
    ----------
    bbox        : a list of two lists where each list is a pair of coordinates
                  a bounding box for the entire n spatial units
    data        : array (n, 2)
                  x, y coordinates
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    x_grid      : integer
                  the number of cells on x axis
    y_grid      : integer
                  the number of cells on y axis
    r           : float
                  fixed radius of a moving window
    pop         : integer
                  population threshold to create adaptive moving windows

    Attributes
    ----------
    grid        : list of (x, y) tuples, length x_grid*y_grid
                  coordinates of the grid points
    r           : array (x_grid*y_grid, 1)
                  rate values for grid points

    Raises
    ------
    ValueError
        If neither r nor pop is given.

    Notes
    -----
    No tool is provided to find an optimal value for r or pop.

    Examples
    --------

    Reading data in stl_hom.csv into stl to extract values
    for event and population-at-risk variables

    >>> stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')

    Reading the stl data in the WKT format so that
    we can easily extract polygon centroids

    >>> fromWKT = pysal.core.util.WKTParser()
    >>> stl.cast('WKT',fromWKT)

    Extracting polygon centroids through iteration

    >>> d = np.array([i.centroid for i in stl[:,0]])

    Specifying the bounding box for the stl_hom data.
    The bbox should include two points for the left-bottom and the right-top corners

    >>> bbox = [[-92.700676, 36.881809], [-87.916573, 40.3295669]]

    The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
    Creating two arrays from these columns.

    >>> stl_e, stl_b = np.array(stl[:,10]), np.array(stl[:,13])

    Applying spatial filtering by using a 10*10 mesh grid and a moving window
    with 2 radius

    >>> sf_0 = Spatial_Filtering(bbox,d,stl_e,stl_b,10,10,r=2)

    Extracting the resulting rates through the property r of the Spatial_Filtering instance

    >>> sf_0.r[:10]
    array([  4.23561763e-05,   4.45290850e-05,   4.56456221e-05,
             4.49133384e-05,   4.39671835e-05,   4.44903042e-05,
             4.19845497e-05,   4.11936548e-05,   3.93463504e-05,
             4.04376345e-05])

    Applying another spatial filtering by allowing the moving window to grow until
    600000 people are found in the window

    >>> sf = Spatial_Filtering(bbox,d,stl_e,stl_b,10,10,pop=600000)

    Checking the size of the resulting array including the rates

    >>> sf.r.shape
    (100,)

    Extracting the resulting rates through the property r of the Spatial_Filtering instance

    >>> sf.r[:10]
    array([  3.73728738e-05,   4.04456300e-05,   4.04456300e-05,
             3.81035327e-05,   4.54831940e-05,   4.54831940e-05,
             3.75658628e-05,   3.75658628e-05,   3.75658628e-05,
             3.75658628e-05])
    """

    def __init__(self, bbox, data, e, b, x_grid, y_grid, r=None, pop=None):
        data_tree = KDTree(data)
        x_range = bbox[1][0] - bbox[0][0]
        y_range = bbox[1][1] - bbox[0][1]
        x, y = np.mgrid[bbox[0][0]:bbox[1][0]:x_range / x_grid,
                        bbox[0][1]:bbox[1][1]:y_range / y_grid]
        # Materialize as a list so the grid can be queried more than once
        # (under Python 3, a bare zip is a one-shot iterator that would be
        # consumed by the first KDTree query).
        self.grid = list(zip(x.ravel(), y.ravel()))
        self.r = []
        if r is None and pop is None:
            raise ValueError("Either r or pop should not be None")
        if r is not None:
            # Fixed-radius window: rate over all points within distance r
            # of each grid point.
            pnts_in_disk = data_tree.query_ball_point(self.grid, r=r)
            for i in pnts_in_disk:
                # Distinct name: the original rebound the parameter r here.
                disk_rate = e[i].sum() * 1.0 / b[i].sum()
                self.r.append(disk_rate)
        if pop is not None:
            # Adaptive window: accumulate nearest neighbors until the
            # cumulative population reaches the threshold.
            half_nearest_pnts = data_tree.query(self.grid, k=len(e))[1]
            for i in half_nearest_pnts:
                e_n, b_n = e[i].cumsum(), b[i].cumsum()
                b_n_filter = b_n <= pop
                e_n_f, b_n_f = e_n[b_n_filter], b_n[b_n_filter]
                if len(e_n_f) == 0:
                    # The single nearest point already exceeds pop: fall
                    # back to it alone rather than produce no rate.
                    e_n_f = e_n[[0]]
                    b_n_f = b_n[[0]]
                self.r.append(e_n_f[-1] * 1.0 / b_n_f[-1])
        self.r = np.array(self.r)
+
+
class Headbanging_Triples:
    """Generate a pseudo spatial weights instance that contains headbanging triples

    Parameters
    ----------
    data        : array (n, 2)
                  numpy array of x, y coordinates
    w           : spatial weights instance
    k           : integer number of nearest neighbors
    t           : integer
                  the number of triples
    angle       : integer between 0 and 180
                  the angle criterion for a set of triples
    edgecor     : boolean
                  whether or not correction for edge points is made

    Attributes
    ----------
    triples     : dictionary
                  key is observation record id, value is a list of lists of triple ids
    extra       : dictionary
                  key is observation record id, value is a list of the following:
                  tuple of original triple observations
                  distance between original triple observations
                  distance between an original triple observation and its extrapolated point

    Examples
    --------

    importing k-nearest neighbor weights creator

    >>> from pysal import knnW

    Reading data in stl_hom.csv into stl_db to extract values
    for event and population-at-risk variables

    >>> stl_db = pysal.open(pysal.examples.get_path('stl_hom.csv'),'r')

    Reading the stl data in the WKT format so that
    we can easily extract polygon centroids

    >>> fromWKT = pysal.core.util.WKTParser()
    >>> stl_db.cast('WKT',fromWKT)

    Extracting polygon centroids through iteration

    >>> d = np.array([i.centroid for i in stl_db[:,0]])

    Using the centroids, we create a 5-nearest neighbor weights

    >>> w = knnW(d,k=5)

    Ensuring that the elements in the spatial weights instance are ordered
    by the order of stl_db's IDs

    >>> if not w.id_order_set: w.id_order = w.id_order

    Finding headbanging triples by using 5 nearest neighbors

    >>> ht = Headbanging_Triples(d,w,k=5)

    Checking the members of triples

    >>> for k, item in ht.triples.items()[:5]: print k, item
    0 [(5, 6), (10, 6)]
    1 [(4, 7), (4, 14), (9, 7)]
    2 [(0, 8), (10, 3), (0, 6)]
    3 [(4, 2), (2, 12), (8, 4)]
    4 [(8, 1), (12, 1), (8, 9)]

    Opening sids2.shp file

    >>> sids = pysal.open(pysal.examples.get_path('sids2.shp'),'r')

    Extracting the centroids of polygons in the sids data

    >>> sids_d = np.array([i.centroid for i in sids])

    Creating a 5-nearest neighbors weights from the sids centroids

    >>> sids_w = knnW(sids_d,k=5)

    Ensuring that the members in sids_w are ordered by
    the order of sids_d's ID

    >>> if not sids_w.id_order_set: sids_w.id_order = sids_w.id_order

    Finding headbanging triples by using 5 nearest neighbors

    >>> s_ht = Headbanging_Triples(sids_d,sids_w,k=5)

    Checking the members of the found triples

    >>> for k, item in s_ht.triples.items()[:5]: print k, item
    0 [(1, 18), (1, 21), (1, 33)]
    1 [(2, 40), (2, 22), (22, 40)]
    2 [(39, 22), (1, 9), (39, 17)]
    3 [(16, 6), (19, 6), (20, 6)]
    4 [(5, 15), (27, 15), (35, 15)]

    Finding headbanging triples by using 5 nearest neighbors with edge correction

    >>> s_ht2 = Headbanging_Triples(sids_d,sids_w,k=5,edgecor=True)

    Checking the members of the found triples

    >>> for k, item in s_ht2.triples.items()[:5]: print k, item
    0 [(1, 18), (1, 21), (1, 33)]
    1 [(2, 40), (2, 22), (22, 40)]
    2 [(39, 22), (1, 9), (39, 17)]
    3 [(16, 6), (19, 6), (20, 6)]
    4 [(5, 15), (27, 15), (35, 15)]

    Checking the extrapolated point that is introduced into the triples
    during edge correction

    >>> extrapolated = s_ht2.extra[72]

    Checking the observation IDs constituting the extrapolated triple

    >>> extrapolated[0]
    (89, 77)

    Checking the distances between the extrapolated point and the observations 89 and 77

    >>> round(extrapolated[1],5), round(extrapolated[2],6)
    (0.33753, 0.302707)
    """
    def __init__(self, data, w, k=5, t=3, angle=135.0, edgecor=False):
        if k < 3:
            raise ValueError("w should be NeareastNeighbors instance & the number of neighbors should be more than 3.")
        if not w.id_order_set:
            raise ValueError("w id_order must be set to align with the order of data")
        self.triples, points = {}, {}
        # Map each (index, Point) pair to a dict of its neighbors' Points,
        # keyed by neighbor offset.
        for i, pnt in enumerate(data):
            ng = w.neighbor_offsets[i]
            points[(i, Point(pnt))] = dict(zip(ng, [Point(d)
                                                    for d in data[ng]]))
        for i, pnt in points.keys():
            ng = points[(i, pnt)]
            tr, tr_dis = {}, []
            # Consider every pair of neighbors; keep pairs whose angle at
            # the focal point exceeds the angle criterion.
            for c in comb(ng.keys(), 2):
                p2, p3 = ng[c[0]], ng[c[-1]]
                ang = get_angle_between(Ray(pnt, p2), Ray(pnt, p3))
                if ang > angle or (ang < 0.0 and ang + 360 > angle):
                    tr[tuple(c)] = (p2, p3)
            if len(tr) > t:
                # Too many candidate triples: keep the t whose connecting
                # segment lies closest to the focal point.
                for c in tr.keys():
                    p2, p3 = tr[c]
                    tr_dis.append((get_segment_point_dist(
                        LineSegment(p2, p3), pnt), c))
                tr_dis = sorted(tr_dis)[:t]
                self.triples[i] = [trp for dis, trp in tr_dis]
            else:
                self.triples[i] = tr.keys()
        if edgecor:
            # Edge correction: points on the convex hull with no triples get
            # one synthetic triple built from an extrapolated point.
            self.extra = {}
            ps = dict([(p, i) for i, p in points.keys()])
            chull = convex_hull(ps.keys())
            chull = [p for p in chull if len(self.triples[ps[p]]) == 0]
            for point in chull:
                key = (ps[point], point)
                ng = points[key]
                # Examine neighbors from nearest to farthest.
                ng_dist = [(get_points_dist(point, p), p) for p in ng.values()]
                ng_dist_s = sorted(ng_dist, reverse=True)
                extra = None
                while extra is None and len(ng_dist_s) > 0:
                    p2 = ng_dist_s.pop()[-1]
                    p3s = ng.values()
                    p3s.remove(p2)
                    for p3 in p3s:
                        dist_p2_p3 = get_points_dist(p2, p3)
                        dist_p_p2 = get_points_dist(point, p2)
                        dist_p_p3 = get_points_dist(point, p3)
                        # Anchor the rays at the closer of p2/p3.
                        if dist_p_p2 <= dist_p_p3:
                            ray1, ray2, s_pnt, dist, c = Ray(p2, point), Ray(p2, p3), p2, dist_p_p2, (ps[p2], ps[p3])
                        else:
                            ray1, ray2, s_pnt, dist, c = Ray(p3, point), Ray(p3, p2), p3, dist_p_p3, (ps[p3], ps[p2])
                        ang = get_angle_between(ray1, ray2)
                        if ang >= 90 + angle / 2 or (ang < 0 and ang + 360 >= 90 + angle / 2):
                            # Extrapolate a synthetic third member for the triple.
                            ex_point = get_point_at_angle_and_dist(
                                ray1, angle, dist)
                            extra = [c, dist_p2_p3, get_points_dist(
                                s_pnt, ex_point)]
                            break
                # NOTE(review): if no neighbor pair satisfies the angle test,
                # extra stays None and the next line raises TypeError — confirm
                # whether that case can occur for real data.
                self.triples[ps[point]].append(extra[0])
                self.extra[ps[point]] = extra
+
+
class Headbanging_Median_Rate:
    """Headbanging Median Rate Smoothing

    Parameters
    ----------
    e           : array (n, 1)
                  event variable measured across n spatial units
    b           : array (n, 1)
                  population at risk variable measured across n spatial units
    t           : Headbanging_Triples instance
    aw          : array (n, 1)
                  auxiliary weight variable measured across n spatial units
    iteration   : integer
                  the number of iterations

    Attributes
    ----------
    r           : array (n, 1)
                  rate values from headbanging median smoothing

    Examples
    --------

    importing k-nearest neighbor weights creator

    >>> from pysal import knnW

    opening the sids2 shapefile

    >>> sids = pysal.open(pysal.examples.get_path('sids2.shp'), 'r')

    extracting the centroids of polygons in the sids2 data

    >>> sids_d = np.array([i.centroid for i in sids])

    creating a 5-nearest neighbors weights from the centroids

    >>> sids_w = knnW(sids_d,k=5)

    ensuring that the members in sids_w are ordered

    >>> if not sids_w.id_order_set: sids_w.id_order = sids_w.id_order

    finding headbanging triples by using 5 neighbors

    >>> s_ht = Headbanging_Triples(sids_d,sids_w,k=5)

    reading in the sids2 data table

    >>> sids_db = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')

    extracting the 10th and 9th columns in the sids2.dbf and
    using data values as event and population-at-risk variables

    >>> s_e, s_b = np.array(sids_db[:,9]), np.array(sids_db[:,8])

    computing headbanging median rates from s_e, s_b, and s_ht

    >>> sids_hb_r = Headbanging_Median_Rate(s_e,s_b,s_ht)

    extracting the computed rates through the property r of the Headbanging_Median_Rate instance

    >>> sids_hb_r.r[:5]
    array([ 0.00075586,  0.        ,  0.0008285 ,  0.0018315 ,  0.00498891])

    recomputing headbanging median rates with 5 iterations

    >>> sids_hb_r2 = Headbanging_Median_Rate(s_e,s_b,s_ht,iteration=5)

    extracting the computed rates through the property r of the Headbanging_Median_Rate instance

    >>> sids_hb_r2.r[:5]
    array([ 0.0008285 ,  0.00084331,  0.00086896,  0.0018315 ,  0.00498891])

    recomputing headbanging median rates by considering a set of auxiliary weights

    >>> sids_hb_r3 = Headbanging_Median_Rate(s_e,s_b,s_ht,aw=s_b)

    extracting the computed rates through the property r of the Headbanging_Median_Rate instance

    >>> sids_hb_r3.r[:5]
    array([ 0.00091659,  0.        ,  0.00156838,  0.0018315 ,  0.00498891])
    """
    def __init__(self, e, b, t, aw=None, iteration=1):
        self.r = e * 1.0 / b
        self.tr, self.aw = t.triples, aw
        # Fixed typo: was hasattr(t, 'exta'), which silently discarded the
        # edge-correction data produced by Headbanging_Triples(edgecor=True).
        if hasattr(t, 'extra'):
            self.extra = t.extra
        while iteration:
            self.__search_headbanging_median()
            iteration -= 1

    def __get_screens(self, id, triples, weighted=False):
        # Build the screening tuple for unit `id`: its own rate plus the
        # medians of the lowest and highest rates over its triples (and,
        # when weighted, the corresponding auxiliary-weight totals).
        r, tr = self.r, self.tr
        if len(triples) == 0:
            return r[id]
        if hasattr(self, 'extra') and id in self.extra:
            # Edge-corrected unit: a single triple whose last member's rate
            # is extrapolated from the stored distance ratio.
            extra = self.extra
            trp = list(triples[0])
            trp_r = r[trp]
            trp_r[-1] = trp_r[0] + (trp_r[0] - trp_r[-1]) * (
                extra[id][-1] * 1.0 / extra[id][1])
            trp_r = sorted(trp_r)
            if not weighted:
                # Fixed: originally returned the whole rate array r instead
                # of the unit's own rate r[id].
                return r[id], trp_r[0], trp_r[-1]
            else:
                # Fixed: `trp` was previously an undefined name here.
                trp_aw = self.aw[trp]
                extra_w = trp_aw[0] + (trp_aw[0] - trp_aw[-
                                                          1]) * (extra[id][-1] * 1.0 / extra[id][1])
                return r[id], trp_r[0], trp_r[-1], self.aw[id], trp_aw[0] + extra_w
        if not weighted:
            lowest, highest = [], []
            for trp in triples:
                trp_r = np.sort(r[list(trp)])
                lowest.append(trp_r[0])
                highest.append(trp_r[-1])
            return r[id], np.median(np.array(lowest)), np.median(np.array(highest))
        if weighted:
            lowest, highest = [], []
            lowest_aw, highest_aw = [], []
            for trp in triples:
                trp_r = r[list(trp)]
                # Structured array so the weights travel with the rates
                # through the sort.
                dtype = [('r', '%s' % trp_r.dtype), ('w',
                                                     '%s' % self.aw.dtype)]
                trp_r = np.array(zip(trp_r, list(trp)), dtype=dtype)
                trp_r.sort(order='r')
                lowest.append(trp_r['r'][0])
                highest.append(trp_r['r'][-1])
                lowest_aw.append(self.aw[trp_r['w'][0]])
                highest_aw.append(self.aw[trp_r['w'][-1]])
            wm_lowest = weighted_median(np.array(lowest), np.array(lowest_aw))
            wm_highest = weighted_median(
                np.array(highest), np.array(highest_aw))
            triple_members = flatten(triples, unique=False)
            return r[id], wm_lowest, wm_highest, self.aw[id] * len(triples), self.aw[triple_members].sum()

    def __get_median_from_screens(self, screens):
        # Turn a screening tuple into the unit's new rate.
        if isinstance(screens, float):
            # No triples: the raw rate passes through unchanged.
            return screens
        elif len(screens) == 3:
            return np.median(np.array(screens))
        elif len(screens) == 5:
            # Weighted screening: keep the unit's rate when it falls inside
            # the weighted-median band, otherwise clip to the band only when
            # the triples carry more weight than the unit itself.
            rk, wm_lowest, wm_highest, w1, w2 = screens
            if rk >= wm_lowest and rk <= wm_highest:
                return rk
            elif rk < wm_lowest and w1 < w2:
                return wm_lowest
            elif rk > wm_highest and w1 < w2:
                return wm_highest
            else:
                return rk

    def __search_headbanging_median(self):
        # One smoothing pass over all units, in triples-key order.
        r, tr = self.r, self.tr
        new_r = []
        for k in tr.keys():
            screens = self.__get_screens(
                k, tr[k], weighted=(self.aw is not None))
            new_r.append(self.__get_median_from_screens(screens))
        self.r = np.array(new_r)
diff --git a/pysal/esda/tests/__init__.py b/pysal/esda/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/esda/tests/test_gamma.py b/pysal/esda/tests/test_gamma.py
new file mode 100644
index 0000000..0cdae76
--- /dev/null
+++ b/pysal/esda/tests/test_gamma.py
@@ -0,0 +1,68 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.esda.gamma import Gamma
+
+
class Gamma_Tester(unittest.TestCase):
    """Unit test for Gamma Index"""

    def setUp(self):
        # 4x4 rook lattice; first half of the attribute vector is 0
        self.w = pysal.lat2W(4, 4)
        self.y = np.ones(16)
        self.y[0:8] = 0

    def _check(self, g, g_exp, z_exp, p_exp,
               min_exp=None, max_exp=None, mean_exp=None):
        # Compare a fitted Gamma object against expected statistics; the
        # min/max/mean trio is optional because not every case pins it.
        self.assertAlmostEqual(g.g, g_exp)
        self.assertAlmostEqual(g.g_z, z_exp)
        self.assertAlmostEqual(g.p_sim_g, p_exp)
        if min_exp is not None:
            self.assertAlmostEqual(g.min_g, min_exp)
            self.assertAlmostEqual(g.max_g, max_exp)
            self.assertAlmostEqual(g.mean_g, mean_exp)

    def test_Gamma(self):
        """Gamma index under the built-in and a custom similarity."""
        np.random.seed(12345)
        self._check(Gamma(self.y, self.w),
                    20.0, 3.1879280354548638, 0.003,
                    0.0, 20.0, 11.093093093093094)
        np.random.seed(12345)
        self._check(Gamma(self.y, self.w, operation='s'),
                    8.0, -3.7057554345954791, 0.001,
                    14.0, 48.0, 25.623623623623622)
        np.random.seed(12345)
        self._check(Gamma(self.y, self.w, operation='a'),
                    8.0, -3.7057554345954791, 0.001,
                    14.0, 48.0, 25.623623623623622)
        np.random.seed(12345)
        self._check(Gamma(self.y, self.w, standardize='y'),
                    32.0, 3.7057554345954791, 0.001,
                    -48.0, 20.0, -3.2472472472472473)
        np.random.seed(12345)

        def func(z, i, j):
            # user-supplied similarity: product of the two attribute values
            return z[i] * z[j]

        self._check(Gamma(self.y, self.w, operation=func),
                    20.0, 3.1879280354548638, 0.003)
+
+
# Assemble the module-level suite so the file can be run as a script.
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for case in [Gamma_Tester]:
    suite.addTest(loader.loadTestsFromTestCase(case))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/esda/tests/test_geary.py b/pysal/esda/tests/test_geary.py
new file mode 100644
index 0000000..1e12d2e
--- /dev/null
+++ b/pysal/esda/tests/test_geary.py
@@ -0,0 +1,61 @@
+"""Geary Unittest."""
+import unittest
+import pysal
+from pysal.esda import geary
+import numpy as np
+
+
class Geary_Tester(unittest.TestCase):
    """Geary class for unit tests."""

    def setUp(self):
        self.w = pysal.open(pysal.examples.get_path("book.gal")).read()
        f = pysal.open(pysal.examples.get_path("book.txt"))
        self.y = np.array(f.by_col['y'])

    def _check_analytical(self, c):
        # Statistics that are independent of the permutation machinery and
        # must therefore be identical with and without permutations.
        self.assertAlmostEqual(c.C, 0.33301083591331254)
        self.assertAlmostEqual(c.EC, 1.0)

        self.assertAlmostEqual(c.VC_norm, 0.031805300245097874)
        self.assertAlmostEqual(c.p_norm, 9.2018240680169505e-05)
        self.assertAlmostEqual(c.z_norm, -3.7399778367629564)
        self.assertAlmostEqual(c.seC_norm, 0.17834040553138225)

        self.assertAlmostEqual(c.VC_rand, 0.018437747611029367)
        self.assertAlmostEqual(c.p_rand, 4.5059156794646782e-07)
        self.assertAlmostEqual(c.z_rand, -4.9120733751216008)
        self.assertAlmostEqual(c.seC_rand, 0.13578566791465646)

    def test_Geary(self):
        c = geary.Geary(self.y, self.w, permutations=0)
        self._check_analytical(c)

        np.random.seed(12345)
        c = geary.Geary(self.y, self.w, permutations=999)
        self._check_analytical(c)

        # permutation-based (simulated) results, seeded above
        self.assertAlmostEqual(c.EC_sim, 0.9980676303238214)
        self.assertAlmostEqual(c.VC_sim, 0.034430408799858946)
        self.assertAlmostEqual(c.p_sim, 0.001)
        self.assertAlmostEqual(c.p_z_sim, 0.00016908100514811952)
        self.assertAlmostEqual(c.z_sim, -3.5841621159171746)
        self.assertAlmostEqual(c.seC_sim, 0.18555432843202269)
+
+
# Assemble the module-level suite so the file can be run as a script.
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for case in [Geary_Tester]:
    suite.addTest(loader.loadTestsFromTestCase(case))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/esda/tests/test_getisord.py b/pysal/esda/tests/test_getisord.py
new file mode 100644
index 0000000..97b0baf
--- /dev/null
+++ b/pysal/esda/tests/test_getisord.py
@@ -0,0 +1,59 @@
+import unittest
+from pysal.weights.Distance import DistanceBand
+from pysal.esda import getisord
+import numpy as np
+
# Shared fixtures: six sample points, a 15-unit distance-band weights
# object built from them, and the attribute vector used by both test cases.
POINTS = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
W = DistanceBand(POINTS, threshold=15)
Y = np.array([2, 3, 3.2, 5, 8, 7])
+
+
class G_Tester(unittest.TestCase):
    """Tests for the global Getis-Ord G statistic."""

    def setUp(self):
        self.w = W
        self.y = Y
        np.random.seed(10)

    def test_G(self):
        g = getisord.G(self.y, self.w)
        self.assertAlmostEqual(g.G, 0.55709779, places=8)
        self.assertAlmostEqual(g.p_norm, 0.1729, places=4)
+
+
class G_Local_Tester(unittest.TestCase):
    """Tests for the local Getis-Ord G statistics (G_Local)."""

    def setUp(self):
        self.w = W
        self.y = Y
        np.random.seed(10)

    def _check_first(self, lg, z_exp, z_places):
        # All four variants share the same pseudo p-value for the first
        # observation; only the z-score differs.
        self.assertAlmostEqual(lg.Zs[0], z_exp, places=z_places)
        self.assertAlmostEqual(lg.p_sim[0], 0.10100000000000001, places=7)

    def test_G_Local_Binary(self):
        self._check_first(getisord.G_Local(self.y, self.w, transform='B'),
                          -1.0136729, 7)

    def test_G_Local_Row_Standardized(self):
        self._check_first(getisord.G_Local(self.y, self.w, transform='R'),
                          -0.62074534, 7)

    def test_G_star_Local_Binary(self):
        self._check_first(
            getisord.G_Local(self.y, self.w, transform='B', star=True),
            -1.39727626, 8)

    def test_G_star_Row_Standardized(self):
        self._check_first(
            getisord.G_Local(self.y, self.w, transform='R', star=True),
            -0.62488094, 8)
+
# Assemble the module-level suite so the file can be run as a script.
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for case in [G_Tester, G_Local_Tester]:
    suite.addTest(loader.loadTestsFromTestCase(case))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/esda/tests/test_join_counts.py b/pysal/esda/tests/test_join_counts.py
new file mode 100644
index 0000000..ed51af8
--- /dev/null
+++ b/pysal/esda/tests/test_join_counts.py
@@ -0,0 +1,41 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.esda.join_counts import Join_Counts
+
+
class Join_Counts_Tester(unittest.TestCase):
    """Unit test for Join Counts"""

    def setUp(self):
        # 4x4 rook lattice; first half black (0), second half white (1)
        self.w = pysal.lat2W(4, 4)
        self.y = np.ones(16)
        self.y[0:8] = 0

    def test_Join_Counts(self):
        """Test method"""
        np.random.seed(12345)
        jc = Join_Counts(self.y, self.w)
        self.assertAlmostEqual(jc.bb, 10.0)
        self.assertAlmostEqual(jc.bw, 4.0)
        self.assertAlmostEqual(jc.ww, 10.0)
        self.assertAlmostEqual(jc.J, 24.0)
        # black-black joins under 999 permutations
        self.assertEqual(len(jc.sim_bb), 999)
        self.assertAlmostEqual(jc.p_sim_bb, 0.003)
        self.assertAlmostEqual(np.mean(jc.sim_bb), 5.5465465465465469)
        self.assertAlmostEqual(np.max(jc.sim_bb), 10.0)
        self.assertAlmostEqual(np.min(jc.sim_bb), 0.0)
        # black-white joins under 999 permutations
        self.assertEqual(len(jc.sim_bw), 999)
        self.assertAlmostEqual(jc.p_sim_bw, 1.0)
        self.assertAlmostEqual(np.mean(jc.sim_bw), 12.811811811811811)
        self.assertAlmostEqual(np.max(jc.sim_bw), 24.0)
        self.assertAlmostEqual(np.min(jc.sim_bw), 7.0)
+
# Assemble the module-level suite so the file can be run as a script.
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for case in [Join_Counts_Tester]:
    suite.addTest(loader.loadTestsFromTestCase(case))

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/esda/tests/test_mapclassify.py b/pysal/esda/tests/test_mapclassify.py
new file mode 100644
index 0000000..0bca060
--- /dev/null
+++ b/pysal/esda/tests/test_mapclassify.py
@@ -0,0 +1,373 @@
+import pysal
+from pysal.esda.mapclassify import *
+from pysal.esda.mapclassify import binC, bin, bin1d
+import numpy as np
+import unittest
+
+
class TestQuantile(unittest.TestCase):
    """Tests for the quantile helper."""

    def test_quantile(self):
        y = np.arange(1000)
        expected = np.array([333., 666., 999.])
        np.testing.assert_almost_equal(expected, quantile(y, k=3))

    def test_quantile_k4(self):
        x = np.arange(1000)
        qx = quantile(x, k=4)
        expected = np.array([249.75, 499.5, 749.25, 999.])
        np.testing.assert_array_almost_equal(expected, qx)

    def test_quantile_k(self):
        # seed so the random draw is reproducible across runs
        np.random.seed(12345)
        y = np.random.random(1000)
        for k in range(5, 10):
            # quantile must always return exactly k cut points
            self.assertEqual(k, len(quantile(y, k)))
+
+
class TestBinC(unittest.TestCase):
    """Tests for binC, the categorical bin-membership helper."""

    def test_bin_c(self):
        bins = range(2, 8)
        data = np.array([[7, 5, 6], [2, 3, 5], [7, 2, 2], [3, 6, 7],
                         [6, 3, 4], [6, 7, 4], [6, 5, 6], [4, 6, 7],
                         [4, 6, 3], [3, 2, 7]])
        expected = np.array([[5, 3, 4], [0, 1, 3], [5, 0, 0], [1, 4, 5],
                             [4, 1, 2], [4, 5, 2], [4, 3, 4], [2, 4, 5],
                             [2, 4, 1], [1, 0, 5]])
        np.testing.assert_array_equal(expected, binC(data, bins))
+
+
class TestBin(unittest.TestCase):
    """Tests for bin, the interval bin-membership helper."""

    def test_bin(self):
        data = np.array([[7, 13, 14], [10, 11, 13], [7, 17, 2], [18, 3, 14],
                         [9, 15, 8], [7, 13, 12], [16, 6, 11], [19, 2, 15],
                         [11, 11, 9], [3, 2, 19]])
        bins = [10, 15, 20]
        expected = np.array([[0, 1, 1], [0, 1, 1], [0, 2, 0], [2, 0, 1],
                             [0, 1, 0], [0, 1, 1], [2, 0, 1], [2, 0, 1],
                             [1, 1, 0], [0, 0, 2]])
        np.testing.assert_array_equal(expected, bin(data, bins))
+
+
class TestBin1d(unittest.TestCase):
    """Tests for bin1d, 1-d binning returning (ids, counts)."""

    def test_bin1d(self):
        y = np.arange(100, dtype='float')
        bins = [25, 74, 100]
        # 0..25 -> bin 0, 26..74 -> bin 1, 75..99 -> bin 2
        expected_ids = np.array([0] * 26 + [1] * 49 + [2] * 25)
        expected_counts = np.array([26, 49, 25])
        # call bin1d once and unpack, instead of binning the data twice
        binIds, counts = bin1d(y, bins)
        np.testing.assert_array_equal(expected_ids, binIds)
        np.testing.assert_array_equal(expected_counts, counts)
+
+
class TestNaturalBreaks(unittest.TestCase):
    """Tests for the Natural_Breaks classifier."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([record[-1] for record in dat])

    def test_natural_breaks(self):
        # TODO: implement a direct test of the natural_breaks helper
        assert True

    def test_Natural_Breaks(self):
        nb = Natural_Breaks(self.V, 5)
        self.assertEqual(nb.k, 5)
        self.assertEqual(len(nb.counts), 5)
        np.testing.assert_array_almost_equal(
            nb.counts, np.array([14, 13, 14, 10, 7]))

    def test_Natural_Breaks_stability(self):
        # repeated runs on the same data must keep k and the class count
        for i in range(10):
            nb = Natural_Breaks(self.V, 5)
            self.assertEqual(nb.k, 5)
            self.assertEqual(len(nb.counts), 5)

    def test_Natural_Breaks_randomData(self):
        # random data at several scales must not break the classifier
        for i in range(10):
            V = np.random.random(50) * (i + 1)
            nb = Natural_Breaks(V, 5)
            self.assertEqual(nb.k, 5)
            self.assertEqual(len(nb.counts), 5)
+
+
class TestMapClassifier(unittest.TestCase):
    """Placeholder tests for the Map_Classifier base class.

    Every case below is a TODO stub that currently always passes; the
    commented lines sketch the intended assertions.
    """

    def test_Map_Classifier(self):
        # map__classifier = Map_Classifier(y)
        assert True  # TODO: implement your test here

    def test___repr__(self):
        # map__classifier = Map_Classifier(y)
        # self.assertEqual(expected, map__classifier.__repr__())
        assert True  # TODO: implement your test here

    def test___str__(self):
        # map__classifier = Map_Classifier(y)
        # self.assertEqual(expected, map__classifier.__str__())
        assert True  # TODO: implement your test here

    def test_get_adcm(self):
        # map__classifier = Map_Classifier(y)
        # self.assertEqual(expected, map__classifier.get_adcm())
        assert True  # TODO: implement your test here

    def test_get_gadf(self):
        # map__classifier = Map_Classifier(y)
        # self.assertEqual(expected, map__classifier.get_gadf())
        assert True  # TODO: implement your test here

    def test_get_tss(self):
        # map__classifier = Map_Classifier(y)
        # self.assertEqual(expected, map__classifier.get_tss())
        assert True  # TODO: implement your test here
+
+
class TestEqualInterval(unittest.TestCase):
    """Equal_Interval classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        # classify on the last column of each record
        self.V = np.array([rec[-1] for rec in dat])

    def test_Equal_Interval(self):
        ei = Equal_Interval(self.V)
        expected_counts = np.array([57, 0, 0, 0, 1])
        expected_bins = np.array(
            [822.394, 1644.658, 2466.922, 3289.186, 4111.45])
        np.testing.assert_array_almost_equal(ei.counts, expected_counts)
        np.testing.assert_array_almost_equal(ei.bins, expected_bins)
+
+
class TestPercentiles(unittest.TestCase):
    """Percentiles classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Percentiles(self):
        pc = Percentiles(self.V)
        expected_bins = np.array([0.1357, 0.553, 9.365, 213.914,
                                  2179.948, 4111.45])
        expected_counts = np.array([1, 5, 23, 23, 5, 1])
        np.testing.assert_array_almost_equal(pc.bins, expected_bins)
        np.testing.assert_array_almost_equal(pc.counts, expected_counts)
+
+
class TestBoxPlot(unittest.TestCase):
    """Box_Plot classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Box_Plot(self):
        bp = Box_Plot(self.V)
        expected_bins = np.array([-52.87625, 2.5675, 9.365, 39.53,
                                  94.97375, 4111.45])
        np.testing.assert_array_almost_equal(bp.bins, expected_bins)
+
+
class TestQuantiles(unittest.TestCase):
    """Quantiles classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Quantiles(self):
        q = Quantiles(self.V, k=5)
        expected_bins = np.array([1.464, 5.798, 13.278, 54.616, 4111.45])
        expected_counts = np.array([12, 11, 12, 11, 12])
        np.testing.assert_array_almost_equal(q.bins, expected_bins)
        np.testing.assert_array_almost_equal(q.counts, expected_counts)
+
+
class TestStdMean(unittest.TestCase):
    """Std_Mean classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Std_Mean(self):
        s = Std_Mean(self.V)
        expected_bins = np.array([-967.36235382, -420.71712519,
                                  672.57333208, 1219.21856072, 4111.45])
        expected_counts = np.array([0, 0, 56, 1, 1])
        np.testing.assert_array_almost_equal(s.bins, expected_bins)
        np.testing.assert_array_almost_equal(s.counts, expected_counts)
+
+
class TestMaximumBreaks(unittest.TestCase):
    """Maximum_Breaks classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([record[-1] for record in dat])

    def test_Maximum_Breaks(self):
        mb = Maximum_Breaks(self.V, k=5)
        self.assertEqual(mb.k, 5)
        np.testing.assert_array_almost_equal(
            mb.bins, np.array([146.005, 228.49, 546.675, 2417.15, 4111.45]))
        np.testing.assert_array_almost_equal(
            mb.counts, np.array([50, 2, 4, 1, 1]))
+
+
class TestFisherJenks(unittest.TestCase):
    """Fisher_Jenks classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([record[-1] for record in dat])

    def test_Fisher_Jenks(self):
        fj = Fisher_Jenks(self.V)
        # adcm is a float; compare with tolerance rather than exact equality
        self.assertAlmostEqual(fj.adcm, 799.24)
        np.testing.assert_array_almost_equal(
            fj.bins, np.array([75.29, 192.05, 370.5, 722.85, 4111.45]))
        np.testing.assert_array_almost_equal(
            fj.counts, np.array([49, 3, 4, 1, 1]))
+
+
class TestJenksCaspall(unittest.TestCase):
    """Jenks_Caspall classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Jenks_Caspall(self):
        np.random.seed(10)
        jc = Jenks_Caspall(self.V, k=5)
        expected_counts = np.array([14, 13, 14, 10, 7])
        expected_bins = np.array([1.81, 7.6, 29.82, 181.27, 4111.45])
        np.testing.assert_array_almost_equal(jc.counts, expected_counts)
        np.testing.assert_array_almost_equal(jc.bins, expected_bins)
+
+
class TestJenksCaspallSampled(unittest.TestCase):
    """Jenks_Caspall_Sampled compared against the full classifier.

    The original setUp loaded calempdensity.csv into self.V, but the test
    only uses synthetic random data, so that unused fixture is removed.
    """

    def test_Jenks_Caspall_Sampled(self):
        np.random.seed(100)
        x = np.random.random(100000)
        jc = Jenks_Caspall(x)
        jcs = Jenks_Caspall_Sampled(x)
        np.testing.assert_array_almost_equal(
            jc.bins, np.array([0.19718393, 0.39655886, 0.59648522,
                               0.79780763, 0.99997979]))
        np.testing.assert_array_almost_equal(
            jcs.bins, np.array([0.20856569, 0.41513931, 0.62457691,
                                0.82561423, 0.99997979]))
+
+
class TestJenksCaspallForced(unittest.TestCase):
    """Jenks_Caspall_Forced classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Jenks_Caspall_Forced(self):
        np.random.seed(100)
        jcf = Jenks_Caspall_Forced(self.V, k=5)
        expected_bins = np.array([[1.34], [5.9], [16.7], [50.65], [4111.45]])
        expected_counts = np.array([12, 12, 13, 9, 12])
        np.testing.assert_array_almost_equal(jcf.bins, expected_bins)
        np.testing.assert_array_almost_equal(jcf.counts, expected_counts)
+
+
class TestUserDefined(unittest.TestCase):
    """User_Defined classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_User_Defined(self):
        bins = [20, max(self.V)]
        ud = User_Defined(self.V, bins)
        np.testing.assert_array_almost_equal(ud.bins,
                                             np.array([20., 4111.45]))
        np.testing.assert_array_almost_equal(ud.counts,
                                             np.array([37, 21]))
+
+
class TestMaxPClassifier(unittest.TestCase):
    """Max_P_Classifier on the employment density data."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([rec[-1] for rec in dat])

    def test_Max_P_Classifier(self):
        np.random.seed(100)
        mp = Max_P_Classifier(self.V)
        expected_bins = np.array([8.7, 16.7, 20.47, 66.26, 4111.45])
        expected_counts = np.array([29, 8, 1, 10, 10])
        np.testing.assert_array_almost_equal(mp.bins, expected_bins)
        np.testing.assert_array_almost_equal(mp.counts, expected_counts)
+
+
class TestGadf(unittest.TestCase):
    """Tests for the gadf (goodness of absolute deviation fit) helper."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([record[-1] for record in dat])

    def test_gadf(self):
        qgadf = gadf(self.V)
        self.assertEqual(qgadf[0], 15)
        # the fit value is a float; avoid exact equality
        self.assertAlmostEqual(qgadf[-1], 0.37402575909092828)
+
+
class TestKClassifiers(unittest.TestCase):
    """Tests for the K_classifiers model-selection helper."""

    def setUp(self):
        dat = pysal.open(pysal.examples.get_path("calempdensity.csv"))
        self.V = np.array([record[-1] for record in dat])

    def test_K_classifiers(self):
        np.random.seed(100)
        ks = K_classifiers(self.V)
        self.assertEqual(ks.best.name, 'Fisher_Jenks')
        # gadf is a float; compare with tolerance, not exact equality
        self.assertAlmostEqual(ks.best.gadf, 0.84810327199081048)
        self.assertEqual(ks.best.k, 4)
+
# run all classifier tests when executed as a script
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/esda/tests/test_mixture_smoothing.py b/pysal/esda/tests/test_mixture_smoothing.py
new file mode 100644
index 0000000..47085d0
--- /dev/null
+++ b/pysal/esda/tests/test_mixture_smoothing.py
@@ -0,0 +1,43 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.esda import mixture_smoothing as m_s
+
+
class MS_Tester(unittest.TestCase):
    """Mixture_Smoothing Unit Tests"""

    def setUp(self):
        # event counts and populations at risk for four regions
        self.e = np.array([10, 5, 12, 20])
        self.b = np.array([100, 150, 80, 200])

    def test_NP_Mixture_Smoother(self):
        """Test the main class"""
        mix = m_s.NP_Mixture_Smoother(self.e, self.b)
        np.testing.assert_array_almost_equal(mix.r, np.array(
            [0.10982278, 0.03445531, 0.11018404, 0.11018604]))
        np.testing.assert_array_almost_equal(
            mix.category, np.array([1, 0, 1, 1]))
        # seed mixture: component weights and starting rates
        left, right = mix.getSeed()
        np.testing.assert_array_almost_equal(left, np.array([0.5, 0.5]))
        np.testing.assert_array_almost_equal(
            right, np.array([0.03333333, 0.15]))
        d = mix.mixalg()
        np.testing.assert_array_almost_equal(
            d['mix_den'], np.array([0., 0., 0., 0.]))
        np.testing.assert_array_almost_equal(d['gradient'], np.array([0.]))
        np.testing.assert_array_almost_equal(d['p'], np.array([1.]))
        np.testing.assert_array_almost_equal(
            d['grid'], np.array([11.27659574]))
        self.assertEqual(d['k'], 1)
        self.assertEqual(d['accuracy'], 1.0)
        # final rate estimates and their category assignments
        left, right = mix.getRateEstimates()
        np.testing.assert_array_almost_equal(
            left, np.array([0.0911574, 0.0911574,
                            0.0911574, 0.0911574]))
        np.testing.assert_array_almost_equal(right, np.array([1, 1, 1, 1]))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/pysal/esda/tests/test_moran.py b/pysal/esda/tests/test_moran.py
new file mode 100644
index 0000000..9701c75
--- /dev/null
+++ b/pysal/esda/tests/test_moran.py
@@ -0,0 +1,94 @@
+import unittest
+import pysal
+from pysal.esda import moran
+import numpy as np
+
+
class Moran_Tester(unittest.TestCase):
    """Global Moran's I on the St. Louis homicide example data."""

    def setUp(self):
        self.w = pysal.open(pysal.examples.get_path("stl.gal")).read()
        f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
        self.y = np.array(f.by_col['HR8893'])

    def test_moran(self):
        mi = moran.Moran(self.y, self.w, two_tailed=False)
        self.assertAlmostEqual(mi.I, 0.24365582621771659, 7)
        self.assertAlmostEqual(mi.p_norm, 0.00013573931385468807)

    def test_sids(self):
        w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
        SIDR = np.array(f.by_col("SIDR74"))
        mi = pysal.Moran(SIDR, w, two_tailed=False)
        self.assertAlmostEqual(mi.I, 0.24772519320480135)
        self.assertAlmostEqual(mi.p_norm, 5.7916539074498452e-05)
+
+
class Moran_Rate_Tester(unittest.TestCase):
    """Rate-based Moran's I on the SIDS example data."""

    def setUp(self):
        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
        self.e = np.array(f.by_col['SID79'])
        self.b = np.array(f.by_col['BIR79'])

    def test_moran_rate(self):
        mi = moran.Moran_Rate(self.e, self.b, self.w, two_tailed=False)
        self.assertAlmostEqual(mi.I, 0.16622343552567395, 7)
        self.assertAlmostEqual(mi.p_norm, 0.004191499504892171)
+
+
class Moran_BV_matrix_Tester(unittest.TestCase):
    """Bivariate Moran matrix on the SIDS example data."""

    def setUp(self):
        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
        varnames = ['SIDR74', 'SIDR79', 'NWR74', 'NWR79']
        self.names = varnames
        # build directly as an attribute; avoids a local shadowing the
        # built-in vars()
        self.vars = [np.array(f.by_col[var]) for var in varnames]
        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()

    def test_Moran_BV_matrix(self):
        res = moran.Moran_BV_matrix(self.vars, self.w, varnames=self.names)
        self.assertAlmostEqual(res[(0, 1)].I, 0.19362610652874668)
        self.assertAlmostEqual(res[(3, 0)].I, 0.37701382542927858)
+
+
class Moran_Local_Tester(unittest.TestCase):
    """Local Moran's I on the desmith example data."""

    def setUp(self):
        np.random.seed(10)
        self.w = pysal.open(pysal.examples.get_path("desmith.gal")).read()
        f = pysal.open(pysal.examples.get_path("desmith.txt"))
        self.y = np.array(f.by_col['z'])

    def test_Moran_Local(self):
        lm = moran.Moran_Local(
            self.y, self.w, transformation="r", permutations=99)
        self.assertAlmostEqual(lm.z_sim[0], -0.081383956359666748)
        self.assertAlmostEqual(lm.p_z_sim[0], 0.46756830387716064)
        self.assertAlmostEqual(lm.VI_sim, 0.2067126047680822)
+
+
class Moran_Local_Rate_Tester(unittest.TestCase):
    """Rate-based local Moran's I on the SIDS example data."""

    def setUp(self):
        np.random.seed(10)
        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
        self.e = np.array(f.by_col['SID79'])
        self.b = np.array(f.by_col['BIR79'])

    def test_moran_rate(self):
        lm = moran.Moran_Local_Rate(self.e, self.b, self.w,
                                    transformation="r", permutations=99)
        self.assertAlmostEqual(lm.z_sim[0], -0.27099998923550017)
        self.assertAlmostEqual(lm.p_z_sim[0], 0.39319552026912641)
        self.assertAlmostEqual(lm.VI_sim, 0.21879403675396222)
+
+
# Assemble the module-level suite. The rate-based testers were previously
# defined but never added here, so running this file as a script silently
# skipped them; they are now included.
suite = unittest.TestSuite()
test_classes = [Moran_Tester, Moran_Rate_Tester, Moran_BV_matrix_Tester,
                Moran_Local_Tester, Moran_Local_Rate_Tester]
for i in test_classes:
    a = unittest.TestLoader().loadTestsFromTestCase(i)
    suite.addTest(a)

if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/esda/tests/test_smoothing.py b/pysal/esda/tests/test_smoothing.py
new file mode 100644
index 0000000..cfc4637
--- /dev/null
+++ b/pysal/esda/tests/test_smoothing.py
@@ -0,0 +1,248 @@
+import unittest
+import pysal
+from pysal.esda import smoothing as sm
+from pysal import knnW
+import numpy as np
+
+
+class TestFlatten(unittest.TestCase):
+    def setUp(self):
+        self.input = [[1, 2], [3, 3, 4], [5, 6]]
+
+    def test_flatten(self):
+        out1 = sm.flatten(self.input)
+        out2 = sm.flatten(self.input, unique=False)
+        self.assertEquals(out1, [1, 2, 3, 4, 5, 6])
+        self.assertEquals(out2, [1, 2, 3, 3, 4, 5, 6])
+
+
+class TestWMean(unittest.TestCase):
+    def setUp(self):
+        self.d = np.array([5, 4, 3, 1, 2])
+        self.w1 = np.array([10, 22, 9, 2, 5])
+        self.w2 = np.array([10, 14, 17, 2, 5])
+
+    def test_weighted_median(self):
+        out1 = sm.weighted_median(self.d, self.w1)
+        out2 = sm.weighted_median(self.d, self.w2)
+        self.assertEquals(out1, 4)
+        self.assertEquals(out2, 3.5)
+
+
+class TestAgeStd(unittest.TestCase):
+    def setUp(self):
+        self.e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+        self.b = np.array([1000, 1000, 1100, 900, 1000, 900, 1100, 900])
+        self.s_e = np.array([100, 45, 120, 100, 50, 30, 200, 80])
+        self.s_b = s = np.array([1000, 900, 1000, 900, 1000, 900, 1000, 900])
+        self.n = 2
+
+    def test_crude_age_standardization(self):
+        crude = sm.crude_age_standardization(self.e, self.b, self.n).round(8)
+        crude_exp = np.array([0.02375000, 0.02666667])
+        self.assertEquals(list(crude), list(crude_exp))
+
+    def test_direct_age_standardization(self):
+        direct = np.array(sm.direct_age_standardization(
+            self.e, self.b, self.s_b, self.n)).round(8)
+        direct_exp = np.array([[0.02374402, 0.01920491,
+                                0.02904848], [0.02665072, 0.02177143, 0.03230508]])
+        self.assertEquals(list(direct.flatten()), list(direct_exp.flatten()))
+
+    def test_indirect_age_standardization(self):
+        indirect = np.array(sm.indirect_age_standardization(
+            self.e, self.b, self.s_e, self.s_b, self.n)).round(8)
+        indirect_exp = np.array([[0.02372382, 0.01940230,
+                                  0.02900789], [0.02610803, .02154304, 0.03164035]])
+        self.assertEquals(
+            list(indirect.flatten()), list(indirect_exp.flatten()))
+
+
+class TestSRate(unittest.TestCase):
+    def setUp(self):
+        sids = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
+        self.w = pysal.open(pysal.examples.get_path('sids2.gal'), 'r').read()
+        self.b, self.e = np.array(sids[:, 8]), np.array(sids[:, 9])
+        er = [0.453433, 0.000000, 0.775871, 0.973810, 3.133190]
+        eb = [0.0016973, 0.0017054, 0.0017731, 0.0020129, 0.0035349]
+        sr = [0.0009922, 0.0012639, 0.0009740, 0.0007605, 0.0050154]
+        smr = [0.00083622, 0.00109402, 0.00081567, 0.0, 0.0048209]
+        smr_w = [0.00127146, 0.00127146, 0.0008433, 0.0, 0.0049889]
+        smr2 = [0.00091659, 0.00087641, 0.00091073, 0.0, 0.00467633]
+        self.er = [round(i, 5) for i in er]
+        self.eb = [round(i, 7) for i in eb]
+        self.sr = [round(i, 7) for i in sr]
+        self.smr = [round(i, 7) for i in smr]
+        self.smr_w = [round(i, 7) for i in smr_w]
+        self.smr2 = [round(i, 7) for i in smr2]
+
+    def test_Excess_Risk(self):
+        out_er = sm.Excess_Risk(self.e, self.b).r
+        out_er = [round(i, 5) for i in out_er[:5]]
+        self.assertEquals(out_er, self.er)
+
+    def test_Empirical_Bayes(self):
+        out_eb = sm.Empirical_Bayes(self.e, self.b).r
+        out_eb = [round(i, 7) for i in out_eb[:5]]
+        self.assertEquals(out_eb, self.eb)
+
+    def test_Spatial_Empirical_Bayes(self):
+        stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
+        stl_e, stl_b = np.array(stl[:, 10]), np.array(stl[:, 13])
+        stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()
+        if not stl_w.id_order_set:
+            stl_w.id_order = range(1, len(stl) + 1)
+        s_eb = sm.Spatial_Empirical_Bayes(stl_e, stl_b, stl_w)
+        s_ebr10 = np.array([4.01485749e-05, 3.62437513e-05,
+                            4.93034844e-05, 5.09387329e-05, 3.72735210e-05,
+                            3.69333797e-05, 5.40245456e-05, 2.99806055e-05,
+                            3.73034109e-05, 3.47270722e-05])
+        np.testing.assert_array_almost_equal(s_ebr10, s_eb.r[:10])
+
+    def test_Spatial_Rate(self):
+        out_sr = sm.Spatial_Rate(self.e, self.b, self.w).r
+        out_sr = [round(i, 7) for i in out_sr[:5]]
+        self.assertEquals(out_sr, self.sr)
+
+    def test_Spatial_Median_Rate(self):
+        out_smr = sm.Spatial_Median_Rate(self.e, self.b, self.w).r
+        out_smr_w = sm.Spatial_Median_Rate(self.e, self.b, self.w, aw=self.b).r
+        out_smr2 = sm.Spatial_Median_Rate(
+            self.e, self.b, self.w, iteration=2).r
+        out_smr = [round(i, 7) for i in out_smr[:5]]
+        out_smr_w = [round(i, 7) for i in out_smr_w[:5]]
+        out_smr2 = [round(i, 7) for i in out_smr2[:5]]
+        self.assertEquals(out_smr, self.smr)
+        self.assertEquals(out_smr_w, self.smr_w)
+        self.assertEquals(out_smr2, self.smr2)
+
+
+class TestHB(unittest.TestCase):
+    def setUp(self):
+        sids = pysal.open(pysal.examples.get_path('sids2.shp'), 'r')
+        self.sids = sids
+        self.d = np.array([i.centroid for i in sids])
+        self.w = knnW(self.d, k=5)
+        if not self.w.id_order_set:
+            self.w.id_order = self.w.id_order
+        sids_db = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
+        self.b, self.e = np.array(sids_db[:, 8]), np.array(sids_db[:, 9])
+
+    def test_Headbanging_Triples(self):
+        ht = sm.Headbanging_Triples(self.d, self.w)
+        self.assertEquals(len(ht.triples), len(self.d))
+        ht2 = sm.Headbanging_Triples(self.d, self.w, edgecor=True)
+        self.assertTrue(hasattr(ht2, 'extra'))
+        self.assertEquals(len(ht2.triples), len(self.d))
+        htr = sm.Headbanging_Median_Rate(self.e, self.b, ht2, iteration=5)
+        self.assertEquals(len(htr.r), len(self.e))
+        for i in htr.r:
+            self.assertTrue(i is not None)
+
+    def test_Headbanging_Median_Rate(self):
+        sids_d = np.array([i.centroid for i in self.sids])
+        sids_w = pysal.knnW(sids_d, k=5)
+        if not sids_w.id_order_set:
+            sids_w.id_order = sids_w.id_order
+        s_ht = sm.Headbanging_Triples(sids_d, sids_w, k=5)
+        sids_db = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
+        s_e, s_b = np.array(sids_db[:, 9]), np.array(sids_db[:, 8])
+        sids_hb_r = sm.Headbanging_Median_Rate(s_e, s_b, s_ht)
+        sids_hb_rr5 = np.array([0.00075586, 0.,
+                                0.0008285, 0.0018315, 0.00498891])
+        np.testing.assert_array_almost_equal(sids_hb_rr5, sids_hb_r.r[:5])
+        sids_hb_r2 = sm.Headbanging_Median_Rate(s_e, s_b, s_ht, iteration=5)
+        sids_hb_r2r5 = np.array([0.0008285, 0.00084331,
+                                 0.00086896, 0.0018315, 0.00498891])
+        np.testing.assert_array_almost_equal(sids_hb_r2r5, sids_hb_r2.r[:5])
+        sids_hb_r3 = sm.Headbanging_Median_Rate(s_e, s_b, s_ht, aw=s_b)
+        sids_hb_r3r5 = np.array([0.00091659, 0.,
+                                 0.00156838, 0.0018315, 0.00498891])
+        np.testing.assert_array_almost_equal(sids_hb_r3r5, sids_hb_r3.r[:5])
+
+
+class TestKernel_AgeAdj_SM(unittest.TestCase):
+    def setUp(self):
+        self.e = np.array([10, 1, 3, 4, 2, 5])
+        self.b = np.array([100, 15, 20, 20, 80, 90])
+        self.e1 = np.array([10, 8, 1, 4, 3, 5, 4, 3, 2, 1, 5, 3])
+        self.b1 = np.array([100, 90, 15, 30, 25, 20, 30, 20, 80, 80, 90, 60])
+        self.s = np.array([98, 88, 15, 29, 20, 23, 33, 25, 76, 80, 89, 66])
+        self.points = [(
+            10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
+        self.kw = pysal.weights.Kernel(self.points)
+        if not self.kw.id_order_set:
+            self.kw.id_order = range(0, len(self.points))
+
+    def test_Kernel_Smoother(self):
+        kr = sm.Kernel_Smoother(self.e, self.b, self.kw)
+        exp = [0.10543301, 0.0858573, 0.08256196, 0.09884584,
+               0.04756872, 0.04845298]
+        self.assertEquals(list(kr.r.round(8)), exp)
+
+    def test_Age_Adjusted_Smoother(self):
+        ar = sm.Age_Adjusted_Smoother(self.e1, self.b1, self.kw, self.s)
+        exp = [0.10519625, 0.08494318, 0.06440072, 0.06898604,
+               0.06952076, 0.05020968]
+        self.assertEquals(list(ar.r.round(8)), exp)
+
+    def test_Disk_Smoother(self):
+        self.kw.transform = 'b'
+        exp = [0.12222222000000001, 0.10833333, 0.08055556,
+               0.08944444, 0.09944444, 0.09351852]
+        disk = sm.Disk_Smoother(self.e, self.b, self.kw)
+        self.assertEqual(list(disk.r.round(8)), exp)
+
+    def test_Spatial_Filtering(self):
+        points = np.array(self.points)
+        bbox = [[0, 0], [45, 45]]
+        sf = sm.Spatial_Filtering(bbox, points, self.e, self.b, 2, 2, r=30)
+        exp = [0.11111111, 0.11111111, 0.20000000000000001, 0.085106379999999995,
+               0.076923080000000005, 0.05789474, 0.052173909999999997, 0.066666669999999997, 0.04117647]
+        self.assertEqual(list(sf.r.round(8)), exp)
+
+
+class TestUtils(unittest.TestCase):
+    def test_sum_by_n(self):
+        d = np.array([10, 9, 20, 30])
+        w = np.array([0.5, 0.1, 0.3, 0.8])
+        n = 2
+        exp_sum = np.array([5.9, 30.])
+        np.testing.assert_array_almost_equal(exp_sum, sm.sum_by_n(d, w, n))
+
+    def test_standardized_mortality_ratio(self):
+        e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+        b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+        s_e = np.array([100, 45, 120, 100, 50, 30, 200, 80])
+        s_b = np.array([1000, 900, 1000, 900, 1000, 900, 1000, 900])
+        n = 2
+        exp_smr = np.array([2.48691099, 2.73684211])
+        np.testing.assert_array_almost_equal(exp_smr,
+                                             sm.standardized_mortality_ratio(e, b, s_e, s_b, n))
+
+    def test_choynowski(self):
+        e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+        b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+        n = 2
+        exp_choy = np.array([0.30437751, 0.29367033])
+        np.testing.assert_array_almost_equal(exp_choy, sm.choynowski(e, b, n))
+
+    def test_assuncao_rate(self):
+        e = np.array([30, 25, 25, 15, 33, 21, 30, 20])
+        b = np.array([100, 100, 110, 90, 100, 90, 110, 90])
+        exp_assuncao = np.array(
+            [1.04319254, -0.04117865, -0.56539054, -1.73762547])
+        np.testing.assert_array_almost_equal(
+            exp_assuncao, sm.assuncao_rate(e, b)[:4])
+
+
+suite = unittest.TestSuite()
+test_classes = [TestFlatten, TestWMean, TestAgeStd, TestSRate, TestHB,
+                TestKernel_AgeAdj_SM, TestUtils]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/examples/10740.dbf b/pysal/examples/10740.dbf
new file mode 100644
index 0000000..4dc3bd5
Binary files /dev/null and b/pysal/examples/10740.dbf differ
diff --git a/pysal/examples/10740.shp b/pysal/examples/10740.shp
new file mode 100644
index 0000000..34ee937
Binary files /dev/null and b/pysal/examples/10740.shp differ
diff --git a/pysal/examples/10740.shx b/pysal/examples/10740.shx
new file mode 100644
index 0000000..9f72583
Binary files /dev/null and b/pysal/examples/10740.shx differ
diff --git a/pysal/examples/10740_queen.gal b/pysal/examples/10740_queen.gal
new file mode 100644
index 0000000..72047ac
--- /dev/null
+++ b/pysal/examples/10740_queen.gal
@@ -0,0 +1,391 @@
+195
+1 7
+84 7 6 5 2 86 102
+2 6
+84 5 4 3 1 86
+3 7
+84 83 14 11 5 4 2
+4 8
+15 14 11 8 7 5 2 3
+5 7
+8 7 6 1 2 3 4
+6 6
+19 10 7 1 5 102
+7 7
+10 9 8 1 4 5 6
+8 7
+15 14 10 9 4 5 7
+9 7
+21 20 17 15 10 7 8
+10 7
+21 20 19 6 7 8 9
+11 8
+84 83 78 14 13 12 3 4
+12 8
+80 78 27 24 13 11 14 83
+13 8
+28 27 24 16 11 12 14 15
+14 8
+16 15 3 4 8 11 13 12
+15 9
+21 18 17 16 4 8 9 14 13
+16 7
+28 27 18 13 14 15 17
+17 9
+38 37 34 22 21 18 9 15 16
+18 6
+31 28 15 16 17 34
+19 7
+42 41 23 20 6 10 102
+20 6
+23 22 21 9 10 19
+21 7
+23 22 9 10 20 15 17
+22 5
+37 23 20 21 17
+23 7
+42 41 37 19 20 21 22
+24 8
+98 80 78 27 26 25 12 13
+25 6
+98 80 74 27 26 24
+26 6
+74 29 28 27 24 25
+27 8
+29 28 12 13 16 24 25 26
+28 8
+31 29 13 16 18 26 27 34
+29 6
+74 31 30 26 27 28
+30 10
+74 68 59 58 57 56 33 32 31 29
+31 7
+74 32 18 28 29 30 34
+32 6
+56 36 33 30 31 34
+33 9
+56 50 49 48 47 36 30 32 34
+34 10
+38 37 36 35 17 33 32 31 28 18
+35 4
+46 36 34 38
+36 7
+48 47 46 32 33 34 35
+37 7
+42 39 38 22 23 17 34
+38 8
+46 45 44 39 37 17 34 35
+39 8
+45 44 43 42 41 37 40 38
+40 7
+45 44 43 41 104 102 39
+41 6
+42 19 23 39 40 102
+42 5
+19 23 37 39 41
+43 3
+44 39 40
+44 5
+45 40 39 43 38
+45 13
+105 51 50 49 48 47 46 40 44 104 139 39 38
+46 5
+48 35 36 38 45
+47 5
+49 48 33 36 45
+48 5
+33 36 45 46 47
+49 5
+56 50 33 45 47
+50 5
+56 51 33 45 49
+51 7
+105 56 55 54 52 45 50
+52 4
+105 54 53 51
+53 7
+106 62 61 60 52 54 105
+54 7
+60 59 58 55 51 52 53
+55 6
+59 58 57 56 51 54
+56 8
+57 30 32 33 49 50 51 55
+57 4
+58 30 56 55
+58 5
+59 30 57 54 55
+59 8
+74 68 67 60 30 58 54 55
+60 5
+67 61 53 54 59
+61 7
+67 66 65 63 62 53 60
+62 9
+114 107 106 65 64 63 53 61 105
+63 8
+133 132 114 70 65 64 61 62
+64 4
+114 107 62 63
+65 9
+132 70 69 67 66 61 62 63 68
+66 3
+67 61 65
+67 6
+59 60 61 65 66 68
+68 7
+74 73 69 30 59 67 65
+69 5
+71 70 65 68 73
+70 5
+132 71 63 65 69
+71 7
+132 125 69 70 72 73 76
+72 5
+76 75 74 73 71
+73 5
+74 68 72 71 69
+74 12
+99 98 75 25 26 29 30 31 59 68 72 73
+75 5
+99 77 76 72 74
+76 7
+132 125 122 77 72 75 71
+77 7
+125 122 119 99 140 75 76
+78 10
+98 90 89 80 11 12 24 81 82 83
+79 5
+97 95 88 87 81
+80 5
+98 12 24 25 78
+81 10
+95 94 93 92 91 89 88 82 79 78
+82 6
+93 89 84 83 81 78
+83 7
+93 84 3 11 82 78 12
+84 9
+93 92 1 2 3 86 11 82 83
+85 6
+140 96 94 86 102 147
+86 8
+94 92 91 85 102 84 2 1
+87 5
+99 88 79 97 140
+88 7
+99 90 89 79 81 87 97
+89 5
+90 78 81 82 88
+90 5
+99 98 78 88 89
+91 5
+95 94 92 86 81
+92 5
+93 84 86 91 81
+93 5
+84 92 81 82 83
+94 6
+96 95 85 86 91 81
+95 6
+97 96 91 94 79 81
+96 5
+140 97 85 94 95
+97 6
+140 95 96 79 88 87
+98 7
+99 24 25 78 80 90 74
+99 8
+87 88 90 98 74 75 140 77
+100 3
+147 102 101
+101 3
+103 102 100
+102 12
+100 101 103 104 147 86 85 41 40 19 6 1
+103 4
+172 104 101 102
+104 8
+171 139 174 172 103 102 45 40
+105 11
+138 113 112 109 106 45 51 139 52 62 53
+106 7
+110 109 108 107 53 62 105
+107 7
+135 114 110 108 62 64 106
+108 7
+135 118 117 112 110 106 107
+109 4
+112 110 105 106
+110 5
+112 108 109 106 107
+111 3
+138 118 113
+112 6
+118 113 105 108 109 110
+113 5
+138 118 105 111 112
+114 10
+135 134 133 132 116 115 62 63 64 107
+115 5
+116 114 117 134 136
+116 4
+117 135 114 115
+117 6
+136 135 118 108 116 115
+118 7
+138 136 108 111 112 113 117
+119 8
+161 154 150 149 122 120 140 77
+120 6
+154 123 122 121 119 161
+121 7
+154 144 136 124 123 120 156
+122 9
+136 126 125 124 123 76 77 119 120
+123 4
+124 120 121 122
+124 4
+136 123 121 122
+125 8
+132 131 128 126 71 76 122 77
+126 5
+136 128 127 125 122
+127 4
+136 129 128 126
+128 6
+131 130 129 125 126 127
+129 6
+136 134 131 130 127 128
+130 3
+131 128 129
+131 7
+134 133 132 125 128 129 130
+132 9
+133 63 65 70 71 76 125 131 114
+133 5
+134 63 131 132 114
+134 6
+136 129 131 114 133 115
+135 5
+108 117 107 114 116
+136 16
+170 144 141 138 137 117 118 126 127 129 122 134 121 124 156 115
+137 5
+177 141 138 178 136
+138 11
+182 178 177 139 179 105 111 113 118 136 137
+139 7
+179 171 104 174 138 105 45
+140 10
+165 149 85 96 97 147 119 99 87 77
+141 4
+170 136 137 144
+142 5
+156 144 143 145 167
+143 2
+145 142
+144 8
+170 169 168 121 156 136 142 141
+145 10
+163 162 156 155 151 148 147 146 143 142
+146 3
+147 145 163
+147 9
+165 163 148 100 145 146 140 102 85
+148 5
+166 165 151 147 145
+149 5
+151 150 140 165 119
+150 5
+161 152 151 119 149
+151 8
+155 152 148 165 166 149 150 145
+152 7
+161 160 159 155 153 151 150
+153 6
+160 157 154 152 155 156
+154 9
+160 158 157 119 120 153 161 121 156
+155 6
+157 156 151 152 145 153
+156 10
+158 157 155 145 142 154 153 144 136 121
+157 5
+158 153 155 154 156
+158 3
+157 154 156
+159 3
+160 152 161
+160 5
+152 153 159 161 154
+161 7
+119 150 152 160 159 154 120
+162 2
+163 145
+163 4
+162 147 145 146
+164 0
+
+165 6
+166 140 147 148 151 149
+166 3
+148 165 151
+167 1
+142
+168 2
+169 144
+169 2
+144 168
+170 3
+136 141 144
+171 5
+180 179 104 174 139
+172 4
+173 174 104 103
+173 3
+175 174 172
+174 12
+194 184 183 180 179 176 175 173 172 171 139 104
+175 3
+176 174 173
+176 3
+194 175 174
+177 5
+187 138 178 137 195
+178 6
+187 186 182 138 177 137
+179 7
+182 181 180 171 174 139 138
+180 5
+183 181 171 174 179
+181 7
+189 186 185 182 179 180 183
+182 5
+186 179 181 138 178
+183 5
+185 184 174 180 181
+184 5
+193 185 174 183 194
+185 7
+193 191 189 186 183 184 181
+186 7
+189 188 187 185 181 182 178
+187 5
+195 188 186 178 177
+188 4
+195 186 189 187
+189 7
+195 191 190 185 186 181 188
+190 3
+195 191 189
+191 7
+195 192 193 194 185 189 190
+192 4
+195 191 193 194
+193 5
+194 184 185 192 191
+194 6
+176 174 193 192 191 184
+195 7
+191 192 189 190 188 187 177
diff --git a/pysal/examples/10740_rook.gal b/pysal/examples/10740_rook.gal
new file mode 100644
index 0000000..90f3237
--- /dev/null
+++ b/pysal/examples/10740_rook.gal
@@ -0,0 +1,391 @@
+195
+1 5
+6 5 2 86 102
+2 4
+84 5 3 1
+3 4
+84 11 4 2
+4 4
+14 8 5 3
+5 4
+7 1 2 4
+6 5
+19 10 7 1 102
+7 4
+10 8 5 6
+8 4
+15 9 4 7
+9 4
+21 15 10 8
+10 5
+20 19 6 7 9
+11 4
+83 14 12 3
+12 4
+78 24 13 11
+13 4
+27 16 12 14
+14 4
+15 4 11 13
+15 5
+17 16 8 9 14
+16 4
+28 18 13 15
+17 6
+37 34 22 21 18 15
+18 4
+28 16 17 34
+19 6
+41 23 20 6 10 102
+20 4
+23 21 10 19
+21 4
+22 9 20 17
+22 4
+37 23 21 17
+23 5
+42 37 19 20 22
+24 4
+80 27 25 12
+25 4
+98 74 26 24
+26 4
+74 29 27 25
+27 4
+28 13 24 26
+28 5
+31 29 16 18 27
+29 4
+74 31 26 28
+30 7
+74 59 58 57 56 32 31
+31 5
+32 28 29 30 34
+32 4
+33 30 31 34
+33 5
+56 49 47 36 32
+34 7
+38 36 35 17 32 31 18
+35 4
+46 36 34 38
+36 5
+48 46 33 34 35
+37 6
+42 39 38 22 23 17
+38 6
+46 45 39 37 34 35
+39 7
+44 43 42 41 37 40 38
+40 7
+45 44 43 41 104 102 39
+41 5
+42 19 39 40 102
+42 4
+23 37 39 41
+43 3
+44 39 40
+44 4
+45 40 39 43
+45 12
+105 51 50 49 48 47 46 40 44 104 139 38
+46 5
+48 35 36 38 45
+47 4
+49 48 33 45
+48 4
+36 45 46 47
+49 4
+50 33 45 47
+50 4
+56 51 45 49
+51 7
+105 56 55 54 52 45 50
+52 4
+105 54 53 51
+53 6
+62 61 60 52 54 105
+54 6
+60 59 55 51 52 53
+55 5
+58 57 56 51 54
+56 6
+57 30 33 50 51 55
+57 4
+58 30 56 55
+58 4
+59 30 57 55
+59 6
+68 67 60 30 58 54
+60 5
+67 61 53 54 59
+61 6
+67 66 65 62 53 60
+62 6
+107 106 64 63 53 61
+63 5
+132 114 65 64 62
+64 3
+114 62 63
+65 7
+70 69 67 66 61 63 68
+66 3
+67 61 65
+67 6
+59 60 61 65 66 68
+68 6
+74 73 69 59 67 65
+69 5
+71 70 65 68 73
+70 4
+132 71 65 69
+71 6
+132 69 70 72 73 76
+72 5
+76 75 74 73 71
+73 5
+74 68 72 71 69
+74 10
+99 98 75 25 26 29 30 68 72 73
+75 5
+99 77 76 72 74
+76 5
+125 77 72 75 71
+77 6
+122 119 99 140 75 76
+78 7
+98 90 89 80 12 82 83
+79 4
+97 95 88 81
+80 3
+98 24 78
+81 8
+95 93 92 91 89 88 82 79
+82 4
+93 83 81 78
+83 4
+84 11 82 78
+84 6
+93 92 2 3 86 83
+85 6
+140 96 94 86 102 147
+86 7
+94 92 91 85 102 84 1
+87 4
+99 88 97 140
+88 6
+99 90 89 79 81 87
+89 4
+90 78 81 88
+90 5
+99 98 78 88 89
+91 4
+94 92 86 81
+92 5
+93 84 86 91 81
+93 4
+84 92 81 82
+94 5
+96 95 85 86 91
+95 5
+97 96 94 79 81
+96 5
+140 97 85 94 95
+97 5
+140 95 96 79 87
+98 6
+99 25 78 80 90 74
+99 8
+87 88 90 98 74 75 140 77
+100 3
+147 102 101
+101 3
+103 102 100
+102 12
+100 101 103 104 147 86 85 41 40 19 6 1
+103 4
+172 104 101 102
+104 7
+139 174 172 103 102 45 40
+105 10
+138 113 112 109 106 45 51 139 52 53
+106 5
+110 109 107 62 105
+107 5
+135 114 108 62 106
+108 6
+135 118 117 112 110 107
+109 4
+112 110 105 106
+110 4
+112 108 109 106
+111 3
+138 118 113
+112 6
+118 113 105 108 109 110
+113 5
+138 118 105 111 112
+114 8
+135 134 133 116 115 63 64 107
+115 5
+116 114 117 134 136
+116 4
+117 135 114 115
+117 6
+136 135 118 108 116 115
+118 7
+138 136 108 111 112 113 117
+119 7
+161 150 149 122 120 140 77
+120 4
+154 122 121 119
+121 6
+154 136 124 123 120 156
+122 8
+136 126 125 124 123 77 119 120
+123 3
+124 121 122
+124 4
+136 123 121 122
+125 6
+132 131 128 126 76 122
+126 5
+136 128 127 125 122
+127 4
+136 129 128 126
+128 6
+131 130 129 125 126 127
+129 6
+136 134 131 130 127 128
+130 3
+131 128 129
+131 7
+134 133 132 125 128 129 130
+132 6
+133 63 70 71 125 131
+133 4
+134 131 132 114
+134 6
+136 129 131 114 133 115
+135 5
+108 117 107 114 116
+136 15
+170 144 141 138 137 117 118 126 127 129 122 134 121 124 115
+137 4
+177 141 138 136
+138 10
+182 178 139 179 105 111 113 118 136 137
+139 6
+179 171 104 138 105 45
+140 10
+165 149 85 96 97 147 119 99 87 77
+141 4
+170 136 137 144
+142 5
+156 144 143 145 167
+143 2
+145 142
+144 7
+170 169 168 156 136 142 141
+145 10
+163 162 156 155 151 148 147 146 143 142
+146 3
+147 145 163
+147 9
+165 163 148 100 145 146 140 102 85
+148 5
+166 165 151 147 145
+149 5
+151 150 140 165 119
+150 5
+161 152 151 119 149
+151 8
+155 152 148 165 166 149 150 145
+152 7
+161 160 159 155 153 151 150
+153 5
+160 157 154 152 155
+154 8
+160 158 157 120 153 161 121 156
+155 5
+156 151 152 145 153
+156 8
+158 157 155 145 142 154 144 121
+157 4
+158 153 154 156
+158 3
+157 154 156
+159 3
+160 152 161
+160 5
+152 153 159 161 154
+161 6
+119 150 152 160 159 154
+162 2
+163 145
+163 4
+162 147 145 146
+164 0
+
+165 6
+166 140 147 148 151 149
+166 3
+148 165 151
+167 1
+142
+168 2
+169 144
+169 2
+144 168
+170 3
+136 141 144
+171 3
+179 174 139
+172 4
+173 174 104 103
+173 3
+175 174 172
+174 10
+194 184 183 180 176 175 173 172 171 104
+175 3
+176 174 173
+176 3
+194 175 174
+177 4
+187 178 137 195
+178 5
+187 186 182 138 177
+179 6
+182 181 180 171 139 138
+180 4
+183 181 174 179
+181 6
+186 185 182 179 180 183
+182 5
+186 179 181 138 178
+183 5
+185 184 174 180 181
+184 5
+193 185 174 183 194
+185 6
+193 191 189 183 184 181
+186 6
+189 188 187 181 182 178
+187 5
+195 188 186 178 177
+188 4
+195 186 189 187
+189 6
+195 191 190 185 186 188
+190 3
+195 191 189
+191 6
+195 192 193 185 189 190
+192 3
+195 191 194
+193 4
+194 184 185 191
+194 5
+176 174 193 192 184
+195 7
+191 192 189 190 188 187 177
diff --git a/pysal/examples/Chicago77.dbf b/pysal/examples/Chicago77.dbf
new file mode 100644
index 0000000..cfa0934
Binary files /dev/null and b/pysal/examples/Chicago77.dbf differ
diff --git a/pysal/examples/Chicago77.shp b/pysal/examples/Chicago77.shp
new file mode 100644
index 0000000..ee30858
Binary files /dev/null and b/pysal/examples/Chicago77.shp differ
diff --git a/pysal/examples/Chicago77.shx b/pysal/examples/Chicago77.shx
new file mode 100644
index 0000000..56f35b4
Binary files /dev/null and b/pysal/examples/Chicago77.shx differ
diff --git a/pysal/examples/Line.dbf b/pysal/examples/Line.dbf
new file mode 100644
index 0000000..b3f7f90
Binary files /dev/null and b/pysal/examples/Line.dbf differ
diff --git a/pysal/examples/Line.prj b/pysal/examples/Line.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/Line.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/Line.shp b/pysal/examples/Line.shp
new file mode 100644
index 0000000..6e27301
Binary files /dev/null and b/pysal/examples/Line.shp differ
diff --git a/pysal/examples/Line.shx b/pysal/examples/Line.shx
new file mode 100644
index 0000000..b27a051
Binary files /dev/null and b/pysal/examples/Line.shx differ
diff --git a/pysal/examples/NAT.dbf b/pysal/examples/NAT.dbf
new file mode 100644
index 0000000..dff6763
Binary files /dev/null and b/pysal/examples/NAT.dbf differ
diff --git a/pysal/examples/NAT.shp b/pysal/examples/NAT.shp
new file mode 100644
index 0000000..12e35b2
Binary files /dev/null and b/pysal/examples/NAT.shp differ
diff --git a/pysal/examples/NAT.shx b/pysal/examples/NAT.shx
new file mode 100644
index 0000000..1be8468
Binary files /dev/null and b/pysal/examples/NAT.shx differ
diff --git a/pysal/examples/Point.dbf b/pysal/examples/Point.dbf
new file mode 100644
index 0000000..13e7563
Binary files /dev/null and b/pysal/examples/Point.dbf differ
diff --git a/pysal/examples/Point.prj b/pysal/examples/Point.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/Point.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/Point.shp b/pysal/examples/Point.shp
new file mode 100644
index 0000000..846c2b3
Binary files /dev/null and b/pysal/examples/Point.shp differ
diff --git a/pysal/examples/Point.shx b/pysal/examples/Point.shx
new file mode 100644
index 0000000..8a9f027
Binary files /dev/null and b/pysal/examples/Point.shx differ
diff --git a/pysal/examples/Polygon.dbf b/pysal/examples/Polygon.dbf
new file mode 100644
index 0000000..d925117
Binary files /dev/null and b/pysal/examples/Polygon.dbf differ
diff --git a/pysal/examples/Polygon.prj b/pysal/examples/Polygon.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/Polygon.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/Polygon.shp b/pysal/examples/Polygon.shp
new file mode 100644
index 0000000..3ad50c2
Binary files /dev/null and b/pysal/examples/Polygon.shp differ
diff --git a/pysal/examples/Polygon.shx b/pysal/examples/Polygon.shx
new file mode 100644
index 0000000..eb12139
Binary files /dev/null and b/pysal/examples/Polygon.shx differ
diff --git a/pysal/examples/README.txt b/pysal/examples/README.txt
new file mode 100644
index 0000000..11d7482
--- /dev/null
+++ b/pysal/examples/README.txt
@@ -0,0 +1,5 @@
+Documentation for all example data sets for PySAL can be found at
+
+http://pysal.org/users/tutorials/examples.html
+
+If you obtained this example data by downloading it as a separate zip file, it should be unzipped in the pysal directory, so that the path to the data herein would be like pysal/examples/rook31.dbf, for example. 
diff --git a/pysal/examples/__init__.py b/pysal/examples/__init__.py
new file mode 100644
index 0000000..492f33e
--- /dev/null
+++ b/pysal/examples/__init__.py
@@ -0,0 +1,9 @@
+import os
+import pysal
+
+__all__ = ['get_path']
+
+
+def get_path(example_name):
+    base = os.path.split(pysal.__file__)[0]
+    return os.path.join(base, 'examples', example_name)
diff --git a/pysal/examples/arcgis_ohio.dbf b/pysal/examples/arcgis_ohio.dbf
new file mode 100644
index 0000000..34cef0a
Binary files /dev/null and b/pysal/examples/arcgis_ohio.dbf differ
diff --git a/pysal/examples/arcgis_txt.txt b/pysal/examples/arcgis_txt.txt
new file mode 100644
index 0000000..1063e70
--- /dev/null
+++ b/pysal/examples/arcgis_txt.txt
@@ -0,0 +1,9 @@
+StationID
+1  1   0.0
+1  2   0.1
+1  3   0.14286
+2  1   0.1
+2  3   0.05
+3  1   0.16667
+3  2   0.06667
+3  3   0.0
\ No newline at end of file
diff --git a/pysal/examples/baltim.dbf b/pysal/examples/baltim.dbf
new file mode 100644
index 0000000..be9d815
Binary files /dev/null and b/pysal/examples/baltim.dbf differ
diff --git a/pysal/examples/baltim.shp b/pysal/examples/baltim.shp
new file mode 100644
index 0000000..f867886
Binary files /dev/null and b/pysal/examples/baltim.shp differ
diff --git a/pysal/examples/baltim.shx b/pysal/examples/baltim.shx
new file mode 100644
index 0000000..8414722
Binary files /dev/null and b/pysal/examples/baltim.shx differ
diff --git a/pysal/examples/baltim_k4.gwt b/pysal/examples/baltim_k4.gwt
new file mode 100644
index 0000000..1f77dc6
--- /dev/null
+++ b/pysal/examples/baltim_k4.gwt
@@ -0,0 +1,845 @@
+0 211 baltim.shp STATION
+1 96      1
+1 16      1
+1 90      1
+1 133      1
+2 5      1
+2 4      1
+2 7      1
+2 185      1
+3 4      1
+3 7      1
+3 2      1
+3 5      1
+4 2      1
+4 3      1
+4 7      1
+4 5      1
+5 7      1
+5 2      1
+5 11      1
+5 4      1
+6 10      1
+6 70      1
+6 8      1
+6 12      1
+7 5      1
+7 2      1
+7 3      1
+7 4      1
+8 12      1
+8 14      1
+8 70      1
+8 6      1
+9 195      1
+9 76      1
+9 211      1
+9 68      1
+10 6      1
+10 70      1
+10 48      1
+10 8      1
+11 13      1
+11 5      1
+11 15      1
+11 7      1
+12 14      1
+12 8      1
+12 70      1
+12 67      1
+13 11      1
+13 65      1
+13 67      1
+13 69      1
+14 12      1
+14 8      1
+14 11      1
+14 5      1
+15 180      1
+15 2      1
+15 185      1
+15 195      1
+16 18      1
+16 179      1
+16 96      1
+16 1      1
+17 18      1
+17 179      1
+17 74      1
+17 135      1
+18 17      1
+18 74      1
+18 179      1
+18 16      1
+19 21      1
+19 20      1
+19 22      1
+19 25      1
+20 19      1
+20 25      1
+20 200      1
+20 24      1
+21 19      1
+21 203      1
+21 20      1
+21 25      1
+22 204      1
+22 72      1
+22 205      1
+22 23      1
+23 72      1
+23 75      1
+23 22      1
+23 24      1
+24 75      1
+24 20      1
+24 19      1
+24 192      1
+25 200      1
+25 20      1
+25 21      1
+25 19      1
+26 37      1
+26 38      1
+26 27      1
+26 46      1
+27 30      1
+27 39      1
+27 35      1
+27 38      1
+28 32      1
+28 35      1
+28 39      1
+28 33      1
+29 42      1
+29 32      1
+29 33      1
+29 31      1
+30 27      1
+30 39      1
+30 38      1
+30 34      1
+31 86      1
+31 166      1
+31 29      1
+31 170      1
+32 28      1
+32 29      1
+32 153      1
+32 31      1
+33 34      1
+33 39      1
+33 36      1
+33 28      1
+34 33      1
+34 39      1
+34 36      1
+34 30      1
+35 28      1
+35 27      1
+35 39      1
+35 30      1
+36 34      1
+36 33      1
+36 47      1
+36 77      1
+37 26      1
+37 38      1
+37 46      1
+37 27      1
+38 46      1
+38 50      1
+38 30      1
+38 43      1
+39 34      1
+39 33      1
+39 30      1
+39 27      1
+40 82      1
+40 86      1
+40 87      1
+40 42      1
+41 161      1
+41 170      1
+41 153      1
+41 172      1
+42 29      1
+42 40      1
+42 86      1
+42 31      1
+43 50      1
+43 47      1
+43 38      1
+43 46      1
+44 45      1
+44 49      1
+44 48      1
+44 51      1
+45 51      1
+45 77      1
+45 49      1
+45 44      1
+46 50      1
+46 38      1
+46 43      1
+46 52      1
+47 43      1
+47 77      1
+47 36      1
+47 45      1
+48 58      1
+48 44      1
+48 10      1
+48 56      1
+49 45      1
+49 44      1
+49 50      1
+49 47      1
+50 46      1
+50 43      1
+50 38      1
+50 47      1
+51 77      1
+51 45      1
+51 54      1
+51 79      1
+52 46      1
+52 50      1
+52 49      1
+52 38      1
+53 55      1
+53 57      1
+53 60      1
+53 56      1
+54 56      1
+54 57      1
+54 79      1
+54 51      1
+55 57      1
+55 53      1
+55 56      1
+55 54      1
+56 54      1
+56 57      1
+56 58      1
+56 55      1
+57 55      1
+57 56      1
+57 53      1
+57 54      1
+58 56      1
+58 54      1
+58 55      1
+58 57      1
+59 61      1
+59 62      1
+59 60      1
+59 64      1
+60 53      1
+60 59      1
+60 71      1
+60 55      1
+61 62      1
+61 59      1
+61 64      1
+61 66      1
+62 61      1
+62 64      1
+62 59      1
+62 63      1
+63 91      1
+63 64      1
+63 93      1
+63 62      1
+64 62      1
+64 61      1
+64 63      1
+64 59      1
+65 69      1
+65 67      1
+65 13      1
+65 68      1
+66 209      1
+66 71      1
+66 61      1
+66 92      1
+67 69      1
+67 65      1
+67 13      1
+67 12      1
+68 211      1
+68 65      1
+68 209      1
+68 71      1
+69 67      1
+69 65      1
+69 13      1
+69 71      1
+70 6      1
+70 12      1
+70 8      1
+70 10      1
+71 66      1
+71 68      1
+71 60      1
+71 211      1
+72 23      1
+72 22      1
+72 73      1
+72 74      1
+73 74      1
+73 95      1
+73 18      1
+73 94      1
+74 73      1
+74 18      1
+74 17      1
+74 16      1
+75 23      1
+75 24      1
+75 210      1
+75 72      1
+76 210      1
+76 9      1
+76 195      1
+76 192      1
+77 51      1
+77 45      1
+77 47      1
+77 79      1
+78 81      1
+78 79      1
+78 82      1
+78 40      1
+79 81      1
+79 78      1
+79 54      1
+79 77      1
+80 83      1
+80 82      1
+80 87      1
+80 88      1
+81 79      1
+81 78      1
+81 54      1
+81 77      1
+82 80      1
+82 87      1
+82 83      1
+82 40      1
+83 80      1
+83 82      1
+83 59      1
+83 78      1
+84 86      1
+84 87      1
+84 169      1
+84 85      1
+85 169      1
+85 89      1
+85 84      1
+85 90      1
+86 84      1
+86 31      1
+86 40      1
+86 42      1
+87 88      1
+87 82      1
+87 84      1
+87 40      1
+88 87      1
+88 89      1
+88 84      1
+88 82      1
+89 90      1
+89 85      1
+89 133      1
+89 88      1
+90 89      1
+90 133      1
+90 91      1
+90 85      1
+91 63      1
+91 96      1
+91 90      1
+91 93      1
+92 94      1
+92 95      1
+92 66      1
+92 209      1
+93 96      1
+93 63      1
+93 95      1
+93 92      1
+94 95      1
+94 92      1
+94 73      1
+94 93      1
+95 94      1
+95 92      1
+95 73      1
+95 93      1
+96 93      1
+96 1      1
+96 91      1
+96 63      1
+97 98      1
+97 117      1
+97 129      1
+97 119      1
+98 97      1
+98 117      1
+98 129      1
+98 119      1
+99 104      1
+99 105      1
+99 108      1
+99 106      1
+100 105      1
+100 106      1
+100 99      1
+100 104      1
+101 107      1
+101 100      1
+101 103      1
+101 106      1
+102 101      1
+102 100      1
+102 107      1
+102 105      1
+103 107      1
+103 109      1
+103 116      1
+103 106      1
+104 99      1
+104 108      1
+104 106      1
+104 110      1
+105 100      1
+105 99      1
+105 104      1
+105 106      1
+106 110      1
+106 104      1
+106 108      1
+106 103      1
+107 103      1
+107 109      1
+107 116      1
+107 106      1
+108 110      1
+108 104      1
+108 106      1
+108 99      1
+109 116      1
+109 103      1
+109 111      1
+109 107      1
+110 108      1
+110 106      1
+110 104      1
+110 103      1
+111 114      1
+111 113      1
+111 116      1
+111 109      1
+112 108      1
+112 114      1
+112 207      1
+112 110      1
+113 111      1
+113 116      1
+113 109      1
+113 114      1
+114 111      1
+114 116      1
+114 109      1
+114 112      1
+115 113      1
+115 107      1
+115 109      1
+115 116      1
+116 109      1
+116 103      1
+116 111      1
+116 107      1
+117 119      1
+117 129      1
+117 121      1
+117 98      1
+118 120      1
+118 126      1
+118 124      1
+118 123      1
+119 129      1
+119 122      1
+119 127      1
+119 121      1
+120 126      1
+120 118      1
+120 127      1
+120 124      1
+121 119      1
+121 129      1
+121 117      1
+121 122      1
+122 127      1
+122 119      1
+122 129      1
+122 120      1
+123 128      1
+123 125      1
+123 127      1
+123 122      1
+124 126      1
+124 138      1
+124 120      1
+124 118      1
+125 128      1
+125 123      1
+125 127      1
+125 129      1
+126 124      1
+126 120      1
+126 138      1
+126 118      1
+127 122      1
+127 123      1
+127 125      1
+127 129      1
+128 125      1
+128 123      1
+128 127      1
+128 122      1
+129 119      1
+129 122      1
+129 127      1
+129 117      1
+130 136      1
+130 126      1
+130 121      1
+130 120      1
+131 167      1
+131 168      1
+131 132      1
+131 174      1
+132 133      1
+132 178      1
+132 174      1
+132 131      1
+133 132      1
+133 178      1
+133 90      1
+133 89      1
+134 135      1
+134 139      1
+134 137      1
+134 179      1
+135 134      1
+135 179      1
+135 17      1
+135 139      1
+136 138      1
+136 130      1
+136 126      1
+136 124      1
+137 139      1
+137 134      1
+137 135      1
+137 124      1
+138 124      1
+138 136      1
+138 126      1
+138 205      1
+139 137      1
+139 134      1
+139 17      1
+139 135      1
+140 152      1
+140 142      1
+140 177      1
+140 141      1
+141 142      1
+141 140      1
+141 152      1
+141 151      1
+142 141      1
+142 140      1
+142 152      1
+142 151      1
+143 145      1
+143 165      1
+143 146      1
+143 149      1
+144 145      1
+144 146      1
+144 143      1
+144 165      1
+145 143      1
+145 146      1
+145 165      1
+145 144      1
+146 165      1
+146 143      1
+146 145      1
+146 147      1
+147 148      1
+147 165      1
+147 150      1
+147 146      1
+148 177      1
+148 147      1
+148 150      1
+148 152      1
+149 167      1
+149 171      1
+149 165      1
+149 168      1
+150 151      1
+150 148      1
+150 147      1
+150 152      1
+151 150      1
+151 152      1
+151 148      1
+151 177      1
+152 151      1
+152 148      1
+152 140      1
+152 150      1
+153 159      1
+153 41      1
+153 154      1
+153 163      1
+154 163      1
+154 153      1
+154 159      1
+154 157      1
+155 160      1
+155 156      1
+155 157      1
+155 159      1
+156 162      1
+156 164      1
+156 155      1
+156 161      1
+157 155      1
+157 160      1
+157 163      1
+157 159      1
+158 171      1
+158 172      1
+158 170      1
+158 161      1
+159 153      1
+159 163      1
+159 155      1
+159 161      1
+160 155      1
+160 157      1
+160 156      1
+160 164      1
+161 41      1
+161 156      1
+161 159      1
+161 172      1
+162 164      1
+162 156      1
+162 161      1
+162 160      1
+163 154      1
+163 159      1
+163 157      1
+163 153      1
+164 162      1
+164 156      1
+164 160      1
+164 155      1
+165 143      1
+165 146      1
+165 147      1
+165 149      1
+166 170      1
+166 172      1
+166 169      1
+166 31      1
+167 149      1
+167 168      1
+167 131      1
+167 175      1
+168 171      1
+168 167      1
+168 131      1
+168 149      1
+169 85      1
+169 84      1
+169 166      1
+169 168      1
+170 166      1
+170 172      1
+170 41      1
+170 171      1
+171 158      1
+171 168      1
+171 172      1
+171 149      1
+172 170      1
+172 158      1
+172 171      1
+172 166      1
+173 176      1
+173 178      1
+173 179      1
+173 135      1
+174 175      1
+174 177      1
+174 132      1
+174 131      1
+175 174      1
+175 177      1
+175 148      1
+175 167      1
+176 173      1
+176 135      1
+176 134      1
+176 179      1
+177 175      1
+177 148      1
+177 174      1
+177 152      1
+178 133      1
+178 132      1
+178 173      1
+178 1      1
+179 17      1
+179 135      1
+179 18      1
+179 134      1
+180 15      1
+180 185      1
+180 190      1
+180 2      1
+181 183      1
+181 193      1
+181 189      1
+181 182      1
+182 190      1
+182 196      1
+182 184      1
+182 183      1
+183 181      1
+183 189      1
+183 193      1
+183 182      1
+184 191      1
+184 190      1
+184 192      1
+184 187      1
+185 180      1
+185 2      1
+185 15      1
+185 181      1
+186 188      1
+186 197      1
+186 199      1
+186 201      1
+187 196      1
+187 184      1
+187 202      1
+187 191      1
+188 186      1
+188 194      1
+188 201      1
+188 197      1
+189 193      1
+189 183      1
+189 181      1
+189 188      1
+190 184      1
+190 182      1
+190 191      1
+190 192      1
+191 192      1
+191 184      1
+191 190      1
+191 187      1
+192 191      1
+192 184      1
+192 190      1
+192 76      1
+193 181      1
+193 189      1
+193 183      1
+193 185      1
+194 202      1
+194 201      1
+194 196      1
+194 188      1
+195 9      1
+195 76      1
+195 15      1
+195 180      1
+196 202      1
+196 187      1
+196 182      1
+196 194      1
+197 199      1
+197 198      1
+197 186      1
+197 188      1
+198 197      1
+198 201      1
+198 186      1
+198 199      1
+199 197      1
+199 186      1
+199 198      1
+199 188      1
+200 25      1
+200 20      1
+200 21      1
+200 187      1
+201 194      1
+201 202      1
+201 188      1
+201 198      1
+202 194      1
+202 196      1
+202 187      1
+202 201      1
+203 206      1
+203 207      1
+203 21      1
+203 204      1
+204 205      1
+204 206      1
+204 22      1
+204 203      1
+205 204      1
+205 206      1
+205 22      1
+205 207      1
+206 207      1
+206 203      1
+206 204      1
+206 205      1
+207 206      1
+207 203      1
+207 204      1
+207 205      1
+208 99      1
+208 200      1
+208 104      1
+208 25      1
+209 66      1
+209 211      1
+209 210      1
+209 68      1
+210 76      1
+210 209      1
+210 75      1
+210 211      1
+211 68      1
+211 209      1
+211 9      1
+211 65      1
diff --git a/pysal/examples/baltim_q.gal b/pysal/examples/baltim_q.gal
new file mode 100644
index 0000000..c6351d8
--- /dev/null
+++ b/pysal/examples/baltim_q.gal
@@ -0,0 +1,423 @@
+0 211 baltim STATION
+1 7
+178 173 133 96 91 90 16
+2 5
+185 15 7 5 4
+3 2
+7 4
+4 5
+193 185 7 3 2
+5 5
+15 14 11 7 2
+6 3
+70 10 8
+7 5
+14 5 4 3 2
+8 4
+70 14 12 6
+9 6
+211 195 76 68 13 11
+10 4
+70 58 48 6
+11 6
+195 15 14 13 9 5
+12 5
+70 67 14 13 8
+13 7
+68 67 65 14 12 11 9
+14 6
+13 12 11 8 7 5
+15 6
+195 185 180 11 5 2
+16 9
+179 173 96 95 93 74 73 18 1
+17 6
+179 139 135 134 74 18
+18 4
+179 74 17 16
+19 6
+204 24 23 22 21 20
+20 5
+191 25 24 21 19
+21 7
+208 204 203 200 25 20 19
+22 5
+205 204 72 23 19
+23 6
+75 73 72 24 22 19
+24 8
+210 192 191 76 75 23 20 19
+25 6
+200 191 187 184 21 20
+26 4
+38 37 35 27
+27 5
+39 38 35 30 26
+28 6
+154 153 39 35 33 32
+29 5
+42 36 33 32 31
+30 5
+43 39 38 34 27
+31 8
+170 169 166 86 42 41 32 29
+32 6
+153 41 33 31 29 28
+33 6
+39 36 34 32 29 28
+34 6
+47 43 39 36 33 30
+35 5
+154 39 28 27 26
+36 7
+81 77 47 42 34 33 29
+37 4
+52 46 38 26
+38 7
+50 46 43 37 30 27 26
+39 6
+35 34 33 30 28 27
+40 6
+87 86 84 82 78 42
+41 7
+172 170 161 159 153 32 31
+42 7
+86 81 78 40 36 31 29
+43 6
+50 49 47 38 34 30
+44 6
+58 52 51 49 48 45
+45 5
+77 51 49 47 44
+46 4
+52 50 38 37
+47 6
+77 49 45 43 36 34
+48 3
+58 44 10
+49 6
+52 50 47 45 44 43
+50 5
+52 49 46 43 38
+51 6
+79 77 58 54 45 44
+52 5
+50 49 46 44 37
+53 6
+70 69 60 58 57 55
+54 6
+79 78 58 57 56 51
+55 4
+58 57 56 53
+56 4
+58 57 55 54
+57 7
+83 78 60 56 55 54 53
+58 9
+70 56 55 54 53 51 48 44 10
+59 5
+83 71 64 61 60
+60 6
+83 71 69 59 57 53
+61 5
+71 66 64 62 59
+62 6
+93 92 66 64 63 61
+63 6
+96 93 91 88 64 62
+64 7
+88 83 80 63 62 61 59
+65 5
+71 69 68 67 13
+66 5
+209 92 71 62 61
+67 5
+70 69 65 13 12
+68 6
+211 209 71 65 13 9
+69 6
+71 70 67 65 60 53
+70 8
+69 67 58 53 12 10 8 6
+71 8
+209 69 68 66 65 61 60 59
+72 6
+205 139 74 73 23 22
+73 7
+95 94 75 74 72 23 16
+74 6
+139 73 72 18 17 16
+75 5
+210 94 73 24 23
+76 6
+211 210 195 192 24 9
+77 6
+81 79 51 47 45 36
+78 8
+83 82 81 79 57 54 42 40
+79 5
+81 78 77 54 51
+80 5
+88 87 83 82 64
+81 5
+79 78 77 42 36
+82 5
+87 83 80 78 40
+83 7
+82 80 78 64 60 59 57
+84 6
+169 88 87 86 85 40
+85 6
+169 133 131 89 88 84
+86 5
+169 84 42 40 31
+87 5
+88 84 82 80 40
+88 9
+91 90 89 87 85 84 80 64 63
+89 4
+133 90 88 85
+90 5
+133 91 89 88 1
+91 5
+96 90 88 63 1
+92 7
+210 209 95 94 93 66 62
+93 6
+96 95 92 63 62 16
+94 5
+210 95 92 75 73
+95 5
+94 93 92 73 16
+96 5
+93 91 63 16 1
+97 3
+125 115 98
+98 6
+129 125 117 115 113 97
+99 4
+208 105 104 100
+100 7
+107 106 105 104 103 101 99
+101 5
+115 107 105 102 100
+102 5
+199 198 197 105 101
+103 6
+116 110 109 107 106 100
+104 6
+208 110 108 106 100 99
+105 6
+208 198 102 101 100 99
+106 4
+110 104 103 100
+107 6
+115 113 109 103 101 100
+108 5
+208 114 112 110 104
+109 5
+116 113 111 107 103
+110 6
+116 114 108 106 104 103
+111 5
+121 116 114 113 109
+112 5
+208 207 203 114 108
+113 7
+121 117 115 111 109 107 98
+114 8
+207 130 121 116 112 111 110 108
+115 5
+113 107 101 98 97
+116 5
+114 111 110 109 103
+117 5
+129 121 119 113 98
+118 8
+141 137 134 128 126 124 123 120
+119 4
+129 122 121 117
+120 6
+130 127 126 123 122 118
+121 7
+130 122 119 117 114 113 111
+122 6
+130 129 127 121 120 119
+123 5
+128 127 125 120 118
+124 4
+138 137 126 118
+125 6
+129 128 127 123 98 97
+126 6
+138 136 130 124 120 118
+127 5
+129 125 123 122 120
+128 4
+141 125 123 118
+129 6
+127 125 122 119 117 98
+130 7
+207 136 126 122 121 120 114
+131 8
+175 174 169 168 167 133 132 85
+132 4
+178 174 133 131
+133 7
+178 132 131 90 89 85 1
+134 7
+176 141 139 137 135 118 17
+135 5
+179 176 173 134 17
+136 6
+207 206 205 138 130 126
+137 5
+139 138 134 124 118
+138 6
+205 139 137 136 126 124
+139 7
+205 138 137 134 74 72 17
+140 7
+178 177 176 174 173 152 142
+141 5
+176 142 134 128 118
+142 5
+176 152 151 141 140
+143 6
+165 162 158 149 146 145
+144 3
+164 146 145
+145 5
+164 162 146 144 143
+146 6
+165 150 147 145 144 143
+147 6
+175 167 165 150 148 146
+148 5
+177 175 152 150 147
+149 6
+171 168 167 165 158 143
+150 5
+152 151 148 147 146
+151 3
+152 150 142
+152 6
+177 151 150 148 142 140
+153 6
+163 159 154 41 32 28
+154 4
+163 153 35 28
+155 6
+163 161 160 159 157 156
+156 5
+164 162 161 160 155
+157 3
+163 160 155
+158 6
+172 171 162 161 149 143
+159 5
+163 161 155 153 41
+160 4
+164 157 156 155
+161 7
+172 162 159 158 156 155 41
+162 6
+164 161 158 156 145 143
+163 5
+159 157 155 154 153
+164 5
+162 160 156 145 144
+165 5
+167 149 147 146 143
+166 5
+172 170 169 168 31
+167 6
+175 168 165 149 147 131
+168 7
+172 171 169 167 166 149 131
+169 7
+168 166 131 86 85 84 31
+170 4
+172 166 41 31
+171 4
+172 168 158 149
+172 7
+171 170 168 166 161 158 41
+173 7
+179 178 176 140 135 16 1
+174 6
+178 177 175 140 132 131
+175 6
+177 174 167 148 147 131
+176 6
+173 142 141 140 135 134
+177 5
+175 174 152 148 140
+178 6
+174 173 140 133 132 1
+179 5
+173 135 18 17 16
+180 5
+195 190 185 182 15
+181 5
+193 189 185 183 182
+182 8
+196 190 187 185 184 183 181 180
+183 6
+196 194 189 188 182 181
+184 6
+192 191 190 187 182 25
+185 7
+193 182 181 180 15 4 2
+186 5
+201 199 198 197 188
+187 6
+202 200 196 184 182 25
+188 5
+201 194 189 186 183
+189 4
+193 188 183 181
+190 5
+195 192 184 182 180
+191 5
+192 184 25 24 20
+192 6
+195 191 190 184 76 24
+193 4
+189 185 181 4
+194 5
+202 201 196 188 183
+195 7
+192 190 180 76 15 11 9
+196 5
+202 194 187 183 182
+197 4
+199 198 186 102
+198 6
+208 201 197 186 105 102
+199 3
+197 186 102
+200 5
+208 202 187 25 21
+201 6
+208 202 198 194 188 186
+202 6
+208 201 200 196 194 187
+203 6
+208 207 206 204 112 21
+204 6
+206 205 203 22 21 19
+205 7
+206 204 139 138 136 72 22
+206 5
+207 205 204 203 136
+207 6
+206 203 136 130 114 112
+208 11
+203 202 201 200 198 112 108 105 104 99 21
+209 6
+211 210 92 71 68 66
+210 7
+211 209 94 92 76 75 24
+211 5
+210 209 76 68 9
diff --git a/pysal/examples/book.gal b/pysal/examples/book.gal
new file mode 100644
index 0000000..4d6fc1d
--- /dev/null
+++ b/pysal/examples/book.gal
@@ -0,0 +1,33 @@
+16
+1 2
+2 5
+2 3
+1 3 6
+3 3
+2 4 7
+4 2
+3 8
+5 3
+1 6 9
+6 4
+2 5 7 10
+7 4
+3 6 8 11
+8 3
+4 7 12
+9 3
+5 10 13
+10 4
+6 9 11 14
+11 4
+7 10 12 15
+12 3
+8 11 16
+13 2
+9 14
+14 3
+10 13 15
+15 3
+11 14 16
+16 2
+12 15
diff --git a/pysal/examples/book.txt b/pysal/examples/book.txt
new file mode 100644
index 0000000..87bd9c9
--- /dev/null
+++ b/pysal/examples/book.txt
@@ -0,0 +1,18 @@
+16,6
+"id","y","z","z2","z4","wzz"
+1,39,-6.375,40.640625,1.651660e+03,68.531250
+2,41,-4.375,19.140625,3.663635e+02,74.921875
+3,38,-7.375,54.390625,2.958340e+03,67.296875
+4,45,-0.375,0.140625,1.977539e-02,4.031250
+5,39,-6.375,40.640625,1.651660e+03,45.421875
+6,42,-3.375,11.390625,1.297463e+02,38.812500
+7,41,-4.375,19.140625,3.663635e+02,50.312500
+8,42,-3.375,11.390625,1.297463e+02,-2.953125
+9,48,2.625,6.890625,4.748071e+01,-2.953125
+10,49,3.625,13.140625,1.726760e+02,27.187500
+11,48,2.625,6.890625,4.748071e+01,24.937500
+12,51,5.625,31.640625,1.001129e+03,49.921875
+13,47,1.625,2.640625,6.972900e+00,13.406250
+14,51,5.625,31.640625,1.001129e+03,55.546875
+15,50,4.625,21.390625,4.575588e+02,82.671875
+16,55,9.625,92.640625,8.582285e+03,98.6562509
diff --git a/pysal/examples/burkitt.dbf b/pysal/examples/burkitt.dbf
new file mode 100644
index 0000000..e7ecd09
Binary files /dev/null and b/pysal/examples/burkitt.dbf differ
diff --git a/pysal/examples/burkitt.shp b/pysal/examples/burkitt.shp
new file mode 100644
index 0000000..bcd3d72
Binary files /dev/null and b/pysal/examples/burkitt.shp differ
diff --git a/pysal/examples/burkitt.shx b/pysal/examples/burkitt.shx
new file mode 100644
index 0000000..459563f
Binary files /dev/null and b/pysal/examples/burkitt.shx differ
diff --git a/pysal/examples/calempdensity.csv b/pysal/examples/calempdensity.csv
new file mode 100644
index 0000000..bfe8734
--- /dev/null
+++ b/pysal/examples/calempdensity.csv
@@ -0,0 +1,59 @@
+"Geographic Area","Geographic Area","Geographic Name","GEONAME","GEOCOMP","STATE","Number of Employees for All Sectors","Number of employees","Class Number","sq. km","emp/sq km"
+"05000US06001","06001","Alameda County, California","Alameda County, California","00","06",630171,630171,5,1910.1,329.92
+"05000US06003","06003","Alpine County, California","Alpine County, California","00","06",813,813,1,1913.1,0.42
+"05000US06005","06005","Amador County, California","Amador County, California","00","06",9061,9061,2,1534.7,5.9
+"05000US06007","06007","Butte County, California","Butte County, California","00","06",59578,59578,3,4246.6,14.03
+"05000US06009","06009","Calaveras County, California","Calaveras County, California","00","06",7344,7344,2,2642.3,2.78
+"05000US06011","06011","Colusa County, California","Colusa County, California","00","06",4000,4000,1,2980.5,1.34
+"05000US06013","06013","Contra Costa County, California","Contra Costa County, California","00","06",338156,338156,5,1865.5,181.27
+"05000US06015","06015","Del Norte County, California","Del Norte County, California","00","06",4303,4303,1,2610.4,1.65
+"05000US06017","06017","El Dorado County, California","El Dorado County, California","00","06",44477,44477,3,4432.8,10.03
+"05000US06019","06019","Fresno County, California","Fresno County, California","00","06",257975,257975,4,15444.7,16.7
+"05000US06021","06021","Glenn County, California","Glenn County, California","00","06",4487,4487,1,3405.5,1.32
+"05000US06023","06023","Humboldt County, California","Humboldt County, California","00","06",36962,36962,3,9253.5,3.99
+"05000US06025","06025","Imperial County, California","Imperial County, California","00","06",34156,34156,3,10813.4,3.16
+"05000US06027","06027","Inyo County, California","Inyo County, California","00","06",5820,5820,1,26397.5,0.22
+"05000US06029","06029","Kern County, California","Kern County, California","00","06",183412,183412,4,21086.8,8.7
+"05000US06031","06031","Kings County, California","Kings County, California","00","06",23610,23610,2,3598.8,6.56
+"05000US06033","06033","Lake County, California","Lake County, California","00","06",10648,10648,2,3259.4,3.27
+"05000US06035","06035","Lassen County, California","Lassen County, California","00","06",3860,3860,1,11803.9,0.33
+"05000US06037","06037","Los Angeles County, California","Los Angeles County, California","00","06",3895886,3895886,5,10515.3,370.5
+"05000US06039","06039","Madera County, California","Madera County, California","00","06",24957,24957,2,5538.5,4.51
+"05000US06041","06041","Marin County, California","Marin County, California","00","06",101358,101358,4,1346.2,75.29
+"05000US06043","06043","Mariposa County, California","Mariposa County, California","00","06",3739,3739,1,3758.6,0.99
+"05000US06045","06045","Mendocino County, California","Mendocino County, California","00","06",24898,24898,2,9089,2.74
+"05000US06047","06047","Merced County, California","Merced County, California","00","06",43369,43369,3,4995.8,8.68
+"05000US06049","06049","Modoc County, California","Modoc County, California","00","06",1467,1467,1,10215.9,0.14
+"05000US06051","06051","Mono County, California","Mono County, California","00","06",7289,7289,1,7885.2,0.92
+"05000US06053","06053","Monterey County, California","Monterey County, California","00","06",108660,108660,4,8603.8,12.63
+"05000US06055","06055","Napa County, California","Napa County, California","00","06",56029,56029,3,1952.5,28.7
+"05000US06057","06057","Nevada County, California","Nevada County, California","00","06",29805,29805,3,2480.3,12.02
+"05000US06059","06059","Orange County, California","Orange County, California","00","06",1478452,1478452,5,2045.3,722.85
+"05000US06061","06061","Placer County, California","Placer County, California","00","06",133427,133427,4,3637.4,36.68
+"05000US06063","06063","Plumas County, California","Plumas County, California","00","06",4863,4863,1,6614.8,0.74
+"05000US06065","06065","Riverside County, California","Riverside County, California","00","06",556789,556789,5,18669.1,29.82
+"05000US06067","06067","Sacramento County, California","Sacramento County, California","00","06",480346,480346,5,2501.1,192.05
+"05000US06069","06069","San Benito County, California","San Benito County, California","00","06",12163,12163,2,3597.9,3.38
+"05000US06071","06071","San Bernardino County, California","San Bernardino County, California","00","06",579135,579135,5,51961.2,11.15
+"05000US06073","06073","San Diego County, California","San Diego County, California","00","06",1205862,1205862,5,10889.6,110.74
+"05000US06075","06075","San Francisco County, California","San Francisco County, California","00","06",497485,497485,5,121,4111.45
+"05000US06077","06077","San Joaquin County, California","San Joaquin County, California","00","06",179276,179276,4,3624.1,49.47
+"05000US06079","06079","San Luis Obispo County, California","San Luis Obispo County, California","00","06",88413,88413,3,8558.7,10.33
+"05000US06081","06081","San Mateo County, California","San Mateo County, California","00","06",368859,368859,5,1163.2,317.11
+"05000US06083","06083","Santa Barbara County, California","Santa Barbara County, California","00","06",145202,145202,4,7092.6,20.47
+"05000US06085","06085","Santa Clara County, California","Santa Clara County, California","00","06",886011,886011,5,3344.3,264.93
+"05000US06087","06087","Santa Cruz County, California","Santa Cruz County, California","00","06",76488,76488,3,1154.3,66.26
+"05000US06089","06089","Shasta County, California","Shasta County, California","00","06",52804,52804,3,9804.8,5.39
+"05000US06091","06091","Sierra County, California","Sierra County, California","00","06",324,324,1,2469.4,0.13
+"05000US06093","06093","Siskiyou County, California","Siskiyou County, California","00","06",9992,9992,2,16284,0.61
+"05000US06095","06095","Solano County, California","Solano County, California","00","06",108653,108653,4,2145,50.65
+"05000US06097","06097","Sonoma County, California","Sonoma County, California","00","06",165261,165261,4,4082.4,40.48
+"05000US06099","06099","Stanislaus County, California","Stanislaus County, California","00","06",141928,141928,4,3870.9,36.67
+"05000US06101","06101","Sutter County, California","Sutter County, California","00","06",20430,20430,2,1561,13.09
+"05000US06103","06103","Tehama County, California","Tehama County, California","00","06",13809,13809,2,7643.2,1.81
+"05000US06105","06105","Trinity County, California","Trinity County, California","00","06",1668,1668,1,8233.3,0.2
+"05000US06107","06107","Tulare County, California","Tulare County, California","00","06",94949,94949,4,12495,7.6
+"05000US06109","06109","Tuolumne County, California","Tuolumne County, California","00","06",14519,14519,2,5790.3,2.51
+"05000US06111","06111","Ventura County, California","Ventura County, California","00","06",273745,273745,5,4781,57.26
+"05000US06113","06113","Yolo County, California","Yolo County, California","00","06",63769,63769,3,2622.2,24.32
+"05000US06115","06115","Yuba County, California","Yuba County, California","00","06",11374,11374,2,1632.9,6.97
diff --git a/pysal/examples/columbus.dbf b/pysal/examples/columbus.dbf
new file mode 100644
index 0000000..f850bda
Binary files /dev/null and b/pysal/examples/columbus.dbf differ
diff --git a/pysal/examples/columbus.gal b/pysal/examples/columbus.gal
new file mode 100644
index 0000000..b8edabc
--- /dev/null
+++ b/pysal/examples/columbus.gal
@@ -0,0 +1,99 @@
+49
+1 2
+2 3
+2 3
+4 3 1
+3 4
+5 4 2 1
+4 4
+8 3 5 2
+5 8
+16 15 11 8 9 6 3 4
+6 2
+9 5
+7 4
+14 13 12 8
+8 6
+13 12 11 5 4 7
+9 8
+26 25 22 20 15 10 6 5
+10 4
+22 20 17 9
+11 5
+16 15 12 5 8
+12 6
+16 14 13 11 8 7
+13 4
+14 12 7 8
+14 6
+19 12 13 16 18 7
+15 6
+25 16 26 5 9 11
+16 8
+25 24 18 15 5 11 12 14
+17 3
+23 20 10
+18 4
+24 19 16 14
+19 3
+18 24 14
+20 10
+35 33 27 22 23 32 40 17 10 9
+21 3
+34 24 30
+22 6
+28 27 26 20 10 9
+23 3
+32 17 20
+24 7
+30 29 25 16 18 21 19
+25 8
+30 29 15 26 28 16 24 9
+26 6
+29 28 22 9 25 15
+27 4
+33 28 20 22
+28 9
+38 37 29 27 33 35 22 26 25
+29 7
+37 30 28 38 24 25 26
+30 5
+37 29 24 25 21
+31 3
+39 36 34
+32 4
+41 40 23 20
+33 4
+35 20 27 28
+34 4
+42 36 21 31
+35 7
+44 43 38 40 20 33 28
+36 5
+46 39 34 42 31
+37 6
+45 38 43 28 29 30
+38 6
+43 35 44 28 37 29
+39 3
+46 36 31
+40 5
+47 41 32 35 20
+41 3
+47 32 40
+42 2
+34 36
+43 6
+48 45 35 44 38 37
+44 5
+49 48 35 43 38
+45 4
+48 49 37 43
+46 2
+36 39
+47 2
+40 41
+48 4
+49 44 43 45
+49 3
+44 48 45
diff --git a/pysal/examples/columbus.html b/pysal/examples/columbus.html
new file mode 100644
index 0000000..af8b1ac
--- /dev/null
+++ b/pysal/examples/columbus.html
@@ -0,0 +1,132 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+      "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <title>SAL Data Sets - Columbus</title>
+</head>
+
+<body>
+<h1>Columbus</h1>
+
+<h2>Data provided "as is," no warranties</h2>
+
+<h2>Description</h2>
+
+<p>Crime data for 49 neighborhoods in Columbus, OH, 1980</p>
+
+<p>Type = polygon shape file, projected, arbitrary units</p>
+
+<p>Observations = 49</p>
+
+<p>Variables = 20</p>
+
+<h2>Source</h2>
+
+<p>Anselin, Luc (1988). Spatial Econometrics. Boston, Kluwer 
+   Academic, Table 12.1, p. 189.</p>
+
+<h2>Variables</h2>
+
+<table>
+  <thead>
+    <tr>
+      <th>Variable</th>
+      <th>Description</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <td>AREA</td>
+      <td>neighborhood area (computed by ArcView)</td>
+    </tr>
+    <tr>
+       <td>PERIMETER</td>
+       <td>neighborhood perimeter (computed by ArcView)</td>
+    </tr>
+    <tr>
+       <td>COLUMBUS_</td>
+       <td>internal polygon ID (generated by ArcView)</td>
+    </tr>
+    <tr>
+       <td>COLUMBUS_I</td>
+       <td>internal polygon ID (generated by ArcView)</td>
+    </tr>
+    <tr>
+       <td>POLYID</td>
+       <td>neighborhood ID, used in GeoDa User's Guide and tutorials</td>
+    </tr>
+    <tr>
+       <td>NEIG</td>
+       <td>neighborhood ID, used in Spatial Econometrics examples</td>
+    </tr>
+    <tr>
+       <td>HOVAL</td>
+       <td>housing value (in $1,000)</td>
+    </tr>
+    <tr>
+       <td>INC</td>
+       <td>household income (in $1,000)</td>
+    </tr>
+    <tr>
+       <td>CRIME</td>
+       <td>residential burglaries and vehicle thefts per 1000 households</td>
+    </tr>
+    <tr>
+       <td>OPEN</td>
+       <td>open space (area)</td>
+    </tr>
+    <tr>
+       <td>PLUMB</td>
+       <td>percent housing units without plumbing</td>
+    </tr>
+    <tr>
+       <td>DISCBD</td>
+       <td>distance to CBD</td>
+    </tr>
+    <tr>
+       <td>X</td>
+       <td>centroid x coordinate (in arbitrary digitizing units)</td>
+    </tr>
+    <tr>
+       <td>Y</td>
+       <td>centroid y coordinate (in arbitrary digitizing units)</td>
+    </tr>
+    <tr>
+       <td>NSA</td>
+       <td>north-south indicator variable (North = 1)</td>
+    </tr>
+    <tr>
+       <td>NSB</td>
+       <td>other north-south indicator variable (North = 1)</td>
+    </tr>
+    <tr>
+       <td>EW</td>
+       <td>east-west indicator variable (East = 1)</td>
+    </tr>
+    <tr>
+       <td>CP</td>
+       <td>core-periphery indicator variable (Core = 1)</td>
+    </tr>
+    <tr>
+       <td>THOUS</td>
+       <td>constant (= 1000)</td>
+    </tr>
+    <tr>
+       <td>NEIGNO</td>
+       <td>another neighborhood ID variable (NEIG + 1000)</td>
+    </tr>
+  </tbody>
+</table>
+
+<br />
+<hr />
+
+<p>Prepared by <a href="mailto:anselin at uiuc.edu">Luc Anselin</a></p>
+
+<p><a href="http://sal.agecon.uiuc.edu">UIUC-ACE Spatial Analysis
+Laboratory</a></p>
+
+<p>Last updated June 16, 2003</p>
+</body>
+</html>
diff --git a/pysal/examples/columbus.json b/pysal/examples/columbus.json
new file mode 100644
index 0000000..579b7c3
--- /dev/null
+++ b/pysal/examples/columbus.json
@@ -0,0 +1,104 @@
+{
+"type": "FeatureCollection",
+                                                                                
+"features": [
+{ "type": "Feature", "properties": { "AREA": 0.309441, "PERIMETER": 2.440629, "COLUMBUS_": 2.0, "COLUMBUS_I": 5.0, "POLYID": 1.0, "NEIG": 5, "HOVAL": 80.467003, "INC": 19.531, "CRIME": 15.72598, "OPEN": 2.850747, "PLUMB": 0.217155, "DISCBD": 5.03, "X": 38.799999, "Y": 44.07, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1005.0 }, "bbox": [ 8.559700012207031, 13.995059967041016, 9.09996509552002, 14.742449760437012 ], "geometry": { "type": "Polygon", "coordinate [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.259329, "PERIMETER": 2.236939, "COLUMBUS_": 3.0, "COLUMBUS_I": 1.0, "POLYID": 2.0, "NEIG": 1, "HOVAL": 44.567001, "INC": 21.232, "CRIME": 18.801754, "OPEN": 5.29672, "PLUMB": 0.320581, "DISCBD": 4.27, "X": 35.619999, "Y": 42.380001, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1001.0 }, "bbox": [ 7.950088977813721, 13.727390289306641, 8.666550636291504, 14.263930320739746 ], "geometry": { "type": "Polygon", "coord [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.192468, "PERIMETER": 2.187547, "COLUMBUS_": 4.0, "COLUMBUS_I": 6.0, "POLYID": 3.0, "NEIG": 6, "HOVAL": 26.35, "INC": 15.956, "CRIME": 30.626781, "OPEN": 4.534649, "PLUMB": 0.374404, "DISCBD": 3.89, "X": 39.82, "Y": 41.18, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1006.0 }, "bbox": [ 8.653305053710938, 13.544429779052734, 9.351485252380371, 14.008090019226074 ], "geometry": { "type": "Polygon", "coordinates": [  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.083841, "PERIMETER": 1.427635, "COLUMBUS_": 5.0, "COLUMBUS_I": 2.0, "POLYID": 4.0, "NEIG": 2, "HOVAL": 33.200001, "INC": 4.477, "CRIME": 32.38776, "OPEN": 0.394427, "PLUMB": 1.186944, "DISCBD": 3.7, "X": 36.5, "Y": 40.52, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1002.0 }, "bbox": [ 8.19859504699707, 13.586509704589844, 8.685274124145508, 13.861700057983398 ], "geometry": { "type": "Polygon", "coordinates": [ [ [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.488888, "PERIMETER": 2.997133, "COLUMBUS_": 6.0, "COLUMBUS_I": 7.0, "POLYID": 5.0, "NEIG": 7, "HOVAL": 23.225, "INC": 11.252, "CRIME": 50.73151, "OPEN": 0.405664, "PLUMB": 0.624596, "DISCBD": 2.83, "X": 40.009998, "Y": 38.0, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1007.0 }, "bbox": [ 8.677577018737793, 12.861089706420898, 9.401384353637695, 13.722209930419922 ], "geometry": { "type": "Polygon", "coordinates": [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.283079, "PERIMETER": 2.335634, "COLUMBUS_": 7.0, "COLUMBUS_I": 8.0, "POLYID": 6.0, "NEIG": 8, "HOVAL": 28.75, "INC": 16.028999, "CRIME": 26.066658, "OPEN": 0.563075, "PLUMB": 0.25413, "DISCBD": 3.78, "X": 43.75, "Y": 39.279999, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1008.0 }, "bbox": [ 9.333296775817871, 13.272419929504395, 10.180600166320801, 13.698240280151367 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.257084, "PERIMETER": 2.554577, "COLUMBUS_": 8.0, "COLUMBUS_I": 4.0, "POLYID": 7.0, "NEIG": 4, "HOVAL": 75.0, "INC": 8.438, "CRIME": 0.178269, "OPEN": 0.0, "PLUMB": 2.402402, "DISCBD": 2.74, "X": 33.360001, "Y": 38.41, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1004.0 }, "bbox": [ 7.801973819732666, 12.942020416259766, 8.456572532653809, 13.644510269165039 ], "geometry": { "type": "Polygon", "coordinates": [ [ [  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.204954, "PERIMETER": 2.139524, "COLUMBUS_": 9.0, "COLUMBUS_I": 3.0, "POLYID": 8.0, "NEIG": 3, "HOVAL": 37.125, "INC": 11.337, "CRIME": 38.425858, "OPEN": 3.483478, "PLUMB": 2.739726, "DISCBD": 2.89, "X": 36.709999, "Y": 38.709999, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1003.0 }, "bbox": [ 8.10498046875, 13.104069709777832, 8.733969688415527, 13.644430160522461 ], "geometry": { "type": "Polygon", "coordinates [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.500755, "PERIMETER": 3.169707, "COLUMBUS_": 10.0, "COLUMBUS_I": 18.0, "POLYID": 9.0, "NEIG": 18, "HOVAL": 52.599998, "INC": 17.586, "CRIME": 30.515917, "OPEN": 0.527488, "PLUMB": 0.890736, "DISCBD": 3.17, "X": 43.439999, "Y": 35.919998, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1018.0 }, "bbox": [ 9.124277114868164, 12.595190048217773, 10.095430374145508, 13.298540115356445 ], "geometry": { "type": "Polygon", " [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.246689, "PERIMETER": 2.087235, "COLUMBUS_": 11.0, "COLUMBUS_I": 10.0, "POLYID": 10.0, "NEIG": 10, "HOVAL": 96.400002, "INC": 13.598, "CRIME": 34.000835, "OPEN": 1.548348, "PLUMB": 0.557724, "DISCBD": 4.33, "X": 47.610001, "Y": 36.419998, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1010.0 }, "bbox": [ 10.015439987182617, 12.72404956817627, 10.649680137634277, 13.272509574890137 ], "geometry": { "type": "Polygon",  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.041012, "PERIMETER": 0.919488, "COLUMBUS_": 12.0, "COLUMBUS_I": 38.0, "POLYID": 11.0, "NEIG": 38, "HOVAL": 19.700001, "INC": 7.467, "CRIME": 62.275448, "OPEN": 0.0, "PLUMB": 1.479915, "DISCBD": 1.9, "X": 37.849998, "Y": 36.299999, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1038.0 }, "bbox": [ 8.572946548461914, 12.810150146484375, 8.757728576660156, 13.116339683532715 ], "geometry": { "type": "Polygon", "coordin [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.035769, "PERIMETER": 0.902125, "COLUMBUS_": 13.0, "COLUMBUS_I": 37.0, "POLYID": 12.0, "NEIG": 37, "HOVAL": 19.9, "INC": 10.048, "CRIME": 56.705669, "OPEN": 3.157895, "PLUMB": 2.635046, "DISCBD": 1.91, "X": 37.130001, "Y": 36.119999, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1037.0 }, "bbox": [ 8.456572532653809, 12.809200286865234, 8.632829666137695, 13.106889724731445 ], "geometry": { "type": "Polygon", "coord [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.034377, "PERIMETER": 0.93659, "COLUMBUS_": 14.0, "COLUMBUS_I": 39.0, "POLYID": 13.0, "NEIG": 39, "HOVAL": 41.700001, "INC": 9.549, "CRIME": 46.716129, "OPEN": 0.0, "PLUMB": 6.328423, "DISCBD": 2.09, "X": 35.950001, "Y": 36.400002, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1039.0 }, "bbox": [ 8.145203590393066, 12.930660247802734, 8.487373352050781, 13.104069709777832 ], "geometry": { "type": "Polygon", "coordin [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.060884, "PERIMETER": 1.128424, "COLUMBUS_": 15.0, "COLUMBUS_I": 40.0, "POLYID": 14.0, "NEIG": 40, "HOVAL": 42.900002, "INC": 9.963, "CRIME": 57.066132, "OPEN": 0.477104, "PLUMB": 5.110962, "DISCBD": 1.83, "X": 35.720001, "Y": 35.599998, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1040.0 }, "bbox": [ 8.062442779541016, 12.787229537963867, 8.512937545776367, 12.944100379943848 ], "geometry": { "type": "Polygon", "c [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.106653, "PERIMETER": 1.437606, "COLUMBUS_": 16.0, "COLUMBUS_I": 9.0, "POLYID": 15.0, "NEIG": 9, "HOVAL": 18.0, "INC": 9.873, "CRIME": 48.585487, "OPEN": 0.174325, "PLUMB": 1.311475, "DISCBD": 1.7, "X": 39.610001, "Y": 34.91, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1009.0 }, "bbox": [ 8.757728576660156, 12.532369613647461, 9.233386993408203, 12.86400032043457 ], "geometry": { "type": "Polygon", "coordinates":  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.093154, "PERIMETER": 1.340061, "COLUMBUS_": 17.0, "COLUMBUS_I": 36.0, "POLYID": 16.0, "NEIG": 36, "HOVAL": 18.799999, "INC": 7.625, "CRIME": 54.838711, "OPEN": 0.533737, "PLUMB": 4.6875, "DISCBD": 1.1, "X": 37.599998, "Y": 34.080002, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1036.0 }, "bbox": [ 8.50916576385498, 12.361550331115723, 8.785566329956055, 12.861089706420898 ], "geometry": { "type": "Polygon", "coord [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.102087, "PERIMETER": 1.382359, "COLUMBUS_": 18.0, "COLUMBUS_I": 11.0, "POLYID": 17.0, "NEIG": 11, "HOVAL": 41.75, "INC": 9.798, "CRIME": 36.868774, "OPEN": 0.448232, "PLUMB": 1.619745, "DISCBD": 4.47, "X": 48.580002, "Y": 34.459999, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1011.0 }, "bbox": [ 10.356060028076172, 12.44025993347168, 10.709790229797363, 12.838859558105469 ], "geometry": { "type": "Polygon", "coor [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.055494, "PERIMETER": 1.183352, "COLUMBUS_": 19.0, "COLUMBUS_I": 42.0, "POLYID": 18.0, "NEIG": 42, "HOVAL": 60.0, "INC": 13.185, "CRIME": 43.962486, "OPEN": 24.998068, "PLUMB": 13.849287, "DISCBD": 1.58, "X": 36.150002, "Y": 33.919998, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1042.0 }, "bbox": [ 8.358473777770996, 12.355310440063477, 8.589687347412109, 12.788189888000488 ], "geometry": { "type": "Polygon", "coo [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.061342, "PERIMETER": 1.249247, "COLUMBUS_": 20.0, "COLUMBUS_I": 41.0, "POLYID": 19.0, "NEIG": 41, "HOVAL": 30.6, "INC": 11.618, "CRIME": 54.521965, "OPEN": 0.111111, "PLUMB": 2.622951, "DISCBD": 1.53, "X": 35.759998, "Y": 34.66, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1041.0 }, "bbox": [ 8.122319221496582, 12.445659637451172, 8.431012153625488, 12.788060188293457 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.444629, "PERIMETER": 3.174601, "COLUMBUS_": 21.0, "COLUMBUS_I": 17.0, "POLYID": 20.0, "NEIG": 17, "HOVAL": 81.266998, "INC": 31.07, "CRIME": 0.223797, "OPEN": 5.318607, "PLUMB": 0.167224, "DISCBD": 3.57, "X": 46.73, "Y": 31.91, "NSA": 0.0, "NSB": 1.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1017.0 }, "bbox": [ 9.841083526611328, 11.741860389709473, 10.425640106201172, 12.781140327453613 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.699258, "PERIMETER": 5.07749, "COLUMBUS_": 22.0, "COLUMBUS_I": 43.0, "POLYID": 21.0, "NEIG": 43, "HOVAL": 19.975, "INC": 10.655, "CRIME": 40.074074, "OPEN": 1.643756, "PLUMB": 1.559576, "DISCBD": 1.41, "X": 34.080002, "Y": 30.42, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1043.0 }, "bbox": [ 7.06132984161377, 11.527389526367188, 8.563572883605957, 12.725419998168945 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.192891, "PERIMETER": 1.992717, "COLUMBUS_": 23.0, "COLUMBUS_I": 19.0, "POLYID": 22.0, "NEIG": 19, "HOVAL": 30.450001, "INC": 11.709, "CRIME": 33.705048, "OPEN": 4.539754, "PLUMB": 1.785714, "DISCBD": 2.45, "X": 43.369999, "Y": 33.459999, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1019.0 }, "bbox": [ 9.357977867126465, 12.226559638977051, 10.015439987182617, 12.72404956817627 ], "geometry": { "type": "Polygon", " [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.24712, "PERIMETER": 2.147528, "COLUMBUS_": 24.0, "COLUMBUS_I": 12.0, "POLYID": 23.0, "NEIG": 12, "HOVAL": 47.733002, "INC": 21.155001, "CRIME": 20.048504, "OPEN": 0.532632, "PLUMB": 0.216763, "DISCBD": 4.78, "X": 49.610001, "Y": 32.650002, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1012.0 }, "bbox": [ 10.421750068664551, 11.990639686584473, 10.888489723205566, 12.652009963989258 ], "geometry": { "type": "Polygon [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.192226, "PERIMETER": 2.240392, "COLUMBUS_": 25.0, "COLUMBUS_I": 35.0, "POLYID": 24.0, "NEIG": 35, "HOVAL": 53.200001, "INC": 14.236, "CRIME": 38.297871, "OPEN": 0.62622, "PLUMB": 18.811075, "DISCBD": 0.42, "X": 36.599998, "Y": 32.09, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1035.0 }, "bbox": [ 8.131139755249023, 12.088789939880371, 8.790392875671387, 12.645350456237793 ], "geometry": { "type": "Polygon", "coor [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.17168, "PERIMETER": 1.666489, "COLUMBUS_": 26.0, "COLUMBUS_I": 32.0, "POLYID": 25.0, "NEIG": 32, "HOVAL": 17.9, "INC": 8.461, "CRIME": 61.299175, "OPEN": 0.0, "PLUMB": 6.529851, "DISCBD": 0.83, "X": 39.360001, "Y": 32.880001, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1032.0 }, "bbox": [ 8.707547187805176, 12.125729560852051, 9.131059646606445, 12.63424015045166 ], "geometry": { "type": "Polygon", "coordinates": [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.107298, "PERIMETER": 1.406823, "COLUMBUS_": 27.0, "COLUMBUS_I": 20.0, "POLYID": 26.0, "NEIG": 20, "HOVAL": 20.299999, "INC": 8.085, "CRIME": 40.969742, "OPEN": 1.238288, "PLUMB": 2.534275, "DISCBD": 1.5, "X": 41.130001, "Y": 33.139999, "NSA": 1.0, "NSB": 1.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1020.0 }, "bbox": [ 9.124277114868164, 12.182160377502441, 9.394844055175781, 12.63424015045166 ], "geometry": { "type": "Polygon", "coo [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.137802, "PERIMETER": 1.780751, "COLUMBUS_": 28.0, "COLUMBUS_I": 21.0, "POLYID": 27.0, "NEIG": 21, "HOVAL": 34.099998, "INC": 10.822, "CRIME": 52.79443, "OPEN": 19.368099, "PLUMB": 1.483516, "DISCBD": 2.24, "X": 43.950001, "Y": 31.610001, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1021.0 }, "bbox": [ 9.468775749206543, 12.002750396728516, 10.007160186767578, 12.341389656066895 ], "geometry": { "type": "Polygon",  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.174773, "PERIMETER": 1.637148, "COLUMBUS_": 29.0, "COLUMBUS_I": 31.0, "POLYID": 28.0, "NEIG": 31, "HOVAL": 22.85, "INC": 7.856, "CRIME": 56.919785, "OPEN": 0.509305, "PLUMB": 3.001072, "DISCBD": 1.41, "X": 41.310001, "Y": 30.9, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1031.0 }, "bbox": [ 9.084967613220215, 11.734999656677246, 9.492551803588867, 12.234550476074219 ], "geometry": { "type": "Polygon", "coordinate [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.085972, "PERIMETER": 1.312158, "COLUMBUS_": 30.0, "COLUMBUS_I": 33.0, "POLYID": 29.0, "NEIG": 33, "HOVAL": 32.5, "INC": 8.681, "CRIME": 60.750446, "OPEN": 0.0, "PLUMB": 2.645051, "DISCBD": 0.81, "X": 39.720001, "Y": 30.639999, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1033.0 }, "bbox": [ 8.790384292602539, 11.792989730834961, 9.131059646606445, 12.182160377502441 ], "geometry": { "type": "Polygon", "coordinates [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.104355, "PERIMETER": 1.524931, "COLUMBUS_": 31.0, "COLUMBUS_I": 34.0, "POLYID": 30.0, "NEIG": 34, "HOVAL": 22.5, "INC": 13.906, "CRIME": 68.892044, "OPEN": 1.63878, "PLUMB": 15.600624, "DISCBD": 0.37, "X": 38.290001, "Y": 30.35, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1034.0 }, "bbox": [ 8.386432647705078, 11.785490036010742, 8.898137092590332, 12.125729560852051 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.117409, "PERIMETER": 1.716047, "COLUMBUS_": 32.0, "COLUMBUS_I": 45.0, "POLYID": 31.0, "NEIG": 45, "HOVAL": 31.799999, "INC": 16.940001, "CRIME": 17.677214, "OPEN": 3.936443, "PLUMB": 0.85389, "DISCBD": 3.78, "X": 27.940001, "Y": 29.85, "NSA": 1.0, "NSB": 1.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1045.0 }, "bbox": [ 6.456532001495361, 11.781330108642578, 7.185831069946289, 12.078980445861816 ], "geometry": { "type": "Polygon", "co [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.18558, "PERIMETER": 2.108951, "COLUMBUS_": 33.0, "COLUMBUS_I": 13.0, "POLYID": 32.0, "NEIG": 13, "HOVAL": 40.299999, "INC": 18.941999, "CRIME": 19.145592, "OPEN": 2.221022, "PLUMB": 0.255102, "DISCBD": 4.76, "X": 50.110001, "Y": 29.91, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1013.0 }, "bbox": [ 10.424249649047852, 11.633870124816895, 11.204830169677734, 12.037540435791016 ], "geometry": { "type": "Polygon", " [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.087472, "PERIMETER": 1.507971, "COLUMBUS_": 34.0, "COLUMBUS_I": 22.0, "POLYID": 33.0, "NEIG": 22, "HOVAL": 23.6, "INC": 9.918, "CRIME": 41.968163, "OPEN": 0.0, "PLUMB": 1.023891, "DISCBD": 2.28, "X": 44.099998, "Y": 30.4, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1022.0 }, "bbox": [ 9.480667114257812, 11.76294994354248, 10.049909591674805, 12.009679794311523 ], "geometry": { "type": "Polygon", "coordinates": [  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.226594, "PERIMETER": 2.519132, "COLUMBUS_": 35.0, "COLUMBUS_I": 44.0, "POLYID": 34.0, "NEIG": 44, "HOVAL": 28.450001, "INC": 14.948, "CRIME": 23.974028, "OPEN": 3.029087, "PLUMB": 0.386803, "DISCBD": 3.06, "X": 30.32, "Y": 28.26, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1044.0 }, "bbox": [ 6.966993808746338, 11.329950332641602, 7.733179092407227, 11.872119903564453 ], "geometry": { "type": "Polygon", "coordina [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.175453, "PERIMETER": 1.974937, "COLUMBUS_": 36.0, "COLUMBUS_I": 23.0, "POLYID": 35.0, "NEIG": 23, "HOVAL": 27.0, "INC": 12.814, "CRIME": 39.175053, "OPEN": 4.220401, "PLUMB": 0.633675, "DISCBD": 2.37, "X": 43.700001, "Y": 29.18, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1023.0 }, "bbox": [ 9.341614723205566, 11.531109809875488, 10.05659008026123, 11.866829872131348 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.17813, "PERIMETER": 1.790058, "COLUMBUS_": 37.0, "COLUMBUS_I": 46.0, "POLYID": 36.0, "NEIG": 46, "HOVAL": 36.299999, "INC": 18.739, "CRIME": 14.305556, "OPEN": 6.773331, "PLUMB": 0.332349, "DISCBD": 4.23, "X": 27.27, "Y": 28.209999, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1046.0 }, "bbox": [ 6.456532001495361, 11.439629554748535, 7.019946098327637, 11.82621955871582 ], "geometry": { "type": "Polygon", "coordi [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.121154, "PERIMETER": 1.402252, "COLUMBUS_": 38.0, "COLUMBUS_I": 30.0, "POLYID": 37.0, "NEIG": 30, "HOVAL": 43.299999, "INC": 17.017, "CRIME": 42.445076, "OPEN": 4.839273, "PLUMB": 1.230329, "DISCBD": 1.08, "X": 38.32, "Y": 28.82, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1030.0 }, "bbox": [ 8.713337898254395, 11.434320449829102, 9.095859527587891, 11.810020446777344 ], "geometry": { "type": "Polygon", "coordina [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.053881, "PERIMETER": 0.934509, "COLUMBUS_": 39.0, "COLUMBUS_I": 24.0, "POLYID": 38.0, "NEIG": 24, "HOVAL": 22.700001, "INC": 11.107, "CRIME": 53.710938, "OPEN": 0.0, "PLUMB": 0.8, "DISCBD": 1.58, "X": 41.040001, "Y": 28.780001, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1024.0 }, "bbox": [ 9.084967613220215, 11.546170234680176, 9.343691825866699, 11.792989730834961 ], "geometry": { "type": "Polygon", "coordinate [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.174823, "PERIMETER": 2.335402, "COLUMBUS_": 40.0, "COLUMBUS_I": 47.0, "POLYID": 39.0, "NEIG": 47, "HOVAL": 39.599998, "INC": 18.476999, "CRIME": 19.100863, "OPEN": 0.0, "PLUMB": 0.314663, "DISCBD": 5.53, "X": 24.25, "Y": 26.690001, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1047.0 }, "bbox": [ 5.87490701675415, 11.057000160217285, 6.481366157531738, 11.781330108642578 ], "geometry": { "type": "Polygon", "coordin [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.302908, "PERIMETER": 2.285487, "COLUMBUS_": 41.0, "COLUMBUS_I": 16.0, "POLYID": 40.0, "NEIG": 16, "HOVAL": 61.950001, "INC": 29.833, "CRIME": 16.241299, "OPEN": 6.45131, "PLUMB": 0.132743, "DISCBD": 4.4, "X": 48.439999, "Y": 27.93, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1016.0 }, "bbox": [ 10.05659008026123, 11.213310241699219, 10.808690071105957, 11.74390983581543 ], "geometry": { "type": "Polygon", "coordi [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.137024, "PERIMETER": 1.525097, "COLUMBUS_": 42.0, "COLUMBUS_I": 14.0, "POLYID": 41.0, "NEIG": 14, "HOVAL": 42.099998, "INC": 22.207001, "CRIME": 18.905146, "OPEN": 0.293317, "PLUMB": 0.247036, "DISCBD": 5.33, "X": 51.240002, "Y": 27.799999, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1014.0 }, "bbox": [ 10.799189567565918, 11.232259750366211, 11.141839981079102, 11.727809906005859 ], "geometry": { "type": "Polygo [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.266541, "PERIMETER": 2.176543, "COLUMBUS_": 43.0, "COLUMBUS_I": 49.0, "POLYID": 42.0, "NEIG": 49, "HOVAL": 44.333, "INC": 25.872999, "CRIME": 16.49189, "OPEN": 1.792993, "PLUMB": 0.134439, "DISCBD": 3.87, "X": 29.02, "Y": 26.58, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1049.0 }, "bbox": [ 6.75403881072998, 10.997920036315918, 7.404403209686279, 11.586389541625977 ], "geometry": { "type": "Polygon", "coordinate [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.060241, "PERIMETER": 0.967793, "COLUMBUS_": 44.0, "COLUMBUS_I": 29.0, "POLYID": 43.0, "NEIG": 29, "HOVAL": 25.700001, "INC": 13.38, "CRIME": 36.663612, "OPEN": 0.0, "PLUMB": 0.589226, "DISCBD": 1.95, "X": 41.09, "Y": 27.49, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1029.0 }, "bbox": [ 9.082836151123047, 11.308409690856934, 9.341614723205566, 11.559189796447754 ], "geometry": { "type": "Polygon", "coordinates":  [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.173337, "PERIMETER": 1.868044, "COLUMBUS_": 45.0, "COLUMBUS_I": 25.0, "POLYID": 44.0, "NEIG": 25, "HOVAL": 33.5, "INC": 16.961, "CRIME": 25.962263, "OPEN": 1.463993, "PLUMB": 0.329761, "DISCBD": 2.67, "X": 43.23, "Y": 27.309999, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1025.0 }, "bbox": [ 9.335508346557617, 11.211409568786621, 9.963891983032227, 11.546170234680176 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.256431, "PERIMETER": 2.193039, "COLUMBUS_": 46.0, "COLUMBUS_I": 28.0, "POLYID": 45.0, "NEIG": 28, "HOVAL": 27.733, "INC": 14.135, "CRIME": 29.028488, "OPEN": 1.006118, "PLUMB": 2.3912, "DISCBD": 2.13, "X": 39.32, "Y": 25.85, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1028.0 }, "bbox": [ 8.622117042541504, 10.821869850158691, 9.178548812866211, 11.460300445556641 ], "geometry": { "type": "Polygon", "coordinates": [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.124728, "PERIMETER": 1.841029, "COLUMBUS_": 47.0, "COLUMBUS_I": 48.0, "POLYID": 46.0, "NEIG": 48, "HOVAL": 76.099998, "INC": 18.323999, "CRIME": 16.530533, "OPEN": 9.683953, "PLUMB": 0.424628, "DISCBD": 5.27, "X": 25.469999, "Y": 25.709999, "NSA": 0.0, "NSB": 0.0, "EW": 0.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1048.0 }, "bbox": [ 6.167585849761963, 10.978030204772949, 6.678176879882812, 11.447600364685059 ], "geometry": { "type": "Polygon" [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.245249, "PERIMETER": 2.079986, "COLUMBUS_": 48.0, "COLUMBUS_I": 15.0, "POLYID": 47.0, "NEIG": 15, "HOVAL": 42.5, "INC": 18.950001, "CRIME": 27.822861, "OPEN": 0.0, "PLUMB": 0.268817, "DISCBD": 5.57, "X": 50.889999, "Y": 25.24, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1015.0 }, "bbox": [ 10.588159561157227, 10.788629531860352, 11.287420272827148, 11.315879821777344 ], "geometry": { "type": "Polygon", "coordinat [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.069762, "PERIMETER": 1.102032, "COLUMBUS_": 49.0, "COLUMBUS_I": 27.0, "POLYID": 48.0, "NEIG": 27, "HOVAL": 26.799999, "INC": 11.813, "CRIME": 26.645266, "OPEN": 4.884389, "PLUMB": 1.034807, "DISCBD": 2.33, "X": 41.209999, "Y": 25.9, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 1.0, "THOUS": 1000.0, "NEIGNO": 1027.0 }, "bbox": [ 9.093674659729004, 11.048029899597168, 9.3948974609375, 11.312379837036133 ], "geometry": { "type": "Polygon", "coordin [...]
+,
+{ "type": "Feature", "properties": { "AREA": 0.205964, "PERIMETER": 2.199169, "COLUMBUS_": 50.0, "COLUMBUS_I": 26.0, "POLYID": 49.0, "NEIG": 26, "HOVAL": 35.799999, "INC": 18.796, "CRIME": 22.541491, "OPEN": 0.259826, "PLUMB": 0.901442, "DISCBD": 3.03, "X": 42.669998, "Y": 24.959999, "NSA": 0.0, "NSB": 0.0, "EW": 1.0, "CP": 0.0, "THOUS": 1000.0, "NEIGNO": 1026.0 }, "bbox": [ 9.095559120178223, 10.828829765319824, 9.78189754486084, 11.285369873046875 ], "geometry": { "type": "Polygon", "c [...]
+
+]
+}
diff --git a/pysal/examples/columbus.shp b/pysal/examples/columbus.shp
new file mode 100644
index 0000000..d6dde43
Binary files /dev/null and b/pysal/examples/columbus.shp differ
diff --git a/pysal/examples/columbus.shx b/pysal/examples/columbus.shx
new file mode 100644
index 0000000..7086b6f
Binary files /dev/null and b/pysal/examples/columbus.shx differ
diff --git a/pysal/examples/desmith.gal b/pysal/examples/desmith.gal
new file mode 100644
index 0000000..43bb702
--- /dev/null
+++ b/pysal/examples/desmith.gal
@@ -0,0 +1,21 @@
+10
+1 2
+2 4
+2 2
+1 5
+3 2
+4 7
+4 4
+1 3 5 8
+5 4
+2 4 6 9
+6 2
+5 10
+7 2
+3 8
+8 3
+4 7 9
+9 3
+5 8 10
+10 2
+6 9
diff --git a/pysal/examples/desmith.txt b/pysal/examples/desmith.txt
new file mode 100644
index 0000000..2b0194a
--- /dev/null
+++ b/pysal/examples/desmith.txt
@@ -0,0 +1,12 @@
+10,2
+"id","z"
+1,2.24
+2,3.10
+3,4.55
+4,-5.15
+5,-4.39
+6,0.46
+7,5.54
+8,9.02
+9,-2.09
+10,-3.06
diff --git a/pysal/examples/eberly_net.dbf b/pysal/examples/eberly_net.dbf
new file mode 100644
index 0000000..1b2478c
Binary files /dev/null and b/pysal/examples/eberly_net.dbf differ
diff --git a/pysal/examples/eberly_net.shp b/pysal/examples/eberly_net.shp
new file mode 100644
index 0000000..a6e289f
Binary files /dev/null and b/pysal/examples/eberly_net.shp differ
diff --git a/pysal/examples/eberly_net.shx b/pysal/examples/eberly_net.shx
new file mode 100644
index 0000000..cc5763d
Binary files /dev/null and b/pysal/examples/eberly_net.shx differ
diff --git a/pysal/examples/eberly_net_pts_offnetwork.dbf b/pysal/examples/eberly_net_pts_offnetwork.dbf
new file mode 100644
index 0000000..2417238
Binary files /dev/null and b/pysal/examples/eberly_net_pts_offnetwork.dbf differ
diff --git a/pysal/examples/eberly_net_pts_offnetwork.shp b/pysal/examples/eberly_net_pts_offnetwork.shp
new file mode 100644
index 0000000..873a42c
Binary files /dev/null and b/pysal/examples/eberly_net_pts_offnetwork.shp differ
diff --git a/pysal/examples/eberly_net_pts_offnetwork.shx b/pysal/examples/eberly_net_pts_offnetwork.shx
new file mode 100644
index 0000000..23b1e7d
Binary files /dev/null and b/pysal/examples/eberly_net_pts_offnetwork.shx differ
diff --git a/pysal/examples/eberly_net_pts_onnetwork.dbf b/pysal/examples/eberly_net_pts_onnetwork.dbf
new file mode 100644
index 0000000..a09cb8f
Binary files /dev/null and b/pysal/examples/eberly_net_pts_onnetwork.dbf differ
diff --git a/pysal/examples/eberly_net_pts_onnetwork.shp b/pysal/examples/eberly_net_pts_onnetwork.shp
new file mode 100644
index 0000000..412d263
Binary files /dev/null and b/pysal/examples/eberly_net_pts_onnetwork.shp differ
diff --git a/pysal/examples/eberly_net_pts_onnetwork.shx b/pysal/examples/eberly_net_pts_onnetwork.shx
new file mode 100644
index 0000000..6a01e9c
Binary files /dev/null and b/pysal/examples/eberly_net_pts_onnetwork.shx differ
diff --git a/pysal/examples/examples.txt b/pysal/examples/examples.txt
new file mode 100644
index 0000000..68640e3
--- /dev/null
+++ b/pysal/examples/examples.txt
@@ -0,0 +1,141 @@
+******************************************************************
+PySAL: Example Data Sets 
+******************************************************************
+
+PySAL comes with a number of example data sets that are used in some of the
+documentation strings in the source code. All the example data sets can be
+found in the **examples** directory.
+
+10740
+=====
+Polygon shapefile for Albuquerque New Mexico.
+
+* 10740.dbf: attribute database file
+* 10740.shp: shapefile
+* 10740.shx: spatial index
+* 10740_queen.gal: queen contiguity GAL format
+* 10740_rook.gal: rook contiguity GAL format
+
+
+book
+====
+Synthetic data to illustrate spatial weights. Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* book.gal: rook contiguity for regular lattice
+* book.txt: attribute data for regular lattice
+
+calempdensity
+=============
+Employment density for California counties. Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* calempdensity.csv: data on employment and employment density in California
+  counties.
+
+chicago77
+=========
+Chicago Community Areas (n=77). Source: Anselin, L. and S.J. Rey (in
+progress) Spatial Econometrics: Foundations.
+
+* Chicago77.dbf: attribute data
+* Chicago77.shp: shapefile
+* Chicago77.shx: spatial index
+
+
+desmith
+=======
+Example data for autocorrelation analysis. Source: de Smith et al (2009)
+`Geospatial Analysis
+<http://www.spatialanalysisonline.com/output/html/MoranIandGearyC.html>`_ (Used
+with permission)
+
+* desmith.txt: attribute data for 10 spatial units
+* desmith.gal: spatial weights in GAL format
+
+juvenile
+========
+Cardiff juvenile delinquent residences.
+
+* juvenile.dbf: attribute data
+* juvenile.html: documentation
+* juvenile.shp: shapefile
+* juvenile.shx: spatial index
+
+mexico
+======
+State regional income Mexican states 1940-2000. Source:  Rey, S.J. and M.L.
+Sastre Gutierrez. "Interregional inequality dynamics in Mexico." Spatial
+Economic Analysis. Forthcoming.
+
+* mexico.csv: attribute data
+* mexico.gal: spatial weights in GAL format
+
+rook31
+======
+Small test shapefile
+
+* rook31.dbf: attribute data
+* rook31.gal: spatial weights in GAL format
+* rook31.shp: shapefile
+* rook31.shx: spatial index
+
+
+sacramento2
+===========
+1998 and 2001 Zip Code Business Patterns (Census Bureau) for Sacramento MSA
+
+ * sacramento2.dbf
+ * sacramento2.sbn
+ * sacramento2.sbx
+ * sacramento2.shp
+ * sacramento2.shx
+
+shp_test
+========
+Sample Shapefiles used only for testing purposes. Each example includes a ".shp" Shapefile, ".shx" Shapefile Index, ".dbf" DBase file, and a ".prj" ESRI Projection file.
+
+Examples include:
+
+ * Point: Example of an ESRI Shapefile of Type 1 (Point).
+ * Line: Example of an ESRI Shapefile of Type 3 (Line).
+ * Polygon: Example of an ESRI Shapefile of Type 5 (Polygon).
+
+sids2
+=====
+North Carolina county SIDS death counts and rates
+ 
+ * sids2.dbf:  attribute data
+ * sids2.html: documentation
+ * sids2.shp:  shapefile
+ * sids2.shx:  spatial index
+ * sids2.gal:  GAL file for spatial weights
+
+stl_hom
+=======
+Homicides and selected socio-economic characteristics for counties surrounding St Louis, MO. Data aggregated for three time periods: 1979-84 (steady decline in homicides), 1984-88 (stable period), and 1988-93 (steady increase in homicides). Source: S. Messner, L. Anselin, D. Hawkins, G. Deane, S. Tolnay, R. Baller (2000). An Atlas of the Spatial Patterning of County-Level Homicide, 1960-1990. Pittsburgh, PA, National Consortium on Violence Research (NCOVR).
+
+ * stl_hom.html: Metadata
+ * stl_hom.txt: txt file with attribute data
+ * stl_hom.wkt: A Well-Known-Text representation of the geometry.
+ * stl_hom.csv: attribute data and WKT geometry.
 * stl_hom.gal: GAL file for spatial weights
+ 
+
+US Regional Incomes
+===================
+Per capita income for the lower 48 US states, 1929-2010
+
+ * us48.shp: shapefile 
+ * us48.dbf: dbf for shapefile
+ * us48.shx: index for shapefile
+ * usjoin.csv: attribute data (comma delimited file)
+
+Virginia
+========
+Virginia Counties Shapefile.
+
+  * virginia.shp: Shapefile
+  * virginia.shx: shapefile index
+  * virginia.dbf: attributes
+  * virginia.prj: shapefile projection
diff --git a/pysal/examples/geobugs_scot b/pysal/examples/geobugs_scot
new file mode 100644
index 0000000..d3c326e
--- /dev/null
+++ b/pysal/examples/geobugs_scot
@@ -0,0 +1,66 @@
+list( num = c(3, 2, 1, 3, 3, 0, 5, 0, 5, 4, 
+0, 2, 3, 3, 2, 6, 6, 6, 5, 3, 
+3, 2, 4, 8, 3, 3, 4, 4, 11, 6, 
+7, 3, 4, 9, 4, 2, 4, 6, 3, 4, 
+5, 5, 4, 5, 4, 6, 6, 4, 9, 2, 
+4, 4, 4, 5, 6, 5
+),
+adj = c(
+19, 9, 5, 
+10, 7, 
+12, 
+28, 20, 18, 
+19, 12, 1, 
+
+17, 16, 13, 10, 2, 
+
+29, 23, 19, 17, 1, 
+22, 16, 7, 2, 
+
+5, 3, 
+19, 17, 7, 
+35, 32, 31, 
+29, 25, 
+29, 22, 21, 17, 10, 7, 
+29, 19, 16, 13, 9, 7, 
+56, 55, 33, 28, 20, 4, 
+17, 13, 9, 5, 1, 
+56, 18, 4, 
+50, 29, 16, 
+16, 10, 
+39, 34, 29, 9, 
+56, 55, 48, 47, 44, 31, 30, 27, 
+29, 26, 15, 
+43, 29, 25, 
+56, 32, 31, 24, 
+45, 33, 18, 4, 
+50, 43, 34, 26, 25, 23, 21, 17, 16, 15, 9, 
+55, 45, 44, 42, 38, 24, 
+47, 46, 35, 32, 27, 24, 14, 
+31, 27, 14, 
+55, 45, 28, 18, 
+54, 52, 51, 43, 42, 40, 39, 29, 23, 
+46, 37, 31, 14, 
+41, 37, 
+46, 41, 36, 35, 
+54, 51, 49, 44, 42, 30, 
+40, 34, 23, 
+52, 49, 39, 34, 
+53, 49, 46, 37, 36, 
+51, 43, 38, 34, 30, 
+42, 34, 29, 26, 
+49, 48, 38, 30, 24, 
+55, 33, 30, 28, 
+53, 47, 41, 37, 35, 31, 
+53, 49, 48, 46, 31, 24, 
+49, 47, 44, 24, 
+54, 53, 52, 48, 47, 44, 41, 40, 38, 
+29, 21, 
+54, 42, 38, 34, 
+54, 49, 40, 34, 
+49, 47, 46, 41, 
+52, 51, 49, 38, 34, 
+56, 45, 33, 30, 24, 18, 
+55, 27, 24, 20, 18
+),
+sumNumNeigh = 234)
\ No newline at end of file
diff --git a/pysal/examples/geodanet/crimes.dbf b/pysal/examples/geodanet/crimes.dbf
new file mode 100644
index 0000000..523c2c8
Binary files /dev/null and b/pysal/examples/geodanet/crimes.dbf differ
diff --git a/pysal/examples/geodanet/crimes.prj b/pysal/examples/geodanet/crimes.prj
new file mode 100644
index 0000000..b7b18c8
--- /dev/null
+++ b/pysal/examples/geodanet/crimes.prj
@@ -0,0 +1 @@
+PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.6],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]]
\ No newline at end of file
diff --git a/pysal/examples/geodanet/crimes.sbn b/pysal/examples/geodanet/crimes.sbn
new file mode 100644
index 0000000..4646933
Binary files /dev/null and b/pysal/examples/geodanet/crimes.sbn differ
diff --git a/pysal/examples/geodanet/crimes.sbx b/pysal/examples/geodanet/crimes.sbx
new file mode 100644
index 0000000..4a7fea5
Binary files /dev/null and b/pysal/examples/geodanet/crimes.sbx differ
diff --git a/pysal/examples/geodanet/crimes.shp b/pysal/examples/geodanet/crimes.shp
new file mode 100644
index 0000000..efc7d25
Binary files /dev/null and b/pysal/examples/geodanet/crimes.shp differ
diff --git a/pysal/examples/geodanet/crimes.shp.xml b/pysal/examples/geodanet/crimes.shp.xml
new file mode 100644
index 0000000..6ec0a37
--- /dev/null
+++ b/pysal/examples/geodanet/crimes.shp.xml
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<!--<!DOCTYPE metadata SYSTEM "http://www.esri.com/metadata/esriprof80.dtd">-->
+<metadata xml:lang="en"><Esri><CreaDate>20120210</CreaDate><CreaTime>09543000</CreaTime><SyncOnce>FALSE</SyncOnce><DataProperties><lineage><Process ToolSource="C:\Program Files\ArcGIS\ArcToolbox\Toolboxes\Data Management Tools.tbx\DefineProjection" Date="20100423" Time="143725">DefineProjection MesaCrime_SPCentral_feet PROJCS['NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PR [...]
diff --git a/pysal/examples/geodanet/crimes.shx b/pysal/examples/geodanet/crimes.shx
new file mode 100644
index 0000000..8a794bc
Binary files /dev/null and b/pysal/examples/geodanet/crimes.shx differ
diff --git a/pysal/examples/geodanet/schools.dbf b/pysal/examples/geodanet/schools.dbf
new file mode 100644
index 0000000..70ab8d1
Binary files /dev/null and b/pysal/examples/geodanet/schools.dbf differ
diff --git a/pysal/examples/geodanet/schools.prj b/pysal/examples/geodanet/schools.prj
new file mode 100644
index 0000000..b7b18c8
--- /dev/null
+++ b/pysal/examples/geodanet/schools.prj
@@ -0,0 +1 @@
+PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.6],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]]
\ No newline at end of file
diff --git a/pysal/examples/geodanet/schools.sbn b/pysal/examples/geodanet/schools.sbn
new file mode 100644
index 0000000..0508084
Binary files /dev/null and b/pysal/examples/geodanet/schools.sbn differ
diff --git a/pysal/examples/geodanet/schools.sbx b/pysal/examples/geodanet/schools.sbx
new file mode 100644
index 0000000..abadf28
Binary files /dev/null and b/pysal/examples/geodanet/schools.sbx differ
diff --git a/pysal/examples/geodanet/schools.shp b/pysal/examples/geodanet/schools.shp
new file mode 100644
index 0000000..b71949e
Binary files /dev/null and b/pysal/examples/geodanet/schools.shp differ
diff --git a/pysal/examples/geodanet/schools.shp.xml b/pysal/examples/geodanet/schools.shp.xml
new file mode 100644
index 0000000..c09b39c
--- /dev/null
+++ b/pysal/examples/geodanet/schools.shp.xml
@@ -0,0 +1,546 @@
+<?xml version="1.0"?>
+<metadata><idinfo><citation><citeinfo><origin>Arizona Department of Environmental Quality, Arizona Department of Health Services</origin><pubdate>August 14, 2007</pubdate><title Sync="TRUE">SCHOOLS_EVERYTHING_8_7_08</title><edition>Schools 8-14-2007</edition><geoform Sync="TRUE">vector digital data</geoform><onlink>\\adeq.lcl\gisprod\data\adeq\schools-everything-8-14-07.shp</onlink><ftname Sync="TRUE">SCHOOLS_EVERYTHING_8_7_08</ftname></citeinfo></citation><descript><abstract>This data s [...]
+
+All data are provided "as is" and may contain errors. The data are for reference and illustration purposes only and are not suitable for site-specific decision making. Information found here should not be used for making financial or any other commitments. Conclusions drawn from such information are the responsibility of the user.  
+
+ADEQ assumes no responsibility for errors arising from misuse of the data or maps derived from the data. ADEQ disclaims any liability for injury, damage or loss that might result from the use of this information. In no event shall ADEQ become liable to users of these data and maps, or any other party, arising from the use or modification of the data.</useconst><ptcontac><cntinfo><cntorgp><cntorg>Arizona Department of Environmental Quality</cntorg></cntorgp><cntaddr><addrtype>mailing and  [...]
+aW5mbz4NCiAgICA8Y2l0YXRpb24+DQogICAgICA8Y2l0ZWluZm8+DQogICAgICAgIDxvcmlnaW4+
+QXJpem9uYSBEZXBhcnRtZW50IG9mIEVudmlyb25tZW50YWwgUXVhbGl0eSwgQXJpem9uYSBEZXBh
+cnRtZW50IG9mIEhlYWx0aCBTZXJ2aWNlczwvb3JpZ2luPg0KICAgICAgICA8cHViZGF0ZT5BdWd1
+c3QgMTQsIDIwMDc8L3B1YmRhdGU+DQogICAgICAgIDxlZGl0aW9uPlNjaG9vbHMgOC0xNC0yMDA3
+PC9lZGl0aW9uPg0KICAgICAgICA8b25saW5rPlxcYWRlcS5sY2xcZ2lzcHJvZFxkYXRhXGFkZXFc
+c2Nob29scy1ldmVyeXRoaW5nLTgtMTQtMDcuc2hwPC9vbmxpbms+DQogICAgICA8L2NpdGVpbmZv
+Pg0KICAgIDwvY2l0YXRpb24+DQogICAgPGRlc2NyaXB0Pg0KICAgICAgPGFic3RyYWN0PlRoaXMg
+ZGF0YSBzZXQgaXMgYSBnZW5lcmFsIHJlZmVyZW5jZSBmb3Igc2Nob29scyBvciAibGVhcm5pbmcg
+c2l0ZXMiIGluIEFyaXpvbmEuICBJdCByZXByZXNlbnRzIHNjaG9vbHMgZnJvbSB0aGUgQVogRGVw
+YXJ0bWVudCBvZiBFZHVjYXRpb24gKENURFMgbnVtYmVycywgY2hhcnRlciBhbmQgcHVibGljIHNj
+aG9vbHMpLCBBWiBTY2hvb2wgRmFjaWxpdGllcyBCb2FyZCwgcHJpdmF0ZSBzY2hvb2xzLCBzb21l
+IHRlY2huaWNhbCBzY2hvb2xzLCBjb2xsZWdlcyBhbmQgdW5pdmVyc2l0aWVzLjwvYWJzdHJhY3Q+
+DQogICAgICA8cHVycG9zZT5UaGUgaW50ZW50aW9uIHdpdGggd2hpY2ggdGhlIGRhdGEgc2V0IHdh
+cyBkZXZlbG9wZWQgaXMgZm9yIGdlbmVyYWwgcmVmZXJlbmNlIG9ubHkuICBJdCBpcyByZXByZXNl
+bnRhdGl2ZSBvbmx5IHByZXNlbnRpbmcgYSBzaW5nbGUgcG9pbnQgaW4gdGltZSBmb3IgdGhlIHRv
+cGljICJsZWFybmluZyBzaXRlcy4iICBJdCBpcyBub3QgdGhlIGZpbmFsIG9yIGF1dGhvcml0YXRp
+dmUgbGVnYWwgZG9jdW1lbnRhdGlvbiBmb3IgdGhlIGxlYXJuaW5nIHNpdGVzIGRhdGEgb3IgbG9j
+YXRpb25zLjwvcHVycG9zZT4NCiAgICAgIDxzdXBwbGluZj5UaGlzIGRhdGEgc2V0IGRvZXMgbm90
+IGNvbnRhaW4gbG9jYXRpb25zIGZvciBhbGwgY29zbWV0b2xvZ3kgb3IgYmVhdXR5IGNvbGxlZ2Vz
+LCBob3JzZXNob2Vpbmcgb3Igd2VsZGluZyB0ZWNobmljYWwgc2Nob29scywgb3Igb3RoZXIgdHJh
+ZGUgc2Nob29scy48L3N1cHBsaW5mPg0KICAgIDwvZGVzY3JpcHQ+DQogICAgPHRpbWVwZXJkPg0K
+ICAgICAgPGN1cnJlbnQ+cHVibGljYXRpb24gZGF0ZTwvY3VycmVudD4NCiAgICA8L3RpbWVwZXJk
+Pg0KICAgIDxzdGF0dXM+DQogICAgICA8cHJvZ3Jlc3M+SW4gd29yazwvcHJvZ3Jlc3M+DQogICAg
+ICA8dXBkYXRlPkFzIG5lZWRlZDwvdXBkYXRlPg0KICAgIDwvc3RhdHVzPg0KICAgIDxrZXl3b3Jk
+cz4NCiAgICAgIDx0aGVtZT4NCiAgICAgICAgPHRoZW1la2V5PkFERVE8L3RoZW1la2V5Pg0KICAg
+ICAgICA8dGhlbWVrZXk+ZW52aXJvbm1lbnQ8L3RoZW1la2V5Pg0KICAgICAgICA8dGhlbWVrZXk+
+QXJpem9uYTwvdGhlbWVrZXk+DQogICAgICAgIDx0aGVtZWtleT5FbnZpcm9ubWVudGFsIFF1YWxp
+dHk8L3RoZW1la2V5Pg0KICAgICAgICA8dGhlbWVrZXk+RGVwYXJ0bWVudCBvZiBFbnZpcm9ubWVu
+dGFsIFF1YWxpdHk8L3RoZW1la2V5Pg0KICAgICAgICA8dGhlbWVrZXk+c2Nob29sczwvdGhlbWVr
+ZXk+DQogICAgICAgIDx0aGVtZWtleT5sZWFybmluZyBzaXRlczwvdGhlbWVrZXk+DQogICAgICAg
+IDx0aGVtZWtleT5jb2xsZWdlczwvdGhlbWVrZXk+DQogICAgICAgIDx0aGVtZWtleT51bml2ZXJz
+aXRpZXM8L3RoZW1la2V5Pg0KICAgICAgICA8dGhlbWVrZXk+Z3JhZGUgc2Nob29sPC90aGVtZWtl
+eT4NCiAgICAgICAgPHRoZW1la2V5PmVsZW1lbnRhcnkgc2Nob29sPC90aGVtZWtleT4NCiAgICAg
+ICAgPHRoZW1la2V5PmhpZ2ggc2Nob29sPC90aGVtZWtleT4NCiAgICAgICAgPHRoZW1la2V5Pm1p
+ZGRsZSBzY2hvb2w8L3RoZW1la2V5Pg0KICAgICAgICA8dGhlbWVrZXk+a2luZGVyZ2FydGVuPC90
+aGVtZWtleT4NCiAgICAgICAgPHRoZW1la2V5PnByaXZhdGUgc2Nob29sPC90aGVtZWtleT4NCiAg
+ICAgICAgPHRoZW1la2V5PnBhcm9jaGlhbCBzY2hvb2w8L3RoZW1la2V5Pg0KICAgICAgICA8dGhl
+bWVrZXk+bW9udGVzc29yaTwvdGhlbWVrZXk+DQogICAgICAgIDx0aGVtZWtleT5jb21tdW5pdHkg
+Y29sbGVnZTwvdGhlbWVrZXk+DQogICAgICAgIDx0aGVtZWtleT5qdW5pb3IgY29sbGVnZTwvdGhl
+bWVrZXk+DQogICAgICAgIDx0aGVtZWtleT51bml2ZXJzaXR5PC90aGVtZWtleT4NCiAgICAgICAg
+PHRoZW1la2V5PkFyaXpvbmEgIERlcGFydG1lbnQgb2YgRWR1Y2F0aW9uPC90aGVtZWtleT4NCiAg
+ICAgICAgPHRoZW1la2V5PkNoYXJ0ZXIgU2Nob29sPC90aGVtZWtleT4NCiAgICAgIDwvdGhlbWU+
+DQogICAgICA8cGxhY2U+DQogICAgICAgIDxwbGFjZWtleT5Bcml6b25hPC9wbGFjZWtleT4NCiAg
+ICAgIDwvcGxhY2U+DQogICAgICA8dGVtcG9yYWw+DQogICAgICAgIDx0ZW1wa2V5PjIwMDg8L3Rl
+bXBrZXk+DQogICAgICA8L3RlbXBvcmFsPg0KICAgIDwva2V5d29yZHM+DQogICAgPGFjY2NvbnN0
+PkFjY2VzcyB0byB0aGVzZSBkYXRhIGFyZSBhbGxvd2VkIGZvciBub24tY29tbWVyY2lhbCBhcHBs
+aWNhdGlvbnMgd2l0aG91dCBjaGFyZ2UuICBDb21tZXJjaWFsIHVzZXMgcmVxdWlyZSBwYXltZW50
+LjwvYWNjY29uc3Q+DQogICAgPHVzZWNvbnN0PlRoZSBBcml6b25hIERlcGFydG1lbnQgb2YgRW52
+aXJvbm1lbnRhbCBRdWFsaXR5IGFuZCBvdGhlcnMgaGF2ZSBjb21waWxlZCB0aGlzIGRhdGEgYXMg
+YSBzZXJ2aWNlIHRvIG91ciBjdXN0b21lcnMgdXNpbmcgaW5mb3JtYXRpb24gZnJvbSB2YXJpb3Vz
+IHNvdXJjZXMuIEFERVEgYW5kIGl0cyBjb2xsYWJvcmF0b3JzIGNhbm5vdCBlbnN1cmUgdGhhdCB0
+aGUgaW5mb3JtYXRpb24gaXMgYWNjdXJhdGUsIGN1cnJlbnQgb3IgY29tcGxldGUuIE5laXRoZXIg
+dGhlIGluZm9ybWF0aW9uIHByZXNlbnRlZCBub3IgbWFwcyBkZXJpdmVkIGZyb20gdGhlbSBhcmUg
+b2ZmaWNpYWwgZG9jdW1lbnRzLiAgDQoNCkFsbCBkYXRhIGFyZSBwcm92aWRlZCAiYXMgaXMiIGFu
+ZCBtYXkgY29udGFpbiBlcnJvcnMuIFRoZSBkYXRhIGFyZSBmb3IgcmVmZXJlbmNlIGFuZCBpbGx1
+c3RyYXRpb24gcHVycG9zZXMgb25seSBhbmQgYXJlIG5vdCBzdWl0YWJsZSBmb3Igc2l0ZS1zcGVj
+aWZpYyBkZWNpc2lvbiBtYWtpbmcuIEluZm9ybWF0aW9uIGZvdW5kIGhlcmUgc2hvdWxkIG5vdCBi
+ZSB1c2VkIGZvciBtYWtpbmcgZmluYW5jaWFsIG9yIGFueSBvdGhlciBjb21taXRtZW50cy4gQ29u
+Y2x1c2lvbnMgZHJhd24gZnJvbSBzdWNoIGluZm9ybWF0aW9uIGFyZSB0aGUgcmVzcG9uc2liaWxp
+dHkgb2YgdGhlIHVzZXIuICANCg0KQURFUSBhc3N1bWVzIG5vIHJlc3BvbnNpYmlsaXR5IGZvciBl
+cnJvcnMgYXJpc2luZyBmcm9tIG1pc3VzZSBvZiB0aGUgZGF0YSBvciBtYXBzIGRlcml2ZWQgZnJv
+bSB0aGUgZGF0YS4gQURFUSBkaXNjbGFpbXMgYW55IGxpYWJpbGl0eSBmb3IgaW5qdXJ5LCBkYW1h
+Z2Ugb3IgbG9zcyB0aGF0IG1pZ2h0IHJlc3VsdCBmcm9tIHRoZSB1c2Ugb2YgdGhpcyBpbmZvcm1h
+dGlvbi4gSW4gbm8gZXZlbnQgc2hhbGwgQURFUSBiZWNvbWUgbGlhYmxlIHRvIHVzZXJzIG9mIHRo
+ZXNlIGRhdGEgYW5kIG1hcHMsIG9yIGFueSBvdGhlciBwYXJ0eSwgYXJpc2luZyBmcm9tIHRoZSB1
+c2Ugb3IgbW9kaWZpY2F0aW9uIG9mIHRoZSBkYXRhLjwvdXNlY29uc3Q+DQogICAgPHB0Y29udGFj
+Pg0KICAgICAgPGNudGluZm8+DQogICAgICAgIDxjbnRvcmdwPg0KICAgICAgICAgIDxjbnRvcmc+
+QXJpem9uYSBEZXBhcnRtZW50IG9mIEVudmlyb25tZW50YWwgUXVhbGl0eTwvY250b3JnPg0KICAg
+ICAgICA8L2NudG9yZ3A+DQogICAgICAgIDxjbnRhZGRyPg0KICAgICAgICAgIDxhZGRydHlwZT5t
+YWlsaW5nIGFuZCBwaHlzaWNhbCBhZGRyZXNzPC9hZGRydHlwZT4NCiAgICAgICAgICA8YWRkcmVz
+cz4xMTEwIFcgV2FzaGluZ3RvbiBTdDwvYWRkcmVzcz4NCiAgICAgICAgICA8Y2l0eT5QaG9lbml4
+PC9jaXR5Pg0KICAgICAgICAgIDxzdGF0ZT5Bcml6b25hPC9zdGF0ZT4NCiAgICAgICAgICA8cG9z
+dGFsPjg1MDA3PC9wb3N0YWw+DQogICAgICAgICAgPGNvdW50cnk+VVNBPC9jb3VudHJ5Pg0KICAg
+ICAgICA8L2NudGFkZHI+DQogICAgICA8L2NudGluZm8+DQogICAgPC9wdGNvbnRhYz4NCiAgICA8
+ZGF0YWNyZWQ+VGhpcyBkYXRhIHNldCBoYXMgYmVlbiBjcmVhdGVkIGluIGNvbGxhYm9yYXRpb24g
+d2l0aCB0aGUgQXJpem9uYSBEZXBhcnRtZW50IG9mIEVkdWNhdGlvbiwgQXJpem9uYSBEZXBhcnRt
+ZW50IG9mIEhlYWx0aCBTZXJ2aWNlcywgQXJpem9uYSBTdGF0ZSBMYW5kIERlcGFydG1lbnQgIGFu
+ZCB0aGUgQXJpem9uYSBTdGF0ZSBDYXJ0b2dyYXBoZXJzIE9mZmljZS48L2RhdGFjcmVkPg0KICAg
+IDxzZWNpbmZvPg0KICAgICAgPHNlY2NsYXNzPlVuY2xhc3NpZmllZDwvc2VjY2xhc3M+DQogICAg
+PC9zZWNpbmZvPg0KICA8L2lkaW5mbz4NCiAgPGRhdGFxdWFsPg0KICAgIDxsaW5lYWdlPg0KICAg
+ICAgPHByb2NzdGVwPg0KICAgICAgICA8cHJvY2Rlc2M+RGF0YXNldCBjb3BpZWQuPC9wcm9jZGVz
+Yz4NCiAgICAgIDwvcHJvY3N0ZXA+DQogICAgICA8cHJvY3N0ZXA+DQogICAgICAgIDxwcm9jZGVz
+Yz5NZXRhZGF0YSBpbXBvcnRlZC48L3Byb2NkZXNjPg0KICAgICAgICA8c3JjdXNlZD5EOlxET0NV
+TUV+MVxWTUd+MS5BREVcTE9DQUxTfjFcVGVtcFx4bWxDQS50bXA8L3NyY3VzZWQ+DQogICAgICA8
+L3Byb2NzdGVwPg0KICAgICAgPHByb2NzdGVwPg0KICAgICAgICA8cHJvY2Rlc2M+TWV0YWRhdGEg
+aW1wb3J0ZWQuPC9wcm9jZGVzYz4NCiAgICAgICAgPHNyY3VzZWQ+UzpcY29tbW9uXHZtZ1xzY2hv
+b2xzbWV0YWRhdGEueG1sPC9zcmN1c2VkPg0KICAgICAgPC9wcm9jc3RlcD4NCiAgICA8L2xpbmVh
+Z2U+DQogIDwvZGF0YXF1YWw+DQogIDxzcGRvaW5mbz4NCiAgICA8cHR2Y3RpbmY+DQogICAgICA8
+ZXNyaXRlcm0gTmFtZT0iU0NIT09MU19FVkVSWVRISU5HXzhfN18wOCIgLz4NCiAgICA8L3B0dmN0
+aW5mPg0KICA8L3NwZG9pbmZvPg0KICA8ZWFpbmZvPg0KICAgIDxkZXRhaWxlZCBOYW1lPSJTQ0hP
+T0xTX0VWRVJZVEhJTkdfOF83XzA4Ij4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+
+RklEPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+SW50ZXJuYWwgZmVhdHVyZSBudW1iZXIu
+PC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+RVNSSTwvYXR0cmRlZnM+DQogICAgICAgIDxh
+dHRyZG9tdj4NCiAgICAgICAgICA8dWRvbT5TZXF1ZW50aWFsIHVuaXF1ZSB3aG9sZSBudW1iZXJz
+IHRoYXQgYXJlIGF1dG9tYXRpY2FsbHkgZ2VuZXJhdGVkLjwvdWRvbT4NCiAgICAgICAgPC9hdHRy
+ZG9tdj4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+U2hh
+cGU8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5GZWF0dXJlIGdlb21ldHJ5LjwvYXR0cmRl
+Zj4NCiAgICAgICAgPGF0dHJkZWZzPkVTUkk8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cmRvbXY+
+DQogICAgICAgICAgPHVkb20+Q29vcmRpbmF0ZXMgZGVmaW5pbmcgdGhlIGZlYXR1cmVzLjwvdWRv
+bT4NCiAgICAgICAgPC9hdHRyZG9tdj4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAg
+ICAgICA8YXR0cmxhYmw+TkFNRTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlNjaG9vbCBv
+ciBsZWFybmluZyBzaXRlIG5hbWU8L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8
+L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPkdvb2Q8L2F0
+dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5OYW1lcyB2ZXJpZmllZCB0byBtdWxpdHBsZSBzb3Vy
+Y2VzPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRybWZycT5BcyBu
+ZWVkZWQ8L2F0dHJtZnJxPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxh
+dHRybGFibD5BRERSRVNTPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+UGh5c2ljYWwgYWRk
+cmVzcyBvZiBzY2hvb2wgb3IgbGVhcm5pbmcgc2l0ZTwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJk
+ZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRy
+dmE+R29vZDwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPlZlcmlmaWVkIHRvIG11bHRpcGxl
+IHNvdXJjZXM8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgICAgPGF0dHJtZnJx
+PkFzIG5lZWRlZDwvYXR0cm1mcnE+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAg
+ICAgPGF0dHJsYWJsPlpJUDwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlpJUCBDb2RlIG9m
+IHBoeXNpY2FsIGxvY2F0aW9uPC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VVNQUyBaSVAg
+Q29kZTwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRydmE+R29v
+ZDwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPlZlcmlmaWVkIHRvIG91dHNpZGUgc291cmNl
+PC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRybWZycT5BcyBuZWVk
+ZWQ8L2F0dHJtZnJxPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRy
+bGFibD5DVERTPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+QVogRGVwdCBvZiBFZHVjYXRp
+b24gSWRlbnRpZmljYXRpb24gTnVtYmVyLCAoQ291bnR5IENvZGUsIFR5cGUgQ29kZSwgRGlzdHJp
+Y3QgQ29kZSAmYW1wOyBTaXRlIE51bWJlcjwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPkFa
+IERlcHQuIG9mIEVkdWNhdGlvbjwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAg
+ICAgIDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+TWlzc2luZyBs
+ZWFkaW5nIHplcm9zIGluIHN0cmluZyBmaWVsZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRydmFp
+Pg0KICAgICAgICA8YXR0cm1mcnE+QXMgbmVlZGVkPC9hdHRybWZycT4NCiAgICAgIDwvYXR0cj4N
+CiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+Q1REU19OVU08L2F0dHJsYWJsPg0KICAg
+ICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5TVEFUVVM8L2F0dHJs
+YWJsPg0KICAgICAgICA8YXR0cmRlZj5PcGVyYXRpbmcgU3RhdHVzIChvcGVuICwgY2xvc2VkLCBw
+cm9wb3NlZCk8L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0K
+ICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPkdvb2Q8L2F0dHJ2YT4NCiAgICAg
+ICAgICA8YXR0cnZhZT5WYWxpZGF0ZWQgdG8gbXVsdGlwbGUgc291cmNlczwvYXR0cnZhZT4NCiAg
+ICAgICAgPC9hdHRydmFpPg0KICAgICAgICA8YXR0cm1mcnE+QXMgbmVlZGVkPC9hdHRybWZycT4N
+CiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+TE9DQVRJT048
+L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5Mb2NhdGlvbiBjaGVjayBtZXRob2Q8L2F0dHJk
+ZWY+DQogICAgICAgIDxhdHRyZGVmcz5BREVRPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJkb212
+Pg0KICAgICAgICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212PkRJRzwvZWRvbXY+DQogICAg
+ICAgICAgICA8ZWRvbXZkPkRpZ2l0YWxseSB2ZXJpZmllZCBhZ2FpbnN0IHJhc3RlciBkYXRhIG9y
+IG90aGVyIGRhdGEgc2V0IChwYXJjZWxzKTwvZWRvbXZkPg0KICAgICAgICAgICAgPGVkb212ZHM+
+QURFUTwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAg
+ICAgICAgICA8ZWRvbXY+Tk9OPC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9tdmQ+Tm9uLXNwZWNp
+ZmljLCBtdWx0aXBsZSBtZXRob2RzIG9mIHZlcmlmaWNhdGlvbiAoZGlnaXRhbCwgZ2VvY29kaW5n
+LCBHUFMsIGV0Yy4pPC9lZG9tdmQ+DQogICAgICAgICAgICA8ZWRvbXZkcz5BREVRPC9lZG9tdmRz
+Pg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9t
+dj5HUFM8L2Vkb212Pg0KICAgICAgICAgICAgPGVkb212ZD5HbG9iYWwgUG9zaXRpb25pbmcgU3lz
+dGVtIC0gZmllbGQgY29sbGVjdGVkPC9lZG9tdmQ+DQogICAgICAgICAgICA8ZWRvbXZkcz5BREVR
+PC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAg
+ICAgIDxlZG9tdj5HRU88L2Vkb212Pg0KICAgICAgICAgICAgPGVkb212ZD5PcmlnaW5hbGx5IGdl
+b2NvZGVkIC0gYWRkcmVzcyBtYXRjaGVkIFtsb2NhdGlvbiB2ZXJpZmllZCBieSBvdGhlciBtZXRo
+b2RzXTwvZWRvbXZkPg0KICAgICAgICAgICAgPGVkb212ZHM+QURFUTwvZWRvbXZkcz4NCiAgICAg
+ICAgICA8L2Vkb20+DQogICAgICAgIDwvYXR0cmRvbXY+DQogICAgICAgIDxhdHRydmFpPg0KICAg
+ICAgICAgIDxhdHRydmE+R29vZDwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPlZlcmlmaWVk
+PC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRybWZycT5BcyBuZWVk
+ZWQ8L2F0dHJtZnJxPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRy
+bGFibD5RQV9RQzwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlF1YWxpdHkgQXNzdXJhbmNl
+IC8gUXVhbGl0eSBDb250cm9sIENvZGU8L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5BREVR
+PC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJkb212Pg0KICAgICAgICAgIDxlZG9tPg0KICAgICAg
+ICAgICAgPGVkb212PjA8L2Vkb212Pg0KICAgICAgICAgICAgPGVkb212ZD5Vc2VkIGFzIGlkZW50
+aWZpZXIgZm9yIHZlcnNpb24gYWRkaXRpb25zIHRvIGRhdGEgc2V0IC0gR1BTXSwgbG9jYXRpb24g
+cXVhbGl0eSBpcyAib2siPC9lZG9tdmQ+DQogICAgICAgICAgICA8ZWRvbXZkcz5BREVRPC9lZG9t
+dmRzPg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxl
+ZG9tdj4xPC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9tdmQ+VmVyeSBoaWdoIGNvbmZpZGVuY2Ug
+b2YgbG9jYXRpb24gYWNjdXJhY3ksIG1hdGNoZWQgdG8gYXQgbGVhc3QgdHdvIGluZGVwZW5kZW50
+IHNvdXJjZXM8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPkFERVE8L2Vkb212ZHM+DQog
+ICAgICAgICAgPC9lZG9tPg0KICAgICAgICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212PjI8
+L2Vkb212Pg0KICAgICAgICAgICAgPGVkb212ZD5Mb3cgY29uZmlkZW5jZSBvZiBsb2NhdGlvbmFs
+IGFjY3VyYWN5LCB1bmFibGUgdG8gbWF0Y2ggdG8gb3RoZXIgc291cmNlczwvZWRvbXZkPg0KICAg
+ICAgICAgICAgPGVkb212ZHM+QURFUTwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAg
+ICAgICAgPGVkb20+DQogICAgICAgICAgICA8ZWRvbXY+MzwvZWRvbXY+DQogICAgICAgICAgICA8
+ZWRvbXZkPlVzZWQgYXMgaWRlbnRpZmllciBmb3IgdmVyc2lvbiBhZGRpdGlvbnMgdG8gZGF0YSBz
+ZXQgLSBHUFMgb3IgTk9OLCBsb2NhdGlvbiBxdWFsaXR5IGlzICJvayI8L2Vkb212ZD4NCiAgICAg
+ICAgICAgIDxlZG9tdmRzPkFERVE8L2Vkb212ZHM+DQogICAgICAgICAgPC9lZG9tPg0KICAgICAg
+ICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212PjQ8L2Vkb212Pg0KICAgICAgICAgICAgPGVk
+b212ZD5Vc2VkIGFzIGlkZW50aWZpZXIgZm9yIHZlcnNpb24gYWRkaXRpb25zIHRvIGRhdGEgc2V0
+IC0gR1BTIG9yIE5PTiwgbG9jYXRpb24gcXVhbGl0eSBpcyAib2siPC9lZG9tdmQ+DQogICAgICAg
+ICAgICA8ZWRvbXZkcz5BREVRPC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAgICAg
+ICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj41PC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9t
+dmQ+VmVyeSBoaWdoIGNvbmZpZGVuY2Ugb2YgbG9jYXRpb24gYWNjdXJhY3ksIG1hdGNoZWQgZGln
+aXRhbGx5LCBldGMuIHRvIGF0IGxlYXN0IHR3byBpbmRlcGVuZGVudCBzb3VyY2VzPC9lZG9tdmQ+
+DQogICAgICAgICAgICA8ZWRvbXZkcz5BREVRPC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4N
+CiAgICAgICAgPC9hdHRyZG9tdj4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2
+YT5Hb29kPC9hdHRydmE+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgIDwvYXR0cj4NCiAgICAg
+IDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+Q0lUWTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRy
+ZGVmPlBoeXNpY2FsIGxvY2F0aW9uIGNpdHkgb3IgdG93bjwvYXR0cmRlZj4NCiAgICAgICAgPGF0
+dHJkZWZzPkRpZ2l0YWw8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8
+YXR0cnZhPkdvb2Q8L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5WZXJpZmllZDwvYXR0cnZh
+ZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgICA8YXR0cm1mcnE+QXMgbmVlZGVkPC9hdHRy
+bWZycT4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+Q09V
+TlRZPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+Q291bnR5PC9hdHRyZGVmPg0KICAgICAg
+ICA8YXR0cmRlZnM+RGlnaXRhbDwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAg
+ICAgIDxhdHRydmE+R29vZDwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPlZlcmlmaWVkPC9h
+dHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRybWZycT5BcyBuZWVkZWQ8
+L2F0dHJtZnJxPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFi
+bD5QSE9ORTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlBob25lIE51bWJlcjwvYXR0cmRl
+Zj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFp
+Pg0KICAgICAgICAgIDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+
+Tm90IHZlcmlmaWVkPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRy
+bWZycT5BcyBuZWVkZWQ8L2F0dHJtZnJxPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQog
+ICAgICAgIDxhdHRybGFibD5GQVg8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5GQVggTnVt
+YmVyPC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAg
+ICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAg
+ICA8YXR0cnZhZT5Ob3QgdmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAg
+ICAgICAgPGF0dHJtZnJxPkFzIG5lZWRlZDwvYXR0cm1mcnE+DQogICAgICA8L2F0dHI+DQogICAg
+ICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkxPV0dSQURFPC9hdHRybGFibD4NCiAgICAgICAg
+PGF0dHJkZWY+TG93ZXN0IGNsYXNzIGxldmVsPC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+
+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5N
+ZWRpdW08L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3QgVmVyaWZpZWQ8L2F0dHJ2YWU+
+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgICAgPGF0dHJtZnJxPkFzIG5lZWRlZDwvYXR0cm1m
+cnE+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkhJR0hH
+UkFERTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPkhpZ2hlc3QgY2xhc3MgbGV2ZWwgdGF1
+Z2h0PC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAg
+ICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAg
+ICA8YXR0cnZhZT5Ob3QgdmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAg
+ICAgICAgPGF0dHJtZnJxPkFzIG5lZWRlZDwvYXR0cm1mcnE+DQogICAgICA8L2F0dHI+DQogICAg
+ICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkNPTU1FTlQ8L2F0dHJsYWJsPg0KICAgICAgICA8
+YXR0cmRlZj5Db21tZW50cyBGaWVsZDwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmll
+czwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRybWZycT5BcyBuZWVkZWQ8L2F0dHJtZnJxPg0KICAg
+ICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5ESVNUUklDVDwvYXR0
+cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlNjaG9vbCBEaXN0cmljdCBvciBDaGFydGVyIEhvbGRl
+cjwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPkFaIERlcGFydG1lbnQgb2YgRWR1Y2F0aW9u
+PC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08
+L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3QgZnVsbHkgdmVyaWZpZWQ8L2F0dHJ2YWU+
+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgICAgPGF0dHJtZnJxPkFzIG5lZWRlZDwvYXR0cm1m
+cnE+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkdSQURF
+PC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+UmFuZ2Ugb2YgY2xhc3NlcyB0YXVnaHQ8L2F0
+dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0
+cm1mcnE+QXMgbmVlZGVkPC9hdHRybWZycT4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0K
+ICAgICAgICA8YXR0cmxhYmw+TlVSU0U8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5TY2hv
+b2wgTnVyc2UgcHJlc2VudD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0
+dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPlVua25vd248L2F0
+dHJ2YT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQog
+ICAgICAgIDxhdHRybGFibD5STl9QSE48L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5SZWdp
+c3RlciBOdXJzZSBQaG9uZSBOdW1iZXI8L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJp
+ZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPlVua25v
+d248L2F0dHJ2YT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0
+dHI+DQogICAgICAgIDxhdHRybGFibD5KVVZfUE9QPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJk
+ZWY+SnV2ZW5pbGUgUG9wdWxhdGlvbiAobnVtYmVyIG9mIHN0dWRlbnRzKTwvYXR0cmRlZj4NCiAg
+ICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAg
+ICAgICAgIDxhdHRydmE+VW5rbm93bjwvYXR0cnZhPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAg
+ICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkRJU1ROVU08L2F0dHJs
+YWJsPg0KICAgICAgICA8YXR0cmRlZj5TY2hvb2wgRGlzdHJpY3QgTnVtYmVyIG9yIENoYXJ0ZXIg
+SG9sZGVyIE51bWJlcjwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPkFaIERlcGFydG1lbnQg
+b2YgRWR1Y2F0aW9uPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0
+dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5QYXJ0aWFsbHkgdmVyaWZp
+ZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxh
+dHRyPg0KICAgICAgICA8YXR0cmxhYmw+TUFJTFRPPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJk
+ZWY+TWFpbGluZyBBZGRyZXNzPC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9h
+dHRyZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0
+dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3QgdmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAg
+IDwvYXR0cnZhaT4NCiAgICAgICAgPGF0dHJtZnJxPkFzIG5lZWRlZDwvYXR0cm1mcnE+DQogICAg
+ICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPk1BSUxDSVRZPC9hdHRy
+bGFibD4NCiAgICAgICAgPGF0dHJkZWY+TWFpbGluZyBBZGRyZXNzIENpdHk8L2F0dHJkZWY+DQog
+ICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAg
+ICAgICAgICA8YXR0cnZhPk1lZGl1bTwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCB2
+ZXJpZmllZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAg
+ICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5NQUlMU1RBVDwvYXR0cmxhYmw+DQogICAgICAg
+IDxhdHRyZGVmPk1haWxpbmcgQWRkcmVzcyBTdGF0ZTwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJk
+ZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRy
+dmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+Tm90IHZlcmlmaWVkPC9hdHRy
+dmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAg
+ICAgICAgPGF0dHJsYWJsPk1BSUxaSVA8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5NYWls
+aW5nIEFkZHJlc3MgWklQPC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRy
+ZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2
+YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3QgdmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwv
+YXR0cnZhaT4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+
+Q0xBU1M8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5DbGFzcyAtIGdyYWRlIGxldmVsczwv
+YXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxh
+dHRyZG9tdj4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5Vbml2ZXJzaXR5
+PC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9tdmQ+VW5pdmVyc2l0eSBsZXZlbDwvZWRvbXZkPg0K
+ICAgICAgICAgICAgPGVkb212ZHM+VmFyaWVzPC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4N
+CiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5Db21tLiBDb2xsZWdlPC9lZG9t
+dj4NCiAgICAgICAgICAgIDxlZG9tdmQ+Q29tbXVuaXR5IENvbGxlZ2UgLSBwYXJ0IG9mIGNvbW11
+bml0eSBjb2xsZWdlIG5ldHdvcms8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPlZhcmll
+czwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAgICAg
+ICAgICA8ZWRvbXY+Q29sbGVnZTwvZWRvbXY+DQogICAgICAgICAgICA8ZWRvbXZkPkNvbGxlZ2Ug
+LSBub24tdW5pdmVyc2l0eSBvciBjb21tdW5pdHkgY29sbGVnZTwvZWRvbXZkPg0KICAgICAgICAg
+ICAgPGVkb212ZHM+VmFyaWVzPC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAgICAg
+ICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5UZWNoPC9lZG9tdj4NCiAgICAgICAgICAgIDxl
+ZG9tdmQ+VGVjaG5pY2FsIFNjaG9vbHM8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPlZh
+cmllczwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAg
+ICAgICAgICA8ZWRvbXY+UmVsLiBDb2xsZWdlPC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9tdmQ+
+UmVsaWdpb3VzIENvbGxlZ2U8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPlZhcmllczwv
+ZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAgICAgICAg
+ICA8ZWRvbXY+U3BlY2lhbCBOZWVkczwvZWRvbXY+DQogICAgICAgICAgICA8ZWRvbXZkPlNwZWNp
+YWwgbmVlZHMgc2Nob29sczwvZWRvbXZkPg0KICAgICAgICAgICAgPGVkb212ZHM+QURFUTwvZWRv
+bXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAgICAgICAgICA8
+ZWRvbXY+QWxsIEdyYWRlczwvZWRvbXY+DQogICAgICAgICAgICA8ZWRvbXZkPkFsbCBncmFkZSBs
+ZXZlbHM8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPkFERVE8L2Vkb212ZHM+DQogICAg
+ICAgICAgPC9lZG9tPg0KICAgICAgICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212PkhpZ2g8
+L2Vkb212Pg0KICAgICAgICAgICAgPGVkb212ZD5IaWdoIFNjaG9vbDwvZWRvbXZkPg0KICAgICAg
+ICAgICAgPGVkb212ZHM+VmFyaWVzPC9lZG9tdmRzPg0KICAgICAgICAgIDwvZWRvbT4NCiAgICAg
+ICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5KUi9TUiBIaWdoPC9lZG9tdj4NCiAgICAg
+ICAgICAgIDxlZG9tdmQ+U2V2ZW50aCB0aHJvdWdoIHR3ZWxmdGggZ3JhZGU8L2Vkb212ZD4NCiAg
+ICAgICAgICAgIDxlZG9tdmRzPkFERVE8L2Vkb212ZHM+DQogICAgICAgICAgPC9lZG9tPg0KICAg
+ICAgICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212Pk1pZGRsZTwvZWRvbXY+DQogICAgICAg
+ICAgICA8ZWRvbXZkPk1pZGRsZSBTY2hvb2w8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRz
+PlZhcmllczwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQog
+ICAgICAgICAgICA8ZWRvbXY+UHJpbWFyeTwvZWRvbXY+DQogICAgICAgICAgICA8ZWRvbXZkPlBy
+aW1hcnkgb3IgZWxlbWVudGFyeSBzY2hvb2w8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRz
+PlZhcmllczwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQog
+ICAgICAgICAgICA8ZWRvbXY+KEJsYW5rKTwvZWRvbXY+DQogICAgICAgICAgICA8ZWRvbXZkPlVu
+a25vd24gZ3JhZGUgbGV2ZWxzPC9lZG9tdmQ+DQogICAgICAgICAgPC9lZG9tPg0KICAgICAgICA8
+L2F0dHJkb212Pg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPkdvb2Q8L2F0
+dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5WZXJpZmllZDwvYXR0cnZhZT4NCiAgICAgICAgPC9h
+dHRydmFpPg0KICAgICAgICA8YXR0cm1mcnE+QXMgbmVlZGVkPC9hdHRybWZycT4NCiAgICAgIDwv
+YXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+VFlQRV8xPC9hdHRybGFibD4N
+CiAgICAgICAgPGF0dHJkZWY+VHlwZSBvZiBTY2hvb2w8L2F0dHJkZWY+DQogICAgICAgIDxhdHRy
+ZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cmRvbXY+DQogICAgICAgICAgPGVk
+b20+DQogICAgICAgICAgICA8ZWRvbXY+Q2hhcnRlcjwvZWRvbXY+DQogICAgICAgICAgICA8ZWRv
+bXZkPkFyaXpvbmEgQ2hhcnRlciBTY2hvb2w8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRz
+PkFyaXpvbmEgQm9hcmQgb2YgQ2hhcnRlciBTY2hvb2xzPC9lZG9tdmRzPg0KICAgICAgICAgIDwv
+ZWRvbT4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5QdWJsaWM8L2Vkb212
+Pg0KICAgICAgICAgICAgPGVkb212ZD5QdWJsaWMgU2Nob29sPC9lZG9tdmQ+DQogICAgICAgICAg
+ICA8ZWRvbXZkcz5BeiBEZXBhcnRlbWVudCBvZiBFZHVjYXRpb248L2Vkb212ZHM+DQogICAgICAg
+ICAgPC9lZG9tPg0KICAgICAgICAgIDxlZG9tPg0KICAgICAgICAgICAgPGVkb212PkJJQTwvZWRv
+bXY+DQogICAgICAgICAgICA8ZWRvbXZkPkJ1cmVhdSBvZiBJbmRpYW4gQWZmYWlycyBvcGVyYXRl
+ZCBzY2hvb2w8L2Vkb212ZD4NCiAgICAgICAgICAgIDxlZG9tdmRzPlVTIEJJQTwvZWRvbXZkcz4N
+CiAgICAgICAgICA8L2Vkb20+DQogICAgICAgICAgPGVkb20+DQogICAgICAgICAgICA8ZWRvbXY+
+Q2xvc2VkPC9lZG9tdj4NCiAgICAgICAgICAgIDxlZG9tdmQ+Q2xvc2VkIFNjaG9vbDwvZWRvbXZk
+Pg0KICAgICAgICAgICAgPGVkb212ZHM+QURFUTwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+
+DQogICAgICAgICAgPGVkb20+DQogICAgICAgICAgICA8ZWRvbXY+UHJpdmF0ZTwvZWRvbXY+DQog
+ICAgICAgICAgICA8ZWRvbXZkPlByaXZhdGUgb3IgUGFyb2NoaWFsIG9wZXJhdGVkIHNjaG9vbDwv
+ZWRvbXZkPg0KICAgICAgICAgICAgPGVkb212ZHM+VmFyaWVzPC9lZG9tdmRzPg0KICAgICAgICAg
+IDwvZWRvbT4NCiAgICAgICAgICA8ZWRvbT4NCiAgICAgICAgICAgIDxlZG9tdj5UcmliYWw8L2Vk
+b212Pg0KICAgICAgICAgICAgPGVkb212ZD5UcmliZSBvcGVyYXRlZCBzY2hvb2w8L2Vkb212ZD4N
+CiAgICAgICAgICAgIDxlZG9tdmRzPlZhcmllczwvZWRvbXZkcz4NCiAgICAgICAgICA8L2Vkb20+
+DQogICAgICAgIDwvYXR0cmRvbXY+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRy
+dmE+R29vZDwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPnZlcmlmaWVkPC9hdHRydmFlPg0K
+ICAgICAgICA8L2F0dHJ2YWk+DQogICAgICAgIDxhdHRybWZycT5BcyBuZWVkZWQ8L2F0dHJtZnJx
+Pg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5LSU5ERVI8
+L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5LaW5kZXJnYXJkZW4gVGF1Z2h0PzwvYXR0cmRl
+Zj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFp
+Pg0KICAgICAgICAgIDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+
+Tm90IFZlcmlmaWVkPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICA8L2F0dHI+
+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkZJUlNUPC9hdHRybGFibD4NCiAgICAg
+ICAgPGF0dHJkZWY+Rmlyc3QgR3JhZGUgVGF1Z2h0PzwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJk
+ZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRy
+dmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+Tm90IFZlcmlmaWVkPC9hdHRy
+dmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAg
+ICAgICAgPGF0dHJsYWJsPlNFQ09ORDwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlNlY29u
+ZCBHcmFkZSBUYXVnaHQ/PC9hdHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRy
+ZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2
+YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3QgVmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwv
+YXR0cnZhaT4NCiAgICAgIDwvYXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+
+VEhJUkQ8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5UaGlyZCBHcmFkZSBUYXVnaHQ/PC9h
+dHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0
+dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0
+cnZhZT5Ob3QgVmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgIDwv
+YXR0cj4NCiAgICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+Rk9VUlRIPC9hdHRybGFibD4N
+CiAgICAgICAgPGF0dHJkZWY+Rm91cnRoIEdyYWRlIFRhdWdodD88L2F0dHJkZWY+DQogICAgICAg
+IDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAg
+ICA8YXR0cnZhPk1lZGl1bTwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmll
+ZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0
+dHI+DQogICAgICAgIDxhdHRybGFibD5GSUZUSDwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVm
+PkZpZnRoIEdyYWRlIFRhdWdodD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8
+L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPk1lZGl1bTwv
+YXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZhZT4NCiAgICAg
+ICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRy
+bGFibD5TSVhUSDwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlNpeHRoIEdyYWRlIFRhdWdo
+dD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAg
+ICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPk1lZGl1bTwvYXR0cnZhPg0KICAgICAgICAg
+IDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAg
+ICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5TRVZFTlRIPC9hdHRy
+bGFibD4NCiAgICAgICAgPGF0dHJkZWY+U2V2ZW50aCBHcmFkZSBUYXVnaHQ/PC9hdHRyZGVmPg0K
+ICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0dHJ2YWk+DQog
+ICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0cnZhZT5Ob3Qg
+VmVyaWZpZWQ8L2F0dHJ2YWU+DQogICAgICAgIDwvYXR0cnZhaT4NCiAgICAgIDwvYXR0cj4NCiAg
+ICAgIDxhdHRyPg0KICAgICAgICA8YXR0cmxhYmw+RUlHSFRIPC9hdHRybGFibD4NCiAgICAgICAg
+PGF0dHJkZWY+RWlnaHRoIEdyYWRlIFRhdWdodD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVm
+cz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZh
+Pk1lZGl1bTwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZh
+ZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAg
+ICAgIDxhdHRybGFibD5OSU5USDwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPk5pbnRoIEdy
+YWRlIFRhdWdodDwvYXR0cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+
+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAgIDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQog
+ICAgICAgICAgPGF0dHJ2YWU+Tm90IFZlcmlmaWVkPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2
+YWk+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPlRFTlRI
+PC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+VGVudGggR3JhZGUgVGF1Z2h0PzwvYXR0cmRl
+Zj4NCiAgICAgICAgPGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFp
+Pg0KICAgICAgICAgIDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+
+Tm90IFZlcmlmaWVkPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICA8L2F0dHI+
+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPkVMRVZFTlRIPC9hdHRybGFibD4NCiAg
+ICAgICAgPGF0dHJkZWY+RWxldmVudGggR3JhZGUgVGF1Z2h0PzwvYXR0cmRlZj4NCiAgICAgICAg
+PGF0dHJkZWZzPlZhcmllczwvYXR0cmRlZnM+DQogICAgICAgIDxhdHRydmFpPg0KICAgICAgICAg
+IDxhdHRydmE+TWVkaXVtPC9hdHRydmE+DQogICAgICAgICAgPGF0dHJ2YWU+Tm90IFZlcmlmaWVk
+PC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQogICAgICA8L2F0dHI+DQogICAgICA8YXR0
+cj4NCiAgICAgICAgPGF0dHJsYWJsPlRXRUxGVEg8L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRl
+Zj5Ud2VsZnRoIEdyYWRlIFRhdWdodD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJp
+ZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPk1lZGl1
+bTwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZhZT4NCiAg
+ICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxh
+dHRybGFibD5QUkVTQ0hMPC9hdHRybGFibD4NCiAgICAgICAgPGF0dHJkZWY+UHJlc2Nob29sIExl
+dmVsIFRhdWdodD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZz
+Pg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPk1lZGl1bTwvYXR0cnZhPg0K
+ICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRy
+dmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5BQ0NV
+UkFDWTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPk9yaWdpbmFsICBBY2N1cmFjeTwvYXR0
+cmRlZj4NCiAgICAgICAgPGF0dHJkZWZzPkFESFM8L2F0dHJkZWZzPg0KICAgICAgPC9hdHRyPg0K
+ICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5CT0FSRElORzwvYXR0cmxhYmw+DQogICAg
+ICAgIDxhdHRyZGVmPkJvYXJkaW5nIFNjaG9vbD88L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVm
+cz5WYXJpZXM8L2F0dHJkZWZzPg0KICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZh
+Pk1lZGl1bTwvYXR0cnZhPg0KICAgICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZh
+ZT4NCiAgICAgICAgPC9hdHRydmFpPg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAg
+ICAgIDxhdHRybGFibD5SRUdJT048L2F0dHJsYWJsPg0KICAgICAgICA8YXR0cmRlZj5SZWdpb24g
+b2YgU3RhdGU8L2F0dHJkZWY+DQogICAgICAgIDxhdHRyZGVmcz5WYXJpZXM8L2F0dHJkZWZzPg0K
+ICAgICAgICA8YXR0cnZhaT4NCiAgICAgICAgICA8YXR0cnZhPk1lZGl1bTwvYXR0cnZhPg0KICAg
+ICAgICAgIDxhdHRydmFlPk5vdCBWZXJpZmllZDwvYXR0cnZhZT4NCiAgICAgICAgPC9hdHRydmFp
+Pg0KICAgICAgPC9hdHRyPg0KICAgICAgPGF0dHI+DQogICAgICAgIDxhdHRybGFibD5XRUJfUEFH
+RTwvYXR0cmxhYmw+DQogICAgICAgIDxhdHRyZGVmPlNjaG9vbCBXZWIgUGFnZSBBZGRyZXNzPC9h
+dHRyZGVmPg0KICAgICAgICA8YXR0cmRlZnM+VmFyaWVzPC9hdHRyZGVmcz4NCiAgICAgICAgPGF0
+dHJ2YWk+DQogICAgICAgICAgPGF0dHJ2YT5NZWRpdW08L2F0dHJ2YT4NCiAgICAgICAgICA8YXR0
+cnZhZT5QYXJpdGlhbGx5IFZlcmlmaWVkPC9hdHRydmFlPg0KICAgICAgICA8L2F0dHJ2YWk+DQog
+ICAgICA8L2F0dHI+DQogICAgICA8YXR0cj4NCiAgICAgICAgPGF0dHJsYWJsPlBMQUNfSUROTzwv
+YXR0cmxhYmw+DQogICAgICA8L2F0dHI+DQogICAgPC9kZXRhaWxlZD4NCiAgPC9lYWluZm8+DQog
+IDxkaXN0aW5mbz4NCiAgICA8cmVzZGVzYz5Eb3dubG9hZGFibGUgRGF0YTwvcmVzZGVzYz4NCiAg
+ICA8c3Rkb3JkZXI+DQogICAgICA8ZGlnZm9ybT4NCiAgICAgICAgPGRpZ3RpbmZvPg0KICAgICAg
+ICAgIDx0cmFuc2l6ZT4zLjAxNzwvdHJhbnNpemU+DQogICAgICAgIDwvZGlndGluZm8+DQogICAg
+ICA8L2RpZ2Zvcm0+DQogICAgPC9zdGRvcmRlcj4NCiAgPC9kaXN0aW5mbz4NCiAgPG1ldGFpbmZv
+Pg0KICAgIDxtZXRleHRucz4NCiAgICAgIDxvbmxpbms+aHR0cDovL3d3dy5lc3JpLmNvbS9tZXRh
+ZGF0YS9lc3JpcHJvZjgwLmh0bWw8L29ubGluaz4NCiAgICAgIDxtZXRwcm9mPkVTUkkgTWV0YWRh
+dGEgUHJvZmlsZTwvbWV0cHJvZj4NCiAgICA8L21ldGV4dG5zPg0KICAgIDxtZXRleHRucz4NCiAg
+ICAgIDxvbmxpbms+aHR0cDovL3d3dy5lc3JpLmNvbS9tZXRhZGF0YS9lc3JpcHJvZjgwLmh0bWw8
+L29ubGluaz4NCiAgICAgIDxtZXRwcm9mPkVTUkkgTWV0YWRhdGEgUHJvZmlsZTwvbWV0cHJvZj4N
+CiAgICA8L21ldGV4dG5zPg0KICAgIDxtZXRleHRucz4NCiAgICAgIDxvbmxpbms+aHR0cDovL3d3
+dy5lc3JpLmNvbS9tZXRhZGF0YS9lc3JpcHJvZjgwLmh0bWw8L29ubGluaz4NCiAgICAgIDxtZXRw
+cm9mPkVTUkkgTWV0YWRhdGEgUHJvZmlsZTwvbWV0cHJvZj4NCiAgICA8L21ldGV4dG5zPg0KICA8
+L21ldGFpbmZvPg0KICA8RXNyaT4NCiAgICA8Q3JlYURhdGU+MjAwODA4MTE8L0NyZWFEYXRlPg0K
+ICAgIDxDcmVhVGltZT4xMDA1MjQwMDwvQ3JlYVRpbWU+DQogICAgPFN5bmNPbmNlPkZBTFNFPC9T
+eW5jT25jZT4NCiAgICA8U3luY0RhdGU+MjAxMTA2MjA8L1N5bmNEYXRlPg0KICAgIDxTeW5jVGlt
+ZT4xMzEyMDkwMDwvU3luY1RpbWU+DQogICAgPE1vZERhdGU+MjAxMTA2MjA8L01vZERhdGU+DQog
+ICAgPE1vZFRpbWU+MTMxMjA5MDA8L01vZFRpbWU+DQogICAgPE1ldGFJRD57MDEyODg5MzEtOEY1
+Qi00OTg4LTlGNTUtRTcwODFGMDkxMkU3fTwvTWV0YUlEPg0KICA8L0Vzcmk+DQogIDxkYXRhSWRJ
+bmZvPg0KICAgIDxkYXRhRXh0Pg0KICAgICAgPGdlb0VsZT4NCiAgICAgICAgPEdlb0JuZEJveCBl
+c3JpRXh0ZW50VHlwZT0ic2VhcmNoIiAvPg0KICAgICAgPC9nZW9FbGU+DQogICAgPC9kYXRhRXh0
+Pg0KICAgIDxnZW9Cb3ggZXNyaUV4dGVudFR5cGU9ImRlY2RlZ3JlZXMiIC8+DQogICAgPGRlc2NL
+ZXlzPg0KICAgICAgPHRoZXNhTmFtZSB1dWlkcmVmPSI3MjNmNjk5OC0wNThlLTExZGMtODMxNC0w
+ODAwMjAwYzlhNjYiIC8+DQogICAgPC9kZXNjS2V5cz4NCiAgPC9kYXRhSWRJbmZvPg0KICA8c3Bh
+dFJlcEluZm8+DQogICAgPFZlY3RTcGF0UmVwPg0KICAgICAgPGdlb21ldE9ianMgTmFtZT0iU0NI
+T09MU19FVkVSWVRISU5HXzhfN18wOCIgLz4NCiAgICA8L1ZlY3RTcGF0UmVwPg0KICA8L3NwYXRS
+ZXBJbmZvPg0KPC9tZXRhZGF0YT4=</Data></Enclosure><Thumbnail><Data EsriPropertyType="PictureX">/9j/4AAQSkZJRgABAQEAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0a
+HBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIy
+MjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCACFAMgDASIA
+AhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQA
+AAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3
+ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWm
+p6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEA
+AwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSEx
+BhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElK
+U1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3
+uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD3+iii
+gAooooAKKKKACiiigChba3pl7qt5pdtfQy31mFNxAjZaMN0z/njj1q/UENla29zcXENtFHPcEGaR
+EAaQgYG49Tgcc1PQBzEPjzRn8Tavoc7S2kulrG01xcqEhO/AUByeuWA5xntnBrXttc0281m+0i3u
+le/sVRriIA5QOMrz0PHpUmoaTp2rWz2+oWNvdQuVLpNGGDFTlc564NWljRWLKihiACQOSB0oAdVS
+51Sws720s7m8ghubtmW3idwGlIGTtHfAq3WLrHhbStc1bSdTvYWa70qYzWzo5XBI6HHUZAOPUemQ
+QDaorNj1/S5fEM2gx3kbapDALiS3GcqhOASemeRx1wQe9Ns/EWmX2v6holvOzX+nqj3EZRgFDjK8
+kYPHp60AalNd1jjZ3YKigliewFY+hNr9zBqC+Ibe0gJuZFtRZysSYOiljwQ3uMfQVnx6feeGbTT9
+I05lTQYraf7VqN3eZntjjKsN4IPJPXgAdgMEA1NN8TaLq2n2F9Z6jA9vfsUtSzbDKwzlVDYJI2nj
+2rWrgbvVvCHhW20bS/Emqw6hqmmiJoJZ4Q8wLnYJAFHH88AE5OCe+oAKKKKACiiigAooooAKKKKA
+CiiigAooooAKKKKACiiigCpqmow6RpV1qNwsrw20TSusKF3IAycAdTUlldx39jb3kSyLHPGsiiRC
+jAEZGVPIPsanooAKzb+/v7bVdNtrbSpLq1uXcXN0sqqLYBcqSp5bJ446UaJpt1pVjJBd6rc6lI08
+komuAoZVZiQg2gcAcfywMAO1LVtLsJ7Oy1C7hik1CQwW8UhwZmxyo/z1IHUigCtqFxdRXNjdaNpl
+tf8A2qdIbu4EyoY4Bu+bODu2kn5fc+ta4RA7OFUOwALAcnHT+Zrm4NFl8I6JZaZ4R0y3e3F4DNHc
+3LDy4nYl2UnJJGeB/PocK68Yadp/xLv49ZvNS0mGwsdsYuHUWVyrfNvXj/WcMAMknaR1BFAHd393
+b2NjNc3V3HaQIvzTyMFVOwJJ46+tcXqsE2m6Tpmm+K9f0m90SaGaPUptRURSXLY3J5e0gDAH14z1
+xjEvtKtvE/iubSNG1WG50TVoHvtZBu1uHG+NVh8tGyY/4XDD2zwFFdXq/hjSIfCtkNU0yXxA+hw+
+ZbiRA80jIvYcBicDg8HA4OKAKuhwm78aPqNjE0/hu40i2FlMvleQhRyQqLjeMDB9jnP8OO2rBGtS
+Ww0KKDQrqOzvkw7fJGtgNgKq6545wuB9OuAd6gAooooAKKKKACiiigAooooAKKKKACiiigArNmtd
+UfxDa3UWoxx6XHA6T2ZgBaSQkbWD5yMc8fzzxzfxL8eS+BNGtZ7XTWvry9n8iBN2FDYzyB8x9gB+
+I72PFdr42u4RJ4a1DT7VTYyrJbzxkuZyBtKv2xzjIAz1Bz8oB1U8wt7eWYpI4jQuVjUszYGcADqf
+aue8IeJ77xRBc3dx4evdJtAV+yvdsA86kcnZ1XB/PNVPCX/CX6fYaFpmu2kd032OQ32oG6DPHIG/
+doRj5ztIBbuQTnj5uwoAKKzZtGjn8Q2usG7vFktoHhFukxELhiDuZO5GP84GK03/AAksl3rEcP8A
+ZsNv5C/2ZM29m83ad3mr6bsdO3rQBt1HJBDM8TyxRu8Tb42ZQSjYIyPQ4JH0JrB0u08VxS6S2p6p
+YTxRWbLqCRWxVpp+NrIc4AHPYDrxyNvKeFJviL4qk1qTX/8AinrGSGS1tIYoR5ySbjiQE8nA4z0b
+gjHWgD0a8vLbT7Oa8vJ44LaFC8ksjYVVHUk1g+Kbjw3N4dh1TWLCPV9OV0eHy7b7VnzPlDKADkYb
+r/XFPu/DuoXc2kxya5LJYW0Dw39tNBG4v8pty/GBzkkAY57U7wxZ28Hh6TSk1xtWFvJJbyTq6B4j
+k/u/3eNhUEDHBHGMDAABjeL/AA1b3l7Z3Fhb2VrLextZ3d7G7RXYt9hYCDaPmf5emDwMdCcQXcui
++NPhvbXuq3eq6JpUcis0lzceRLIiNtHmNk5D/mSQRzW5pPgbQ9J0rSLD7O14NIkaWzmu23yRuxJJ
+B/Hp04B6gGt27s7XULSS1vLeK4t5Bh4pUDKw9weDQBwPjHwoniPwlYeD/Dt/arDZT2puo3uC0iWw
+BxzycnAIz1xXoiqEQKOgGK4zQtB12Px9rGuX4sLSzkX7PDDaRqWukGCkkjkbtyjK4/oBntKACiii
+gAooooAKKKKACiiigAooooAKKKKAOG+KXg678Y+H7aKzcGSxuPtX2YnH2napAj3ZwpJPUg1v3Fpq
++pWujzJetpE0Usc15bxKswkXb80O4jpk/eHpW1XG+FdC8W6T4p12fWNeTUdGupDJZRPkyREnOOgC
+gDjAyDweO4B0Wt61ZeHtHuNV1F3S0twDIyRs5GSAOACepFXkcSRq652sARkY4+hpxAIwRkGs3W9G
+j1yxjtZLu8tQk8c2+0mMTkowbaSOxxz/AI0AaVcFYeJdStvitrOk6qLiHS3hhNjJO0aQhuBhTwWL
+MSMZJ+Xp6XvE+r+LEs9bttB0BzdQpCLC7aWNlmZzhztJGNg9frjFec/Ef4UXtxHrHi+fWL7UNRjj
+ikt7aG2DBCpG9QpJ+QckAdBnO7uAanjr40HTH1XQ9BsLka9Z3Hl5uIdyeWqlnkAB7AcZ7EH2r0/Q
+NWh1zQLHUoJ4ZluIVcvCRt3Y+YdTjByMZOMV50fC+l/GLRrTVtT0yfTzDOnk3u1Flv7cAZLAcqGJ
+bA5x2Jru3vdO8LvpOi22nTQ2kwdI2toP3FsqLuJkbooPPPfkmgDcrJ0vw1o+iHUG0yyjtG1CUzXD
+RZBZyOue3fAHAycdafF4h0iefUoItQgeXTAGvFVsmEFdwJ/AH8j6Vi2XjObVPEOlwabo9xd6DqNo
+08erxn5FYZ+VlIyvTHODk9OtAFfQfCGvaL4mhkbxPc3OgWtqYYLOYl5JHY5LSsepB6EYwMDgZ3dr
+XAnxu2ueJtHh8OatpX2FbqaDU7e7by7nK4UbEbDdSMYHUrnvXVX+pXttrOmWVtprXMF0z/aLhZlX
+7MqrkMVPLAnjjp+IoA1KKxfDlt4htob0eIdQtLyR7p2tjbQmMJCfuqff88erda2qACiiigAooooA
+KKKKACiiigAooooAKKKKAIL27jsLG4vJVkaOCNpGEaF2IAycKOSfYVzel3NvY2Op+MLrWdQfSr6F
+Lxbe8TaLONU5CrjOT7deOvU7Ot6ldaVYxz2mlXOpSNPHEYbcqGVWYAudxHAHP88DJESS61L4murW
+40+0/sD7KpiuPNzI8pJDKyYxjH9OTkhQDQs7u31CygvLWUS288ayxOOjKwyD+RqtrOuab4fs0u9U
+ult4HmSFXYE5dzhRx/kVU8V+H5fEvh+XSYNVudMErKJJrXG5o8/MnsCMjj9RkHStbGC00+3slDSQ
+26IiGZjIx24wSTyTwDk96AOJ8erpFpruiX+veIdVtrVp44rbTbMsFlnDhg7bBuIHQj6Y569PremC
+9ay1Bbm5STTXa5jiiufJjnOwgLIeQV5/DnsSDPrUFneaXdWt1ClwDC37nzAjNuBUANkbSclQcjrX
+zNqfxI8WeD4JPCTiI20cYVra7XfLBFJH/qC6kZChhhhg8DpytAH0l4V1lvEHhfT9UdLeOS4iDPHb
+ziZEboQGHBxj8Onal8TeHrLxToFzo+oS3EdrOBva3l2MMHPXoRx0IIr5K+H/AI6uvC3ibSri+vtR
+bSLQyK1rBLlQrg5AQnGN2CR7Z619XodN8eeDFMsNyNO1W2BMb7opArfr/Q+4PIBBBolx4bsNJ0/w
+vaWQto5o47xrt28wwKpBIYcs/QDPA6dOlzW9Vl0K1sfsejXV+stzHbGO0Vf3KNxvIPRR+XTkda1I
+II7a3igiBEcSBFBYsQAMDk8n6mpKAPO/iB8PfDXiKC6vBJZaZrtun2sX3yqQF6NKP4k+XGSOMfUH
+Y8KWFxqS2viTxBplnD4gSBrQXNrP5kcsO7IdcHADdR1OD74EXjfwLH4n069/s+S30/VbyFLWa+aH
+ezW4bc0fUYz6+2Pp02laemk6RZadExdLWBIFYgAsFUDOBx2oAzbez8RL4zvLufUrZvD7WyJb2axf
+vFlz8zFvz+oIGBjJ3a53xrbwPoK3k1pql4bC4ju47fTJCssjq3AwCMjnJHtkciugRi8asVZSQDtb
+qPY0AOooooAKKKKACiiigAooooAKKKKACiiigAorL1jWRpthevaW51C/toPOGnwSKJnBJA4PQEg8
+98HAJ4q09/Db6Z9vvmWyiWISTfaHVRCMZIY5xx9cUAWqKo6nq9ho+jz6tfXCxWMEfmPKAWG31GMk
+9ulV7vxBb20ukLHb3d1HqkmyKa2hLogKlgzn+FSB1/oDQBwfxK+G2o+KtbXVdL+w7xYNbyRzyOnm
+uGBjJwCCF5YdOVGcg8eZT/ALxhcXuqSXd3bTOkRmhuBLu+1ynkpzgg9fmbHOOucj3u/1XxP/AGvL
+aab4eia2imtx9rubsKssTZ80qoBIK4HB+voDpazpt1qcVqlrqtzpxhuY5na3CkyqpyYzkHg/5yOK
+APi3WPCXiPw3DFc6tpF5YxOyiOWWMqCxG4DPrjt14Poa9C+BviLxP/wl1ro1pfCbS9p861upTsRO
+SWjHZh6DrzngEjqPiH4Vtk1jxNqWuX3iHVooLI31pbrGY7e3d2EahZASuVxnG0fKOc4OcH4aeGtY
+8XeO5PFlkLPSbWzkidVMSzBwQBtH+1sBLPwdx7EnAB7ne3+r6tca7oenWt1pVxBAn2XVpoleGRnB
++6M846d8dx0B5CSX4teGfDVvM40zxBcRF1liSNvOI3rtOQVDfKG6AEbuQcZHfeI9IuNc0d7G11a7
+0uVnRxc2pG8bWBxz2OP8jINHwfrV1qVtfWOp3ME+rabctBdtbwPHHk/Mu3cOflIzj+WCQDoElBEY
+ceXK67vLZhuHTI464z2pl6Lo2NwLFoluzG3kmYEoHx8u4DnGcZxWXrvhq21iRL6Nvs2sW0EsVlfA
+bmtzIuCwU8N+P4YrmdH+J+kx69F4T1k39prCP9mWa9gEa3TDADjbwN5yR29+RkA7bSxfrpVqNUeB
+78RL9oaAERl8c7c84zVusSHSbu08T32tz69dPYS26ounyBRDAV5Lg/56nJPGNugAooooAKKKKACi
+iigAooooAKKKKACo5zIsEhhUNKFJQHuccelSUUAeGeF/hLqXirU7nxT46uL601Ge5Dra27iIjYeM
+sCSF4XGMEbc56GvTPFHgXTfF0sf9p3eoi3WEwtawXTJFIN6vllHU5Uf/AKwCOnooAiW2gS1W1WGM
+W6p5Yi2jaFxjbj0xxipaKKAGh0MhjDKXUBiueQDnBx+B/I06ss6TpGn6te+Imhjhu5bcR3N0zEDy
+kyecnAA7n2HpWfpnipNT1iJYvsZ0a8t1k029F0N93JyXQRkZ+UDn0/HAAN+5toby1mtbiNZYJkaO
+SNhkMpGCD7EGuAuPB+j/AA4t7/xP4atPKmitFhazmv8AybaTDD53Z8/MBnknnPqc11mjaFbeHI9R
+aC4vZ0url7tlnlaYxluSqDrjOTjk5PesvU9d8Na38PJ9W1jdBodxCfM+2QbWXnA+RgctuA24BycY
+zQBEPid4Uk0a3v4tYgb7TcfZIURWdjNkAgIAGIG4HOOhHqKwfhs3iDVvEmp+INTn0u5t5IfsTT2F
+3I6tLFIwyIydqjaR+hH3mr548ZaAPCfiGC304XYh8tJ7S+Zx/pQPzLNHtHyjkYGSRjnnivevgf4r
++0+ALqTWtXtf9DvDGWmYI6B8EGRifmLOzYJ5JzyewB6drMupQaNdy6PbQ3OorGTbwzvsR27An/8A
+V9R1CLptrcz2uoX2nWh1KKML5uwO0R6lVcgHGc+n0q4ZowkjBtwjzuCDcQQM4wOc+1c54Vvb7xFo
+d5f3d2xs7+WQ2QS3e2mggI2gNk53AgncPqDyAADW1zRLHxFo1zpOpRvJaXChZFSQoSAQRyDnqBVy
+CCK1t4reBAkUSBEQdFUDAH5Vh2ug6lpiaFaafrUp0+wDJdJdp50t2u3C5kONpB54H6DFdBQAUUUU
+AFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFAAQCMEZBqg+i6W81jK2nWvmWGfsjeUP3AIwQn90
+Y9Kv0UAcj4o0zxC/inw5qmiXdz9nhuPJ1C0EyrE0DclyCOSMAdz0xjk1093Z2uoWklreW8VxbyDD
+xSoGVh7g8Gp6zbVdZGuX7XctmdKKR/Y0jVhKGwd+8njrjGKAPPfiDpl14r+InhjRLKRzBprC/vYV
+lkgxGXUK6uBjI2kDBzzxjkjTh8F+C10TUfACXivcXWbu4RpkN1lmyJOnUYGOOmM9ec34l6xP4J8Z
++HPFkVnJdwSJJp13FHuLFWKsu3nbuyCQMc4PtjpPDHhBbMSarqd5JqWrXBkaG/urVEuLeGTkR9O3
+oeBkgADigDH0PRtI+FdxeXdx4hvLqx1q9ihhjnXzWFwSwJLqOSe5IH3ecmtW/k1XSvEdpbnxbaKd
+U1LzILK7thu+zrH88UZUjnODk+3fO7zvwRBqnw08e6n4b1UX1/ockJube4WFjDEVBk38/KpwGBxx
+uAq7qHj6PULjwl4yv9GvrXS4XvpEEbSSSMqphXwnyAEZyHI6Nzt5IB61rOs2Hh/SZ9U1S4FvZwAG
+SQgnGSAOByeSKuo6yRq6MGRgCpHcGqWl6jaa/olrqECO1peQrIizR7SVIzyp/wD1VfoAKKKKACii
+igAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAGuiSAB1VgCCMjOCOhp1FFABTFijWERL
+GoiC7QgHAHTGPSiigB4AAwBgCiiigAooooAKKKKACiiigD//2Q==</Data></Thumbnail></Binary><mdHrLvName Sync="TRUE">dataset</mdHrLvName><refSysInfo><RefSystem><refSysID><identCode code="26912"></identCode><idCodeSpace Sync="TRUE">EPSG</idCodeSpace><idVersion Sync="TRUE">7.4.1</idVersion></refSysID></RefSystem></refSysInfo><mdDateSt Sync="TRUE">20110620</mdDateSt></metadata>
diff --git a/pysal/examples/geodanet/schools.shx b/pysal/examples/geodanet/schools.shx
new file mode 100644
index 0000000..5c20492
Binary files /dev/null and b/pysal/examples/geodanet/schools.shx differ
diff --git a/pysal/examples/geodanet/streets.dbf b/pysal/examples/geodanet/streets.dbf
new file mode 100644
index 0000000..ce6d200
Binary files /dev/null and b/pysal/examples/geodanet/streets.dbf differ
diff --git a/pysal/examples/geodanet/streets.prj b/pysal/examples/geodanet/streets.prj
new file mode 100644
index 0000000..b7b18c8
--- /dev/null
+++ b/pysal/examples/geodanet/streets.prj
@@ -0,0 +1 @@
+PROJCS["NAD_1983_StatePlane_Arizona_Central_FIPS_0202_Feet",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",699998.6],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-111.9166666666667],PARAMETER["Scale_Factor",0.9999],PARAMETER["Latitude_Of_Origin",31.0],UNIT["Foot_US",0.3048006096012192]]
\ No newline at end of file
diff --git a/pysal/examples/geodanet/streets.sbn b/pysal/examples/geodanet/streets.sbn
new file mode 100644
index 0000000..4f4e82f
Binary files /dev/null and b/pysal/examples/geodanet/streets.sbn differ
diff --git a/pysal/examples/geodanet/streets.sbx b/pysal/examples/geodanet/streets.sbx
new file mode 100644
index 0000000..5fc350e
Binary files /dev/null and b/pysal/examples/geodanet/streets.sbx differ
diff --git a/pysal/examples/geodanet/streets.shp b/pysal/examples/geodanet/streets.shp
new file mode 100644
index 0000000..b5809a7
Binary files /dev/null and b/pysal/examples/geodanet/streets.shp differ
diff --git a/pysal/examples/geodanet/streets.shx b/pysal/examples/geodanet/streets.shx
new file mode 100644
index 0000000..4733484
Binary files /dev/null and b/pysal/examples/geodanet/streets.shx differ
diff --git a/pysal/examples/juvenile.dbf b/pysal/examples/juvenile.dbf
new file mode 100644
index 0000000..c5e8e3f
Binary files /dev/null and b/pysal/examples/juvenile.dbf differ
diff --git a/pysal/examples/juvenile.gwt b/pysal/examples/juvenile.gwt
new file mode 100644
index 0000000..6d9f409
--- /dev/null
+++ b/pysal/examples/juvenile.gwt
@@ -0,0 +1,2803 @@
+1 168 juvenile ID
+1 2         14.1421356
+2 1         14.1421356
+2 58                  7
+2 57         7.61577311
+2 5                  5
+2 4         3.60555128
+2 3         5.09901951
+2 6         14.1421356
+3 2         5.09901951
+3 57         2.82842712
+3 5         3.60555128
+3 4         2.23606798
+3 6         13.3416641
+3 56          9.8488578
+3 55         9.43398113
+3 54         11.4017543
+3 53         11.3137085
+3 52         14.1421356
+3 58         2.23606798
+3 59         13.3416641
+4 2         3.60555128
+4 3         2.23606798
+4 57         4.12310563
+4 5                  2
+4 7         14.1421356
+4 6         12.0415946
+4 56         11.4017543
+4 55         10.7703296
+4 54         12.5299641
+4 53         12.2065556
+4 58         4.47213595
+5 2                  5
+5 3         3.60555128
+5 4                  2
+5 7         12.1655251
+5 6         10.0498756
+5 56          11.045361
+5 55          10.198039
+5 54         11.7046999
+5 53         11.1803399
+5 52          13.892444
+5 57         4.12310563
+5 58         5.65685425
+6 2         14.1421356
+6 3         13.3416641
+6 4         12.0415946
+6 5         10.0498756
+6 51         8.48528137
+6 49         10.2956301
+6 48         13.6014705
+6 9         9.05538514
+6 8         9.43398113
+6 7         3.60555128
+6 10         13.0384048
+6 52         13.3416641
+6 53          12.083046
+6 54         13.4164079
+6 55         13.6014705
+6 57          12.083046
+7 4         14.1421356
+7 5         12.1655251
+7 6         3.60555128
+7 51                  5
+7 49         7.28010989
+7 48         10.2956301
+7 9         7.28010989
+7 8         6.32455532
+7 47          13.892444
+7 11                 14
+7 10         11.1803399
+7 50         11.4017543
+7 52         11.1803399
+7 53         10.6301458
+7 54         12.0415946
+7 55         12.8062485
+7 57         13.1529464
+79 19         13.6014705
+79 18          12.083046
+79 81         3.60555128
+79 80         1.41421356
+79 21                 14
+79 22                 13
+79 23                 12
+79 27         13.9283883
+79 28         12.5299641
+79 29         10.2956301
+79 30         7.81024968
+79 31         9.21954446
+79 73         12.0415946
+79 74         10.4403065
+79 75                  8
+79 76         4.47213595
+79 77         3.16227766
+79 78         9.21954446
+8 6         9.43398113
+8 7         6.32455532
+8 49                  1
+8 48         4.24264069
+8 9         4.12310563
+8 47         7.81024968
+8 46         8.06225775
+8 45         8.54400375
+8 12         9.21954446
+8 11         8.24621125
+8 10         6.40312424
+8 50         7.07106781
+8 44         12.2065556
+8 43         12.8062485
+8 42         13.0384048
+8 41         12.5299641
+8 40          13.892444
+8 51         2.23606798
+8 52         13.6014705
+9 6         9.05538514
+9 7         7.28010989
+9 8         4.12310563
+9 49                  4
+9 48         7.28010989
+9 47         10.2956301
+9 46                 10
+9 45         9.89949494
+9 12         8.24621125
+9 11         7.28010989
+9 10                  4
+9 50                 11
+9 41         14.1421356
+9 51         5.83095189
+10 6         13.0384048
+10 7         11.1803399
+10 8         6.40312424
+10 9                  4
+10 47         9.05538514
+10 46         8.24621125
+10 45         7.61577311
+10 12         4.47213595
+10 11         3.60555128
+10 44          12.083046
+10 43                 13
+10 42         12.5299641
+10 41         11.6619038
+10 40         13.0384048
+10 48         7.28010989
+10 49         5.65685425
+10 50         11.7046999
+10 51         8.60232527
+11 7                 14
+11 8         8.24621125
+11 9         7.28010989
+11 10         3.60555128
+11 45                  5
+11 12                  1
+11 37         11.4017543
+11 44         9.21954446
+11 43          10.198039
+11 42         9.48683298
+11 41         8.54400375
+11 40          9.8488578
+11 39         10.8166538
+11 36         14.1421356
+11 46         6.08276253
+11 47         7.28010989
+11 48         7.07106781
+11 49         7.28010989
+11 50         11.4017543
+11 51         10.4403065
+12 8         9.21954446
+12 9         8.24621125
+12 10         4.47213595
+12 11                  1
+12 37         10.6301458
+12 44         9.05538514
+12 43         10.0498756
+12 42         9.21954446
+12 41         8.24621125
+12 40         9.48683298
+12 39         10.2956301
+12 36          13.453624
+12 35          13.453624
+12 34         13.6014705
+12 45         5.09901951
+12 46         6.32455532
+12 47         7.61577311
+12 48         7.81024968
+12 49         8.24621125
+12 50         12.0415946
+12 51         11.4017543
+13 27                  7
+13 26         6.08276253
+13 25         6.08276253
+13 24         6.32455532
+13 14         3.16227766
+13 22         8.60232527
+13 21         7.81024968
+13 20         7.07106781
+13 19         11.4017543
+13 17         11.7046999
+13 16         11.4017543
+13 15         10.4403065
+13 30                 14
+13 29                 11
+13 23         9.43398113
+13 18          13.453624
+13 28         9.05538514
+13 31         13.0384048
+13 32         4.47213595
+13 33         6.32455532
+13 34                 10
+13 35         11.4017543
+13 36         12.8062485
+13 37         11.4017543
+14 13         3.16227766
+14 26                  3
+14 25                  3
+14 24         3.16227766
+14 22         5.65685425
+14 21                  5
+14 20         4.47213595
+14 19         8.94427191
+14 17         10.0498756
+14 16                 10
+14 15                  9
+14 23         6.40312424
+14 18         10.8166538
+14 27         4.12310563
+14 28         6.32455532
+14 29         8.06225775
+14 30          11.045361
+14 31          10.198039
+14 32         3.16227766
+14 33         4.24264069
+14 34         8.60232527
+14 35                 10
+14 36         11.4017543
+14 37         10.7703296
+15 13         10.4403065
+15 14                  9
+15 17         1.41421356
+15 16                  1
+15 18                  6
+15 19         4.12310563
+15 20         5.38516481
+15 21         5.83095189
+15 22         6.40312424
+15 23         7.07106781
+15 24         8.54400375
+15 25         9.48683298
+15 26         9.48683298
+15 27         10.7703296
+15 28         12.5299641
+15 29         12.8062485
+15 32         12.0415946
+15 33         12.3693169
+16 13         11.4017543
+16 14                 10
+16 15                  1
+16 17                  1
+16 18         6.08276253
+16 19         4.47213595
+16 20         6.32455532
+16 21         6.70820393
+16 22         7.21110255
+16 23         7.81024968
+16 24         9.48683298
+16 25         10.4403065
+16 26         10.4403065
+16 27         11.7046999
+16 28         13.4164079
+16 29         13.6014705
+16 32         13.0384048
+16 33         13.3416641
+17 13         11.7046999
+17 14         10.0498756
+17 15         1.41421356
+17 16                  1
+17 18         5.09901951
+17 19         3.60555128
+17 20         6.08276253
+17 21         6.32455532
+17 22         6.70820393
+17 23         7.21110255
+17 24         9.21954446
+17 25          10.198039
+17 26          10.198039
+17 27         11.4017543
+17 28                 13
+17 29         13.0384048
+17 32                 13
+17 33         13.1529464
+18 79          12.083046
+18 13          13.453624
+18 14         10.8166538
+18 15                  6
+18 17         5.09901951
+18 16         6.08276253
+18 19         2.23606798
+18 20         6.40312424
+18 21         5.83095189
+18 22         5.38516481
+18 23         5.09901951
+18 24         8.54400375
+18 25         9.48683298
+18 26         9.48683298
+18 27          10.198039
+18 28                 11
+18 29          10.198039
+18 30         11.1803399
+18 31         11.7046999
+18 32                 13
+18 33         12.3693169
+18 77         12.8062485
+18 80         12.6491106
+18 81         13.1529464
+19 79         13.6014705
+19 13         11.4017543
+19 14         8.94427191
+19 17         3.60555128
+19 16         4.47213595
+19 15         4.12310563
+19 18         2.23606798
+19 20         4.47213595
+19 21         4.12310563
+19 22                  4
+19 23         4.12310563
+19 24         7.07106781
+19 25         8.06225775
+19 26         8.06225775
+19 27                  9
+19 28          10.198039
+19 29          9.8488578
+19 30         11.4017543
+19 31         11.6619038
+19 32         11.4017543
+19 33          11.045361
+19 77          13.892444
+20 13         7.07106781
+20 14         4.47213595
+20 22                  2
+20 21                  1
+20 19         4.47213595
+20 17         6.08276253
+20 16         6.32455532
+20 15         5.38516481
+20 23                  3
+20 18         6.40312424
+20 24         3.16227766
+20 25         4.12310563
+20 26         4.12310563
+20 27         5.38516481
+20 28         7.21110255
+20 29         7.81024968
+20 30         10.2956301
+20 31                 10
+20 32         7.07106781
+20 33         7.07106781
+20 34         11.4017543
+20 35         12.6491106
+20 36         13.9283883
+20 37         14.1421356
+21 13         7.81024968
+21 14                  5
+21 20                  1
+21 22                  1
+21 19         4.12310563
+21 17         6.32455532
+21 16         6.70820393
+21 15         5.83095189
+21 23                  2
+21 18         5.83095189
+21 79                 14
+21 24                  3
+21 25                  4
+21 26                  4
+21 27         5.09901951
+21 28         6.70820393
+21 29         7.07106781
+21 30         9.43398113
+21 31         9.21954446
+21 32         7.28010989
+21 33                  7
+21 34         11.1803399
+21 35         12.3693169
+21 36         13.6014705
+21 37         14.0356688
+21 77         13.3416641
+22 13         8.60232527
+22 14         5.65685425
+22 20                  2
+22 21                  1
+22 19                  4
+22 17         6.70820393
+22 16         7.21110255
+22 15         6.40312424
+22 23                  1
+22 18         5.38516481
+22 79                 13
+22 24         3.16227766
+22 25         4.12310563
+22 26         4.12310563
+22 27                  5
+22 28         6.32455532
+22 29         6.40312424
+22 30         8.60232527
+22 31         8.48528137
+22 32         7.61577311
+22 33         7.07106781
+22 34          11.045361
+22 35         12.1655251
+22 36         13.3416641
+22 37                 14
+22 77         12.3693169
+23 13         9.43398113
+23 14         6.40312424
+23 20                  3
+23 21                  2
+23 22                  1
+23 19         4.12310563
+23 17         7.21110255
+23 16         7.81024968
+23 15         7.07106781
+23 18         5.09901951
+23 79                 12
+23 80         13.0384048
+23 24         3.60555128
+23 25         4.47213595
+23 26         4.47213595
+23 27         5.09901951
+23 28         6.08276253
+23 29         5.83095189
+23 30         7.81024968
+23 31         7.81024968
+23 32         8.06225775
+23 33         7.28010989
+23 34                 11
+23 35         12.0415946
+23 36         13.1529464
+23 37         14.0356688
+23 77         11.4017543
+24 13         6.32455532
+24 14         3.16227766
+24 22         3.16227766
+24 21                  3
+24 20         3.16227766
+24 19         7.07106781
+24 17         9.21954446
+24 16         9.48683298
+24 15         8.54400375
+24 77                 13
+24 23         3.60555128
+24 18         8.54400375
+24 25                  1
+24 26                  1
+24 27         2.23606798
+24 28         4.24264069
+24 29         5.38516481
+24 30         8.24621125
+24 31         7.61577311
+24 32         4.47213595
+24 33                  4
+24 34         8.24621125
+24 35         9.48683298
+24 36         10.7703296
+24 37          11.045361
+25 13         6.08276253
+25 14                  3
+25 24                  1
+25 22         4.12310563
+25 21                  4
+25 20         4.12310563
+25 19         8.06225775
+25 17          10.198039
+25 16         10.4403065
+25 15         9.48683298
+25 77         13.0384048
+25 23         4.47213595
+25 18         9.48683298
+25 26     1.41421456e-05
+25 27         1.41421356
+25 28         3.60555128
+25 29         5.09901951
+25 30         8.06225775
+25 31         7.28010989
+25 32         3.60555128
+25 33                  3
+25 34         7.28010989
+25 35         8.54400375
+25 36          9.8488578
+25 37         10.0498756
+25 39         13.3416641
+26 13         6.08276253
+26 14                  3
+26 25     1.41421456e-05
+26 24                  1
+26 22         4.12310563
+26 21                  4
+26 20         4.12310563
+26 19         8.06225775
+26 17          10.198039
+26 16         10.4403065
+26 15         9.48683298
+26 77         13.0384048
+26 23         4.47213595
+26 18         9.48683298
+26 27         1.41421356
+26 28         3.60555128
+26 29         5.09901951
+26 30         8.06225775
+26 31         7.28010989
+26 32         3.60555128
+26 33                  3
+26 34         7.28010989
+26 35         8.54400375
+26 36          9.8488578
+26 37         10.0498756
+26 39         13.3416641
+27 13                  7
+27 26         1.41421356
+27 25         1.41421356
+27 24         2.23606798
+27 14         4.12310563
+27 22                  5
+27 21         5.09901951
+27 20         5.38516481
+27 19                  9
+27 17         11.4017543
+27 16         11.7046999
+27 15         10.7703296
+27 77         12.1655251
+27 30                  7
+27 29                  4
+27 23         5.09901951
+27 18          10.198039
+27 79         13.9283883
+27 28         2.23606798
+27 31         6.08276253
+27 32         3.60555128
+27 33         2.23606798
+27 34         6.08276253
+27 35         7.28010989
+27 36         8.54400375
+27 37                  9
+27 38         13.6014705
+27 39         12.1655251
+27 40         14.1421356
+27 73          13.892444
+27 75         13.3416641
+28 22         6.32455532
+28 21         6.70820393
+28 20         7.21110255
+28 19          10.198039
+28 17                 13
+28 16         13.4164079
+28 15         12.5299641
+28 27         2.23606798
+28 26         3.60555128
+28 25         3.60555128
+28 24         4.24264069
+28 14         6.32455532
+28 13         9.05538514
+28 77         10.4403065
+28 31                  4
+28 30         5.09901951
+28 29         2.23606798
+28 23         6.08276253
+28 18                 11
+28 79         12.5299641
+28 76         13.1529464
+28 80          13.892444
+28 32         5.09901951
+28 33         3.16227766
+28 34         5.09901951
+28 35                  6
+28 36         7.07106781
+28 37         8.24621125
+28 38         11.6619038
+28 39                 11
+28 40                 13
+28 41         14.0356688
+28 42                 14
+28 73         11.6619038
+28 75         11.1803399
+29 13                 11
+29 27                  4
+29 28         2.23606798
+29 22         6.40312424
+29 21         7.07106781
+29 20         7.81024968
+29 19          9.8488578
+29 17         13.0384048
+29 16         13.6014705
+29 15         12.8062485
+29 26         5.09901951
+29 25         5.09901951
+29 24         5.38516481
+29 14         8.06225775
+29 77         8.24621125
+29 30                  3
+29 23         5.83095189
+29 18          10.198039
+29 79         10.2956301
+29 76          11.045361
+29 81         13.6014705
+29 80         11.6619038
+29 31         2.23606798
+29 32         7.28010989
+29 33         5.38516481
+29 34         6.70820393
+29 35         7.28010989
+29 36         8.06225775
+29 37          9.8488578
+29 38         11.7046999
+29 39         12.1655251
+29 40         14.1421356
+29 73         10.6301458
+29 74                 13
+29 75         9.48683298
+30 13                 14
+30 27                  7
+30 28         5.09901951
+30 29                  3
+30 22         8.60232527
+30 21         9.43398113
+30 20         10.2956301
+30 19         11.4017543
+30 26         8.06225775
+30 25         8.06225775
+30 24         8.24621125
+30 14          11.045361
+30 77         5.38516481
+30 23         7.81024968
+30 18         11.1803399
+30 79         7.81024968
+30 76         8.06225775
+30 81         11.3137085
+30 80         9.21954446
+30 31         1.41421356
+30 32          10.198039
+30 33         8.24621125
+30 34         8.48528137
+30 35         8.60232527
+30 36         8.94427191
+30 37         11.4017543
+30 38          11.045361
+30 39                 13
+30 73         8.60232527
+30 74         10.2956301
+30 75         6.70820393
+31 28                  4
+31 22         8.48528137
+31 21         9.21954446
+31 20                 10
+31 19         11.6619038
+31 27         6.08276253
+31 26         7.28010989
+31 25         7.28010989
+31 24         7.61577311
+31 14          10.198039
+31 13         13.0384048
+31 77         6.70820393
+31 30         1.41421356
+31 29         2.23606798
+31 23         7.81024968
+31 18         11.7046999
+31 79         9.21954446
+31 76         9.21954446
+31 81         12.7279221
+31 80         10.6301458
+31 32         9.05538514
+31 33         7.07106781
+31 34         7.07106781
+31 35         7.21110255
+31 36         7.61577311
+31 37                 10
+31 38          10.198039
+31 39         11.7046999
+31 40         13.6014705
+31 73         8.48528137
+31 74         10.7703296
+31 75         7.28010989
+32 33                  2
+32 27         3.60555128
+32 26         3.60555128
+32 25         3.60555128
+32 24         4.47213595
+32 14         3.16227766
+32 13         4.47213595
+32 22         7.61577311
+32 21         7.28010989
+32 20         7.07106781
+32 19         11.4017543
+32 17                 13
+32 16         13.0384048
+32 15         12.0415946
+32 31         9.05538514
+32 30          10.198039
+32 29         7.28010989
+32 28         5.09901951
+32 23         8.06225775
+32 18                 13
+32 34         5.65685425
+32 35         7.07106781
+32 36         8.48528137
+32 37         7.61577311
+32 39         11.1803399
+32 40                 13
+32 41         13.6014705
+32 42         13.9283883
+33 32                  2
+33 27         2.23606798
+33 26                  3
+33 25                  3
+33 24                  4
+33 14         4.24264069
+33 13         6.32455532
+33 22         7.07106781
+33 21                  7
+33 20         7.07106781
+33 19          11.045361
+33 17         13.1529464
+33 16         13.3416641
+33 15         12.3693169
+33 77         13.6014705
+33 31         7.07106781
+33 30         8.24621125
+33 29         5.38516481
+33 28         3.16227766
+33 23         7.28010989
+33 18         12.3693169
+33 34         4.47213595
+33 35         5.83095189
+33 36         7.21110255
+33 37         7.07106781
+33 38         12.7279221
+33 39         10.4403065
+33 40         12.3693169
+33 41         13.1529464
+33 42         13.3416641
+33 73         13.9283883
+33 75         14.0356688
+34 12         13.6014705
+34 22          11.045361
+34 21         11.1803399
+34 20         11.4017543
+34 33         4.47213595
+34 32         5.65685425
+34 27         6.08276253
+34 26         7.28010989
+34 25         7.28010989
+34 24         8.24621125
+34 14         8.60232527
+34 13                 10
+34 77         13.6014705
+34 75         12.3693169
+34 31         7.07106781
+34 30         8.48528137
+34 29         6.70820393
+34 28         5.09901951
+34 23                 11
+34 35         1.41421356
+34 36         2.82842712
+34 37         3.16227766
+34 38         8.60232527
+34 39         6.08276253
+34 40         8.06225775
+34 41                  9
+34 42         9.05538514
+34 43          10.198039
+34 44         10.0498756
+34 45         12.3693169
+34 46         13.1529464
+34 47         14.0356688
+34 67          13.892444
+34 73          11.045361
+35 12          13.453624
+35 22         12.1655251
+35 21         12.3693169
+35 20         12.6491106
+35 33         5.83095189
+35 32         7.07106781
+35 27         7.28010989
+35 26         8.54400375
+35 25         8.54400375
+35 24         9.48683298
+35 14                 10
+35 13         11.4017543
+35 77          13.453624
+35 75         11.7046999
+35 73                 10
+35 34         1.41421356
+35 31         7.21110255
+35 30         8.60232527
+35 29         7.28010989
+35 28                  6
+35 23         12.0415946
+35 74         14.1421356
+35 36         1.41421356
+35 37         2.82842712
+35 38         7.21110255
+35 39                  5
+35 40                  7
+35 41         8.06225775
+35 42                  8
+35 43         9.05538514
+35 44                  9
+35 45         11.7046999
+35 46         12.3693169
+35 47         13.1529464
+35 67         12.5299641
+35 68         13.0384048
+36 11         14.1421356
+36 12          13.453624
+36 22         13.3416641
+36 21         13.6014705
+36 20         13.9283883
+36 33         7.21110255
+36 32         8.48528137
+36 27         8.54400375
+36 26          9.8488578
+36 25          9.8488578
+36 24         10.7703296
+36 14         11.4017543
+36 13         12.8062485
+36 77          13.453624
+36 75         11.1803399
+36 73         9.05538514
+36 35         1.41421356
+36 34         2.82842712
+36 31         7.61577311
+36 30         8.94427191
+36 29         8.06225775
+36 28         7.07106781
+36 23         13.1529464
+36 74         13.3416641
+36 37         3.16227766
+36 38         5.83095189
+36 39         4.12310563
+36 40         6.08276253
+36 41         7.28010989
+36 42         7.07106781
+36 43                  8
+36 44         8.06225775
+36 45         11.1803399
+36 46         11.7046999
+36 47         12.3693169
+36 67         11.1803399
+36 68         11.6619038
+37 11         11.4017543
+37 12         10.6301458
+37 33         7.07106781
+37 32         7.61577311
+37 27                  9
+37 26         10.0498756
+37 25         10.0498756
+37 24          11.045361
+37 14         10.7703296
+37 13         11.4017543
+37 22                 14
+37 21         14.0356688
+37 20         14.1421356
+37 73         12.1655251
+37 36         3.16227766
+37 35         2.82842712
+37 34         3.16227766
+37 31                 10
+37 30         11.4017543
+37 29          9.8488578
+37 28         8.24621125
+37 23         14.0356688
+37 38         8.24621125
+37 39         3.60555128
+37 40         5.38516481
+37 41         6.08276253
+37 42         6.32455532
+37 43         7.61577311
+37 44         7.28010989
+37 45         9.21954446
+37 46         10.0498756
+37 47                 11
+37 67         12.0415946
+37 68         12.7279221
+38 37         8.24621125
+38 33         12.7279221
+38 27         13.6014705
+38 77         13.6014705
+38 75         9.43398113
+38 73         5.65685425
+38 36         5.83095189
+38 35         7.21110255
+38 34         8.60232527
+38 31          10.198039
+38 30          11.045361
+38 29         11.7046999
+38 28         11.6619038
+38 72         14.0356688
+38 76          13.892444
+38 74                 10
+38 39         6.08276253
+38 40         6.70820393
+38 41         8.06225775
+38 42         7.21110255
+38 43         7.07106781
+38 44         7.81024968
+38 45         12.2065556
+38 46         12.0415946
+38 47         12.0415946
+38 64         12.2065556
+38 67                  7
+38 68         7.07106781
+38 69         14.1421356
+39 11         10.8166538
+39 12         10.2956301
+39 37         3.60555128
+39 33         10.4403065
+39 32         11.1803399
+39 27         12.1655251
+39 26         13.3416641
+39 25         13.3416641
+39 38         6.08276253
+39 73         11.1803399
+39 36         4.12310563
+39 35                  5
+39 34         6.08276253
+39 31         11.7046999
+39 30                 13
+39 29         12.1655251
+39 28                 11
+39 40                  2
+39 41         3.16227766
+39 42                  3
+39 43         4.12310563
+39 44                  4
+39 45         7.21110255
+39 46         7.61577311
+39 47         8.24621125
+39 48         11.7046999
+39 50                 13
+39 67         8.48528137
+39 68         9.21954446
+40 8          13.892444
+40 10         13.0384048
+40 11          9.8488578
+40 12         9.48683298
+40 37         5.38516481
+40 33         12.3693169
+40 32                 13
+40 27         14.1421356
+40 39                  2
+40 38         6.70820393
+40 73         12.2065556
+40 36         6.08276253
+40 35                  7
+40 34         8.06225775
+40 31         13.6014705
+40 29         14.1421356
+40 28                 13
+40 41         1.41421356
+40 42                  1
+40 43         2.23606798
+40 44                  2
+40 45         5.65685425
+40 46         5.83095189
+40 47         6.32455532
+40 48          9.8488578
+40 49         13.0384048
+40 50                 11
+40 67         7.21110255
+40 68         8.06225775
+41 8         12.5299641
+41 9         14.1421356
+41 10         11.6619038
+41 11         8.54400375
+41 12         8.24621125
+41 37         6.08276253
+41 33         13.1529464
+41 32         13.6014705
+41 42                  1
+41 40         1.41421356
+41 39         3.16227766
+41 38         8.06225775
+41 73         13.6014705
+41 36         7.28010989
+41 35         8.06225775
+41 34                  9
+41 28         14.0356688
+41 43         2.23606798
+41 44         1.41421356
+41 45         4.24264069
+41 46         4.47213595
+41 47         5.09901951
+41 48         8.54400375
+41 49         11.6619038
+41 50         10.0498756
+41 51         13.9283883
+41 67         7.61577311
+41 68         8.54400375
+42 8         13.0384048
+42 10         12.5299641
+42 11         9.48683298
+42 12         9.21954446
+42 41                  1
+42 37         6.32455532
+42 33         13.3416641
+42 32         13.9283883
+42 40                  1
+42 39                  3
+42 38         7.21110255
+42 73         12.8062485
+42 36         7.07106781
+42 35                  8
+42 34         9.05538514
+42 28                 14
+42 43         1.41421356
+42 44                  1
+42 45                  5
+42 46                  5
+42 47         5.38516481
+42 48         8.94427191
+42 49         12.2065556
+42 50                 10
+42 67         6.70820393
+42 68         7.61577311
+43 8         12.8062485
+43 10                 13
+43 11          10.198039
+43 12         10.0498756
+43 37         7.61577311
+43 42         1.41421356
+43 41         2.23606798
+43 40         2.23606798
+43 39         4.12310563
+43 38         7.07106781
+43 73         12.7279221
+43 36                  8
+43 35         9.05538514
+43 34          10.198039
+43 44                  1
+43 45         5.38516481
+43 46                  5
+43 47                  5
+43 48         8.60232527
+43 49         12.0415946
+43 50         9.05538514
+43 51          13.892444
+43 64                 13
+43 67         5.38516481
+43 68         6.32455532
+44 8         12.2065556
+44 10          12.083046
+44 11         9.21954446
+44 12         9.05538514
+44 37         7.28010989
+44 43                  1
+44 42                  1
+44 41         1.41421356
+44 40                  2
+44 39                  4
+44 38         7.81024968
+44 73          13.453624
+44 36         8.06225775
+44 35                  9
+44 34         10.0498756
+44 45         4.47213595
+44 46         4.24264069
+44 47         4.47213595
+44 48         8.06225775
+44 49         11.4017543
+44 50                  9
+44 51         13.4164079
+44 64         13.9283883
+44 67         6.32455532
+44 68         7.28010989
+45 8         8.54400375
+45 9         9.89949494
+45 10         7.61577311
+45 11                  5
+45 12         5.09901951
+45 37         9.21954446
+45 68                 11
+45 67                 10
+45 44         4.47213595
+45 43         5.38516481
+45 42                  5
+45 41         4.24264069
+45 40         5.65685425
+45 39         7.21110255
+45 38         12.2065556
+45 36         11.1803399
+45 35         11.7046999
+45 34         12.3693169
+45 46         1.41421356
+45 47         2.82842712
+45 48                  5
+45 49         7.61577311
+45 50         8.06225775
+45 51          10.198039
+46 8         8.06225775
+46 9                 10
+46 10         8.24621125
+46 45         1.41421356
+46 12         6.32455532
+46 11         6.08276253
+46 37         10.0498756
+46 68         10.0498756
+46 67         9.05538514
+46 44         4.24264069
+46 43                  5
+46 42                  5
+46 41         4.47213595
+46 40         5.83095189
+46 39         7.61577311
+46 38         12.0415946
+46 36         11.7046999
+46 35         12.3693169
+46 34         13.1529464
+46 47         1.41421356
+46 48         4.12310563
+46 49         7.21110255
+46 50         6.70820393
+46 51         9.48683298
+47 7          13.892444
+47 8         7.81024968
+47 9         10.2956301
+47 10         9.05538514
+47 46         1.41421356
+47 45         2.82842712
+47 12         7.61577311
+47 11         7.28010989
+47 37                 11
+47 68         9.21954446
+47 67         8.24621125
+47 44         4.47213595
+47 43                  5
+47 42         5.38516481
+47 41         5.09901951
+47 40         6.32455532
+47 39         8.24621125
+47 38         12.0415946
+47 36         12.3693169
+47 35         13.1529464
+47 34         14.0356688
+47 48         3.60555128
+47 49         7.07106781
+47 50         5.38516481
+47 51         8.94427191
+48 6         13.6014705
+48 7         10.2956301
+48 8         4.24264069
+48 9         7.28010989
+48 47         3.60555128
+48 46         4.12310563
+48 45                  5
+48 12         7.81024968
+48 11         7.07106781
+48 10         7.28010989
+48 68          12.083046
+48 67         11.1803399
+48 44         8.06225775
+48 43         8.60232527
+48 42         8.94427191
+48 41         8.54400375
+48 40          9.8488578
+48 39         11.7046999
+48 49         3.60555128
+48 50         4.47213595
+48 51         5.38516481
+49 6         10.2956301
+49 7         7.28010989
+49 8                  1
+49 9                  4
+49 48         3.60555128
+49 47         7.07106781
+49 46         7.21110255
+49 45         7.61577311
+49 12         8.24621125
+49 11         7.28010989
+49 10         5.65685425
+49 50                  7
+49 44         11.4017543
+49 43         12.0415946
+49 42         12.2065556
+49 41         11.6619038
+49 40         13.0384048
+49 51         3.16227766
+50 7         11.4017543
+50 8         7.07106781
+50 9                 11
+50 49                  7
+50 47         5.38516481
+50 46         6.70820393
+50 45         8.06225775
+50 12         12.0415946
+50 11         11.4017543
+50 10         11.7046999
+50 48         4.47213595
+50 68         9.89949494
+50 67         9.21954446
+50 44                  9
+50 43         9.05538514
+50 42                 10
+50 41         10.0498756
+50 40                 11
+50 39                 13
+50 64         13.6014705
+50 51         6.70820393
+50 52         12.0415946
+50 53         14.0356688
+50 61         12.2065556
+51 6         8.48528137
+51 7                  5
+51 49         3.16227766
+51 48         5.38516481
+51 9         5.83095189
+51 8         2.23606798
+51 47         8.94427191
+51 46         9.48683298
+51 45          10.198039
+51 12         11.4017543
+51 11         10.4403065
+51 10         8.60232527
+51 50         6.70820393
+51 44         13.4164079
+51 43          13.892444
+51 41         13.9283883
+51 52         11.4017543
+51 53          12.083046
+51 54         13.4164079
+52 3         14.1421356
+52 5          13.892444
+52 51         11.4017543
+52 8         13.6014705
+52 7         11.1803399
+52 6         13.3416641
+52 61         10.2956301
+52 50         12.0415946
+52 53         2.82842712
+52 54         3.16227766
+52 55         5.38516481
+52 56         6.08276253
+52 57         11.3137085
+52 58         13.6014705
+52 59         13.3416641
+53 3         11.3137085
+53 4         12.2065556
+53 5         11.1803399
+53 51          12.083046
+53 7         10.6301458
+53 6          12.083046
+53 52         2.82842712
+53 61         13.0384048
+53 50         14.0356688
+53 54         1.41421356
+53 55                  3
+53 56         4.12310563
+53 57         8.48528137
+53 58         10.8166538
+53 59          12.083046
+54 3         11.4017543
+54 4         12.5299641
+54 5         11.7046999
+54 51         13.4164079
+54 7         12.0415946
+54 6         13.4164079
+54 53         1.41421356
+54 52         3.16227766
+54 61         12.8062485
+54 55         2.23606798
+54 56                  3
+54 57         8.60232527
+54 58         10.6301458
+54 59         10.7703296
+55 3         9.43398113
+55 4         10.7703296
+55 5          10.198039
+55 7         12.8062485
+55 6         13.6014705
+55 54         2.23606798
+55 53                  3
+55 52         5.38516481
+55 56         1.41421356
+55 57         6.70820393
+55 58         8.48528137
+55 59         9.43398113
+56 3          9.8488578
+56 4         11.4017543
+56 5          11.045361
+56 55         1.41421356
+56 54                  3
+56 53         4.12310563
+56 52         6.08276253
+56 57         7.28010989
+56 58         8.60232527
+56 59         8.06225775
+57 2         7.61577311
+57 3         2.82842712
+57 4         4.12310563
+57 5         4.12310563
+57 7         13.1529464
+57 6          12.083046
+57 56         7.28010989
+57 55         6.70820393
+57 54         8.60232527
+57 53         8.48528137
+57 52         11.3137085
+57 58                  3
+57 59          12.083046
+58 2                  7
+58 57                  3
+58 5         5.65685425
+58 4         4.47213595
+58 3         2.23606798
+58 56         8.60232527
+58 55         8.48528137
+58 54         10.6301458
+58 53         10.8166538
+58 52         13.6014705
+58 59         11.1803399
+59 58         11.1803399
+59 57          12.083046
+59 3         13.3416641
+59 56         8.06225775
+59 55         9.43398113
+59 54         10.7703296
+59 53          12.083046
+59 52         13.3416641
+60 61         9.21954446
+60 96         11.1803399
+60 63         11.1803399
+60 62         7.07106781
+60 97         10.4403065
+60 98         12.3693169
+60 99          12.083046
+60 100          12.083046
+61 52         10.2956301
+61 53         13.0384048
+61 54         12.8062485
+61 60         9.21954446
+61 50         12.2065556
+61 63         11.4017543
+61 62         9.43398113
+61 64         11.4017543
+62 60         7.07106781
+62 61         9.43398113
+62 96         6.70820393
+62 63         4.12310563
+62 93         10.8166538
+62 69         14.1421356
+62 66         8.24621125
+62 65         8.06225775
+62 64         7.81024968
+62 95                 10
+62 94         9.89949494
+63 60         11.1803399
+63 61         11.4017543
+63 62         4.12310563
+63 68         11.1803399
+63 67          12.083046
+63 93         8.60232527
+63 72         13.3416641
+63 71                 13
+63 70         11.7046999
+63 69         10.4403065
+63 66                  5
+63 65         5.83095189
+63 64         4.47213595
+63 95         9.21954446
+63 94         8.54400375
+63 96         7.07106781
+64 50         13.6014705
+64 61         11.4017543
+64 62         7.81024968
+64 63         4.47213595
+64 68         6.70820393
+64 67         7.61577311
+64 44         13.9283883
+64 43                 13
+64 38         12.2065556
+64 93         11.4017543
+64 72         13.0384048
+64 71          13.453624
+64 70         12.0415946
+64 69         10.6301458
+64 66         7.28010989
+64 65         9.05538514
+64 95                 13
+64 94         12.0415946
+64 96         11.4017543
+65 62         8.06225775
+65 63         5.83095189
+65 64         9.05538514
+65 93         2.82842712
+65 72          10.198039
+65 71                  9
+65 70         8.06225775
+65 69         7.28010989
+65 66         2.23606798
+65 94                  3
+65 92         11.1803399
+65 91         13.0384048
+65 95         4.12310563
+65 96         4.47213595
+66 62         8.24621125
+66 63                  5
+66 64         7.28010989
+66 65         2.23606798
+66 68         13.0384048
+66 67         14.0356688
+66 93         4.12310563
+66 72                  9
+66 71         8.24621125
+66 70         7.07106781
+66 69                  6
+66 92         12.6491106
+66 91          13.453624
+66 94         5.09901951
+66 95         6.32455532
+66 96         6.40312424
+67 45                 10
+67 46         9.05538514
+67 47         8.24621125
+67 48         11.1803399
+67 50         9.21954446
+67 63          12.083046
+67 64         7.61577311
+67 66         14.0356688
+67 37         12.0415946
+67 68                  1
+67 44         6.32455532
+67 43         5.38516481
+67 42         6.70820393
+67 41         7.61577311
+67 40         7.21110255
+67 39         8.48528137
+67 38                  7
+67 73         11.7046999
+67 36         11.1803399
+67 35         12.5299641
+67 34          13.892444
+68 45                 11
+68 46         10.0498756
+68 47         9.21954446
+68 48          12.083046
+68 50         9.89949494
+68 63         11.1803399
+68 64         6.70820393
+68 66         13.0384048
+68 67                  1
+68 37         12.7279221
+68 44         7.28010989
+68 43         6.32455532
+68 42         7.61577311
+68 41         8.54400375
+68 40         8.06225775
+68 39         9.21954446
+68 38         7.07106781
+68 73         11.4017543
+68 36         11.6619038
+68 35         13.0384048
+68 69         13.9283883
+69 62         14.1421356
+69 63         10.4403065
+69 64         10.6301458
+69 65         7.28010989
+69 66                  6
+69 68         13.9283883
+69 75          13.453624
+69 73         11.6619038
+69 38         14.1421356
+69 72                  3
+69 71         2.82842712
+69 70         1.41421356
+69 74                 10
+69 91          9.8488578
+69 90         10.8166538
+69 89         13.0384048
+69 88         9.21954446
+69 87         9.43398113
+69 92         12.1655251
+69 93         6.40312424
+69 94         8.60232527
+69 95                 10
+69 96         11.7046999
+70 63         11.7046999
+70 64         12.0415946
+70 65         8.06225775
+70 66         7.07106781
+70 69         1.41421356
+70 75          13.453624
+70 73          12.083046
+70 72         2.23606798
+70 71         1.41421356
+70 74         9.89949494
+70 91         8.54400375
+70 90         9.43398113
+70 89         11.6619038
+70 88         7.81024968
+70 87         8.06225775
+70 84         14.1421356
+70 92         11.4017543
+70 93         6.70820393
+70 94         8.94427191
+70 95         10.2956301
+70 96         12.3693169
+71 63                 13
+71 64          13.453624
+71 65                  9
+71 66         8.24621125
+71 69         2.82842712
+71 70         1.41421356
+71 75         13.6014705
+71 73         12.6491106
+71 72         2.23606798
+71 78         14.1421356
+71 74                 10
+71 91         7.28010989
+71 90         8.06225775
+71 89         10.2956301
+71 88         6.40312424
+71 87         6.70820393
+71 85          13.892444
+71 84         12.7279221
+71 92         10.7703296
+71 93         7.28010989
+71 94         9.48683298
+71 95         10.7703296
+71 96         13.1529464
+72 38         14.0356688
+72 63         13.3416641
+72 64         13.0384048
+72 65          10.198039
+72 66                  9
+72 69                  3
+72 70         2.23606798
+72 71         2.23606798
+72 75         11.4017543
+72 73         10.4403065
+72 78                 13
+72 76         13.0384048
+72 74         7.81024968
+72 91         9.05538514
+72 90         9.48683298
+72 89         11.7046999
+72 88         7.21110255
+72 87         7.07106781
+72 84         13.6014705
+72 92                 13
+72 93         8.94427191
+72 94         11.1803399
+72 95         12.5299641
+73 35                 10
+73 36         9.05538514
+73 37         12.1655251
+73 38         5.65685425
+73 39         11.1803399
+73 40         12.2065556
+73 41         13.6014705
+73 42         12.8062485
+73 43         12.7279221
+73 44          13.453624
+73 67         11.7046999
+73 68         11.4017543
+73 69         11.6619038
+73 70          12.083046
+73 71         12.6491106
+73 72         10.4403065
+73 33         13.9283883
+73 27          13.892444
+73 77                  9
+73 75         4.12310563
+73 34          11.045361
+73 31         8.48528137
+73 30         8.60232527
+73 29         10.6301458
+73 28         11.6619038
+73 79         12.0415946
+73 78         14.1421356
+73 76         8.54400375
+73 74         4.47213595
+73 80         13.1529464
+74 35         14.1421356
+74 36         13.3416641
+74 38                 10
+74 69                 10
+74 70         9.89949494
+74 71                 10
+74 72         7.81024968
+74 73         4.47213595
+74 79         10.4403065
+74 77         8.06225775
+74 75         3.60555128
+74 31         10.7703296
+74 30         10.2956301
+74 29                 13
+74 78                 10
+74 76         6.08276253
+74 81         13.0384048
+74 80         11.1803399
+74 87                 11
+74 88         12.0415946
+75 34         12.3693169
+75 35         11.7046999
+75 36         11.1803399
+75 38         9.43398113
+75 69          13.453624
+75 70          13.453624
+75 71         13.6014705
+75 72         11.4017543
+75 73         4.12310563
+75 74         3.60555128
+75 33         14.0356688
+75 27         13.3416641
+75 77         5.09901951
+75 31         7.28010989
+75 30         6.70820393
+75 29         9.48683298
+75 28         11.1803399
+75 79                  8
+75 78         10.8166538
+75 76         4.47213595
+75 81         11.1803399
+75 80         9.05538514
+76 28         13.1529464
+76 29          11.045361
+76 30         8.06225775
+76 31         9.21954446
+76 38          13.892444
+76 72         13.0384048
+76 73         8.54400375
+76 74         6.08276253
+76 75         4.47213595
+76 79         4.47213595
+76 77         3.16227766
+76 78         7.28010989
+76 81                  7
+76 80         5.09901951
+76 87         13.4164079
+77 24                 13
+77 25         13.0384048
+77 26         13.0384048
+77 27         12.1655251
+77 28         10.4403065
+77 29         8.24621125
+77 30         5.38516481
+77 31         6.70820393
+77 33         13.6014705
+77 34         13.6014705
+77 35          13.453624
+77 36          13.453624
+77 38         13.6014705
+77 73                  9
+77 74         8.06225775
+77 75         5.09901951
+77 76         3.16227766
+77 22         12.3693169
+77 21         13.3416641
+77 19          13.892444
+77 23         11.4017543
+77 18         12.8062485
+77 79         3.16227766
+77 78         10.0498756
+77 81         6.70820393
+77 80         4.47213595
+78 71         14.1421356
+78 72                 13
+78 73         14.1421356
+78 74                 10
+78 75         10.8166538
+78 76         7.28010989
+78 77         10.0498756
+78 79         9.21954446
+78 81         8.60232527
+78 80         8.54400375
+78 84          12.083046
+78 87         9.43398113
+78 88         10.8166538
+78 90          13.453624
+80 79         1.41421356
+80 23         13.0384048
+80 28          13.892444
+80 29         11.6619038
+80 30         9.21954446
+80 31         10.6301458
+80 73         13.1529464
+80 74         11.1803399
+80 75         9.05538514
+80 76         5.09901951
+80 77         4.47213595
+80 78         8.54400375
+80 18         12.6491106
+80 81         2.23606798
+81 79         3.60555128
+81 29         13.6014705
+81 30         11.3137085
+81 31         12.7279221
+81 74         13.0384048
+81 75         11.1803399
+81 76                  7
+81 77         6.70820393
+81 78         8.60232527
+81 80         2.23606798
+81 18         13.1529464
+82 141         13.9283883
+82 83         4.12310563
+82 137         13.0384048
+83 82         4.12310563
+83 137         12.3693169
+84 70         14.1421356
+84 71         12.7279221
+84 72         13.6014705
+84 78          12.083046
+84 85         3.60555128
+84 86         5.83095189
+84 87         6.70820393
+84 88         6.40312424
+84 89                  4
+84 90         5.38516481
+84 91         7.28010989
+84 92         13.0384048
+85 71          13.892444
+85 84         3.60555128
+85 86         2.23606798
+85 87         9.05538514
+85 88         8.24621125
+85 89         3.60555128
+85 90         5.83095189
+85 91         7.07106781
+85 92         11.1803399
+86 85         2.23606798
+86 84         5.83095189
+86 129         13.0384048
+86 87                 11
+86 88         10.0498756
+86 89         5.09901951
+86 90         7.28010989
+86 91         8.06225775
+86 92         10.7703296
+86 126         12.8062485
+86 127         12.7279221
+87 69         9.43398113
+87 70         8.06225775
+87 71         6.70820393
+87 72         7.07106781
+87 74                 11
+87 78         9.43398113
+87 76         13.4164079
+87 86                 11
+87 85         9.05538514
+87 84         6.70820393
+87 88         1.41421356
+87 89         6.08276253
+87 90         4.47213595
+87 91         5.65685425
+87 92         12.2065556
+87 93         13.0384048
+88 69         9.21954446
+88 70         7.81024968
+88 71         6.40312424
+88 72         7.21110255
+88 78         10.8166538
+88 74         12.0415946
+88 89                  5
+88 87         1.41421356
+88 86         10.0498756
+88 85         8.24621125
+88 84         6.40312424
+88 90         3.16227766
+88 91         4.24264069
+88 92         10.8166538
+88 93         12.1655251
+88 94         14.0356688
+89 69         13.0384048
+89 70         11.6619038
+89 71         10.2956301
+89 72         11.7046999
+89 88                  5
+89 87         6.08276253
+89 86         5.09901951
+89 85         3.60555128
+89 84                  4
+89 90         2.23606798
+89 91         3.60555128
+89 92         9.05538514
+89 93          13.892444
+90 69         10.8166538
+90 70         9.43398113
+90 71         8.06225775
+90 72         9.48683298
+90 78          13.453624
+90 89         2.23606798
+90 88         3.16227766
+90 87         4.47213595
+90 86         7.28010989
+90 85         5.83095189
+90 84         5.38516481
+90 91                  2
+90 92         8.54400375
+90 93          12.083046
+90 94         13.6014705
+91 65         13.0384048
+91 66          13.453624
+91 69          9.8488578
+91 70         8.54400375
+91 71         7.28010989
+91 72         9.05538514
+91 90                  2
+91 89         3.60555128
+91 88         4.24264069
+91 87         5.65685425
+91 86         8.06225775
+91 85         7.07106781
+91 84         7.28010989
+91 92         6.70820393
+91 93         10.2956301
+91 94         11.7046999
+91 95         12.3693169
+92 65         11.1803399
+92 66         12.6491106
+92 72                 13
+92 71         10.7703296
+92 70         11.4017543
+92 69         12.1655251
+92 91         6.70820393
+92 90         8.54400375
+92 89         9.05538514
+92 88         10.8166538
+92 87         12.2065556
+92 86         10.7703296
+92 85         11.1803399
+92 84         13.0384048
+92 127         13.0384048
+92 126                 12
+92 93         8.54400375
+92 94         8.60232527
+92 95         8.48528137
+92 96         12.0415946
+92 124          12.083046
+92 125         13.9283883
+93 62         10.8166538
+93 63         8.60232527
+93 64         11.4017543
+93 65         2.82842712
+93 66         4.12310563
+93 72         8.94427191
+93 71         7.28010989
+93 70         6.70820393
+93 69         6.40312424
+93 92         8.54400375
+93 91         10.2956301
+93 90          12.083046
+93 89          13.892444
+93 88         12.1655251
+93 87         13.0384048
+93 94         2.23606798
+93 95         3.60555128
+93 96                  6
+94 62         9.89949494
+94 63         8.54400375
+94 64         12.0415946
+94 65                  3
+94 93         2.23606798
+94 72         11.1803399
+94 71         9.48683298
+94 70         8.94427191
+94 69         8.60232527
+94 66         5.09901951
+94 92         8.60232527
+94 91         11.7046999
+94 90         13.6014705
+94 88         14.0356688
+94 95         1.41421356
+94 96         4.12310563
+95 62                 10
+95 63         9.21954446
+95 64                 13
+95 93         3.60555128
+95 72         12.5299641
+95 71         10.7703296
+95 70         10.2956301
+95 69                 10
+95 66         6.32455532
+95 65         4.12310563
+95 94         1.41421356
+95 92         8.48528137
+95 91         12.3693169
+95 96         3.60555128
+96 60         11.1803399
+96 62         6.70820393
+96 63         7.07106781
+96 93                  6
+96 71         13.1529464
+96 70         12.3693169
+96 69         11.7046999
+96 66         6.40312424
+96 65         4.47213595
+96 64         11.4017543
+96 95         3.60555128
+96 94         4.12310563
+96 92         12.0415946
+96 97         13.9283883
+97 96         13.9283883
+97 60         10.4403065
+97 98                  2
+97 99         2.23606798
+97 100         2.23606798
+97 101         5.65685425
+97 102         6.40312424
+97 103         8.48528137
+97 104         8.48528137
+97 115                 10
+97 116         11.7046999
+97 117         8.24621125
+98 97                  2
+98 60         12.3693169
+98 99         2.23606798
+98 100         2.23606798
+98 101         4.47213595
+98 102         5.38516481
+98 103         7.21110255
+98 104         7.21110255
+98 112         13.0384048
+98 115         8.48528137
+98 116          9.8488578
+98 117         6.32455532
+99 60          12.083046
+99 98         2.23606798
+99 97         2.23606798
+99 117                  7
+99 100     1.41421456e-05
+99 101         3.60555128
+99 102         4.24264069
+99 103         6.40312424
+99 104         6.40312424
+99 112         12.0415946
+99 115         8.06225775
+99 116          10.198039
+100 60          12.083046
+100 99     1.41421456e-05
+100 98         2.23606798
+100 97         2.23606798
+100 117                  7
+100 101         3.60555128
+100 102         4.24264069
+100 103         6.40312424
+100 104         6.40312424
+100 112         12.0415946
+100 115         8.06225775
+100 116          10.198039
+101 100         3.60555128
+101 99         3.60555128
+101 98         4.47213595
+101 97         5.65685425
+101 117         4.47213595
+101 116                  7
+101 102                  1
+101 103         2.82842712
+101 104         2.82842712
+101 105         13.3416641
+101 111         12.0415946
+101 112         8.60232527
+101 113         12.5299641
+101 114         11.1803399
+101 115         4.47213595
+102 101                  1
+102 100         4.24264069
+102 99         4.24264069
+102 98         5.38516481
+102 97         6.40312424
+102 117                  5
+102 116         7.07106781
+102 103         2.23606798
+102 104         2.23606798
+102 105         12.3693169
+102 106         13.3416641
+102 107         13.6014705
+102 111         11.4017543
+102 112         7.81024968
+102 113          12.083046
+102 114         10.7703296
+102 115         4.12310563
+103 102         2.23606798
+103 101         2.82842712
+103 100         6.40312424
+103 99         6.40312424
+103 98         7.21110255
+103 97         8.48528137
+103 117         4.47213595
+103 116         5.38516481
+103 115                  2
+103 104     1.41421456e-05
+103 105          12.083046
+103 106                 13
+103 107         13.4164079
+103 110         12.6491106
+103 111         9.21954446
+103 112         5.83095189
+103 113          9.8488578
+103 114         8.54400375
+104 103     1.41421456e-05
+104 102         2.23606798
+104 101         2.82842712
+104 100         6.40312424
+104 99         6.40312424
+104 98         7.21110255
+104 97         8.48528137
+104 117         4.47213595
+104 116         5.38516481
+104 115                  2
+104 105          12.083046
+104 106                 13
+104 107         13.4164079
+104 110         12.6491106
+104 111         9.21954446
+104 112         5.83095189
+104 113          9.8488578
+104 114         8.54400375
+105 104          12.083046
+105 103          12.083046
+105 102         12.3693169
+105 101         13.3416641
+105 111                 13
+105 115         13.0384048
+105 112                 10
+105 106                  1
+105 107         1.41421356
+105 108         4.12310563
+105 109                  5
+105 110         9.05538514
+106 105                  1
+106 104                 13
+106 103                 13
+106 102         13.3416641
+106 111         13.4164079
+106 110                  9
+106 115          13.892444
+106 112         10.6301458
+106 107                  1
+106 108         3.16227766
+106 109                  4
+107 106                  1
+107 105         1.41421356
+107 104         13.4164079
+107 103         13.4164079
+107 102         13.6014705
+107 110                 10
+107 112         11.4017543
+107 108                  3
+107 109         4.12310563
+108 107                  3
+108 106         3.16227766
+108 105         4.12310563
+108 110         10.4403065
+108 112          13.453624
+108 109         1.41421356
+109 108         1.41421356
+109 107         4.12310563
+109 106                  4
+109 105                  5
+109 110          9.8488578
+109 112         13.6014705
+110 106                  9
+110 107                 10
+110 108         10.4403065
+110 109          9.8488578
+110 104         12.6491106
+110 103         12.6491106
+110 105         9.05538514
+110 111         6.70820393
+110 116         14.0356688
+110 115         12.1655251
+110 114          9.8488578
+110 113         9.43398113
+110 112         7.07106781
+111 105                 13
+111 106         13.4164079
+111 110         6.70820393
+111 104         9.21954446
+111 103         9.21954446
+111 102         11.4017543
+111 101         12.0415946
+111 117         11.1803399
+111 116         8.24621125
+111 115         7.81024968
+111 114         3.16227766
+111 113         2.82842712
+111 112         4.12310563
+112 105                 10
+112 106         10.6301458
+112 107         11.4017543
+112 108          13.453624
+112 109         13.6014705
+112 110         7.07106781
+112 111         4.12310563
+112 104         5.83095189
+112 103         5.83095189
+112 102         7.81024968
+112 101         8.60232527
+112 100         12.0415946
+112 99         12.0415946
+112 98         13.0384048
+112 117         9.05538514
+112 116         7.28010989
+112 115         5.09901951
+112 114         5.38516481
+112 113         6.08276253
+113 110         9.43398113
+113 111         2.82842712
+113 112         6.08276253
+113 104          9.8488578
+113 103          9.8488578
+113 102          12.083046
+113 101         12.5299641
+113 117         10.6301458
+113 116         7.21110255
+113 115         8.06225775
+113 114         1.41421356
+113 118         13.6014705
+114 110          9.8488578
+114 111         3.16227766
+114 112         5.38516481
+114 113         1.41421356
+114 104         8.54400375
+114 103         8.54400375
+114 102         10.7703296
+114 101         11.1803399
+114 117         9.21954446
+114 116         5.83095189
+114 115         6.70820393
+114 118          13.453624
+115 103                  2
+115 104                  2
+115 105         13.0384048
+115 106          13.892444
+115 110         12.1655251
+115 111         7.81024968
+115 112         5.09901951
+115 113         8.06225775
+115 114         6.70820393
+115 102         4.12310563
+115 101         4.47213595
+115 100         8.06225775
+115 99         8.06225775
+115 98         8.48528137
+115 97                 10
+115 117                  4
+115 116         3.60555128
+116 101                  7
+116 102         7.07106781
+116 103         5.38516481
+116 104         5.38516481
+116 110         14.0356688
+116 111         8.24621125
+116 112         7.28010989
+116 113         7.21110255
+116 114         5.83095189
+116 115         3.60555128
+116 100          10.198039
+116 99          10.198039
+116 98          9.8488578
+116 97         11.7046999
+116 117         3.60555128
+116 120         13.4164079
+116 119         13.6014705
+116 118                 13
+117 99                  7
+117 100                  7
+117 101         4.47213595
+117 102                  5
+117 103         4.47213595
+117 104         4.47213595
+117 111         11.1803399
+117 112         9.05538514
+117 113         10.6301458
+117 114         9.21954446
+117 115                  4
+117 116         3.60555128
+117 98         6.32455532
+117 97         8.24621125
+117 120          13.453624
+118 113         13.6014705
+118 114          13.453624
+118 116                 13
+118 122         12.2065556
+118 121         9.43398113
+118 120         9.21954446
+118 119         7.21110255
+118 160         4.47213595
+118 159         7.21110255
+118 158         7.07106781
+118 157         10.6301458
+118 156         11.1803399
+118 161                  5
+119 116         13.6014705
+119 118         7.21110255
+119 122                  5
+119 121         2.23606798
+119 120         2.23606798
+119 125         10.4403065
+119 124         11.1803399
+119 157         11.1803399
+119 156         7.81024968
+119 155         12.2065556
+119 154         13.6014705
+119 123         8.54400375
+119 158         9.05538514
+119 159          10.198039
+119 160         6.32455532
+119 161         12.0415946
+120 116         13.4164079
+120 117          13.453624
+120 118         9.21954446
+120 119         2.23606798
+120 122         3.16227766
+120 121         1.41421356
+120 126         14.1421356
+120 125         9.05538514
+120 124         9.48683298
+120 157         13.0384048
+120 156         8.94427191
+120 155         12.7279221
+120 154         14.1421356
+120 123         8.60232527
+120 158         11.1803399
+120 159         12.3693169
+120 160         8.54400375
+120 161         14.1421356
+121 118         9.43398113
+121 119         2.23606798
+121 120         1.41421356
+121 122         2.82842712
+121 127         14.1421356
+121 126         13.3416641
+121 125         8.24621125
+121 124         8.94427191
+121 157                 12
+121 156         7.61577311
+121 155         11.3137085
+121 154         12.7279221
+121 123         7.21110255
+121 158         10.4403065
+121 159         11.7046999
+121 160         8.06225775
+122 118         12.2065556
+122 119                  5
+122 120         3.16227766
+122 121         2.82842712
+122 128         13.3416641
+122 127                 12
+122 126          11.045361
+122 125                  6
+122 124         6.32455532
+122 156         9.05538514
+122 155         11.6619038
+122 154         13.0384048
+122 123         7.21110255
+122 157         14.1421356
+122 158                 13
+122 160         10.8166538
+123 119         8.54400375
+123 120         8.60232527
+123 121         7.21110255
+123 122         7.21110255
+123 128         9.48683298
+123 127                 10
+123 126         9.89949494
+123 125         6.32455532
+123 124         8.24621125
+123 155         4.47213595
+123 154         5.83095189
+123 151         12.5299641
+123 150                 13
+123 156         4.24264069
+123 157                 10
+123 158         10.8166538
+123 159         12.2065556
+123 160         10.4403065
+124 119         11.1803399
+124 120         9.48683298
+124 121         8.94427191
+124 122         6.32455532
+124 123         8.24621125
+124 92          12.083046
+124 128         8.60232527
+124 127         6.32455532
+124 126         5.09901951
+124 125                  2
+124 155                 12
+124 154         13.0384048
+124 156          12.083046
+125 119         10.4403065
+125 120         9.05538514
+125 121         8.24621125
+125 122                  6
+125 123         6.32455532
+125 124                  2
+125 92         13.9283883
+125 128         7.61577311
+125 127                  6
+125 126         5.09901951
+125 155                 10
+125 154          11.045361
+125 156         10.2956301
+126 92                 12
+126 120         14.1421356
+126 121         13.3416641
+126 122          11.045361
+126 123         9.89949494
+126 124         5.09901951
+126 125         5.09901951
+126 86         12.8062485
+126 128         4.47213595
+126 127         1.41421356
+126 129          12.083046
+126 151         13.6014705
+126 150                 13
+126 149         13.0384048
+126 148         13.0384048
+126 147         12.0415946
+126 154         12.6491106
+126 155          12.083046
+126 156         14.1421356
+127 92         13.0384048
+127 121         14.1421356
+127 122                 12
+127 123                 10
+127 124         6.32455532
+127 125                  6
+127 126         1.41421356
+127 86         12.7279221
+127 128         3.16227766
+127 129         10.7703296
+127 151         12.3693169
+127 150         11.7046999
+127 149         11.6619038
+127 148         11.6619038
+127 147         10.6301458
+127 154          12.083046
+127 155         11.6619038
+128 122         13.3416641
+128 123         9.48683298
+128 124         8.60232527
+128 125         7.61577311
+128 126         4.47213595
+128 127         3.16227766
+128 129         9.05538514
+128 151         9.21954446
+128 150         8.54400375
+128 149         8.60232527
+128 148         8.60232527
+128 147         7.81024968
+128 154                 10
+128 155         9.89949494
+128 156         13.4164079
+129 86         13.0384048
+129 126          12.083046
+129 127         10.7703296
+129 128         9.05538514
+129 146         7.21110255
+129 145         9.21954446
+129 134         14.1421356
+129 147                  5
+129 148         7.21110255
+129 149         7.21110255
+129 150         9.21954446
+129 151         10.6301458
+130 136         5.09901951
+130 133         3.16227766
+130 132                  4
+130 131         1.41421356
+130 139                 13
+130 138         9.48683298
+130 137         8.24621125
+130 135         10.4403065
+130 134                  9
+131 130         1.41421356
+131 136         4.47213595
+131 133                  2
+131 140         14.0356688
+131 139         11.7046999
+131 138         8.24621125
+131 137         7.61577311
+131 135         9.21954446
+131 132         3.16227766
+131 134         8.06225775
+132 130                  4
+132 136         7.07106781
+132 133         1.41421356
+132 131         3.16227766
+132 139         12.0415946
+132 138         9.05538514
+132 137                 10
+132 135         6.70820393
+132 134                  5
+132 145         12.8062485
+132 146          13.453624
+133 130         3.16227766
+133 131                  2
+133 132         1.41421356
+133 136         5.65685425
+133 140         14.0356688
+133 139         11.1803399
+133 138                  8
+133 137         8.60232527
+133 135         7.28010989
+133 134         6.08276253
+134 129         14.1421356
+134 130                  9
+134 132                  5
+134 139         12.6491106
+134 138         10.8166538
+134 137         13.6014705
+134 136         11.1803399
+134 133         6.08276253
+134 131         8.06225775
+134 135         3.16227766
+134 142         14.0356688
+134 145         9.43398113
+134 146         10.7703296
+135 130         10.4403065
+135 131         9.21954446
+135 132         6.70820393
+135 133         7.28010989
+135 134         3.16227766
+135 139         10.2956301
+135 138         9.21954446
+135 137                 13
+135 136         11.1803399
+135 142         13.1529464
+135 145         11.7046999
+135 146         13.3416641
+136 130         5.09901951
+136 131         4.47213595
+136 132         7.07106781
+136 133         5.65685425
+136 134         11.1803399
+136 135         11.1803399
+136 140         10.4403065
+136 139         9.21954446
+136 138         5.65685425
+136 137         3.16227766
+137 130         8.24621125
+137 131         7.61577311
+137 132                 10
+137 133         8.60232527
+137 134         13.6014705
+137 135                 13
+137 136         3.16227766
+137 83         12.3693169
+137 82         13.0384048
+137 141         12.1655251
+137 140         8.06225775
+137 139         8.06225775
+137 138         5.09901951
+138 130         9.48683298
+138 131         8.24621125
+138 132         9.05538514
+138 133                  8
+138 134         10.8166538
+138 135         9.21954446
+138 136         5.65685425
+138 137         5.09901951
+138 141         11.4017543
+138 140         6.08276253
+138 139         3.60555128
+139 130                 13
+139 131         11.7046999
+139 132         12.0415946
+139 133         11.1803399
+139 134         12.6491106
+139 135         10.2956301
+139 136         9.21954446
+139 137         8.06225775
+139 138         3.60555128
+139 141         9.43398113
+139 140         4.24264069
+140 131         14.0356688
+140 133         14.0356688
+140 136         10.4403065
+140 137         8.06225775
+140 138         6.08276253
+140 139         4.24264069
+140 141         5.38516481
+141 82         13.9283883
+141 137         12.1655251
+141 138         11.4017543
+141 139         9.43398113
+141 140         5.38516481
+142 134         14.0356688
+142 135         13.1529464
+142 143         2.23606798
+142 144         8.06225775
+142 145         12.7279221
+142 166          13.453624
+142 168         12.8062485
+143 142         2.23606798
+143 144         7.61577311
+143 145         13.6014705
+143 165         12.5299641
+143 166         11.4017543
+143 167         12.8062485
+143 168         10.8166538
+144 143         7.61577311
+144 142         8.06225775
+144 145         8.06225775
+144 146         9.48683298
+144 152         14.0356688
+144 165          9.8488578
+144 166          10.198039
+144 167         11.4017543
+144 168         9.21954446
+145 129         9.21954446
+145 132         12.8062485
+145 135         11.7046999
+145 134         9.43398113
+145 144         8.06225775
+145 143         13.6014705
+145 142         12.7279221
+145 146         2.23606798
+145 147         9.48683298
+145 148         10.0498756
+145 149         10.0498756
+145 150                 12
+145 151         13.0384048
+146 129         7.21110255
+146 132          13.453624
+146 145         2.23606798
+146 135         13.3416641
+146 134         10.7703296
+146 144         9.48683298
+146 147         7.28010989
+146 148                  8
+146 149                  8
+146 150         10.0498756
+146 151         11.1803399
+147 126         12.0415946
+147 127         10.6301458
+147 128         7.81024968
+147 129                  5
+147 146         7.28010989
+147 145         9.48683298
+147 148         2.23606798
+147 149         2.23606798
+147 150         4.24264069
+147 151         5.65685425
+147 152         12.6491106
+147 154         12.3693169
+147 155         13.1529464
+148 126         13.0384048
+148 127         11.6619038
+148 128         8.60232527
+148 129         7.21110255
+148 147         2.23606798
+148 146                  8
+148 145         10.0498756
+148 149     1.41421456e-05
+148 150         2.23606798
+148 151         3.60555128
+148 152         10.4403065
+148 153         12.8062485
+148 154          11.045361
+148 155                 12
+148 164                 13
+149 126         13.0384048
+149 127         11.6619038
+149 128         8.60232527
+149 129         7.21110255
+149 148     1.41421456e-05
+149 147         2.23606798
+149 146                  8
+149 145         10.0498756
+149 150         2.23606798
+149 151         3.60555128
+149 152         10.4403065
+149 153         12.8062485
+149 154          11.045361
+149 155                 12
+149 164                 13
+150 123                 13
+150 126                 13
+150 127         11.7046999
+150 128         8.54400375
+150 129         9.21954446
+150 149         2.23606798
+150 148         2.23606798
+150 147         4.24264069
+150 146         10.0498756
+150 145                 12
+150 151         1.41421356
+150 152         9.05538514
+150 153         10.8166538
+150 154                  9
+150 155         10.0498756
+150 164         11.4017543
+151 123         12.5299641
+151 126         13.6014705
+151 127         12.3693169
+151 128         9.21954446
+151 129         10.6301458
+151 150         1.41421356
+151 149         3.60555128
+151 148         3.60555128
+151 147         5.65685425
+151 146         11.1803399
+151 145         13.0384048
+151 152                  8
+151 153         9.43398113
+151 154         8.06225775
+151 155         9.21954446
+151 163                 13
+151 164          10.198039
+152 151                  8
+152 149         10.4403065
+152 148         10.4403065
+152 147         12.6491106
+152 150         9.05538514
+152 165         12.8062485
+152 144         14.0356688
+152 153                  5
+152 154         12.0415946
+152 155          13.453624
+152 162         9.21954446
+152 163         6.40312424
+152 164         2.82842712
+153 149         12.8062485
+153 148         12.8062485
+153 151         9.43398113
+153 150         10.8166538
+153 164         3.60555128
+153 163                  4
+153 152                  5
+153 154         9.48683298
+153 155         10.7703296
+153 157         13.4164079
+153 162         6.32455532
+154 119         13.6014705
+154 120         14.1421356
+154 121         12.7279221
+154 122         13.0384048
+154 123         5.83095189
+154 124         13.0384048
+154 125          11.045361
+154 128                 10
+154 127          12.083046
+154 126         12.6491106
+154 151         8.06225775
+154 150                  9
+154 149          11.045361
+154 148          11.045361
+154 147         12.3693169
+154 164         12.5299641
+154 163         13.3416641
+154 153         9.48683298
+154 152         12.0415946
+154 155         1.41421356
+154 156         6.32455532
+154 157         9.48683298
+154 158         12.0415946
+154 159         13.1529464
+154 160         13.1529464
+155 119         12.2065556
+155 120         12.7279221
+155 121         11.3137085
+155 122         11.6619038
+155 123         4.47213595
+155 124                 12
+155 125                 10
+155 128         9.89949494
+155 127         11.6619038
+155 126          12.083046
+155 154         1.41421356
+155 151         9.21954446
+155 150         10.0498756
+155 149                 12
+155 148                 12
+155 147         13.1529464
+155 164          13.892444
+155 153         10.7703296
+155 152          13.453624
+155 156         5.09901951
+155 157         8.94427191
+155 158         11.1803399
+155 159         12.3693169
+155 160         12.0415946
+156 118         11.1803399
+156 119         7.81024968
+156 120         8.94427191
+156 121         7.61577311
+156 122         9.05538514
+156 128         13.4164079
+156 126         14.1421356
+156 125         10.2956301
+156 124          12.083046
+156 155         5.09901951
+156 154         6.32455532
+156 123         4.24264069
+156 157         5.83095189
+156 158         6.70820393
+156 159         8.06225775
+156 160                  7
+156 161         14.1421356
+157 118         10.6301458
+157 119         11.1803399
+157 120         13.0384048
+157 121                 12
+157 122         14.1421356
+157 156         5.83095189
+157 155         8.94427191
+157 154         9.48683298
+157 123                 10
+157 153         13.4164079
+157 158         3.60555128
+157 159         4.12310563
+157 160         6.40312424
+157 161         11.4017543
+158 118         7.07106781
+158 122                 13
+158 121         10.4403065
+158 120         11.1803399
+158 119         9.05538514
+158 157         3.60555128
+158 156         6.70820393
+158 155         11.1803399
+158 154         12.0415946
+158 123         10.8166538
+158 159         1.41421356
+158 160         3.16227766
+158 161         8.06225775
+159 118         7.21110255
+159 121         11.7046999
+159 120         12.3693169
+159 119          10.198039
+159 158         1.41421356
+159 157         4.12310563
+159 156         8.06225775
+159 155         12.3693169
+159 154         13.1529464
+159 123         12.2065556
+159 160                  4
+159 161         7.28010989
+160 118         4.47213595
+160 122         10.8166538
+160 121         8.06225775
+160 120         8.54400375
+160 119         6.32455532
+160 159                  4
+160 158         3.16227766
+160 157         6.40312424
+160 156                  7
+160 155         12.0415946
+160 154         13.1529464
+160 123         10.4403065
+160 161         7.28010989
+161 120         14.1421356
+161 119         12.0415946
+161 118                  5
+161 160         7.28010989
+161 159         7.28010989
+161 158         8.06225775
+161 157         11.4017543
+161 156         14.1421356
+162 164         6.40312424
+162 163         2.82842712
+162 153         6.32455532
+162 152         9.21954446
+163 153                  4
+163 154         13.3416641
+163 162         2.82842712
+163 151                 13
+163 164         3.60555128
+163 152         6.40312424
+164 153         3.60555128
+164 154         12.5299641
+164 155          13.892444
+164 162         6.40312424
+164 163         3.60555128
+164 149                 13
+164 148                 13
+164 151          10.198039
+164 150         11.4017543
+164 152         2.82842712
+164 165         13.4164079
+165 152         12.8062485
+165 164         13.4164079
+165 168                  2
+165 167         2.23606798
+165 166         2.23606798
+165 144          9.8488578
+165 143         12.5299641
+166 165         2.23606798
+166 144          10.198039
+166 143         11.4017543
+166 142          13.453624
+166 167         1.41421356
+166 168                  1
+167 165         2.23606798
+167 168         2.23606798
+167 166         1.41421356
+167 144         11.4017543
+167 143         12.8062485
+168 165                  2
+168 167         2.23606798
+168 166                  1
+168 144         9.21954446
+168 143         10.8166538
+168 142         12.8062485
diff --git a/pysal/examples/juvenile.html b/pysal/examples/juvenile.html
new file mode 100644
index 0000000..39626f3
--- /dev/null
+++ b/pysal/examples/juvenile.html
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+      "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <title>SAL Data Sets - Juvenile</title>
+</head>
+
+<body>
+<h1>JUVENILE</h1>
+
+<h2>Data provided "as is," no warranties</h2>
+
+<h2>Description</h2>
+
+<p>Residences of juvenile offenders in Cardiff, UK</p>
+
+<p>Type = point shape file, projected, unknown projection</p>
+
+<p>Observations = 168</p>
+
+<p>Variables = 3</p>
+
+<h2>Source</h2>
+
+<p>Bailey, T. and  A. Gatrell (1995). Interactive Spatial
+   Data Analysis. New York: Wiley, p. 95.</p>
+
+<h2>Variables</h2>
+
+<table>
+  <thead>
+    <tr>
+      <th>Variable</th>
+      <th>Description</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <td>ID</td>
+      <td>location identifier</td>
+    </tr>
+    <tr>
+       <td>X</td>
+       <td>X-coordinate</td>
+     </tr>
+    <tr>
+       <td>Y</td>
+       <td>Y-coordinate</td>
+     </tr>  </tbody>
+</table>
+<br />
+
+<hr />
+
+<p>Prepared by <a href="mailto:anselin at uiuc.edu">Luc Anselin</a></p>
+
+<p><a href="http://sal.agecon.uiuc.edu">UIUC-ACE Spatial Analysis
+Laboratory</a></p>
+
+<p>Last updated June 24, 2003</p>
+</body>
+</html>
diff --git a/pysal/examples/juvenile.shp b/pysal/examples/juvenile.shp
new file mode 100644
index 0000000..00f1c8c
Binary files /dev/null and b/pysal/examples/juvenile.shp differ
diff --git a/pysal/examples/juvenile.shx b/pysal/examples/juvenile.shx
new file mode 100644
index 0000000..b22e112
Binary files /dev/null and b/pysal/examples/juvenile.shx differ
diff --git a/pysal/examples/lattice10x10.shp b/pysal/examples/lattice10x10.shp
new file mode 100644
index 0000000..47ad243
Binary files /dev/null and b/pysal/examples/lattice10x10.shp differ
diff --git a/pysal/examples/lattice10x10.shx b/pysal/examples/lattice10x10.shx
new file mode 100644
index 0000000..60dea1d
Binary files /dev/null and b/pysal/examples/lattice10x10.shx differ
diff --git a/pysal/examples/mexico.csv b/pysal/examples/mexico.csv
new file mode 100644
index 0000000..14f87ac
--- /dev/null
+++ b/pysal/examples/mexico.csv
@@ -0,0 +1,33 @@
+State,pcgdp1940,pcgdp1950,pcgdp1960,pcgdp1970,pcgdp1980,pcgdp1990,pcgdp2000,hanson03,hanson98,esquivel99,inegi,inegi2
+Aguascalientes,10384.000,6234.000,8714.000,16078.000,21022.000,20787.000,27782.000,2.000,2.000,3.000,4.000,4.000
+Baja California,22361.000,20977.000,17865.000,25321.000,29283.000,26839.000,29855.000,1.000,1.000,5.000,1.000,1.000
+Baja California Sur,9573.000,16013.000,16707.000,24384.000,29038.000,25842.000,26103.000,2.000,2.000,6.000,1.000,1.000
+Campeche,3758.000,4929.000,5925.000,10274.000,12166.000,51123.000,36163.000,6.000,5.000,4.000,5.000,5.000
+Chiapas,2934.000,4138.000,5280.000,7015.000,16200.000,8637.000,8684.000,5.000,5.000,7.000,5.000,5.000
+Chihuahua,8578.000,13997.000,16265.000,19178.000,23399.000,25332.000,30735.000,1.000,1.000,5.000,1.000,2.000
+Coahuila,8537.000,9673.000,12318.000,20562.000,25688.000,26084.000,28460.000,1.000,1.000,5.000,2.000,2.000
+Colima,6909.000,6049.000,6036.000,12551.000,17427.000,18313.000,21358.000,3.000,3.000,6.000,4.000,4.000
+Distrito Federal,17816.000,17119.000,23174.000,32386.000,42028.000,43810.000,54349.000,4.000,4.000,1.000,3.000,3.000
+Durango,12132.000,8859.000,9323.000,12700.000,16726.000,17353.000,17379.000,2.000,2.000,3.000,1.000,2.000
+Guanajuato,4359.000,5686.000,8209.000,11635.000,13864.000,13607.000,15585.000,3.000,3.000,3.000,4.000,4.000
+Guerrero,2181.000,3629.000,4991.000,6497.000,8727.000,9084.000,11820.000,5.000,5.000,7.000,5.000,5.000
+Hidalgo,4414.000,5194.000,6399.000,7767.000,12391.000,13091.000,12348.000,3.000,3.000,2.000,3.000,3.000
+Jalisco,5309.000,8232.000,9953.000,16288.000,20659.000,20133.000,21610.000,3.000,3.000,6.000,4.000,4.000
+Mexico,3408.000,4972.000,9053.000,17164.000,20165.000,18547.000,16322.000,4.000,4.000,1.000,3.000,3.000
+Michoacan,3327.000,5272.000,5244.000,8109.000,11206.000,10980.000,11838.000,3.000,3.000,7.000,4.000,4.000
+Morelos,6936.000,8962.000,10499.000,13892.000,16513.000,17701.000,18170.000,3.000,3.000,2.000,3.000,3.000
+Nayarit,4836.000,7515.000,7621.000,10880.000,13354.000,12757.000,11478.000,2.000,2.000,6.000,4.000,4.000
+Nuevo Leon,9073.000,11490.000,20117.000,28206.000,34856.000,34726.000,38672.000,1.000,1.000,5.000,2.000,2.000
+Oaxaca,1892.000,4538.000,4140.000,5230.000,7730.000,8465.000,9010.000,5.000,5.000,7.000,5.000,5.000
+Puebla,3569.000,6415.000,6542.000,9775.000,13374.000,11895.000,15685.000,3.000,3.000,2.000,3.000,5.000
+Quertaro,11016.000,5560.000,7110.000,14073.000,20088.000,22441.000,26149.000,3.000,3.000,3.000,3.000,4.000
+Quintana Roo,21965.000,28747.000,9677.000,17046.000,26695.000,25049.000,33442.000,6.000,5.000,4.000,5.000,5.000
+San Luis Potosi,4372.000,7533.000,6440.000,9721.000,12691.000,15436.000,15866.000,2.000,2.000,3.000,4.000,4.000
+Sinaloa,4840.000,6663.000,9613.000,14477.000,15312.000,15823.000,15242.000,2.000,2.000,6.000,1.000,1.000
+Sonora,6399.000,10345.000,12134.000,22662.000,23181.000,24784.000,24068.000,1.000,1.000,5.000,1.000,1.000
+Tabasco,2459.000,3857.000,6494.000,9367.000,42361.000,16055.000,13360.000,6.000,5.000,4.000,5.000,5.000
+Tamaulipas,7508.000,8536.000,8383.000,17128.000,21937.000,19983.000,23546.000,1.000,1.000,5.000,2.000,2.000
+Tlaxcala,3605.000,4178.000,4357.000,6245.000,9882.000,10339.000,11701.000,3.000,3.000,2.000,3.000,3.000
+Veracruz,5203.000,10143.000,11404.000,12240.000,14252.000,13796.000,12191.000,3.000,3.000,4.000,5.000,5.000
+Yucatan,7990.000,8428.000,10067.000,11665.000,15239.000,13979.000,17509.000,6.000,5.000,4.000,5.000,5.000
+Zacatecas,3734.000,6435.000,5821.000,7426.000,8876.000,11656.000,11130.000,2.000,2.000,3.000,4.000,4.000
diff --git a/pysal/examples/mexico.gal b/pysal/examples/mexico.gal
new file mode 100644
index 0000000..a489957
--- /dev/null
+++ b/pysal/examples/mexico.gal
@@ -0,0 +1,65 @@
+32        
+0 2       
+31 13       
+1 2       
+2 25       
+2 1       
+1        
+3 3       
+30 22 26      
+4 3       
+19 26 29      
+5 4       
+6 9 24 25     
+6 5       
+18 23 31 9 5    
+7 2       
+13 15       
+8 2       
+16 14       
+9 6       
+5 6 31 13 17 24   
+11 5       
+15 14 16 20 19    
+10 5       
+23 21 31 15 13    
+12 6       
+21 23 29 20 28 14   
+13 8       
+17 31 0 23 10 15 7 9 
+14 8       
+21 12 28 20 16 11 15 8 
+15 6       
+7 13 10 21 14 11   
+16 4       
+14 8 20 11     
+17 4       
+24 9 31 13     
+18 4       
+6 27 23 31     
+19 4       
+11 20 29 4     
+20 7       
+29 19 11 16 14 28 12  
+21 5       
+23 12 14 15 10    
+22 2       
+30 3       
+24 4       
+25 5 9 17     
+23 9       
+18 27 29 12 21 10 31 6 13
+25 3       
+1 5 24      
+26 3       
+3 4 29      
+27 3       
+18 29 23      
+28 3       
+12 20 14      
+29 7       
+26 4 19 20 12 23 27  
+30 2       
+3 22       
+31 8       
+18 23 10 0 13 17 9 6 
diff --git a/pysal/examples/nat_queen.gal b/pysal/examples/nat_queen.gal
new file mode 100644
index 0000000..c1675e9
--- /dev/null
+++ b/pysal/examples/nat_queen.gal
@@ -0,0 +1,6171 @@
+0 3085 NAT FIPSNO
+27077 3
+27007 27135 27071
+53019 3
+53047 53065 53043
+53065 4
+53043 53051 53063 53019
+53047 7
+53025 53017 53007 53057 53073 53019 53043
+53051 4
+53063 16017 16021 53065
+16021 3
+16017 30053 53051
+30053 4
+16017 30029 30089 16021
+30029 9
+30063 30047 30089 30099 30049 30077 30073 30035 30053
+30035 3
+30101 30073 30029
+30101 3
+30073 30051 30035
+30051 4
+30073 30041 30015 30101
+30041 3
+30015 30005 30051
+30091 4
+30085 30019 38105 38023
+38023 3
+38105 38013 30091
+38013 5
+38105 38075 38101 38061 38023
+38075 4
+38049 38101 38009 38013
+38009 4
+38049 38079 38069 38075
+38079 3
+38069 38095 38009
+38095 5
+38069 38071 38005 38019 38079
+38019 4
+38071 38067 38099 38095
+38067 3
+38099 27069 38019
+27069 4
+38099 27135 27089 38067
+27135 4
+27089 27077 27007 27069
+30005 4
+30027 30015 30071 30041
+30071 5
+30069 30027 30033 30105 30005
+30105 5
+30033 30085 30055 30019 30071
+30019 3
+30091 30085 30105
+53073 2
+53057 53047
+16017 7
+16055 53063 30053 30089 16079 16021 53051
+38101 5
+38061 38049 38055 38075 38013
+27071 4
+27061 27007 27137 27077
+53057 4
+53061 53047 53007 53073
+38105 7
+30083 30085 38061 38053 38013 38023 30091
+38049 6
+38055 38069 38083 38009 38101 38075
+27137 5
+27017 27001 27061 27075 27071
+30085 7
+30055 30083 38105 38053 30091 30019 30105
+38061 6
+38025 38053 38101 38055 38105 38013
+27089 7
+27113 27119 38035 38099 27007 27135 27069
+38071 5
+38005 38063 38099 38019 38095
+38099 7
+38063 27089 38035 27069 38067 38071 38019
+27007 9
+27057 27029 27113 27061 27021 27071 27077 27089 27135
+38069 7
+38083 38005 38103 38095 38079 38049 38009
+53007 6
+53037 53033 53061 53017 53047 53057
+30073 6
+30099 30015 30051 30101 30035 30029
+53009 1
+53031
+38005 6
+38027 38103 38063 38071 38069 38095
+30015 8
+30045 30013 30099 30027 30005 30041 30073 30051
+53061 4
+53033 53029 53007 53057
+53029 1
+53061
+30089 7
+30061 16079 30047 30063 30029 16017 30053
+27075 2
+27031 27137
+38063 7
+38039 38027 38035 38091 38099 38005 38071
+38035 6
+38097 38091 27119 27089 38063 38099
+27119 8
+27087 27107 38097 27125 27113 27029 27089 38035
+27113 5
+27125 27007 27029 27119 27089
+53017 4
+53037 53025 53047 53007
+38053 8
+38007 38033 30109 30083 38025 38061 30085 38105
+53031 3
+53027 53045 53009
+30083 6
+30109 30021 30055 38053 38105 30085
+30099 5
+30049 30015 30013 30073 30029
+30055 6
+30033 30021 30079 30083 30085 30105
+16079 7
+16057 16009 16055 30061 30089 16035 16017
+53063 7
+53075 53043 16055 16009 16017 53051 53065
+30047 3
+30063 30029 30089
+27029 6
+27005 27087 27057 27007 27113 27119
+16055 4
+16009 16079 16017 53063
+30033 7
+30087 30069 30079 30017 30055 30105 30071
+27125 2
+27113 27119
+53025 8
+53005 53077 53037 53001 53021 53017 53043 53047
+53043 7
+53001 53063 53075 53065 53025 53047 53019
+30049 7
+30043 30077 30059 30007 30013 30099 30029
+53035 2
+53045 53053
+27061 5
+27001 27021 27137 27071 27007
+38083 6
+38015 38055 38103 38043 38069 38049
+38103 7
+38043 38027 38031 38093 38005 38083 38069
+38055 8
+38065 38057 38025 38015 38083 38049 38061 38101
+38027 5
+38031 38039 38063 38005 38103
+38025 6
+38089 38007 38057 38055 38061 38053
+30027 8
+30107 30037 30045 30069 30065 30071 30005 30015
+30021 4
+30079 30109 30083 30055
+53033 4
+53053 53037 53007 53061
+30013 5
+30045 30059 30015 30049 30099
+38039 6
+38093 38031 38091 38003 38063 38027
+38091 6
+38003 38097 38017 38035 38039 38063
+38097 5
+27107 27119 38017 38035 38091
+53045 5
+53027 53053 53067 53035 53031
+30063 9
+30081 16049 16035 30061 30077 30039 30029 30047 30089
+30069 5
+30065 30087 30033 30071 30027
+30077 6
+30023 30039 30043 30049 30063 30029
+53037 6
+53077 53053 53025 53017 53007 53033
+38031 4
+38039 38027 38093 38103
+38057 5
+38089 38065 38059 38055 38025
+53027 5
+53049 53067 53041 53045 53031
+27107 6
+27027 38017 27087 27005 27119 38097
+27087 4
+27029 27005 27119 27107
+30061 4
+16035 30063 30089 16079
+27021 8
+27097 27153 27035 27159 27057 27001 27061 27007
+23003 4
+23019 23021 23025 23029
+30045 5
+30059 30027 30107 30015 30013
+27057 5
+27159 27005 27021 27007 27029
+16009 5
+16057 53075 16079 16055 53063
+30109 6
+30025 30079 38033 38053 30083 30021
+38033 5
+30025 38007 38087 38053 30109
+38007 5
+38089 38087 38025 38053 38033
+38093 7
+38047 38043 38003 38045 38039 38031 38103
+38043 6
+38029 38015 38093 38047 38103 38083
+38015 6
+38059 38065 38043 38029 38083 38055
+53053 7
+53041 53067 53037 53077 53033 53045 53035
+38065 4
+38015 38059 38055 38057
+53001 4
+53021 53075 53043 53025
+53075 10
+53013 53023 53021 16057 16069 53003 16009 53063 53001 53043
+38003 6
+38045 38017 38073 38091 38093 38039
+38017 7
+38073 27027 38077 27107 38003 38097 38091
+30079 6
+30017 30109 30025 30021 30033 30055
+27005 7
+27111 27027 27159 27057 27029 27087 27107
+27027 6
+27167 38077 27005 27111 27107 38017
+53067 4
+53053 53041 53027 53045
+16057 5
+16069 16035 16079 16009 53075
+30059 7
+30007 30107 30097 30031 30045 30013 30049
+53077 7
+53039 53059 53041 53005 53025 53037 53053
+27001 8
+27065 27095 27035 27115 27017 27137 27061 27021
+38089 7
+38087 38059 38037 38041 38057 38025 38007
+38059 7
+38037 38029 38085 38015 38089 38065 38057
+55007 4
+55129 55031 55003 55113
+16035 7
+16061 16069 16049 30063 30061 16057 16079
+30017 6
+30075 30087 30025 30011 30079 30033
+30087 8
+30003 30103 30111 30065 30075 30017 30033 30069
+30039 5
+30081 30023 30001 30077 30063
+27159 5
+27111 27021 27153 27057 27005
+27035 4
+27097 27001 27095 27021
+53049 3
+53069 53041 53027
+53041 8
+53069 53015 53059 53077 53053 53049 53067 53027
+30007 4
+30043 30031 30059 30049
+27017 4
+27115 55031 27137 27001
+30037 6
+30095 30097 30107 30065 30111 30027
+55031 6
+55013 55129 27115 55007 55113 27017
+30065 5
+30087 30111 30069 30037 30027
+30107 5
+30097 30037 30027 30059 30045
+53021 6
+53005 53013 53071 53075 53001 53025
+53005 7
+41049 53039 41059 53071 53021 53025 53077
+38037 5
+38001 38041 38085 38059 38089
+27111 7
+27041 27051 27167 27159 27153 27005 27027
+53023 4
+53013 53003 41063 53075
+30025 8
+30011 38011 46063 38087 38033 30109 30017 30079
+16049 9
+16085 16003 41063 16061 16069 30081 16059 30063 16035
+30081 5
+30001 16059 30039 30063 16049
+55003 4
+55113 55051 55099 55007
+38047 5
+38051 38029 38045 38093 38043
+38029 8
+46031 38085 38051 46021 38047 38043 38059 38015
+38045 6
+38051 38073 38021 38003 38047 38093
+38087 7
+38011 38041 38001 38089 30025 38007 38033
+38041 4
+38001 38037 38087 38089
+38073 6
+38021 38077 38081 38017 38045 38003
+38077 8
+46091 38081 27155 46109 27167 27027 38073 38017
+27167 5
+27155 27111 27051 27027 38077
+16069 7
+41063 53003 16049 16035 16061 16057 53075
+53013 6
+41059 53071 53023 41063 53075 53021
+53071 4
+41059 53013 53005 53021
+55051 4
+55099 55125 26053 55003
+23025 6
+23011 23007 23019 23027 23021 23003
+23021 3
+23019 23003 23025
+30043 7
+30093 30023 30007 30031 30057 30049 30077
+30111 7
+30009 30095 30087 30003 30103 30065 30037
+30103 3
+30087 30003 30111
+16061 3
+16035 16049 16069
+53003 4
+16069 41063 53023 53075
+38085 7
+46105 38001 46021 38029 46031 38037 38059
+27115 7
+27025 27059 27065 55013 55031 27017 27001
+23019 6
+23009 23027 23029 23003 23021 23025
+53059 7
+41051 53011 53015 53039 41027 53077 53041
+53015 5
+53011 41009 53069 53059 53041
+53069 4
+53015 41009 53041 53049
+27153 6
+27041 27097 27145 27021 27111 27159
+27097 6
+27145 27009 27095 27035 27021 27153
+38051 6
+46021 38021 46089 38045 38047 38029
+38021 6
+46013 46089 38081 38073 38051 38045
+38081 6
+46091 46013 46109 38077 38021 38073
+38011 4
+46063 38001 38087 30025
+38001 8
+46105 46063 38085 46031 38037 38041 38011 38087
+30023 5
+30043 30093 30001 30077 30039
+27095 7
+27141 27009 27065 27059 27001 27097 27035
+41007 3
+41057 41009 41067
+30097 5
+30031 30095 30037 30107 30059
+41009 7
+41067 41057 53011 41051 53015 41007 53069
+30093 4
+30043 30057 30001 30023
+55129 6
+55005 55013 55113 55107 55007 55031
+55113 7
+55107 55005 55099 55003 55129 55031 55007
+55013 6
+55095 27025 55129 55005 55031 27115
+27065 5
+27059 27115 27025 27001 27095
+30011 6
+56011 30075 46019 46063 30025 30017
+30095 5
+30031 30111 30009 30037 30097
+27051 6
+27149 27155 27041 27121 27111 27167
+27041 6
+27149 27121 27153 27145 27111 27051
+53011 4
+53059 41051 53015 41009
+53039 8
+41065 41027 41055 53005 41049 41021 53077 53059
+30003 7
+56003 30009 30075 56033 30087 30103 30111
+27155 6
+46109 27051 27149 27011 27167 38077
+41059 7
+41023 41049 41063 41061 53013 53071 53005
+41063 9
+41001 41061 16003 16049 16069 41059 53003 53013 53023
+55099 8
+55119 55107 55085 55069 55125 55051 55113 55003
+46021 6
+46129 46031 46089 38051 38085 38029
+46063 6
+46019 46105 38001 38011 30011 30025
+46089 6
+46045 46129 46013 38021 46021 38051
+46105 7
+46093 46019 46137 46031 38085 38001 46063
+46031 8
+46041 46137 46129 46021 38029 46105 38001 38085
+46013 8
+46115 46049 46045 46037 38081 46091 38021 46089
+30001 8
+16033 16059 16043 30057 30093 30023 30081 30039
+46091 5
+46037 46109 38077 38081 46013
+46109 7
+46051 46037 27011 27155 46091 38081 38077
+41049 6
+41069 41021 41059 41023 53005 53039
+41061 4
+41023 41063 41001 41059
+30057 5
+30031 16043 30001 30093 30043
+27009 4
+27145 27095 27141 27097
+41021 5
+41065 41055 41049 41069 53039
+30075 7
+56033 56005 30011 56011 30017 30003 30087
+27145 9
+27093 27067 27121 27141 27171 27009 27097 27041 27153
+41057 6
+41041 41071 41053 41067 41009 41007
+41067 6
+41071 41051 41005 41009 41057 41007
+27121 6
+27151 27149 27145 27067 27041 27051
+27149 6
+27011 27121 27151 27041 27051 27155
+27059 6
+27003 27141 27025 27115 27065 27095
+27025 7
+27163 27003 55095 55013 27115 27059 27065
+41055 3
+41065 41021 53039
+55095 6
+55109 27163 55005 55033 55013 27025
+41051 6
+41027 41005 53059 41067 53011 41009
+41027 5
+41005 41065 53039 41051 53059
+41065 8
+41031 41047 41005 41069 41021 41055 53039 41027
+16059 7
+16037 16085 16033 16023 30001 30081 16049
+23029 3
+23009 23019 23003
+23007 4
+23001 23017 23011 23025
+55005 8
+55033 55109 55107 55017 55113 55129 55095 55013
+55107 6
+55017 55099 55119 55113 55005 55129
+30009 6
+30031 56029 30003 56003 30111 30095
+46129 6
+46041 46045 46107 46089 46021 46031
+46045 6
+46049 46107 46013 46115 46089 46129
+46037 7
+46025 46115 46109 46051 46029 46091 46013
+27011 6
+46051 27151 27149 27073 46109 27155
+27141 7
+27171 27059 27003 27053 27095 27145 27009
+46137 7
+46103 46055 46093 46117 46041 46031 46105
+46041 7
+46055 46107 46119 46117 46129 46031 46137
+41005 6
+41047 41071 41065 41027 41051 41067
+27171 7
+27019 27085 27093 27003 27053 27141 27145
+41071 5
+41053 41005 41047 41067 41057
+27003 7
+27123 27053 27163 27025 27059 27171 27141
+27067 6
+27023 27151 27093 27129 27145 27121
+27151 6
+27023 27073 27067 27121 27011 27149
+55119 6
+55019 55017 55073 55069 55099 55107
+23017 6
+23031 33003 33007 23005 23001 23007
+46051 6
+46039 46029 27073 27011 46109 46037
+27093 5
+27129 27171 27085 27145 27067
+33007 4
+33009 50009 23017 33003
+27163 7
+27037 27123 55109 55093 55095 27025 27003
+55017 6
+55035 55033 55119 55019 55107 55005
+41047 6
+41043 41053 41065 41031 41005 41071
+27073 6
+46039 27023 27173 27151 46051 27011
+16003 6
+16045 16087 41001 16085 16049 41063
+46107 6
+46119 46049 46069 46045 46041 46129
+46049 6
+46059 46069 46115 46013 46045 46107
+27053 7
+27139 27019 27123 27037 27003 27171 27141
+46115 7
+46059 46025 46005 46037 46013 46049 46045
+55109 5
+55093 55033 55005 55095 27163
+55033 7
+55091 55093 55017 55035 55005 55109 55095
+46019 6
+46081 56011 46093 46105 46063 30011
+16085 6
+16015 16045 16059 16037 16049 16003
+46025 6
+46005 46057 46077 46029 46037 46115
+46029 5
+46057 46039 46051 46025 46037
+27023 5
+27173 27067 27129 27151 27073
+27123 4
+27163 27037 27003 27053
+41001 6
+41045 41023 16003 16087 41063 41061
+41053 6
+41003 41041 41047 41043 41071 41057
+41069 6
+41013 41031 41023 41049 41065 41021
+46093 6
+46103 46081 46055 46137 46105 46019
+41041 4
+41003 41039 41053 41057
+55019 6
+55053 55035 55141 55073 55119 55017
+50009 4
+50005 50019 33007 33009
+50013 3
+36019 50011 50007
+50011 4
+50007 50019 50015 50013
+50019 4
+50005 50015 50009 50011
+36019 4
+36031 36033 50013 50007
+56029 7
+30031 56039 56043 56003 56017 56013 30009
+56011 6
+56045 56005 46081 46019 30011 30075
+56003 6
+56019 56043 56033 30003 56029 30009
+56005 6
+56019 56033 56045 56009 56011 30075
+56033 5
+56019 56005 30075 56003 30003
+36033 4
+36089 36041 36031 36019
+41023 8
+41025 41013 41045 41001 41061 41059 41069 41049
+27085 5
+27129 27019 27143 27171 27093
+27019 5
+27143 27139 27053 27171 27085
+46039 7
+46011 46057 27173 27081 27073 46051 46029
+27037 7
+27131 27139 55093 27049 27163 27123 27053
+27173 7
+27081 27083 27129 27127 27023 46039 27073
+46119 5
+46117 46069 46065 46107 46041
+46069 7
+46085 46065 46059 46017 46049 46119 46107
+46059 6
+46017 46005 46073 46115 46049 46069
+27129 9
+27127 27085 27143 27103 27015 27093 27173 27023 27067
+55093 6
+27049 55091 55033 55109 27037 27163
+16037 6
+16039 16015 16023 16013 16059 16085
+55035 7
+55121 55011 55091 55019 55053 55017 55033
+16087 5
+16075 41045 16045 16003 41001
+41031 6
+41043 41069 41013 41017 41065 41047
+27139 6
+27079 27143 27037 27131 27053 27019
+46057 5
+46077 46039 46011 46029 46025
+50015 5
+50007 50005 50019 50023 50011
+41043 6
+41039 41003 41017 41031 41047 41053
+46117 7
+46075 46055 46085 46065 46119 46041 46137
+50005 6
+50017 50023 33009 50009 50019 50015
+23027 6
+23015 23013 23011 23009 23019 23025
+46055 7
+46071 46103 46075 46117 46137 46041 46093
+16043 8
+16065 16081 16051 16033 56039 30031 30057 30001
+27143 6
+27103 27139 27019 27079 27129 27085
+50007 7
+36031 50023 50001 50015 50011 36019 50013
+23011 6
+23023 23001 23015 23027 23025 23007
+27049 7
+27039 27131 27157 27109 55091 55093 27037
+41003 4
+41043 41039 41053 41041
+27127 6
+27033 27101 27083 27015 27129 27173
+55141 6
+55057 55053 55097 55001 55073 55019
+55091 6
+27157 55035 55011 55033 27049 55093
+56039 8
+56023 16019 16081 56013 56035 56029 30031 16043
+46005 7
+46111 46073 46077 46097 46025 46059 46115
+27083 5
+27117 27081 27127 27101 27173
+27081 6
+46101 46011 27083 27117 27173 46039
+46081 5
+56045 46093 46103 46019 56011
+55011 5
+27157 55121 27169 55035 55091
+55121 5
+27169 55053 55063 55035 55011
+55053 7
+55063 55081 55057 55141 55019 55121 55035
+16033 5
+16051 16023 16043 30001 16059
+41013 5
+41017 41025 41023 41069 41031
+56019 6
+56025 56043 56005 56009 56033 56003
+46065 4
+46069 46085 46117 46119
+36031 7
+36113 36115 36041 50001 50007 36019 36033
+27079 6
+27161 27013 27103 27131 27139 27143
+27131 7
+27147 27161 27049 27039 27037 27079 27139
+46077 7
+46097 46111 46011 46079 46057 46005 46025
+46011 7
+46079 46101 27081 27117 46039 46077 46057
+46103 8
+46113 46033 56045 46071 46055 46093 46137 46081
+16045 7
+16027 16075 16015 16001 16085 16003 16087
+27015 6
+27165 27033 27013 27103 27127 27129
+50023 5
+50001 50005 50017 50007 50015
+23001 5
+23023 23011 23005 23007 23017
+27103 5
+27079 27013 27143 27015 27129
+27157 5
+27109 55011 27169 55091 27049
+41045 8
+32013 41025 16073 16027 16075 16087 41001 41023
+23009 3
+23029 23019 23027
+33009 9
+33019 50027 50017 33003 33001 33013 33007 50005 50009
+41017 7
+41035 41039 41025 41013 41037 41043 41031
+23013 2
+23015 23027
+16015 6
+16001 16039 16013 16037 16085 16045
+23015 4
+23023 23013 23027 23011
+50001 7
+36115 50021 50017 50027 50023 36031 50007
+33003 6
+33001 23031 23017 33017 33009 33007
+41039 6
+41019 41017 41035 41043 41003 41041
+27013 7
+27091 27165 27161 27079 27043 27103 27015
+55057 7
+55123 55081 55021 55001 55111 55141 55053
+16023 6
+16013 16011 16051 16033 16037 16059
+46085 10
+46123 46095 46075 46023 46015 46053 46017 46069 46065 46117
+50017 5
+50027 33009 50005 50001 50023
+27161 6
+27043 27131 27147 27047 27079 27013
+46017 5
+46073 46059 46015 46085 46069
+46073 6
+46015 46003 46111 46005 46017 46059
+46101 6
+46099 46079 27081 27117 27133 46011
+27117 7
+27133 46099 27101 27083 46101 46011 27081
+27039 6
+27047 27147 27109 27049 27099 27131
+46111 7
+46035 46003 46077 46097 46061 46005 46073
+27101 6
+27105 27133 27033 27127 27117 27083
+27033 6
+27063 27105 27165 27015 27127 27101
+27147 5
+27047 27039 27099 27131 27161
+27109 6
+27099 27169 27045 27157 27039 27049
+46097 6
+46061 46079 46087 46077 46111 46005
+46079 6
+46087 46101 46011 46099 46097 46077
+27169 7
+27045 27055 55063 55121 27109 27157 55011
+56045 7
+56009 56027 46103 46033 46081 56011 56005
+46075 5
+46071 46085 46095 46117 46055
+23005 4
+23031 23023 23001 23017
+56043 6
+56013 56017 56019 56025 56003 56029
+55081 4
+55123 55063 55057 55053
+16075 4
+16027 16045 16087 41045
+36041 7
+36089 36035 36043 36113 36091 36031 36033
+27165 5
+27063 27013 27091 27015 27033
+36043 7
+36065 36049 36077 36057 36041 36035 36089
+55063 6
+27055 55081 55123 55053 27169 55121
+16039 8
+16073 16001 16013 16083 16047 16025 16037 16015
+56017 3
+56043 56013 56029
+16051 6
+16065 16019 16011 16043 16033 16023
+41025 7
+32031 41037 41045 32013 41023 41017 41013
+23023 4
+23015 23011 23005 23001
+56013 8
+56037 56035 56025 56007 56043 56017 56039 56029
+46071 7
+46007 46113 46075 46095 46121 46055 46103
+16013 10
+16031 16067 16063 16025 16011 16077 16023 16039 16015 16037
+16081 4
+16019 16065 56039 16043
+50027 7
+50025 50003 50021 33019 33009 50017 50001
+41019 6
+41033 41015 41029 41011 41035 41039
+46003 6
+46015 46035 46043 46023 46111 46073
+46015 6
+46003 46023 46053 46073 46017 46085
+16065 4
+16081 16019 16043 16051
+16027 5
+16001 16073 16045 16075 41045
+46095 6
+46121 46007 46123 46085 46071 46075
+16025 4
+16047 16063 16013 16039
+46033 5
+46047 56027 46113 46103 56045
+50021 4
+50003 36115 50027 50001
+27043 6
+19109 27091 27047 19189 27161 27013
+46099 8
+46083 46125 46087 27133 19119 27117 46101 46079
+27133 6
+27105 19119 27101 27117 46099 46101
+27047 7
+19189 19195 27099 27039 27147 27043 27161
+27105 6
+19119 19143 27063 27033 27101 27133
+27063 7
+19059 19143 27091 19063 27165 27033 27105
+27091 6
+19063 27043 19109 27013 27063 27165
+27055 6
+19191 27045 55123 55063 19005 27169
+27099 8
+19131 19195 27045 19089 27109 27047 27147 27039
+27045 6
+19089 19191 27055 27169 27099 27109
+46035 5
+46043 46061 46067 46111 46003
+46061 5
+46087 46067 46097 46035 46111
+46087 6
+46067 46099 46125 46079 46061 46097
+23031 4
+33017 23005 23017 33003
+36115 7
+36083 36091 36113 50003 50021 50001 36031
+16001 5
+16039 16073 16015 16027 16045
+36113 4
+36091 36115 36031 36041
+46123 6
+31031 46121 46053 31103 46085 46095
+33001 4
+33013 33017 33003 33009
+55123 8
+55023 19005 55057 55111 55103 55081 27055 55063
+46113 8
+31045 46047 46007 31031 31161 46071 46103 46033
+16073 7
+32013 16083 32007 16039 16001 16027 41045
+16019 7
+16029 16011 56039 56023 16081 16065 16051
+16011 7
+16005 16077 16029 16019 16013 16051 16023
+41035 7
+6093 41029 41037 6049 41017 41019 41039
+41037 5
+6049 41025 32031 41035 41017
+41011 2
+41015 41019
+33013 6
+33011 33019 33017 33015 33001 33009
+33019 6
+33005 50025 33013 33011 33009 50027
+33017 5
+33015 23031 33013 33001 33003
+55103 5
+55043 55023 55111 55049 55123
+56025 6
+56007 56009 56001 56019 56013 56043
+46053 6
+31103 46023 31015 46123 46015 46085
+56027 7
+56031 56015 56009 31165 46047 46033 56045
+46023 9
+31015 46043 46009 31107 46067 46053 46015 46003 46085
+46125 6
+46135 46067 46083 46027 46099 46087
+46083 6
+46027 46127 19119 19167 46099 46125
+19195 6
+19081 19033 19189 19131 27099 27047
+19131 6
+19067 19033 19089 19037 27099 19195
+19005 6
+19065 19191 55023 19043 55123 27055
+19189 6
+19081 19109 19195 19033 27047 27043
+19191 7
+19065 19037 19089 19005 19043 27055 27045
+56009 8
+56001 56007 56031 56027 56045 56025 56019 56005
+19143 7
+19141 19167 19119 19059 19041 27063 27105
+19059 6
+19041 19141 19063 19147 27063 19143
+19109 9
+19091 19151 19147 19063 19081 19197 19189 27043 27091
+19089 6
+19037 19067 19191 27045 19131 27099
+19063 6
+19147 19041 19109 27091 19059 27063
+19119 7
+19167 19143 19141 27105 46083 27133 46099
+46043 4
+46067 46023 46035 46003
+46067 8
+46009 46125 46135 46087 46043 46023 46061 46035
+46047 5
+31165 46113 31045 46033 56027
+56035 4
+56023 56013 56037 56039
+55023 5
+19043 55103 55043 55123 19005
+36091 8
+36001 36093 36057 36035 36115 36083 36113 36041
+46007 6
+31161 46121 31031 46095 46071 46113
+46121 6
+31031 46123 31103 46095 46007 46071
+50003 7
+25003 36083 50025 25011 50027 50021 36115
+56023 8
+56041 49033 16007 16029 56037 56035 56039 16019
+36035 4
+36057 36091 36041 36043
+33015 4
+33011 25009 33017 33013
+19167 7
+19149 46127 19141 19035 19143 19119 46083
+50025 5
+33005 25011 33019 50027 50003
+19141 8
+19035 19149 19041 19021 19059 19143 19167 19119
+19033 8
+19069 19197 19081 19067 19023 19131 19195 19189
+19041 8
+19021 19035 19147 19151 19063 19059 19141 19143
+19081 7
+19197 19091 19033 19069 19195 19189 19109
+19147 7
+19151 19021 19109 19091 19063 19041 19059
+19067 7
+19069 19023 19037 19017 19089 19131 19033
+19037 7
+19023 19017 19065 19191 19089 19067 19131
+55049 6
+55065 55043 55025 55045 55111 55103
+55043 7
+19061 19043 55065 17085 55049 55103 55023
+33011 7
+25027 33005 25009 33015 25017 33013 33019
+16063 5
+16053 16047 16067 16013 16025
+16047 5
+16063 16053 16083 16025 16039
+16067 4
+16053 16031 16013 16063
+33005 5
+25011 33011 25027 33019 50025
+46135 6
+31107 46009 46027 31027 46125 46067
+46009 4
+46135 31107 46067 46023
+16077 5
+16031 16005 16071 16011 16013
+46127 7
+31051 46027 19193 31043 19167 19149 46083
+46027 6
+31027 46127 31051 46083 46135 46125
+19065 8
+19013 19019 19017 19043 19055 19005 19191 19037
+19043 8
+19019 19055 19061 55043 55023 19065 19191 19005
+36057 6
+36095 36077 36091 36093 36035 36043
+16029 6
+16041 16007 16005 56023 16019 16011
+16005 5
+16071 16029 16041 16011 16077
+31165 6
+56015 31013 31157 31045 46047 56027
+31045 5
+31013 31161 46113 31165 46047
+31161 8
+31123 31069 31013 31031 31075 46007 31045 46113
+41029 4
+41033 41035 6093 41019
+31103 8
+31149 31017 31031 31015 31089 46053 46121 46123
+31015 6
+31149 31089 31107 46023 31103 46053
+31031 11
+31171 31091 31075 31017 31009 31103 46123 46121 31161 46007 46113
+41015 4
+41033 6015 41019 41011
+36083 7
+36039 36021 36001 25003 50003 36115 36091
+36093 4
+36095 36091 36001 36057
+19149 5
+19141 19035 19193 19167 46127
+19035 8
+19093 19193 19041 19021 19161 19141 19149 19167
+19017 6
+19013 19023 19065 19019 19037 19067
+19023 8
+19075 19083 19069 19037 19017 19013 19067 19033
+19021 8
+19093 19147 19151 19025 19041 19161 19035 19141
+16083 6
+32007 16031 16053 16047 16073 16039
+19151 8
+19161 19109 19091 19187 19147 19025 19021 19041
+19091 6
+19187 19081 19197 19109 19151 19147
+19197 8
+19079 19187 19033 19069 19083 19081 19091 19109
+19069 8
+19083 19079 19067 19023 19075 19033 19197 19081
+36077 7
+36017 36053 36065 36095 36025 36057 36043
+31089 8
+31183 31071 31115 31149 31003 31107 31015 31103
+25009 4
+25025 25017 33011 33015
+31107 8
+31003 31027 31139 46135 31089 31015 46009 46023
+31027 6
+31139 31051 31179 46027 31107 46135
+16053 5
+16067 16031 16063 16083 16047
+36095 6
+36025 36001 36039 36093 36057 36077
+31017 5
+31009 31149 31115 31103 31031
+55065 5
+17085 55045 17177 55049 55043
+36001 6
+36039 36083 36021 36091 36095 36093
+31149 5
+31115 31089 31015 31103 31017
+41033 5
+6015 41029 6093 41019 41015
+31051 6
+31179 46127 31043 31173 31027 46027
+25003 8
+36027 36021 25013 9005 25011 25015 50003 36083
+25011 6
+25015 25027 33005 25003 50025 50003
+25017 5
+25027 25009 25025 25021 33011
+25027 10
+9015 9013 25013 25015 25021 44007 25017 33011 25011 33005
+19061 6
+19105 19055 17085 19097 55043 19043
+16031 8
+32007 16071 49003 16077 16013 16053 16083 16067
+19187 7
+19073 19025 19079 19015 19197 19091 19151
+19055 6
+19113 19019 19061 19105 19043 19065
+19019 7
+19011 19013 19055 19113 19043 19065 19017
+19013 7
+19171 19075 19019 19011 19065 19017 19023
+56015 6
+56021 56031 31007 31165 31157 56027
+56031 5
+56001 56015 56021 56027 56009
+16007 5
+49033 49005 16041 56023 16029
+19193 8
+19133 31173 31043 19093 19047 19035 19149 46127
+19093 6
+19133 19161 19047 19021 19035 19193
+19161 7
+19047 19025 19027 19151 19093 19021 19035
+19025 6
+19027 19187 19073 19161 19151 19021
+19079 6
+19015 19083 19169 19069 19197 19187
+25015 4
+25027 25011 25013 25003
+19083 7
+19169 19075 19127 19023 19069 19079 19197
+19075 6
+19127 19013 19171 19023 19083 19069
+31043 4
+31173 19193 31051 46127
+36025 8
+36007 36017 42127 36105 36111 36039 36095 36077
+17085 6
+19097 17177 17015 55065 19061 55043
+36021 7
+36027 36111 36039 25003 9005 36083 36001
+16071 6
+16041 49005 49003 16005 16031 16077
+36039 6
+36111 36083 36021 36001 36025 36095
+25025 3
+25009 25021 25017
+56007 8
+8107 8081 56037 56001 8057 56009 56025 56013
+31139 6
+31119 31003 31179 31167 31027 31107
+31013 5
+31157 31161 31123 31045 31165
+31003 6
+31011 31183 31139 31119 31107 31089
+56001 7
+8057 56021 8069 56031 56009 56007 56025
+16041 6
+49033 16007 49005 16029 16071 16005
+19097 5
+19045 19105 17015 17085 19061
+31179 7
+31119 31167 31173 31039 31051 31139 31027
+25013 6
+9013 9003 9005 25027 25015 25003
+19105 6
+19031 19113 19097 19045 19061 19055
+19011 7
+19095 19157 19171 19113 19103 19019 19013
+19113 7
+19103 19095 19105 19031 19055 19011 19019
+19171 7
+19157 19099 19127 19011 19095 19013 19075
+31173 7
+31039 19133 31021 19193 31043 31179 31051
+56037 8
+49009 49043 56041 56007 8081 56013 56023 56035
+25023 3
+25005 25021 25001
+25021 6
+44007 25005 25023 25025 25027 25017
+19133 6
+31021 19047 19085 19093 19193 31173
+19047 8
+19165 19085 19027 19009 19161 19133 19093 19193
+19027 7
+19009 19165 19073 19077 19025 19047 19161
+19073 6
+19077 19015 19049 19187 19027 19025
+19015 6
+19049 19169 19153 19079 19073 19187
+19127 6
+19099 19169 19171 19157 19075 19083
+19169 6
+19153 19127 19099 19083 19015 19079
+36111 6
+36071 36105 36027 36021 36039 36025
+31039 6
+31037 31167 31021 31173 31053 31179
+25005 5
+44005 44001 44007 25023 25021
+31167 6
+31141 31119 31039 31037 31179 31139
+31119 6
+31011 31179 31167 31139 31141 31003
+31075 5
+31005 31069 31091 31031 31161
+31115 6
+31009 31089 31071 31149 31041 31017
+31091 5
+31117 31005 31171 31031 31075
+31071 6
+31175 31041 31183 31077 31089 31115
+31171 5
+31117 31009 31113 31031 31091
+31183 6
+31175 31077 31003 31011 31089 31071
+31009 6
+31113 31115 31017 31041 31171 31031
+36027 7
+36079 36071 9005 9001 25003 36021 36111
+25001 1
+25023
+31021 6
+31053 19085 19133 31177 31039 31173
+9005 7
+9001 9003 9009 25013 36027 36021 25003
+9003 6
+9009 9011 9013 9007 25013 9005
+19045 7
+19163 17161 19031 17015 17195 19097 19105
+9013 5
+9015 9011 25027 25013 9003
+9015 5
+9011 44003 44007 25027 9013
+36105 5
+42103 42127 36111 36071 36025
+44007 6
+44003 25005 44001 25021 9015 25027
+49005 6
+49057 49003 49033 16007 16041 16071
+6093 8
+6089 6105 6023 6015 6049 41035 41033 41029
+31069 7
+31049 31033 31123 31101 31005 31075 31161
+49003 6
+49045 32007 49057 49005 16071 16031
+49033 8
+49029 49057 56041 49043 56023 16007 49005 16041
+31157 5
+31007 31123 31013 56015 31165
+31123 6
+31033 31007 31069 31161 31157 31013
+42127 7
+42115 36007 42069 36105 42103 42089 36025
+6015 4
+6023 6093 41033 41015
+32013 7
+32027 32031 32007 32015 16073 41045 41025
+32007 9
+32033 32011 32015 49045 49003 16031 16083 32013 16073
+6049 6
+6035 6089 32031 41037 6093 41035
+32031 13
+32510 6061 6057 32029 6091 6035 32001 32019 32027 32013 41025 6049 41037
+19031 6
+19103 19163 19139 19045 19105 19113
+31011 6
+31077 31141 31125 31119 31003 31183
+19099 7
+19125 19153 19157 19123 19171 19127 19169
+19153 7
+19181 19121 19049 19099 19125 19169 19015
+19157 7
+19123 19095 19107 19011 19171 19099 19127
+19085 6
+31177 19165 19155 19047 31021 19133
+19077 7
+19001 19029 19009 19121 19049 19073 19027
+19165 6
+19155 19009 19029 19027 19047 19085
+19009 6
+19029 19001 19077 19027 19165 19047
+19095 7
+19107 19103 19183 19113 19011 19157 19171
+19049 7
+19001 19121 19153 19181 19015 19077 19073
+19103 7
+19183 19031 19139 19115 19113 19095 19011
+17161 7
+17131 19115 19139 19163 17195 17073 19045
+19163 4
+19139 17161 19045 19031
+44001 2
+25005 44007
+44003 4
+44009 9011 44007 9015
+31037 6
+31023 31141 31053 31155 31039 31167
+31053 7
+31155 31023 31177 31055 31021 31037 31039
+31141 8
+31121 31143 31125 31037 31023 31167 31011 31119
+31005 5
+31117 31101 31091 31075 31069
+31077 8
+31093 31163 31175 31011 31125 31121 31183 31071
+31117 6
+31101 31113 31111 31171 31091 31005
+31113 5
+31111 31041 31009 31117 31171
+31041 9
+31047 31111 31163 31019 31175 31071 31113 31009 31115
+31175 5
+31163 31077 31183 31071 31041
+17197 7
+17043 17089 17031 17091 17063 17093 18089
+39095 6
+26091 26115 39069 39173 39051 39123
+17093 6
+17037 17089 17063 17099 17197 17043
+18089 6
+17031 18111 17091 18127 18073 17197
+18127 4
+18073 18149 18091 18089
+39051 5
+26059 39069 39171 39095 26091
+39055 6
+39085 39133 39153 39035 39155 39007
+9011 6
+9007 44003 44009 9015 9013 9003
+39171 6
+18151 26059 18033 39051 39069 39039
+31007 6
+31105 56021 31123 31033 31157 56015
+31177 5
+19155 19085 31055 31053 31021
+44005 1
+25005
+9001 5
+36119 36079 9009 9005 36027
+56021 7
+8069 31105 8123 31007 56015 56001 56031
+42131 5
+42015 42113 42069 42115 42079
+44009 2
+44003 9011
+9007 3
+9009 9011 9003
+9009 4
+9007 9003 9001 9005
+42069 5
+42115 42079 42089 42127 42131
+17099 9
+17103 17037 17203 17123 17155 17011 17063 17105 17093
+36071 9
+34031 36087 34037 42103 36119 36079 36027 36111 36105
+42047 6
+42083 42123 42065 42053 42023 42033
+39035 6
+39103 39153 39093 39055 39133 39085
+42121 7
+42039 42019 42085 42031 42005 42053 42123
+42053 6
+42123 42031 42083 42047 42065 42121
+39123 3
+39143 39173 39095
+42023 5
+42083 42033 42035 42105 42047
+39173 7
+39137 39063 39069 39147 39123 39143 39095
+42103 5
+42089 36071 34037 36105 42127
+42081 9
+42105 42117 42119 42093 42097 42035 42037 42113 42015
+19139 6
+19115 19163 17161 17131 19103 19031
+42113 5
+42015 42037 42131 42079 42081
+17011 7
+17195 17175 17155 17073 17099 17123 17103
+17073 6
+17095 17131 17011 17175 17195 17161
+56041 4
+49043 56037 56023 49033
+18113 7
+18087 18039 18183 18085 18033 18151 18003
+18033 6
+18151 18087 18003 39171 39039 18113
+36079 5
+36087 36119 9001 36027 36071
+31125 5
+31121 31093 31141 31077 31011
+39093 5
+39005 39077 39043 39103 39035
+19123 6
+19135 19125 19107 19179 19157 19099
+19155 8
+19129 31153 31055 19029 19137 19165 31177 19085
+19183 6
+19101 19107 19115 19087 19103 19095
+19125 6
+19117 19181 19123 19135 19099 19153
+19121 7
+19175 19001 19181 19039 19153 19049 19077
+19181 6
+19039 19125 19117 19153 19121 19049
+19107 6
+19179 19183 19101 19095 19123 19157
+19029 7
+19137 19001 19003 19077 19009 19155 19165
+19001 7
+19003 19121 19175 19049 19077 19029 19009
+39143 5
+39147 39043 39077 39123 39173
+39155 7
+39007 39099 39133 42085 42073 42039 39055
+42085 6
+42073 39099 42121 42019 42039 39155
+18099 6
+18141 18131 18049 18149 18085 18039
+39069 7
+39137 39039 39173 39063 39095 39051 39171
+42035 6
+42105 42033 42027 42119 42081 42023
+17063 5
+17105 17197 17091 17093 17099
+6023 4
+6105 6045 6093 6015
+31023 8
+31185 31159 31143 31155 31109 31053 31037 31141
+31155 8
+31109 31159 31153 31055 31025 31053 31023 31037
+39043 3
+39077 39093 39143
+18085 6
+18039 18169 18049 18183 18113 18099
+18149 7
+18091 18131 18073 18099 18049 18141 18127
+42031 5
+42005 42019 42065 42053 42121
+31033 7
+8075 31105 31049 31069 8115 31123 31007
+39039 6
+39125 18003 39069 39137 18033 39171
+49057 6
+49045 49011 49033 49029 49005 49003
+19115 8
+19057 19087 17131 17071 17161 19139 19183 19103
+42079 7
+42107 42025 42037 42089 42069 42113 42131
+31143 6
+31185 31081 31121 31023 31159 31141
+31055 5
+31153 19155 31155 31177 31053
+31163 5
+31019 31093 31077 31175 31041
+31093 6
+31079 31019 31121 31125 31077 31163
+31111 8
+31085 31135 31101 31047 31063 31041 31113 31117
+31121 7
+31079 31081 31143 31141 31125 31093 31077
+31101 7
+8115 31135 31049 31111 31117 31005 31069
+31105 5
+8123 31033 8075 31007 56021
+42065 6
+42063 42005 42033 42047 42031 42053
+49029 5
+49035 49011 49043 49033 49057
+6105 5
+6045 6089 6103 6093 6023
+36119 6
+36005 34003 36087 9001 36079 36071
+34037 6
+34041 42089 34031 36071 34027 42103
+39153 6
+39169 39103 39133 39151 39055 39035
+39133 6
+39151 39155 39099 39055 39153 39035
+17131 8
+17187 17071 19057 17073 17095 17161 19115 19139
+36087 5
+34003 34031 36119 36071 36079
+17155 3
+17099 17123 17011
+42037 6
+42093 42097 42107 42079 42113 42081
+17091 7
+17053 17075 17105 18089 18111 17197 17063
+18183 5
+18069 18169 18003 18113 18085
+18073 8
+18007 18111 18181 18149 18131 18127 18091 18089
+39077 7
+39139 39033 39147 39093 39005 39043 39143
+18003 9
+18179 18001 18069 39039 39125 39161 18033 18183 18113
+39103 5
+39169 39005 39153 39035 39093
+49043 8
+49051 49013 49035 49009 56037 56041 49029 49033
+42027 6
+42061 42013 42087 42033 42119 42035
+42033 8
+42021 42013 42063 42027 42035 42023 42065 42047
+42089 8
+42095 42025 34037 34041 42103 42079 42069 42127
+39147 6
+39175 39063 39077 39033 39143 39173
+39125 4
+39161 39137 39039 18003
+17175 5
+17095 17123 17011 17143 17073
+18111 5
+17075 18073 18007 18089 17091
+31049 5
+8115 31101 31135 31069 31033
+34031 6
+34013 34027 36087 34003 36071 34037
+31153 5
+31025 19129 19155 31055 31155
+6089 6
+6103 6035 6063 6049 6093 6105
+6035 5
+6091 6063 32031 6049 6089
+42097 9
+42099 42043 42067 42109 42119 42093 42037 42107 42081
+18049 7
+18017 18131 18169 18085 18103 18099 18149
+42019 8
+42003 42007 42073 42005 42129 42031 42121 42085
+42005 7
+42003 42063 42129 42065 42031 42019 42121
+18131 6
+18181 18099 18049 18017 18149 18073
+42093 3
+42037 42097 42081
+39063 7
+39065 39003 39137 39147 39175 39173 39069
+19129 6
+19071 31025 19137 19145 19155 31153
+39137 7
+39003 39161 39173 39063 39069 39125 39039
+19137 7
+19145 19071 19003 19173 19029 19129 19155
+19003 7
+19173 19145 19175 19159 19001 19137 19029
+19039 7
+19053 19159 19175 19117 19185 19181 19121
+19179 7
+19007 19051 19135 19101 19177 19107 19123
+19101 6
+19177 19051 19087 19183 19179 19107
+19175 7
+19159 19173 19039 19053 19121 19003 19001
+19087 6
+19177 19057 19115 19111 19101 19183
+31081 7
+31035 31001 31079 31185 31059 31143 31121
+19117 7
+19185 19053 19135 19007 19125 19039 19181
+19135 7
+19007 19185 19179 19051 19123 19117 19125
+49011 4
+49045 49029 49035 49057
+17095 6
+17187 17143 17057 17175 17073 17131
+17123 6
+17203 17143 17099 17175 17155 17011
+42119 6
+42087 42097 42109 42081 42027 42035
+42025 5
+42107 42095 42077 42089 42079
+39099 6
+39029 39151 42085 42073 39155 39133
+42073 6
+42007 39029 42019 42085 39099 39155
+34003 6
+34017 34013 36005 36119 36087 34031
+17105 6
+17113 17203 17053 17091 17063 17099
+34041 6
+42095 34027 34019 42017 34037 42089
+49045 8
+32033 49049 49023 49035 49011 49057 49003 32007
+34027 7
+34039 34035 34019 34013 34031 34041 34037
+19057 5
+19111 17071 17131 19115 19087
+17071 7
+17067 19111 17187 17109 17131 19057 19115
+17187 5
+17109 17095 17057 17131 17071
+31025 6
+31109 19071 31131 19129 31153 31155
+39005 7
+39083 39139 39169 39075 39103 39093 39077
+18169 6
+18103 18069 18053 18183 18085 18049
+31159 7
+31059 31151 31185 31109 31155 31023 31143
+31109 8
+31067 31151 31025 31131 31097 31155 31159 31023
+31019 8
+31137 31099 31047 31079 31001 31093 31163 31041
+31047 6
+31073 31063 31019 31137 31041 31111
+31185 7
+31059 31035 31159 31151 31023 31143 31081
+31079 6
+31001 31081 31035 31121 31093 31019
+18069 5
+18053 18179 18003 18183 18169
+17075 5
+17183 17053 18111 18007 17091
+18103 5
+18067 18017 18053 18169 18049
+17053 6
+17019 17113 17183 17075 17091 17105
+8081 6
+49047 49009 8107 8103 56007 56037
+8123 7
+8001 8013 8069 8087 8075 31105 56021
+8057 5
+8107 8069 8049 56001 56007
+31135 7
+31029 8095 8115 31111 31085 31101 31049
+8075 8
+8121 8087 8115 8095 8125 31033 8123 31105
+8115 6
+8095 31135 31101 31049 8075 31033
+8107 7
+8045 8103 8049 8037 8057 56007 8081
+8069 6
+8013 8049 8123 56021 8057 56001
+49009 5
+49013 49047 8081 56037 49043
+39033 6
+39101 39175 39139 39117 39077 39147
+32015 6
+32001 32027 32011 32023 32007 32013
+32011 4
+32033 32023 32007 32015
+39139 5
+39117 39005 39083 39077 39033
+39169 5
+39075 39151 39153 39103 39005
+39161 7
+39107 18001 39003 39011 39137 39125 18003
+39175 5
+39065 39033 39101 39147 39063
+39151 8
+39157 39075 39029 39019 39099 39133 39169 39153
+17143 6
+17057 17203 17179 17123 17095 17175
+42095 5
+42077 34041 42017 42089 42025
+32027 4
+32001 32015 32013 32031
+42107 8
+42075 42043 42077 42025 42011 42079 42097 42037
+18001 5
+18075 18179 39161 39107 18003
+18179 6
+18009 18053 18001 18075 18003 18069
+17203 6
+17179 17105 17113 17099 17123 17143
+39029 7
+39081 54029 39019 42007 42073 39099 39151
+18017 6
+18015 18181 18103 18067 18049 18131
+18181 6
+18157 18007 18017 18015 18131 18073
+49035 6
+49043 49051 49029 49049 49045 49011
+39003 5
+39011 39065 39063 39137 39161
+42063 5
+42129 42021 42033 42065 42005
+19071 6
+31131 19145 29005 19137 19129 31025
+19145 7
+29005 19173 29147 19003 19137 19071 19129
+19173 7
+29147 19159 29227 19175 19003 19145 19137
+19159 7
+29227 19053 29081 19039 19175 19173 19003
+36059 2
+36005 36103
+19051 7
+29197 19007 19177 29199 19101 19179 19135
+19177 7
+29199 19111 29045 19087 19101 19051 19179
+19053 7
+29081 19185 29129 19117 19039 19159 19175
+19185 7
+29129 19007 29171 19135 19117 19053 19039
+34013 5
+34039 34003 34017 34031 34027
+19007 7
+29171 19051 29197 19179 19135 19185 19117
+42109 5
+42099 42067 42087 42097 42119
+49047 8
+49019 49015 49007 49013 8045 8103 8081 49009
+42007 6
+54029 42019 42003 42125 42073 39029
+42087 5
+42061 42067 42109 42027 42119
+49013 6
+49049 49051 49047 49007 49009 49043
+34017 3
+34003 36005 34013
+19111 6
+29045 17067 17071 19057 19177 19087
+39065 7
+39091 39011 39175 39101 39159 39063 39003
+31131 7
+31097 31067 29005 19071 31127 31109 31025
+42077 6
+42011 42095 42017 42091 42107 42025
+34019 5
+42017 34035 34021 34027 34041
+36103 1
+36059
+17113 8
+17039 17147 17107 17179 17019 17053 17105 17203
+34035 5
+34021 34023 34039 34027 34019
+18015 5
+18023 18157 18067 18017 18181
+17179 6
+17125 17057 17113 17107 17203 17143
+42013 5
+42009 42021 42061 42027 42033
+18007 7
+18171 17183 18181 18157 18073 17075 18111
+8095 5
+8125 31135 31029 8115 8075
+42061 7
+42057 42009 42067 42087 42055 42027 42013
+42021 6
+42111 42129 42013 42009 42033 42063
+34039 4
+34023 34013 34027 34035
+39107 6
+39037 18075 39149 39011 39161 18001
+39019 5
+39067 39157 39081 39029 39151
+17057 7
+17169 17109 17179 17125 17143 17187 17095
+39117 5
+39041 39101 39083 39139 39033
+39101 6
+39159 39117 39033 39041 39065 39175
+31151 7
+31095 31169 31059 31067 31109 31159 31185
+31001 8
+31181 31061 31099 31035 31129 31081 31079 31019
+31035 8
+31129 31181 31059 31169 31185 31081 31001 31079
+31059 8
+31129 31169 31151 31095 31159 31185 31035 31081
+42067 6
+42055 42099 42097 42109 42061 42087
+31085 6
+31087 31057 31029 31063 31111 31135
+31029 5
+31057 8125 31085 31135 8095
+31063 7
+31145 31087 31073 31065 31047 31085 31111
+31073 4
+31065 31137 31047 31063
+39011 7
+39037 39149 39091 39065 39003 39107 39161
+31099 6
+31083 31061 31137 31001 31181 31019
+49051 4
+49049 49013 49043 49035
+31137 7
+31065 31083 31099 31061 31019 31073 31047
+42011 6
+42071 42075 42091 42029 42077 42107
+42129 8
+42051 42125 42003 42021 42063 42111 42005 42019
+42003 5
+42125 42129 42005 42019 42007
+18053 9
+18095 18159 18067 18179 18009 18035 18069 18103 18169
+39075 6
+39031 39083 39151 39157 39169 39005
+42043 7
+42041 42099 42075 42071 42133 42107 42097
+39157 6
+39059 39031 39067 39019 39151 39075
+17067 7
+17001 29111 29045 17109 17169 17071 19111
+17109 5
+17057 17169 17187 17067 17071
+42099 6
+42055 42041 42043 42067 42097 42109
+54029 5
+54009 39081 42007 42125 39029
+42017 8
+42101 42091 34021 34005 34019 42077 42095 34041
+29045 6
+29103 29199 17067 19111 29111 19177
+29199 6
+29103 29001 29197 29045 19177 19051
+29197 5
+29001 29171 29199 19051 19007
+34023 4
+34021 34025 34039 34035
+39081 7
+39013 39067 54009 54069 54029 39029 39019
+18009 4
+18035 18075 18179 18053
+29171 6
+29211 29129 29197 29001 19007 19185
+29005 7
+31147 31127 29147 29087 19145 31131 19071
+18075 7
+18135 18035 39107 39037 18001 18009 18179
+49049 7
+49039 49023 49013 49007 49051 49035 49045
+29147 7
+29003 29087 29075 29227 19173 29005 19145
+29129 6
+29079 29081 29171 29211 19185 19053
+18067 6
+18159 18023 18053 18103 18015 18017
+29081 7
+29061 29075 29227 29129 29079 19053 19159
+18157 7
+18107 18045 18171 18023 18015 18181 18007
+29227 5
+29075 29081 19159 29147 19173
+39083 7
+39089 39041 39031 39075 39005 39117 39139
+31127 6
+31133 31097 29087 29005 31147 31131
+42075 4
+42011 42071 42107 42043
+39091 5
+39021 39149 39159 39065 39011
+31067 8
+20201 31095 31097 31133 20117 31131 31109 31151
+31097 5
+31133 31127 31131 31067 31109
+8087 4
+8001 8121 8075 8123
+39159 7
+39097 39021 39041 39049 39101 39091 39065
+17183 8
+17041 17045 17019 18165 18171 18007 17075 17053
+18171 5
+18165 18045 18157 18007 17183
+39149 6
+39109 39037 39091 39021 39011 39107
+42125 9
+42059 54051 54069 54009 42129 42051 42003 54029 42007
+8049 8
+8037 8117 8013 8047 8019 8069 8107 8057
+39031 6
+39119 39089 39157 39059 39075 39083
+6103 6
+6021 6045 6063 6007 6089 6105
+34025 4
+34029 34005 34021 34023
+6063 6
+6115 6091 6007 6035 6103 6089
+39041 6
+39049 39083 39089 39117 39159 39101
+42091 6
+42045 42101 42029 42017 42011 42077
+17125 6
+17017 17129 17169 17107 17179 17057
+18023 7
+18011 18107 18159 18057 18067 18015 18157
+8125 7
+8063 8121 20023 31029 31057 8095 8075
+8121 7
+8073 8005 8001 8125 8063 8075 8087
+39067 5
+39059 39081 39013 39019 39157
+34021 6
+34023 34025 34035 34005 42017 34019
+18159 5
+18057 18095 18053 18067 18023
+17019 6
+17041 17147 17183 17045 17053 17113
+54009 4
+54069 42125 54029 39081
+18095 6
+18059 18057 18035 18065 18053 18159
+18035 6
+18065 18075 18135 18009 18095 18053
+29075 6
+29063 29003 29081 29061 29227 29147
+29211 6
+29115 29079 29001 29121 29171 29129
+18045 5
+18121 18165 18157 18107 18171
+39037 9
+39135 18177 18135 39109 39113 39149 39011 39107 18075
+31169 7
+20157 31129 31151 31095 20201 31059 31035
+31095 6
+20157 31067 31151 20201 31169 31059
+29001 7
+29115 29199 29103 29197 29121 29211 29171
+31057 6
+20023 31085 31087 20153 31029 8125
+31061 7
+20147 31083 31001 31181 31099 20183 31137
+31181 7
+20183 31035 31129 20089 31001 31061 31099
+31129 7
+20089 31059 31169 20157 31035 31181 31001
+31087 5
+20153 31063 31145 31085 31057
+31083 6
+20147 20137 31065 31099 31061 31137
+31065 8
+20039 20137 31145 31137 31083 20147 31073 31063
+31145 5
+20039 20153 31065 31063 31087
+17107 7
+17167 17129 17115 17039 17113 17125 17179
+42009 6
+24001 42111 42057 42061 42013 42021
+42041 5
+42001 42055 42043 42133 42099
+18135 5
+18177 18065 39037 18075 18035
+42071 7
+24025 42133 42029 24015 42011 42075 42043
+29103 6
+29121 29045 29111 29205 29199 29001
+42055 8
+24043 42057 42001 24021 42099 42041 42067 42061
+17147 6
+17139 17115 17039 17019 17041 17113
+17039 4
+17115 17147 17113 17107
+42111 6
+24023 42051 42009 24001 42021 42129
+17169 7
+17009 17001 17125 17057 17017 17109 17067
+39089 7
+39045 39049 39119 39127 39031 39083 39041
+39021 6
+39023 39109 39159 39097 39091 39149
+29087 6
+31147 29003 20043 29147 31127 29005
+31147 7
+20013 20131 31133 20043 29087 29005 31127
+31133 6
+20117 31127 31147 20131 31097 31067
+29079 6
+29117 29061 29211 29115 29129 29081
+8013 6
+8059 8047 8001 8123 8069 8049
+29111 6
+29127 29205 17001 17067 29103 29045
+42029 6
+24015 10003 42045 42091 42071 42011
+18057 6
+18097 18011 18059 18095 18159 18023
+42133 8
+24005 24013 42001 24015 24025 42071 42041 42043
+18107 7
+18121 18011 18063 18133 18023 18157 18045
+8103 4
+8045 8107 49047 8081
+39059 6
+39121 39119 39013 39067 39157 39031
+17001 6
+29127 17009 17149 17169 17067 29111
+39109 5
+39113 39023 39021 39149 39037
+54069 5
+54051 39013 42125 54009 39081
+18011 5
+18063 18057 18097 18023 18107
+34005 7
+34001 34007 42101 34029 34025 34021 42017
+18165 6
+17045 18121 18167 18171 18045 17183
+17129 4
+17017 17107 17167 17125
+39013 7
+39111 39121 54051 54069 39081 39059 39067
+34029 3
+34001 34025 34005
+42057 5
+24001 42055 24043 42061 42009
+39119 6
+39127 39059 39121 39115 39031 39089
+6007 6
+6101 6011 6021 6115 6063 6103
+29061 6
+29025 29063 29117 29079 29081 29075
+39049 6
+39129 39097 39089 39045 39041 39159
+42051 7
+54077 54061 42059 42111 24023 42129 42125
+42101 6
+34015 42045 34005 34007 42017 42091
+29003 6
+29021 20043 29063 29075 29147 29087
+17017 6
+17009 17129 17167 17137 17125 17169
+32033 7
+32017 32023 49027 49023 49045 32007 32011
+39097 7
+39047 39057 39023 39049 39129 39159 39021
+17009 5
+17149 17017 17137 17169 17001
+8045 7
+8077 49019 8037 8097 8107 8103 49047
+18065 7
+18139 18059 18177 18041 18135 18035 18095
+42001 5
+24021 42133 24013 42041 42055
+42045 5
+10003 42101 34015 42091 42029
+17115 7
+17021 17167 17147 17139 17173 17039 17107
+29063 6
+29049 29021 29061 29025 29075 29003
+29121 8
+29175 29041 29115 29205 29137 29103 29001 29211
+39023 5
+39057 39113 39097 39021 39109
+29115 6
+29117 29121 29041 29001 29211 29079
+54051 6
+54103 39111 42059 42125 54069 39013
+42059 5
+54103 42051 54061 42125 54051
+18177 6
+18161 18041 39037 39135 18135 18065
+49023 5
+49027 49039 49049 32033 49045
+32001 6
+32021 32019 32023 32015 32027 32031
+20137 6
+20065 20179 20039 20147 31083 31065
+20147 7
+20065 20183 20163 31061 31083 20137 31065
+20153 7
+20193 20181 20023 20039 31145 31087 31057
+20023 5
+20181 8063 20153 31057 8125
+20039 6
+20179 20193 20137 31065 31145 20153
+20157 6
+20029 20089 20201 31095 31169 31129
+20043 6
+20005 20013 29003 29021 31147 29087
+6045 6
+6033 6097 6021 6103 6105 6023
+20183 6
+20141 20163 20089 31181 20147 31061
+20089 7
+20123 20141 20157 20029 31129 20183 31181
+20201 8
+20027 20029 20117 20161 31067 20157 31095 31169
+8001 7
+8005 8031 8059 8121 8087 8123 8013
+20131 6
+20149 20117 20013 20085 31147 31133
+20117 6
+20161 20131 20149 31133 20201 31067
+20013 5
+20085 20043 20005 31147 20131
+34007 4
+34015 34001 34005 42101
+17167 8
+17117 17137 17115 17107 17021 17135 17017 17129
+29117 6
+29025 29115 29041 29033 29079 29061
+18121 6
+18167 18107 18133 18021 18045 18165
+29205 6
+29175 29127 29111 29137 29121 29103
+18059 6
+18145 18097 18065 18139 18095 18057
+29127 6
+29137 17149 17001 29173 29111 29205
+39121 6
+39115 39013 39111 39059 39167 39119
+18097 7
+18109 18081 18063 18059 18145 18057 18011
+39045 5
+39129 39127 39089 39073 39049
+18063 5
+18109 18133 18097 18011 18107
+8047 4
+8019 8059 8013 8049
+39127 6
+39073 39115 39119 39009 39045 39089
+39113 7
+39017 39135 39023 39057 39109 39165 39037
+39135 5
+18161 39113 39037 39017 18177
+8117 5
+8065 8037 8093 8019 8049
+8037 6
+8097 8065 8117 8049 8045 8107
+8059 9
+8093 8019 8035 8119 8001 8031 8005 8013 8047
+17045 7
+17023 17029 17041 18167 18165 17183 17019
+17041 6
+17029 17139 17183 17045 17019 17147
+34015 7
+34033 10003 34001 34011 34007 42101 42045
+17137 7
+17061 17171 17149 17167 17117 17009 17017
+18133 6
+18119 18021 18109 18063 18121 18107
+39111 6
+39167 54051 54103 54095 39013 39121
+39057 6
+39027 39165 39097 39047 39023 39113
+17149 9
+29163 29173 17061 17013 17137 17171 17009 29127 17001
+8019 5
+8059 8047 8093 8117 8049
+10003 7
+24029 24015 34015 34033 10001 42045 42029
+17021 4
+17135 17173 17115 17167
+29021 6
+29165 20005 29063 29049 29003 20043
+49007 6
+49015 49039 49047 49019 49049 49013
+49039 6
+49041 49027 49015 49007 49049 49023
+39129 6
+39141 39047 39073 39045 39049 39097
+8031 3
+8001 8005 8059
+6021 5
+6011 6033 6007 6103 6045
+17139 5
+17173 17029 17041 17147 17115
+17171 3
+17137 17061 17149
+18139 6
+18031 18145 18041 18047 18065 18059
+18041 5
+18161 18047 18177 18139 18065
+29025 6
+29177 29049 29117 29033 29061 29063
+34033 3
+34015 34011 10003
+6091 5
+6057 6115 32031 6035 6063
+39115 5
+39009 39167 39121 39127 39119
+29049 6
+29047 29165 29177 29025 29063 29021
+32019 7
+32005 32510 32029 32021 6051 32001 32031
+18161 5
+18047 39017 39135 18177 18041
+8005 7
+8035 8039 8121 8073 8001 8031 8059
+34001 6
+34009 34011 34029 34005 34015 34007
+24043 8
+54037 54003 54065 24001 24021 51107 42055 42057
+24001 8
+54027 54057 24023 24043 54065 42057 42009 42111
+24015 6
+24029 24025 10003 42029 42133 42071
+24023 7
+54093 54023 54077 24001 54057 42111 42051
+24025 4
+24005 24015 42071 42133
+54061 6
+54091 54049 54103 54077 42051 42059
+54077 7
+54001 54091 24023 54023 54093 42051 54061
+24013 6
+24031 24027 24021 24005 42133 42001
+24005 6
+24003 24027 24510 24025 42133 24013
+54103 8
+54017 54095 54061 54049 54033 42059 54051 39111
+24021 7
+51107 24027 24013 24031 42001 24043 42055
+39047 6
+39071 39027 39129 39141 39097 39057
+29041 7
+29195 29033 29175 29089 29121 29117 29115
+18145 6
+18005 18081 18139 18031 18059 18097
+49015 7
+49041 49019 49037 49055 49047 49007 49039
+54065 5
+51069 54027 54003 24043 24001
+17029 6
+17035 17173 17045 17023 17041 17139
+29173 5
+29007 29137 17149 29163 29127
+29137 6
+29175 29173 29007 29127 29205 29121
+17173 8
+17051 17135 17029 17035 17139 17049 17021 17115
+39073 6
+39163 39141 39009 39127 39129 39045
+20029 6
+20143 20123 20027 20201 20157 20089
+20005 7
+20087 20085 29165 20103 29021 20043 20013
+20085 6
+20177 20149 20005 20087 20013 20131
+54057 5
+54023 54027 24001 54031 24023
+39167 7
+54107 54073 39009 54095 39111 39115 39121
+54049 4
+54091 54033 54061 54103
+18081 5
+18013 18109 18145 18005 18097
+18109 7
+18105 18119 18081 18013 18097 18063 18133
+6115 6
+6101 6057 6061 6091 6063 6007
+32029 3
+32510 32019 32031
+54003 5
+51043 51069 54037 24043 54065
+29033 6
+29107 29177 29041 29195 29025 29117
+18167 6
+18153 17023 18021 18121 17045 18165
+29175 7
+29089 29137 29007 29019 29205 29121 29041
+18021 6
+18153 18133 18119 18055 18167 18121
+54095 6
+54085 54073 54103 54017 39167 39111
+29163 6
+29139 29007 17013 29113 17149 29173
+39165 6
+39025 39061 39017 39027 39057 39113
+39017 7
+18029 18047 39165 39061 39113 18161 39135
+6033 6
+6113 6055 6011 6097 6021 6045
+39027 6
+39015 39025 39047 39071 39057 39165
+8063 7
+8017 8073 20199 20181 20023 8125 8121
+20181 6
+20199 20193 20109 20153 20023 8063
+20193 6
+20109 20179 20063 20039 20153 20181
+20179 5
+20063 20065 20137 20039 20193
+20027 6
+20041 20143 20161 20061 20201 20029
+34011 4
+34001 34009 34033 34015
+8073 9
+8025 8101 8041 8039 8017 8061 8063 8121 8005
+8039 4
+8041 8035 8073 8005
+20161 6
+20061 20197 20149 20117 20027 20201
+8093 8
+8015 8065 8119 8043 8035 8059 8019 8117
+20163 6
+20051 20195 20065 20141 20183 20147
+8035 6
+8119 8039 8041 8005 8093 8059
+20141 7
+20167 20051 20123 20105 20089 20183 20163
+20149 6
+20197 20085 20177 20131 20161 20117
+20065 6
+20063 20195 20163 20147 20137 20179
+20123 5
+20105 20029 20143 20089 20141
+39009 7
+39105 39163 39167 54107 39115 39073 39127
+49027 6
+49001 32017 49041 49039 49023 32033
+54027 6
+54031 54023 54065 51069 24001 54057
+29165 6
+20209 20103 29049 29047 29021 20005
+17135 7
+17119 17005 17117 17051 17173 17021 17167
+17117 6
+17083 17061 17135 17119 17167 17137
+29177 6
+29095 29047 29033 29107 29025 29049
+17061 6
+17083 17013 17117 17137 17171 17149
+49019 8
+49037 49055 8077 8085 8045 49047 49015 49007
+18047 7
+18137 18029 18031 39017 18161 18041 18139
+6057 4
+6061 32031 6091 6115
+39141 7
+39131 39071 39073 39163 39079 39129 39047
+54037 5
+51043 51069 51107 54003 24043
+17023 7
+17079 17033 17035 18167 18153 17045 17029
+54073 4
+54107 54095 54085 39167
+54033 7
+54041 54017 54091 54001 54049 54097 54103
+18119 5
+18055 18109 18105 18133 18021
+51069 9
+51171 51840 54031 51043 51187 54037 54003 54065 54027
+29047 5
+20209 29177 29049 29095 29165
+54017 6
+54021 54085 54041 54033 54103 54095
+54091 5
+54001 54077 54061 54049 54033
+18031 6
+18079 18005 18047 18137 18139 18145
+6011 5
+6101 6113 6007 6021 6033
+20087 5
+20045 20177 20103 20005 20085
+20103 6
+20045 20209 20091 29165 20087 20005
+54107 7
+54035 54105 39105 54085 54073 39167 39009
+29195 6
+29159 29107 29089 29053 29041 29033
+17013 6
+29113 17083 29183 17061 29163 17149
+54085 7
+54013 54105 54017 54021 54095 54107 54073
+39163 6
+39079 39105 39053 39009 39073 39141
+24029 4
+24035 10001 10003 24015
+17035 5
+17079 17049 17023 17029 17173
+8065 5
+8097 8093 8015 8117 8037
+39071 6
+39001 39015 39131 39141 39047 39027
+24510 2
+24005 24003
+10001 5
+24011 24035 10005 24029 10003
+24027 6
+24033 24031 24005 24003 24013 24021
+8077 7
+49037 8085 8029 8051 8097 8045 49019
+8097 6
+8065 8015 8051 8037 8077 8045
+24031 8
+51059 51013 11001 51107 24033 24027 24013 24021
+29007 7
+29027 29019 29139 29163 29173 29175 29137
+29089 5
+29053 29019 29175 29195 29041
+18005 6
+18071 18013 18079 18031 18145 18081
+18013 5
+18105 18005 18071 18081 18109
+18105 6
+18093 18055 18071 18013 18109 18119
+54023 8
+54071 54083 54093 54031 54027 54057 24023 54077
+34009 2
+34001 34011
+51107 8
+51061 51043 24031 51059 51153 24021 54037 24043
+18029 6
+18115 18137 39061 21015 39017 18047
+6061 8
+6067 6101 32510 32005 6017 6057 32031 6115
+39061 7
+21117 21037 21015 39025 39165 18029 39017
+20143 6
+20169 20105 20041 20027 20029 20123
+18137 7
+18077 18079 18029 18115 18155 18047 18031
+6101 6
+6113 6061 6115 6067 6007 6011
+54001 6
+54097 54093 54083 54077 54091 54033
+29107 6
+29101 29095 29195 29159 29033 29177
+24035 4
+24041 24011 10001 24029
+54093 5
+54083 54023 24023 54001 54077
+51043 6
+51187 51107 51061 54037 54003 51069
+39025 7
+21023 21191 21037 39015 39027 39165 39061
+32510 5
+32019 32005 32029 32031 6061
+18153 6
+17033 18055 18083 18021 18167 17023
+17083 5
+29183 17117 17119 17061 17013
+8051 8
+8053 8091 8085 8029 8109 8015 8097 8077
+29019 7
+29135 29053 29027 29051 29007 29089 29175
+39015 6
+21161 21023 39001 39071 39027 39025
+54031 7
+51165 54071 51069 51171 54027 54023 54057
+29095 7
+29037 20091 20209 29107 29101 29177 29047
+24003 5
+24009 24033 24005 24027 24510
+29113 5
+29219 29139 17013 29183 29163
+17051 7
+17027 17121 17005 17025 17049 17173 17135
+20061 5
+20041 20197 20161 20127 20027
+20177 6
+20139 20197 20087 20045 20085 20149
+17049 5
+17079 17025 17035 17051 17173
+20105 6
+20053 20167 20143 20169 20123 20141
+8029 3
+8085 8051 8077
+20197 7
+20111 20127 20177 20139 20149 20061 20161
+51840 1
+51069
+39079 6
+39145 39131 39053 39163 39087 39141
+39105 6
+54053 39053 54107 54035 39009 39163
+20209 5
+20091 29095 29047 29165 20103
+18079 6
+18143 18071 18137 18077 18031 18005
+39131 5
+39145 39001 39079 39141 39071
+17033 6
+17159 17101 17079 18153 18083 17023
+17079 6
+17159 17025 17033 17023 17035 17049
+54105 5
+54035 54013 54085 54087 54107
+18055 8
+18027 18101 18083 18105 18093 18119 18153 18021
+54041 6
+54007 54021 54097 54101 54033 54017
+32023 9
+6027 32009 32021 32003 32017 32033 32001 32011 32015
+29139 6
+29073 29027 29113 29219 29163 29007
+21015 7
+21077 18155 18115 21117 39061 21081 18029
+24011 5
+24019 24041 10005 10001 24035
+20109 6
+20203 20199 20063 20171 20193 20181
+20195 5
+20135 20063 20051 20163 20065
+20051 6
+20165 20135 20167 20141 20163 20195
+20063 8
+20101 20171 20195 20135 20065 20179 20109 20193
+24033 8
+51059 24017 51510 11001 24009 24003 24027 24031
+20199 6
+20071 8017 20109 20203 20181 8063
+20167 6
+20009 20165 20105 20053 20141 20051
+20041 7
+20113 20169 20127 20115 20061 20027 20143
+8041 7
+8101 8043 8119 8073 8025 8039 8035
+8119 5
+8043 8041 8035 8093 8059
+54083 7
+54075 54101 54097 54071 54023 54093 54001
+21037 4
+21191 21117 39025 39061
+54097 5
+54101 54001 54083 54041 54033
+54021 5
+54013 54041 54017 54007 54085
+32005 6
+6003 6017 6051 32019 32510 6061
+51171 5
+51139 51165 51187 51069 54031
+54035 7
+54079 54053 54087 54039 54105 54107 39105
+21117 5
+21081 21037 21191 39061 21015
+32021 5
+6051 32023 32009 32001 32019
+18071 7
+18175 18093 18079 18143 18005 18105 18013
+29027 6
+29151 29051 29073 29139 29007 29019
+20045 7
+20059 20139 20091 20121 20103 20087 20177
+6017 5
+6005 6067 32005 6003 6061
+29053 6
+29141 29159 29019 29135 29089 29195
+8015 6
+8043 8109 8093 8051 8097 8065
+39001 6
+21161 39145 21135 39131 39071 39015
+54013 6
+54087 54007 54015 54021 54085 54105
+20091 7
+20059 20121 29095 29037 20209 20045 20103
+49041 6
+49031 49001 49015 49055 49039 49027
+51187 6
+51139 51157 51043 51061 51171 51069
+8017 5
+8061 20071 20199 8063 8073
+39053 6
+54011 39087 54053 39105 39079 39163
+18115 4
+18155 21015 18029 18137
+17005 4
+17119 17051 17135 17027
+54053 5
+54011 54035 54079 39105 39053
+39145 6
+21135 21089 39087 39079 39131 39001
+51061 7
+51047 51157 51179 51153 51107 51187 51043
+17119 9
+17163 29510 29189 29183 17027 17005 17135 17083 17117
+29219 5
+29073 29183 29071 29113 29139
+18093 6
+18117 18101 18071 18175 18105 18055
+11001 5
+51059 51510 51013 24033 24031
+29183 8
+29071 29510 29189 17119 17083 29219 29113 17013
+20169 6
+20113 20053 20041 20115 20143 20105
+10005 5
+24047 24045 24019 24011 10001
+54071 7
+51091 54075 51165 51015 54031 54023 54083
+24041 3
+24011 24019 24035
+54087 5
+54039 54013 54015 54035 54105
+29159 7
+29015 29083 29101 29141 29053 29195 29107
+29101 5
+29083 29037 29159 29107 29095
+51013 5
+51059 51510 51610 11001 24031
+18155 6
+21041 18077 21015 21077 18115 18137
+6113 6
+6095 6055 6067 6101 6011 6033
+6003 6
+6009 6005 6051 32005 6109 6017
+18077 7
+18019 18143 21041 21223 18155 18137 18079
+29135 5
+29141 29019 29051 29131 29053
+17025 6
+17121 17159 17191 17079 17049 17051
+18083 8
+18051 18125 17185 17101 18027 18055 17033 18153
+18101 5
+18037 18027 18117 18093 18055
+18027 5
+18125 18101 18037 18055 18083
+54007 6
+54067 54015 54101 54041 54013 54021
+51610 2
+51013 51059
+29189 7
+29099 29071 29510 17119 17163 17133 29183
+21191 6
+21081 21023 21097 39025 21037 21117
+21077 5
+21187 21041 21081 21015 18155
+20127 6
+20017 20115 20111 20197 20041 20061
+20139 6
+20031 20111 20059 20045 20177 20197
+20053 6
+20159 20009 20169 20113 20105 20167
+51157 5
+51113 51139 51061 51047 51187
+17101 4
+17185 17159 18083 17033
+17159 7
+17047 17191 17101 17185 17033 17079 17025
+6055 4
+6097 6095 6113 6033
+6097 5
+6041 6095 6055 6033 6045
+51510 4
+51059 24033 11001 51013
+51165 8
+51660 51015 51079 51003 51139 51171 54031 54071
+39087 7
+21019 21089 39053 54011 54099 39145 39079
+29037 6
+29013 20121 29101 29083 29095 20091
+17121 6
+17081 17189 17027 17025 17191 17051
+18143 5
+18175 18077 18019 18079 18071
+51139 6
+51079 51157 51113 51171 51187 51165
+21023 6
+21097 21201 39015 21161 39025 21191
+21081 7
+21187 21191 21097 21209 21117 21077 21015
+29510 4
+17119 17163 29189 29183
+18175 8
+18061 18025 18117 18143 18019 18043 18071 18093
+24009 4
+24017 24037 24003 24033
+21041 6
+21223 21187 21103 21077 18155 18077
+21161 6
+21201 21135 39001 39015 21069 21023
+21089 5
+21043 21135 39087 21019 39145
+17027 6
+17189 17163 17121 17051 17119 17005
+54101 7
+54067 54075 54025 54083 54097 54007 54041
+21223 5
+21185 18019 21041 21103 18077
+6067 7
+6095 6005 6077 6017 6061 6113 6101
+20121 7
+20003 20107 20059 29037 29013 20091 20045
+29051 5
+29131 29027 29151 29135 29019
+54075 6
+54025 51091 51017 54071 54083 54101
+20111 6
+20073 20017 20031 20139 20197 20127
+20059 7
+20003 20031 20121 20107 20091 20045 20139
+21187 6
+21073 21103 21081 21209 21077 21041
+21135 7
+21205 21069 21089 39145 21043 21161 39001
+29073 8
+29161 29125 29151 29071 29055 29219 29139 29027
+29151 5
+29125 29131 29073 29027 29051
+24017 4
+51059 24009 24037 24033
+6051 9
+6019 6039 6109 32009 6027 32021 6003 32005 32019
+29071 7
+29055 29099 29221 29189 29183 29073 29219
+24019 4
+10005 24045 24011 24041
+51047 6
+51137 51113 51179 51177 51061 51157
+6005 5
+6077 6009 6003 6017 6067
+20171 6
+20093 20203 20101 20055 20063 20109
+29141 6
+29015 29131 29029 29135 29053 29159
+20203 6
+20093 20075 20071 20171 20109 20199
+20101 4
+20055 20135 20063 20171
+20135 8
+20083 20055 20165 20145 20051 20195 20101 20063
+20071 6
+20075 8099 8061 20203 20199 8017
+8043 7
+8027 8109 8041 8101 8119 8015 8093
+20165 5
+20009 20145 20167 20051 20135
+20009 6
+20185 20145 20053 20159 20167 20165
+18117 5
+18025 18037 18175 18093 18101
+54079 5
+54043 54011 54035 54039 54053
+32017 8
+32003 4015 49053 49021 49001 49027 32033 32023
+8085 7
+8113 8091 49037 8051 8029 8077 49019
+54015 6
+54019 54039 54007 54067 54087 54013
+17163 7
+17133 17189 17157 17027 17119 29510 29189
+54039 9
+54005 54043 54067 54019 54081 54015 54087 54079 54035
+51113 5
+51079 51137 51047 51157 51139
+8061 7
+8089 8011 8025 20071 8099 8017 8073
+17191 7
+17065 17081 17047 17159 17193 17121 17025
+18019 7
+21111 18043 21223 18077 21185 18175 18143
+20113 7
+20155 20159 20115 20079 20041 20169 20053
+21201 5
+21181 21097 21161 21069 21023
+21103 6
+21211 21073 21185 21187 21223 21041
+20115 7
+20079 20017 20015 20127 20113 20169 20041
+51179 7
+51630 51177 51099 51033 51153 51047 51061
+54011 6
+54099 54043 54079 54053 39053 39087
+51091 4
+51017 51015 54071 54075
+21097 7
+21209 21023 21201 21181 21017 21081 21191
+49001 6
+49021 49031 49017 49041 49027 32017
+17047 4
+17193 17185 17159 17191
+17185 6
+17193 18083 17101 18051 17047 17159
+29083 6
+29185 29013 29015 29159 29101 29037
+54067 6
+54019 54101 54025 54007 54039 54015
+24045 4
+24039 10005 24047 24019
+18125 5
+18173 18051 18037 18027 18083
+29015 6
+29085 29185 29141 29029 29159 29083
+21185 5
+21111 21223 21103 21211 18019
+21069 6
+21011 21181 21205 21135 21201 21161
+18051 7
+18163 18129 17193 18125 18173 18083 17185
+18037 8
+18147 18173 18025 18123 18117 18101 18125 18027
+17133 5
+29186 29099 17157 17163 29189
+8101 8
+8055 8027 8089 8071 8025 8073 8041 8043
+20017 5
+20015 20111 20073 20127 20115
+20159 5
+20185 20113 20155 20053 20009
+8025 5
+8089 8061 8073 8101 8041
+24037 2
+24017 24009
+17189 6
+17145 17157 17081 17121 17027 17163
+21043 6
+21063 21205 21127 21019 21089 21135
+49055 6
+49017 49031 49037 49019 49041 49015
+49031 4
+49055 49017 49041 49001
+6009 5
+6099 6077 6109 6005 6003
+21019 5
+54099 39087 21127 21043 21089
+29099 6
+29221 17133 29186 29187 29189 29071
+49037 14
+4001 4005 4017 49025 49017 8083 35045 8113 8033 8085 8077 49019 49055 49015
+21209 7
+21239 21067 21073 21017 21097 21187 21081
+51015 9
+51163 51125 51820 51790 51017 51003 51165 51091 54071
+17081 6
+17055 17145 17065 17191 17121 17189
+51079 5
+51003 51113 51137 51139 51165
+29013 6
+29217 20107 29083 29185 29037 20121
+51660 1
+51165
+8109 9
+8003 8105 8079 8053 8055 8027 8043 8015 8051
+21181 6
+21017 21011 21069 21173 21201 21097
+32009 4
+32023 6027 6051 32021
+24047 4
+51001 24039 10005 24045
+6109 6
+6043 6099 6051 6039 6009 6003
+20031 7
+20207 20073 20003 20001 20059 20139 20111
+29131 7
+29029 29125 29169 29151 29051 29141 29135
+18025 6
+18123 18061 21163 18175 18117 18037
+18061 6
+21163 18043 21111 21093 18175 18025
+54099 7
+21159 21127 54043 54059 54011 21019 39087
+18043 4
+21111 18019 18061 18175
+51137 6
+51109 51003 51177 51047 51079 51113
+21205 7
+21165 21011 21043 21063 21175 21135 21069
+20107 7
+20011 20001 20003 29013 29217 20121 20059
+20003 7
+20001 20207 20107 20011 20121 20059 20031
+21111 8
+21093 21029 21211 21215 21185 18019 18043 18061
+51177 7
+51109 51630 51033 51085 51179 51137 51047
+51099 4
+51033 51193 51057 51179
+54043 7
+54059 54039 54005 54045 54079 54099 54011
+21073 6
+21005 21211 21209 21239 21187 21103
+21017 7
+21049 21067 21011 21173 21181 21209 21097
+21211 6
+21215 21073 21005 21103 21111 21185
+20145 6
+20047 20083 20185 20009 20165 20135
+8091 5
+8111 8113 8051 8053 8085
+6041 1
+6097
+51630 2
+51179 51177
+21011 6
+21165 21173 21205 21069 21017 21181
+6077 7
+6085 6001 6013 6099 6009 6005 6067
+24039 3
+24047 51001 24045
+29125 5
+29169 29073 29161 29151 29131
+51193 4
+51057 51133 51159 51099
+51003 9
+51029 51125 51540 51109 51065 51137 51079 51015 51165
+21127 7
+21115 21175 21063 54099 21159 21019 21043
+21063 4
+21175 21127 21043 21205
+51017 6
+51005 54025 51015 51091 51163 54075
+54025 8
+54063 54089 54019 51005 51017 54075 54067 54101
+29029 7
+29059 29085 29169 29105 29131 29015 29141
+54019 6
+54081 54025 54089 54067 54015 54039
+8099 6
+8009 8011 20187 20075 20071 8061
+18123 6
+21091 18147 21163 18025 21027 18037
+8011 5
+8071 8089 8099 8009 8061
+17193 8
+17165 17059 17065 18051 18129 17185 17047 17191
+17065 6
+17165 17055 17193 17059 17191 17081
+20075 6
+20187 20093 20067 20203 20071 8099
+20093 7
+20067 20187 20055 20081 20171 20203 20075
+8089 5
+8011 8071 8025 8061 8101
+8027 4
+8101 8055 8043 8109
+20055 8
+20067 20081 20069 20083 20135 20101 20093 20171
+20185 6
+20047 20159 20155 20151 20009 20145
+20083 6
+20057 20069 20145 20047 20135 20055
+51033 7
+51085 51057 51097 51101 51099 51177 51179
+18173 7
+21101 18163 18037 18147 21059 18125 18051
+18129 6
+21225 17059 21101 18163 18051 17193
+54005 5
+54045 54081 54109 54039 54043
+17157 7
+29157 29186 17077 17145 17189 17133 17163
+29185 7
+29039 29217 29015 29085 29167 29083 29013
+17145 5
+17077 17081 17055 17189 17157
+29055 6
+29065 29161 29221 29093 29071 29073
+21067 6
+21113 21239 21049 21017 21151 21209
+29221 5
+29187 29093 29099 29055 29071
+18147 5
+21059 21091 18123 18037 18173
+21163 5
+21027 21093 18061 18123 18025
+21173 6
+21049 21165 21011 21197 21017 21181
+21239 6
+21167 21005 21067 21113 21209 21073
+51790 1
+51015
+18163 4
+18173 18051 21101 18129
+20079 5
+20173 20155 20015 20115 20113
+20155 7
+20095 20151 20173 20079 20113 20185 20159
+20073 7
+20049 20015 20207 20205 20031 20111 20017
+51057 6
+51119 51159 51097 51193 51033 51099
+6095 4
+6067 6113 6097 6055
+29161 6
+29215 29169 29055 29065 29073 29125
+21215 5
+21179 21029 21005 21211 21111
+8113 5
+8033 8111 8091 8085 49037
+51109 6
+51075 51065 51085 51177 51137 51003
+49017 6
+49025 49021 49037 49055 49031 49001
+8053 7
+8067 8111 8109 8079 8007 8051 8091
+49021 5
+49053 49017 49025 49001 32017
+17055 6
+17199 17077 17065 17165 17081 17145
+21005 7
+21229 21179 21239 21073 21167 21215 21211
+29186 5
+29187 17157 29157 17133 29099
+21175 7
+21237 21165 21115 21153 21127 21063 21205
+21029 4
+21093 21215 21179 21111
+21049 6
+21151 21173 21197 21065 21017 21067
+51159 4
+51133 51103 51057 51193
+6013 2
+6001 6077
+51820 1
+51015
+20015 8
+20035 20191 20173 20073 20049 20017 20079 20115
+29187 6
+29123 29093 29157 29186 29221 29099
+6099 7
+6085 6001 6043 6047 6109 6009 6077
+20047 6
+20097 20057 20151 20185 20145 20083
+29085 5
+29167 29029 29059 29015 29185
+51540 1
+51003
+21165 6
+21197 21175 21205 21237 21011 21173
+29217 7
+29011 20037 20011 29039 29185 29013 20107
+21027 6
+21183 21091 21093 21163 21085 18123
+51125 6
+51163 51009 51003 51029 51011 51015
+20207 6
+20205 20001 20133 20003 20031 20073
+20001 7
+20133 20205 20011 20107 20003 20207 20031
+20011 6
+20037 20133 29217 20107 20001 20003
+51001 3
+51131 24047 24039
+51133 3
+51103 51159 51193
+54045 4
+54059 54109 54005 54043
+29169 6
+29105 29161 29215 29125 29029 29131
+21093 9
+21099 21085 21179 21123 21111 21029 21027 21163 18061
+51085 7
+51087 51075 51127 51101 51033 51109 51177
+51065 5
+51049 51029 51109 51075 51003
+8055 6
+8023 8003 8071 8101 8027 8109
+21115 5
+21071 21153 21159 21127 21175
+21113 5
+21167 21151 21079 21067 21239
+20069 5
+20081 20057 20119 20055 20083
+21091 5
+21183 21059 21027 18123 18147
+21179 7
+21155 21123 21229 21005 21215 21093 21029
+54081 6
+54109 54089 54055 54019 54005 54039
+54059 8
+21195 21159 54109 54047 54045 51027 54043 54099
+51097 7
+51095 51127 51101 51119 51073 51057 51033
+21101 7
+21233 21225 21059 21149 18173 18163 18129
+8111 6
+8067 8083 8033 8053 8091 8113
+21167 6
+21021 21229 21113 21079 21239 21005
+8079 4
+8105 8109 8007 8053
+21159 6
+21071 54059 21195 54099 21115 21127
+51005 8
+51045 54063 51580 51560 51163 51023 51017 54025
+17077 7
+29031 29157 17199 17181 17055 17145 17157
+21197 6
+21065 21237 21129 21165 21049 21173
+21059 6
+21149 21091 21183 18147 21101 18173
+17059 6
+17069 17165 18129 21225 17193 17065
+21151 7
+21203 21079 21065 21109 21049 21113 21067
+17165 8
+17087 17151 17199 17059 17069 17193 17065 17055
+51101 4
+51127 51097 51085 51033
+20057 6
+20025 20119 20047 20097 20083 20069
+20173 6
+20191 20095 20015 20035 20079 20155
+21229 5
+21155 21167 21021 21005 21179
+51075 7
+51145 51049 51087 51085 51041 51109 51065
+29157 8
+29017 29123 17181 17077 29031 17157 29187 29186
+21225 6
+21055 17069 21101 21233 18129 17059
+8033 5
+8083 8111 8067 8113 49037
+29059 6
+29077 29167 29105 29029 29225 29085
+6001 4
+6077 6099 6085 6013
+6043 4
+6047 6039 6109 6099
+29039 5
+29057 29011 29167 29185 29217
+21153 6
+21025 21237 21115 21071 21119 21175
+29105 6
+29229 29225 29215 29169 29059 29029
+17199 6
+17087 17181 17165 17151 17055 17077
+54089 5
+54055 54025 54063 54081 54019
+21237 6
+21129 21153 21175 21025 21197 21165
+51560 1
+51005
+21065 5
+21109 21129 21197 21151 21049
+51103 2
+51133 51159
+8105 5
+8007 8003 8021 8109 8079
+21079 6
+21137 21021 21151 21203 21167 21113
+29167 6
+29057 29059 29085 29077 29039 29185
+20151 6
+20007 20097 20095 20155 20047 20185
+51009 6
+51163 51019 51031 51680 51011 51125
+6075 1
+6081
+51580 1
+51005
+8071 8
+35007 35059 8023 8009 8011 8055 8089 8101
+54109 6
+54047 54055 54081 54059 54045 54005
+51023 5
+51161 51045 51019 51163 51005
+51029 6
+51147 51011 51049 51065 51003 51125
+29065 6
+29215 29093 29179 29055 29203 29161
+51119 3
+51073 51097 51057
+6039 5
+6047 6019 6051 6043 6109
+21071 5
+21119 21195 21159 21115 21153
+21195 8
+51195 21133 51051 21119 51027 54059 21071 21159
+51049 6
+51147 51007 51145 51075 51065 51029
+51530 1
+51163
+29093 7
+29179 29123 29223 29187 29065 29221 29055
+8003 5
+8021 8055 8023 8109 8105
+21183 7
+21177 21149 21085 21031 21027 21091 21059
+20187 7
+20129 8009 20093 20067 20189 20075 8099
+20081 7
+20175 20189 20067 20069 20119 20055 20093
+20067 6
+20189 20055 20081 20093 20187 20075
+21123 6
+21099 21155 21217 21179 21087 21093
+20097 6
+20033 20025 20151 20007 20047 20057
+20205 6
+20125 20049 20001 20133 20207 20073
+20133 7
+20099 20125 20011 20037 20001 20205 20207
+21155 6
+21217 21021 21045 21229 21179 21123
+20095 6
+20077 20007 20173 20191 20155 20151
+54063 6
+51071 54055 51045 51005 54025 54089
+21129 6
+21189 21109 21025 21237 21065 21197
+21021 6
+21045 21079 21137 21167 21155 21229
+51087 7
+51041 51760 51145 51036 51127 51085 51075
+6081 3
+6085 6087 6075
+21025 6
+21193 21189 21119 21153 21129 21237
+51145 5
+51007 51087 51041 51075 51049
+21149 6
+21107 21233 21183 21059 21177 21101
+20037 7
+20021 20099 29011 29097 29217 20011 20133
+51045 6
+51121 51071 51023 51161 51005 54063
+21233 6
+21033 21055 21107 21149 21101 21225
+29011 5
+29097 29057 29039 29217 20037
+29123 5
+29223 29017 29157 29187 29093
+8009 7
+40025 35059 20129 20187 8099 8071 8011
+8023 6
+35055 8021 8071 35007 8055 8003
+21137 5
+21045 21203 21199 21079 21021
+8083 6
+4001 8067 35045 8111 8033 49037
+8067 6
+8007 35045 8053 8111 8083 8033
+51127 6
+51036 51095 51097 51101 51087 51085
+6047 6
+6069 6085 6039 6019 6043 6099
+21085 6
+21061 21031 21099 21093 21183 21027
+49053 4
+49025 4015 49021 32017
+17069 6
+21139 17151 21225 17059 21055 17165
+51760 2
+51087 51041
+17151 6
+17127 17087 21139 17069 17165 17199
+29215 8
+29091 29067 29229 29203 29065 29161 29105 29169
+17181 7
+17153 17003 29031 17087 17199 29157 17077
+17087 6
+17153 17151 17127 17165 17199 17181
+29179 5
+29203 29223 29093 29035 29065
+20049 6
+20019 20035 20125 20205 20073 20015
+29017 5
+29223 29031 29207 29157 29123
+29031 7
+29207 17181 17003 29201 17077 29017 29157
+51073 4
+51095 51115 51119 51097
+54055 8
+51021 51185 54047 54063 51071 54089 54109 54081
+29057 6
+29109 29097 29077 29167 29039 29011
+21109 7
+21125 21203 21189 21051 21129 21065 21151
+6019 8
+6031 6053 6069 6027 6107 6051 6047 6039
+21107 5
+21033 21177 21047 21233 21149
+51041 11
+51730 51053 51570 51670 51007 51036 51149 51087 51760 51145 51075
+51011 6
+51031 51147 51029 51037 51009 51125
+51131 1
+51001
+21045 7
+21207 21001 21217 21137 21199 21021 21155
+54047 5
+51027 54055 51185 54109 54059
+21055 6
+21143 21139 21233 21033 21225 17069
+49025 6
+4015 4005 49037 49017 49053 49021
+21189 5
+21051 21025 21193 21129 21109
+21203 6
+21199 21109 21125 21151 21137 21079
+21119 6
+21193 21195 21071 21133 21025 21153
+51027 6
+51167 51051 54047 51185 21195 54059
+51115 1
+51073
+51007 6
+51135 51147 51041 51053 51145 51049
+51036 7
+51670 51149 51095 51181 51127 51041 51087
+29225 6
+29043 29077 29229 29067 29105 29059
+29229 4
+29215 29067 29105 29225
+6085 7
+6087 6047 6069 6099 6077 6081 6001
+21217 5
+21001 21087 21045 21155 21123
+51071 6
+51155 51021 51045 51121 54063 54055
+20191 7
+40053 20077 20035 40071 20015 20173 20095
+20035 7
+40071 20049 20019 40113 20015 20191 20173
+51680 3
+51019 51031 51009
+20119 6
+40007 20175 20025 20057 20081 20069
+20025 6
+40007 20033 40059 20097 20057 20119
+21087 5
+21169 21099 21217 21001 21123
+20007 7
+40151 20033 20077 40003 20095 20151 20097
+51095 7
+51830 51700 51073 51199 51097 51036 51127
+6027 8
+6029 6107 32003 6071 32023 6019 32009 6051
+21099 7
+21009 21061 21087 21169 21123 21093 21085
+21193 7
+21131 21051 21119 21133 21095 21025 21189
+51031 7
+51019 51143 51011 51037 51083 51009 51680
+8007 7
+35045 8021 35039 8105 8067 8079 8053
+29077 6
+29109 29225 29043 29059 29057 29167
+29203 6
+29149 29091 29035 29179 29215 29065
+21139 7
+21145 17127 21143 21055 21157 17069 17151
+21177 7
+21219 21047 21031 21141 21183 21107 21149
+51147 7
+51037 51007 51135 51049 51111 51029 51011
+21031 6
+21141 21061 21085 21227 21177 21183
+8021 6
+8023 8003 35055 35039 8007 8105
+20189 6
+20129 20175 40139 20081 20067 20187
+20129 5
+40025 20189 40139 20187 8009
+20175 5
+40139 20119 40007 20081 20189
+20077 5
+40003 20191 40053 20095 20007
+20125 7
+40147 20019 20099 40105 20133 20205 20049
+20099 6
+40105 20037 20021 40035 20133 20125
+20033 5
+40059 20007 40151 20097 20025
+21033 6
+21221 21143 21107 21047 21233 21055
+51121 6
+51063 51750 51155 51161 51045 51071
+29097 6
+29145 20021 29057 29109 29011 20037
+21199 7
+21231 21207 21125 21147 21203 21045 21137
+21051 7
+21013 21121 21125 21131 21193 21189 21109
+17127 6
+21145 21007 17153 21139 17151 17087
+17003 6
+29133 29201 21007 17153 17181 29031
+51185 6
+51173 51167 54055 51021 51027 54047
+17153 6
+17087 17127 21145 21007 17181 17003
+20021 6
+40115 40035 29097 29145 20037 20099
+21061 5
+21227 21099 21009 21085 21031
+21125 7
+21147 21235 21051 21121 21109 21199 21203
+51149 8
+51730 51053 51570 51670 51181 51036 51183 51041
+51770 1
+51161
+51670 3
+51036 51149 51041
+21131 4
+21013 21095 21193 21051
+21001 6
+21057 21169 21207 21045 21217 21087
+29223 7
+29023 29035 29207 29017 29123 29179 29093
+51021 6
+51173 51155 51071 54055 51197 51185
+51051 4
+51195 51027 51167 21195
+20019 5
+40113 20125 40147 20049 20035
+51135 5
+51111 51053 51025 51007 51147
+51570 3
+51730 51149 51041
+51830 2
+51199 51095
+29109 7
+29009 29145 29043 29209 29077 29057 29097
+6087 4
+6069 6053 6085 6081
+51053 8
+51081 51025 51730 51183 51149 51041 51135 51007
+21133 5
+21095 21195 51195 21193 21119
+29201 5
+29143 29207 29133 17003 29031
+51037 6
+51083 51111 51117 51147 51031 51011
+51181 5
+51175 51183 51093 51149 51036
+51730 4
+51149 51570 51041 51053
+51155 7
+51035 51197 51750 51063 51121 51071 51021
+51700 3
+51650 51199 51095
+51067 6
+51161 51089 51141 51063 51143 51019
+21007 6
+21039 29133 21145 17127 17153 17003
+21145 7
+21083 21039 21157 21139 17127 21007 17153
+51195 8
+51169 51105 51720 21095 51167 51051 21195 21133
+21227 6
+21213 21141 21009 21003 21061 21031
+21207 6
+21053 21057 21199 21231 21045 21001
+21143 5
+21157 21221 21033 21055 21139
+21169 6
+21171 21009 21001 21057 21087 21099
+21009 6
+21003 21169 21171 21099 21227 21061
+21047 7
+47161 21221 21219 47125 21177 21033 21107
+51750 2
+51121 51155
+51093 3
+51175 51800 51181
+51167 7
+51169 51173 51185 51191 51027 51195 51051
+51143 8
+51019 37157 37033 51590 51089 51083 51031 51067
+29207 7
+29069 29023 29201 29143 29031 29223 29017
+51063 6
+51035 51067 51141 51161 51121 51155
+51111 5
+51117 51025 51135 51037 51147
+51183 5
+51081 51181 51175 51053 51149
+51650 2
+51199 51700
+29035 6
+29181 29149 29023 29223 29203 29179
+29043 6
+29209 29067 29213 29225 29109 29077
+29133 7
+29143 21007 21039 21105 21075 17003 29201
+21219 5
+47125 21141 47147 21177 21047
+21141 6
+21227 21213 47147 21031 21219 21177
+51197 5
+51077 51173 51155 51035 51021
+29067 7
+29153 29213 29091 29215 29043 29229 29225
+21157 6
+21035 21083 21143 21221 21145 21139
+29145 6
+29119 40115 29109 29009 29097 20021
+29091 6
+29153 29149 5049 29203 29215 29067
+21121 4
+21235 21051 21013 21125
+51025 7
+37185 51117 51081 51053 37131 51111 51135
+21095 6
+21013 51195 51105 21133 21131 21193
+51173 6
+51191 51197 51077 51185 51021 51167
+21221 6
+47161 21035 21047 21033 21157 21143
+4017 5
+4007 4005 4001 4009 49037
+4005 6
+4025 4015 4017 4007 49037 49025
+40035 7
+40097 40131 40105 40041 40115 20021 20099
+40105 5
+40147 40035 40131 20099 20125
+40115 5
+29145 29119 40041 20021 40035
+40113 7
+40117 40103 40071 40147 40143 20019 20035
+40071 6
+40047 40103 40053 40113 20035 20191
+40147 6
+40131 40143 40105 20125 40113 20019
+35059 8
+35037 35021 35007 48111 48205 40025 8009 8071
+35045 8
+35031 4001 35039 35043 8007 8067 8083 49037
+35039 8
+35028 35049 35043 35033 35055 35045 8021 8007
+40003 6
+40093 40151 40047 40053 20077 20007
+40053 6
+40047 40071 40103 20191 40003 20077
+40025 6
+48111 40139 48421 20129 8009 35059
+40151 6
+40153 40059 40003 40093 20007 20033
+4015 8
+4012 6071 32003 4005 4025 49025 49053 32017
+21231 5
+47137 21053 21147 21199 21207
+40007 8
+48357 48295 40139 40045 40059 20025 20119 20175
+40139 8
+48195 48421 40007 48357 20175 40025 20129 20189
+4001 9
+4009 4011 35003 35006 35031 35045 8083 49037 4017
+40059 6
+40045 40151 40153 20033 40007 20025
+35055 5
+35033 35007 8023 35039 8021
+35007 6
+35033 35059 35021 8071 35055 8023
+29209 5
+5015 29009 29213 29043 29109
+6069 5
+6019 6053 6047 6087 6085
+21235 7
+47013 47151 21147 21013 21121 47025 21125
+21039 5
+21105 21083 21145 21007 29133
+21147 6
+47151 47137 21125 21235 21231 21199
+21013 7
+21095 21131 51105 21051 47025 21235 21121
+51720 1
+51195
+21083 7
+47183 21105 21035 47079 21157 21145 21039
+21057 6
+47027 21171 21207 21053 21001 21169
+21003 6
+47165 21213 21171 21009 47111 21227
+51035 7
+37171 51640 51077 51063 51141 51155 51197
+29009 6
+5007 29119 29209 5015 29109 29145
+51191 7
+47091 47163 51520 51169 51077 51173 51167
+29023 6
+29181 29069 5021 29207 29223 29035
+51800 5
+51175 37073 51710 37029 51093
+6053 5
+6031 6019 6079 6069 6087
+51117 7
+51083 37181 37077 51025 37185 51111 51037
+51105 6
+47025 47067 51195 51169 21013 21095
+21213 5
+47147 21003 21227 47165 21141
+51169 7
+47073 47067 51191 51167 47163 51195 51105
+21053 5
+47027 21231 21207 47137 21057
+29149 7
+5135 5049 29181 5121 29035 29203 29091
+51141 6
+37169 37171 51089 51067 51035 51063
+29143 7
+29155 47095 29069 21075 29133 29201 29207
+32003 5
+6071 4015 32017 6027 32023
+51089 6
+37157 37169 51690 51143 51067 51141
+21171 6
+47111 21057 21169 47027 21003 21009
+29181 5
+5121 29023 5021 29035 29149
+29213 7
+5009 5015 29153 5089 29067 29209 29043
+40153 5
+40045 40093 40151 40043 40059
+51077 9
+37005 37009 47091 51640 37171 51035 51197 51191 51173
+29153 6
+5089 5005 29091 5049 29067 29213
+21105 6
+47131 21075 21083 47183 21039 29133
+29119 5
+40041 29009 5007 29145 40115
+21035 5
+47161 47079 21221 21157 21083
+6107 4
+6031 6027 6029 6019
+51690 1
+51089
+51640 2
+51035 51077
+47161 7
+47005 47079 47125 47083 21047 21221 21035
+40041 7
+40001 40021 40097 29119 5007 40115 40035
+47147 7
+47037 47021 47125 47165 21213 21141 21219
+47165 7
+47037 47111 47169 47189 21003 47147 21213
+47125 7
+47043 47083 47147 47021 21219 47161 21047
+47111 7
+47159 47169 47027 47087 21171 47165 21003
+51590 1
+51143
+29069 8
+5031 5055 5021 29155 5093 29143 29207 29023
+47027 7
+47087 47137 47133 21053 21057 47111 21171
+47137 7
+47133 47049 47151 21147 21231 47027 21053
+51520 2
+51191 47163
+47163 7
+47179 47019 47073 47091 51191 51520 51169
+47091 7
+37011 47019 37009 37189 51077 51191 47163
+40103 7
+40083 40047 40113 40117 40119 40071 40053
+47151 7
+47129 47049 47013 47001 21235 21147 47137
+40131 6
+40145 40143 40097 40035 40147 40105
+47067 5
+47057 47025 47073 51169 51105
+47025 7
+47057 47173 47013 47067 51105 21013 21235
+47013 5
+47001 47025 47173 21235 47151
+40045 7
+40129 48211 48295 40043 40153 40059 40007
+40047 7
+40073 40093 40103 40083 40071 40053 40003
+47073 7
+47063 47059 47057 47163 47179 51169 47067
+37009 5
+37189 37005 37193 51077 47091
+47049 6
+47035 47141 47133 47151 47129 47137
+37005 4
+37193 37171 51077 37009
+37171 8
+37197 37193 37169 37067 51141 51035 37005 51077
+21075 5
+47131 47095 21105 29143 29133
+40117 5
+40119 40143 40037 40113 40103
+37053 3
+51710 37029 37055
+37131 7
+51081 37083 37185 37091 37015 51175 51025
+37091 4
+51175 37015 37073 37131
+37029 5
+37139 37073 37053 51710 51800
+37073 7
+51175 37041 37139 37029 37143 51800 37091
+37185 7
+37069 37181 37131 37083 37127 51025 51117
+37169 6
+37067 37157 37081 51089 51141 37171
+37033 6
+37001 37157 37145 37135 51083 51143
+37157 7
+37081 37067 37033 37001 51143 51089 37169
+37077 7
+51083 37063 37145 37069 37183 37181 51117
+37145 5
+37135 37077 37063 51083 37033
+37181 4
+37185 37069 51117 37077
+47133 5
+47141 47087 47049 47137 47027
+47087 5
+47159 47133 47027 47141 47111
+40097 5
+40145 40021 40041 40035 40131
+37083 7
+37065 37127 37069 37015 37117 37131 37185
+40093 7
+40043 40011 40047 40073 40003 40153 40151
+37139 3
+37143 37029 37073
+47019 6
+37121 47171 47179 47091 37011 47163
+47095 5
+29155 47131 21075 47045 29143
+47131 6
+47045 47183 21105 47053 21075 47095
+47183 6
+47053 47017 47079 21083 47131 21105
+47079 6
+47017 47005 47161 47183 21035 21083
+47169 4
+47189 47111 47159 47165
+48295 5
+48211 48393 48357 40045 40007
+48421 7
+48341 48205 48111 48195 48233 40139 40025
+5021 5
+5055 5121 29069 29181 29023
+48111 5
+48205 48421 48341 40025 35059
+48195 6
+48341 48233 48357 48393 40139 48421
+5005 6
+5129 5089 5065 5137 5049 29153
+5049 6
+5065 29149 5135 5005 29153 29091
+5135 6
+5063 5065 5075 5121 29149 5049
+5121 6
+5075 5021 5055 29181 5135 29149
+5007 7
+40001 5143 5015 5087 29009 40041 29119
+5089 5
+5129 5009 5005 29153 29213
+5015 7
+5087 29213 5009 5101 29209 5007 29009
+5009 5
+5101 5089 5129 29213 5015
+48357 7
+48233 48393 48295 48211 40007 48195 40139
+6031 5
+6079 6107 6029 6019 6053
+47021 5
+47043 47037 47187 47147 47125
+47179 5
+47171 47059 47019 47163 47073
+47173 5
+47093 47001 47057 47025 47013
+40143 7
+40111 40037 40131 40145 40117 40147 40113
+37193 8
+37003 37027 37189 37197 37097 37171 37005 37009
+29155 5
+5093 47045 47095 29143 29069
+47159 6
+47189 47141 47041 47087 47111 47169
+47057 7
+47093 47089 47073 47067 47063 47025 47173
+47059 6
+47029 47063 47171 37115 47179 47073
+47037 6
+47187 47189 47149 47165 47147 47021
+37189 5
+37011 37193 37009 37027 47091
+37143 3
+37041 37139 37073
+47083 5
+47085 47005 47043 47125 47161
+47129 5
+47035 47001 47145 47151 47049
+47005 7
+47039 47017 47085 47135 47083 47161 47079
+47189 7
+47149 47041 47159 47015 47169 47037 47165
+47063 5
+47089 47059 47073 47029 47057
+37041 2
+37143 37073
+47043 6
+47081 47085 47187 47021 47125 47083
+47141 7
+47185 47041 47035 47049 47133 47159 47087
+5087 7
+5033 5047 5143 5101 5071 5015 5007
+47001 7
+47105 47145 47093 47173 47013 47129 47151
+37011 7
+37111 37121 37027 37023 37189 47091 47019
+37197 5
+37097 37067 37059 37171 37193
+5055 5
+5031 5075 29069 5021 5121
+35033 6
+35049 35021 35047 35007 35055 35039
+5075 6
+5067 5063 5055 5031 5121 5135
+37069 7
+37101 37183 37083 37127 37185 37181 37077
+5065 5
+5137 5135 5063 5049 5005
+47171 6
+37115 37121 37199 47019 47179 47059
+37067 7
+37057 37059 37081 37157 37169 37197 37171
+40119 5
+40081 40083 40037 40117 40103
+47085 5
+47135 47043 47081 47083 47005
+37081 6
+37151 37057 37001 37157 37067 37169
+37001 6
+37151 37135 37037 37033 37081 37157
+37015 5
+37117 37187 37091 37083 37131
+5143 4
+5033 40001 5087 5007
+37135 5
+37063 37037 37145 37001 37033
+37063 5
+37037 37077 37183 37135 37145
+47053 6
+47033 47045 47017 47183 47113 47131
+35021 5
+35047 35037 35059 35033 35007
+35043 7
+35006 35031 35049 35001 35028 35039 35045
+47045 7
+47097 5093 47053 47033 47131 29155 47095
+37127 7
+37101 37195 37183 37065 37083 37069 37185
+47093 8
+47009 47105 47145 47057 47089 47155 47173 47001
+47089 5
+47155 47029 47063 47093 47057
+47029 6
+47155 37115 47059 37087 47089 47063
+40021 6
+40101 40145 40001 40135 40041 40097
+40037 6
+40107 40081 40111 40143 40119 40117
+47035 8
+47007 47175 47143 47185 47145 47129 47049 47141
+40001 6
+5033 40135 5143 5007 40041 40021
+40073 6
+40017 40011 40083 40109 40047 40093
+40083 7
+40109 40017 40081 40119 40103 40073 40047
+40145 6
+40111 40021 40097 40101 40131 40143
+40011 6
+40039 40043 40017 40015 40073 40093
+40043 6
+40039 40129 40011 40093 40045 40153
+47017 7
+47113 47077 47005 47039 47079 47053 47183
+37121 5
+37199 37011 37111 47019 47171
+37065 5
+37195 37117 37147 37083 37127
+47041 6
+47015 47185 47141 47177 47189 47159
+5101 6
+5115 5071 5129 5009 5087 5015
+5129 7
+5115 5137 5005 5089 5141 5101 5009
+37027 6
+37023 37003 37193 37035 37011 37189
+5137 6
+5023 5141 5063 5065 5129 5005
+47149 7
+47003 47117 47187 47015 47031 47189 47037
+47185 5
+47175 47177 47035 47141 47041
+37199 5
+37021 37115 37111 37121 47171
+37117 6
+37147 37013 37187 37015 37065 37083
+37183 7
+37085 37037 37101 37127 37069 37063 37077
+48393 8
+48179 48065 48233 48211 48483 48295 48357 48195
+48211 7
+48483 48179 40129 40045 48295 48393 48357
+47145 8
+47107 47121 47143 47105 47093 47001 47035 47129
+48233 7
+48065 48375 48341 48393 48357 48195 48421
+48205 7
+48359 35037 48341 48375 48421 48111 35059
+47187 7
+47117 47119 47081 47149 47037 47043 47021
+48341 8
+48359 48375 48233 48065 48195 48421 48205 48111
+37115 6
+37087 47171 37199 37021 47029 47059
+37097 9
+37109 37035 37003 37159 37025 37119 37059 37197 37193
+37059 5
+37067 37057 37159 37097 37197
+47155 6
+47009 37087 37173 47029 47089 47093
+37003 4
+37035 37097 37193 37027
+37057 7
+37167 37159 37151 37123 37081 37067 37059
+40129 6
+48483 40039 40009 40043 40045 48211
+35049 7
+35001 35028 35047 35057 35033 35039 35043
+35031 4
+35043 35006 35045 4001
+47081 6
+47101 47135 47119 47187 47043 47085
+5093 9
+5035 47157 5111 5031 47097 47167 47045 29155 29069
+47033 5
+47075 47097 47053 47113 47045
+37023 7
+37045 37161 37111 37035 37109 37027 37011
+5031 6
+5111 5067 29069 5093 5055 5075
+35028 3
+35049 35039 35043
+47015 5
+47031 47177 47041 47149 47189
+37187 5
+37013 37177 37095 37117 37015
+37177 2
+37095 37187
+37111 6
+37021 37023 37161 37011 37199 37121
+47097 5
+47167 47075 47033 47045 5093
+40081 6
+40125 40109 40037 40107 40119 40083
+5063 7
+5145 5023 5067 5075 5135 5137 5065
+37151 6
+37123 37037 37125 37001 37081 37057
+47105 6
+47107 47009 47123 47093 47001 47145
+5067 7
+5147 5145 5111 5037 5031 5075 5063
+35047 6
+35057 35019 35037 35021 35049 35033
+47009 6
+37075 47123 47155 37173 47093 47105
+37037 8
+37125 37105 37183 37085 37063 37151 37135 37001
+40101 6
+40091 40111 40135 40021 40061 40145
+40111 6
+40107 40101 40091 40145 40143 40037
+47177 7
+47061 47031 47175 47153 47185 47015 47041
+47039 6
+47071 47077 47135 47181 47005 47017
+47119 6
+47055 47099 47101 47117 47187 47081
+37195 6
+37191 37101 37147 37079 37065 37127
+37159 6
+37025 37123 37057 37167 37059 37097
+47135 6
+47181 47081 47101 47085 47039 47005
+47143 5
+47065 47121 47007 47145 47035
+47175 5
+47153 47007 47035 47185 47177
+37147 7
+37107 37079 37013 37117 37049 37195 37065
+47077 5
+47023 47113 47039 47071 47017
+37035 6
+37109 37045 37097 37003 37023 37027
+47075 6
+47047 47167 47113 47069 47033 47097
+37021 7
+37175 37089 37087 37111 37161 37199 37115
+37101 7
+37163 37085 37195 37191 37127 37069 37183
+40039 6
+40149 40009 40011 40015 40043 40129
+6029 8
+6037 6083 6111 6079 6071 6027 6031 6107
+6071 8
+6037 6059 6065 4012 4015 32003 6029 6027
+6079 4
+6083 6029 6031 6053
+47113 7
+47069 47077 47023 47017 47075 47033 47053
+5141 6
+5029 5115 5023 5045 5137 5129
+37087 8
+37099 37173 37089 37021 37175 37115 47155 47029
+47007 5
+47065 47153 47143 47035 47175
+5047 5
+5131 5033 5083 5071 5087
+5071 5
+5083 5115 5101 5047 5087
+37055 2
+37095 37053
+5033 7
+40079 5131 40135 5047 5087 5143 40001
+47121 5
+47011 47065 47145 47107 47143
+35037 10
+35041 35011 35009 35019 48117 48359 48205 35059 35047 35021
+37013 6
+37049 37137 37095 37187 37147 37117
+5115 7
+5149 5083 5029 5141 5129 5101 5071
+40017 8
+40051 40015 40109 40027 40087 40083 40073 40011
+40109 6
+40027 40081 40125 40083 40017 40073
+47117 6
+47055 47003 47149 47103 47187 47119
+5023 5
+5045 5063 5145 5137 5141
+47031 7
+47051 47127 47003 47061 47177 47015 47149
+5111 5
+5037 5093 5035 5031 5067
+47003 5
+47103 47127 47031 47149 47117
+37173 7
+37113 37075 47123 37087 37099 47009 47155
+47123 7
+47139 47107 37075 37039 37173 47009 47105
+47101 5
+47099 47181 47119 47081 47135
+37079 4
+37191 37147 37107 37195
+47107 6
+47139 47011 47105 47123 47145 47121
+40107 7
+40063 40133 40125 40091 40111 40037 40081
+40135 7
+40079 40061 5033 5131 40101 40001 40021
+47167 5
+47157 47075 47047 47097 5093
+48375 7
+48381 48359 48065 48011 48233 48341 48205
+48065 8
+48011 48381 48179 48129 48393 48233 48375 48341
+48179 7
+48129 48011 48483 48087 48211 48393 48065
+48483 7
+48129 48087 40009 40129 48211 48179 48393
+48359 6
+48117 48375 48381 48341 48205 35037
+37105 3
+37125 37085 37037
+37161 8
+45083 37149 37089 37045 45021 37023 37021 37111
+47023 5
+47109 47069 47071 47077 47113
+37191 6
+37163 37107 37061 37079 37195 37101
+37085 7
+37051 37125 37163 37101 37183 37105 37037
+47153 6
+47115 47061 47065 47007 47175 47177
+37045 7
+45021 37071 45091 37035 37109 37023 37161
+40091 6
+40063 40121 40061 40101 40107 40111
+37109 6
+37071 37097 37119 37035 37045 37023
+40015 7
+40031 40075 40149 40051 40017 40039 40011
+47061 5
+47051 47153 47115 47177 47031
+4025 5
+4012 4007 4013 4005 4015
+5145 7
+5085 5117 5045 5147 5067 5063 5023
+37099 6
+13241 45073 37113 37175 37087 37173
+37125 9
+37165 37153 37123 37051 37085 37093 37105 37037 37151
+37119 7
+45057 45091 37071 37025 37179 37109 37097
+37025 5
+37179 37167 37159 37119 37097
+40009 8
+40057 40055 48087 40149 40075 40039 48483 40129
+37123 7
+37007 37167 37125 37153 37151 37159 37057
+37167 6
+37179 37123 37007 37057 37025 37159
+47181 6
+47071 47099 1077 47101 47135 47039
+37089 6
+37175 37161 37149 45045 37021 37087
+47157 6
+28033 5035 47047 28093 47167 5093
+40149 4
+40075 40015 40039 40009
+40125 7
+40087 40027 40133 40123 40107 40081 40109
+40061 6
+40077 40121 40079 40135 40091 40101
+47065 9
+13295 13083 13047 47115 47011 47143 47121 47007 47153
+5029 5
+5105 5149 5045 5141 5115
+47099 5
+1077 47055 47119 47101 47181
+47055 6
+1083 1077 47103 47117 47119 47099
+37075 5
+37039 37113 37173 47009 47123
+40133 4
+40123 40063 40107 40125
+5131 6
+40079 5047 5083 5127 5033 40135
+5037 5
+5147 5035 5123 5111 5067
+5147 6
+5117 5037 5123 5095 5067 5145
+5035 8
+28143 5077 5123 47157 28033 5093 5037 5111
+47069 8
+28009 47047 47109 28003 28139 47023 47113 47075
+47071 8
+28141 28003 47109 47181 1077 47039 47023 47077
+5083 6
+5127 5115 5149 5071 5131 5047
+37107 6
+37061 37049 37103 37147 37191 37079
+47127 4
+47103 47051 47031 47003
+37175 7
+45073 37089 45045 45077 37021 37099 37087
+37071 4
+45091 37119 37109 37045
+47047 6
+28093 47069 28009 47075 47157 47167
+40079 9
+40089 40127 40077 5127 5113 5131 5033 40135 40061
+37149 4
+45045 37161 45083 37089
+47109 4
+28003 47071 47023 47069
+40027 5
+40051 40125 40087 40109 40017
+47103 7
+1089 1083 47051 47127 47003 47055 47117
+40051 7
+40031 40049 40137 40027 40087 40017 40015
+5045 7
+5105 5145 5085 5119 5023 5029 5141
+47051 7
+1089 47061 47115 1071 47031 47103 47127
+47011 7
+13047 13213 13313 47139 47107 47121 47065
+40087 6
+40049 40123 40125 40051 40027 40017
+47115 6
+13083 1071 47065 47153 47051 47061
+37113 7
+13241 37043 37039 37099 45073 37173 37075
+37163 7
+37017 37051 37061 37141 37191 37101 37085
+40121 7
+40005 40029 40063 40077 40127 40061 40091
+37137 2
+37049 37013
+5149 7
+5097 5127 5105 5051 5029 5115 5083
+40063 6
+40029 40123 40121 40091 40107 40133
+37039 7
+13111 13291 47139 37113 37075 37043 47123
+47139 6
+13213 37039 47123 13111 47107 47011
+37051 6
+37155 37093 37163 37017 37085 37125
+37103 5
+37133 37061 37031 37049 37107
+35001 4
+35006 35049 35057 35043
+35019 5
+35027 35057 35011 35037 35047
+45045 8
+45001 45007 45077 45083 45059 37149 37175 37089
+37179 6
+45057 37007 37167 45025 37025 37119
+37007 6
+45069 37153 45025 37123 37179 37167
+37093 5
+37165 37153 37051 37155 37125
+45083 6
+45059 45087 45021 37161 45045 37149
+37095 4
+37055 37013 37177 37187
+37061 6
+37133 37103 37141 37107 37163 37191
+45021 5
+45087 45091 37045 45083 37161
+48381 7
+48069 48117 48011 48437 48065 48375 48359
+48011 7
+48437 48129 48045 48179 48065 48381 48375
+48087 7
+48191 48075 48129 40057 40009 48483 48179
+48129 7
+48191 48045 48087 48483 48179 48011 48065
+48117 6
+48369 35009 48381 48069 48359 35037
+37153 7
+45069 45025 37165 37093 37125 37007 37123
+45091 7
+45023 45087 45057 37119 37071 45021 37045
+37043 5
+13291 13281 13241 37113 37039
+5123 5
+5077 5095 5035 5147 5037
+40075 7
+40065 40055 40031 40141 40015 40149 40009
+40055 4
+40057 40075 40065 40009
+5105 6
+5051 5119 5125 5045 5029 5149
+5127 6
+5113 5149 5097 5083 40079 5131
+5117 5
+5085 5095 5001 5147 5145
+45077 4
+45073 45045 45007 37175
+5085 6
+5119 5001 5117 5069 5145 5045
+40077 4
+40127 40079 40061 40121
+37049 6
+37137 37013 37031 37103 37107 37147
+45057 7
+45039 45023 45025 45055 37179 37119 45091
+45073 10
+13119 13257 13137 13241 45077 45007 13147 37175 37099 37113
+35057 7
+35006 35053 35019 35027 35047 35001 35049
+37165 5
+45069 37155 37093 37125 37153
+40057 6
+48075 40055 40065 48197 40009 48087
+5119 6
+5053 5125 5085 5069 5105 5045
+1077 8
+1033 28141 1083 1079 47055 47099 47071 47181
+1083 6
+1079 1089 1103 47103 47055 1077
+5095 6
+5001 5107 5077 5123 5117 5147
+28003 6
+28117 28139 28141 47071 47109 47069
+28141 7
+28057 28117 1033 1077 1059 47071 28003
+28139 5
+28145 28009 28117 28003 47069
+28009 5
+28093 28139 28145 47069 47047
+28033 5
+28137 28143 28093 47157 5035
+28093 7
+28071 28137 28009 28145 47047 28033 47157
+13241 6
+13137 13281 45073 37099 37113 37043
+1089 6
+1103 1071 1095 47051 47103 1083
+1071 6
+1095 1049 13083 47115 1089 47051
+13313 5
+13295 13047 13213 13129 47011
+13213 6
+13129 13111 13123 47139 47011 13313
+13047 4
+13295 13313 47011 47065
+13083 5
+1049 13295 47065 47115 1071
+13295 8
+13055 1049 13313 13129 13047 13115 47065 13083
+13281 5
+13311 13291 13241 13137 37043
+13291 6
+13187 13111 13281 13311 37043 37039
+13111 7
+13123 13291 13187 13085 37039 13213 47139
+37133 4
+37141 37031 37103 37061
+40123 8
+40069 40099 40049 40063 40029 40133 40087 40125
+35009 5
+35041 48369 48017 48117 35037
+37155 8
+45033 45069 37017 37047 45051 37051 37165 37093
+5077 5
+5107 28143 5035 5123 5095
+45087 7
+45071 45059 45039 45023 45091 45021 45083
+1033 4
+1059 1079 1077 28141
+6111 3
+6083 6037 6029
+28143 8
+28119 28027 5107 28137 28107 28033 5035 5077
+37031 3
+37133 37049 37103
+40049 6
+40019 40137 40123 40099 40087 40051
+5125 5
+5053 5059 5051 5119 5105
+1049 7
+1055 1095 13055 1019 13083 13295 1071
+40031 6
+40033 40141 40051 40137 40015 40075
+40065 6
+48487 48197 40141 40075 40057 40055
+37017 5
+37141 37047 37163 37155 37051
+13123 5
+13227 13129 13085 13111 13213
+45023 4
+45057 45039 45091 45087
+13137 7
+13011 13139 13311 45073 13257 13241 13281
+45007 7
+13105 45001 13147 45059 45045 45073 45077
+45025 8
+45061 45031 45055 45069 37153 45057 37007 37179
+1079 7
+1133 1059 1103 1043 1083 1033 1077
+45069 8
+45031 37155 45033 45041 37165 37153 45025 37007
+13311 5
+13139 13187 13137 13281 13291
+28137 5
+28107 28093 28071 28033 28143
+45059 7
+45047 45001 45071 45087 45083 45007 45045
+5051 5
+5059 5097 5125 5105 5149
+35011 5
+35027 35041 35005 35037 35019
+40029 5
+40069 40121 40063 40005 40123
+28117 6
+28081 28145 28141 28057 28003 28139
+5097 8
+5109 5061 5113 5059 5019 5051 5149 5127
+48369 6
+48017 35041 48069 48279 48117 35009
+48069 6
+48279 48437 48189 48381 48369 48117
+48437 6
+48189 48045 48153 48011 48069 48381
+48045 6
+48153 48191 48345 48129 48437 48011
+48075 5
+48101 48191 48197 40057 48087
+48191 6
+48345 48075 48101 48087 48129 48045
+5113 7
+5133 40089 5097 5109 5061 5127 40079
+37141 7
+37019 37129 37047 37133 37017 37061 37163
+13187 5
+13085 13311 13139 13291 13111
+40127 6
+40023 40005 40089 40079 40077 40121
+40005 6
+40013 40069 40127 40023 40121 40029
+40137 6
+40067 40033 40019 40049 40031 40051
+1103 5
+1095 1043 1089 1079 1083
+13257 4
+13011 45073 13119 13137
+5107 7
+28011 5041 5001 28027 28143 5077 5095
+40099 4
+40019 40069 40123 40049
+40141 6
+48487 40033 48485 40031 40065 40075
+13129 8
+13015 13115 13123 13227 13057 13213 13295 13313
+45033 6
+45067 45041 37047 45051 37155 45069
+13085 7
+13117 13057 13227 13139 13187 13123 13111
+45055 7
+45079 45039 45031 45061 45025 45085 45057
+28145 7
+28115 28071 28081 28117 28139 28093 28009
+1095 7
+1009 1043 1049 1055 1071 1103 1089
+35041 8
+35025 35005 48017 48079 48369 35009 35037 35011
+35003 6
+35017 4011 35053 35051 35006 4001
+13115 6
+13233 1019 13055 13015 13129 13295
+13055 4
+1019 13115 13295 1049
+35053 5
+35006 35051 35027 35057 35003
+1059 6
+1093 28057 1079 1133 1033 28141
+48197 6
+48155 48101 48487 40065 48075 40057
+5001 7
+5079 5069 5041 5107 5095 5085 5117
+28071 7
+28161 28107 28145 28115 28013 28093 28137
+28107 6
+28135 28119 28071 28161 28137 28143
+45039 6
+45071 45055 45079 45057 45023 45087
+13227 5
+13057 13015 13085 13123 13129
+13119 5
+13195 13011 13147 45073 13257
+28027 7
+28133 28011 5041 28119 28135 28143 5107
+45031 5
+45061 45069 45041 45025 45055
+45071 7
+45081 45047 45039 45079 45063 45087 45059
+1019 7
+1015 1055 13233 1029 13115 13055 1049
+28081 6
+28017 28115 28057 28095 28117 28145
+4007 6
+4021 4013 4009 4017 4025 4005
+28119 4
+28107 28135 28143 28027
+40069 7
+40095 40019 40005 40013 40029 40123 40099
+40019 7
+40085 40067 40069 40095 40099 40049 40137
+5059 6
+5019 5053 5039 5125 5051 5097
+40089 8
+48387 40023 5133 5081 48037 5113 40079 40127
+40033 6
+48485 40137 40067 48077 40031 40141
+13139 9
+13135 13117 13011 13157 13013 13137 13311 13085 13187
+5053 6
+5039 5069 5025 5119 5125 5059
+5069 6
+5025 5079 5001 5053 5119 5085
+13147 5
+13195 45007 13105 13119 45073
+13011 6
+13157 13119 13195 13257 13137 13139
+45001 6
+13105 45059 45047 45065 45007 45045
+37047 6
+45051 37019 37141 45033 37017 37155
+28057 6
+28095 1059 1093 28141 28081 28117
+48487 7
+48023 48155 48485 48009 40141 40065 48197
+13015 7
+13223 13233 13227 13057 13067 13129 13115
+13057 7
+13067 13117 13085 13227 13121 13015 13129
+45047 6
+45065 45081 45037 45071 45059 45001
+37129 2
+37019 37141
+28115 5
+28013 28081 28017 28145 28071
+45061 5
+45041 45085 45031 45025 45055
+37019 4
+45051 37129 37141 37047
+5061 6
+5081 5133 5109 5057 5097 5113
+5109 6
+5057 5019 5099 5097 5061 5113
+35027 7
+35035 35051 35005 35011 35019 35053 35057
+5019 6
+5099 5039 5103 5059 5109 5097
+13117 5
+13121 13139 13135 13085 13057
+1093 7
+1075 28095 1127 1057 1133 1059 28057
+48345 7
+48107 48125 48153 48101 48269 48191 48045
+1043 6
+1127 1133 1009 1095 1103 1079
+48101 7
+48125 48269 48155 48197 48075 48345 48191
+1133 5
+1043 1079 1127 1093 1059
+48153 7
+48303 48107 48189 48345 48125 48045 48437
+48189 7
+48219 48303 48279 48153 48107 48437 48069
+48279 7
+48079 48219 48017 48189 48303 48069 48369
+48017 6
+48079 48279 48219 48369 35041 35009
+45041 8
+45089 45027 45085 45067 45033 45061 45031 45069
+45051 6
+45043 45067 37019 37047 45033 37155
+13157 6
+13219 13059 13013 13195 13011 13139
+45067 5
+45089 45051 45043 45033 45041
+40067 6
+48337 48077 40085 40019 40137 40033
+13105 8
+13221 13195 45065 13181 45001 45007 13317 13147
+13195 7
+13059 13147 13105 13221 13119 13157 13011
+1009 6
+1073 1127 1055 1115 1095 1043
+45079 6
+45063 45085 45017 45055 45071 45039
+48155 6
+48275 48269 48487 48023 48197 48101
+1055 6
+1115 1019 1015 1049 1009 1095
+5133 5
+5057 5061 5081 5113 40089
+45063 6
+45003 45081 45017 45079 45075 45071
+28161 5
+28043 28135 28013 28071 28107
+48485 6
+48023 48009 48077 40033 48487 40141
+13121 10
+13077 13113 13063 13097 13045 13089 13067 13135 13117 13057
+40095 5
+40085 40013 48181 40069 40019
+5079 5
+5043 5025 5041 5001 5069
+45081 5
+45037 45063 45003 45071 45047
+13135 7
+13247 13089 13013 13297 13139 13121 13117
+28135 7
+28083 28133 28161 28043 28107 28119 28027
+28013 6
+28043 28017 28155 28115 28161 28071
+40013 7
+48147 48181 48277 40023 40005 40095 40069
+40023 6
+48277 40089 48387 40127 40013 40005
+45085 6
+45017 45027 45041 45079 45061 45055
+5039 6
+5013 5103 5025 5053 5019 5059
+48077 6
+48237 48009 48337 40067 48485 40033
+13013 6
+13297 13059 13219 13157 13135 13139
+5041 7
+5017 5043 28011 28027 5107 5079 5001
+28011 6
+28151 5017 28133 28027 5107 5041
+13067 5
+13097 13223 13121 13057 13015
+13233 6
+13143 1029 13223 13015 13115 1019
+35005 6
+35015 35035 35025 35041 35027 35011
+28095 7
+28087 28025 28017 1093 1075 28057 28081
+13223 6
+13045 13143 13067 13097 13015 13233
+28017 6
+28155 28095 28025 28081 28013 28115
+6065 5
+6025 6073 6059 4012 6071
+45065 6
+13073 13181 45037 45047 13105 45001
+40085 6
+48097 48337 40095 48181 40019 40067
+5025 7
+5011 5013 5079 5043 5069 5039 5053
+1075 5
+28087 1057 1093 1107 28095
+13059 5
+13219 13221 13195 13157 13013
+13221 7
+13265 13133 13219 13317 13105 13059 13195
+4013 5
+4012 4021 4019 4007 4025
+6083 3
+6111 6029 6079
+5057 7
+5091 5081 5099 5073 5109 5133 5061
+1127 7
+1125 1057 1073 1009 1043 1133 1093
+28133 6
+28151 28083 28053 28135 28027 28011
+48337 6
+48237 48097 48497 40085 40067 48077
+13317 6
+13301 13265 13181 13189 13221 13105
+1115 6
+1117 1073 1121 1015 1055 1009
+13181 5
+13189 13073 45065 13317 13105
+45037 6
+13245 13073 45081 45003 45065 45047
+1015 5
+1121 1019 1029 1115 1055
+13219 7
+13211 13297 13221 13059 13133 13157 13013
+13089 5
+13063 13135 13247 13151 13121
+48387 8
+48159 48449 48119 48277 48037 48343 40089 40023
+5099 6
+5073 5103 5027 5019 5057 5109
+48181 7
+48085 48121 48097 48147 40013 40085 40095
+1029 8
+1027 1111 1121 13045 13233 13143 1015 1019
+6059 4
+6037 6065 6073 6071
+48097 5
+48121 48497 48181 40085 48337
+5081 6
+48037 5057 5091 5061 5133 40089
+45027 6
+45075 45017 45089 45015 45041 45085
+48277 6
+48119 48147 48387 48159 40023 40013
+13297 6
+13217 13247 13219 13211 13013 13135
+1057 5
+1107 1127 1125 1075 1093
+13143 4
+13223 13233 13045 1029
+28043 7
+28097 28015 28083 28013 28155 28161 28135
+48147 6
+48085 48231 48277 48119 40013 48181
+45089 5
+45015 45043 45067 45041 45027
+45017 5
+45027 45075 45085 45063 45079
+45003 7
+13033 45011 13245 45075 45063 45037 45081
+1073 6
+1007 1125 1115 1117 1009 1127
+48269 8
+48433 48263 48125 48275 48207 48155 48101 48345
+48125 8
+48169 48263 48107 48269 48433 48101 48345 48153
+48107 8
+48305 48169 48303 48125 48263 48345 48153 48189
+48303 8
+48445 48305 48219 48107 48169 48153 48189 48279
+48009 7
+48503 48447 48023 48077 48237 48485 48487
+48219 7
+48445 48079 48303 48305 48189 48279 48017
+48275 6
+48433 48207 48023 48447 48155 48269
+48023 8
+48447 48207 48009 48503 48485 48487 48275 48155
+48079 7
+48501 35025 48219 48445 48279 48017 35041
+5103 6
+5027 5013 5139 5039 5099 5019
+13045 8
+13149 1111 13121 13077 13097 13223 13143 1029
+28083 6
+28053 28015 28051 28043 28135 28133
+28025 5
+28105 28155 28087 28095 28017
+13211 6
+13159 13217 13133 13237 13219 13297
+5013 5
+5011 5139 5025 5039 5103
+13097 4
+13121 13067 13045 13223
+5043 6
+5003 5011 5041 5017 5079 5025
+13247 5
+13151 13297 13217 13135 13089
+4011 6
+4003 4009 35017 35023 35003 4001
+45043 5
+45019 45015 45051 45089 45067
+13133 6
+13237 13265 13141 13221 13211 13219
+28087 6
+28103 28105 1107 1075 28095 28025
+13217 6
+13035 13151 13211 13159 13297 13247
+28155 7
+28019 28097 28025 28105 28017 28043 28013
+13265 5
+13141 13301 13317 13221 13133
+5011 5
+5139 5043 5003 5025 5013
+45075 9
+45029 45009 45011 45015 45027 45035 45003 45017 45063
+48037 6
+48067 48343 5091 5081 48387 40089
+13073 5
+13189 13245 45037 45065 13181
+1121 6
+1037 1117 1029 1027 1015 1115
+28015 5
+28051 28097 28007 28043 28083
+28097 5
+28155 28019 28007 28043 28015
+13189 6
+13163 13301 13073 13245 13181 13317
+4009 7
+4019 4021 4011 4003 4001 4007 4017
+13063 5
+13113 13151 13255 13121 13089
+13151 6
+13255 13217 13035 13247 13063 13089
+1125 7
+1065 1063 1107 1007 1073 1127 1057
+13301 6
+13125 13141 13189 13163 13317 13265
+5091 7
+48067 22015 5073 22017 5057 48037 5081
+28105 6
+28159 28019 28087 28103 28025 28155
+35025 9
+48301 35015 48003 48495 48165 48501 48079 35041 35005
+5017 9
+22123 22067 5003 28151 28055 22035 5041 28011 5043
+13113 4
+13077 13063 13255 13121
+1117 6
+1021 1007 1037 1121 1115 1073
+13245 6
+13163 45003 13033 45037 13189 13073
+1107 7
+1119 28103 1125 1063 1057 28087 1075
+28151 6
+28055 28125 28053 28133 28011 5017
+28019 5
+28007 28105 28159 28155 28097
+13159 6
+13207 13035 13237 13211 13169 13217
+45015 6
+45035 45019 45043 45089 45075 45027
+13077 7
+13285 13199 13149 13255 13113 13121 13045
+6073 3
+6025 6065 6059
+1111 7
+1123 1027 13149 13285 13045 1017 1029
+1027 5
+1037 1123 1111 1029 1121
+48119 6
+48223 48231 48387 48159 48277 48147
+45011 5
+13033 45009 45005 45075 45003
+5073 7
+22015 22017 5027 22119 5099 5091 5057
+35051 7
+35029 35013 35017 35035 35027 35053 35003
+6037 4
+6059 6071 6029 6111
+13237 6
+13169 13141 13133 13009 13159 13211
+4021 4
+4019 4009 4007 4013
+48237 7
+48363 48503 48497 48367 48337 48077 48009
+13141 7
+13009 13303 13125 13301 13265 13237 13133
+5027 6
+22119 5139 22027 5103 5073 5099
+13035 6
+13207 13171 13255 13159 13217 13151
+6025 3
+4012 6065 6073
+48497 6
+48367 48121 48439 48097 48237 48337
+48121 6
+48439 48085 48113 48181 48097 48497
+45009 6
+45049 45005 45035 45029 45075 45011
+13149 4
+13077 13285 13045 1111
+48231 8
+48257 48397 48085 48223 48379 48467 48119 48147
+48085 6
+48397 48113 48231 48147 48181 48121
+5003 6
+22111 5139 5017 22067 5043 5011
+48449 4
+48159 48343 48063 48387
+48207 7
+48253 48433 48447 48417 48023 48275 48269
+48447 7
+48417 48503 48429 48009 48023 48207 48275
+48503 6
+48429 48237 48363 48009 48447 48023
+48263 7
+48415 48169 48433 48151 48269 48125 48107
+48433 7
+48151 48207 48253 48275 48269 48263 48125
+48159 7
+48499 48223 48449 48063 48387 48119 48277
+35035 8
+48141 48229 35013 35015 48109 35005 35027 35051
+5139 8
+22111 22027 5003 22067 5011 5027 5013 5103
+48169 7
+48033 48305 48263 48415 48125 48107 48303
+48305 7
+48115 48445 48169 48033 48107 48303 48219
+48445 7
+48165 48501 48305 48115 48303 48219 48079
+28051 6
+28089 28163 28053 28007 28015 28083
+48501 4
+48445 48165 48079 35025
+48223 5
+48379 48159 48499 48119 48231
+48343 7
+48459 48063 48067 48315 48037 48449 48387
+13255 8
+13231 13171 13199 13035 13151 13077 13113 13063
+28053 6
+28125 28051 28163 28083 28151 28133
+45035 5
+45029 45015 45019 45009 45075
+13125 4
+13303 13163 13301 13141
+13163 8
+13167 13303 13033 13107 13245 13189 13125 13301
+48067 5
+48315 5091 22017 48037 48343
+13033 8
+13107 13165 45005 45011 13251 45003 13163 13245
+28007 9
+28089 28079 28163 28159 28099 28019 28051 28097 28015
+28103 6
+28069 28159 1107 1119 28087 28105
+28159 7
+28079 28099 28103 28069 28105 28007 28019
+1007 6
+1105 1065 1117 1021 1073 1125
+13303 6
+13319 13009 13163 13167 13125 13141
+13199 7
+13145 13285 13231 13293 13263 13255 13077
+13285 6
+1017 13199 13145 13077 13149 1111
+35017 5
+35023 35029 35051 35003 4011
+13171 5
+13293 13231 13207 13035 13255
+13207 7
+13079 13293 13169 13021 13159 13035 13171
+13231 4
+13171 13293 13255 13199
+13009 5
+13319 13169 13303 13141 13237
+13169 7
+13021 13009 13319 13289 13237 13207 13159
+45029 7
+45013 45049 45005 45019 45035 45075 45009
+1063 5
+1119 1065 1091 1125 1107
+45005 6
+13251 45029 45009 45049 13033 45011
+1017 5
+1123 13285 13145 1081 1111
+1123 7
+1051 1037 1081 1087 1017 1111 1027
+1037 6
+1021 1123 1027 1051 1121 1117
+28125 4
+28055 28053 28163 28151
+48063 5
+48459 48499 48343 48159 48449
+1021 7
+1047 1001 1105 1051 1037 1117 1007
+35013 4
+35029 48141 35035 35051
+13251 6
+13031 13165 45049 13103 45005 13033
+28163 8
+28049 28149 28055 28089 28007 28051 28125 28053
+45049 7
+13103 45053 45029 45013 45009 13251 45005
+22017 9
+48365 48203 48315 22015 22081 22031 5073 48067 5091
+45019 4
+45043 45029 45035 45015
+22015 6
+22119 22013 22081 22017 5073 5091
+28055 6
+28149 22035 28163 28125 28151 5017
+22119 5
+22027 22013 5027 22015 5073
+22027 6
+22013 22111 22061 5139 22119 5027
+22111 6
+22061 22067 22073 5003 5139 22027
+1065 5
+1091 1105 1007 1125 1063
+48499 7
+48467 48379 48459 48423 48063 48159 48223
+22067 7
+22083 22073 22123 5017 22111 5139 5003
+22123 4
+22083 22035 5017 22067
+22035 6
+22065 22083 28055 28149 22123 5017
+13319 7
+13023 13289 13167 13175 13303 13009 13169
+48363 7
+48133 48429 48367 48221 48143 48237 48503
+13293 7
+13269 13263 13207 13171 13079 13231 13199
+48367 6
+48221 48439 48251 48497 48363 48237
+48439 6
+48251 48113 48139 48121 48367 48497
+48113 6
+48139 48257 48397 48085 48439 48121
+48397 4
+48231 48257 48085 48113
+1119 7
+1023 28075 28069 1091 1063 1107 28103
+48379 4
+48499 48223 48467 48231
+35015 6
+48109 35025 48301 48389 35005 35035
+48417 6
+48059 48253 48429 48133 48447 48207
+48253 7
+48353 48441 48151 48417 48059 48207 48433
+48151 7
+48353 48335 48415 48253 48441 48433 48263
+48415 7
+48335 48227 48033 48151 48353 48263 48169
+48033 7
+48317 48227 48115 48415 48335 48169 48305
+48115 7
+48003 48317 48165 48033 48227 48305 48445
+48429 5
+48133 48363 48503 48417 48447
+48165 6
+48003 48115 48317 48445 48501 35025
+13021 6
+13153 13225 13079 13289 13169 13207
+13165 4
+13107 13251 13031 13033
+28099 7
+28101 28123 28079 28069 28075 28159 28007
+28079 6
+28123 28089 28099 28101 28159 28007
+28069 6
+28101 28075 1119 28103 28099 28159
+48459 7
+48423 48203 48183 48315 48343 48063 48499
+13289 6
+13023 13153 13175 13319 13021 13169
+28089 7
+28049 28121 28079 28123 28007 28051 28163
+48315 5
+48203 22017 48067 48459 48343
+13263 7
+13053 13197 13215 13145 13269 13293 13199
+1105 5
+1091 1021 1047 1007 1065
+13145 6
+13215 1081 13263 13199 1017 13285
+13079 5
+13225 13269 13021 13207 13293
+48257 6
+48139 48467 48213 48231 48397 48113
+48467 6
+48213 48423 48499 48257 48379 48231
+13107 10
+13279 13209 13283 13043 13167 13031 13165 13267 13033 13163
+13167 6
+13283 13175 13107 13163 13319 13303
+48203 6
+48365 48401 48183 22017 48315 48459
+35023 4
+4003 35029 35017 4011
+1051 6
+1101 1001 1123 1087 1021 1037
+22061 5
+22013 22073 22111 22049 22027
+45053 4
+13051 13103 45013 45049
+1081 6
+1113 1087 13215 13145 1123 1017
+13269 7
+13249 13197 13225 13079 13193 13293 13263
+22073 6
+22049 22083 22067 22021 22061 22111
+13175 7
+13309 13091 13023 13283 13167 13289 13319
+1001 5
+1085 1047 1051 1101 1021
+1047 6
+1131 1091 1085 1001 1021 1105
+48423 7
+48213 48401 48073 48183 48459 48467 48499
+13225 5
+13193 13021 13153 13079 13269
+13153 7
+13093 13193 13023 13289 13235 13021 13225
+22083 7
+22021 22041 22065 22035 22123 22067 22073
+48183 4
+48401 48203 48423 48459
+13031 7
+13109 13043 13103 13029 13251 13107 13165
+28123 7
+28129 28121 28101 28061 28099 28079 28089
+28149 7
+28021 22107 22065 28049 28163 28055 22035
+35029 4
+35013 35051 35023 35017
+13215 5
+1113 13053 13263 13145 1081
+1087 6
+1011 1101 1113 1081 1051 1123
+28121 6
+28029 28049 28123 28129 28127 28089
+13103 6
+13029 45053 13051 45049 13031 13251
+22013 8
+22069 22081 22049 22127 22061 22027 22119 22015
+28075 7
+28023 28061 28101 1023 1119 28069 28099
+28101 8
+28129 28075 28023 28061 28069 28099 28123 28079
+13023 6
+13235 13175 13091 13319 13289 13153
+28049 7
+28029 28021 28121 28127 28089 28163 28149
+22065 5
+22107 22041 28149 22035 22083
+48251 7
+48035 48425 48221 48139 48217 48439 48367
+13197 7
+13307 13259 13053 13249 13261 13269 13263
+48221 5
+48425 48143 48251 48367 48363
+48139 7
+48217 48213 48349 48257 48113 48251 48439
+13043 5
+13279 13031 13109 13267 13107
+1091 8
+1025 1023 1047 1131 1105 1065 1119 1063
+13053 5
+13259 1113 13197 13263 13215
+4019 6
+4012 4023 4003 4009 4021 4013
+48441 6
+48399 48353 48059 48083 48253 48151
+48227 7
+48173 48317 48335 48431 48415 48033 48115
+48335 7
+48431 48353 48081 48151 48415 48227 48033
+48133 7
+48049 48059 48143 48093 48363 48429 48417
+48059 6
+48083 48133 48049 48417 48441 48253
+48317 7
+48329 48003 48227 48173 48033 48115 48165
+48003 7
+48135 48495 48317 48329 48115 48165 35025
+48353 7
+48081 48441 48399 48253 48151 48335 48415
+48143 7
+48193 48093 48425 48035 48221 48133 48363
+1113 7
+1005 1011 13053 13259 13215 1081 1087
+13193 6
+13261 13249 13093 13153 13225 13269
+13283 6
+13309 13209 13279 13107 13167 13175
+1101 7
+1041 1085 1011 1109 1087 1051 1001
+22049 5
+22127 22021 22073 22013 22061
+4003 5
+4023 35023 4011 4019 4009
+13091 6
+13315 13235 13309 13175 13271 13023
+13249 4
+13193 13261 13269 13197
+1085 6
+1013 1131 1101 1041 1001 1047
+22041 5
+22025 22021 22065 22107 22083
+48401 7
+48073 48419 48365 48347 48203 48183 48423
+13235 5
+13093 13091 13315 13023 13153
+48365 5
+22017 22031 48419 48203 48401
+48213 8
+48161 48349 48073 48423 48001 48467 48139 48257
+13209 5
+13161 13309 13279 13107 13283
+13279 7
+13001 13161 13267 13043 13107 13209 13283
+22031 6
+48419 22069 22081 22085 48365 22017
+48349 6
+48293 48217 48001 48161 48213 48139
+48425 4
+48251 48035 48221 48143
+1023 7
+1129 28153 28023 1025 1091 1119 28075
+1011 5
+1109 1113 1005 1087 1101
+13267 8
+13001 13179 13183 13305 13109 13279 13043 13107
+13309 6
+13271 13209 13161 13175 13283 13091
+13093 6
+13081 13261 13235 13315 13153 13193
+22021 7
+22059 22127 22083 22041 22025 22049 22073
+48217 6
+48309 48035 48349 48293 48139 48251
+1131 6
+1099 1025 1085 1013 1047 1091
+13109 5
+13029 13179 13031 13267 13043
+45013 3
+45029 45053 45049
+48093 5
+48333 48049 48193 48143 48133
+22107 8
+28001 22029 22025 28021 28063 28149 22065 22041
+22081 5
+22013 22069 22031 22015 22017
+13029 5
+13179 13051 13103 13109 13031
+28061 7
+28067 28129 28153 28075 28023 28101 28123
+28023 5
+28153 1023 28075 28061 28101
+28129 7
+28031 28127 28061 28067 28101 28123 28121
+28021 5
+28063 28029 28049 28149 22107
+13259 7
+13239 1005 13197 13307 13053 13243 1113
+13307 5
+13243 13261 13197 13273 13259
+13261 8
+13177 13273 13081 13093 13193 13307 13249 13197
+48035 7
+48099 48193 48217 48309 48251 48425 48143
+1005 7
+1045 1067 1109 13239 13259 1113 1011
+22127 6
+22043 22069 22059 22021 22049 22013
+22069 8
+22115 22085 22079 22127 22043 22013 22081 22031
+48073 7
+48225 48001 48347 48005 48401 48213 48423
+13271 6
+13017 13315 13161 13069 13309 13091
+13315 7
+13287 13081 13271 13091 13017 13093 13235
+48431 6
+48383 48173 48081 48451 48335 48227
+13179 5
+13191 13183 13029 13109 13267
+48173 6
+48383 48461 48329 48431 48227 48317
+48329 6
+48461 48135 48173 48383 48317 48003
+48081 5
+48451 48399 48353 48431 48335
+48495 5
+48301 48135 48475 48003 35025
+48135 6
+48103 48475 48329 48461 48003 48495
+48399 6
+48451 48083 48095 48441 48081 48353
+48083 6
+48307 48095 48049 48059 48399 48441
+48049 7
+48307 48093 48333 48411 48133 48083 48059
+48001 6
+48289 48161 48073 48225 48349 48213
+1109 6
+1031 1041 1005 1045 1011 1101
+28127 7
+28077 28065 28029 28129 28031 28049 28121
+1041 6
+1039 1013 1109 1031 1101 1085
+28029 7
+28085 28063 28127 28077 28121 28049 28021
+13081 6
+13321 13177 13287 13315 13093 13261
+48193 6
+48281 48333 48035 48099 48143 48093
+48161 5
+48293 48001 48289 48213 48349
+13183 4
+13305 13191 13179 13267
+48109 5
+48229 48389 48243 35015 35035
+48389 6
+48243 48301 48371 48475 48109 35015
+48301 5
+48495 48475 48389 35025 35015
+48229 5
+48141 48109 48243 48377 35035
+48141 3
+48229 35035 35013
+1025 6
+1129 1099 1003 1131 1091 1023
+13239 5
+1067 13243 13061 13259 1005
+48419 7
+48405 48347 22085 48403 22031 48365 48401
+22025 6
+22009 22059 22029 22107 22041 22021
+1013 6
+1035 1099 1041 1039 1085 1131
+13161 7
+13069 13001 13005 13279 13209 13271 13309
+13001 6
+13005 13305 13267 13229 13279 13161
+13273 6
+13037 13243 13177 13095 13261 13307
+22059 6
+22079 22043 22025 22009 22021 22127
+13243 6
+13061 13273 13037 13307 13239 13259
+13177 5
+13095 13081 13321 13261 13273
+28153 7
+28041 28111 28067 1023 1129 28023 28061
+28063 6
+28001 28029 28085 28021 28037 22107
+48309 6
+48027 48099 48293 48145 48217 48035
+13051 3
+45053 13029 13103
+13321 7
+13205 13095 13277 13071 13287 13081 13177
+48347 5
+48405 48005 48419 48073 48401
+13287 6
+13017 13155 13277 13315 13321 13081
+22085 6
+48351 48403 22069 22115 48419 22031
+13017 5
+13155 13271 13069 13287 13315
+1099 6
+1003 1035 1053 1013 1131 1025
+28067 6
+28035 28031 28153 28111 28061 28129
+13305 7
+13025 13229 13191 13127 13183 13001 13267
+48293 7
+48395 48145 48289 48161 48349 48309 48217
+13069 8
+13003 13019 13155 13299 13005 13161 13017 13271
+22043 4
+22059 22127 22079 22069
+28031 6
+28065 28067 28035 28073 28129 28127
+1067 6
+1045 13099 1069 13239 13061 1005
+13061 5
+13037 13243 13099 1067 13239
+28065 5
+28091 28077 28073 28031 28127
+13155 5
+13019 13277 13069 13017 13287
+22029 7
+22077 22009 28001 28157 22125 22107 22025
+28077 6
+28085 28091 28147 28065 28127 28029
+28001 5
+28037 28157 28063 22107 22029
+1035 4
+1053 1039 1013 1099
+4023 2
+4003 4019
+48333 5
+48411 48281 48193 48093 48049
+28085 7
+28113 28005 28037 28077 28147 28029 28063
+48099 5
+48281 48309 48027 48035 48193
+13005 5
+13229 13001 13299 13069 13161
+48451 8
+48413 48235 48383 48095 48327 48399 48081 48431
+1129 6
+1097 28041 1003 1025 1023 28153
+13191 4
+13127 13179 13305 13183
+13095 6
+13205 13007 13037 13321 13177 13273
+48475 6
+48103 48371 48135 48301 48389 48495
+48405 5
+48005 48403 48241 48419 48347
+48103 5
+48461 48105 48371 48135 48475
+48461 6
+48383 48105 48173 48329 48103 48135
+48383 7
+48451 48235 48105 48431 48173 48461 48329
+48289 7
+48041 48395 48225 48313 48001 48293 48161
+13037 6
+13099 13095 13273 13007 13061 13243
+1031 5
+1039 1045 1061 1109 1041
+1045 6
+1061 1067 1069 1005 1031 1109
+28037 5
+28157 28085 28005 28001 28063
+48403 5
+48241 22085 48351 48405 48419
+48225 7
+48471 48313 48455 48005 48073 48289 48001
+13277 6
+13071 13075 13019 13155 13287 13321
+48095 6
+48413 48307 48327 48083 48451 48399
+1039 8
+12091 1053 1031 1061 12131 1041 1035 1013
+48235 4
+48105 48413 48451 48383
+13229 5
+13299 13305 13025 13005 13001
+48005 8
+48373 48455 48405 48241 48457 48225 48347 48073
+48145 5
+48331 48027 48293 48395 48309
+13099 7
+13253 1069 13007 13201 13037 13061 1067
+22079 7
+22003 22039 22115 22009 22059 22043 22069
+48307 6
+48327 48411 48319 48049 48083 48095
+48411 7
+48299 48319 48053 48281 48333 48307 48049
+13019 7
+13185 13173 13075 13069 13003 13155 13277
+13299 8
+12003 13065 13003 13049 13025 13229 13005 13069
+48281 6
+48027 48099 48053 48193 48411 48333
+13127 4
+13039 13025 13191 13305
+13007 6
+13087 13201 13205 13095 13099 13037
+13205 7
+13131 13087 13321 13071 13275 13095 13007
+28091 6
+22117 28147 28073 28109 28065 28077
+28073 5
+28109 28035 28091 28065 28031
+28035 6
+28109 28111 28131 28067 28073 28031
+28111 6
+28131 28039 28041 28153 28035 28067
+28041 5
+28039 1097 1129 28153 28111
+13003 5
+13173 13299 13065 13069 13019
+48455 5
+48407 48471 48373 48005 48225
+48371 7
+48043 48243 48443 48105 48103 48475 48389
+28157 6
+22125 28005 28037 22037 28001 22029
+22115 6
+22011 48351 22079 22003 22069 22085
+13025 6
+13049 13127 13039 13305 13299 13229
+48395 7
+48051 48331 48313 48041 48289 48293 48145
+28005 7
+22037 22091 28113 22105 28085 28157 28037
+28113 5
+28147 22117 22105 28085 28005
+28147 5
+28091 22117 28113 28085 28077
+13075 5
+13027 13071 13019 13185 13277
+22009 8
+22097 22039 22125 22077 22029 22025 22079 22059
+13071 6
+13275 13277 13075 13027 13205 13321
+1069 6
+1061 13253 13099 12063 1045 1067
+48027 7
+48491 48053 48331 48145 48309 48281 48099
+1003 6
+1097 12033 1053 1099 1129 1025
+1053 7
+12033 12113 1039 12091 1035 1003 1099
+13201 4
+13253 13007 13087 13099
+1061 7
+12131 12059 12063 1069 1045 1039 1031
+13173 5
+13185 13065 13101 13003 13019
+48351 7
+48361 48241 22011 22019 22115 22085 48403
+13065 6
+12023 13101 12003 13299 13173 13003
+1097 5
+28059 28039 1003 1129 28041
+13039 4
+12089 13049 13127 13025
+48241 7
+48199 48457 48351 48361 48403 48005 48405
+48373 6
+48291 48407 48457 48199 48005 48455
+48331 6
+48287 48491 48051 48395 48145 48027
+48243 6
+48377 48371 48043 48389 48229 48109
+48313 6
+48185 48041 48225 48471 48395 48289
+48413 7
+48435 48105 48327 48267 48095 48451 48235
+48327 6
+48319 48307 48267 48413 48095 48451
+48105 9
+48443 48435 48465 48413 48235 48371 48383 48461 48103
+13087 7
+12063 13253 13131 12039 13205 13007 13201
+13253 5
+12063 13087 13201 13099 1069
+13131 5
+12039 13275 12073 13205 13087
+13275 6
+12073 13027 12065 13071 13131 13205
+13027 6
+12065 13185 12079 13075 13275 13071
+13049 5
+12003 12089 13039 13025 13299
+48457 4
+48199 48241 48373 48005
+48471 6
+48185 48407 48339 48455 48225 48313
+48053 7
+48031 48299 48491 48453 48027 48281 48411
+13185 7
+12079 13173 13101 12047 13019 13027 13075
+22077 8
+22097 22121 22047 22099 22037 22125 22029 22009
+28109 8
+28045 22103 22117 28047 28131 28035 28073 28091
+22117 6
+22105 28109 22103 28091 28147 28113
+22105 7
+22095 22063 22091 22103 22117 28113 28005
+22091 5
+22033 22037 22105 22063 28005
+22037 7
+22121 22033 22125 22091 28005 22077 28157
+28039 5
+28059 28131 1097 28041 28111
+22125 6
+22037 22121 28157 22077 22009 22029
+12063 10
+12005 12013 12133 12059 13087 13253 12039 12077 1061 1069
+12059 4
+12131 12063 12133 1061
+22039 6
+22001 22053 22003 22097 22009 22079
+12131 6
+12091 12133 12005 12059 1061 1039
+48041 6
+48477 48051 48185 48313 48289 48395
+48319 6
+48267 48299 48171 48411 48327 48307
+48299 5
+48171 48053 48031 48411 48319
+28131 7
+28045 28047 28039 28059 28111 28109 28035
+48491 6
+48453 48331 48287 48021 48027 48053
+48407 5
+48339 48291 48373 48455 48471
+22003 6
+22053 22011 22039 22001 22079 22115
+22011 5
+22003 22053 22019 22115 48351
+48185 6
+48473 48477 48471 48339 48313 48041
+13101 5
+12047 12023 13065 13185 13173
+22097 6
+22055 22001 22077 22099 22009 22039
+12133 4
+12005 12063 12131 12059
+12089 4
+12003 12031 13039 13049
+28059 4
+28047 1097 28039 28131
+48051 5
+48287 48041 48477 48395 48331
+22033 6
+22047 22121 22063 22005 22091 22037
+48267 7
+48137 48435 48171 48319 48265 48327 48413
+12039 6
+12077 12013 12073 13131 12063 13087
+22103 4
+28045 28109 22105 22117
+48435 5
+48465 48267 48137 48413 48105
+12073 6
+12129 12077 12065 13275 12039 13131
+28047 4
+28045 28059 28131 28109
+12065 6
+12129 12123 12079 13027 12073 13275
+48043 4
+48377 48443 48371 48243
+48443 4
+48465 48105 48043 48371
+22121 5
+22047 22033 22037 22125 22077
+22063 5
+22005 22105 22095 22033 22091
+28045 4
+28047 28109 28131 22103
+12079 7
+12123 12047 12121 12067 13185 12065 13027
+48377 3
+48043 48243 48229
+48453 6
+48055 48209 48031 48021 48491 48053
+48339 6
+48473 48291 48201 48407 48185 48471
+12047 5
+12121 12023 13101 12079 13185
+12013 5
+12045 12005 12039 12077 12063
+12077 7
+12045 12129 12037 12073 12039 12013 12063
+12023 8
+12041 12121 12125 12001 12003 13065 12047 13101
+12003 9
+12125 12089 12031 12019 13049 12007 13299 12023 13065
+12031 4
+12019 12109 12003 12089
+12005 5
+12045 12013 12133 12063 12131
+48287 6
+48149 48021 48477 48051 48331 48491
+48199 6
+48291 48361 48241 48245 48457 48373
+48171 6
+48265 48031 48299 48259 48267 48319
+48031 7
+48259 48453 48209 48091 48053 48171 48299
+22099 7
+22045 22101 22055 22047 22007 22097 22077
+22047 7
+22045 22005 22007 22099 22033 22121 22077
+22019 5
+48361 22053 22023 22011 48351
+22053 7
+22023 22001 22113 22039 22003 22019 22011
+48291 7
+48201 48245 48071 48199 48373 48339 48407
+22001 6
+22055 22113 22097 22039 22053 22003
+12121 5
+12041 12067 12023 12047 12079
+48021 6
+48177 48055 48287 48149 48453 48491
+12091 4
+12113 12131 1039 1053
+12113 3
+12033 12091 1053
+48477 7
+48149 48473 48015 48185 48041 48287 48051
+12033 3
+12113 1053 1003
+22055 5
+22113 22099 22045 22097 22001
+22005 6
+22007 22093 22095 22063 22047 22033
+48209 5
+48091 48055 48453 48187 48031
+12123 4
+12067 12029 12079 12065
+22095 6
+22093 22089 22057 22105 22005 22063
+12129 4
+12037 12065 12073 12077
+48137 7
+48271 48465 48385 48463 48265 48267 48435
+48465 5
+48137 48271 48435 48443 48105
+48265 6
+48385 48259 48019 48171 48137 48267
+12067 5
+12029 12041 12121 12123 12079
+48473 6
+48015 48339 48201 48157 48185 48477
+12109 4
+12107 12019 12035 12031
+48361 6
+48245 22019 22023 48351 48199 48241
+12045 4
+12077 12037 12013 12005
+12019 6
+12001 12007 12109 12107 12031 12003
+48245 4
+48071 48361 48291 48199
+48149 8
+48285 48177 48055 48015 48089 48477 48287 48021
+48201 7
+48039 48157 48071 48167 48291 48473 48339
+22093 4
+22007 22095 22057 22005
+22071 3
+22051 22087 22075
+22113 5
+22023 22045 22055 22001 22053
+12125 4
+12007 12001 12003 12023
+12007 5
+12001 12019 12107 12125 12003
+48259 6
+48019 48031 48091 48029 48265 48171
+48015 6
+48089 48157 48481 48473 48149 48477
+22089 3
+22051 22057 22095
+48385 4
+48019 48463 48265 48137
+22007 8
+22045 22101 22057 22109 22093 22005 22047 22099
+48055 6
+48187 48149 48177 48021 48453 48209
+22023 4
+22113 22053 48361 22019
+22087 2
+22075 22071
+48091 5
+48029 48187 48209 48259 48031
+12037 3
+12129 12045 12077
+48089 5
+48239 48285 48481 48015 48149
+22101 4
+22045 22007 22109 22099
+12001 8
+12075 12041 12107 12083 12019 12007 12125 12023
+12041 6
+12029 12001 12075 12023 12121 12067
+22057 6
+22051 22109 22007 22089 22093 22095
+48019 6
+48463 48325 48029 48259 48385 48265
+22075 3
+22051 22087 22071
+48071 4
+48167 48245 48201 48291
+48187 6
+48493 48029 48177 48055 48091 48209
+12107 7
+12083 12035 12109 12127 12001 12007 12019
+12029 4
+12041 12075 12067 12123
+48157 5
+48481 48039 48201 48015 48473
+48177 8
+48255 48493 48285 48123 48149 48021 48187 48055
+48029 7
+48013 48325 48493 48187 48091 48019 48259
+48325 6
+48163 48507 48463 48029 48013 48019
+12035 3
+12127 12107 12109
+22045 6
+22007 22101 22047 22099 22113 22055
+48285 6
+48469 48123 48089 48239 48149 48177
+48481 6
+48239 48039 48321 48157 48089 48015
+48463 8
+48323 48507 48271 48325 48163 48019 48385 48137
+48271 5
+48323 48463 48507 48137 48465
+48039 5
+48321 48167 48201 48481 48157
+12075 5
+12083 12017 12001 12029 12041
+48167 3
+48071 48039 48201
+12083 7
+12119 12017 12127 12069 12107 12075 12001
+48493 5
+48013 48177 48255 48187 48029
+12127 7
+12095 12117 12069 12009 12083 12035 12107
+48123 5
+48175 48255 48285 48469 48177
+22109 3
+22057 22101 22007
+12069 7
+12119 12095 12097 12105 12117 12127 12083
+48239 6
+48057 48469 48321 48481 48089 48285
+48013 8
+48311 48283 48163 48255 48297 48493 48029 48325
+48321 4
+48057 48039 48239 48481
+48255 7
+48297 48175 48025 48177 48123 48013 48493
+22051 4
+22075 22057 22071 22089
+48469 6
+48391 48175 48239 48057 48285 48123
+48507 6
+48127 48323 48163 48325 48463 48271
+48163 7
+48127 48283 48013 48311 48325 48507 48463
+48323 5
+48127 48479 48507 48463 48271
+12017 4
+12053 12119 12083 12075
+12119 6
+12101 12053 12069 12105 12083 12017
+48175 5
+48025 48391 48469 48123 48255
+12117 4
+12095 12009 12127 12069
+48297 7
+48249 48131 48311 48409 48025 48255 48013
+12095 6
+12097 12105 12009 12117 12127 12069
+48025 5
+48391 48175 48409 48297 48255
+12053 3
+12101 12119 12017
+48283 6
+48479 48127 48311 48131 48013 48163
+48311 6
+48479 48297 48131 48013 48283 48163
+48127 5
+48283 48479 48163 48507 48323
+12009 5
+12097 12061 12095 12117 12127
+48391 6
+48007 48409 48057 48469 48025 48175
+12101 5
+12057 12103 12105 12119 12053
+12105 10
+12081 12049 12057 12093 12055 12097 12095 12101 12119 12069
+12097 7
+12055 12009 12061 12093 12095 12105 12069
+48057 5
+48007 48321 48239 48391 48469
+48479 7
+48505 48131 48247 48311 48283 48127 48323
+12103 2
+12057 12101
+12057 5
+12081 12105 12049 12101 12103
+48409 6
+48355 48249 48007 48391 48025 48297
+48007 3
+48057 48391 48409
+48131 7
+48247 48249 48047 48297 48479 48311 48283
+48249 6
+48047 48273 48409 48355 48297 48131
+12061 4
+12093 12111 12097 12009
+48355 3
+48273 48409 48249
+12049 5
+12027 12081 12055 12105 12057
+12055 7
+12015 12043 12027 12093 12097 12049 12105
+12081 5
+12115 12049 12027 12105 12057
+12093 9
+12051 12043 12085 12099 12111 12061 12055 12097 12105
+48273 4
+48261 48047 48355 48249
+12111 3
+12085 12093 12061
+12115 3
+12027 12015 12081
+48247 5
+48427 48505 48047 48131 48479
+12027 6
+12015 12055 12043 12049 12115 12081
+48505 3
+48427 48247 48479
+48047 7
+48215 48427 48261 48273 48249 48247 48131
+12085 5
+12051 12099 12043 12111 12093
+12043 8
+12071 12051 12015 12085 12099 12093 12055 12027
+48261 4
+48215 48489 48273 48047
+12015 6
+12071 12055 12043 12051 12027 12115
+12099 5
+12011 12051 12085 12043 12093
+12051 8
+12021 12071 12099 12011 12085 12093 12043 12015
+48427 4
+48215 48047 48247 48505
+48215 5
+48061 48261 48489 48047 48427
+12071 4
+12051 12021 12043 12015
+48489 3
+48061 48215 48261
+12021 5
+12025 12087 12011 12051 12071
+48061 2
+48489 48215
+12011 4
+12025 12099 12021 12051
+12025 3
+12087 12011 12021
+12087 2
+12025 12021
+27031 1
+27075
+26083 1
+26061
+26061 4
+26131 26013 26071 26083
+26131 3
+26053 26071 26061
+26013 3
+26071 26103 26061
+26103 6
+26109 26043 26071 26003 26041 26013
+26053 4
+55125 26071 26131 55051
+26095 4
+26153 26003 26033 26097
+26003 4
+26041 26153 26095 26103
+26153 4
+26041 26097 26095 26003
+26071 9
+55037 55041 55125 26043 26103 26013 26053 26131 26061
+55125 6
+55085 26071 55041 26053 55099 55051
+26043 5
+55075 55037 26109 26103 26071
+26097 3
+26033 26153 26095
+26041 4
+26109 26153 26003 26103
+26033 2
+26097 26095
+55041 7
+55067 55085 55075 55083 55037 26071 55125
+55037 4
+26043 55075 26071 55041
+26109 4
+55075 26041 26103 26043
+55085 5
+55069 55041 55067 55125 55099
+55075 5
+55083 26109 26043 55037 55041
+26047 2
+26029 26031
+26031 5
+26137 26029 26141 26119 26047
+26141 3
+26119 26007 26031
+55069 5
+55067 55085 55073 55119 55099
+55067 5
+55073 55083 55041 55069 55085
+26029 4
+26009 26031 26137 26047
+55029 1
+55061
+26007 4
+26135 26001 26119 26141
+26009 5
+26055 26079 26137 26039 26029
+26137 7
+26039 26079 26119 26135 26031 26009 26029
+26119 7
+26135 26039 26007 26001 26141 26137 26031
+26089 2
+26019 26055
+55073 8
+55097 55135 55083 55067 55069 55141 55019 55119
+36089 5
+36049 36045 36033 36041 36043
+26079 7
+26113 26165 26055 26039 26143 26137 26009
+26039 8
+26143 26113 26135 26129 26119 26137 26079 26009
+26001 5
+26129 26069 26135 26007 26119
+26055 7
+26165 26101 26019 26079 26113 26009 26089
+26135 8
+26129 26143 26001 26069 26007 26119 26039 26137
+26019 4
+26101 26055 26165 26089
+55061 3
+55071 55009 55029
+55097 6
+55001 55135 55083 55137 55073 55141
+55135 6
+55137 55087 55139 55083 55097 55073
+55009 5
+55083 55015 55087 55061 55071
+55087 5
+55139 55009 55015 55083 55135
+26101 5
+26105 26165 26085 26055 26019
+26069 4
+26011 26129 26001 26135
+26165 7
+26085 26113 26133 26079 26055 26101 26019
+26113 7
+26133 26143 26035 26039 26079 26165 26055
+26143 7
+26035 26129 26051 26135 26039 26113 26079
+26129 7
+26051 26069 26011 26001 26135 26143 26039
+36045 3
+36075 36049 36089
+55071 4
+55117 55015 55061 55009
+55001 7
+55111 55077 55021 55137 55097 55057 55141
+55137 6
+55047 55077 55139 55135 55001 55097
+55139 6
+55047 55015 55039 55087 55137 55135
+55015 6
+55039 55071 55117 55009 55139 55087
+36049 5
+36065 36075 36043 36089 36045
+26105 4
+26127 26085 26123 26101
+26085 7
+26123 26127 26133 26107 26165 26105 26101
+26133 7
+26107 26123 26035 26073 26113 26085 26165
+26011 4
+26017 26051 26069 26129
+26035 7
+26107 26073 26051 26111 26143 26133 26113
+26051 7
+26073 26111 26011 26017 26129 26035 26143
+26063 2
+26151 26157
+26017 5
+26145 26111 26157 26011 26051
+55077 4
+55047 55021 55137 55001
+55047 6
+55021 55039 55027 55139 55137 55077
+55039 6
+55131 55027 55117 55015 55047 55139
+55117 5
+55089 55131 55071 55039 55015
+26111 6
+26057 26073 26017 26145 26051 26035
+26127 4
+26123 26121 26085 26105
+26073 7
+26117 26107 26111 26057 26051 26035 26133
+26123 8
+26121 26117 26081 26107 26133 26085 26127 26105
+26107 6
+26073 26117 26035 26133 26123 26085
+26157 6
+26049 26145 26151 26087 26063 26017
+36075 6
+36067 36011 36065 36053 36049 36045
+26151 4
+26087 26147 26063 26157
+55021 7
+55025 55111 55027 55047 55077 55001 55057
+55111 7
+55021 55025 55001 55049 55103 55123 55057
+55027 7
+55055 55025 55131 55133 55039 55021 55047
+36065 5
+36053 36043 36077 36049 36075
+26145 6
+26155 26057 26157 26049 26017 26111
+55131 6
+55133 55089 55079 55117 55039 55027
+55089 4
+55079 55133 55117 55131
+26121 4
+26139 26081 26123 26127
+26117 7
+26067 26081 26057 26037 26073 26107 26123
+26057 7
+26037 26067 26145 26155 26111 26117 26073
+36011 6
+36109 36099 36117 36023 36067 36075
+36063 3
+36073 36037 36029
+36073 3
+36037 36055 36063
+36055 5
+36051 36037 36069 36117 36073
+36117 4
+36069 36011 36099 36055
+26087 6
+26125 26049 26147 26099 26151 26157
+26081 7
+26005 26139 26067 26015 26117 26121 26123
+55025 7
+55045 55055 55105 55027 55021 55111 55049
+36067 4
+36023 36053 36075 36011
+26049 6
+26093 26155 26087 26125 26157 26145
+26139 3
+26081 26005 26121
+55055 5
+55105 55133 55127 55027 55025
+55133 7
+55127 55079 55101 55089 55131 55055 55027
+55079 4
+55101 55089 55133 55131
+36053 6
+36017 36023 36077 36065 36067 36075
+26147 3
+26099 26087 26151
+26155 6
+26065 26037 26049 26093 26145 26057
+36037 6
+36121 36029 36055 36051 36073 36063
+26067 6
+26015 26037 26045 26057 26117 26081
+26037 6
+26045 26155 26065 26057 26067 26117
+36029 5
+36013 36009 36037 36121 36063
+36069 6
+36101 36123 36051 36099 36117 36055
+36099 6
+36097 36123 36011 36109 36069 36117
+36051 6
+36003 36121 36101 36069 36055 36037
+26099 4
+26163 26125 26147 26087
+26125 6
+26161 26093 26099 26163 26087 26049
+36121 5
+36009 36051 36003 36037 36029
+55045 6
+17177 55105 17201 55025 55065 55049
+55105 6
+17201 55127 17007 55055 55045 55025
+55127 7
+17007 17111 55059 55101 55133 55105 55055
+55101 4
+55059 55079 55127 55133
+36023 7
+36107 36109 36017 36007 36053 36067 36011
+26093 6
+26075 26161 26065 26125 26049 26155
+26065 6
+26045 26093 26161 26075 26155 26037
+26005 5
+26159 26015 26077 26081 26139
+26045 6
+26025 26015 26065 26075 26037 26067
+26015 6
+26077 26045 26025 26067 26005 26081
+36123 4
+36101 36097 36099 36069
+36017 5
+36007 36077 36025 36053 36023
+55059 4
+17097 17111 55101 55127
+36109 6
+36015 36097 36023 36107 36011 36099
+36101 8
+42105 42117 36003 36015 36097 36123 36069 36051
+36013 4
+42123 42049 36009 36029
+36097 5
+36015 36109 36099 36101 36123
+36009 6
+42083 42123 36003 36121 36013 36029
+36003 6
+42083 36101 42105 36051 36009 36121
+17177 6
+17015 17201 17141 55045 17085 55065
+17201 6
+17141 17007 17037 55105 17177 55045
+17111 7
+17037 17089 17007 55059 17097 17031 55127
+17007 6
+17141 17111 55127 17037 17201 55105
+17097 3
+55059 17031 17111
+26163 4
+26115 26161 26099 26125
+26161 7
+26091 26075 26163 26115 26125 26093 26065
+26075 7
+26059 26025 26161 26091 26093 26045 26065
+26077 7
+26027 26149 26159 26025 26023 26015 26005
+26025 7
+26149 26023 26075 26059 26045 26077 26015
+26159 5
+26027 26021 26077 26149 26005
+36007 6
+42115 36107 36025 42127 36017 36023
+36107 6
+42015 36015 36007 42115 36023 36109
+36015 6
+42117 36107 42015 36109 36097 36101
+42049 4
+42039 39007 42123 36013
+26021 4
+18091 26027 26159 18141
+17141 7
+17103 17195 17015 17037 17007 17201 17177
+17015 6
+17141 17195 17177 19045 19097 17085
+17031 6
+17043 17089 18089 17197 17097 17111
+17089 6
+17037 17197 17031 17043 17093 17111
+17037 8
+17103 17093 17099 17089 17111 17141 17007 17201
+26115 4
+26091 26163 39095 26161
+26091 6
+26059 26115 39095 26161 26075 39051
+26059 7
+18151 26023 39051 26091 26075 39171 26025
+26023 6
+18151 18087 26149 26059 26025 26077
+26149 7
+18087 18039 26027 26025 26023 26077 26159
+26027 6
+18039 18141 26077 26149 26159 26021
+42015 7
+42117 42115 42131 42113 36107 36015 42081
+42117 5
+42105 42015 42081 36015 36101
+42123 8
+42039 42083 42047 42053 36009 36013 42049 42121
+42115 6
+42127 42069 36007 42015 36107 42131
+42083 7
+42105 42023 42047 36003 36009 42123 42053
+42105 7
+42081 42117 42035 36101 42083 36003 42023
+17043 4
+17197 17031 17089 17093
+39007 5
+39085 42039 39155 42049 39055
+17195 7
+17103 17011 17141 17015 17073 17161 19045
+17103 5
+17037 17099 17141 17195 17011
+42039 6
+42123 42121 42049 39007 42085 39155
+39085 3
+39007 39055 39035
+18091 5
+18141 18149 26021 18073 18127
+18141 6
+18039 18099 26027 18091 26021 18149
+18039 7
+18113 18087 18085 26149 26027 18141 18099
+18087 6
+18033 18151 18113 26023 26149 18039
+18151 6
+39171 18033 26059 26023 18087 18113
+36005 4
+36059 36119 34017 34003
+51019 8
+51161 51163 51680 51031 51143 51009 51067 51023
+51710 3
+37053 37029 51800
+51081 5
+51175 51183 51053 37131 51025
+51059 9
+51153 24033 24017 11001 51013 51510 24031 51610 51107
+51175 8
+51093 51800 37073 51181 37091 51081 37131 51183
+51083 7
+51117 37077 51037 37145 37033 51143 51031
+51163 8
+51125 51009 51019 51015 51530 51023 51005 51017
+51161 7
+51019 51023 51770 51067 51063 51121 51045
+55083 8
+55009 55075 55087 55135 55097 55073 55067 55041
+4012 7
+4019 4013 4025 4015 6025 6065 6071
+35006 7
+35057 35053 35001 35043 35003 35031 4001
+51199 4
+51650 51700 51830 51095
+51153 4
+51059 51179 51061 51107
+30031 10
+56029 30009 30095 30097 56039 16043 30057 30043 30007 30059
diff --git a/pysal/examples/nat_queen_old.gal b/pysal/examples/nat_queen_old.gal
new file mode 100644
index 0000000..555db04
--- /dev/null
+++ b/pysal/examples/nat_queen_old.gal
@@ -0,0 +1,6171 @@
+3085
+0 3
+40 30 22
+1 3
+2 3 69
+2 4
+1 4 69 62
+3 7
+1 68 69 42 55 27 31
+4 4
+2 28 5 62
+5 3
+28 4 6
+6 4
+49 28 5 7
+7 9
+6 70 8 43 49 86 88 59 63
+8 3
+9 43 7
+9 3
+8 10 43
+10 4
+11 9 43 46
+11 3
+10 46 23
+12 4
+32 26 35 13
+13 3
+32 12 14
+14 5
+32 29 36 13 15
+15 4
+16 33 29 14
+16 4
+33 17 41 15
+17 3
+16 41 18
+18 5
+17 19 45 38 41
+19 4
+18 20 38 39
+20 3
+19 21 39
+21 4
+20 37 22 39
+22 4
+0 40 21 37
+23 4
+24 11 46 78
+24 5
+25 66 87 78 23
+25 5
+24 66 35 60 26
+26 3
+25 35 12
+27 2
+3 31
+28 7
+65 4 5 6 49 61 62
+29 5
+33 75 36 14 15
+30 4
+0 40 34 72
+31 4
+27 42 3 47
+32 7
+35 36 12 13 14 56 58
+33 6
+41 75 15 16 73 29
+34 5
+72 120 50 133 30
+35 7
+32 12 56 25 26 60 58
+36 6
+32 75 77 14 56 29
+37 7
+22 39 40 52 21 54 53
+38 5
+51 18 19 45 39
+39 7
+19 37 38 51 20 21 52
+40 9
+0 64 99 37 72 96 22 54 30
+41 7
+33 73 74 45 16 17 18
+42 6
+3 47 80 55 89 31
+43 6
+7 8 9 10 46 59
+44 1
+57
+45 6
+38 41 74 76 18 51
+46 8
+98 11 10 43 78 81 23 59
+47 4
+80 48 42 31
+48 1
+47
+49 7
+6 7 86 63 28 61 95
+50 2
+34 2891
+51 7
+38 39 76 45 82 83 52
+52 6
+51 37 39 83 84 53
+53 8
+64 67 37 84 54 52 93 94
+54 5
+40 64 67 37 53
+55 4
+89 42 3 68
+56 8
+32 35 36 101 102 103 77 58
+57 3
+92 44 85
+58 6
+32 35 101 79 56 60
+59 5
+81 43 70 46 7
+60 6
+66 35 79 113 25 58
+61 7
+65 100 124 49 117 28 95
+62 7
+65 2 4 69 110 100 28
+63 3
+49 86 7
+64 6
+99 40 114 53 54 94
+65 4
+100 28 61 62
+66 7
+113 87 24 25 60 125 126
+67 2
+53 54
+68 8
+3 69 138 139 109 55 89 119
+69 7
+1 2 3 68 109 110 62
+70 7
+161 132 7 81 118 88 59
+71 2
+107 85
+72 5
+40 96 34 30 120
+73 6
+33 105 74 75 41 106
+74 7
+104 41 76 45 73 105 90
+75 8
+33 36 73 106 108 77 91 29
+76 5
+90 74 51 82 45
+77 6
+36 103 75 56 121 91
+78 8
+98 134 136 137 87 46 23 24
+79 4
+113 58 60 101
+80 4
+89 42 107 47
+81 5
+118 98 59 70 46
+82 6
+51 104 76 111 83 90
+83 6
+111 112 82 51 84 52
+84 5
+112 83 52 53 93
+85 5
+92 57 107 116 71
+86 9
+145 95 7 144 49 88 127 124 63
+87 5
+24 136 66 126 78
+88 6
+161 70 7 179 86 127
+89 6
+68 42 107 80 119 55
+90 4
+104 74 76 82
+91 5
+121 122 75 108 77
+92 5
+57 130 131 116 85
+93 6
+112 114 115 84 53 94
+94 4
+64 114 53 93
+95 4
+49 124 61 86
+96 8
+128 129 99 40 172 173 72 120
+97 4
+160 168 228 159
+98 5
+81 118 78 46 137
+99 5
+40 64 128 114 96
+100 5
+65 110 61 62 117
+101 6
+102 143 113 56 58 79
+102 5
+56 143 101 150 103
+103 5
+56 121 77 102 150
+104 7
+105 74 111 82 147 149 90
+105 6
+104 73 74 147 148 106
+106 6
+73 75 108 148 105 122
+107 7
+131 71 80 116 85 119 89
+108 4
+122 91 75 106
+109 4
+138 68 69 110
+110 10
+100 69 138 109 142 117 155 156 62 165
+111 6
+104 112 82 83 149 152
+112 7
+83 111 115 84 152 153 93
+113 6
+66 101 143 79 60 125
+114 7
+64 128 99 141 115 93 94
+115 6
+141 112 114 153 154 93
+116 4
+131 107 92 85
+117 5
+124 155 100 61 110
+118 7
+98 132 70 137 3084 81 182
+119 7
+194 139 68 169 107 131 89
+120 8
+96 129 34 133 167 72 180 188
+121 7
+103 140 77 150 151 122 91
+122 7
+166 108 106 140 148 121 91
+123 4
+185 146 186 135
+124 7
+164 144 117 86 155 61 95
+125 6
+66 143 113 214 189 126
+126 8
+66 163 136 162 195 214 87 125
+127 5
+88 145 179 206 86
+128 5
+96 114 99 172 141
+129 4
+96 120 180 173
+130 3
+131 92 171
+131 8
+130 171 169 170 107 116 119 92
+132 4
+161 3084 118 70
+133 4
+120 34 167 135
+134 6
+162 136 137 78 182 190
+135 6
+123 133 167 185 186 187
+136 5
+162 134 126 78 87
+137 5
+98 182 118 78 134
+138 6
+68 139 109 110 156 157
+139 7
+194 68 197 138 209 119 157
+140 5
+121 122 178 166 151
+141 7
+128 192 172 114 115 154 191
+142 4
+156 198 110 165
+143 8
+101 102 177 201 189 113 150 125
+144 9
+227 164 198 263 145 86 155 124 255
+145 5
+144 227 206 86 127
+146 4
+186 123 158 199
+147 5
+104 105 148 149 174
+148 8
+166 200 105 106 204 174 147 122
+149 6
+104 174 175 147 152 111
+150 7
+102 103 143 177 178 151 121
+151 4
+121 178 140 150
+152 6
+176 175 112 149 153 111
+153 8
+208 196 176 207 112 115 152 154
+154 5
+153 115 196 141 191
+155 7
+164 165 198 110 144 117 124
+156 6
+197 198 138 142 110 157
+157 4
+138 139 156 197
+158 4
+2897 146 2902 199
+159 6
+160 97 229 168 306 311
+160 3
+168 97 159
+161 7
+132 70 3084 184 179 88 211
+162 7
+163 134 136 232 195 190 126
+163 3
+162 195 126
+164 3
+144 155 124
+165 4
+198 155 142 110
+166 7
+200 204 203 140 178 148 122
+167 7
+133 135 220 120 187 188 221
+168 6
+160 97 228 306 342 159
+169 7
+224 193 194 131 225 170 119
+170 5
+169 183 131 171 193
+171 4
+130 131 170 183
+172 6
+96 128 192 141 173 215
+173 6
+96 129 172 180 215 212
+174 6
+200 202 175 147 148 149
+175 6
+202 205 174 176 149 152
+176 6
+205 207 208 152 153 175
+177 4
+201 178 150 143
+178 8
+166 201 203 140 177 204 150 151
+179 5
+88 161 184 206 127
+180 7
+129 188 173 237 212 120 220
+181 3
+216 217 183
+182 5
+137 3084 134 118 190
+183 7
+224 193 170 171 181 216 217
+184 4
+161 179 206 211
+185 6
+187 230 231 135 186 123
+186 7
+230 199 135 146 185 123 231
+187 6
+230 167 135 185 221 223
+188 5
+120 220 180 221 167
+189 6
+262 201 143 214 280 125
+190 5
+232 162 3084 182 134
+191 6
+192 196 141 218 219 154
+192 6
+172 141 215 218 219 191
+193 4
+224 169 170 183
+194 8
+225 226 169 139 209 213 119 222
+195 7
+162 163 232 214 281 283 126
+196 6
+236 208 153 154 219 191
+197 7
+198 139 285 209 210 156 157
+198 9
+165 268 142 255 144 210 155 156 197
+199 8
+2915 2902 231 146 158 246 186 2910
+200 6
+166 233 202 204 174 148
+201 6
+262 203 143 177 178 189
+202 6
+200 233 234 205 174 175
+203 7
+166 262 201 204 238 271 178
+204 8
+166 200 233 203 238 239 178 148
+205 8
+257 259 202 235 175 176 207 234
+206 8
+227 325 211 145 179 308 184 127
+207 5
+176 153 235 208 205
+208 7
+196 235 236 207 176 248 153
+209 6
+194 197 139 270 213 285
+210 4
+268 197 198 285
+211 5
+184 161 3084 206 308
+212 4
+215 180 173 237
+213 5
+222 209 194 226 270
+214 7
+195 189 280 282 283 125 126
+215 9
+192 172 173 237 241 212 249 244 218
+216 6
+269 272 242 181 183 217
+217 6
+224 240 242 181 183 216
+218 6
+192 244 245 215 219 191
+219 6
+192 196 236 245 218 191
+220 6
+167 237 243 180 188 221
+221 7
+251 167 188 243 187 220 223
+222 3
+226 194 213
+223 6
+187 260 261 230 251 221
+224 6
+225 193 169 240 183 217
+225 5
+224 169 194 240 226
+226 8
+225 194 299 270 240 213 253 222
+227 7
+353 325 263 296 206 144 145
+228 3
+168 97 342
+229 4
+311 338 247 159
+230 8
+260 261 231 185 186 187 252 223
+231 6
+230 199 246 185 186 252
+232 6
+162 195 3084 279 281 190
+233 6
+256 200 202 204 239 234
+234 6
+256 257 259 233 202 205
+235 7
+259 264 265 205 207 208 248
+236 6
+196 208 245 248 219 254
+237 7
+258 241 243 212 215 180 220
+238 7
+203 204 334 239 304 307 271
+239 7
+256 291 233 204 238 304 307
+240 6
+224 225 226 242 217 253
+241 7
+258 237 243 215 249 286 287
+242 5
+216 217 253 240 269
+243 7
+258 267 237 241 251 220 221
+244 6
+294 266 245 215 249 218
+245 6
+266 236 244 218 219 254
+246 6
+2915 231 199 2924 273 252
+247 6
+229 338 372 250 412 349
+248 6
+288 265 235 236 208 254
+249 5
+241 244 294 286 215
+250 4
+274 343 349 247
+251 7
+289 260 295 267 243 221 223
+252 6
+261 230 231 297 273 246
+253 6
+226 299 269 303 240 242
+254 6
+288 290 266 236 245 248
+255 6
+198 263 298 268 335 144
+256 6
+257 291 292 233 234 239
+257 6
+256 259 292 293 234 205
+258 7
+289 267 300 237 241 243 287
+259 7
+257 293 264 234 235 205 318
+260 5
+295 251 261 230 223
+261 7
+260 230 295 297 316 252 223
+262 6
+321 201 203 271 280 189
+263 6
+227 296 335 144 346 255
+264 6
+259 265 235 332 301 318
+265 5
+248 264 235 301 288
+266 5
+290 294 244 245 254
+267 4
+289 258 243 251
+268 6
+198 298 210 341 285 255
+269 6
+303 272 242 216 313 253
+270 6
+226 326 299 209 213 285
+271 6
+321 262 203 334 307 238
+272 4
+216 313 269 350
+273 6
+324 297 2924 246 315 252
+274 4
+305 250 277 343
+275 3
+276 310 278
+276 4
+275 302 310 277
+277 4
+305 274 276 302
+278 4
+329 275 284 310
+279 7
+385 232 3084 381 373 281 317
+280 6
+321 262 370 214 282 189
+281 6
+195 327 232 373 279 283
+282 6
+327 370 214 280 441 283
+283 5
+281 282 195 214 327
+284 4
+376 329 2925 278
+285 8
+197 326 268 270 209 210 341 383
+286 5
+241 287 309 294 249
+287 5
+241 258 300 309 286
+288 7
+320 290 265 301 333 248 254
+289 7
+258 331 295 267 300 312 251
+290 7
+288 320 294 266 314 254 319
+291 5
+256 328 292 304 239
+292 7
+256 257 354 291 293 328 357
+293 6
+257 259 292 357 358 318
+294 9
+290 266 336 339 244 309 249 314 286
+295 6
+289 260 261 312 251 316
+296 6
+353 227 263 387 346 380
+297 7
+322 323 324 261 316 273 252
+298 5
+375 335 268 341 255
+299 6
+226 326 270 303 344 253
+300 6
+289 258 330 331 309 287
+301 5
+264 265 332 333 288
+302 5
+337 276 277 310 305
+303 6
+299 269 344 313 253 350
+304 7
+354 291 328 371 238 239 307
+305 6
+355 302 337 274 277 343
+306 6
+168 342 311 345 347 159
+307 7
+386 334 271 304 371 238 239
+308 8
+388 325 393 3084 206 211 317 382
+309 6
+294 330 300 339 286 287
+310 7
+329 302 337 275 276 278 348
+311 6
+384 229 338 306 347 159
+312 7
+289 295 361 331 366 340 316
+313 4
+272 269 350 303
+314 6
+290 294 363 364 336 319
+315 6
+352 2945 324 2924 273 2933
+316 6
+322 261 295 297 340 312
+317 8
+385 451 388 421 457 3084 308 279
+318 7
+259 293 358 264 362 332 367
+319 5
+320 360 290 363 314
+320 6
+288 290 359 360 333 319
+321 5
+280 370 334 262 271
+322 5
+340 297 323 316 369
+323 5
+297 322 379 324 369
+324 7
+352 323 379 297 273 374 315
+325 5
+353 227 308 382 206
+326 5
+344 299 285 270 383
+327 6
+430 373 441 281 282 283
+328 4
+304 354 291 292
+329 7
+278 284 310 376 348 413 415
+330 6
+356 331 300 339 309 351
+331 7
+289 356 361 330 300 365 312
+332 7
+264 362 301 367 368 333 318
+333 7
+288 320 359 360 332 301 368
+334 8
+321 386 419 397 238 271 370 307
+335 7
+263 298 375 346 394 414 255
+336 6
+294 364 339 377 314 351
+337 5
+305 355 348 310 302
+338 5
+384 311 372 229 247
+339 5
+336 330 309 294 351
+340 5
+312 369 322 316 366
+341 8
+420 298 268 375 602 285 394 383
+342 3
+168 306 228
+343 9
+417 355 389 426 427 305 274 250 349
+344 7
+326 423 424 299 303 350 383
+345 2
+306 347
+346 6
+387 263 296 335 380 414
+347 4
+384 345 306 311
+348 7
+355 389 329 398 337 310 413
+349 6
+417 247 428 343 250 412
+350 6
+390 423 303 272 344 313
+351 7
+356 330 399 336 339 405 377
+352 7
+2945 418 2971 324 374 2970 315
+353 6
+227 325 422 296 387 382
+354 10
+416 292 357 433 328 395 431 304 392 371
+355 5
+305 343 348 389 337
+356 6
+330 331 365 399 402 351
+357 5
+392 354 292 293 358
+358 6
+293 391 392 362 318 357
+359 6
+320 368 360 333 400 401
+360 7
+320 359 363 333 400 401 319
+361 6
+331 365 366 402 407 312
+362 7
+358 391 332 367 409 410 318
+363 6
+360 364 401 403 314 319
+364 6
+363 336 403 404 377 314
+365 5
+361 402 331 356 407
+366 6
+369 361 408 340 407 312
+367 6
+362 332 368 410 411 318
+368 6
+359 332 333 367 400 411
+369 7
+322 323 366 340 406 408 379
+370 7
+321 397 334 432 280 441 282
+371 5
+304 354 307 395 386
+372 4
+384 338 412 247
+373 6
+385 327 430 279 281 381
+374 4
+352 418 379 324
+375 4
+298 341 394 335
+376 7
+453 329 458 2925 378 284 415
+377 5
+336 404 364 405 351
+378 7
+483 2949 458 2925 376 506 2973
+379 6
+418 323 324 374 369 406
+380 8
+387 420 296 396 501 473 346 414
+381 3
+385 373 279
+382 6
+353 421 422 393 308 325
+383 7
+326 424 285 341 344 602 605
+384 4
+338 347 372 311
+385 8
+451 430 317 561 547 373 279 381
+386 7
+419 454 455 307 395 334 371
+387 10
+353 422 296 396 524 472 346 380 474 478
+388 4
+393 308 317 421
+389 7
+355 456 427 461 398 343 348
+390 6
+517 423 489 493 425 350
+391 6
+448 358 392 362 433 409
+392 6
+354 357 358 391 431 433
+393 4
+388 308 421 382
+394 5
+375 420 341 414 335
+395 6
+416 354 454 455 386 371
+396 4
+472 473 387 380
+397 5
+432 370 419 450 334
+398 4
+456 348 389 413
+399 6
+356 402 405 439 444 351
+400 8
+359 360 368 401 434 435 411 447
+401 6
+359 360 363 400 403 447
+402 7
+356 361 439 365 399 436 407
+403 6
+363 364 401 404 442 447
+404 7
+364 403 405 377 442 443 446
+405 6
+399 404 377 444 446 351
+406 6
+418 440 369 438 408 379
+407 8
+361 365 366 402 436 437 408 445
+408 6
+366 369 406 407 440 445
+409 5
+448 449 362 410 391
+410 5
+409 362 449 411 367
+411 6
+400 449 367 368 434 410
+412 4
+428 372 349 247
+413 7
+453 456 329 398 494 348 415
+414 5
+420 346 380 394 335
+415 4
+376 329 413 453
+416 6
+354 455 490 395 492 431
+417 4
+426 428 349 343
+418 8
+352 2971 452 374 438 429 406 379
+419 8
+450 454 487 488 492 386 334 397
+420 7
+394 341 602 603 380 414 501
+421 7
+388 422 393 457 484 317 382
+422 7
+353 387 484 421 382 478 485
+423 7
+390 424 489 594 344 604 350
+424 5
+344 423 604 605 383
+425 2
+493 390
+426 6
+417 343 459 427 428 471
+427 6
+389 426 343 461 471 475
+428 5
+417 426 459 412 349
+429 5
+418 2971 452 469 470
+430 6
+385 547 551 327 373 441
+431 6
+416 354 392 490 491 433
+432 7
+450 486 530 397 529 370 441
+433 9
+448 449 354 391 392 491 431 477 509
+434 6
+480 449 400 435 411 476
+435 6
+480 460 400 434 447 479
+436 6
+407 463 465 402 437 439
+437 6
+463 467 468 407 436 445
+438 6
+481 418 452 482 406 440
+439 6
+463 465 402 436 399 444
+440 7
+481 482 406 468 438 408 445
+441 8
+547 327 551 530 430 432 370 282
+442 7
+460 462 464 403 404 443 447
+443 6
+462 464 466 404 442 446
+444 9
+503 399 465 466 405 502 439 504 446
+445 6
+440 467 468 437 407 408
+446 6
+464 466 404 405 443 444
+447 7
+460 435 462 400 401 403 442
+448 4
+409 449 433 391
+449 8
+448 433 434 409 410 411 476 477
+450 5
+432 419 397 486 487
+451 4
+385 561 317 457
+452 5
+470 482 418 429 438
+453 8
+515 458 494 495 483 376 413 415
+454 6
+386 419 455 488 395 492
+455 6
+416 386 454 490 395 492
+456 7
+389 519 520 461 398 494 413
+457 8
+451 484 421 561 531 597 665 317
+458 4
+376 378 483 453
+459 4
+508 426 428 471
+460 7
+462 496 497 435 442 479 447
+461 5
+456 520 427 389 475
+462 8
+464 460 496 497 500 442 443 447
+463 8
+467 465 499 436 437 439 504 505
+464 8
+462 497 466 500 502 442 443 446
+465 7
+439 463 436 503 504 505 444
+466 7
+464 500 502 503 443 444 446
+467 7
+463 498 499 468 437 505 445
+468 7
+481 499 498 467 437 440 445
+469 6
+514 2987 429 470 2971 3007
+470 7
+482 452 523 514 429 469 542
+471 7
+459 521 426 427 475 508 522
+472 5
+473 474 387 396 511
+473 5
+472 396 380 501 511
+474 4
+472 387 524 511
+475 5
+520 522 427 461 471
+476 6
+480 449 509 434 477 510
+477 4
+433 476 509 449
+478 5
+544 387 524 485 422
+479 7
+480 518 460 496 435 532 540
+480 6
+518 434 435 476 510 479
+481 8
+482 526 527 528 498 468 438 440
+482 8
+481 452 438 523 526 527 470 440
+483 6
+512 453 458 495 378 506
+484 6
+421 422 552 457 531 485
+485 5
+544 552 484 478 422
+486 6
+450 549 487 432 529 598
+487 5
+488 450 419 549 486
+488 8
+576 419 549 454 487 492 595 599
+489 4
+594 517 390 423
+490 8
+416 513 516 455 491 492 431 507
+491 6
+516 490 431 433 507 509
+492 11
+416 513 578 419 580 582 454 455 488 490 576
+493 4
+425 517 390 601
+494 7
+545 515 453 519 456 413 543
+495 4
+512 515 483 453
+496 5
+497 460 532 462 479
+497 8
+496 460 462 464 532 533 534 500
+498 6
+481 499 527 528 467 468
+499 8
+463 528 498 467 468 505 538 539
+500 8
+534 462 464 497 466 533 502 535
+501 6
+420 524 473 603 380 511
+502 8
+535 525 464 466 500 534 503 444
+503 6
+525 465 466 502 504 444
+504 8
+525 463 465 503 536 505 538 444
+505 8
+539 536 499 465 467 504 538 463
+506 7
+512 483 3018 541 2994 378 2973
+507 8
+577 579 516 581 550 490 491 509
+508 4
+521 546 459 471
+509 8
+548 550 491 433 507 476 477 510
+510 6
+480 548 518 554 476 509
+511 5
+472 473 474 524 501
+512 6
+545 515 495 483 506 541
+513 5
+516 490 492 582 577
+514 5
+3026 542 469 470 3007
+515 6
+512 545 453 494 495 543
+516 5
+513 490 491 577 507
+517 5
+489 594 493 390 601
+518 6
+480 554 560 540 510 479
+519 8
+583 456 586 555 494 520 537 543
+520 6
+519 456 522 461 537 475
+521 5
+546 563 508 522 471
+522 10
+520 521 555 589 590 592 563 471 537 475
+523 6
+482 553 556 526 470 542
+524 8
+544 387 596 501 474 603 478 511
+525 7
+568 503 567 536 502 535 504
+526 6
+481 482 523 556 558 527
+527 7
+481 482 557 558 528 498 526
+528 7
+481 539 557 559 498 499 527
+529 6
+640 644 486 432 530 598
+530 5
+432 441 551 644 529
+531 5
+552 457 484 597 593
+532 8
+560 565 496 497 564 533 540 479
+533 6
+564 497 500 565 534 532
+534 7
+502 497 500 533 566 535 565
+535 6
+566 502 525 500 534 567
+536 6
+525 568 504 505 538 570
+537 4
+520 522 555 519
+538 7
+536 499 569 504 505 570 539
+539 6
+559 528 499 569 505 538
+540 4
+560 532 518 479
+541 8
+512 545 3018 591 600 506 571 3037
+542 6
+514 3043 553 523 3026 470
+543 7
+545 515 583 586 519 494 571
+544 6
+485 552 524 593 596 478
+545 6
+512 515 494 571 541 543
+546 3
+521 563 508
+547 8
+385 551 430 784 561 786 790 441
+548 6
+550 554 510 509 574 575
+549 5
+488 599 598 486 487
+550 6
+507 548 581 607 509 575
+551 7
+547 644 786 430 530 791 441
+552 6
+544 484 485 593 531 597
+553 5
+556 523 588 542 3043
+554 7
+548 518 560 510 572 574 575
+555 6
+519 522 587 589 537 586
+556 6
+553 523 588 558 526 606
+557 7
+610 615 617 558 527 528 559
+558 7
+615 617 556 557 526 527 606
+559 7
+608 610 615 557 528 569 539
+560 7
+518 585 554 532 564 540 572
+561 8
+385 451 457 784 547 724 792 665
+562 3
+584 563 573
+563 6
+546 521 522 592 562 573
+564 6
+611 585 560 532 533 565
+565 8
+611 534 614 564 533 566 532 613
+566 7
+612 613 614 535 565 534 567
+567 6
+612 616 525 566 535 568
+568 6
+609 616 525 567 536 570
+569 6
+608 610 559 538 539 570
+570 6
+608 609 568 536 569 538
+571 6
+545 583 651 591 541 543
+572 6
+585 554 622 623 560 574
+573 5
+592 562 563 620 642
+574 6
+548 554 622 624 572 575
+575 6
+548 550 554 624 574 607
+576 5
+488 625 578 595 492
+577 6
+513 579 516 582 629 507
+578 5
+576 580 627 492 625
+579 6
+577 581 626 629 630 507
+580 5
+628 578 627 492 582
+581 6
+579 550 626 630 507 607
+582 6
+513 580 577 492 628 629
+583 7
+643 519 586 651 571 668 543
+584 1
+562
+585 6
+641 611 623 560 564 572
+586 7
+555 583 648 519 587 643 543
+587 6
+647 648 586 555 589 638
+588 7
+3043 553 618 619 556 3061 606
+589 5
+587 522 555 638 590
+590 5
+592 522 589 638 621
+591 5
+600 659 571 651 541
+592 6
+522 620 621 590 563 573
+593 6
+544 552 531 596 597 696
+594 8
+517 423 489 687 709 601 604 735
+595 7
+576 705 488 625 694 599 732
+596 6
+544 524 593 696 603 766
+597 8
+708 552 457 593 531 724 696 665
+598 5
+640 529 549 486 599
+599 6
+640 549 488 694 595 598
+600 7
+649 541 3056 659 727 591 3037
+601 4
+594 687 493 517
+602 7
+803 420 341 794 603 605 383
+603 9
+420 524 596 501 602 795 794 766 981
+604 6
+736 423 424 594 605 735
+605 13
+736 1057 803 997 424 1129 1168 1091 602 1179 604 1054 383
+606 6
+556 619 588 558 661 617
+607 6
+581 550 624 626 669 575
+608 7
+609 674 610 559 569 570 671
+609 7
+608 674 675 676 616 568 570
+610 7
+608 677 615 557 559 569 671
+611 6
+672 641 613 585 564 565
+612 7
+675 614 678 679 616 566 567
+613 6
+672 611 678 614 565 566
+614 6
+612 613 678 679 565 566
+615 7
+673 610 677 617 557 558 559
+616 7
+609 675 612 679 567 568 676
+617 7
+673 615 557 558 661 697 606
+618 7
+714 619 588 661 664 697 3061
+619 4
+618 588 661 606
+620 2
+592 573
+621 4
+592 638 646 590
+622 6
+688 623 624 689 572 574
+623 7
+641 585 622 688 689 700 572
+624 8
+704 699 622 688 575 669 574 607
+625 5
+576 627 578 595 705
+626 8
+704 579 581 701 630 669 702 607
+627 6
+705 578 580 625 628 703
+628 5
+627 580 629 582 703
+629 9
+577 579 582 776 777 628 630 701 703
+630 5
+701 626 579 581 629
+631 7
+3044 3045 718 3059 686 633 634
+632 6
+3047 3048 684 656 658 636
+633 6
+3045 3046 650 686 3059 631
+634 6
+731 3044 718 720 631 635
+635 4
+720 3065 634 692
+636 5
+632 3049 684 3048 639
+637 6
+712 681 653 3060 713 3064
+638 6
+646 647 587 589 590 621
+639 6
+3049 684 695 667 636 3069
+640 6
+706 644 598 529 694 599
+641 5
+672 585 611 700 623
+642 1
+573
+643 5
+648 586 668 710 583
+644 7
+640 706 551 529 530 791 785
+645 5
+3056 649 698 3053 662
+646 2
+621 638
+647 3
+648 587 638
+648 4
+586 587 643 647
+649 5
+600 3056 698 645 727
+650 9
+758 3046 807 716 686 3062 663 633 764
+651 9
+710 583 715 711 591 659 571 668 733
+652 6
+707 3057 3055 657 726 655
+653 6
+712 713 723 3064 637 670
+654 7
+739 740 682 3055 693 3063 655
+655 6
+707 652 654 3055 3057 693
+656 3
+632 680 658
+657 5
+3057 3058 652 685 726
+658 7
+743 680 745 684 656 728 632
+659 5
+600 651 727 711 591
+660 9
+737 717 742 3053 3054 685 3058 662 759
+661 6
+617 618 619 697 714 606
+662 5
+717 698 660 645 3053
+663 7
+3062 650 716 3061 758 664 730
+664 6
+714 757 663 730 618 3061
+665 4
+457 724 597 561
+666 7
+3067 719 722 691 667 3068 3069
+667 6
+722 695 666 3068 3069 639
+668 5
+643 651 715 710 583
+669 5
+624 704 626 702 607
+670 5
+721 690 723 772 653
+671 6
+608 610 677 674 749 755
+672 8
+641 611 613 678 744 746 700 734
+673 6
+677 615 617 750 752 697
+674 6
+608 609 676 754 755 671
+675 7
+609 612 679 616 748 751 676
+676 6
+609 674 675 616 748 754
+677 6
+673 610 615 749 750 671
+678 7
+672 612 613 614 679 746 747
+679 7
+675 612 614 678 616 747 751
+680 5
+656 721 658 728 690
+681 7
+713 682 3060 3063 761 762 637
+682 6
+739 681 654 3063 761 762
+683 6
+738 741 691 692 3066 3067
+684 7
+743 745 658 695 632 636 639
+685 6
+657 3058 660 725 726 759
+686 5
+633 650 764 718 631
+687 4
+601 594 709 1005
+688 8
+774 775 778 622 623 624 689 699
+689 8
+771 774 775 622 623 688 700 734
+690 3
+680 721 670
+691 6
+738 773 683 719 666 3067
+692 7
+738 741 683 720 3065 3066 635
+693 5
+707 740 739 654 655
+694 7
+640 706 595 788 789 599 732
+695 6
+745 684 722 729 667 639
+696 6
+708 593 756 597 596 766
+697 8
+768 769 673 617 618 752 661 714
+698 7
+804 645 649 717 662 727 760
+699 6
+704 688 774 778 624 753
+700 5
+672 641 734 689 623
+701 5
+776 626 629 630 702
+702 6
+704 776 779 701 626 669
+703 8
+627 705 777 787 628 629 857 859
+704 7
+779 624 753 626 699 669 702
+705 7
+625 595 627 789 787 732 703
+706 5
+640 785 644 694 788
+707 6
+740 652 813 655 693 726
+708 5
+696 756 811 724 597
+709 5
+594 907 1005 735 687
+710 6
+643 715 651 763 668 3070
+711 6
+651 765 659 727 733 767
+712 6
+800 713 653 637 723 797
+713 6
+800 712 681 653 761 637
+714 8
+768 769 770 618 661 664 697 757
+715 5
+651 668 733 710 763
+716 3
+650 758 663
+717 6
+737 804 742 660 662 698
+718 7
+781 686 783 631 634 731 764
+719 5
+722 666 691 780 773
+720 8
+731 741 842 692 3065 634 635 810
+721 7
+772 680 690 728 793 796 670
+722 9
+805 806 780 719 695 729 666 667 798
+723 5
+712 772 653 670 797
+724 8
+708 811 561 597 792 665 829 863
+725 6
+841 844 685 726 759 828
+726 8
+707 845 841 652 813 685 657 725
+727 8
+802 711 649 760 659 600 698 765
+728 6
+743 680 721 658 793 799
+729 4
+745 722 798 695
+730 5
+664 801 757 758 663
+731 5
+720 634 842 781 718
+732 5
+787 705 595 789 694
+733 6
+715 711 651 823 763 767
+734 5
+672 689 771 700 744
+735 6
+736 709 907 909 594 604
+736 5
+909 604 605 1054 735
+737 9
+804 870 742 874 717 660 759 856 825
+738 7
+741 809 683 782 691 692 773
+739 8
+866 867 740 682 654 693 762 827
+740 7
+866 867 813 654 707 693 739
+741 6
+738 809 810 683 720 692
+742 3
+737 660 717
+743 7
+832 812 745 684 658 728 799
+744 6
+672 771 746 814 815 734
+745 7
+743 812 684 658 695 729 798
+746 7
+672 678 744 747 814 815 816
+747 7
+678 679 746 751 816 817 815
+748 7
+675 676 751 817 754 821 822
+749 7
+677 750 755 820 824 819 671
+750 6
+673 677 749 752 819 820
+751 7
+675 679 747 748 816 817 821
+752 6
+768 673 750 820 697 831
+753 7
+704 778 779 853 854 855 699
+754 7
+674 676 748 755 821 822 824
+755 7
+674 749 754 819 822 824 671
+756 4
+696 811 708 766
+757 6
+801 770 714 849 664 730
+758 6
+801 807 650 716 663 730
+759 6
+737 685 660 725 825 828
+760 5
+698 834 804 802 727
+761 6
+800 808 681 682 713 762
+762 6
+739 808 681 682 761 827
+763 6
+710 715 3070 823 733 830
+764 6
+837 807 650 718 783 686
+765 6
+802 835 711 876 727 767
+766 8
+996 811 756 981 886 696 596 603
+767 7
+835 838 711 846 733 823 765
+768 5
+752 697 714 831 769
+769 7
+768 770 872 873 714 697 831
+770 5
+769 714 873 757 849
+771 6
+833 775 744 814 689 734
+772 7
+869 797 721 723 796 893 670
+773 6
+738 868 780 782 719 691
+774 7
+775 778 688 689 852 855 699
+775 8
+897 898 771 774 833 688 689 852
+776 8
+864 862 777 779 853 701 702 629
+777 6
+864 776 629 859 860 703
+778 7
+774 688 753 852 854 855 699
+779 6
+704 776 753 853 854 702
+780 5
+722 868 773 806 719
+781 5
+842 731 901 718 783
+782 5
+809 738 868 773 889
+783 6
+901 781 718 919 764 837
+784 6
+961 547 561 790 792 826
+785 7
+706 899 644 1009 788 791 955
+786 5
+791 905 547 790 551
+787 7
+705 843 789 857 858 732 703
+788 8
+706 899 843 785 914 915 789 694
+789 6
+705 843 787 788 694 732
+790 7
+961 547 1029 905 784 786 984
+791 6
+644 551 905 785 786 955
+792 5
+784 561 826 724 829
+793 6
+721 850 851 728 796 799
+794 6
+603 803 997 1207 602 795
+795 4
+794 603 981 1207
+796 5
+721 850 772 893 793
+797 5
+712 800 723 772 869
+798 7
+805 745 812 847 722 729 861
+799 5
+728 793 851 832 743
+800 8
+869 871 712 713 848 808 761 797
+801 6
+807 840 849 757 758 730
+802 5
+760 834 876 765 727
+803 4
+602 794 605 997
+804 8
+737 834 870 865 717 760 698 895
+805 5
+722 885 806 798 847
+806 6
+868 805 780 882 722 885
+807 6
+801 837 840 650 758 764
+808 7
+800 875 848 881 761 762 827
+809 6
+738 741 839 810 782 889
+810 6
+741 839 809 842 720 891
+811 6
+708 724 886 756 766 863
+812 5
+832 745 861 798 743
+813 5
+866 707 740 845 726
+814 6
+833 771 744 746 815 884
+815 7
+744 746 747 814 816 884 887
+816 7
+746 747 815 817 887 751 892
+817 7
+747 748 751 816 821 890 892
+818 2
+836 3070
+819 7
+749 878 879 755 820 750 824
+820 7
+749 878 877 752 819 750 831
+821 7
+748 751 817 754 822 888 890
+822 7
+748 824 754 883 821 755 888
+823 5
+763 846 733 830 767
+824 7
+749 879 754 883 822 755 819
+825 5
+856 737 874 828 759
+826 8
+1043 961 1127 784 984 1075 792 829
+827 6
+739 808 875 904 867 762
+828 5
+856 825 844 725 759
+829 6
+1043 724 886 792 826 863
+830 3
+763 3070 823
+831 6
+768 769 872 877 752 820
+832 7
+896 900 743 812 851 861 799
+833 7
+897 898 771 775 814 884 894
+834 6
+865 802 804 876 911 760
+835 5
+917 876 765 838 767
+836 1
+818
+837 8
+807 840 938 783 945 946 919 764
+838 5
+880 835 917 846 767
+839 5
+809 810 891 889 913
+840 6
+801 837 807 938 912 849
+841 5
+939 844 725 726 845
+842 7
+891 901 902 810 781 720 731
+843 5
+858 914 787 788 789
+844 7
+841 939 972 944 725 856 828
+845 6
+866 841 939 813 947 726
+846 4
+880 823 838 767
+847 6
+798 805 903 885 861 926
+848 5
+800 808 916 881 871
+849 7
+801 770 840 873 912 948 757
+850 5
+793 851 796 893 910
+851 6
+832 900 910 850 793 799
+852 7
+928 897 774 775 778 855 927
+853 8
+931 932 933 776 779 753 854 862
+854 8
+932 933 778 779 753 853 855 927
+855 8
+928 933 774 778 753 852 854 927
+856 6
+737 874 844 944 825 828
+857 6
+930 934 787 858 859 703
+858 5
+857 843 787 930 914
+859 7
+934 936 777 937 857 860 703
+860 4
+864 777 859 936
+861 7
+832 896 903 812 847 926 798
+862 6
+864 931 932 935 776 853
+863 4
+811 724 829 886
+864 7
+931 935 776 777 936 860 862
+865 6
+834 804 942 911 957 895
+866 8
+739 740 904 813 845 977 947 867
+867 5
+904 827 866 739 740
+868 9
+773 806 780 782 882 918 889 921 922
+869 6
+800 772 871 906 797 893
+870 7
+737 804 874 940 942 895 959
+871 6
+800 962 869 906 848 916
+872 7
+769 963 873 877 948 956 831
+873 5
+872 769 770 948 849
+874 6
+737 870 940 944 856 825
+875 5
+808 881 904 827 920
+876 8
+802 835 967 834 911 978 917 765
+877 6
+872 878 943 820 956 831
+878 6
+929 877 879 819 820 943
+879 5
+824 929 819 878 883
+880 4
+908 846 838 917
+881 7
+965 808 970 875 848 916 920
+882 4
+922 868 885 806
+883 6
+929 879 888 822 824 924
+884 7
+833 951 814 815 887 952 894
+885 7
+805 806 941 847 882 922 926
+886 7
+996 811 1043 1044 829 766 863
+887 7
+815 816 979 884 951 923 892
+888 6
+883 821 822 890 924 954
+889 6
+868 839 809 782 913 918
+890 7
+975 817 821 888 954 923 892
+891 7
+960 902 839 810 842 913 925
+892 5
+816 817 890 923 887
+893 7
+772 869 906 910 850 949 796
+894 6
+833 898 884 951 952 953
+895 4
+865 804 942 870
+896 5
+832 900 861 950 903
+897 8
+928 833 898 775 1008 1011 852 953
+898 5
+897 833 953 894 775
+899 4
+785 915 788 1009
+900 7
+832 896 950 910 976 851 982
+901 8
+902 1031 1032 842 781 783 968 919
+902 5
+968 842 891 901 925
+903 6
+896 964 847 950 861 926
+904 9
+993 866 867 965 875 994 977 920 827
+905 8
+1024 1028 1029 1039 786 790 791 955
+906 6
+962 869 871 973 949 893
+907 6
+709 909 974 1005 1047 735
+908 4
+880 971 917 967
+909 6
+736 1090 907 974 1054 735
+910 6
+900 976 850 851 949 893
+911 6
+865 834 876 978 987 957
+912 6
+840 969 938 849 948 980
+913 7
+960 966 839 918 889 891 958
+914 7
+930 1001 843 1103 915 788 858
+915 7
+899 1103 1009 914 1059 788 1109
+916 5
+848 881 962 970 871
+917 6
+835 838 967 876 908 880
+918 5
+921 889 868 958 913
+919 6
+901 1031 1032 783 945 837
+920 4
+904 881 875 965
+921 6
+868 918 985 922 1018 958
+922 6
+868 941 882 885 985 921
+923 6
+975 979 887 890 892 989
+924 6
+992 929 883 888 954 990
+925 5
+960 968 891 1016 902
+926 9
+1026 1027 964 903 941 847 995 885 861
+927 7
+928 933 1003 1008 852 854 855
+928 6
+897 1003 1008 852 855 927
+929 7
+992 878 943 883 879 924 990
+930 6
+934 1000 1001 914 857 858
+931 7
+864 932 999 935 1006 853 862
+932 7
+931 933 1006 1007 853 854 862
+933 7
+932 1003 1007 853 854 855 927
+934 5
+1000 857 930 859 937
+935 6
+864 931 998 999 936 862
+936 8
+864 998 935 937 1002 999 859 860
+937 5
+936 1000 1002 859 934
+938 7
+837 840 969 912 946 1014 988
+939 6
+1062 844 841 972 845 947
+940 5
+944 874 986 870 959
+941 5
+985 922 995 885 926
+942 7
+865 870 1063 1065 959 957 895
+943 6
+929 877 878 1017 956 990
+944 8
+1061 844 874 940 1071 856 986 972
+945 6
+837 1032 946 919 1048 988
+946 4
+945 938 988 837
+947 6
+866 1062 1064 939 845 977
+948 7
+963 872 873 912 849 980 983
+949 7
+1025 906 973 910 976 893 1022
+950 6
+896 964 903 982 900 991
+951 6
+1004 979 884 887 952 894
+952 7
+1004 1010 884 951 953 1012 894
+953 6
+897 898 1010 1011 952 894
+954 6
+992 975 1015 888 890 924
+955 6
+1024 1030 1009 905 785 791
+956 6
+963 872 877 943 1017 1019
+957 6
+865 1063 942 911 1040 987
+958 6
+966 913 918 921 1018 1021
+959 8
+870 1063 1065 940 1068 1069 942 986
+960 7
+966 1035 913 1016 891 925 1023
+961 4
+784 984 826 790
+962 6
+871 906 973 916 1020 970
+963 6
+872 1038 948 983 1019 956
+964 5
+1026 991 950 926 903
+965 5
+904 881 920 970 993
+966 5
+960 913 1021 958 1023
+967 7
+1060 876 971 908 978 917 1013
+968 6
+901 1094 1031 902 1016 925
+969 4
+912 938 980 1014
+970 7
+993 962 965 1036 881 916 1020
+971 3
+1060 908 967
+972 5
+944 939 844 1061 1062
+973 6
+1025 962 906 949 1020 1055
+974 6
+1090 907 909 1172 1141 1047
+975 6
+954 1015 890 923 1052 989
+976 6
+900 910 949 982 1022 1045
+977 7
+866 904 1066 1067 994 1064 947
+978 6
+967 1033 876 911 1013 987
+979 6
+887 1004 1042 951 923 989
+980 6
+969 1034 912 948 1014 983
+981 7
+1120 603 996 1328 1207 795 766
+982 7
+1072 900 1037 976 1045 950 991
+983 5
+980 1034 963 948 1038
+984 7
+961 1156 1029 1127 790 826 1157
+985 7
+995 941 1018 921 922 1051 1050
+986 5
+944 1068 1071 940 959
+987 5
+1040 1033 978 957 911
+988 7
+1041 938 1048 945 946 1014 1080
+989 6
+1056 975 1042 979 923 1052
+990 8
+992 929 1095 943 1073 1079 1017 924
+991 5
+1026 964 982 950 1037
+992 6
+929 1073 1015 954 924 990
+993 6
+994 965 904 970 1036 1070
+994 5
+904 977 1066 1070 993
+995 6
+1058 1027 941 985 1051 926
+996 5
+1120 1044 886 766 981
+997 6
+1057 803 1229 1207 794 605
+998 6
+999 936 1002 935 1106 1117
+999 7
+931 998 935 936 1006 1113 1117
+1000 7
+930 934 937 1002 1104 1105 1001
+1001 5
+1000 1104 914 930 1103
+1002 6
+998 1105 936 937 1000 1106
+1003 6
+928 933 1007 1008 1082 927
+1004 6
+1042 979 1012 951 952 1083
+1005 6
+1282 709 907 1101 687 1047
+1006 6
+931 932 999 1007 1113 1115
+1007 7
+932 933 1003 1006 1082 1115 1118
+1008 8
+928 897 1003 1107 1111 1082 1011 927
+1009 7
+1059 899 1030 785 915 1046 955
+1010 6
+1084 1011 1012 952 953 1116
+1011 6
+897 1008 1010 1111 953 1116
+1012 5
+952 1084 1010 1083 1004
+1013 4
+1033 978 1060 967
+1014 8
+1123 1124 969 938 1041 980 988 1034
+1015 6
+992 1093 975 1073 954 1052
+1016 6
+960 1094 968 1035 1096 925
+1017 6
+1095 943 1079 1019 956 990
+1018 6
+1074 985 921 1050 1021 958
+1019 6
+963 1038 1078 1079 1017 956
+1020 6
+962 970 1036 973 1086 1055
+1021 7
+1088 1089 966 1074 1018 958 1023
+1022 5
+976 1025 1081 949 1045
+1023 5
+960 1089 1035 1021 966
+1024 4
+905 955 1030 1039
+1025 6
+973 949 1081 1055 1022 1119
+1026 7
+1027 964 1099 1100 1037 926 991
+1027 5
+1026 995 1100 1058 926
+1028 5
+1112 905 1151 1029 1039
+1029 6
+1028 1157 905 790 984 1151
+1030 9
+1024 1059 1220 1039 1009 1046 1112 1114 955
+1031 7
+901 1094 968 1132 1032 1077 919
+1032 6
+901 1031 945 1077 919 1048
+1033 7
+1060 1040 978 1108 1013 987 1053
+1034 7
+1124 1126 1038 980 1014 983 1049
+1035 6
+960 1089 1096 1135 1016 1023
+1036 6
+993 1097 970 1070 1020 1086
+1037 6
+1026 1099 1102 1072 982 991
+1038 9
+963 1126 1034 1078 983 1049 1146 1019 1098
+1039 5
+1024 905 1112 1028 1030
+1040 7
+1154 1063 1033 1149 1053 987 957
+1041 4
+1080 1123 988 1014
+1042 6
+1056 1122 1004 979 1083 989
+1043 6
+1127 1075 1044 886 826 829
+1044 6
+1120 1043 996 1075 886 1239
+1045 6
+1072 1130 976 982 1081 1022
+1046 3
+1009 1059 1030
+1047 5
+1141 907 1005 974 1101
+1048 5
+1032 945 988 1077 1080
+1049 3
+1034 1126 1038
+1050 6
+1128 1074 1140 985 1018 1051
+1051 5
+1128 985 1050 995 1058
+1052 6
+1056 1125 975 1015 989 1093
+1053 3
+1040 1033 1108
+1054 5
+736 1129 1090 605 909
+1055 5
+1025 1020 973 1086 1119
+1056 6
+1122 1125 1137 1042 1052 989
+1057 7
+1312 1091 997 1225 1229 1179 605
+1058 5
+1027 1128 1051 995 1100
+1059 7
+1046 1030 1009 915 1109 1110 1114
+1060 6
+967 1033 971 1165 1108 1013
+1061 8
+1092 1062 1131 972 1166 1071 944 1076
+1062 8
+1121 1061 1064 939 972 947 1076 1085
+1063 6
+1065 942 957 1040 1149 959
+1064 7
+1062 1067 1164 977 947 1176 1085
+1065 4
+1063 1069 942 959
+1066 6
+994 1067 1070 977 1139 1087
+1067 7
+1064 1066 1164 977 1139 1173 1176
+1068 6
+1155 1158 1069 1071 986 959
+1069 6
+1153 1155 1065 1068 1187 959
+1070 8
+993 994 1097 1066 1036 1134 1138 1087
+1071 7
+1155 1061 1158 1068 1166 944 986
+1072 6
+1152 1130 1037 1102 1045 982
+1073 7
+992 1093 1095 1160 1015 1145 990
+1074 6
+1088 1161 1021 1140 1018 1050
+1075 7
+1127 1043 1044 1365 1239 826 1370
+1076 5
+1136 1121 1092 1061 1062
+1077 6
+1031 1032 1132 1080 1048 1150
+1078 5
+1079 1098 1019 1038 1159
+1079 6
+1159 1095 1078 1017 1019 990
+1080 8
+1123 1189 1192 1041 1077 1048 988 1150
+1081 6
+1025 1130 1045 1148 1022 1119
+1082 6
+1003 1007 1008 1170 1107 1118
+1083 7
+1122 1004 1042 1012 1142 1143 1084
+1084 6
+1191 1010 1012 1142 1083 1116
+1085 5
+1064 1185 1164 1062 1121
+1086 7
+1097 1036 1133 1144 1055 1020 1119
+1087 4
+1066 1139 1134 1070
+1088 5
+1161 1074 1021 1162 1089
+1089 7
+1088 1163 1162 1035 1135 1021 1023
+1090 6
+1129 909 974 1168 1172 1054
+1091 3
+1057 1179 605
+1092 5
+1136 1177 1131 1076 1061
+1093 6
+1125 1073 1174 1015 1145 1052
+1094 6
+1031 968 1132 1096 1016 1180
+1095 7
+1159 1160 1073 1079 1017 990 1183
+1096 6
+1094 1035 1135 1205 1016 1180
+1097 6
+1036 1133 1070 1138 1147 1086
+1098 6
+1188 1159 1038 1078 1208 1146
+1099 6
+1026 1100 1037 1102 1169 1178
+1100 7
+1026 1027 1128 1099 1058 1167 1169
+1101 6
+1281 1282 1005 1262 1141 1047
+1102 6
+1152 1184 1099 1037 1072 1178
+1103 7
+1216 1001 1104 914 915 1109 1241
+1104 6
+1216 1000 1001 1103 1105 1211
+1105 6
+1000 1002 1104 1106 1211 1214
+1106 5
+1105 1002 1117 998 1214
+1107 6
+1218 1190 1008 1170 1111 1082
+1108 4
+1033 1060 1053 1165
+1109 9
+1059 1110 1219 1358 1103 1361 915 1334 1241
+1110 4
+1114 1059 1109 1219
+1111 6
+1190 1195 1008 1107 1116 1011
+1112 8
+1028 1030 1323 1039 1235 1220 1114 1151
+1113 6
+999 1006 1213 1115 1212 1117
+1114 6
+1219 1220 1030 1059 1110 1112
+1115 7
+1217 1193 1006 1007 1113 1213 1118
+1116 6
+1191 1195 1010 1011 1111 1084
+1117 6
+998 999 1106 1113 1212 1214
+1118 5
+1193 1082 1115 1170 1007
+1119 7
+1025 1198 1144 1081 1148 1086 1055
+1120 6
+1345 996 1328 1044 981 1239
+1121 6
+1185 1062 1164 1136 1076 1085
+1122 6
+1056 1199 1137 1042 1143 1083
+1123 7
+1248 1124 1189 1041 1014 1080 1244
+1124 6
+1248 1123 1126 1034 1014 1181
+1125 6
+1056 1186 1093 1137 1174 1052
+1126 6
+1124 1034 1038 1049 1146 1181
+1127 8
+1043 1156 1329 1075 1365 984 826 1370
+1128 7
+1058 1100 1167 1171 1140 1050 1051
+1129 4
+1168 1090 605 1054
+1130 7
+1152 1197 1072 1201 1045 1081 1148
+1131 5
+1136 1177 1092 1061 1166
+1132 7
+1094 1031 1202 1203 1077 1180 1150
+1133 4
+1144 1097 1147 1086
+1134 7
+1223 1070 1138 1139 1173 1206 1087
+1135 5
+1096 1089 1035 1163 1205
+1136 9
+1121 1092 1185 1226 1131 1196 1076 1240 1177
+1137 5
+1056 1122 1199 1125 1186
+1138 6
+1224 1097 1134 1070 1206 1147
+1139 5
+1066 1067 1173 1134 1087
+1140 6
+1128 1161 1200 1074 1171 1050
+1141 5
+1262 1172 1101 974 1047
+1142 5
+1232 1143 1083 1084 1191
+1143 6
+1122 1142 1199 1232 1238 1083
+1144 7
+1227 1133 1198 1204 1147 1086 1119
+1145 6
+1093 1160 1258 1073 1234 1174
+1146 6
+1252 1126 1098 1038 1188 1181
+1147 7
+1224 1097 1133 1138 1204 1237 1144
+1148 6
+1130 1197 1198 1081 1242 1119
+1149 4
+1040 1154 1175 1063
+1150 5
+1080 1192 1203 1132 1077
+1151 5
+1112 1235 1028 1029 1157
+1152 6
+1184 1130 1102 1072 1201 1236
+1153 2
+1187 1069
+1154 5
+1040 1210 1149 1254 1175
+1155 6
+1187 1158 1068 1069 1071 1215
+1156 7
+1157 1127 1194 1329 984 1370 1182
+1157 6
+1156 1029 1235 984 1182 1151
+1158 8
+3074 1155 1260 1068 1166 1071 1251 1215
+1159 7
+1095 1098 1231 1078 1079 1208 1183
+1160 5
+1073 1234 1183 1145 1095
+1161 6
+1088 1162 1230 1200 1074 1140
+1162 5
+1088 1089 1163 1230 1161
+1163 6
+1089 1250 1162 1230 1135 1205
+1164 8
+1121 1221 1185 1064 1067 1176 1085 1255
+1165 2
+1108 1060
+1166 8
+3074 1131 1061 1158 3083 1071 1177 1247
+1167 6
+1128 1100 1169 1171 1209 1243
+1168 8
+1090 1129 1233 1172 1301 1225 1179 605
+1169 7
+1222 1228 1099 1100 1167 1209 1178
+1170 6
+1218 1253 1193 1107 1082 1118
+1171 7
+1264 1128 1261 1167 1200 1140 1243
+1172 6
+1090 974 1262 1168 1141 1301
+1173 6
+1221 1223 1067 1134 1139 1176
+1174 6
+1186 1093 1258 1259 1145 1125
+1175 4
+1256 1154 1149 1210
+1176 5
+1064 1067 1164 1173 1221
+1177 6
+1092 1131 1166 1136 1240 1247
+1178 7
+1184 1222 1290 1099 1102 1169 1273
+1179 5
+1168 1057 1091 605 1225
+1180 6
+1094 1096 1132 1202 1267 1205
+1181 5
+1248 1252 1146 1124 1126
+1182 8
+1377 1156 1157 1194 1453 1329 1235 1400
+1183 7
+1159 1160 1095 1231 1265 1234 1303
+1184 6
+1152 1290 1102 1296 1236 1178
+1185 7
+1121 1284 1255 1226 1164 1136 1085
+1186 7
+1125 1286 1259 1199 1137 1238 1174
+1187 5
+1153 1155 1069 1294 1215
+1188 5
+1208 1249 1098 1252 1146
+1189 7
+1123 1287 1192 1298 1266 1080 1244
+1190 5
+1275 1218 1107 1195 1111
+1191 6
+1084 1195 1232 1276 1142 1116
+1192 5
+1080 1266 1203 1189 1150
+1193 6
+1217 1253 1170 1115 1277 1118
+1194 3
+1329 1156 1182
+1195 7
+1190 1191 1116 1111 1305 1275 1276
+1196 1
+1136
+1197 6
+1285 1130 1201 1242 1148 1246
+1198 6
+1227 1144 1242 1148 1245 1119
+1199 5
+1137 1122 1186 1238 1143
+1200 6
+1288 1161 1230 1264 1171 1140
+1201 5
+1152 1130 1236 1197 1246
+1202 6
+1280 1267 1132 1203 1180 1279
+1203 6
+1280 1192 1132 1266 1202 1150
+1204 5
+1144 1257 1147 1237 1227
+1205 8
+1250 1096 1163 1135 1267 1268 1269 1180
+1206 6
+1223 1224 1134 1138 1299 1270
+1207 9
+1379 997 1229 1328 1620 981 1754 795 794
+1208 6
+1249 1188 1159 1098 1231 1309
+1209 7
+1291 1228 1261 1167 1169 1274 1243
+1210 5
+1256 1154 1314 1254 1175
+1211 6
+1216 1317 1319 1104 1105 1214
+1212 5
+1113 1213 1117 1214 1321
+1213 6
+1217 1321 1324 1113 1115 1212
+1214 8
+1317 1320 1321 1105 1106 1211 1212 1117
+1215 8
+3074 1283 1187 1158 1251 1294 1155 1311
+1216 6
+1319 1322 1103 1104 1241 1211
+1217 6
+1193 1324 1325 1277 1115 1213
+1218 7
+1253 1190 1170 1107 1337 1275 1340
+1219 7
+1220 1323 1358 1361 1109 1110 1114
+1220 5
+1112 1323 1114 1219 1030
+1221 7
+1223 1255 1164 1304 1299 1173 1176
+1222 4
+1169 1178 1228 1273
+1223 5
+1221 1299 1173 1134 1206
+1224 5
+1270 1138 1147 1237 1206
+1225 6
+1312 1057 1263 1168 1233 1179
+1226 5
+1136 1185 1284 1240 1289
+1227 7
+1257 1198 1327 1332 1144 1204 1245
+1228 5
+1169 1291 1273 1222 1209
+1229 5
+1312 1057 1379 997 1207
+1230 7
+1250 1288 1161 1162 1163 1293 1200
+1231 6
+1159 1303 1208 1309 1310 1183
+1232 7
+1302 1191 1238 1143 1306 1276 1142
+1233 5
+1168 1225 1316 1301 1263
+1234 6
+1318 1160 1258 1265 1145 1183
+1235 6
+1377 1157 1323 1112 1182 1151
+1236 6
+1152 1184 1296 1201 1308 1246
+1237 6
+1224 1257 1330 1204 1270 1147
+1238 7
+1186 1286 1199 1232 1302 1143 1306
+1239 6
+1120 1345 1075 1044 1365 1366
+1240 6
+1289 1226 1136 1177 1278 1247
+1241 5
+1216 1322 1109 1334 1103
+1242 6
+1285 1197 1198 1148 1245 1342
+1243 4
+1209 1171 1261 1167
+1244 4
+1248 1298 1123 1189
+1245 5
+1242 1227 1342 1198 1327
+1246 6
+1285 1297 1197 1201 1236 1308
+1247 7
+1315 3083 1166 1240 1177 1341 1278
+1248 9
+1123 1124 1292 1298 1331 1272 1252 1244 1181
+1249 5
+1208 1313 1252 1188 1309
+1250 6
+1163 1293 1326 1230 1268 1205
+1251 5
+3074 1283 1260 1158 1215
+1252 8
+1248 1249 1188 1313 1292 1272 1146 1181
+1253 6
+1218 1193 1170 1337 1340 1277
+1254 5
+1154 1314 1380 1210 1350
+1255 7
+1185 1284 1221 1164 1304 1372 1343
+1256 3
+1210 1314 1175
+1257 5
+1332 1330 1227 1204 1237
+1258 7
+1348 1318 1352 1259 1234 1174 1145
+1259 5
+1186 1286 1348 1258 1174
+1260 5
+1283 3074 1251 1158 1271
+1261 6
+1295 1264 1171 1209 1274 1243
+1262 6
+1281 1447 1101 1172 1301 1141
+1263 6
+1312 1316 1381 1225 1233 1367
+1264 7
+1288 1261 1295 1200 1171 1300 1336
+1265 5
+1303 1234 1383 1318 1183
+1266 6
+1280 1189 1287 1192 1203 1335
+1267 8
+1347 1351 1355 1202 1205 1279 1180 1269
+1268 5
+1250 1356 1205 1326 1269
+1269 5
+1356 1267 1268 1205 1351
+1270 6
+1349 1224 1330 1299 1237 1206
+1271 2
+3074 1260
+1272 7
+1248 1313 1252 1292 1357 1331 1369
+1273 6
+1344 1222 1290 1291 1228 1178
+1274 5
+1307 1209 1291 1261 1295
+1275 6
+1218 1190 1195 1359 1305 1340
+1276 6
+1382 1191 1195 1232 1305 1306
+1277 6
+1217 1253 1193 1325 1360 1337
+1278 5
+1240 1289 1315 1333 1247
+1279 4
+1280 1202 1267 1347
+1280 7
+1346 1347 1266 1202 1203 1335 1279
+1281 4
+1282 1101 1262 1447
+1282 5
+1281 1401 1447 1005 1101
+1283 4
+3074 1251 1260 1215
+1284 8
+1408 1185 1255 1289 1226 1376 1372 1374
+1285 7
+1386 1197 1297 1342 1368 1242 1246
+1286 6
+1186 1348 1238 1259 1302 1375
+1287 6
+1189 1298 1266 1363 1335 1373
+1288 5
+1200 1264 1293 1230 1336
+1289 6
+1284 1226 1374 1333 1240 1278
+1290 6
+1184 1344 1296 1273 1178 1338
+1291 7
+1344 1371 1228 1273 1209 1274 1307
+1292 4
+1248 1272 1331 1252
+1293 8
+1250 1288 1385 1387 1326 1384 1230 1336
+1294 4
+1311 1362 1187 1215
+1295 6
+1307 1261 1264 1300 1274 1339
+1296 6
+1184 1290 1236 1338 1308 1354
+1297 5
+1368 1308 1364 1285 1246
+1298 6
+1248 1189 1287 1363 1331 1244
+1299 7
+1412 1221 1349 1223 1270 1304 1206
+1300 5
+1264 1336 1339 1353 1295
+1301 7
+1316 1447 1262 1168 1233 1172 1404
+1302 7
+1286 1390 1391 1232 1238 1306 1375
+1303 5
+1265 1231 1310 1383 1183
+1304 6
+1411 1412 1221 1255 1299 1343
+1305 6
+1445 1382 1195 1359 1275 1276
+1306 7
+1238 1382 1390 1391 1232 1302 1276
+1307 6
+1371 1291 1295 1396 1274 1339
+1308 7
+1354 1389 1296 1297 1364 1236 1246
+1309 8
+1249 1313 1448 1231 1406 1208 1434 1310
+1310 5
+1383 1303 1309 1406 1231
+1311 4
+3074 1362 1294 1215
+1312 9
+1057 1538 1379 1381 1225 1229 1263 1620 1591
+1313 7
+1249 1252 1272 1369 1434 1436 1309
+1314 4
+1256 1210 1350 1254
+1315 6
+1388 1393 1333 1341 1278 1247
+1316 5
+1233 1367 1404 1301 1263
+1317 6
+1319 1320 1421 1424 1211 1214
+1318 6
+1413 1383 1352 1258 1265 1234
+1319 6
+1216 1317 1322 1420 1421 1211
+1320 4
+1424 1321 1317 1214
+1321 8
+1320 1324 1424 1426 1399 1212 1213 1214
+1322 6
+1216 1415 1319 1420 1334 1241
+1323 7
+1377 1219 1220 1358 1423 1235 1112
+1324 5
+1217 1325 1213 1399 1321
+1325 6
+1217 1324 1360 1425 1399 1277
+1326 5
+1384 1356 1250 1268 1293
+1327 5
+1395 1227 1332 1245 1342
+1328 8
+1120 1345 1576 1454 981 1207 1754 1722
+1329 7
+1156 1127 1194 1400 1370 1450 1182
+1330 6
+1349 1414 1257 1332 1237 1270
+1331 7
+1248 1292 1357 1298 1363 1431 1272
+1332 9
+1349 1414 1257 1227 1490 1327 1330 1395 1430
+1333 5
+1289 1315 1388 1374 1278
+1334 7
+1415 1417 1322 1422 1361 1109 1241
+1335 7
+1280 1346 1287 1418 1419 1266 1373
+1336 7
+1392 1288 1353 1387 1293 1264 1300
+1337 7
+1218 1443 1444 1253 1360 1340 1277
+1338 5
+1296 1344 1290 1354 1378
+1339 6
+1353 1295 1300 1398 1396 1307
+1340 7
+1218 1443 1253 1359 1464 1337 1275
+1341 7
+1315 3083 1393 1394 1427 1402 1247
+1342 6
+1285 1386 1327 1395 1242 1245
+1343 4
+1304 1411 1372 1255
+1344 7
+1378 1290 1291 1397 1273 1338 1371
+1345 6
+1120 1452 1454 1328 1366 1239
+1346 4
+1280 1418 1347 1335
+1347 6
+1280 1346 1418 1355 1267 1279
+1348 6
+1286 1352 1258 1259 1432 1375
+1349 6
+1412 1414 1330 1299 1332 1270
+1350 4
+1314 1380 1405 1254
+1351 5
+1356 1355 1267 1428 1269
+1352 6
+1348 1413 1318 1258 1432 1468
+1353 5
+1336 1392 1339 1300 1398
+1354 6
+1378 1389 1296 1338 1403 1308
+1355 7
+1442 1347 1351 1418 1267 1428 1429
+1356 8
+1351 1416 1326 1384 1428 1269 1268 1437
+1357 5
+1272 1457 1331 1369 1431
+1358 8
+1219 1323 1484 1422 1423 1361 1109 1532
+1359 5
+1464 1305 1275 1340 1445
+1360 5
+1425 1337 1444 1277 1325
+1361 5
+1422 1219 1358 1109 1334
+1362 2
+1294 1311
+1363 6
+1287 1298 1331 1431 1433 1373
+1364 6
+1409 1410 1389 1297 1368 1308
+1365 6
+1127 1452 1075 1366 1239 1370
+1366 4
+1345 1452 1365 1239
+1367 5
+1404 1466 1316 1381 1263
+1368 5
+1297 1386 1364 1285 1409
+1369 6
+1313 1357 1457 1272 1465 1436
+1370 14
+1571 1156 1127 1512 1708 1450 1707 1452 1329 1075 1716 1365 1726 1599
+1371 7
+1344 1440 1435 1291 1396 1397 1307
+1372 9
+1408 1473 1411 1284 3077 1255 1441 1463 1343
+1373 6
+1287 1419 1455 1363 1335 1433
+1374 5
+1408 1289 1284 1333 1388
+1375 6
+1348 1286 1390 1302 1432 1471
+1376 1
+1284
+1377 9
+1544 1323 1484 1453 1423 1235 1525 1496 1182
+1378 6
+1344 1354 1397 1338 1403 1439
+1379 4
+1312 1620 1229 1207
+1380 4
+1405 1350 1254 1477
+1381 6
+1312 1538 1515 1263 1367 1466
+1382 7
+1474 1475 1445 1391 1305 1306 1276
+1383 7
+1413 1318 1480 1265 1406 1303 1310
+1384 6
+1416 1385 1356 1293 1326 1438
+1385 6
+1384 1481 1387 1293 1392 1438
+1386 7
+1409 1285 1395 1491 1368 1497 1342
+1387 4
+1336 1385 1293 1392
+1388 6
+1408 1315 1451 1393 1333 1374
+1389 7
+1410 1354 1458 1364 1403 1308 1470
+1390 7
+1475 1476 1391 1302 1306 1471 1375
+1391 7
+1474 1475 1476 1382 1390 1302 1306
+1392 8
+1481 1353 1387 1459 1398 1449 1336 1385
+1393 7
+1315 1482 1451 1388 1427 1402 1341
+1394 4
+1427 1341 1446 1407
+1395 7
+1479 1386 1327 1491 1332 1430 1342
+1396 6
+1440 1371 1456 1398 1307 1339
+1397 7
+1344 1378 1435 1403 1460 1371 1439
+1398 6
+1456 1353 1392 1396 1449 1339
+1399 6
+1321 1324 1325 1425 1426 1467
+1400 5
+1494 1329 1450 1453 1182
+1401 1
+1282
+1402 2
+1393 1341
+1403 6
+1378 1354 1389 1397 1470 1439
+1404 7
+1316 1609 1514 1301 1462 1367 1466
+1405 3
+1380 1477 1350
+1406 5
+1448 1480 1309 1310 1383
+1407 4
+1394 1478 1446 1461
+1408 9
+1473 1483 1284 1451 1388 1372 1469 1374 1535
+1409 7
+1410 1386 1485 1458 1364 1368 1497
+1410 4
+1409 1458 1364 1389
+1411 6
+1412 3077 1304 1498 1372 1343
+1412 8
+1411 1349 1414 1299 1520 1555 1304 1498
+1413 7
+1318 1383 1352 1513 1518 1480 1468
+1414 6
+1412 1349 1330 1520 1490 1332
+1415 6
+1568 1417 1322 1420 1334 1546
+1416 6
+1472 1384 1356 1488 1437 1438
+1417 5
+1568 1532 1422 1334 1415
+1418 8
+1504 1346 1347 1355 1429 1335 1419 1502
+1419 6
+1504 1418 1455 1335 1373 1502
+1420 6
+1415 1322 1319 1548 1421 1546
+1421 7
+1317 1319 1420 1546 1547 1548 1424
+1422 5
+1361 1532 1358 1334 1417
+1423 4
+1377 1323 1484 1358
+1424 8
+1317 1320 1321 1547 1548 1421 1487 1426
+1425 6
+1444 1325 1360 1399 1528 1467
+1426 6
+1506 1321 1487 1424 1399 1467
+1427 7
+1505 1446 1482 1393 1394 1492 1341
+1428 7
+1442 1351 1355 1356 1501 1493 1437
+1429 6
+1442 1511 1418 1355 1493 1502
+1430 5
+1490 1395 1332 1533 1479
+1431 7
+1510 1331 1357 1457 1363 1433 1499
+1432 7
+1348 1352 1516 1527 1375 1468 1471
+1433 5
+1455 1363 1373 1499 1431
+1434 6
+1536 1313 1543 1448 1436 1309
+1435 6
+1440 1486 1460 1397 1371 1503
+1436 5
+1313 1434 1543 1465 1369
+1437 5
+1416 1428 1356 1501 1488
+1438 5
+1384 1385 1416 1472 1481
+1439 6
+1378 1460 1397 1403 1500 1470
+1440 6
+1435 1486 1456 1396 1495 1371
+1441 1
+1372
+1442 4
+1355 1428 1429 1493
+1443 5
+1464 1337 1444 1340 1507
+1444 7
+1443 1360 1425 1554 1507 1528 1337
+1445 7
+1474 1382 1551 1584 1464 1305 1359
+1446 6
+1537 1394 1427 1492 1461 1407
+1447 4
+1281 1282 1301 1262
+1448 6
+1536 1480 1580 1434 1309 1406
+1449 5
+1392 1456 1459 1398 1489
+1450 5
+1400 1329 1370 1512 1494
+1451 6
+1408 1509 1482 1483 1388 1393
+1452 6
+1345 1454 1365 1366 1370 1599
+1453 7
+1377 1572 1624 1496 1494 1400 1182
+1454 5
+1328 1345 1599 1452 1576
+1455 6
+1504 1419 1519 1433 1499 1373
+1456 7
+1440 1508 1449 1489 1396 1398 1495
+1457 5
+1369 1465 1357 1510 1431
+1458 7
+1409 1410 1517 1389 1521 1485 1470
+1459 4
+1392 1449 1489 1481
+1460 6
+1523 1439 1397 1435 1500 1503
+1461 4
+1524 1478 1446 1407
+1462 2
+1514 1404
+1463 1
+1372
+1464 8
+1507 1445 1612 1613 1359 1584 1443 1340
+1465 6
+1510 1543 1457 1369 1436 1567
+1466 7
+1381 1574 1609 1514 1515 1367 1404
+1467 6
+1506 1550 1425 1426 1399 1528
+1468 5
+1352 1432 1527 1413 1513
+1469 1
+1408
+1470 6
+1389 1521 1458 1403 1500 1439
+1471 7
+1476 1516 1390 1432 1563 1566 1375
+1472 6
+1575 1416 1545 1488 1481 1438
+1473 6
+1408 3077 1529 1594 1372 1535
+1474 6
+1475 1445 1382 1551 1552 1391
+1475 7
+1474 1476 1382 1390 1551 1552 1391
+1476 6
+1475 1390 1391 1552 1563 1471
+1477 3
+1595 1380 1405
+1478 3
+1524 1461 1407
+1479 4
+1491 1395 1533 1430
+1480 6
+1413 1383 1448 1580 1518 1406
+1481 9
+1472 1575 1385 1549 1392 1489 1459 1621 1438
+1482 7
+1505 1573 1451 1393 1427 1558 1509
+1483 5
+1408 1535 1451 1541 1509
+1484 6
+1377 1569 1544 1358 1423 1532
+1485 5
+1409 1458 1539 1517 1497
+1486 5
+1440 1503 1435 1526 1495
+1487 5
+1424 1426 1547 1506 1615
+1488 5
+1416 1472 1501 1437 1545
+1489 7
+1508 1449 1549 1456 1553 1459 1481
+1490 6
+1414 1520 1588 1430 1332 1533
+1491 8
+1603 1540 1479 1386 1533 1395 1497 1597
+1492 7
+1537 1573 1446 1505 1427 1587 1619
+1493 7
+1442 1511 1565 1428 1429 1562 1501
+1494 6
+1571 1572 1512 1450 1453 1400
+1495 6
+1440 1508 1486 1456 1557 1526
+1496 4
+1624 1377 1453 1525
+1497 6
+1409 1539 1540 1386 1485 1491
+1498 8
+1411 1412 3077 1522 1555 1531 1564 1534
+1499 7
+1510 1581 1455 1586 1431 1433 1519
+1500 6
+1521 1523 1556 1460 1470 1439
+1501 6
+1545 1488 1428 1493 1562 1437
+1502 6
+1504 1511 1577 1418 1419 1429
+1503 7
+1601 1526 1486 1523 1460 1590 1435
+1504 8
+1579 1577 1418 1419 1582 1455 1519 1502
+1505 4
+1482 1427 1492 1573
+1506 6
+1550 1615 1616 1426 1487 1467
+1507 6
+1443 1444 1612 1613 1554 1464
+1508 5
+1456 1489 1553 1557 1495
+1509 7
+1483 1541 1482 1451 1558 1593 1561
+1510 8
+1585 1581 1457 1586 1431 1465 1499 1567
+1511 6
+1577 1429 1502 1565 1598 1493
+1512 5
+1370 1571 1572 1450 1494
+1513 6
+1413 1607 1518 1527 1625 1468
+1514 4
+1609 1466 1404 1462
+1515 4
+1466 1538 1381 1574
+1516 5
+1432 1527 1589 1566 1471
+1517 6
+1602 1539 1485 1521 1458 1560
+1518 6
+1413 1607 1480 1513 1580 1608
+1519 6
+1504 1579 1581 1582 1455 1499
+1520 5
+1588 1490 1555 1412 1414
+1521 6
+1517 1458 1556 1560 1500 1470
+1522 1
+1498
+1523 5
+1500 1460 1590 1556 1503
+1524 2
+1461 1478
+1525 5
+1496 1377 1624 1544 1631
+1526 6
+1601 1570 1486 1557 1495 1503
+1527 6
+1513 1516 1589 1432 1625 1468
+1528 6
+1444 1618 1550 1425 1554 1467
+1529 6
+1473 3077 1614 1623 1594 3071
+1530 1
+1559
+1531 1
+1498
+1532 8
+1568 1569 1729 1417 1484 1422 1715 1358
+1533 6
+1479 1490 1491 1588 1430 1597
+1534 5
+1498 1564 3077 3078 3071
+1535 6
+1408 1473 1541 1483 1594 1629
+1536 6
+1543 1448 1580 1583 1434 1626
+1537 3
+1587 1492 1446
+1538 5
+1312 1515 1381 1574 1591
+1539 5
+1497 1602 1540 1485 1517
+1540 8
+1602 1539 1676 1666 1603 1491 1497 1658
+1541 6
+1509 1483 1561 1535 1629 1605
+1542 1
+3077
+1543 7
+1536 1583 1656 1465 1434 1436 1567
+1544 5
+1377 1631 1484 1525 1569
+1545 7
+1472 1575 1488 1562 1628 1501 1630
+1546 7
+1632 1633 1415 1548 1420 1421 1568
+1547 7
+1632 1634 1548 1421 1487 1424 1615
+1548 6
+1632 1546 1547 1420 1421 1424
+1549 6
+1617 1481 1610 1553 1489 1621
+1550 6
+1506 1638 1616 1618 1528 1467
+1551 6
+1552 1474 1475 1636 1445 1584
+1552 7
+1474 1475 1636 1637 1551 1476 1563
+1553 6
+1508 1610 1549 1489 1557 1596
+1554 6
+1635 1444 1612 1618 1507 1528
+1555 6
+1412 1611 1520 1588 1498 1564
+1556 6
+1600 1521 1523 1590 1560 1500
+1557 6
+1570 1508 1553 1526 1495 1596
+1558 7
+1509 1606 1482 1561 1593 1578 1573
+1559 3
+1664 1609 1530
+1560 6
+1600 1602 1517 1521 1556 1622
+1561 5
+1605 1593 1509 1558 1541
+1562 6
+1545 1501 1493 1592 1628 1565
+1563 7
+1648 1476 1637 1641 1552 1566 1471
+1564 6
+3078 1640 1611 1555 1498 1534
+1565 6
+1639 1511 1493 1592 1562 1598
+1566 5
+1641 1563 1516 1589 1471
+1567 5
+1656 1465 1585 1510 1543
+1568 7
+1633 1415 1417 1546 1715 1720 1532
+1569 6
+1728 1729 1544 1484 1532 1631
+1570 5
+1601 1642 1596 1557 1526
+1571 6
+1572 1512 1716 1494 1370 1726
+1572 6
+1571 1512 1453 1716 1494 1624
+1573 6
+1505 1606 1482 1619 1492 1558
+1574 6
+1538 1731 1609 1515 1591 1466
+1575 6
+1472 1481 1649 1621 1545 1630
+1576 4
+1328 1722 1454 1599
+1577 6
+1504 1511 1579 1502 1627 1598
+1578 2
+1593 1558
+1579 6
+1504 1577 1644 1582 1519 1627
+1580 8
+1536 1698 1701 1448 1608 1518 1480 1626
+1581 7
+1510 1645 1582 1647 1586 1519 1499
+1582 6
+1504 1579 1644 1581 1647 1519
+1583 5
+1536 1656 1626 1692 1543
+1584 6
+1636 1445 1613 1551 1464 1659
+1585 5
+1656 1586 1687 1510 1567
+1586 7
+1667 1510 1581 1645 1585 1687 1499
+1587 4
+1604 1537 1619 1492
+1588 8
+1611 1646 1597 1520 1490 1555 1657 1533
+1589 6
+1641 1516 1527 1625 1566 1663
+1590 7
+1600 1601 1643 1650 1523 1556 1503
+1591 8
+1312 1538 1731 1765 1574 1834 1745 1620
+1592 5
+1562 1628 1565 1682 1639
+1593 11
+1665 1509 1670 1578 1606 1651 1653 1558 1561 1661 1605
+1594 6
+1473 1668 1623 1529 1629 1535
+1595 1
+1477
+1596 7
+1570 1642 1678 1553 1557 1655 1610
+1597 5
+1603 1491 1588 1533 1646
+1598 6
+1639 1577 1511 1679 1627 1565
+1599 6
+1576 1452 1708 1454 1370 1722
+1600 5
+1560 1643 1556 1622 1590
+1601 6
+1570 1526 1642 1650 1590 1503
+1602 6
+1666 1539 1540 1517 1622 1560
+1603 6
+1540 1646 1491 1685 1658 1597
+1604 1
+1587
+1605 6
+1665 1541 1593 1561 1660 1629
+1606 7
+1651 1573 1619 1653 1558 1593 1669
+1607 6
+1698 1608 1513 1518 1625 1693
+1608 4
+1698 1580 1518 1607
+1609 7
+1664 1731 1574 1514 1559 1466 1404
+1610 5
+1553 1655 1596 1549 1617
+1611 6
+1671 1640 1555 1588 1657 1564
+1612 7
+1507 1613 1713 1554 1635 1719 1464
+1613 7
+1712 1507 1612 1584 1713 1464 1659
+1614 3
+1529 3071 1623
+1615 6
+1506 1547 1634 1487 1616 1724
+1616 6
+1506 1638 1550 1615 1724 1727
+1617 5
+1680 1610 1655 1549 1621
+1618 7
+1635 1638 1550 1554 1718 1528 1721
+1619 7
+1573 1606 1672 3082 1587 1492 1662
+1620 8
+1312 1379 1956 1957 1207 1591 1754 1765
+1621 7
+1617 1481 1575 1549 1680 1681 1649
+1622 7
+1600 1602 1704 1643 1666 1654 1560
+1623 7
+1668 1614 1686 1529 1594 3076 3071
+1624 7
+1572 1453 1716 1717 1496 1631 1525
+1625 6
+1607 1513 1589 1527 1693 1663
+1626 6
+1536 1701 1580 1583 1751 1692
+1627 7
+1675 1577 1579 1644 1679 1699 1598
+1628 7
+1696 1545 1682 1592 1562 1630 1695
+1629 7
+1668 1541 1689 1594 1535 1660 1605
+1630 6
+1696 1575 1545 1677 1649 1628
+1631 6
+1728 1569 1544 1525 1624 1717
+1632 6
+1633 1634 1546 1547 1548 1725
+1633 5
+1568 1632 1546 1720 1725
+1634 5
+1632 1547 1724 1725 1615
+1635 5
+1554 1612 1618 1718 1719
+1636 7
+1584 1637 1710 1551 1552 1714 1659
+1637 6
+1648 1636 1709 1710 1552 1563
+1638 5
+1616 1721 1618 1550 1727
+1639 6
+1706 1679 1682 1592 1565 1598
+1640 6
+3078 1671 1611 1683 1688 1564
+1641 6
+1700 1648 1589 1563 1566 1663
+1642 7
+1601 1570 1734 1678 1650 1723 1596
+1643 7
+1600 1590 1702 1735 1654 1650 1622
+1644 6
+1675 1674 1579 1582 1647 1627
+1645 6
+1667 1674 1581 1647 1586 1694
+1646 6
+1603 1705 1588 1685 1657 1597
+1647 6
+1674 1675 1644 1581 1582 1645
+1648 6
+1700 1637 1641 1709 1711 1563
+1649 5
+1677 1681 1621 1630 1575
+1650 7
+1601 1732 1734 1702 1642 1643 1590
+1651 8
+1665 1669 1670 1606 1653 1593 1690 1661
+1652 1
+3078
+1653 3
+1593 1651 1606
+1654 4
+1704 1643 1622 1735
+1655 6
+1610 1678 1680 1617 1596 1738
+1656 7
+1543 1583 1585 1687 1743 1692 1567
+1657 6
+1697 1671 1705 1611 1646 1588
+1658 4
+1676 1603 1540 1685
+1659 5
+1584 1712 1714 1636 1613
+1660 5
+1689 1665 1703 1605 1629
+1661 3
+1593 1651 1670
+1662 2
+3082 1619
+1663 7
+1730 1700 1641 1741 1589 1625 1693
+1664 4
+1609 1745 1731 1559
+1665 8
+3073 1605 1670 1703 1651 1593 1690 1660
+1666 5
+1704 1676 1602 1540 1622
+1667 5
+1753 1586 1645 1694 1687
+1668 6
+3076 1746 1623 1689 1594 1629
+1669 5
+3075 1690 1651 1684 1606
+1670 4
+1593 1651 1661 1665
+1671 7
+1697 1640 1611 1740 1683 1688 1657
+1672 3
+3082 1619 1691
+1673 6
+3078 1752 1686 1688 1755 3071
+1674 6
+1733 1675 1644 1645 1647 1694
+1675 7
+1699 1733 1737 1674 1644 1647 1627
+1676 8
+1666 1540 1704 1736 1747 1685 1658 1749
+1677 6
+1696 1681 1739 1649 1748 1630
+1678 6
+1642 1750 1655 1723 1596 1738
+1679 5
+1706 1627 1699 1598 1639
+1680 6
+1617 1738 1681 1621 1655 1756
+1681 6
+1739 1677 1680 1649 1621 1756
+1682 7
+1639 1768 1706 1772 1592 1628 1695
+1683 2
+1640 1671
+1684 3
+1744 3075 1669
+1685 7
+1603 1705 1676 1742 1646 1658 1749
+1686 8
+3076 1673 1803 1804 1774 1623 1755 3071
+1687 7
+1667 1743 1585 1586 1656 1753 1775
+1688 6
+3078 1671 1640 1673 1740 1752
+1689 5
+1668 1746 1660 1629 1703
+1690 5
+3075 1665 1651 1669 3073
+1691 2
+1672 3082
+1692 6
+1743 1751 1656 1626 1583 1757
+1693 6
+1698 1607 1730 1625 1758 1663
+1694 7
+1794 1667 1733 1674 1645 1753 1762
+1695 5
+1696 1772 1682 1628 1770
+1696 6
+1770 1677 1748 1628 1630 1695
+1697 5
+1760 1657 1705 1740 1671
+1698 7
+1761 1701 1607 1608 1580 1693 1758
+1699 6
+1764 1737 1706 1675 1679 1627
+1700 6
+1763 1641 1741 1711 1648 1663
+1701 6
+1761 1826 1580 1698 1751 1626
+1702 4
+1650 1643 1732 1735
+1703 7
+3073 1797 1665 1801 1746 1689 1660
+1704 6
+1666 1622 1735 1676 1747 1654
+1705 6
+1760 1697 1742 1685 1657 1646
+1706 6
+1699 1764 1639 1768 1679 1682
+1707 5
+1370 1708 2326 2182 1726
+1708 6
+2182 1707 2002 1722 1370 1599
+1709 7
+1637 1769 1710 1711 1648 1810 1783
+1710 5
+1714 1783 1636 1637 1709
+1711 5
+1648 1769 1763 1700 1709
+1712 7
+1795 1613 1838 1713 1714 1781 1659
+1713 6
+1612 1613 1712 1781 1719 1788
+1714 6
+1636 1838 1712 1710 1783 1659
+1715 8
+1568 1729 1968 1875 1908 1720 1532 1823
+1716 8
+1571 1572 1876 1717 1624 1370 1726 1919
+1717 8
+1728 1860 1925 1716 1624 1876 1918 1631
+1718 6
+1635 1618 1812 1719 1721 1788
+1719 6
+1635 1612 1713 1781 1718 1788
+1720 6
+1568 1633 1821 1715 1725 1823
+1721 6
+1638 1618 1812 1718 1759 1727
+1722 8
+1957 1576 1708 1328 3080 2002 1754 1599
+1723 5
+1750 1777 1642 1734 1678
+1724 8
+1634 1833 1615 1616 1787 1820 1725 1727
+1725 8
+1632 1633 1634 1833 1720 1724 1821 1824
+1726 9
+1571 2311 3081 1707 1716 2165 2326 1370 1919
+1727 6
+1638 1616 1721 1787 1724 1759
+1728 5
+1569 1729 1860 1717 1631
+1729 6
+1728 1569 1860 1715 1875 1532
+1730 5
+1741 1831 1693 1758 1663
+1731 5
+1664 1609 1745 1574 1591
+1732 7
+1702 1734 1735 1650 1782 1785 1786
+1733 5
+1737 1674 1675 1762 1694
+1734 6
+1732 1642 1777 1650 1782 1723
+1735 7
+1732 1702 1704 1643 1747 1654 1785
+1736 1
+1676
+1737 7
+1762 1699 1764 1733 1675 1817 1818
+1738 6
+1776 1678 1680 1750 1655 1756
+1739 6
+1771 1677 1773 1681 1748 1756
+1740 7
+1760 1697 1767 1671 1752 1688 1793
+1741 6
+1730 1763 1700 1829 1831 1663
+1742 7
+1760 1705 1778 1779 1780 1685 1749
+1743 6
+1775 1687 1656 1692 1757 1822
+1744 5
+3072 1800 3075 1684 1799
+1745 5
+1664 1834 1731 1958 1591
+1746 7
+1668 1703 1801 1805 1807 1689 3076
+1747 6
+1735 1704 1676 1749 1784 1785
+1748 5
+1696 1770 1739 1677 1771
+1749 7
+1676 1742 1747 1685 1779 1784 1789
+1750 5
+1776 1777 1738 1723 1678
+1751 7
+1826 1827 1828 1701 1626 1692 1757
+1752 6
+1793 1673 1802 1740 1688 1755
+1753 7
+1794 1667 1687 1775 1840 1815 1694
+1754 5
+1328 1722 1620 1957 1207
+1755 6
+1766 1673 1802 1804 1686 1752
+1756 6
+1776 1738 1739 1773 1680 1681
+1757 5
+1828 1751 1692 1822 1743
+1758 7
+1761 1730 1830 1831 1832 1698 1693
+1759 5
+1721 1890 1787 1812 1727
+1760 9
+1792 1697 1793 1705 1767 1740 1742 1780 1790
+1761 6
+1825 1698 1701 1830 1826 1758
+1762 6
+1794 1733 1737 1816 1817 1694
+1763 5
+1769 1829 1700 1741 1711
+1764 5
+1768 1737 1706 1699 1818
+1765 4
+1956 1834 1620 1591
+1766 1
+1755
+1767 2
+1760 1740
+1768 7
+1764 1706 1772 1682 1847 1849 1818
+1769 7
+1763 1829 1709 1711 1810 1882 1885
+1770 7
+1696 1771 1835 1772 1844 1748 1695
+1771 7
+1770 1739 1773 1844 1748 1850 1819
+1772 7
+1768 1770 1835 1682 1847 1853 1695
+1773 7
+1771 1809 1739 1776 1841 1819 1756
+1774 1
+1686
+1775 8
+1921 1859 1924 1743 1840 1687 1753 1822
+1776 7
+1809 1738 1773 1808 1777 1750 1756
+1777 7
+1808 1782 1734 1776 1750 1723 1791
+1778 2
+1779 1742
+1779 7
+1836 1742 1778 1780 1749 1814 1789
+1780 7
+1760 1857 1742 1779 1845 1814 1790
+1781 7
+1795 1866 1712 1713 1719 1788 1887
+1782 7
+1856 1732 1734 1777 1848 1786 1791
+1783 6
+1888 1810 1838 1709 1714 1710
+1784 5
+1785 1842 1747 1749 1789
+1785 7
+1732 1735 1837 1842 1747 1784 1786
+1786 5
+1856 1785 1732 1837 1782
+1787 7
+1890 1820 1905 1727 1724 1917 1759
+1788 7
+1713 1812 1781 1718 1719 1886 1887
+1789 7
+1836 1842 1843 1749 1779 1784 1851
+1790 5
+1760 1792 1780 1845 1839
+1791 6
+1808 1777 1782 1848 1884 1854
+1792 4
+1760 1793 1790 1839
+1793 8
+1760 1792 1858 1865 1802 1740 1839 1752
+1794 5
+1816 1753 1762 1694 1815
+1795 5
+1712 1866 1883 1781 1838
+1796 3
+3072 1965 1799
+1797 7
+3073 3075 1798 1703 1801 1870 1811
+1798 4
+1800 3075 1797 1870
+1799 5
+1744 3072 1796 1813 1800
+1800 7
+3075 1798 1799 1744 1813 1846 1852
+1801 7
+1797 1862 1703 1807 1746 1811 1878
+1802 6
+1793 1804 1868 1865 1752 1755
+1803 6
+3076 1804 1869 1806 1872 1686
+1804 7
+1865 1802 1803 1868 1869 1686 1755
+1805 7
+3076 1862 1806 1807 1873 1746 1903
+1806 5
+1872 1873 1803 3076 1805
+1807 4
+1801 1746 1805 1862
+1808 5
+1776 1777 1809 1854 1791
+1809 5
+1776 1808 1773 1854 1841
+1810 5
+1888 1769 1882 1709 1783
+1811 7
+1797 1862 1801 1902 1870 1878 1893
+1812 7
+1889 1890 1718 1721 1788 1886 1759
+1813 3
+1800 1846 1799
+1814 6
+1857 1892 1864 1836 1779 1780
+1815 5
+1840 1753 1794 1877 1816
+1816 6
+1762 1794 1874 1877 1815 1817
+1817 6
+1762 1891 1737 1874 1816 1818
+1818 6
+1891 1764 1768 1737 1817 1849
+1819 4
+1841 1850 1771 1773
+1820 5
+1904 1905 1787 1724 1833
+1821 7
+1824 1907 1908 1910 1720 1725 1823
+1822 5
+1859 1775 1828 1757 1743
+1823 5
+1720 1715 1908 1821 1910
+1824 6
+1833 1821 1904 1907 1910 1725
+1825 6
+1761 1826 1830 1863 1896 1898
+1826 6
+1761 1827 1701 1863 1751 1825
+1827 6
+1826 1828 1861 1863 1932 1751
+1828 6
+1859 1861 1827 1751 1757 1822
+1829 7
+1763 1831 1769 1741 1871 1885 1855
+1830 5
+1832 1761 1896 1758 1825
+1831 7
+1730 1829 1895 1832 1741 1758 1855
+1832 5
+1896 1895 1830 1758 1831
+1833 7
+1824 1724 1904 1905 1907 1820 1725
+1834 5
+1745 1956 1765 1958 1591
+1835 5
+1844 1770 1772 1853 1909
+1836 5
+1864 1843 1779 1789 1814
+1837 5
+1856 1785 1786 1842 1879
+1838 7
+1888 1795 1712 1714 1940 1783 1883
+1839 8
+1792 1793 1858 1897 1845 1912 1915 1790
+1840 5
+1921 1753 1815 1877 1775
+1841 6
+1894 1773 1809 1850 1819 1854
+1842 7
+1837 1880 1879 1784 1785 1851 1789
+1843 6
+1864 1836 1911 1881 1851 1789
+1844 6
+1771 1770 1835 1909 1899 1850
+1845 5
+1857 1780 1897 1790 1839
+1846 3
+1800 1852 1813
+1847 5
+1768 1849 1867 1772 1853
+1848 5
+1856 1906 1884 1782 1791
+1849 7
+1891 1768 1867 1942 1847 1818 1946
+1850 7
+1899 1926 1894 1771 1841 1844 1819
+1851 5
+1880 1881 1842 1843 1789
+1852 2
+1800 1846
+1853 6
+1920 1835 1867 1772 1909 1847
+1854 7
+1894 1841 1900 1808 1809 1884 1791
+1855 7
+1963 1829 1895 1831 1964 1966 1871
+1856 7
+1837 1934 1906 1782 1879 1848 1786
+1857 7
+1923 1892 1929 1780 1845 1814 1897
+1858 5
+1912 1793 1913 1865 1839
+1859 5
+1924 1828 1861 1822 1775
+1860 6
+1728 1729 1936 1875 1717 1918
+1861 6
+1859 1828 1932 1935 1827 1924
+1862 7
+1954 1801 1805 1807 1811 1878 1903
+1863 5
+1825 1826 1827 1932 1898
+1864 6
+1892 1836 1901 1843 1814 1911
+1865 7
+1793 1858 1804 1802 1868 1913 1916
+1866 5
+1883 1795 1931 1781 1887
+1867 5
+1920 1849 1946 1853 1847
+1868 6
+1865 1802 1804 1933 1869 1916
+1869 6
+1804 1803 1868 1933 1872 1938
+1870 5
+1902 1811 1797 1798 1927
+1871 4
+1885 1829 1966 1855
+1872 5
+1873 1938 1803 1869 1806
+1873 5
+1872 1938 1805 1806 1903
+1874 6
+1922 1891 1959 1877 1816 1817
+1875 5
+1968 1729 1936 1715 1860
+1876 7
+1925 2056 3081 1716 1717 1918 1919
+1877 7
+1921 1922 1930 1840 1874 1815 1816
+1878 7
+1954 1893 1862 1801 1903 1811 1944
+1879 8
+1856 1837 1934 1937 1842 1880 1914 1906
+1880 5
+1881 1842 1851 1914 1879
+1881 6
+1961 1843 1911 1880 1914 1851
+1882 6
+1888 1984 1769 1810 1939 1885
+1883 6
+1795 1866 1931 1838 1940 1983
+1884 8
+1962 1900 1906 1848 1947 1948 1854 1791
+1885 6
+1984 1829 1769 1966 1871 1882
+1886 6
+1889 1971 1812 1972 1788 1887
+1887 7
+1866 1931 1971 1972 1781 1788 1886
+1888 6
+1838 1810 1939 1940 1783 1882
+1889 6
+1890 1955 2000 1971 1812 1886
+1890 6
+1889 1955 1812 1787 1917 1759
+1891 7
+1959 1874 1942 1849 1817 1818 1950
+1892 5
+1864 1857 1901 1814 1929
+1893 5
+1944 1811 1902 1878 1949
+1894 6
+1926 1900 1841 1941 1850 1854
+1895 6
+1831 1832 1964 1896 1970 1855
+1896 7
+1825 1830 1895 1832 1898 1960 1970
+1897 6
+1857 1923 1839 1845 1915 1951
+1898 6
+1825 1863 1896 1932 1960 1974
+1899 7
+1926 1844 1973 1975 1977 1850 1909
+1900 5
+1948 1884 1894 1854 1941
+1901 5
+1864 1929 1911 1892 1953
+1902 6
+1893 1927 1870 1969 1811 1949
+1903 7
+1954 1862 1995 1805 1873 1938 1878
+1904 8
+1824 1987 1988 1989 1833 1905 1907 1820
+1905 7
+1988 1989 1833 1904 1787 1820 1917
+1906 8
+1856 1934 1967 1879 1848 1947 1884 1982
+1907 7
+1824 1986 1987 1833 1904 1910 1821
+1908 7
+1986 1990 1968 1715 1910 1821 1823
+1909 7
+1920 1835 1899 1844 1973 1943 1853
+1910 8
+1824 1986 1987 1990 1907 1908 1821 1823
+1911 6
+1953 1864 1961 1901 1843 1881
+1912 9
+1858 1915 1913 1999 2006 2007 1945 1839 1951
+1913 5
+1912 1865 1858 1916 1945
+1914 6
+1961 1937 1879 1880 1881 1978
+1915 4
+1912 1897 1951 1839
+1916 7
+1865 1868 1933 1913 2009 1945 2010
+1917 6
+1890 1955 1989 1905 2008 1787
+1918 7
+1860 1925 2086 2056 1936 1876 1717
+1919 4
+1876 1716 1726 3081
+1920 6
+1867 1909 1943 1946 1980 1853
+1921 9
+1985 1924 1930 1775 1840 2026 1877 1976 2013
+1922 5
+1952 1874 1877 1930 1959
+1923 7
+1857 1992 1929 1997 1999 1897 1951
+1924 6
+1921 1859 1861 1935 1976 1775
+1925 3
+1876 1717 1918
+1926 5
+1850 1899 1941 1894 1975
+1927 5
+1928 1969 1902 1870 2063
+1928 2
+2063 1927
+1929 6
+1857 1923 1892 1953 1992 1901
+1930 5
+1952 1921 1922 1877 1985
+1931 6
+1887 2015 1866 1972 1883 1983
+1932 7
+1827 1861 1863 1898 1935 2003 1974
+1933 6
+1868 1869 1938 2005 2009 1916
+1934 6
+1856 1937 1906 1879 1979 1982
+1935 7
+1924 1861 2024 2025 1932 2003 1976
+1936 6
+1860 2086 2057 1968 1875 1918
+1937 6
+2021 1934 1879 1978 1979 1914
+1938 8
+1991 1995 1933 1869 1872 1873 2005 1903
+1939 6
+1888 1984 2016 1998 1940 1882
+1940 6
+1888 1998 1939 1838 1883 1983
+1941 7
+1894 1926 1900 1996 2001 1975 1948
+1942 6
+1891 2028 1849 1946 2011 1950
+1943 6
+1920 2019 2020 1909 1980 1973
+1944 6
+1954 1893 1994 1981 1878 1949
+1945 6
+2007 2009 1912 1913 2010 1916
+1946 6
+1920 1867 1942 1849 2011 1980
+1947 5
+2017 1906 1884 1962 1967
+1948 5
+1900 1962 1884 1941 1996
+1949 7
+2083 1893 1902 1969 2030 1944 1981
+1950 5
+1993 1891 2028 1942 1959
+1951 6
+1923 1897 1997 1999 1912 1915
+1952 6
+1985 1922 1959 1930 2027 2034
+1953 7
+1992 1929 1911 1901 2032 1961 2012
+1954 7
+1862 1994 1995 1903 1878 1944 2047
+1955 6
+1889 1890 2000 2008 1917 2014
+1956 8
+1765 1958 1834 2351 1620 2263 2117 1957
+1957 8
+1956 2279 3080 2351 1620 1722 2255 1754
+1958 4
+1745 1834 1956 2263
+1959 7
+1952 1922 1891 1993 2027 1874 1950
+1960 6
+2018 1896 1898 1970 1974 2041
+1961 8
+1953 2032 2004 1911 1881 1914 2012 1978
+1962 5
+1948 1947 1884 1996 2017
+1963 5
+2023 1964 2029 1966 1855
+1964 5
+2029 1970 1895 1963 1855
+1965 2
+1796 2063
+1966 7
+1984 2023 1963 1871 2035 1885 1855
+1967 5
+2017 1906 1947 1982 2043
+1968 10
+2112 2164 1990 2057 1936 1875 1908 1715 2070 2137
+1969 6
+2049 2083 1927 1902 2063 1949
+1970 7
+2050 1895 1896 1964 2018 2029 1960
+1971 8
+1889 2000 1972 2038 2040 2044 1886 1887
+1972 6
+1931 1971 2038 1887 1886 2015
+1973 6
+2020 1899 1943 1909 2039 1977
+1974 5
+1960 2041 1898 2003 1932
+1975 7
+1926 1899 2031 2001 1941 1977 2042
+1976 5
+2024 1921 2026 1924 1935
+1977 5
+2031 2039 1899 1973 1975
+1978 7
+2021 1961 1937 2004 1914 1979 2046
+1979 7
+2052 2053 1934 1937 1978 1982 2021
+1980 5
+1920 1946 2019 2011 1943
+1981 4
+1944 1994 1949 2030
+1982 6
+1979 2053 1934 1967 1906 2043
+1983 7
+2051 2022 1931 1998 1940 1883 2015
+1984 7
+2016 2023 1966 1939 1882 1885 2035
+1985 5
+1952 1921 1930 2034 2013
+1986 7
+1987 1990 2066 2067 1908 1910 1907
+1987 8
+2067 1986 1988 1904 2066 1907 2069 1910
+1988 7
+1987 1989 1904 1905 2067 2068 2069
+1989 7
+1988 1904 1905 2068 2069 2008 1917
+1990 6
+1986 1910 1968 2066 1908 2070
+1991 3
+1938 1995 2005
+1992 8
+1953 1923 1929 1997 2062 2065 2036 2012
+1993 5
+2027 2028 2037 1950 1959
+1994 6
+1954 2030 2064 1944 1981 2047
+1995 7
+1954 2054 1991 1903 1938 2005 2047
+1996 6
+2017 1962 2001 1941 1948 2045
+1997 7
+1923 2033 1992 1999 2065 2072 1951
+1998 6
+2048 2016 2051 1939 1940 1983
+1999 6
+1923 1997 2033 2006 1912 1951
+2000 7
+1889 1955 2123 1971 2040 2075 2014
+2001 5
+2042 2045 1996 1941 1975
+2002 5
+3080 1722 1708 2182 2262
+2003 7
+2081 2025 1932 1935 1974 2041 2079
+2004 6
+2085 1961 2032 2099 1978 2046
+2005 9
+2054 1991 1995 1933 2061 1938 2071 2087 2009
+2006 7
+2084 2033 2059 1999 2072 2007 1912
+2007 5
+1912 1945 2010 2059 2006
+2008 8
+1955 1989 2088 2068 2075 2076 1917 2014
+2009 7
+2060 1933 2005 2071 1945 2010 1916
+2010 6
+2059 2060 2007 2009 1945 1916
+2011 6
+2019 2090 2028 1942 1946 1980
+2012 6
+1953 1992 1961 2058 2032 2036
+2013 6
+1921 1985 2026 2034 2097 2098
+2014 4
+2000 2008 1955 2075
+2015 7
+2022 1931 1972 2038 2111 2044 1983
+2016 6
+1984 2048 2082 1998 1939 2035
+2017 9
+2043 1962 1996 1967 2104 2105 2106 1947 2045
+2018 5
+1960 2041 1970 2050 2077
+2019 5
+2020 2090 2011 1980 1943
+2020 6
+2019 2090 2091 1943 1973 2039
+2021 5
+1937 1978 1979 2052 2046
+2022 4
+2051 2111 2015 1983
+2023 6
+1984 1963 2029 1966 2035 2078
+2024 5
+1976 2025 2026 2074 1935
+2025 6
+2024 2092 1935 2003 2074 2079
+2026 8
+1921 2114 2118 2024 2097 1976 2074 2013
+2027 8
+1952 1959 1993 2093 2095 2096 2034 2037
+2028 8
+1993 2090 2093 2094 2037 1942 2011 1950
+2029 6
+2050 2023 1963 1964 1970 2078
+2030 6
+2083 2055 1994 1981 2064 1949
+2031 4
+1977 2042 2039 1975
+2032 7
+2080 1953 2085 1961 2058 2004 2012
+2033 4
+2072 1997 2006 1999
+2034 6
+1952 1985 2027 2096 2098 2013
+2035 9
+1984 2016 2082 2147 2150 2023 2187 1966 2078
+2036 4
+1992 2058 2012 2062
+2037 4
+1993 2027 2028 2093
+2038 5
+2040 2044 1971 1972 2015
+2039 7
+2020 2091 2031 2100 1973 1977 2042
+2040 7
+2120 2123 2000 2152 1971 2038 2044
+2041 7
+2081 2018 1960 2089 2003 1974 2077
+2042 7
+1975 2031 2001 2100 2101 2039 2045
+2043 7
+2017 2053 1967 2102 2103 2104 1982
+2044 6
+2120 1971 2038 2040 2015 2111
+2045 6
+2017 1996 2001 2101 2105 2042
+2046 7
+2052 2021 2099 2004 2073 1978 2085
+2047 7
+1954 2148 2054 1994 1995 2125 2064
+2048 7
+2016 2082 2051 2150 2151 1998 2138
+2049 2
+1969 2083
+2050 7
+2018 2029 1970 2136 2140 2077 2078
+2051 6
+2048 2022 1998 2138 1983 2111
+2052 7
+2021 2073 1979 2108 2109 2046 2053
+2053 6
+2043 2052 2103 1979 2109 1982
+2054 6
+2113 1995 2061 2125 2005 2047
+2055 5
+2064 2083 2110 2030 2119
+2056 4
+3081 1876 2086 1918
+2057 5
+1936 1968 2207 2086 2137
+2058 8
+2080 2062 2032 2129 2194 2036 2135 2012
+2059 6
+2084 2060 2130 2006 2007 2010
+2060 6
+2059 2130 2132 2071 2009 2010
+2061 5
+2113 2071 2005 2054 2087
+2062 6
+2115 1992 2058 2065 2036 2135
+2063 4
+1928 1969 1965 1927
+2064 6
+2148 2055 1994 2030 2110 2047
+2065 5
+1992 2072 2115 1997 2062
+2066 7
+1986 1987 1990 2067 2070 2142 2143
+2067 7
+2144 1986 1987 1988 2066 2069 2143
+2068 7
+2145 2146 1988 1989 2088 2069 2008
+2069 7
+2144 2146 1987 1988 1989 2067 2068
+2070 6
+2112 1990 1968 2066 2141 2142
+2071 7
+2087 2060 2061 2130 2132 2005 2009
+2072 7
+2115 2084 2065 1997 2127 2033 2006
+2073 5
+2107 2099 2052 2046 2108
+2074 5
+2024 2025 2026 2092 2114
+2075 7
+2123 2124 2157 2000 2008 2076 2014
+2076 4
+2008 2088 2075 2124
+2077 6
+2018 2121 2050 2089 2136 2041
+2078 6
+2050 2147 2023 2029 2035 2140
+2079 5
+2025 2171 2003 2092 2081
+2080 4
+2032 2129 2058 2085
+2081 6
+2089 2191 2003 2041 2171 2079
+2082 4
+2016 2048 2035 2150
+2083 6
+2049 2119 2055 2030 1969 1949
+2084 7
+2059 2127 2161 2130 2006 2072 2174
+2085 10
+2080 2176 2128 2192 2032 2129 2099 2004 2154 2046
+2086 7
+2056 3081 1936 2057 2168 1918 2207
+2087 5
+2113 2071 2132 2005 2061
+2088 6
+2145 2124 2068 2008 2170 2076
+2089 6
+2081 2121 2190 2191 2041 2077
+2090 8
+2019 2116 2091 2028 2094 2131 2020 2011
+2091 6
+2020 2153 2090 2131 2100 2039
+2092 6
+2114 2025 2155 2074 2171 2079
+2093 6
+2027 2028 2094 2095 2037 2139
+2094 7
+2116 2090 2028 2093 2196 2169 2139
+2095 5
+2096 2139 2162 2027 2093
+2096 5
+2034 2027 2162 2098 2095
+2097 5
+2026 2098 2134 2013 2118
+2098 7
+2162 2096 2097 2034 2134 2172 2013
+2099 6
+2085 2128 2004 2073 2107 2046
+2100 6
+2153 2091 2163 2101 2039 2042
+2101 6
+2122 2163 2100 2105 2042 2045
+2102 5
+2104 2106 2043 2158 2103
+2103 6
+2053 2158 2126 2102 2043 2109
+2104 4
+2017 2106 2043 2102
+2105 5
+2017 2122 2106 2045 2101
+2106 8
+2017 2166 2122 2158 2102 2167 2104 2105
+2107 5
+2128 2073 2099 2108 2133
+2108 6
+2052 2149 2133 2073 2107 2109
+2109 7
+2052 2149 2126 2160 2103 2108 2053
+2110 4
+2064 2119 2148 2055
+2111 8
+2051 2022 2120 2156 2184 2138 2044 2015
+2112 5
+1968 2218 2164 2141 2070
+2113 8
+2054 2087 2220 2125 2061 2195 2132 2159
+2114 5
+2026 2155 2092 2074 2118
+2115 7
+2179 2062 2127 2065 2135 2072 2174
+2116 4
+2169 2090 2131 2094
+2117 3
+2263 1956 2351
+2118 8
+2177 2114 2183 2026 2155 2097 2134 2173
+2119 3
+2083 2110 2055
+2120 6
+2152 2185 2156 2040 2044 2111
+2121 5
+2136 2089 2186 2077 2190
+2122 7
+2180 2163 2101 2167 2105 2106 2229
+2123 6
+2152 2188 2157 2000 2040 2075
+2124 6
+2088 2157 2197 2170 2075 2076
+2125 5
+2113 2195 2148 2054 2047
+2126 5
+2160 2175 2109 2158 2103
+2127 4
+2072 2115 2084 2174
+2128 7
+2085 2154 2189 2193 2099 2133 2107
+2129 7
+2080 2224 2085 2058 2192 2194 2135
+2130 8
+2178 2084 2059 2060 2161 2132 2071 2203
+2131 7
+2212 2214 2153 2090 2091 2169 2116
+2132 8
+2113 2178 2087 2219 2060 2159 2130 2071
+2133 5
+2128 2107 2108 2189 2149
+2134 5
+2097 2098 2172 2173 2118
+2135 7
+2115 2058 2062 2129 2194 2179 2200
+2136 5
+2121 2050 2140 2077 2186
+2137 5
+1968 2057 2251 2164 2207
+2138 5
+2048 2184 2151 2051 2111
+2139 6
+2181 2093 2094 2095 2162 2196
+2140 8
+2208 2050 2147 2186 2206 2136 2205 2078
+2141 6
+2112 2217 2218 2164 2070 2142
+2142 6
+2216 2217 2066 2070 2141 2143
+2143 6
+2144 2215 2216 2066 2067 2142
+2144 6
+2146 2211 2215 2067 2069 2143
+2145 5
+2088 2146 2170 2068 2213
+2146 6
+2144 2145 2211 2213 2068 2069
+2147 7
+2187 2035 2078 2230 2140 2205 2206
+2148 7
+2125 2064 2195 2201 2204 2110 2047
+2149 5
+2189 2160 2108 2109 2133
+2150 6
+2048 2082 2151 2187 2242 2035
+2151 6
+2048 2241 2242 2150 2184 2138
+2152 6
+2120 2185 2123 2188 2223 2040
+2153 5
+2163 2212 2131 2091 2100
+2154 4
+2128 2176 2085 2193
+2155 7
+2177 2114 2118 2247 2248 2092 2171
+2156 4
+2120 2184 2185 2111
+2157 6
+2188 2123 2124 2197 2233 2075
+2158 8
+2102 2103 2126 2166 2199 2106 2198 2175
+2159 6
+2113 2219 2220 2222 2195 2132
+2160 7
+2209 2149 2189 2126 2199 2109 2175
+2161 7
+2178 2243 2084 2130 2227 2203 2174
+2162 7
+2181 2095 2096 2098 2202 2139 2172
+2163 7
+2212 2153 2122 2226 2100 2101 2229
+2164 8
+2112 2218 2251 2141 1968 2301 2137 2333
+2165 6
+2311 3081 2350 2168 2396 1726
+2166 6
+2180 2250 2158 2198 2167 2106
+2167 4
+2106 2180 2122 2166
+2168 5
+3081 2350 2165 2086 2207
+2169 6
+2210 2116 2214 2094 2131 2196
+2170 6
+2145 2213 2088 2124 2228 2197
+2171 7
+2081 2247 2155 2092 2191 2236 2079
+2172 7
+2240 2162 2098 2134 2232 2202 2173
+2173 6
+2118 2183 2134 2232 2172 2239
+2174 6
+2115 2084 2127 2161 2227 2179
+2175 5
+2160 2198 2158 2126 2199
+2176 5
+2192 2193 2154 2085 2225
+2177 7
+2118 2247 2248 2155 2266 2183 2239
+2178 5
+2161 2130 2203 2132 2219
+2179 7
+2115 2135 2227 2231 2200 2237 2174
+2180 7
+2272 2278 2122 2229 2166 2167 2250
+2181 6
+2252 2254 2162 2196 2202 2139
+2182 6
+2262 1707 1708 2353 2002 2326
+2183 4
+2177 2173 2118 2239
+2184 7
+2241 2151 2185 2156 2138 2235 2111
+2185 7
+2257 2120 2156 2223 2152 2184 2235
+2186 6
+2208 2244 2121 2190 2136 2140
+2187 8
+2242 2147 2150 2275 2281 2320 2035 2230
+2188 6
+2245 2152 2123 2157 2223 2233
+2189 9
+2160 2149 2246 2209 2221 2128 2193 2133 2238
+2190 6
+2244 2089 2186 2191 2258 2121
+2191 6
+2081 2089 2190 2258 2171 2236
+2192 5
+2176 2129 2224 2085 2225
+2193 6
+2176 2154 2189 2221 2128 2225
+2194 6
+2256 2058 2224 2129 2135 2200
+2195 6
+2113 2148 2220 2125 2159 2204
+2196 6
+2210 2181 2252 2094 2169 2139
+2197 7
+2124 2157 2228 2297 2233 2170 2300
+2198 7
+2249 2250 2253 2158 2166 2199 2175
+2199 7
+2209 2249 2158 2160 2198 2234 2175
+2200 6
+2179 2256 2194 2135 2237 2271
+2201 2
+2204 2148
+2202 5
+2240 2162 2172 2181 2254
+2203 5
+2161 2130 2243 2219 2178
+2204 4
+2201 2220 2195 2148
+2205 6
+2147 2281 2230 2264 2140 2206
+2206 6
+2208 2147 2276 2264 2140 2205
+2207 7
+2373 2086 2057 2251 2350 2168 2137
+2208 6
+2244 2186 2206 2276 2140 2302
+2209 5
+2160 2234 2189 2238 2199
+2210 7
+2214 2252 2285 2259 2196 2265 2169
+2211 7
+2144 2146 2213 2215 2293 2294 2295
+2212 6
+2131 2214 2153 2226 2163 2265
+2213 7
+2145 2146 2211 2228 2293 2294 2170
+2214 5
+2169 2210 2131 2212 2265
+2215 7
+2144 2211 2216 2294 2295 2296 2143
+2216 7
+2215 2217 2295 2296 2298 2142 2143
+2217 7
+2216 2218 2301 2296 2298 2141 2142
+2218 6
+2112 2217 2301 2164 2298 2141
+2219 8
+2178 2243 2282 2222 2159 2289 2132 2203
+2220 6
+2113 2312 2222 2159 2195 2204
+2221 6
+2273 2246 2225 2189 2193 2260
+2222 5
+2312 2289 2219 2220 2159
+2223 6
+2245 2152 2185 2188 2257 2267
+2224 8
+2256 2225 2192 2129 2194 2261 2268 2270
+2225 7
+2176 2224 2221 2192 2193 2260 2261
+2226 6
+2212 2163 2292 2229 2265 2269
+2227 6
+2179 2161 2290 2243 2231 2174
+2228 6
+2213 2293 2170 2299 2300 2197
+2229 6
+2272 2180 2122 2226 2163 2269
+2230 5
+2264 2281 2147 2187 2205
+2231 6
+2179 2291 2319 2290 2227 2237
+2232 5
+2240 2287 2172 2173 2239
+2233 6
+2245 2188 2157 2197 2297 2300
+2234 10
+2209 2274 2308 2343 2249 2199 2327 2303 2238 2335
+2235 5
+2184 2185 2257 2277 2241
+2236 5
+2258 2171 2309 2247 2191
+2237 5
+2200 2179 2291 2271 2231
+2238 7
+2209 2274 2310 2246 2284 2189 2234
+2239 7
+2304 2177 2183 2287 2232 2266 2173
+2240 6
+2316 2254 2287 2232 2202 2172
+2241 7
+2242 2277 2151 2184 2283 2288 2235
+2242 6
+2241 2275 2150 2151 2187 2283
+2243 6
+2282 2219 2161 2290 2227 2203
+2244 6
+2208 2307 2186 2190 2258 2302
+2245 6
+2188 2223 2354 2233 2297 2267
+2246 6
+2273 2284 2189 2221 2260 2238
+2247 7
+2177 2309 2248 2155 2171 2236 2334
+2248 6
+2177 2339 2247 2155 2266 2334
+2249 5
+2234 2308 2253 2198 2199
+2250 6
+2180 2166 2278 2253 2286 2198
+2251 6
+2436 2373 2164 2137 2333 2207
+2252 7
+2305 2210 2181 2314 2254 2259 2196
+2253 6
+2308 2249 2250 2286 2198 2303
+2254 6
+2240 2305 2181 2316 2252 2202
+2255 5
+3080 2344 1957 2358 2279
+2256 6
+2224 2321 2194 2200 2270 2271
+2257 6
+2267 2277 2280 2185 2223 2235
+2258 7
+2307 2244 2309 2190 2191 2318 2236
+2259 5
+2210 2338 2252 2314 2285
+2260 5
+2225 2273 2221 2246 2261
+2261 7
+2273 2313 2317 2224 2225 2260 2268
+2262 5
+3080 2353 2002 2499 2182
+2263 3
+1956 2117 1958
+2264 7
+2276 2281 2349 2230 2331 2205 2206
+2265 7
+2210 2212 2214 2285 2226 2292 2329
+2266 6
+2304 2177 2339 2248 2383 2239
+2267 6
+2245 2280 2223 2257 2354 2359
+2268 6
+2317 2224 2261 2330 2270 2325
+2269 6
+2272 2336 2322 2226 2292 2229
+2270 5
+2224 2256 2268 2325 2321
+2271 6
+2337 2256 2321 2291 2200 2237
+2272 5
+2322 2180 2229 2278 2269
+2273 7
+2306 2246 2313 2284 2221 2260 2261
+2274 5
+2328 2234 2310 2238 2327
+2275 8
+2242 2187 2372 2347 2381 2320 2283 2366
+2276 6
+2208 2349 2356 2302 2264 2206
+2277 7
+2241 2280 2288 2257 2360 2235 2364
+2278 8
+2272 2180 2345 2250 2286 2322 2346 2303
+2279 4
+2344 2351 1957 2255
+2280 5
+2360 2257 2267 2277 2359
+2281 6
+2187 2320 2230 2264 2331 2205
+2282 6
+2243 2342 2219 2319 2289 2290
+2283 6
+2241 2242 2275 2372 2347 2288
+2284 6
+2273 2306 2246 2310 2315 2238
+2285 5
+2265 2210 2259 2338 2329
+2286 4
+2250 2253 2278 2303
+2287 7
+2240 2304 2316 2323 2324 2232 2239
+2288 6
+2241 2283 2277 2347 2363 2364
+2289 5
+2312 2282 2219 2342 2222
+2290 5
+2319 2282 2227 2243 2231
+2291 7
+2337 2348 2319 2388 2231 2237 2271
+2292 6
+2336 2226 2329 2392 2265 2269
+2293 8
+2370 2371 2213 2211 2228 2294 2299 2367
+2294 8
+2370 2211 2213 2375 2215 2371 2293 2295
+2295 8
+2370 2211 2215 2216 2375 2376 2294 2296
+2296 8
+2375 2216 2217 2215 2376 2295 2298 2377
+2297 7
+2368 2369 2245 2354 2197 2233 2300
+2298 7
+2216 2217 2218 2376 2377 2296 2301
+2299 6
+2368 2371 2228 2293 2300 2367
+2300 8
+2368 2369 2228 2197 2297 2233 2299 2367
+2301 7
+2217 2218 2379 2164 2377 2298 2333
+2302 6
+2208 2307 2276 2374 2356 2244
+2303 8
+2308 2278 2343 2345 2253 2286 2234 2362
+2304 6
+2378 2383 2323 2266 2287 2239
+2305 5
+2316 2314 2252 2254 2332
+2306 6
+2273 2341 2313 2315 2284 2352
+2307 5
+2258 2318 2244 2374 2302
+2308 4
+2249 2234 2253 2303
+2309 6
+2247 2318 2258 2236 2365 2334
+2310 5
+2328 2274 2315 2284 2238
+2311 6
+2514 2165 2326 2396 1726 2463
+2312 5
+2416 2289 2220 2342 2222
+2313 6
+2273 2306 2317 2352 2355 2261
+2314 6
+2305 2338 2252 2259 2390 2332
+2315 6
+2306 2341 2310 2284 2357 2328
+2316 7
+2240 2305 2340 2254 2287 2324 2332
+2317 5
+2313 2330 2355 2268 2261
+2318 5
+2258 2307 2309 2374 2365
+2319 9
+2402 2342 2282 2348 2384 2290 2291 2231 2361
+2320 6
+2275 2281 2187 2381 2387 2331
+2321 5
+2256 2337 2325 2270 2271
+2322 6
+2272 2336 2278 2407 2346 2269
+2323 5
+2304 2378 2324 2389 2287
+2324 5
+2340 2323 2316 2389 2287
+2325 6
+2337 2321 2386 2330 2268 2270
+2326 7
+2499 2182 2311 1707 2353 2514 1726
+2327 5
+2328 2234 2274 2382 2335
+2328 6
+2274 2310 2315 2382 2357 2327
+2329 7
+2338 2403 2285 2292 2422 2392 2265
+2330 6
+2317 2385 2386 2355 2325 2268
+2331 7
+2281 2349 2415 2320 2417 2387 2264
+2332 6
+2305 2340 2314 2316 2390 2391
+2333 9
+2561 2379 2436 2506 2251 2444 2577 2164 2301
+2334 9
+2339 2309 2247 2248 2418 2424 2425 2426 2365
+2335 4
+2234 2343 2382 2327
+2336 6
+2407 2410 2322 2292 2392 2269
+2337 6
+2321 2386 2291 2388 2325 2271
+2338 7
+2434 2403 2314 2285 2259 2390 2329
+2339 6
+2248 2383 2408 2418 2266 2334
+2340 5
+2324 2316 2389 2332 2391
+2341 6
+2401 2306 2315 2352 2357 2398
+2342 6
+2416 2312 2282 2319 2384 2289
+2343 7
+2395 2382 2303 2234 2362 2394 2335
+2344 3
+2279 2358 2255
+2345 7
+2278 2406 2405 2346 2362 2395 2303
+2346 5
+2345 2322 2406 2278 2407
+2347 6
+2275 2372 2283 2380 2288 2363
+2348 5
+2404 2361 2291 2388 2319
+2349 7
+2276 2415 2417 2419 2356 2264 2331
+2350 7
+2373 2411 2481 2165 2168 2396 2207
+2351 4
+2279 1956 1957 2117
+2352 6
+2400 2401 2306 2341 2313 2355
+2353 4
+2326 2499 2262 2182
+2354 7
+2369 2245 2359 2297 2267 2428 2430
+2355 7
+2400 2313 2317 2352 2385 2393 2330
+2356 6
+2276 2374 2349 2419 2420 2302
+2357 6
+2341 2315 2382 2328 2397 2398
+2358 3
+2344 3080 2255
+2359 6
+2280 2354 2360 2267 2430 2431
+2360 6
+2432 2277 2280 2359 2364 2431
+2361 6
+2402 2404 2348 2414 2319 2384
+2362 4
+2345 2395 2343 2303
+2363 8
+2433 2435 2347 2380 2288 2458 2459 2364
+2364 6
+2432 2433 2277 2288 2360 2363
+2365 6
+2309 2374 2318 2421 2424 2334
+2366 4
+2409 2275 2372 2381
+2367 7
+2368 2371 2437 2438 2293 2299 2300
+2368 7
+2369 2437 2443 2297 2299 2300 2367
+2369 6
+2368 2428 2443 2354 2297 2300
+2370 7
+2371 2375 2440 2439 2293 2294 2295
+2371 7
+2370 2438 2439 2293 2294 2299 2367
+2372 7
+2275 2409 2347 2380 2423 2283 2366
+2373 8
+2251 2436 2411 2350 2575 2578 2579 2207
+2374 8
+2307 2318 2356 2421 2424 2420 2365 2302
+2375 7
+2370 2440 2441 2376 2294 2295 2296
+2376 7
+2375 2441 2442 2295 2377 2296 2298
+2377 7
+2376 2442 2379 2444 2296 2298 2301
+2378 6
+2304 2413 2383 2323 2452 2389
+2379 4
+2377 2444 2301 2333
+2380 5
+2363 2423 2347 2372 2435
+2381 7
+2275 2409 2320 2450 2387 2453 2366
+2382 8
+2343 2357 2327 2328 2394 2399 2397 2335
+2383 6
+2304 2339 2408 2378 2413 2266
+2384 5
+2416 2361 2402 2342 2319
+2385 4
+2393 2330 2355 2386
+2386 8
+2337 2385 2388 2325 2393 2330 2460 2461
+2387 5
+2320 2415 2331 2381 2453
+2388 8
+2337 2404 2412 2348 2446 2386 2291 2460
+2389 9
+2340 2378 2413 2447 2448 2323 2324 2391 2452
+2390 6
+2338 2314 2434 2449 2391 2332
+2391 7
+2340 2447 2448 2449 2389 2390 2332
+2392 6
+2336 2410 2292 2422 2455 2329
+2393 6
+2400 2385 2386 2355 2427 2461
+2394 7
+2343 2382 2454 2456 2395 2429 2399
+2395 6
+2405 2343 2345 2456 2362 2394
+2396 5
+2481 2463 2165 2350 2311
+2397 5
+2429 2398 2357 2382 2399
+2398 7
+2401 2341 2445 2429 2357 2457 2397
+2399 4
+2394 2397 2382 2429
+2400 5
+2352 2393 2427 2355 2401
+2401 7
+2400 2341 2445 2352 2451 2427 2398
+2402 7
+2416 2404 2537 2414 2319 2384 2361
+2403 5
+2329 2338 2434 2422 2497
+2404 6
+2402 2412 2348 2414 2388 2361
+2405 5
+2456 2345 2395 2467 2406
+2406 7
+2464 2467 2405 2407 2345 2346 2483
+2407 6
+2336 2464 2406 2410 2322 2346
+2408 4
+2418 2339 2413 2383
+2409 5
+2450 2372 2381 2366 2423
+2410 7
+2336 2464 2471 2472 2407 2455 2392
+2411 4
+2481 2579 2373 2350
+2412 6
+2404 2414 2388 2478 2446 2485
+2413 8
+2408 2378 2383 2480 2418 2452 2389 2490
+2414 7
+2402 2404 2537 2412 2466 2485 2361
+2415 9
+2540 2349 2417 2387 2453 2521 2331 2525 2462
+2416 4
+2312 2384 2402 2342
+2417 6
+2540 2349 2415 2419 2486 2331
+2418 6
+2339 2408 2413 2480 2426 2334
+2419 5
+2420 2417 2356 2349 2486
+2420 6
+2465 2374 2419 2356 2421 2486
+2421 6
+2465 2469 2374 2420 2424 2365
+2422 5
+2392 2329 2403 2455 2497
+2423 7
+2435 2372 2409 2380 2450 2473 2459
+2424 7
+2469 2374 2476 2421 2425 2365 2334
+2425 4
+2424 2426 2476 2334
+2426 6
+2476 2480 2418 2425 2491 2334
+2427 7
+2400 2401 2470 2451 2489 2393 2461
+2428 7
+2369 2503 2443 2508 2354 2430 2494
+2429 7
+2468 2454 2457 2394 2397 2398 2399
+2430 6
+2428 2354 2359 2492 2494 2431
+2431 6
+2432 2359 2360 2492 2430 2495
+2432 6
+2433 2360 2458 2431 2364 2495
+2433 4
+2432 2458 2363 2364
+2434 7
+2528 2497 2338 2403 2449 2390 2487
+2435 4
+2363 2380 2459 2423
+2436 6
+2373 2251 2575 2576 2577 2333
+2437 6
+2368 2438 2503 2504 2443 2367
+2438 7
+2371 2500 2437 2439 2504 2507 2367
+2439 7
+2370 2371 2500 2438 2502 2440 2507
+2440 7
+2370 2507 2501 2502 2439 2441 2375
+2441 7
+2501 2502 2375 2376 2505 2442 2440
+2442 7
+2441 2501 2376 2377 2506 2444 2505
+2443 5
+2368 2369 2428 2437 2503
+2444 6
+2377 2506 2379 2333 2505 2442
+2445 6
+2401 2474 2475 2451 2457 2398
+2446 4
+2412 2460 2388 2478
+2447 7
+2391 2479 2448 2449 2389 2487 2488
+2448 6
+2479 2452 2389 2391 2488 2447
+2449 6
+2434 2391 2447 2390 2487 2488
+2450 7
+2409 2423 2381 2477 2453 2473 2462
+2451 6
+2401 2470 2475 2445 2489 2427
+2452 7
+2378 2413 2479 2448 2484 2389 2490
+2453 5
+2450 2387 2381 2462 2415
+2454 7
+2498 2468 2429 2482 2456 2394 2493
+2455 5
+2392 2497 2410 2472 2422
+2456 6
+2467 2405 2482 2454 2394 2395
+2457 5
+2474 2468 2429 2398 2445
+2458 6
+2432 2433 2459 2522 2363 2495
+2459 6
+2435 2473 2423 2458 2363 2522
+2460 10
+2496 2530 2446 2511 2386 2388 2478 2523 2524 2461
+2461 6
+2470 2511 2386 2393 2427 2460
+2462 6
+2477 2415 2450 2453 2519 2521
+2463 4
+2481 2514 2396 2311
+2464 6
+2406 2471 2410 2407 2512 2483
+2465 5
+2513 2420 2421 2486 2469
+2466 4
+2537 2594 2485 2414
+2467 6
+2405 2406 2509 2482 2483 2456
+2468 7
+2474 2510 2493 2516 2454 2457 2429
+2469 6
+2465 2533 2476 2513 2421 2424
+2470 7
+2531 2515 2511 2451 2489 2427 2461
+2471 5
+2464 2472 2410 2512 2517
+2472 6
+2497 2471 2410 2535 2517 2455
+2473 7
+2423 2477 2450 2519 2553 2522 2459
+2474 5
+2457 2475 2468 2445 2510
+2475 7
+2532 2474 2445 2510 2451 2520 2489
+2476 7
+2469 2518 2424 2425 2426 2491 2533
+2477 4
+2473 2450 2462 2519
+2478 7
+2496 2536 2412 2541 2446 2485 2460
+2479 7
+2544 2542 2447 2448 2484 2488 2452
+2480 7
+2539 2413 2545 2418 2426 2491 2490
+2481 4
+2411 2396 2350 2463
+2482 5
+2456 2498 2467 2509 2454
+2483 6
+2464 2529 2467 2406 2509 2512
+2484 6
+2568 2570 2479 2544 2452 2490
+2485 6
+2466 2412 2594 2478 2541 2414
+2486 8
+2465 2513 2540 2417 2419 2420 2551 2552
+2487 7
+2528 2434 2542 2447 2449 2488 2543
+2488 8
+2544 2542 2479 2448 2449 2543 2487 2447
+2489 6
+2470 2475 2451 2515 2520 2427
+2490 7
+2568 2570 2413 2480 2545 2484 2452
+2491 5
+2480 2426 2539 2476 2518
+2492 7
+2534 2549 2431 2430 2495 2494 2527
+2493 7
+2498 2468 2546 2547 2516 2454 2548
+2494 5
+2492 2527 2428 2430 2508
+2495 7
+2432 2534 2458 2492 2522 2526 2431
+2496 5
+2536 2524 2530 2460 2478
+2497 8
+2528 2434 2403 2535 2472 2580 2422 2455
+2498 5
+2546 2482 2493 2454 2509
+2499 6
+2262 3080 2353 2514 2326 2615
+2500 6
+2563 2564 2438 2439 2504 2507
+2501 7
+2502 2440 2441 2442 2505 2556 2558
+2502 7
+2560 2501 2439 2440 2441 2507 2556
+2503 7
+2437 2504 2538 2443 2508 2428 2565
+2504 6
+2564 2565 2438 2503 2500 2437
+2505 7
+2501 2441 2506 2444 2442 2558 2559
+2506 7
+2561 2562 2505 2442 2444 2333 2559
+2507 7
+2560 2563 2500 2438 2502 2439 2440
+2508 7
+2503 2538 2572 2549 2428 2494 2527
+2509 7
+2529 2498 2467 2546 2482 2483 2550
+2510 6
+2468 2474 2475 2532 2548 2516
+2511 6
+2531 2470 2460 2523 2524 2461
+2512 7
+2464 2529 2567 2569 2471 2483 2517
+2513 5
+2465 2533 2469 2486 2551
+2514 5
+2615 2499 2463 2326 2311
+2515 6
+2531 2470 2520 2489 2554 2555
+2516 4
+2548 2468 2493 2510
+2517 6
+2535 2472 2569 2471 2512 2584
+2518 5
+2583 2491 2476 2533 2539
+2519 7
+2596 2473 2477 2582 2521 2553 2462
+2520 5
+2489 2475 2532 2515 2555
+2521 5
+2525 2519 2582 2462 2415
+2522 8
+2566 2473 2573 2553 2458 2459 2526 2495
+2523 5
+2585 2531 2460 2524 2511
+2524 7
+2496 2530 2511 2585 2586 2523 2460
+2525 6
+2598 2540 2415 2582 2552 2521
+2526 6
+2534 2566 2603 2573 2522 2495
+2527 4
+2508 2492 2549 2494
+2528 7
+2497 2434 2543 2580 2487 2621 2591
+2529 5
+2512 2483 2509 2550 2567
+2530 8
+2496 2536 2460 2602 2574 2586 2524 2557
+2531 6
+2470 2511 2515 2585 2554 2523
+2532 6
+2475 2571 2510 2548 2520 2555
+2533 7
+2469 2551 2476 2513 2518 2583 2588
+2534 6
+2593 2603 2549 2492 2526 2495
+2535 6
+2497 2472 2600 2580 2517 2584
+2536 5
+2496 2530 2541 2478 2557
+2537 3
+2402 2466 2414
+2538 5
+2616 2572 2508 2565 2503
+2539 8
+2592 2480 2545 2611 2613 2518 2583 2491
+2540 5
+2552 2417 2525 2486 2415
+2541 5
+2536 2594 2485 2478 2557
+2542 7
+2601 2543 2544 2487 2488 2479 2591
+2543 5
+2488 2528 2591 2542 2487
+2544 7
+2568 2601 2542 2479 2484 2606 2488
+2545 5
+2480 2592 2490 2539 2570
+2546 7
+2498 2509 2589 2547 2581 2550 2493
+2547 5
+2546 2587 2548 2493 2589
+2548 8
+2532 2571 2510 2547 2516 2587 2493 2590
+2549 7
+2593 2534 2572 2508 2618 2492 2527
+2550 7
+2529 2567 2632 2509 2607 2546 2581
+2551 6
+2533 2605 2513 2486 2552 2588
+2552 8
+2662 2598 2540 2605 2644 2486 2551 2525
+2553 7
+2596 2566 2473 2635 2641 2519 2522
+2554 6
+2531 2599 2604 2515 2585 2555
+2555 7
+2532 2597 2599 2571 2515 2520 2554
+2556 6
+2560 2628 2501 2502 2620 2558
+2557 5
+2536 2530 2622 2541 2574
+2558 6
+2627 2628 2501 2505 2556 2559
+2559 6
+2562 2627 2628 2505 2506 2558
+2560 5
+2563 2620 2507 2556 2502
+2561 5
+2624 2577 2506 2562 2333
+2562 6
+2624 2561 2626 2627 2506 2559
+2563 6
+2560 2564 2507 2637 2500 2620
+2564 6
+2563 2500 2565 2504 2637 2645
+2565 7
+2564 2503 2504 2538 2645 2646 2616
+2566 6
+2629 2635 2573 2553 2522 2526
+2567 6
+2529 2631 2632 2569 2512 2550
+2568 7
+2570 2606 2544 2609 2484 2612 2490
+2569 6
+2631 2567 2638 2512 2517 2584
+2570 7
+2592 2568 2545 2484 2617 2612 2490
+2571 6
+2595 2532 2597 2548 2555 2590
+2572 6
+2538 2508 2549 2616 2649 2618
+2573 5
+2522 2603 2566 2526 2629
+2574 4
+2530 2602 2557 2622
+2575 5
+2576 2578 2436 2373 2685
+2576 6
+2624 2436 2575 2577 2660 2685
+2577 5
+2576 2561 2624 2436 2333
+2578 5
+2741 2579 2373 2685 2575
+2579 3
+2578 2411 2373
+2580 6
+2528 2497 2535 2600 2673 2621
+2581 5
+2608 2546 2589 2550 2607
+2582 7
+2625 2596 2598 2634 2519 2521 2525
+2583 6
+2533 2539 2669 2611 2518 2588
+2584 6
+2535 2600 2569 2638 2517 2614
+2585 7
+2619 2531 2604 2554 2523 2524 2586
+2586 6
+2530 2602 2640 2585 2619 2524
+2587 6
+2630 2547 2548 2589 2590 2623
+2588 6
+2533 2551 2605 2669 2644 2583
+2589 6
+2630 2608 2546 2547 2581 2587
+2590 5
+2587 2595 2571 2548 2623
+2591 7
+2528 2656 2657 2601 2542 2543 2621
+2592 6
+2633 2570 2539 2545 2613 2617
+2593 6
+2534 2603 2672 2642 2549 2618
+2594 3
+2466 2485 2541
+2595 7
+2597 2571 2636 2670 2652 2590 2623
+2596 5
+2553 2625 2641 2582 2519
+2597 6
+2595 2599 2571 2636 2610 2555
+2598 6
+2582 2662 2634 2678 2552 2525
+2599 5
+2610 2554 2555 2604 2597
+2600 6
+2535 2673 2674 2580 2614 2584
+2601 6
+2656 2606 2544 2542 2591 2655
+2602 7
+2530 2663 2574 2640 2650 2586 2622
+2603 7
+2593 2629 2534 2664 2573 2642 2526
+2604 8
+2658 2599 2610 2647 2648 2585 2554 2619
+2605 4
+2552 2644 2588 2551
+2606 6
+2568 2601 2544 2609 2654 2655
+2607 6
+2632 2671 2608 2643 2581 2550
+2608 5
+2643 2607 2581 2630 2589
+2609 5
+2568 2612 2653 2606 2654
+2610 5
+2636 2647 2604 2597 2599
+2611 7
+2661 2539 2700 2669 2707 2613 2583
+2612 6
+2568 2570 2667 2609 2617 2653
+2613 5
+2592 2611 2539 2661 2633
+2614 4
+2584 2600 2674 2638
+2615 2
+2514 2499
+2616 5
+2649 2538 2572 2565 2646
+2617 7
+2592 2633 2570 2667 2612 2665 2666
+2618 5
+2672 2593 2572 2549 2649
+2619 5
+2640 2585 2586 2648 2604
+2620 8
+2560 2688 2563 2628 2637 2639 2556 2687
+2621 6
+2528 2657 2673 2580 2680 2591
+2622 4
+2602 2650 2557 2574
+2623 6
+2587 2595 2630 2651 2652 2590
+2624 6
+2561 2562 2660 2626 2576 2577
+2625 5
+2641 2634 2596 2682 2582
+2626 5
+2624 2689 2562 2627 2660
+2627 6
+2689 2562 2628 2626 2558 2559
+2628 7
+2689 2627 2556 2639 2620 2558 2559
+2629 7
+2603 2566 2664 2635 2573 2712 2686
+2630 6
+2587 2608 2643 2651 2589 2623
+2631 5
+2632 2569 2676 2638 2567
+2632 6
+2631 2567 2671 2676 2550 2607
+2633 5
+2592 2617 2613 2665 2661
+2634 5
+2625 2682 2678 2598 2582
+2635 7
+2659 2629 2566 2697 2641 2553 2686
+2636 6
+2595 2597 2668 2670 2610 2647
+2637 6
+2688 2563 2564 2645 2620 2687
+2638 8
+2631 2569 2763 2674 2676 2614 2711 2584
+2639 4
+2628 2687 2620 2689
+2640 5
+2648 2586 2619 2602 2663
+2641 8
+2625 2659 2596 2696 2635 2553 2682 2683
+2642 5
+2664 2593 2603 2684 2672
+2643 7
+2691 2630 2671 2607 2608 2675 2651
+2644 7
+2662 2605 2669 2710 2552 2588 2718
+2645 6
+2688 2564 2565 2637 2646 2713
+2646 7
+2565 2698 2645 2649 2616 2713 2714
+2647 7
+2658 2636 2699 2668 2610 2677 2604
+2648 8
+2658 2695 2663 2604 2640 2679 2619 2748
+2649 6
+2698 2572 2672 2646 2616 2618
+2650 4
+2681 2602 2622 2663
+2651 6
+2690 2630 2643 2675 2652 2623
+2652 7
+2690 2595 2692 2693 2670 2651 2623
+2653 6
+2667 2701 2702 2609 2612 2654
+2654 5
+2609 2655 2653 2606 2701
+2655 6
+2656 2601 2701 2606 2715 2654
+2656 6
+2657 2601 2706 2591 2715 2655
+2657 5
+2656 2680 2706 2621 2591
+2658 5
+2648 2679 2604 2677 2647
+2659 5
+2641 2683 2635 2717 2697
+2660 7
+2624 2736 2626 2689 2735 2576 2685
+2661 6
+2633 2705 2611 2707 2613 2665
+2662 6
+2598 2644 2678 2552 2718 2719
+2663 6
+2695 2602 2640 2648 2681 2650
+2664 7
+2629 2726 2603 2642 2712 2684 2686
+2665 7
+2661 2633 2666 2703 2704 2705 2617
+2666 5
+2617 2667 2703 2702 2665
+2667 5
+2617 2666 2612 2653 2702
+2668 5
+2699 2636 2670 2694 2647
+2669 8
+2722 2707 2700 2611 2644 2710 2583 2588
+2670 6
+2595 2693 2694 2636 2668 2652
+2671 6
+2691 2632 2607 2643 2708 2676
+2672 7
+2593 2684 2698 2642 2649 2618 2716
+2673 6
+2600 2766 2674 2580 2680 2621
+2674 7
+2600 2763 2764 2638 2673 2766 2614
+2675 4
+2651 2643 2691 2690
+2676 7
+2631 2632 2638 2671 2708 2709 2711
+2677 5
+2679 2721 2658 2699 2647
+2678 7
+2598 2662 2634 2682 2779 2719 2757
+2679 6
+2721 2658 2677 2648 2747 2748
+2680 5
+2725 2657 2706 2621 2673
+2681 4
+2650 2695 2724 2663
+2682 7
+2752 2625 2696 2634 2641 2678 2779
+2683 6
+2752 2659 2759 2696 2641 2717
+2684 6
+2726 2664 2672 2642 2716 2751
+2685 6
+2660 2735 2576 2578 2741 2575
+2686 6
+2720 2629 2664 2697 2635 2712
+2687 7
+2688 2689 2728 2731 2637 2639 2620
+2688 6
+2728 2637 2645 2713 2620 2687
+2689 9
+2626 2627 2660 2731 2639 2736 2774 2628 2687
+2690 7
+2691 2692 2729 2675 2708 2651 2652
+2691 5
+2675 2643 2708 2690 2671
+2692 5
+2732 2690 2652 2693 2729
+2693 6
+2692 2694 2732 2670 2734 2652
+2694 6
+2693 2699 2668 2670 2740 2734
+2695 5
+2648 2681 2748 2724 2663
+2696 4
+2752 2641 2682 2683
+2697 6
+2720 2659 2635 2743 2717 2686
+2698 7
+2754 2646 2672 2742 2649 2714 2716
+2699 7
+2721 2694 2668 2740 2677 2647 2744
+2700 8
+2722 2755 2756 2737 2669 2705 2611 2707
+2701 8
+2730 2733 2702 2739 2715 2653 2654 2655
+2702 6
+2730 2666 2667 2701 2703 2653
+2703 7
+2665 2666 2702 2704 2738 2771 2730
+2704 5
+2705 2665 2738 2727 2703
+2705 7
+2661 2727 2665 2700 2704 2737 2707
+2706 5
+2656 2657 2715 2725 2680
+2707 6
+2661 2705 2700 2669 2737 2611
+2708 10
+2690 2691 2729 2671 2723 2676 2709 2745 2746 2750
+2709 4
+2708 2723 2676 2711
+2710 6
+2722 2758 2760 2669 2644 2718
+2711 6
+2723 2763 2638 2676 2709 2750
+2712 6
+2720 2629 2726 2664 2765 2686
+2713 6
+2688 2753 2728 2645 2646 2714
+2714 5
+2713 2698 2754 2646 2753
+2715 7
+2656 2725 2701 2733 2706 2739 2655
+2716 6
+2698 2672 2742 2684 2762 2751
+2717 5
+2697 2683 2659 2743 2759
+2718 6
+2662 2758 2760 2644 2710 2719
+2719 5
+2758 2718 2678 2662 2757
+2720 6
+2697 2765 2743 2712 2777 2686
+2721 5
+2744 2699 2747 2677 2679
+2722 6
+2755 2760 2700 2669 2767 2710
+2723 4
+2708 2709 2750 2711
+2724 4
+2681 2748 2749 2695
+2725 4
+2680 2706 2715 2733
+2726 5
+2664 2712 2684 2765 2751
+2727 6
+2768 2756 2737 2704 2705 2738
+2728 7
+2688 2753 2731 2773 2775 2713 2687
+2729 6
+2690 2692 2732 2708 2745 2746
+2730 4
+2739 2701 2702 2703
+2731 5
+2728 2689 2773 2774 2687
+2732 6
+2692 2693 2729 2734 2772 2746
+2733 4
+2715 2739 2701 2725
+2734 6
+2693 2694 2732 2770 2740 2772
+2735 4
+2736 2660 2685 2741
+2736 4
+2689 2660 2774 2735
+2737 5
+2705 2707 2700 2756 2727
+2738 5
+2704 2768 2727 2771 2703
+2739 4
+2730 2715 2701 2733
+2740 7
+2694 2761 2699 2734 2744 2770 2776
+2741 3
+2578 2685 2735
+2742 6
+2754 2762 2795 2769 2716 2698
+2743 6
+2720 2784 2759 2697 2777 2717
+2744 5
+2721 2699 2740 2747 2761
+2745 5
+2729 2746 2708 2750 2780
+2746 7
+2729 2732 2799 2708 2745 2772 2780
+2747 8
+2721 2788 2761 2802 2803 2679 2744 2748
+2748 9
+2724 2789 2695 2749 2679 2648 2788 2747 2781
+2749 4
+2748 2778 2724 2781
+2750 5
+2745 2723 2708 2780 2711
+2751 6
+2726 2684 2762 2765 2716 2783
+2752 6
+2779 2759 2696 2682 2683 2782
+2753 6
+2754 2790 2728 2775 2713 2714
+2754 7
+2753 2790 2698 2798 2769 2742 2714
+2755 7
+2816 2722 2756 2794 2700 2767 2801
+2756 7
+2816 2755 2727 2794 2700 2768 2737
+2757 5
+2779 2796 2758 2678 2719
+2758 7
+2787 2757 2760 2796 2710 2718 2719
+2759 7
+2752 2784 2743 2807 2683 2717 2782
+2760 6
+2722 2787 2758 2767 2710 2718
+2761 5
+2744 2776 2747 2740 2803
+2762 6
+2716 2795 2742 2751 2812 2783
+2763 4
+2674 2764 2638 2711
+2764 3
+2674 2763 2766
+2765 7
+2720 2726 2791 2712 2777 2783 2751
+2766 3
+2673 2674 2764
+2767 5
+2760 2816 2722 2755 2787
+2768 6
+2785 2756 2727 2794 2738 2771
+2769 5
+2808 2754 2795 2798 2742
+2770 4
+2776 2810 2740 2734
+2771 6
+2785 2792 2703 2768 2738 2804
+2772 4
+2746 2732 2734 2799
+2773 7
+2819 2820 2728 2793 2731 2774 2775
+2774 5
+2736 2689 2731 2820 2773
+2775 6
+2753 2790 2728 2793 2805 2773
+2776 5
+2761 2770 2803 2740 2810
+2777 6
+2720 2784 2791 2765 2743 2811
+2778 4
+2809 2815 2749 2781
+2779 6
+2752 2757 2796 2678 2682 2782
+2780 4
+2745 2746 2750 2799
+2781 6
+2789 2802 2809 2778 2748 2749
+2782 4
+2752 2779 2807 2759
+2783 8
+2817 2791 2762 2795 2765 2800 2812 2751
+2784 7
+2821 2823 2759 2743 2807 2777 2811
+2785 4
+2768 2794 2771 2804
+2786 3
+2834 2797 2806
+2787 5
+2760 2816 2796 2758 2767
+2788 4
+2802 2747 2748 2789
+2789 5
+2788 2802 2748 2781 2809
+2790 6
+2753 2754 2798 2805 2775 2813
+2791 6
+2818 2765 2800 2777 2811 2783
+2792 3
+2834 2771 2804
+2793 4
+2805 2819 2773 2775
+2794 8
+2816 2785 2755 2756 2828 2768 2801 2804
+2795 6
+2762 2769 2742 2808 2812 2783
+2796 4
+2779 2757 2758 2787
+2797 2
+2786 2806
+2798 5
+2808 2769 2754 2813 2790
+2799 3
+2780 2746 2772
+2800 5
+2817 2818 2791 2830 2783
+2801 4
+2816 2794 2755 2828
+2802 8
+2788 2789 2822 2824 2803 2809 2747 2781
+2803 6
+2822 2761 2802 2776 2810 2747
+2804 6
+2785 2792 2794 2828 2834 2771
+2805 6
+2819 2790 2793 2775 2813 2814
+2806 3
+2786 2834 2797
+2807 4
+2784 2823 2782 2759
+2808 6
+2825 2795 2798 2769 2812 2813
+2809 7
+2789 2824 2826 2802 2778 2781 2815
+2810 4
+2776 2770 2803 2822
+2811 5
+2784 2777 2818 2821 2791
+2812 8
+2817 2827 2825 2762 2795 2833 2808 2783
+2813 7
+2790 2825 2798 2831 2805 2808 2814
+2814 6
+2819 2831 2836 2837 2813 2805
+2815 3
+2809 2778 2826
+2816 6
+2755 2756 2794 2767 2801 2787
+2817 6
+2827 2830 2800 2835 2812 2783
+2818 6
+2832 2821 2791 2830 2800 2811
+2819 8
+2820 2837 2793 2836 2773 2838 2814 2805
+2820 5
+2838 2819 2836 2773 2774
+2821 5
+2784 2832 2818 2811 2823
+2822 5
+2824 2802 2803 2810 2839
+2823 3
+2784 2821 2807
+2824 7
+2822 2826 2829 2802 2839 2840 2809
+2825 5
+2808 2833 2812 2813 2831
+2826 7
+2850 2824 2829 2809 2842 2844 2815
+2827 5
+2817 2835 2812 2841 2833
+2828 3
+2801 2794 2804
+2829 7
+2853 2854 2824 2826 2840 2842 2844
+2830 6
+2817 2818 2855 2800 2835 2832
+2831 8
+2848 2825 2833 2837 2843 2813 2814 2847
+2832 4
+2818 2821 2830 2855
+2833 7
+2825 2827 2831 2841 2843 2812 2845
+2834 4
+2792 2786 2804 2806
+2835 6
+2817 2851 2855 2827 2830 2841
+2836 6
+2849 2819 2820 2837 2838 2814
+2837 7
+2848 2849 2819 2831 2836 2814 2847
+2838 5
+2856 2836 2819 2820 2849
+2839 4
+2824 2840 2846 2822
+2840 6
+2852 2853 2824 2829 2839 2846
+2841 5
+2835 2833 2827 2845 2851
+2842 4
+2826 2850 2844 2829
+2843 7
+2848 2859 2861 2862 2831 2833 2845
+2844 6
+2850 2853 2854 2826 2829 2842
+2845 5
+2851 2833 2843 2859 2841
+2846 3
+2840 2852 2839
+2847 6
+2848 2849 2856 2861 2831 2837
+2848 6
+2856 2861 2831 2837 2843 2847
+2849 5
+2856 2836 2837 2838 2847
+2850 5
+2826 2844 2842 2854 2863
+2851 6
+2855 2859 2860 2835 2841 2845
+2852 5
+2840 2857 2858 2853 2846
+2853 10
+2852 2854 2858 2829 2865 2866 2867 2868 2840 2844
+2854 7
+2850 2853 2829 2863 2866 2868 2844
+2855 5
+2832 2851 2835 2860 2830
+2856 7
+2848 2849 2861 2838 2872 2874 2847
+2857 2
+2858 2852
+2858 5
+2857 2867 2852 2853 2865
+2859 6
+2851 2860 2862 2864 2843 2845
+2860 3
+2859 2851 2855
+2861 7
+2848 2875 2856 2862 2872 2843 2847
+2862 6
+2843 2859 2861 2864 2869 2875
+2863 4
+2850 2868 2870 2854
+2864 3
+2859 2869 2862
+2865 5
+2867 2858 2866 2853 2873
+2866 7
+2853 2854 2865 2868 2873 2877 2879
+2867 5
+2865 2858 2873 2853 2871
+2868 9
+2880 2881 2853 2854 2863 2866 2870 2876 2877
+2869 4
+2864 2875 2878 2862
+2870 3
+2876 2868 2863
+2871 3
+2873 2867 2879
+2872 5
+2856 2875 2882 2874 2861
+2873 6
+2865 2866 2867 2871 2877 2879
+2874 3
+2856 2872 2882
+2875 7
+2882 2883 2861 2862 2869 2872 2878
+2876 5
+2880 2881 2868 2877 2870
+2877 8
+2880 2881 2884 2866 2868 2873 2876 2879
+2878 4
+2883 2875 2869 2885
+2879 6
+2881 2884 2866 2871 2873 2877
+2880 5
+2888 2876 2868 2877 2881
+2881 8
+2880 2884 2886 2888 2868 2876 2877 2879
+2882 4
+2872 2874 2875 2883
+2883 5
+2882 2875 2885 2878 2887
+2884 4
+2881 2877 2886 2879
+2885 3
+2883 2878 2887
+2886 5
+2888 2881 2890 2884 2889
+2887 2
+2883 2885
+2888 4
+2880 2881 2886 2889
+2889 3
+2888 2890 2886
+2890 2
+2889 2886
+2891 1
+50
+2892 1
+2893
+2893 4
+2892 2901 2894 2895
+2894 3
+2897 2893 2901
+2895 3
+2896 2893 2901
+2896 6
+2895 2899 2901 2903 2905 2909
+2897 4
+2902 2894 158 2901
+2898 4
+2904 2906 2899 2900
+2899 4
+2896 2905 2898 2900
+2900 4
+2904 2905 2898 2899
+2901 9
+2893 2894 2895 2896 2897 2902 2903 2907 2908
+2902 6
+199 158 2897 2901 2907 2910
+2903 5
+2896 2911 2908 2901 2909
+2904 3
+2898 2900 2906
+2905 4
+2896 2899 2900 2909
+2906 2
+2904 2898
+2907 7
+2916 3079 2901 2902 2908 2910 2911
+2908 4
+2911 2907 2901 2903
+2909 4
+2896 2905 2911 2903
+2910 5
+2915 2907 2916 2902 199
+2911 5
+3079 2907 2908 2909 2903
+2912 2
+2913 2917
+2913 5
+2912 2921 2914 2922 2917
+2914 3
+2913 2922 2919
+2915 5
+2924 2916 2910 246 199
+2916 5
+2915 2907 2924 2910 3079
+2917 4
+2912 2913 2920 2921
+2918 1
+2932
+2919 4
+2928 2914 2930 2922
+2920 5
+2921 2929 2917 2926 2927
+2921 7
+2913 2917 2920 2922 2926 2927 2930
+2922 7
+2913 2914 2919 2921 2927 2928 2930
+2923 2
+2929 2931
+2924 8
+2915 2916 246 3079 273 2933 2934 315
+2925 5
+376 378 284 2949 2943
+2926 7
+2920 2921 2927 2929 2939 2940 2941
+2927 8
+2920 2921 2922 2926 2930 2940 2941 2942
+2928 5
+2922 2938 2930 2942 2919
+2929 7
+2920 2923 2926 2931 2937 2939 2940
+2930 8
+2919 2921 2922 2927 2928 2938 2941 2942
+2931 4
+2929 2939 2923 2937
+2932 3
+2944 2918 2935
+2933 6
+2945 2946 3079 2924 2934 315
+2934 6
+2946 2947 3079 2924 2933 2936
+2935 5
+2944 2948 2932 2936 3079
+2936 5
+3079 2947 2948 2934 2935
+2937 5
+2929 2939 2931 2950 2951
+2938 4
+2928 2953 2930 2942
+2939 7
+2951 2952 2926 2929 2931 2937 2940
+2940 7
+2952 2954 2926 2927 2929 2939 2941
+2941 7
+2954 2955 2926 2927 2930 2940 2942
+2942 7
+2953 2955 2927 2928 2930 2938 2941
+2943 3
+2968 2925 2949
+2944 4
+2961 2948 2932 2935
+2945 7
+352 2946 2971 2958 2933 2970 315
+2946 6
+2945 2947 2958 2959 2933 2934
+2947 6
+2946 2948 2959 2960 2934 2936
+2948 6
+2944 2947 2960 2961 2935 2936
+2949 5
+2968 378 2973 2925 2943
+2950 4
+2937 2963 2965 2951
+2951 7
+2950 2952 2963 2965 2966 2937 2939
+2952 7
+2951 2954 2964 2965 2966 2939 2940
+2953 4
+2938 2955 2957 2942
+2954 7
+2952 2955 2962 2964 2966 2940 2941
+2955 7
+2953 2954 2957 2962 2964 2941 2942
+2956 2
+2969 2967
+2957 5
+2953 2962 2955 2974 2967
+2958 4
+2945 2946 2970 2959
+2959 6
+2946 2947 2958 2960 2970 2972
+2960 6
+2947 2948 2959 2961 2972 2975
+2961 5
+2944 2960 2976 2948 2975
+2962 6
+2979 2954 2955 2957 2964 2974
+2963 4
+2977 2965 2950 2951
+2964 7
+2978 2979 2952 2954 2955 2962 2966
+2965 8
+2977 2978 2950 2951 2952 2986 2963 2966
+2966 6
+2978 2951 2952 2954 2964 2965
+2967 6
+2985 2956 2957 2989 2969 2974
+2968 6
+2980 2949 2988 2994 2973 2943
+2969 4
+2985 2995 2956 2967
+2970 7
+352 2945 2987 2958 2959 2971 2972
+2971 7
+352 2945 418 2987 429 469 2970
+2972 7
+2992 2987 2991 2960 2970 2959 2975
+2973 5
+2968 378 506 2994 2949
+2974 6
+2979 2989 2957 2962 2996 2967
+2975 6
+2976 2992 2993 2960 2961 2972
+2976 4
+2992 2961 2993 2975
+2977 4
+2986 2963 2965 2990
+2978 7
+2979 2998 2986 2964 2965 2966 2999
+2979 7
+2978 2962 2996 2998 2999 2964 2974
+2980 6
+3011 2984 3020 2988 2968 3002
+2981 3
+3000 2997 2982
+2982 3
+2997 2981 2983
+2983 5
+2984 3001 3003 2997 2982
+2984 4
+3001 3002 2980 2983
+2985 6
+2989 2995 2967 2969 3004 3005
+2986 7
+2977 2978 3014 3016 2990 2965 2998
+2987 7
+3008 2991 469 2970 2971 2972 3007
+2988 4
+2968 2994 3011 2980
+2989 6
+3012 2985 2996 2967 3005 2974
+2990 3
+2977 2986 3014
+2991 5
+3008 3009 2992 2987 2972
+2992 7
+2976 3009 3010 2991 2993 2972 2975
+2993 4
+2976 2992 3010 2975
+2994 6
+3011 3018 2988 2968 506 2973
+2995 3
+2969 3004 2985
+2996 6
+2979 3012 3013 2989 2999 2974
+2997 6
+2981 2982 2983 3000 3003 3006
+2998 6
+2978 2979 3015 3016 2986 2999
+2999 6
+2978 2979 3013 3015 2996 2998
+3000 5
+3024 3006 2981 3022 2997
+3001 6
+2983 2984 3017 3021 3002 3003
+3002 6
+2980 2984 3017 3020 3023 3001
+3003 6
+2983 3021 3025 2997 3001 3006
+3004 4
+2985 2995 3005 3031
+3005 6
+3012 2985 2989 3031 3032 3004
+3006 5
+3000 3024 3003 2997 3025
+3007 6
+3008 514 2987 3026 3027 469
+3008 6
+3009 2987 2991 3027 3029 3007
+3009 7
+3008 3010 3019 2991 2992 3028 3029
+3010 4
+2992 2993 3019 3009
+3011 7
+2980 3020 3018 2988 2994 3037 3038
+3012 6
+3013 2989 2996 3032 3033 3005
+3013 6
+3012 3015 2996 2999 3032 3033
+3014 5
+3016 2986 3036 3034 2990
+3015 6
+3013 3016 2998 2999 3033 3035
+3016 6
+3014 3015 2986 2998 3034 3035
+3017 4
+3001 3002 3021 3023
+3018 5
+3037 506 3011 541 2994
+3019 4
+3009 3010 3028 3030
+3020 6
+3011 2980 3023 3002 3038 3039
+3021 8
+3017 3054 3023 3025 3058 3001 3003 3039
+3022 4
+3000 3040 3024 3055
+3023 5
+3017 3002 3020 3021 3039
+3024 6
+3057 3022 3055 3025 3000 3006
+3025 6
+3021 3024 3057 3058 3003 3006
+3026 6
+514 3043 3042 3027 542 3007
+3027 6
+3008 3042 3046 3026 3029 3007
+3028 7
+3009 3044 3045 3046 3019 3029 3030
+3029 6
+3008 3009 3042 3046 3027 3028
+3030 3
+3044 3019 3028
+3031 4
+3032 3004 3005 3047
+3032 7
+3012 3013 3047 3048 3031 3033 3005
+3033 7
+3012 3013 3015 3048 3049 3032 3035
+3034 7
+3014 3016 3050 3051 3052 3035 3036
+3035 7
+3015 3016 3049 3050 3051 3033 3034
+3036 5
+3041 3034 3051 3052 3014
+3037 6
+3011 3018 3056 600 541 3038
+3038 6
+3011 3020 3053 3056 3037 3039
+3039 6
+3020 3053 3054 3021 3023 3038
+3040 4
+3055 3060 3022 3063
+3041 4
+3052 3066 3036 3065
+3042 7
+3043 3046 3026 3027 3061 3062 3029
+3043 6
+3042 553 588 3026 3061 542
+3044 6
+3045 3059 3028 3030 631 634
+3045 6
+3044 3046 3059 3028 631 633
+3046 8
+3042 3045 650 3027 3028 3029 3062 633
+3047 4
+632 3032 3048 3031
+3048 6
+3047 3049 3032 632 3033 636
+3049 7
+3048 3050 3033 3035 636 3069 639
+3050 6
+3049 3051 3034 3035 3068 3069
+3051 7
+3067 3068 3050 3052 3034 3035 3036
+3052 6
+3041 3051 3034 3067 3036 3066
+3053 7
+645 3054 3056 660 662 3038 3039
+3054 5
+3058 3053 660 3021 3039
+3055 8
+3040 652 3022 655 3024 3057 654 3063
+3056 6
+645 649 3053 600 3037 3038
+3057 7
+3025 652 3055 3024 657 3058 655
+3058 7
+3057 3025 685 3054 3021 657 660
+3059 4
+633 3044 3045 631
+3060 5
+3040 681 3064 637 3063
+3061 7
+3042 3043 618 588 3062 663 664
+3062 5
+650 3061 3042 3046 663
+3063 6
+3040 681 682 654 3055 3060
+3064 3
+3060 637 653
+3065 5
+720 3041 3066 635 692
+3066 6
+3041 683 3052 692 3065 3067
+3067 7
+3051 683 3052 691 3066 3068 666
+3068 6
+3067 3050 3051 666 667 3069
+3069 6
+3049 3050 666 667 3068 639
+3070 4
+818 763 830 710
+3071 8
+3077 3078 1673 1614 1686 1623 1529 1534
+3072 3
+1744 1796 1799
+3073 5
+1665 1690 3075 1797 1703
+3074 9
+1251 1158 3083 1260 1166 1283 1271 1311 1215
+3075 8
+3073 1669 1798 1800 1744 1684 1690 1797
+3076 7
+1668 1803 1805 1806 1746 1686 1623
+3077 8
+1473 1411 1542 1529 1498 1372 1534 3071
+3078 7
+1640 1673 1652 1688 1564 1534 3071
+3079 8
+2916 2924 2933 2934 2935 2936 2907 2911
+3080 7
+2262 2499 1957 2255 2002 2358 1722
+3081 7
+2086 2056 1876 2165 2168 1726 1919
+3082 4
+1672 1619 1691 1662
+3083 4
+3074 1341 1166 1247
+3084 10
+161 132 118 232 211 308 182 279 317 190
diff --git a/pysal/examples/natregimes.dbf b/pysal/examples/natregimes.dbf
new file mode 100644
index 0000000..3456e2e
Binary files /dev/null and b/pysal/examples/natregimes.dbf differ
diff --git a/pysal/examples/natregimes.shp b/pysal/examples/natregimes.shp
new file mode 100644
index 0000000..4bf58a6
Binary files /dev/null and b/pysal/examples/natregimes.shp differ
diff --git a/pysal/examples/natregimes.shx b/pysal/examples/natregimes.shx
new file mode 100644
index 0000000..c46b05a
Binary files /dev/null and b/pysal/examples/natregimes.shx differ
diff --git a/pysal/examples/nonplanarsegments.dbf b/pysal/examples/nonplanarsegments.dbf
new file mode 100644
index 0000000..b547c5f
Binary files /dev/null and b/pysal/examples/nonplanarsegments.dbf differ
diff --git a/pysal/examples/nonplanarsegments.prj b/pysal/examples/nonplanarsegments.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/nonplanarsegments.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/nonplanarsegments.qpj b/pysal/examples/nonplanarsegments.qpj
new file mode 100644
index 0000000..5fbc831
--- /dev/null
+++ b/pysal/examples/nonplanarsegments.qpj
@@ -0,0 +1 @@
+GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
diff --git a/pysal/examples/nonplanarsegments.shp b/pysal/examples/nonplanarsegments.shp
new file mode 100644
index 0000000..bd8d4b3
Binary files /dev/null and b/pysal/examples/nonplanarsegments.shp differ
diff --git a/pysal/examples/nonplanarsegments.shx b/pysal/examples/nonplanarsegments.shx
new file mode 100644
index 0000000..4ab10fc
Binary files /dev/null and b/pysal/examples/nonplanarsegments.shx differ
diff --git a/pysal/examples/ohio.swm b/pysal/examples/ohio.swm
new file mode 100644
index 0000000..f258e5d
Binary files /dev/null and b/pysal/examples/ohio.swm differ
diff --git a/pysal/examples/rook31.dbf b/pysal/examples/rook31.dbf
new file mode 100644
index 0000000..fddc13c
Binary files /dev/null and b/pysal/examples/rook31.dbf differ
diff --git a/pysal/examples/rook31.gal b/pysal/examples/rook31.gal
new file mode 100644
index 0000000..c9d91b8
--- /dev/null
+++ b/pysal/examples/rook31.gal
@@ -0,0 +1,7 @@
+0 3 rook31 POLY_ID
+1 1
+2
+2 2
+3 1
+3 1
+2
diff --git a/pysal/examples/rook31.shp b/pysal/examples/rook31.shp
new file mode 100644
index 0000000..3f2ec3f
Binary files /dev/null and b/pysal/examples/rook31.shp differ
diff --git a/pysal/examples/rook31.shx b/pysal/examples/rook31.shx
new file mode 100644
index 0000000..ba074d4
Binary files /dev/null and b/pysal/examples/rook31.shx differ
diff --git a/pysal/examples/sacramentot2.dbf b/pysal/examples/sacramentot2.dbf
new file mode 100644
index 0000000..104dad8
Binary files /dev/null and b/pysal/examples/sacramentot2.dbf differ
diff --git a/pysal/examples/sacramentot2.gal b/pysal/examples/sacramentot2.gal
new file mode 100644
index 0000000..c2137d2
--- /dev/null
+++ b/pysal/examples/sacramentot2.gal
@@ -0,0 +1,807 @@
+0 403 sacramentot2 POLYID
+1 8
+10 9 8 7 5 4 3 2
+2 4
+6 4 3 1
+3 3
+6 2 1
+4 4
+7 6 1 2
+5 10
+29 28 27 18 12 10 16 6 9 1
+6 8
+9 8 7 11 3 2 5 4
+7 4
+8 4 6 1
+8 4
+9 6 7 1
+9 4
+6 1 8 5
+10 3
+12 5 1
+11 8
+44 34 30 25 20 16 26 6
+12 4
+29 14 5 10
+13 6
+40 36 19 33 39 15
+14 2
+17 12
+15 5
+33 23 17 29 13
+16 7
+57 52 51 18 44 11 5
+17 3
+29 14 15
+18 5
+54 45 27 16 5
+19 11
+81 78 75 69 67 65 37 36 53 40 13
+20 6
+25 24 21 22 26 11
+21 2
+22 20
+22 3
+26 21 20
+23 5
+38 33 28 29 15
+24 3
+25 20 26
+25 4
+34 20 24 11
+26 7
+44 34 30 22 24 20 11
+27 10
+94 61 59 43 35 33 28 45 18 5
+28 6
+38 35 5 27 29 23
+29 6
+5 12 28 23 17 15
+30 3
+34 26 11
+31 3
+70 189 32
+32 3
+56 70 31
+33 9
+43 42 39 38 27 35 23 15 13
+34 4
+26 30 11 25
+35 4
+38 27 28 33
+36 4
+37 40 13 19
+37 2
+36 19
+38 4
+35 23 33 28
+39 6
+47 42 41 40 33 13
+40 7
+53 46 41 39 13 36 19
+41 5
+48 47 46 39 40
+42 6
+64 62 50 43 33 39
+43 7
+100 94 73 64 27 33 42
+44 6
+85 63 51 26 11 16
+45 5
+59 54 121 18 27
+46 5
+49 41 48 40 53
+47 6
+55 53 50 48 39 41
+48 5
+53 49 41 47 46
+49 6
+65 60 58 53 46 48
+50 4
+62 55 42 47
+51 5
+66 63 52 44 16
+52 6
+90 76 57 66 51 16
+53 10
+65 62 58 55 47 48 40 49 46 19
+54 5
+121 76 57 18 45
+55 3
+47 50 53
+56 8
+249 248 233 219 70 180 74 32
+57 4
+76 52 16 54
+58 7
+78 77 68 60 62 49 53
+59 8
+118 117 98 97 61 121 45 27
+60 5
+78 69 65 49 58
+61 5
+101 97 94 59 27
+62 9
+72 71 68 42 64 73 50 53 58
+63 5
+90 66 44 85 51
+64 4
+73 43 42 62
+65 6
+69 67 49 53 60 19
+66 4
+90 63 51 52
+67 4
+69 65 78 19
+68 7
+88 86 77 72 80 62 58
+69 5
+60 65 67 78 19
+70 14
+265 189 185 175 174 162 159 155 141 132 130 56 32 31
+71 6
+92 80 72 73 79 62
+72 4
+62 71 80 68
+73 6
+100 79 43 64 71 62
+74 6
+183 180 115 163 75 56
+75 6
+119 115 99 81 19 74
+76 5
+93 52 90 57 54
+77 6
+87 86 84 78 68 58
+78 9
+84 83 82 77 58 60 19 69 67
+79 3
+95 73 71
+80 5
+102 89 71 72 68
+81 6
+108 99 91 82 19 75
+82 4
+91 83 78 81
+83 5
+91 78 82 84 109
+84 5
+109 87 77 78 83
+85 5
+143 123 90 44 63
+86 5
+106 88 87 68 77
+87 5
+107 106 77 86 84
+88 6
+105 104 103 102 68 86
+89 5
+103 102 96 92 80
+90 7
+123 93 63 85 66 52 76
+91 4
+108 82 83 81
+92 7
+148 113 96 95 129 71 89
+93 6
+143 122 121 90 76 123
+94 9
+140 124 110 100 27 61 101 43 139
+95 5
+129 116 100 79 92
+96 5
+114 113 111 92 89
+97 5
+101 118 179 59 61
+98 4
+117 121 122 59
+99 5
+120 119 81 108 75
+100 6
+140 116 43 94 73 95
+101 5
+139 97 179 61 94
+102 4
+103 80 89 88
+103 5
+111 104 89 102 88
+104 6
+135 128 126 105 103 88
+105 5
+128 107 106 88 104
+106 4
+107 86 87 105
+107 7
+137 128 112 109 87 105 106
+108 7
+136 127 120 91 109 81 99
+109 6
+125 112 107 84 108 83
+110 3
+140 124 94
+111 5
+133 126 114 96 103
+112 4
+137 134 107 109
+113 6
+148 147 131 114 92 96
+114 6
+149 133 131 96 111 113
+115 5
+75 119 120 163 74
+116 6
+150 145 140 129 100 95
+117 4
+118 98 122 59
+118 5
+117 122 179 59 97
+119 4
+120 75 99 115
+120 6
+163 108 136 99 119 115
+121 6
+122 54 93 98 59 45
+122 7
+179 143 121 93 118 117 98
+123 4
+143 90 85 93
+124 5
+160 140 94 110 139
+125 5
+144 138 134 127 109
+126 6
+153 152 135 133 111 104
+127 4
+138 136 125 108
+128 7
+161 146 142 135 104 105 107
+129 4
+150 95 116 92
+130 3
+141 132 70
+131 3
+113 114 147
+132 6
+158 157 156 141 130 70
+133 5
+152 151 111 114 126
+134 4
+154 112 125 137
+135 5
+146 104 126 128 153
+136 8
+186 176 163 144 138 108 127 120
+137 6
+166 154 142 107 112 134
+138 4
+144 125 127 136
+139 5
+164 160 101 124 94
+140 8
+165 145 160 94 100 110 124 116
+141 4
+155 132 130 70
+142 5
+177 166 161 128 137
+143 5
+179 85 123 93 122
+144 6
+176 166 154 136 125 138
+145 5
+168 165 150 140 116
+146 5
+167 161 128 135 153
+147 5
+170 149 148 113 131
+148 5
+169 150 147 92 113
+149 4
+151 147 170 114
+150 6
+169 168 145 116 129 148
+151 4
+171 152 149 133
+152 5
+171 153 151 133 126
+153 6
+172 167 152 126 146 135
+154 4
+166 134 137 144
+155 4
+162 156 70 141
+156 5
+175 162 157 155 132
+157 4
+175 158 156 132
+158 4
+175 157 159 132
+159 2
+70 158
+160 5
+165 139 164 124 140
+161 6
+178 177 167 142 128 146
+162 5
+175 174 156 155 70
+163 7
+186 182 181 136 120 115 74
+164 6
+193 165 179 139 204 160
+165 6
+193 168 140 160 164 145
+166 6
+176 142 144 177 137 154
+167 5
+178 173 153 161 146
+168 5
+193 169 145 150 165
+169 6
+198 187 170 148 150 168
+170 5
+187 171 169 147 149
+171 5
+188 172 151 152 170
+172 5
+190 188 173 171 153
+173 5
+191 178 167 172 190
+174 3
+175 162 70
+175 6
+156 157 158 162 174 70
+176 10
+224 206 203 199 186 144 166 177 184 136
+177 6
+184 178 161 142 176 166
+178 8
+197 192 191 184 161 167 173 177
+179 7
+204 143 122 164 118 101 97
+180 7
+219 215 212 196 183 74 56
+181 5
+202 194 186 182 163
+182 4
+194 183 163 181
+183 8
+214 207 196 194 225 180 182 74
+184 5
+195 192 177 178 176
+185 12
+312 304 298 293 292 289 285 279 244 189 265 70
+186 5
+203 136 176 163 181
+187 4
+198 188 169 170
+188 7
+205 201 200 190 171 187 172
+189 3
+185 70 31
+190 5
+200 191 188 172 173
+191 5
+197 173 190 200 178
+192 6
+210 208 195 178 184 197
+193 9
+271 232 220 201 198 204 164 165 168
+194 5
+214 181 182 183 202
+195 4
+208 199 184 192
+196 6
+212 211 209 207 183 180
+197 7
+216 213 210 191 200 178 192
+198 4
+201 169 187 193
+199 6
+222 218 208 206 195 176
+200 6
+213 205 188 190 197 191
+201 5
+221 205 188 193 198
+202 4
+214 203 181 194
+203 6
+225 224 214 176 186 202
+204 8
+371 353 322 288 271 179 193 164
+205 7
+234 231 221 213 188 201 200
+206 6
+226 224 223 176 199 222
+207 8
+236 217 215 212 209 183 225 196
+208 5
+218 210 192 195 199
+209 4
+217 211 196 207
+210 6
+218 192 197 208 216 229
+211 4
+212 196 209 217
+212 6
+215 207 217 180 196 211
+213 7
+276 234 230 216 205 200 197
+214 5
+183 194 202 203 225
+215 6
+233 219 207 212 236 180
+216 4
+230 213 197 210
+217 4
+207 209 212 211
+218 6
+228 227 222 208 210 199
+219 3
+215 180 56
+220 4
+232 231 221 193
+221 4
+231 220 205 201
+222 5
+227 226 218 199 206
+223 6
+237 235 226 225 224 206
+224 4
+176 206 203 223
+225 7
+236 235 223 203 214 207 183
+226 5
+237 227 223 206 222
+227 5
+238 228 218 222 226
+228 5
+240 238 229 227 218
+229 4
+241 230 228 210
+230 5
+276 242 213 229 216
+231 5
+239 234 220 221 205
+232 5
+255 239 193 220 271
+233 5
+253 251 215 236 56
+234 7
+276 252 243 205 213 231 239
+235 8
+273 261 250 246 237 236 223 225
+236 11
+256 254 251 247 245 225 235 250 207 233 215
+237 5
+246 226 238 223 235
+238 5
+258 240 228 227 237
+239 5
+255 252 231 232 234
+240 5
+264 258 241 228 238
+241 4
+264 229 240 242
+242 6
+276 274 264 263 230 241
+243 3
+276 252 234
+244 3
+294 279 185
+245 3
+257 247 236
+246 4
+261 237 258 235
+247 4
+259 254 236 245
+248 3
+253 249 56
+249 5
+291 265 253 248 56
+250 5
+280 260 256 235 236
+251 4
+262 233 236 257
+252 9
+276 272 263 255 234 239 243 271 288
+253 7
+291 281 268 262 233 248 249
+254 4
+266 256 247 236
+255 4
+232 239 252 271
+256 5
+270 250 254 260 236
+257 4
+267 259 245 251
+258 5
+264 261 240 238 246
+259 5
+269 267 266 247 257
+260 4
+277 270 250 256
+261 4
+246 258 264 235
+262 4
+268 267 251 253
+263 5
+284 276 274 252 242
+264 8
+283 274 273 240 241 242 258 261
+265 7
+308 304 362 291 249 70 185
+266 4
+275 270 254 259
+267 5
+268 257 259 262 269
+268 5
+281 253 262 267 278
+269 4
+278 275 259 267
+270 7
+287 275 256 260 266 277 290
+271 6
+288 193 204 255 252 232
+272 3
+284 252 288
+273 3
+280 264 235
+274 6
+305 286 283 242 263 264
+275 4
+282 266 270 269
+276 7
+213 234 243 252 230 242 263
+277 4
+290 280 260 270
+278 5
+307 282 281 269 268
+279 4
+296 295 244 185
+280 5
+273 283 300 250 277
+281 7
+314 291 302 253 268 278 307
+282 5
+301 299 287 275 278
+283 5
+300 286 274 264 280
+284 6
+309 305 303 288 272 263
+285 3
+304 289 185
+286 4
+300 297 274 283
+287 5
+311 301 290 270 282
+288 7
+322 309 204 271 284 272 252
+289 4
+304 298 285 185
+290 5
+313 300 277 287 270
+291 11
+362 355 342 338 337 332 314 281 249 253 265
+292 3
+293 312 185
+293 4
+312 294 185 292
+294 4
+312 295 293 244
+295 4
+312 296 294 279
+296 4
+312 295 298 279
+297 3
+305 300 286
+298 5
+312 304 289 185 296
+299 4
+307 306 301 282
+300 12
+326 323 317 313 310 305 322 286 297 283 290 280
+301 7
+316 315 311 306 287 282 299
+302 6
+321 318 314 307 306 281
+303 4
+310 309 305 284
+304 6
+308 265 285 289 298 185
+305 6
+310 284 303 300 274 297
+306 5
+316 307 301 302 299
+307 5
+299 306 278 302 281
+308 2
+265 304
+309 5
+310 288 284 303 322
+310 5
+303 305 309 322 300
+311 6
+319 315 287 301 313 320
+312 7
+294 295 296 298 293 185 292
+313 4
+317 300 290 311
+314 6
+333 332 321 281 302 291
+315 6
+331 330 319 316 301 311
+316 6
+331 325 318 301 306 315
+317 5
+324 323 320 300 313
+318 4
+325 321 316 302
+319 4
+330 311 315 320
+320 6
+330 329 324 317 319 311
+321 6
+334 302 314 318 325 336
+322 9
+360 344 326 204 353 288 310 309 300
+323 4
+327 324 300 317
+324 4
+328 323 317 320
+325 6
+336 335 331 316 318 321
+326 7
+349 344 340 339 327 322 300
+327 4
+339 328 323 326
+328 4
+339 329 327 324
+329 4
+346 330 328 320
+330 6
+331 315 319 320 329 346
+331 8
+348 335 315 316 330 345 346 325
+332 6
+356 341 337 333 314 291
+333 8
+356 355 334 352 361 314 332 336
+334 3
+321 333 336
+335 5
+351 350 336 331 325
+336 7
+352 351 335 325 334 333 321
+337 4
+341 338 332 291
+338 4
+342 341 291 337
+339 5
+346 340 326 327 328
+340 4
+347 346 326 339
+341 7
+356 355 354 342 332 337 338
+342 4
+355 291 341 338
+343 7
+368 357 347 346 345 366 367
+344 4
+359 349 322 326
+345 5
+357 348 346 343 331
+346 7
+339 340 329 343 345 331 330
+347 3
+349 343 340
+348 5
+358 350 345 357 331
+349 4
+367 344 326 347
+350 5
+361 358 351 348 335
+351 5
+361 352 350 335 336
+352 4
+361 351 336 333
+353 11
+397 395 393 392 387 378 374 360 371 204 322
+354 3
+356 355 341
+355 9
+362 356 333 354 361 370 291 341 342
+356 5
+333 354 341 355 332
+357 6
+369 368 358 343 345 348
+358 5
+363 361 348 350 357
+359 4
+367 364 360 344
+360 6
+374 373 364 322 353 359
+361 8
+370 363 350 358 351 352 355 333
+362 5
+398 370 291 355 265
+363 5
+385 370 358 369 361
+364 5
+377 373 365 359 360
+365 5
+377 372 367 366 364
+366 5
+372 368 367 365 343
+367 5
+359 349 365 366 343
+368 6
+376 372 369 366 343 357
+369 4
+375 368 357 363
+370 8
+398 385 384 393 361 363 362 355
+371 4
+396 392 204 353
+372 5
+376 365 366 368 377
+373 4
+378 360 364 374
+374 3
+353 360 373
+375 4
+385 379 376 369
+376 7
+381 380 379 368 372 377 375
+377 6
+382 378 364 365 376 372
+378 5
+387 382 353 373 377
+379 4
+385 380 376 375
+380 5
+385 383 381 376 379
+381 6
+389 383 382 386 380 376
+382 5
+388 386 378 377 381
+383 5
+391 390 385 381 380
+384 3
+385 393 370
+385 9
+380 383 384 391 393 370 375 379 363
+386 5
+389 388 382 393 381
+387 4
+395 388 353 378
+388 5
+394 387 382 386 393
+389 4
+390 381 386 393
+390 4
+391 389 383 393
+391 4
+390 383 393 385
+392 4
+399 397 371 353
+393 12
+397 395 394 353 391 390 389 388 386 385 384 370
+394 3
+395 388 393
+395 4
+353 387 394 393
+396 2
+399 371
+397 6
+401 400 402 393 392 353
+398 3
+403 370 362
+399 4
+402 400 396 392
+400 4
+402 401 399 397
+401 3
+400 402 397
+402 4
+399 400 401 397
+403 1
+398
diff --git a/pysal/examples/sacramentot2.sbn b/pysal/examples/sacramentot2.sbn
new file mode 100644
index 0000000..7222396
Binary files /dev/null and b/pysal/examples/sacramentot2.sbn differ
diff --git a/pysal/examples/sacramentot2.sbx b/pysal/examples/sacramentot2.sbx
new file mode 100644
index 0000000..b5447a0
Binary files /dev/null and b/pysal/examples/sacramentot2.sbx differ
diff --git a/pysal/examples/sacramentot2.shp b/pysal/examples/sacramentot2.shp
new file mode 100644
index 0000000..1050f67
Binary files /dev/null and b/pysal/examples/sacramentot2.shp differ
diff --git a/pysal/examples/sacramentot2.shx b/pysal/examples/sacramentot2.shx
new file mode 100644
index 0000000..efe3f77
Binary files /dev/null and b/pysal/examples/sacramentot2.shx differ
diff --git a/pysal/examples/sids2.dbf b/pysal/examples/sids2.dbf
new file mode 100644
index 0000000..753885d
Binary files /dev/null and b/pysal/examples/sids2.dbf differ
diff --git a/pysal/examples/sids2.gal b/pysal/examples/sids2.gal
new file mode 100644
index 0000000..aa280d1
--- /dev/null
+++ b/pysal/examples/sids2.gal
@@ -0,0 +1,201 @@
+0 100 sids2 FIPSNO
+37009 3
+37189 37193 37005
+37005 3
+37193 37171 37009
+37171 5
+37067 37197 37193 37169 37005
+37053 2
+37055 37029
+37131 4
+37083 37185 37091 37015
+37091 3
+37015 37073 37131
+37029 3
+37139 37073 37053
+37073 5
+37041 37143 37139 37029 37091
+37185 4
+37069 37181 37131 37083
+37169 3
+37067 37157 37171
+37033 4
+37135 37001 37145 37157
+37157 4
+37081 37033 37001 37169
+37077 5
+37183 37063 37069 37181 37145
+37145 4
+37063 37135 37077 37033
+37181 3
+37069 37185 37077
+37083 6
+37117 37065 37127 37015 37131 37185
+37139 3
+37143 37029 37073
+37193 8
+37003 37097 37027 37189 37197 37171 37005 37009
+37189 4
+37027 37011 37193 37009
+37143 3
+37041 37139 37073
+37041 2
+37143 37073
+37011 5
+37111 37023 37027 37121 37189
+37197 5
+37059 37097 37067 37171 37193
+37069 5
+37183 37127 37077 37185 37181
+37067 6
+37057 37059 37081 37197 37169 37171
+37081 5
+37151 37057 37001 37157 37067
+37001 6
+37037 37151 37135 37033 37081 37157
+37015 5
+37187 37117 37091 37083 37131
+37135 5
+37037 37063 37001 37145 37033
+37063 5
+37037 37183 37135 37077 37145
+37127 5
+37101 37195 37065 37083 37069
+37121 3
+37111 37199 37011
+37065 5
+37147 37195 37117 37083 37127
+37027 6
+37023 37003 37035 37011 37193 37189
+37199 4
+37021 37111 37115 37121
+37117 6
+37147 37013 37065 37015 37187 37083
+37183 6
+37085 37101 37037 37069 37077 37063
+37115 3
+37087 37021 37199
+37097 9
+37119 37109 37035 37003 37159 37025 37059 37197 37193
+37059 5
+37159 37057 37067 37197 37097
+37003 4
+37035 37097 37193 37027
+37057 6
+37159 37151 37123 37081 37059 37067
+37023 6
+37045 37161 37111 37035 37011 37027
+37187 5
+37013 37177 37095 37015 37117
+37177 2
+37095 37187
+37111 6
+37021 37023 37161 37011 37121 37199
+37151 6
+37123 37037 37125 37001 37081 37057
+37037 8
+37125 37105 37183 37085 37063 37001 37135 37151
+37195 6
+37191 37079 37101 37065 37147 37127
+37159 5
+37167 37025 37057 37059 37097
+37147 7
+37107 37079 37013 37049 37065 37117 37195
+37035 5
+37109 37097 37003 37023 37027
+37021 6
+37089 37087 37161 37111 37115 37199
+37101 6
+37163 37085 37191 37195 37183 37127
+37087 5
+37175 37099 37173 37021 37115
+37055 2
+37095 37053
+37013 6
+37049 37137 37095 37187 37147 37117
+37173 4
+37113 37075 37087 37099
+37079 4
+37107 37191 37147 37195
+37105 3
+37125 37085 37037
+37161 6
+37149 37089 37045 37023 37021 37111
+37191 6
+37061 37163 37107 37079 37195 37101
+37085 7
+37051 37163 37125 37101 37183 37105 37037
+37045 4
+37071 37161 37109 37023
+37109 5
+37071 37119 37097 37035 37045
+37099 4
+37113 37175 37087 37173
+37125 8
+37153 37093 37123 37085 37051 37105 37037 37151
+37119 5
+37071 37179 37025 37109 37097
+37025 5
+37179 37167 37159 37119 37097
+37123 6
+37153 37007 37167 37125 37151 37057
+37167 5
+37179 37123 37007 37025 37159
+37089 4
+37149 37175 37161 37021
+37075 3
+37039 37113 37173
+37107 6
+37061 37103 37049 37191 37147 37079
+37175 3
+37089 37099 37087
+37071 3
+37119 37109 37045
+37149 2
+37161 37089
+37113 5
+37043 37039 37099 37075 37173
+37163 7
+37017 37051 37141 37061 37191 37085 37101
+37137 2
+37049 37013
+37039 3
+37113 37043 37075
+37051 6
+37017 37155 37093 37163 37085 37125
+37103 5
+37133 37061 37031 37049 37107
+37179 4
+37007 37167 37025 37119
+37007 4
+37153 37123 37179 37167
+37093 4
+37155 37165 37051 37125
+37095 4
+37055 37177 37013 37187
+37061 6
+37141 37103 37133 37107 37191 37163
+37153 4
+37165 37007 37125 37123
+37043 2
+37113 37039
+37049 6
+37031 37137 37013 37103 37107 37147
+37165 3
+37155 37093 37153
+37133 4
+37141 37031 37103 37061
+37155 5
+37047 37017 37051 37093 37165
+37031 3
+37049 37133 37103
+37017 5
+37047 37141 37163 37051 37155
+37141 7
+37019 37129 37047 37133 37061 37017 37163
+37047 4
+37019 37141 37017 37155
+37129 2
+37019 37141
+37019 3
+37129 37141 37047
diff --git a/pysal/examples/sids2.html b/pysal/examples/sids2.html
new file mode 100644
index 0000000..7936caa
--- /dev/null
+++ b/pysal/examples/sids2.html
@@ -0,0 +1,124 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+      "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <title>SAL Data Sets - SIDS2</title>
+</head>
+
+<body>
+
+<h1>SIDS2</h1>
+
+<h2>Data provided "as is," no warranties</h2>
+
+<h2>Description</h2>
+
+<p>Sudden Infant Death Syndrome sample data for North Carolina
+   counties, two time periods (1974-78 and 1979-84). Same as
+   SIDS data set, except that the computed rates are included.</p>
+
+<p>Type = polygon shape file, unprojected, lat-lon</p>
+
+<p>Observations = 100</p>
+
+<p>Variables = 18</p> 
+
+
+<h2>Source</h2>
+
+<p>Cressie, Noel (1993). Statistics for Spatial Data.
+   New York, Wiley, pp. 386-389. Rates computed.</p>
+
+<h2>Variables</h2>
+
+<table>
+<thead>
+   <tr>
+      <th>Variable</th>
+      <th>Description</th>
+   </tr>
+</thead>
+<tbody>
+   <tr>
+      <td>AREA</td>
+      <td>county area (computed by ArcView)</td>
+   </tr>
+   <tr>
+       <td>PERIMETER</td>
+       <td>county perimeter (computed by ArcView)</td>
+   </tr>
+   <tr>
+       <td>CNTY_</td>
+       <td>county internal ID</td>
+   </tr>
+   <tr>
+       <td>CNTY_ID</td>
+       <td>county internal ID</td>
+   </tr>
+   <tr>
+       <td>NAME</td>
+       <td>county name</td>
+   </tr>
+   <tr>
+       <td>FIPS</td>
+       <td>county fips code, as character (state code + county code)</td>
+   </tr>
+   <tr>
+       <td>FIPSNO</td>
+       <td>county fips code, numeric, used in GeoDa User's Guide and tutorials</td>
+   </tr>
+   <tr>
+       <td>CRESS_ID</td>
+       <td>county ID used by Cressie</td>
+   </tr>
+   <tr>
+       <td>BIR74</td>
+       <td>live births, 1974-78</td>
+   </tr>
+   <tr>
+       <td>SID74</td>
+       <td>SIDS deaths, 1974-78</td>
+   </tr>
+   <tr>
+       <td>NWBIR74</td>
+       <td>non-white births, 1974-78</td>
+   </tr>
+   <tr>
+       <td>BIR79</td>
+       <td>live births, 1979-84</td>
+   </tr>
+   <tr>
+       <td>SID79</td>
+       <td>SIDS deaths, 1979-84</td>
+   </tr>
+   <tr>
+       <td>NWBIR79</td>
+       <td>non-white births, 1979-84</td>
+   </tr>
+   <tr>
+       <td>SIDR74</td>
+       <td>SIDS death rate per 1,000 (1974-78)</td>
+   </tr>
+   <tr>
+       <td>SIDR79</td>
+       <td>SIDS death rate per 1,000 (1979-84)</td>
+   </tr>
+   <tr>
+       <td>NWR74</td>
+       <td>non-white birth rate (non-white per 1000 births), 1974-78</td>
+   </tr>
+   <tr>
+       <td>NWR79</td>
+       <td>non-white birth rate (non-white per 1000 births), 1979-84</td>
+   </tr>
+</tbody>
+</table>
+
+<br />
+<hr />
+<p>Prepared by <a href="mailto:anselin at uiuc.edu">Luc Anselin</a></p>
+<p><a href="http://sal.agecon.uiuc.edu">UIUC-ACE Spatial Analysis Laboratory</a></p>
+<p>Last updated June 16, 2003</p>
+</body>
+</html>
diff --git a/pysal/examples/sids2.shp b/pysal/examples/sids2.shp
new file mode 100644
index 0000000..79c4a41
Binary files /dev/null and b/pysal/examples/sids2.shp differ
diff --git a/pysal/examples/sids2.shx b/pysal/examples/sids2.shx
new file mode 100644
index 0000000..670fa43
Binary files /dev/null and b/pysal/examples/sids2.shx differ
diff --git a/pysal/examples/snow_maps/SohoPeople.dbf b/pysal/examples/snow_maps/SohoPeople.dbf
new file mode 100644
index 0000000..f39d10c
Binary files /dev/null and b/pysal/examples/snow_maps/SohoPeople.dbf differ
diff --git a/pysal/examples/snow_maps/SohoPeople.prj b/pysal/examples/snow_maps/SohoPeople.prj
new file mode 100644
index 0000000..5c6f76d
--- /dev/null
+++ b/pysal/examples/snow_maps/SohoPeople.prj
@@ -0,0 +1 @@
+PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]]
\ No newline at end of file
diff --git a/pysal/examples/snow_maps/SohoPeople.sbn b/pysal/examples/snow_maps/SohoPeople.sbn
new file mode 100644
index 0000000..b1fb71e
Binary files /dev/null and b/pysal/examples/snow_maps/SohoPeople.sbn differ
diff --git a/pysal/examples/snow_maps/SohoPeople.sbx b/pysal/examples/snow_maps/SohoPeople.sbx
new file mode 100644
index 0000000..1e433b7
Binary files /dev/null and b/pysal/examples/snow_maps/SohoPeople.sbx differ
diff --git a/pysal/examples/snow_maps/SohoPeople.shp b/pysal/examples/snow_maps/SohoPeople.shp
new file mode 100644
index 0000000..7fa8187
Binary files /dev/null and b/pysal/examples/snow_maps/SohoPeople.shp differ
diff --git a/pysal/examples/snow_maps/SohoPeople.shx b/pysal/examples/snow_maps/SohoPeople.shx
new file mode 100644
index 0000000..5ad7991
Binary files /dev/null and b/pysal/examples/snow_maps/SohoPeople.shx differ
diff --git a/pysal/examples/snow_maps/SohoWater.dbf b/pysal/examples/snow_maps/SohoWater.dbf
new file mode 100644
index 0000000..2352fc6
Binary files /dev/null and b/pysal/examples/snow_maps/SohoWater.dbf differ
diff --git a/pysal/examples/snow_maps/SohoWater.prj b/pysal/examples/snow_maps/SohoWater.prj
new file mode 100644
index 0000000..5c6f76d
--- /dev/null
+++ b/pysal/examples/snow_maps/SohoWater.prj
@@ -0,0 +1 @@
+PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]]
\ No newline at end of file
diff --git a/pysal/examples/snow_maps/SohoWater.sbn b/pysal/examples/snow_maps/SohoWater.sbn
new file mode 100644
index 0000000..dc382d6
Binary files /dev/null and b/pysal/examples/snow_maps/SohoWater.sbn differ
diff --git a/pysal/examples/snow_maps/SohoWater.sbx b/pysal/examples/snow_maps/SohoWater.sbx
new file mode 100644
index 0000000..f0e9eea
Binary files /dev/null and b/pysal/examples/snow_maps/SohoWater.sbx differ
diff --git a/pysal/examples/snow_maps/SohoWater.shp b/pysal/examples/snow_maps/SohoWater.shp
new file mode 100644
index 0000000..b1ad50e
Binary files /dev/null and b/pysal/examples/snow_maps/SohoWater.shp differ
diff --git a/pysal/examples/snow_maps/SohoWater.shx b/pysal/examples/snow_maps/SohoWater.shx
new file mode 100644
index 0000000..3a4b6ce
Binary files /dev/null and b/pysal/examples/snow_maps/SohoWater.shx differ
diff --git a/pysal/examples/snow_maps/Soho_Network.dbf b/pysal/examples/snow_maps/Soho_Network.dbf
new file mode 100644
index 0000000..0c9b08b
Binary files /dev/null and b/pysal/examples/snow_maps/Soho_Network.dbf differ
diff --git a/pysal/examples/snow_maps/Soho_Network.prj b/pysal/examples/snow_maps/Soho_Network.prj
new file mode 100644
index 0000000..5c6f76d
--- /dev/null
+++ b/pysal/examples/snow_maps/Soho_Network.prj
@@ -0,0 +1 @@
+PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]]
\ No newline at end of file
diff --git a/pysal/examples/snow_maps/Soho_Network.sbn b/pysal/examples/snow_maps/Soho_Network.sbn
new file mode 100644
index 0000000..e1bf444
Binary files /dev/null and b/pysal/examples/snow_maps/Soho_Network.sbn differ
diff --git a/pysal/examples/snow_maps/Soho_Network.sbx b/pysal/examples/snow_maps/Soho_Network.sbx
new file mode 100644
index 0000000..f0c9317
Binary files /dev/null and b/pysal/examples/snow_maps/Soho_Network.sbx differ
diff --git a/pysal/examples/snow_maps/Soho_Network.shp b/pysal/examples/snow_maps/Soho_Network.shp
new file mode 100644
index 0000000..593cea3
Binary files /dev/null and b/pysal/examples/snow_maps/Soho_Network.shp differ
diff --git a/pysal/examples/snow_maps/Soho_Network.shx b/pysal/examples/snow_maps/Soho_Network.shx
new file mode 100644
index 0000000..4473fa2
Binary files /dev/null and b/pysal/examples/snow_maps/Soho_Network.shx differ
diff --git a/pysal/examples/south.dbf b/pysal/examples/south.dbf
new file mode 100644
index 0000000..336ead8
Binary files /dev/null and b/pysal/examples/south.dbf differ
diff --git a/pysal/examples/south.shp b/pysal/examples/south.shp
new file mode 100644
index 0000000..7fc6b91
Binary files /dev/null and b/pysal/examples/south.shp differ
diff --git a/pysal/examples/south.shx b/pysal/examples/south.shx
new file mode 100644
index 0000000..0655fc1
Binary files /dev/null and b/pysal/examples/south.shx differ
diff --git a/pysal/examples/south_q.gal b/pysal/examples/south_q.gal
new file mode 100644
index 0000000..8188264
--- /dev/null
+++ b/pysal/examples/south_q.gal
@@ -0,0 +1,2825 @@
+0 1412 south FIPSNO
+54029 1
+54009
+54009 2
+54069 54029
+54069 2
+54051 54009
+54051 2
+54103 54069
+10003 3
+24029 24015 10001
+24043 6
+54037 54003 54065 24001 24021 51107
+24001 5
+54027 54057 24023 24043 54065
+24015 3
+24029 24025 10003
+24023 5
+54093 54023 54077 54057 24001
+24025 2
+24005 24015
+54061 4
+54091 54049 54103 54077
+54077 6
+54001 54091 24023 54023 54093 54061
+24013 4
+24031 24027 24021 24005
+24005 5
+24003 24027 24510 24025 24013
+54103 6
+54017 54095 54049 54033 54061 54051
+24021 5
+51107 24013 24027 24031 24043
+54065 5
+51069 54027 54003 24043 24001
+54057 5
+54023 54027 54031 24001 24023
+54049 4
+54091 54033 54061 54103
+54003 5
+51043 51069 54037 24043 54065
+54095 4
+54085 54073 54103 54017
+54027 6
+54031 54023 54065 51069 24001 54057
+54037 5
+51043 51069 51107 54003 24043
+54073 3
+54107 54095 54085
+54033 7
+54097 54041 54017 54091 54001 54049 54103
+51069 9
+51187 51171 51840 54031 51043 54037 54003 54065 54027
+54017 6
+54021 54085 54033 54041 54103 54095
+54091 5
+54077 54001 54049 54061 54033
+54107 4
+54035 54105 54073 54085
+54085 7
+54013 54105 54021 54017 54095 54107 54073
+24029 4
+10003 10001 24015 24035
+24510 2
+24005 24003
+10001 5
+24011 24035 10005 24029 10003
+24027 6
+24033 24031 24005 24003 24013 24021
+24031 8
+51059 51013 11001 51107 24033 24013 24027 24021
+54023 8
+54071 54083 54093 54031 54027 54057 24023 54077
+51107 8
+51153 51061 51043 24031 51059 24021 54037 24043
+54001 6
+54097 54093 54083 54077 54091 54033
+24035 4
+24041 24011 10001 24029
+54093 5
+54083 54023 24023 54001 54077
+51043 6
+51187 51107 51061 54037 54003 51069
+54031 7
+51165 54071 51069 51171 54027 54023 54057
+24003 5
+24009 24033 24005 24027 24510
+51840 1
+51069
+54105 5
+54087 54035 54085 54013 54107
+54041 6
+54007 54021 54097 54101 54033 54017
+21015 3
+21081 21077 21117
+24011 5
+24019 24041 10005 10001 24035
+24033 8
+51059 24017 51510 11001 24009 24003 24027 24031
+54083 7
+54075 54101 54097 54071 54023 54093 54001
+21037 2
+21191 21117
+54097 5
+54101 54001 54083 54033 54041
+54021 5
+54013 54041 54007 54017 54085
+51171 5
+51139 51165 51187 51069 54031
+54035 6
+54079 54053 54087 54039 54105 54107
+21117 4
+21081 21037 21191 21015
+54013 6
+54087 54007 54015 54021 54085 54105
+51187 6
+51139 51157 51043 51061 51069 51171
+54053 3
+54011 54079 54035
+51061 7
+51047 51157 51179 51153 51107 51187 51043
+11001 5
+51059 51510 51013 24033 24031
+10005 5
+24047 24045 24019 24011 10001
+54071 7
+51091 54075 51165 51015 54031 54023 54083
+24041 3
+24019 24011 24035
+54087 5
+54039 54013 54015 54105 54035
+51013 5
+51059 51510 51610 11001 24031
+54007 6
+54067 54015 54101 54041 54013 54021
+51610 2
+51059 51013
+21191 5
+21081 21023 21097 21037 21117
+21077 4
+21041 21081 21015 21187
+51157 5
+51113 51139 51061 51047 51187
+51510 4
+51059 24033 11001 51013
+51165 8
+51660 51015 51079 51003 51139 51171 54031 54071
+51139 6
+51079 51157 51113 51171 51187 51165
+21023 4
+21097 21201 21161 21191
+21081 7
+21187 21191 21097 21209 21117 21015 21077
+24009 4
+24037 24017 24003 24033
+21041 4
+21103 21223 21187 21077
+21161 4
+21201 21135 21069 21023
+21089 3
+21019 21043 21135
+54101 7
+54067 54083 54075 54025 54097 54007 54041
+21223 3
+21185 21041 21103
+54075 6
+54025 51091 51017 54071 54083 54101
+21187 6
+21073 21103 21081 21209 21041 21077
+21135 5
+21205 21069 21089 21043 21161
+24017 4
+51059 24009 24037 24033
+24019 4
+10005 24045 24011 24041
+51047 6
+51137 51113 51179 51177 51061 51157
+54079 5
+54043 54011 54035 54039 54053
+54015 6
+54019 54039 54007 54067 54087 54013
+54039 9
+54005 54043 54067 54019 54081 54015 54087 54079 54035
+51113 5
+51079 51047 51157 51137 51139
+21201 5
+21097 21161 21069 21023 21181
+21103 6
+21211 21185 21187 21073 21041 21223
+51179 7
+51153 51630 51177 51099 51033 51047 51061
+54011 4
+54099 54079 54053 54043
+51091 4
+51017 51015 54071 54075
+21097 7
+21017 21209 21023 21201 21181 21081 21191
+54067 6
+54019 54025 54101 54007 54039 54015
+24045 4
+24039 10005 24047 24019
+21185 4
+21111 21223 21103 21211
+21069 6
+21011 21181 21205 21135 21201 21161
+24037 2
+24009 24017
+21043 6
+21063 21205 21127 21019 21089 21135
+21019 4
+54099 21127 21089 21043
+21209 7
+21239 21067 21073 21017 21097 21187 21081
+51015 9
+51163 51125 51820 51790 51017 51003 51165 51091 54071
+51079 5
+51137 51003 51113 51139 51165
+51660 1
+51165
+21181 6
+21173 21017 21069 21011 21097 21201
+24047 4
+51001 24039 10005 24045
+54099 6
+21159 21127 54043 54059 54011 21019
+51137 6
+51109 51003 51177 51047 51079 51113
+21205 7
+21165 21011 21043 21063 21175 21135 21069
+21111 5
+21093 21029 21211 21215 21185
+51177 7
+51109 51630 51033 51085 51179 51137 51047
+51099 4
+51033 51193 51057 51179
+54043 7
+54059 54039 54005 54045 54079 54099 54011
+21073 6
+21005 21239 21211 21209 21187 21103
+21017 7
+21049 21067 21011 21173 21181 21097 21209
+21211 6
+21005 21215 21073 21103 21111 21185
+51630 2
+51179 51177
+21011 6
+21165 21173 21205 21069 21017 21181
+24039 3
+24047 51001 24045
+51193 4
+51159 51057 51133 51099
+51003 9
+51029 51125 51540 51109 51065 51137 51079 51015 51165
+21127 7
+21115 21175 21063 54099 21159 21019 21043
+21063 4
+21175 21127 21043 21205
+51017 6
+51005 54025 51015 51091 51163 54075
+54025 8
+54063 54089 54019 51005 51017 54075 54067 54101
+54019 6
+54081 54025 54067 54089 54015 54039
+51033 7
+51085 51057 51097 51101 51099 51177 51179
+54005 5
+54045 54081 54109 54039 54043
+21067 6
+21113 21239 21017 21049 21151 21209
+21163 2
+21027 21093
+21173 6
+21049 21165 21011 21197 21181 21017
+21239 6
+21167 21005 21209 21067 21113 21073
+51790 1
+51015
+51057 6
+51097 51119 51159 51193 51033 51099
+21215 5
+21179 21029 21005 21211 21111
+51109 6
+51075 51065 51085 51177 51137 51003
+21005 7
+21229 21179 21239 21073 21167 21211 21215
+21175 7
+21237 21153 21165 21115 21127 21063 21205
+21029 4
+21093 21179 21215 21111
+21049 6
+21151 21173 21197 21065 21017 21067
+51159 4
+51133 51103 51193 51057
+51820 1
+51015
+51540 1
+51003
+21165 6
+21197 21175 21237 21205 21011 21173
+21027 5
+21183 21091 21093 21163 21085
+51125 6
+51163 51009 51003 51029 51011 51015
+51001 3
+51131 24047 24039
+51133 3
+51103 51159 51193
+54045 4
+54059 54109 54005 54043
+21093 8
+21099 21085 21179 21029 21123 21111 21027 21163
+51085 7
+51087 51075 51101 51127 51033 51109 51177
+51065 5
+51049 51029 51075 51109 51003
+21115 5
+21071 21153 21159 21127 21175
+21113 5
+21079 21167 21151 21067 21239
+21091 3
+21183 21059 21027
+21179 7
+21155 21123 21229 21005 21215 21093 21029
+54081 6
+54055 54109 54089 54019 54005 54039
+54059 8
+21195 21159 54109 54047 54045 51027 54043 54099
+51097 7
+51127 51101 51119 51073 51095 51057 51033
+21101 4
+21233 21225 21059 21149
+21167 6
+21021 21229 21113 21079 21239 21005
+21159 6
+21071 54059 21195 54099 21115 21127
+51005 8
+51045 54063 51023 51580 51560 51163 51017 54025
+21197 6
+21065 21237 21165 21129 21049 21173
+21059 4
+21149 21091 21183 21101
+21151 7
+21203 21079 21065 21109 21049 21113 21067
+51101 4
+51127 51097 51085 51033
+21229 5
+21155 21167 21021 21179 21005
+51075 7
+51145 51049 51087 51085 51041 51109 51065
+21225 3
+21055 21101 21233
+21153 6
+21025 21237 21115 21071 21119 21175
+54089 5
+54055 54025 54063 54081 54019
+21237 6
+21129 21153 21175 21025 21197 21165
+51560 1
+51005
+21065 5
+21109 21129 21197 21151 21049
+51103 2
+51133 51159
+21079 6
+21137 21021 21151 21203 21113 21167
+51009 6
+51163 51019 51031 51680 51011 51125
+51580 1
+51005
+54109 6
+54047 54055 54081 54059 54045 54005
+51023 5
+51161 51045 51019 51163 51005
+51029 6
+51011 51049 51147 51065 51003 51125
+51119 3
+51073 51097 51057
+21071 5
+21119 21195 21159 21115 21153
+21195 8
+51195 21133 51051 21119 51027 54059 21071 21159
+51049 6
+51147 51145 51007 51075 51065 51029
+51530 1
+51163
+21183 7
+21177 21149 21085 21031 21027 21091 21059
+21123 6
+21099 21087 21155 21217 21179 21093
+21155 6
+21217 21045 21021 21229 21179 21123
+54063 6
+51071 54055 51045 51005 54025 54089
+21129 6
+21189 21109 21237 21025 21065 21197
+21021 6
+21045 21079 21137 21167 21155 21229
+51087 7
+51041 51760 51145 51127 51036 51085 51075
+21025 6
+21193 21189 21119 21153 21129 21237
+51145 5
+51007 51087 51041 51075 51049
+21149 6
+21107 21233 21183 21177 21059 21101
+51045 6
+51161 51121 51071 51023 51005 54063
+21233 6
+21033 21055 21107 21149 21101 21225
+21137 5
+21045 21203 21199 21079 21021
+51127 6
+51036 51095 51097 51101 51087 51085
+21085 6
+21061 21031 21099 21093 21183 21027
+51760 2
+51087 51041
+51073 4
+51095 51115 51119 51097
+54055 8
+51021 51185 54047 54063 51071 54089 54081 54109
+21109 7
+21125 21203 21189 21051 21129 21065 21151
+21107 5
+21033 21177 21149 21047 21233
+51041 11
+51730 51053 51570 51670 51007 51036 51149 51087 51760 51145 51075
+51011 6
+51037 51031 51147 51029 51009 51125
+51131 1
+51001
+21045 7
+21207 21001 21217 21137 21199 21021 21155
+54047 5
+51027 54055 54109 51185 54059
+21055 5
+21143 21033 21139 21233 21225
+21189 5
+21051 21193 21025 21129 21109
+21203 6
+21199 21109 21125 21151 21137 21079
+21119 6
+21193 21195 21071 21133 21025 21153
+51027 6
+51167 51051 51185 54047 21195 54059
+51115 1
+51073
+51007 6
+51135 51147 51041 51053 51145 51049
+51036 7
+51670 51149 51095 51181 51127 51041 51087
+21217 5
+21001 21087 21045 21155 21123
+51071 6
+51155 51021 51045 51121 54063 54055
+51680 3
+51019 51009 51031
+21087 5
+21169 21099 21217 21001 21123
+51095 7
+51830 51700 51073 51199 51036 51127 51097
+21099 7
+21009 21061 21087 21169 21123 21093 21085
+21193 7
+21095 21131 21051 21133 21119 21025 21189
+51031 7
+51019 51143 51037 51011 51083 51009 51680
+21139 4
+21145 21143 21157 21055
+21177 7
+21219 21047 21031 21141 21183 21107 21149
+51147 7
+51037 51007 51135 51049 51111 51011 51029
+21031 6
+21141 21227 21061 21085 21177 21183
+21033 6
+21221 21143 21047 21233 21107 21055
+51121 6
+51063 51750 51155 51161 51045 51071
+21199 7
+21231 21207 21125 21147 21203 21045 21137
+21051 7
+21013 21121 21125 21193 21131 21189 21109
+51185 6
+51173 51167 51021 54055 51027 54047
+21061 5
+21227 21099 21009 21085 21031
+21125 7
+21147 21235 21051 21121 21109 21199 21203
+51149 8
+51183 51730 51053 51570 51670 51181 51036 51041
+51770 1
+51161
+51670 3
+51036 51149 51041
+21131 4
+21013 21193 21095 21051
+21001 6
+21057 21169 21207 21045 21217 21087
+51021 6
+51173 51197 51155 51071 54055 51185
+51051 4
+51195 51167 51027 21195
+51135 5
+51111 51053 51025 51007 51147
+51570 3
+51149 51730 51041
+51830 2
+51199 51095
+51053 8
+51025 51149 51183 51081 51730 51041 51135 51007
+21133 5
+21095 51195 21195 21193 21119
+51037 6
+51083 51111 51117 51147 51011 51031
+51181 5
+51175 51183 51093 51149 51036
+51730 4
+51149 51041 51053 51570
+51155 7
+51035 51197 51750 51121 51063 51071 51021
+51700 3
+51650 51199 51095
+51067 6
+51161 51089 51141 51063 51143 51019
+21007 2
+21039 21145
+21145 5
+21083 21039 21157 21139 21007
+51195 8
+51169 51105 51720 21095 51167 51051 21195 21133
+21227 6
+21213 21141 21009 21003 21061 21031
+21207 6
+21053 21057 21199 21231 21045 21001
+21143 5
+21157 21033 21221 21055 21139
+21169 6
+21171 21009 21001 21057 21087 21099
+21009 6
+21003 21169 21171 21099 21227 21061
+21047 7
+47161 21221 21219 47125 21177 21033 21107
+51750 2
+51121 51155
+51093 3
+51175 51800 51181
+51167 7
+51169 51185 51173 51191 51027 51195 51051
+51143 8
+51019 37157 37033 51590 51089 51083 51031 51067
+51063 6
+51141 51035 51067 51121 51161 51155
+51111 5
+51117 51025 51135 51037 51147
+51183 5
+51081 51181 51149 51175 51053
+51650 2
+51199 51700
+21219 5
+47125 21141 47147 21177 21047
+21141 6
+21227 21031 21213 47147 21219 21177
+51197 5
+51077 51173 51155 51035 51021
+21157 6
+21035 21083 21221 21143 21145 21139
+21121 4
+21235 21051 21013 21125
+51025 7
+37185 51117 51081 37131 51053 51111 51135
+21095 6
+21013 51195 21133 21193 51105 21131
+51173 6
+51191 51197 51021 51077 51185 51167
+21221 6
+21035 21047 21033 47161 21157 21143
+40035 5
+40097 40131 40105 40041 40115
+40105 3
+40131 40147 40035
+40115 2
+40041 40035
+40113 5
+40117 40103 40071 40143 40147
+40071 4
+40047 40103 40053 40113
+40147 4
+40143 40105 40131 40113
+40003 4
+40093 40151 40053 40047
+40053 4
+40047 40071 40103 40003
+40025 3
+48111 40139 48421
+40151 4
+40153 40059 40003 40093
+21231 5
+47137 21053 21147 21199 21207
+40007 5
+48357 48295 40139 40059 40045
+40139 5
+48195 48421 40007 48357 40025
+40059 4
+40045 40151 40153 40007
+21235 7
+47013 47151 21147 21013 47025 21121 21125
+21039 4
+21105 21083 21145 21007
+21147 6
+47151 47137 21235 21125 21231 21199
+21013 7
+51105 47025 21131 21095 21051 21235 21121
+51720 1
+51195
+21083 7
+47183 21105 21035 47079 21157 21145 21039
+21057 6
+47027 21171 21207 21053 21001 21169
+21003 6
+47165 21213 21171 21009 47111 21227
+51035 7
+37171 51640 51077 51141 51063 51155 51197
+51191 7
+47091 47163 51520 51169 51077 51173 51167
+51800 5
+51175 37073 37029 51710 51093
+51117 7
+51083 37181 37077 37185 51025 51111 51037
+51105 6
+47025 47067 51169 51195 21013 21095
+21213 5
+47147 21003 21227 47165 21141
+51169 7
+47073 47067 51191 47163 51167 51195 51105
+21053 5
+47027 21231 21207 47137 21057
+51141 6
+37169 37171 51089 51067 51063 51035
+51089 6
+37157 37169 51690 51143 51067 51141
+21171 6
+47111 21057 21169 47027 21003 21009
+40153 5
+40045 40093 40043 40151 40059
+51077 9
+37005 37009 47091 51640 51035 37171 51197 51191 51173
+21105 5
+47131 21075 21083 47183 21039
+21035 5
+47161 47079 21221 21157 21083
+51690 1
+51089
+51640 2
+51035 51077
+47161 7
+47005 47083 47079 47125 21047 21035 21221
+40041 6
+40001 40021 40097 5007 40115 40035
+47147 7
+47037 47021 47125 47165 21213 21141 21219
+47165 7
+47037 47111 47169 47189 21003 47147 21213
+47125 7
+47043 47083 47147 47021 21219 47161 21047
+47111 7
+47159 47169 47027 47087 21171 47165 21003
+51590 1
+51143
+47027 7
+47087 47137 47133 21053 21057 47111 21171
+47137 7
+47133 47049 21147 47151 21231 47027 21053
+51520 2
+51191 47163
+47163 7
+47179 47019 47073 47091 51191 51520 51169
+47091 7
+37011 47019 51077 37009 37189 51191 47163
+40103 7
+40083 40047 40117 40113 40119 40071 40053
+47151 7
+47129 47049 47013 47001 21235 21147 47137
+40131 6
+40143 40097 40145 40035 40105 40147
+47067 5
+47057 47025 51169 47073 51105
+47025 7
+47057 47173 47013 47067 51105 21013 21235
+47013 5
+47001 47173 47025 21235 47151
+40045 7
+48211 48295 40043 40153 40129 40059 40007
+40047 7
+40073 40093 40103 40083 40071 40053 40003
+47073 7
+47063 47059 47057 47163 47179 51169 47067
+37009 5
+37189 37005 37193 51077 47091
+47049 6
+47035 47141 47133 47151 47129 47137
+37005 4
+37193 37171 51077 37009
+37171 8
+37197 37193 37169 37067 51141 51035 37005 51077
+21075 3
+47131 47095 21105
+40117 5
+40119 40143 40037 40113 40103
+37053 3
+51710 37029 37055
+37131 7
+51081 37083 37185 37015 37091 51175 51025
+37091 4
+51175 37015 37073 37131
+37029 5
+37139 37073 37053 51710 51800
+37073 7
+51175 37041 37029 37139 37143 51800 37091
+37185 7
+37069 37181 37131 37083 37127 51025 51117
+37169 6
+37067 51089 37157 37081 51141 37171
+37033 6
+37001 37157 37145 37135 51083 51143
+37157 7
+37081 37067 37033 37001 51143 51089 37169
+37077 7
+51083 37063 37145 37181 37069 37183 51117
+37145 5
+37135 37077 37063 51083 37033
+37181 4
+37185 37069 51117 37077
+47133 5
+47141 47087 47049 47137 47027
+47087 5
+47159 47133 47027 47141 47111
+40097 5
+40145 40041 40021 40035 40131
+37083 7
+37065 37127 37069 37015 37117 37131 37185
+40093 7
+40043 40011 40047 40073 40003 40153 40151
+37139 3
+37143 37029 37073
+47019 6
+37121 47171 47179 37011 47091 47163
+47095 3
+47131 47045 21075
+47131 6
+47045 47183 47053 21105 21075 47095
+47183 6
+47053 47079 47017 21083 47131 21105
+47079 6
+47017 47005 47161 47183 21035 21083
+47169 4
+47189 47111 47159 47165
+48295 5
+48211 48393 48357 40045 40007
+48421 7
+48341 48205 48111 48195 48233 40139 40025
+5021 2
+5055 5121
+48111 4
+48205 48421 48341 40025
+48195 6
+48341 48233 48357 48393 40139 48421
+5005 5
+5129 5089 5065 5137 5049
+5049 3
+5065 5135 5005
+5135 5
+5065 5075 5121 5063 5049
+5121 4
+5075 5021 5055 5135
+5007 5
+40001 5143 5015 5087 40041
+5089 3
+5129 5009 5005
+5015 4
+5087 5009 5101 5007
+5009 4
+5101 5089 5129 5015
+48357 7
+48233 48393 48295 48211 40007 48195 40139
+47021 5
+47043 47037 47187 47147 47125
+47179 5
+47171 47059 47019 47163 47073
+47173 5
+47093 47001 47057 47025 47013
+40143 7
+40111 40037 40131 40145 40147 40117 40113
+37193 8
+37003 37027 37189 37197 37097 37171 37005 37009
+47159 6
+47189 47141 47041 47087 47111 47169
+47057 7
+47089 47093 47063 47073 47067 47025 47173
+47059 6
+47029 47063 47171 47179 37115 47073
+47037 6
+47187 47189 47149 47165 47147 47021
+37189 5
+37011 37193 37027 37009 47091
+37143 3
+37041 37139 37073
+47083 5
+47085 47005 47043 47125 47161
+47129 5
+47035 47001 47151 47145 47049
+47005 7
+47039 47017 47083 47085 47135 47079 47161
+47189 7
+47015 47149 47041 47159 47169 47037 47165
+47063 5
+47089 47059 47029 47073 47057
+37041 2
+37143 37073
+47043 6
+47081 47085 47021 47187 47125 47083
+47141 7
+47185 47041 47049 47035 47133 47159 47087
+5087 7
+5033 5047 5143 5101 5071 5015 5007
+47001 7
+47105 47145 47173 47093 47013 47129 47151
+37011 7
+37111 37121 37189 37027 37023 47091 47019
+37197 5
+37097 37067 37059 37171 37193
+5055 4
+5031 5075 5021 5121
+5075 6
+5067 5063 5055 5031 5121 5135
+37069 7
+37101 37183 37083 37185 37127 37181 37077
+5065 5
+5137 5135 5063 5049 5005
+47171 6
+37115 47019 37121 37199 47179 47059
+37067 7
+37057 37059 37081 37157 37169 37197 37171
+40119 5
+40081 40083 40037 40117 40103
+47085 5
+47135 47081 47043 47083 47005
+37081 6
+37151 37057 37001 37157 37067 37169
+37001 6
+37151 37135 37037 37033 37081 37157
+37015 5
+37117 37187 37091 37083 37131
+5143 4
+5033 40001 5087 5007
+37135 5
+37037 37063 37145 37001 37033
+37063 5
+37037 37077 37183 37135 37145
+47053 6
+47113 47033 47045 47017 47183 47131
+47045 6
+47097 5093 47053 47033 47131 47095
+37127 7
+37101 37195 37183 37065 37083 37069 37185
+47093 8
+47009 47105 47145 47155 47057 47089 47173 47001
+47089 5
+47155 47029 47063 47057 47093
+47029 6
+47155 37115 37087 47059 47089 47063
+40021 6
+40101 40145 40001 40135 40041 40097
+40037 6
+40081 40111 40107 40143 40119 40117
+47035 8
+47007 47175 47143 47185 47145 47129 47049 47141
+40001 6
+40135 5143 5033 5007 40041 40021
+40073 6
+40017 40011 40083 40109 40047 40093
+40083 7
+40109 40017 40081 40119 40103 40073 40047
+40145 6
+40111 40021 40101 40097 40143 40131
+40011 6
+40039 40043 40073 40017 40015 40093
+40043 6
+40039 40129 40011 40093 40045 40153
+47017 7
+47113 47077 47039 47005 47079 47053 47183
+37121 5
+37199 37011 37111 47019 47171
+37065 5
+37195 37117 37147 37083 37127
+47041 6
+47177 47015 47185 47141 47189 47159
+5101 6
+5071 5129 5115 5009 5087 5015
+5129 7
+5115 5137 5141 5005 5089 5101 5009
+37027 6
+37023 37003 37035 37193 37011 37189
+5137 6
+5023 5141 5063 5065 5129 5005
+47149 7
+47003 47117 47187 47015 47031 47189 47037
+47185 5
+47175 47177 47035 47141 47041
+37199 5
+37021 37115 37111 37121 47171
+37117 6
+37147 37187 37013 37015 37065 37083
+37183 7
+37085 37037 37127 37101 37069 37063 37077
+48393 8
+48179 48065 48233 48211 48483 48295 48357 48195
+48211 7
+48483 48179 40129 40045 48295 48393 48357
+47145 8
+47107 47121 47143 47105 47093 47001 47035 47129
+48233 7
+48065 48375 48341 48393 48357 48195 48421
+48205 5
+48359 48341 48375 48421 48111
+47187 7
+47117 47119 47081 47149 47037 47043 47021
+48341 8
+48359 48375 48233 48065 48195 48421 48205 48111
+37115 6
+37087 37199 37021 47171 47029 47059
+37097 9
+37109 37035 37003 37059 37159 37025 37119 37197 37193
+37059 5
+37057 37159 37067 37097 37197
+47155 6
+47009 47029 37087 37173 47089 47093
+37003 4
+37035 37097 37193 37027
+37057 7
+37167 37159 37151 37123 37081 37067 37059
+40129 6
+40009 48483 40039 40043 48211 40045
+47081 6
+47101 47135 47187 47119 47043 47085
+5093 7
+5035 47157 5111 5031 47097 47167 47045
+47033 5
+47075 47097 47053 47113 47045
+37023 7
+37045 37161 37111 37035 37109 37027 37011
+5031 5
+5111 5067 5093 5055 5075
+47015 5
+47177 47031 47041 47189 47149
+37187 5
+37013 37177 37095 37117 37015
+37177 2
+37095 37187
+37111 6
+37021 37023 37161 37011 37199 37121
+47097 5
+47167 47033 47075 47045 5093
+40081 6
+40125 40109 40037 40107 40119 40083
+5063 7
+5145 5023 5067 5075 5137 5065 5135
+37151 6
+37123 37037 37125 37001 37081 37057
+47105 6
+47107 47093 47009 47123 47145 47001
+5067 7
+5147 5145 5037 5031 5111 5063 5075
+47009 6
+37075 47123 47155 37173 47093 47105
+37037 8
+37125 37105 37183 37085 37063 37135 37151 37001
+40101 6
+40091 40111 40135 40061 40021 40145
+40111 6
+40107 40101 40091 40145 40143 40037
+47177 7
+47031 47185 47175 47153 47061 47041 47015
+47039 6
+47071 47077 47181 47135 47005 47017
+47119 6
+47055 47099 47101 47117 47187 47081
+37195 6
+37191 37101 37147 37065 37079 37127
+37159 6
+37167 37025 37123 37057 37059 37097
+47135 6
+47181 47081 47101 47085 47039 47005
+47143 5
+47065 47007 47145 47121 47035
+47175 5
+47153 47035 47007 47185 47177
+37147 7
+37107 37079 37013 37049 37117 37195 37065
+47077 5
+47023 47113 47039 47071 47017
+37035 6
+37109 37045 37097 37003 37023 37027
+47075 6
+47047 47167 47069 47113 47033 47097
+37021 7
+37175 37089 37087 37111 37161 37199 37115
+37101 7
+37163 37085 37191 37195 37127 37069 37183
+40039 6
+40149 40009 40011 40015 40043 40129
+47113 7
+47069 47017 47077 47023 47053 47075 47033
+5141 6
+5045 5029 5115 5023 5137 5129
+37087 8
+37099 37173 37089 37175 37115 37021 47155 47029
+47007 5
+47065 47153 47143 47035 47175
+5047 5
+5131 5033 5071 5083 5087
+5071 5
+5083 5115 5101 5047 5087
+37055 2
+37095 37053
+5033 7
+40079 5131 40135 5047 5087 5143 40001
+47121 5
+47011 47065 47107 47145 47143
+37013 6
+37049 37137 37095 37187 37147 37117
+5115 7
+5149 5083 5141 5029 5129 5071 5101
+40017 8
+40051 40015 40109 40027 40087 40083 40073 40011
+40109 6
+40027 40125 40081 40083 40017 40073
+47117 6
+47055 47003 47103 47149 47187 47119
+5023 5
+5045 5145 5063 5137 5141
+47031 7
+47051 47127 47003 47061 47177 47015 47149
+5111 5
+5037 5093 5035 5031 5067
+47003 5
+47103 47127 47031 47149 47117
+37173 7
+37113 37075 47123 37087 37099 47009 47155
+47123 7
+47139 47107 37173 37075 37039 47009 47105
+47101 5
+47099 47181 47119 47081 47135
+37079 4
+37107 37191 37147 37195
+47107 6
+47139 47011 47123 47105 47145 47121
+40107 7
+40063 40133 40125 40091 40111 40081 40037
+40135 7
+40079 40061 5033 5131 40001 40101 40021
+47167 5
+47157 47075 47047 47097 5093
+48375 7
+48381 48359 48065 48011 48233 48341 48205
+48065 8
+48011 48381 48179 48129 48393 48233 48375 48341
+48179 7
+48129 48011 48483 48087 48211 48393 48065
+48483 7
+48129 48087 40009 40129 48211 48179 48393
+48359 5
+48117 48375 48381 48341 48205
+37105 3
+37125 37085 37037
+37161 8
+45083 37149 37089 37045 45021 37023 37021 37111
+47023 5
+47109 47069 47071 47077 47113
+37191 6
+37163 37079 37107 37061 37195 37101
+37085 7
+37051 37125 37101 37163 37183 37105 37037
+47153 6
+47115 47061 47007 47065 47175 47177
+37045 7
+45021 37109 37071 45091 37035 37023 37161
+40091 6
+40063 40121 40061 40101 40107 40111
+37109 6
+37071 37119 37097 37035 37045 37023
+40015 7
+40031 40075 40149 40051 40017 40039 40011
+47061 5
+47115 47051 47153 47031 47177
+5145 7
+5085 5117 5045 5147 5067 5063 5023
+37099 6
+13241 37113 45073 37175 37087 37173
+37125 9
+37165 37153 37093 37123 37051 37085 37105 37037 37151
+37119 7
+45057 45091 37071 37179 37025 37109 37097
+37025 5
+37179 37167 37159 37119 37097
+40009 8
+40057 40055 48087 40149 40075 40039 40129 48483
+37123 7
+37007 37167 37125 37153 37151 37159 37057
+37167 6
+37179 37123 37007 37057 37159 37025
+47181 6
+47071 47099 47101 1077 47135 47039
+37089 6
+37175 37161 37149 45045 37021 37087
+47157 6
+28033 5035 47047 28093 47167 5093
+40149 4
+40075 40015 40039 40009
+40125 7
+40087 40027 40133 40123 40107 40081 40109
+40061 6
+40077 40121 40079 40135 40091 40101
+47065 9
+13295 13083 13047 47115 47011 47121 47007 47143 47153
+5029 5
+5105 5149 5045 5141 5115
+47099 5
+1077 47055 47101 47119 47181
+47055 6
+1083 1077 47103 47117 47119 47099
+37075 5
+37039 37113 37173 47009 47123
+40133 4
+40123 40063 40107 40125
+5131 6
+40079 5127 5047 5083 5033 40135
+5037 5
+5147 5035 5123 5111 5067
+5147 6
+5117 5123 5095 5037 5067 5145
+5035 8
+28143 5077 5123 47157 28033 5093 5037 5111
+47069 8
+28009 47047 47109 28003 28139 47023 47113 47075
+47071 8
+28141 28003 47109 1077 47181 47039 47023 47077
+5083 6
+5127 5115 5071 5149 5131 5047
+37107 6
+37061 37049 37103 37147 37079 37191
+47127 4
+47103 47031 47051 47003
+37175 7
+45073 37089 45045 45077 37021 37099 37087
+37071 4
+37119 37109 45091 37045
+47047 6
+28093 47069 28009 47075 47157 47167
+40079 9
+40089 40127 40077 5113 5131 5127 5033 40135 40061
+37149 4
+45045 37161 45083 37089
+47109 4
+47071 28003 47023 47069
+40027 5
+40051 40125 40087 40109 40017
+47103 7
+1089 1083 47051 47127 47003 47055 47117
+40051 7
+40031 40027 40087 40049 40137 40017 40015
+5045 7
+5105 5085 5119 5023 5145 5141 5029
+47051 7
+1089 47115 1071 47061 47031 47103 47127
+47011 7
+13047 13313 47139 13213 47107 47121 47065
+40087 6
+40049 40123 40125 40051 40027 40017
+47115 6
+13083 1071 47065 47153 47061 47051
+37113 7
+13241 37043 37039 37099 45073 37173 37075
+37163 7
+37017 37051 37141 37061 37191 37101 37085
+40121 7
+40005 40029 40063 40127 40077 40061 40091
+37137 2
+37049 37013
+5149 7
+5097 5127 5105 5051 5029 5115 5083
+40063 6
+40029 40123 40121 40091 40107 40133
+37039 7
+13111 13291 47139 37113 37043 37075 47123
+47139 6
+13213 37039 13111 47123 47107 47011
+37051 6
+37155 37093 37163 37017 37085 37125
+37103 5
+37133 37061 37031 37049 37107
+45045 8
+45001 45007 45077 45083 45059 37149 37175 37089
+37179 6
+45057 37007 45025 37167 37025 37119
+37007 6
+45069 37153 45025 37123 37179 37167
+37093 5
+37165 37153 37051 37155 37125
+45083 6
+45059 45087 45021 37161 45045 37149
+37095 4
+37055 37013 37177 37187
+37061 6
+37141 37133 37107 37103 37163 37191
+45021 5
+45087 45091 37045 45083 37161
+48381 7
+48069 48117 48011 48437 48065 48375 48359
+48011 7
+48437 48129 48045 48179 48065 48381 48375
+48087 7
+48191 48075 48129 40057 40009 48483 48179
+48129 7
+48045 48087 48191 48483 48179 48011 48065
+48117 4
+48369 48381 48069 48359
+37153 7
+45069 45025 37165 37093 37125 37007 37123
+45091 7
+45023 45087 37119 45057 45021 37071 37045
+37043 5
+13291 13281 37113 13241 37039
+5123 5
+5077 5095 5035 5147 5037
+40075 7
+40141 40065 40055 40031 40015 40149 40009
+40055 4
+40057 40075 40065 40009
+5105 6
+5051 5045 5119 5125 5029 5149
+5127 6
+5113 5149 5097 5083 40079 5131
+5117 5
+5085 5095 5001 5147 5145
+45077 4
+45073 45045 45007 37175
+5085 6
+5119 5001 5117 5069 5145 5045
+40077 4
+40127 40079 40061 40121
+37049 6
+37137 37031 37013 37103 37107 37147
+45057 7
+45039 45023 45025 45055 37179 37119 45091
+45073 10
+13147 13119 13257 13137 13241 45077 45007 37175 37113 37099
+37165 5
+45069 37093 37155 37153 37125
+40057 6
+48075 40065 48197 40055 40009 48087
+5119 6
+5053 5125 5085 5069 5105 5045
+1077 8
+1033 28141 47055 1083 47099 1079 47071 47181
+1083 6
+1079 1089 1103 47103 47055 1077
+5095 6
+5001 5107 5123 5077 5117 5147
+28003 6
+28117 28139 28141 47071 47109 47069
+28141 7
+28057 28117 1033 1059 1077 47071 28003
+28139 5
+28145 28009 28117 28003 47069
+28009 5
+28093 28139 28145 47069 47047
+28033 5
+28137 28143 28093 47157 5035
+28093 7
+28071 28137 28145 28009 47047 28033 47157
+13241 6
+13137 13281 37099 45073 37113 37043
+1089 6
+1103 1071 1095 47051 47103 1083
+1071 6
+1095 1049 13083 47115 1089 47051
+13313 5
+13129 13295 13047 13213 47011
+13213 6
+13129 13111 13123 47139 13313 47011
+13047 4
+13295 47011 13313 47065
+13083 5
+1049 47065 13295 47115 1071
+13295 8
+13055 13115 1049 13129 13313 13047 47065 13083
+13281 5
+13311 13291 13241 13137 37043
+13291 6
+13187 13111 13311 37043 13281 37039
+13111 7
+13085 13123 13187 13291 37039 13213 47139
+37133 4
+37141 37031 37103 37061
+40123 8
+40069 40099 40049 40029 40063 40133 40087 40125
+37155 8
+45033 45069 37017 37047 45051 37051 37165 37093
+5077 5
+5107 5035 28143 5123 5095
+45087 7
+45071 45059 45039 45023 45091 45021 45083
+1033 4
+1059 1079 1077 28141
+28143 8
+28027 5107 28137 28107 28119 28033 5035 5077
+37031 3
+37133 37049 37103
+40049 6
+40019 40137 40123 40099 40087 40051
+5125 5
+5053 5059 5051 5119 5105
+1049 7
+1055 1019 1095 13295 13055 13083 1071
+40031 6
+40033 40141 40137 40051 40015 40075
+40065 6
+48197 40141 40075 48487 40057 40055
+37017 5
+37047 37141 37163 37155 37051
+13123 5
+13227 13129 13085 13111 13213
+45023 4
+45039 45057 45091 45087
+13137 7
+13011 13139 13311 45073 13257 13241 13281
+45007 7
+13105 45001 13147 45059 45045 45073 45077
+45025 8
+45061 45031 45055 45069 37153 45057 37007 37179
+1079 7
+1133 1059 1103 1043 1083 1033 1077
+45069 8
+45041 45031 37155 45033 37165 37153 45025 37007
+13311 5
+13139 13187 13137 13281 13291
+28137 5
+28107 28093 28071 28033 28143
+45059 7
+45047 45001 45071 45087 45083 45007 45045
+5051 5
+5059 5097 5125 5105 5149
+40029 5
+40069 40121 40005 40063 40123
+28117 6
+28081 28145 28141 28057 28003 28139
+5097 8
+5109 5061 5113 5051 5059 5019 5149 5127
+48369 4
+48017 48069 48279 48117
+48069 6
+48279 48437 48189 48381 48369 48117
+48437 6
+48189 48045 48153 48011 48069 48381
+48045 6
+48153 48191 48345 48129 48437 48011
+48075 5
+48101 48191 40057 48197 48087
+48191 6
+48345 48075 48101 48087 48045 48129
+5113 7
+5133 40089 5109 5061 5097 5127 40079
+37141 7
+37019 37129 37047 37133 37061 37017 37163
+13187 5
+13085 13311 13139 13291 13111
+40127 6
+40023 40005 40089 40079 40077 40121
+40005 6
+40013 40069 40127 40023 40121 40029
+40137 6
+40067 40033 40019 40049 40031 40051
+1103 5
+1043 1095 1089 1079 1083
+13257 4
+13011 13119 45073 13137
+5107 7
+28011 5041 5001 28027 28143 5077 5095
+40099 4
+40019 40123 40069 40049
+40141 6
+48487 40033 48485 40031 40075 40065
+13129 8
+13015 13115 13123 13227 13057 13213 13313 13295
+45033 6
+45067 45041 37047 37155 45051 45069
+13085 7
+13117 13057 13227 13139 13187 13111 13123
+45055 7
+45079 45039 45031 45061 45085 45025 45057
+28145 7
+28115 28071 28117 28081 28139 28093 28009
+1095 7
+1009 1043 1055 1049 1071 1103 1089
+13115 6
+1019 13055 13015 13233 13129 13295
+13055 4
+1019 13115 13295 1049
+1059 6
+1093 28057 1079 1133 1033 28141
+48197 6
+48155 48101 48487 40065 48075 40057
+5001 7
+5079 5069 5107 5041 5095 5085 5117
+28071 7
+28013 28161 28107 28115 28145 28093 28137
+28107 6
+28135 28119 28071 28161 28137 28143
+45039 6
+45071 45055 45079 45057 45023 45087
+13227 5
+13057 13015 13085 13123 13129
+13119 5
+13011 13147 45073 13257 13195
+28027 7
+28133 28011 5041 28119 28135 28143 5107
+45031 5
+45061 45041 45069 45025 45055
+45071 7
+45081 45047 45039 45079 45063 45087 45059
+1019 7
+1015 1055 13115 13233 1029 13055 1049
+28081 6
+28017 28115 28057 28095 28117 28145
+28119 4
+28107 28135 28027 28143
+40069 7
+40095 40019 40005 40013 40029 40123 40099
+40019 7
+40085 40067 40069 40095 40099 40049 40137
+5059 6
+5019 5053 5039 5125 5051 5097
+40089 8
+48387 40023 5133 5081 48037 5113 40079 40127
+40033 6
+48485 40067 48077 40137 40031 40141
+13139 9
+13135 13117 13157 13013 13137 13011 13311 13085 13187
+5053 6
+5039 5069 5025 5119 5125 5059
+5069 6
+5025 5079 5001 5053 5119 5085
+13147 5
+13195 45007 13105 45073 13119
+13011 6
+13157 13257 13119 13195 13137 13139
+45001 6
+13105 45059 45047 45065 45045 45007
+37047 6
+45051 37019 37141 37017 45033 37155
+28057 6
+28095 1093 1059 28141 28081 28117
+48487 7
+48023 48155 48485 48009 40141 48197 40065
+13015 7
+13223 13233 13057 13067 13227 13129 13115
+13057 7
+13067 13121 13117 13085 13227 13015 13129
+45047 6
+45065 45071 45081 45059 45037 45001
+37129 2
+37019 37141
+28115 5
+28013 28081 28017 28145 28071
+45061 5
+45085 45041 45031 45025 45055
+37019 4
+45051 37129 37141 37047
+5061 6
+5081 5133 5057 5109 5097 5113
+5109 6
+5057 5019 5099 5097 5061 5113
+5019 6
+5099 5039 5103 5059 5109 5097
+13117 5
+13121 13139 13135 13085 13057
+1093 7
+1075 28095 1133 1127 1057 1059 28057
+48345 7
+48107 48125 48153 48101 48269 48191 48045
+1043 6
+1127 1133 1095 1009 1103 1079
+48101 7
+48125 48269 48197 48155 48075 48345 48191
+1133 5
+1127 1043 1079 1093 1059
+48153 7
+48303 48107 48189 48345 48125 48045 48437
+48189 7
+48219 48303 48279 48153 48107 48437 48069
+48279 7
+48079 48219 48017 48189 48303 48069 48369
+48017 4
+48079 48279 48219 48369
+45041 8
+45089 45027 45085 45067 45033 45069 45061 45031
+45051 6
+45043 45067 37019 37047 45033 37155
+13157 6
+13219 13059 13013 13195 13011 13139
+45067 5
+45089 45051 45043 45033 45041
+40067 6
+48337 48077 40019 40085 40137 40033
+13105 8
+13221 13195 45065 13181 45001 13317 45007 13147
+13195 7
+13059 13105 13221 13147 13157 13011 13119
+1009 6
+1073 1127 1115 1055 1095 1043
+45079 6
+45063 45085 45017 45055 45071 45039
+48155 6
+48275 48269 48023 48487 48197 48101
+1055 6
+1115 1019 1015 1049 1009 1095
+5133 5
+5057 5061 5081 5113 40089
+45063 6
+45003 45081 45017 45079 45075 45071
+28161 5
+28043 28135 28013 28071 28107
+48485 6
+48023 48009 48077 40033 48487 40141
+13121 10
+13077 13113 13063 13097 13045 13067 13135 13089 13117 13057
+40095 5
+40085 40013 40069 48181 40019
+5079 5
+5043 5025 5041 5001 5069
+45081 5
+45037 45063 45071 45003 45047
+13135 7
+13247 13089 13297 13013 13139 13121 13117
+28135 7
+28083 28133 28043 28161 28107 28119 28027
+28013 6
+28043 28017 28155 28115 28071 28161
+40013 7
+48147 48181 48277 40023 40005 40095 40069
+40023 6
+48277 40089 48387 40127 40013 40005
+45085 6
+45017 45027 45041 45061 45079 45055
+5039 6
+5013 5103 5025 5053 5019 5059
+48077 6
+48237 48009 48337 40067 48485 40033
+13013 6
+13297 13059 13219 13157 13135 13139
+5041 7
+5017 5043 28011 28027 5107 5079 5001
+28011 6
+28151 5017 28133 28027 5107 5041
+13067 5
+13097 13223 13121 13057 13015
+13233 6
+13143 1029 13015 13223 1019 13115
+28095 7
+28087 28025 28017 1075 1093 28057 28081
+13223 6
+13045 13143 13067 13097 13015 13233
+28017 6
+28155 28095 28025 28081 28013 28115
+45065 6
+13073 13181 45037 45047 13105 45001
+40085 6
+48097 48337 48181 40095 40019 40067
+5025 7
+5011 5013 5079 5043 5069 5039 5053
+1075 5
+1107 28087 1057 1093 28095
+13059 5
+13219 13221 13195 13157 13013
+13221 7
+13265 13133 13219 13317 13105 13059 13195
+5057 7
+5073 5091 5081 5099 5109 5133 5061
+1127 7
+1125 1057 1009 1073 1043 1133 1093
+28133 6
+28053 28151 28083 28135 28027 28011
+48337 6
+48237 40085 48097 48497 40067 48077
+13317 6
+13301 13265 13181 13189 13221 13105
+1115 6
+1117 1073 1015 1121 1055 1009
+13181 5
+13189 13073 45065 13317 13105
+45037 6
+13245 13073 45003 45081 45065 45047
+1015 5
+1121 1029 1019 1115 1055
+13219 7
+13211 13297 13221 13133 13059 13157 13013
+13089 5
+13151 13063 13135 13247 13121
+48387 8
+48343 48159 48449 48119 48277 48037 40089 40023
+5099 6
+5073 5103 5019 5027 5057 5109
+48181 7
+48121 48097 48147 48085 40013 40085 40095
+1029 8
+1027 1111 1121 13045 13143 13233 1015 1019
+48097 5
+48497 48181 48121 40085 48337
+5081 6
+48037 5057 5091 5061 5133 40089
+45027 6
+45075 45017 45041 45089 45015 45085
+48277 6
+48119 48147 48387 48159 40023 40013
+13297 6
+13217 13247 13219 13211 13013 13135
+1057 5
+1107 1127 1125 1075 1093
+13143 4
+13223 13045 13233 1029
+28043 7
+28015 28083 28013 28155 28097 28161 28135
+48147 6
+48085 48231 48277 48119 40013 48181
+45089 5
+45015 45043 45067 45041 45027
+45017 5
+45027 45085 45075 45063 45079
+45003 7
+13033 45011 13245 45075 45063 45037 45081
+1073 6
+1007 1125 1117 1115 1009 1127
+48269 8
+48433 48263 48125 48275 48207 48155 48101 48345
+48125 8
+48169 48263 48107 48269 48433 48101 48345 48153
+48107 8
+48305 48169 48303 48125 48263 48345 48153 48189
+48303 8
+48445 48305 48219 48107 48169 48153 48189 48279
+48009 7
+48503 48447 48023 48237 48077 48485 48487
+48219 7
+48445 48079 48303 48305 48189 48279 48017
+48275 6
+48433 48207 48023 48447 48155 48269
+48023 8
+48447 48207 48009 48503 48485 48487 48275 48155
+48079 5
+48501 48219 48445 48279 48017
+5103 6
+5027 5013 5139 5039 5099 5019
+13045 8
+13149 1111 13121 13097 13223 13077 13143 1029
+28083 6
+28053 28015 28051 28043 28135 28133
+28025 5
+28105 28155 28095 28087 28017
+13211 6
+13159 13217 13133 13237 13219 13297
+5013 5
+5011 5139 5025 5039 5103
+13097 4
+13067 13121 13045 13223
+5043 6
+5003 5011 5017 5041 5079 5025
+13247 5
+13151 13297 13217 13135 13089
+45043 5
+45019 45015 45051 45089 45067
+13133 6
+13237 13141 13265 13221 13211 13219
+28087 6
+28103 28105 1107 1075 28095 28025
+13217 6
+13035 13151 13159 13211 13297 13247
+28155 7
+28019 28097 28025 28017 28105 28043 28013
+13265 5
+13141 13317 13301 13221 13133
+5011 5
+5139 5043 5003 5025 5013
+45075 9
+45029 45009 45011 45015 45035 45027 45003 45017 45063
+48037 6
+48067 48343 5091 5081 48387 40089
+13073 5
+13189 13245 45037 45065 13181
+1121 6
+1037 1117 1027 1029 1015 1115
+28015 5
+28051 28097 28007 28043 28083
+28097 5
+28007 28019 28155 28015 28043
+13189 6
+13163 13301 13073 13245 13181 13317
+13063 5
+13113 13151 13255 13089 13121
+13151 6
+13255 13217 13035 13247 13089 13063
+1125 7
+1065 1063 1107 1073 1007 1127 1057
+13301 6
+13125 13141 13189 13163 13317 13265
+5091 7
+48067 5073 22015 22017 5057 48037 5081
+28105 6
+28159 28019 28087 28103 28025 28155
+5017 9
+22123 22067 5003 28151 28055 22035 28011 5041 5043
+13113 4
+13077 13063 13255 13121
+1117 6
+1021 1007 1121 1037 1115 1073
+13245 6
+13163 45003 13033 45037 13189 13073
+1107 7
+1119 28103 1063 1125 1057 1075 28087
+28151 6
+28055 28133 28053 28125 28011 5017
+28019 5
+28007 28105 28159 28155 28097
+13159 6
+13207 13035 13237 13169 13211 13217
+45015 6
+45035 45019 45043 45089 45075 45027
+13077 7
+13285 13199 13149 13113 13255 13121 13045
+1111 7
+1123 1027 13149 13285 1017 13045 1029
+1027 5
+1037 1123 1111 1029 1121
+48119 6
+48223 48231 48387 48159 48277 48147
+45011 5
+13033 45009 45075 45005 45003
+5073 7
+22015 22017 5027 22119 5099 5057 5091
+13237 6
+13169 13009 13141 13133 13159 13211
+48237 7
+48363 48503 48497 48367 48337 48077 48009
+13141 7
+13009 13125 13301 13303 13265 13237 13133
+5027 6
+22119 5103 5139 22027 5073 5099
+13035 6
+13171 13255 13159 13217 13207 13151
+48497 6
+48367 48121 48439 48097 48237 48337
+48121 6
+48439 48085 48113 48181 48497 48097
+45009 6
+45049 45005 45035 45029 45075 45011
+13149 4
+13285 13077 13045 1111
+48231 8
+48257 48397 48085 48223 48379 48467 48119 48147
+48085 6
+48397 48113 48231 48147 48121 48181
+5003 6
+22067 22111 5139 5017 5043 5011
+48449 4
+48159 48343 48063 48387
+48207 7
+48253 48433 48447 48417 48023 48275 48269
+48447 7
+48417 48503 48429 48009 48023 48207 48275
+48503 6
+48429 48363 48237 48009 48447 48023
+48263 7
+48415 48169 48433 48151 48269 48125 48107
+48433 7
+48151 48207 48253 48275 48269 48263 48125
+48159 7
+48499 48223 48449 48063 48387 48119 48277
+5139 8
+22111 22027 22067 5003 5011 5027 5013 5103
+48169 7
+48033 48305 48263 48415 48125 48107 48303
+48305 7
+48115 48445 48169 48033 48107 48303 48219
+48445 7
+48165 48501 48305 48115 48303 48219 48079
+28051 6
+28089 28163 28053 28007 28015 28083
+48501 3
+48445 48165 48079
+48223 5
+48379 48159 48499 48119 48231
+48343 7
+48459 48063 48067 48315 48037 48387 48449
+13255 8
+13231 13171 13199 13035 13151 13077 13113 13063
+28053 6
+28125 28051 28163 28083 28133 28151
+45035 5
+45029 45015 45019 45009 45075
+13125 4
+13303 13163 13301 13141
+13163 8
+13167 13303 13033 13107 13245 13189 13125 13301
+48067 5
+48315 22017 5091 48037 48343
+13033 8
+13107 13165 45005 13251 45011 45003 13163 13245
+28007 9
+28089 28079 28163 28159 28099 28019 28097 28051 28015
+28103 6
+28069 28159 1107 1119 28087 28105
+28159 7
+28079 28099 28103 28069 28105 28007 28019
+1007 6
+1105 1065 1021 1117 1073 1125
+13303 6
+13319 13009 13163 13167 13125 13141
+13199 7
+13145 13285 13231 13293 13263 13255 13077
+13285 6
+1017 13199 13145 13077 13149 1111
+13171 5
+13293 13231 13207 13035 13255
+13207 7
+13079 13293 13169 13021 13159 13171 13035
+13231 4
+13293 13171 13255 13199
+13009 5
+13169 13303 13319 13141 13237
+13169 7
+13289 13021 13009 13319 13237 13207 13159
+45029 7
+45013 45049 45005 45019 45035 45075 45009
+1063 5
+1119 1065 1091 1125 1107
+45005 6
+13251 45029 45049 45009 13033 45011
+1017 5
+1081 1123 13285 13145 1111
+1123 7
+1051 1037 1081 1087 1017 1111 1027
+1037 6
+1021 1123 1051 1027 1121 1117
+28125 4
+28055 28053 28163 28151
+48063 5
+48459 48499 48343 48159 48449
+1021 7
+1047 1001 1105 1037 1051 1117 1007
+13251 6
+13031 13165 45049 13103 45005 13033
+28163 8
+28049 28149 28055 28089 28007 28051 28125 28053
+45049 7
+13103 45053 45029 45013 45009 13251 45005
+22017 9
+22031 48365 48203 48315 22015 22081 5073 48067 5091
+45019 4
+45043 45029 45035 45015
+22015 6
+22013 22119 22081 22017 5073 5091
+28055 6
+22035 28163 28149 28125 28151 5017
+22119 5
+22027 22013 5027 22015 5073
+22027 6
+22013 22111 22061 5139 22119 5027
+22111 6
+22061 22067 22073 5003 5139 22027
+1065 5
+1091 1105 1007 1125 1063
+48499 7
+48467 48379 48459 48423 48063 48159 48223
+22067 7
+22083 22073 22123 5017 5003 22111 5139
+22123 4
+22083 22035 5017 22067
+22035 6
+22065 22083 28149 28055 22123 5017
+13319 7
+13023 13289 13167 13175 13303 13169 13009
+48363 7
+48133 48429 48367 48221 48143 48237 48503
+13293 7
+13263 13171 13207 13079 13231 13269 13199
+48367 6
+48221 48439 48251 48497 48363 48237
+48439 6
+48251 48113 48139 48121 48367 48497
+48113 6
+48139 48257 48397 48085 48439 48121
+48397 4
+48231 48257 48085 48113
+1119 7
+1023 28075 28069 1091 1063 1107 28103
+48379 4
+48499 48223 48467 48231
+48417 6
+48059 48253 48429 48133 48447 48207
+48253 7
+48353 48441 48151 48417 48059 48207 48433
+48151 7
+48353 48335 48415 48253 48441 48433 48263
+48415 7
+48335 48227 48033 48151 48353 48263 48169
+48033 7
+48317 48227 48115 48415 48335 48169 48305
+48115 7
+48003 48317 48165 48033 48227 48305 48445
+48429 5
+48363 48133 48503 48417 48447
+48165 5
+48003 48115 48317 48445 48501
+13021 6
+13153 13225 13079 13289 13169 13207
+13165 4
+13107 13251 13031 13033
+28099 7
+28101 28123 28079 28069 28075 28159 28007
+28079 6
+28123 28089 28099 28101 28159 28007
+28069 6
+28101 28075 1119 28103 28099 28159
+48459 7
+48423 48203 48183 48315 48343 48063 48499
+13289 6
+13153 13175 13319 13023 13169 13021
+28089 7
+28049 28121 28079 28123 28007 28051 28163
+48315 5
+48203 22017 48067 48459 48343
+13263 7
+13053 13197 13215 13145 13269 13293 13199
+1105 5
+1091 1047 1021 1007 1065
+13145 6
+13215 1081 13263 13199 1017 13285
+13079 5
+13225 13269 13021 13207 13293
+48257 6
+48139 48467 48213 48231 48397 48113
+48467 6
+48213 48423 48499 48257 48379 48231
+13107 10
+13279 13209 13283 13167 13031 13043 13267 13165 13033 13163
+13167 6
+13283 13175 13107 13163 13319 13303
+48203 6
+48365 48401 48183 22017 48315 48459
+1051 6
+1101 1001 1123 1087 1021 1037
+22061 5
+22049 22013 22073 22111 22027
+45053 4
+13051 13103 45013 45049
+1081 6
+1087 13215 13145 1113 1017 1123
+13269 7
+13249 13197 13225 13079 13193 13263 13293
+22073 6
+22021 22049 22067 22083 22061 22111
+13175 7
+13309 13091 13023 13283 13167 13289 13319
+1001 5
+1085 1047 1051 1101 1021
+1047 6
+1131 1091 1085 1001 1021 1105
+48423 7
+48213 48401 48073 48183 48459 48467 48499
+13225 5
+13193 13153 13079 13021 13269
+13153 7
+13093 13193 13023 13289 13235 13021 13225
+22083 7
+22021 22041 22035 22065 22123 22067 22073
+48183 4
+48401 48203 48423 48459
+13031 7
+13109 13043 13103 13029 13251 13107 13165
+28123 7
+28129 28121 28099 28101 28061 28079 28089
+28149 7
+28021 22107 22065 28049 28163 22035 28055
+13215 5
+1113 13053 13263 13145 1081
+1087 6
+1011 1101 1113 1081 1051 1123
+28121 6
+28029 28127 28049 28129 28123 28089
+13103 6
+13029 45053 13051 45049 13031 13251
+22013 8
+22069 22081 22049 22127 22061 22027 22119 22015
+28075 7
+28023 28061 28101 1119 1023 28069 28099
+28101 8
+28129 28061 28069 28075 28023 28099 28123 28079
+13023 6
+13235 13175 13091 13319 13153 13289
+28049 7
+28029 28021 28121 28127 28089 28163 28149
+22065 5
+22107 22041 28149 22035 22083
+48251 7
+48035 48425 48221 48139 48217 48439 48367
+13197 7
+13307 13259 13053 13249 13261 13269 13263
+48221 5
+48425 48143 48251 48367 48363
+48139 7
+48217 48213 48349 48257 48113 48251 48439
+13043 5
+13279 13031 13109 13267 13107
+1091 8
+1025 1023 1047 1131 1105 1065 1119 1063
+13053 5
+13259 1113 13263 13197 13215
+48441 6
+48399 48353 48059 48083 48253 48151
+48227 7
+48173 48317 48335 48431 48415 48033 48115
+48335 7
+48431 48353 48081 48151 48415 48227 48033
+48133 7
+48049 48059 48143 48093 48363 48429 48417
+48059 6
+48083 48133 48049 48417 48441 48253
+48317 7
+48329 48003 48227 48173 48033 48115 48165
+48003 6
+48135 48495 48317 48329 48115 48165
+48353 7
+48081 48441 48399 48253 48151 48335 48415
+48143 7
+48093 48425 48035 48193 48221 48133 48363
+1113 7
+1005 1011 13259 13215 13053 1087 1081
+13193 6
+13261 13249 13093 13153 13225 13269
+13283 6
+13309 13209 13279 13107 13167 13175
+1101 7
+1041 1085 1011 1109 1087 1051 1001
+22049 5
+22127 22021 22073 22061 22013
+13091 6
+13315 13235 13309 13271 13175 13023
+13249 4
+13193 13261 13269 13197
+1085 6
+1013 1131 1101 1041 1001 1047
+22041 5
+22025 22021 22107 22083 22065
+48401 7
+48073 48419 48365 48347 48203 48183 48423
+13235 5
+13093 13091 13315 13023 13153
+48365 5
+22031 48419 22017 48203 48401
+48213 8
+48161 48349 48073 48423 48001 48467 48139 48257
+13209 5
+13161 13309 13279 13107 13283
+13279 7
+13161 13267 13001 13043 13107 13209 13283
+22031 6
+48419 22069 22081 22085 22017 48365
+48349 6
+48293 48217 48001 48213 48161 48139
+48425 4
+48251 48035 48221 48143
+1023 7
+1129 28153 28023 1025 1091 1119 28075
+1011 5
+1109 1113 1005 1087 1101
+13267 8
+13001 13109 13179 13183 13305 13279 13043 13107
+13309 6
+13271 13209 13161 13175 13283 13091
+13093 6
+13081 13261 13235 13315 13153 13193
+22021 7
+22059 22127 22083 22041 22025 22073 22049
+48217 6
+48309 48035 48293 48349 48139 48251
+1131 6
+1099 1025 1085 1013 1047 1091
+13109 5
+13029 13179 13031 13267 13043
+45013 3
+45029 45053 45049
+48093 5
+48333 48049 48193 48143 48133
+22107 8
+28001 22029 22025 28021 28063 28149 22065 22041
+22081 5
+22013 22069 22031 22015 22017
+13029 5
+13179 13051 13103 13109 13031
+28061 7
+28067 28129 28023 28153 28075 28101 28123
+28023 5
+28153 1023 28075 28061 28101
+28129 7
+28031 28127 28067 28061 28101 28123 28121
+28021 5
+28063 28029 28049 28149 22107
+13259 7
+13239 1005 13197 13307 13243 13053 1113
+13307 5
+13243 13261 13273 13197 13259
+13261 8
+13177 13273 13093 13081 13193 13307 13249 13197
+48035 7
+48099 48193 48217 48309 48251 48425 48143
+1005 7
+1045 1067 1109 13239 13259 1113 1011
+22127 6
+22069 22059 22043 22021 22049 22013
+22069 8
+22115 22085 22079 22127 22043 22013 22081 22031
+48073 7
+48225 48001 48347 48005 48401 48213 48423
+13271 6
+13069 13017 13315 13161 13309 13091
+13315 7
+13287 13081 13271 13091 13017 13093 13235
+48431 6
+48383 48173 48081 48451 48335 48227
+13179 5
+13191 13183 13029 13109 13267
+48173 6
+48383 48461 48329 48431 48227 48317
+48329 6
+48461 48135 48173 48383 48317 48003
+48081 5
+48451 48399 48353 48431 48335
+48495 4
+48475 48301 48135 48003
+48135 6
+48103 48475 48329 48461 48003 48495
+48399 6
+48095 48451 48083 48441 48081 48353
+48083 6
+48095 48049 48307 48059 48399 48441
+48049 7
+48307 48093 48333 48411 48133 48083 48059
+48001 6
+48289 48161 48073 48225 48349 48213
+1109 6
+1031 1041 1005 1045 1011 1101
+28127 7
+28077 28029 28129 28031 28065 28121 28049
+1041 6
+1039 1013 1109 1031 1101 1085
+28029 7
+28085 28063 28127 28077 28121 28049 28021
+13081 6
+13321 13177 13315 13287 13093 13261
+48193 6
+48281 48333 48035 48099 48093 48143
+48161 5
+48289 48293 48001 48213 48349
+13183 4
+13305 13191 13179 13267
+48109 3
+48229 48389 48243
+48389 5
+48243 48301 48371 48475 48109
+48301 3
+48495 48475 48389
+48229 4
+48141 48109 48243 48377
+48141 1
+48229
+1025 6
+1129 1099 1003 1131 1091 1023
+13239 5
+1067 13243 13061 13259 1005
+48419 7
+48405 48347 22085 48403 22031 48365 48401
+22025 6
+22009 22059 22029 22107 22041 22021
+1013 6
+1035 1099 1041 1039 1085 1131
+13161 7
+13069 13001 13005 13279 13209 13271 13309
+13001 6
+13229 13005 13305 13267 13161 13279
+13273 6
+13037 13243 13177 13095 13261 13307
+22059 6
+22079 22043 22025 22009 22021 22127
+13243 6
+13061 13273 13037 13307 13239 13259
+13177 5
+13095 13321 13081 13261 13273
+28153 7
+28041 28111 28067 1023 1129 28023 28061
+28063 6
+28001 28029 28085 28037 28021 22107
+48309 6
+48027 48099 48293 48145 48217 48035
+13051 3
+45053 13029 13103
+13321 7
+13205 13095 13277 13071 13287 13081 13177
+48347 5
+48405 48005 48419 48073 48401
+13287 6
+13017 13155 13277 13315 13321 13081
+22085 6
+48351 48403 22069 22115 48419 22031
+13017 5
+13155 13271 13069 13287 13315
+1099 6
+1003 1035 1053 1013 1131 1025
+28067 6
+28035 28031 28153 28111 28061 28129
+13305 7
+13025 13229 13191 13127 13183 13001 13267
+48293 7
+48395 48145 48289 48161 48349 48309 48217
+13069 8
+13003 13019 13155 13299 13005 13161 13271 13017
+22043 4
+22059 22079 22069 22127
+28031 6
+28073 28065 28067 28035 28129 28127
+1067 6
+1069 1045 13099 13239 13061 1005
+13061 5
+13243 13037 13099 1067 13239
+28065 5
+28091 28077 28031 28073 28127
+13155 5
+13019 13277 13069 13017 13287
+22029 7
+22077 22009 28001 28157 22125 22107 22025
+28077 6
+28147 28085 28091 28065 28127 28029
+28001 5
+28157 28037 28063 22107 22029
+1035 4
+1053 1039 1013 1099
+48333 5
+48411 48281 48193 48093 48049
+28085 7
+28113 28005 28037 28077 28147 28029 28063
+48099 5
+48281 48309 48027 48035 48193
+13005 5
+13229 13001 13299 13069 13161
+48451 8
+48413 48235 48383 48095 48327 48399 48081 48431
+1129 6
+1097 28041 1025 1003 1023 28153
+13191 4
+13127 13179 13305 13183
+13095 6
+13205 13007 13037 13321 13177 13273
+48475 6
+48103 48371 48135 48495 48301 48389
+48405 5
+48005 48403 48241 48419 48347
+48103 5
+48461 48105 48371 48135 48475
+48461 6
+48383 48105 48173 48329 48103 48135
+48383 7
+48451 48235 48105 48431 48173 48461 48329
+48289 7
+48041 48395 48225 48313 48001 48161 48293
+13037 6
+13099 13095 13273 13007 13061 13243
+1031 5
+1039 1045 1061 1109 1041
+1045 6
+1061 1067 1069 1005 1031 1109
+28037 5
+28005 28157 28085 28001 28063
+48403 5
+48241 22085 48351 48405 48419
+48225 7
+48471 48313 48455 48005 48073 48289 48001
+13277 6
+13071 13075 13019 13155 13287 13321
+48095 6
+48413 48307 48327 48083 48399 48451
+1039 8
+12091 1053 1031 1061 12131 1041 1035 1013
+48235 4
+48105 48413 48451 48383
+13229 5
+13299 13305 13025 13001 13005
+48005 8
+48457 48373 48455 48405 48241 48225 48347 48073
+48145 5
+48331 48027 48395 48293 48309
+13099 7
+13253 13201 1069 13007 13037 13061 1067
+22079 7
+22003 22039 22115 22009 22059 22043 22069
+48307 6
+48327 48411 48319 48049 48095 48083
+48411 7
+48299 48319 48053 48281 48333 48307 48049
+13019 7
+13185 13173 13075 13069 13003 13155 13277
+13299 8
+12003 13065 13003 13049 13025 13229 13005 13069
+48281 6
+48053 48027 48099 48193 48411 48333
+13127 4
+13039 13025 13191 13305
+13007 6
+13087 13201 13205 13095 13099 13037
+13205 7
+13131 13087 13321 13071 13275 13095 13007
+28091 6
+22117 28147 28073 28109 28065 28077
+28073 5
+28109 28035 28031 28091 28065
+28035 6
+28109 28111 28131 28067 28073 28031
+28111 6
+28131 28039 28041 28153 28035 28067
+28041 5
+28039 1129 1097 28153 28111
+13003 5
+13173 13299 13065 13069 13019
+48455 5
+48407 48471 48373 48005 48225
+48371 7
+48043 48243 48443 48105 48103 48475 48389
+28157 6
+22125 28037 28005 22037 28001 22029
+22115 6
+22011 48351 22079 22003 22069 22085
+13025 6
+13049 13127 13039 13305 13299 13229
+48395 7
+48051 48331 48289 48313 48041 48293 48145
+28005 7
+22037 22091 28113 22105 28085 28037 28157
+28113 5
+22105 28147 22117 28085 28005
+28147 5
+28091 28077 22117 28113 28085
+13075 5
+13027 13071 13019 13185 13277
+22009 8
+22097 22039 22077 22125 22029 22025 22079 22059
+13071 6
+13275 13277 13075 13027 13205 13321
+1069 6
+1061 13099 13253 1067 12063 1045
+48027 7
+48491 48053 48331 48145 48309 48281 48099
+1003 6
+1097 12033 1053 1099 1129 1025
+1053 7
+12033 12113 1039 12091 1035 1003 1099
+13201 4
+13253 13007 13087 13099
+1061 7
+12131 12059 12063 1045 1069 1039 1031
+13173 5
+13185 13065 13101 13003 13019
+48351 7
+48361 48241 22011 22019 22115 22085 48403
+13065 6
+12023 13101 12003 13299 13173 13003
+1097 5
+28059 28039 1003 1129 28041
+13039 4
+12089 13049 13127 13025
+48241 7
+48199 48457 48351 48361 48403 48005 48405
+48373 6
+48291 48407 48457 48199 48005 48455
+48331 6
+48287 48491 48051 48395 48145 48027
+48243 6
+48377 48371 48043 48389 48229 48109
+48313 6
+48185 48041 48225 48471 48395 48289
+48413 7
+48435 48105 48327 48267 48095 48451 48235
+48327 6
+48319 48307 48267 48413 48095 48451
+48105 9
+48443 48435 48465 48413 48235 48371 48383 48461 48103
+13087 7
+12039 12063 13253 13131 13007 13205 13201
+13253 5
+12063 13087 13201 13099 1069
+13131 5
+12073 12039 13275 13205 13087
+13275 6
+12065 12073 13027 13071 13131 13205
+13027 6
+12065 13185 12079 13075 13275 13071
+13049 5
+12003 12089 13039 13025 13299
+48457 4
+48199 48241 48005 48373
+48471 6
+48185 48407 48339 48455 48225 48313
+48053 7
+48031 48299 48491 48453 48027 48281 48411
+13185 7
+12079 13173 13101 12047 13019 13027 13075
+22077 8
+22097 22121 22047 22099 22037 22125 22029 22009
+28109 8
+28045 22103 22117 28047 28131 28035 28073 28091
+22117 6
+22105 28109 22103 28091 28147 28113
+22105 7
+22095 22063 22091 22103 22117 28113 28005
+22091 5
+22033 22037 22105 22063 28005
+22037 7
+22121 22033 22125 22091 28005 22077 28157
+28039 5
+28059 28131 1097 28041 28111
+22125 6
+22121 22037 28157 22077 22009 22029
+12063 10
+12005 12013 12133 12059 12039 12077 13087 13253 1061 1069
+12059 4
+12133 12131 12063 1061
+22039 6
+22001 22053 22003 22097 22009 22079
+12131 6
+12091 12133 12005 12059 1061 1039
+48041 6
+48477 48051 48185 48289 48313 48395
+48319 6
+48267 48299 48171 48411 48327 48307
+48299 5
+48171 48053 48031 48411 48319
+28131 7
+28045 28047 28039 28059 28111 28109 28035
+48491 6
+48453 48331 48287 48021 48027 48053
+48407 5
+48339 48373 48291 48455 48471
+22003 6
+22053 22011 22039 22001 22079 22115
+22011 5
+22019 22003 22053 22115 48351
+48185 6
+48473 48477 48471 48339 48313 48041
+13101 5
+12047 12023 13065 13185 13173
+22097 6
+22055 22001 22077 22099 22009 22039
+12133 4
+12005 12063 12059 12131
+12089 4
+12003 12031 13039 13049
+28059 4
+28047 1097 28039 28131
+48051 5
+48287 48041 48477 48395 48331
+22033 6
+22047 22121 22063 22005 22091 22037
+48267 7
+48137 48435 48171 48319 48265 48327 48413
+12039 6
+12077 12013 12073 13131 13087 12063
+22103 4
+28045 28109 22105 22117
+48435 5
+48465 48267 48137 48413 48105
+12073 6
+12129 12077 12065 13275 13131 12039
+28047 4
+28045 28059 28131 28109
+12065 6
+12129 12123 12079 13027 13275 12073
+48043 4
+48377 48443 48371 48243
+48443 4
+48465 48105 48043 48371
+22121 5
+22033 22047 22037 22125 22077
+22063 5
+22005 22105 22095 22033 22091
+28045 4
+28047 28109 28131 22103
+12079 7
+12123 12047 12121 12067 13185 12065 13027
+48377 3
+48043 48243 48229
+48453 6
+48055 48209 48031 48021 48491 48053
+48339 6
+48473 48291 48201 48407 48185 48471
+12047 5
+12121 12023 13101 12079 13185
+12013 5
+12045 12005 12039 12077 12063
+12077 7
+12045 12129 12037 12073 12039 12013 12063
+12023 8
+12041 12121 12003 12125 12001 13065 12047 13101
+12003 9
+12007 12125 12031 12019 12089 13049 13299 12023 13065
+12031 4
+12019 12109 12003 12089
+12005 5
+12045 12013 12063 12133 12131
+48287 6
+48149 48021 48477 48051 48331 48491
+48199 6
+48291 48361 48245 48241 48457 48373
+48171 6
+48265 48031 48259 48299 48267 48319
+48031 7
+48091 48259 48453 48209 48053 48171 48299
+22099 7
+22045 22101 22055 22007 22047 22097 22077
+22047 7
+22045 22007 22005 22033 22099 22121 22077
+22019 5
+48361 22053 22023 22011 48351
+22053 7
+22023 22001 22113 22039 22003 22019 22011
+48291 7
+48201 48245 48199 48071 48373 48339 48407
+22001 6
+22055 22113 22039 22097 22053 22003
+12121 5
+12067 12023 12041 12047 12079
+48021 6
+48177 48055 48287 48149 48453 48491
+12091 4
+12113 12131 1039 1053
+12113 3
+12033 12091 1053
+48477 7
+48015 48149 48473 48185 48041 48287 48051
+12033 3
+12113 1053 1003
+22055 5
+22113 22099 22045 22097 22001
+22005 6
+22007 22093 22095 22063 22047 22033
+48209 5
+48187 48091 48055 48453 48031
+12123 4
+12029 12067 12079 12065
+22095 6
+22089 22093 22057 22105 22005 22063
+12129 4
+12037 12065 12073 12077
+48137 7
+48271 48465 48385 48463 48265 48267 48435
+48465 5
+48271 48137 48435 48443 48105
+48265 6
+48385 48259 48171 48019 48137 48267
+12067 5
+12029 12041 12121 12123 12079
+48473 6
+48015 48201 48157 48339 48185 48477
+12109 4
+12035 12107 12019 12031
+48361 6
+48245 22019 22023 48351 48199 48241
+12045 4
+12037 12077 12013 12005
+12019 6
+12107 12001 12007 12109 12031 12003
+48245 4
+48071 48361 48291 48199
+48149 8
+48285 48177 48055 48015 48089 48477 48287 48021
+48201 7
+48039 48157 48071 48167 48291 48473 48339
+22093 4
+22007 22095 22057 22005
+22071 3
+22051 22087 22075
+22113 5
+22023 22045 22055 22001 22053
+12125 4
+12001 12007 12003 12023
+12007 5
+12001 12019 12107 12003 12125
+48259 6
+48019 48091 48029 48031 48265 48171
+48015 6
+48089 48157 48473 48481 48477 48149
+22089 3
+22051 22057 22095
+48385 4
+48019 48463 48265 48137
+22007 8
+22045 22101 22057 22109 22093 22005 22047 22099
+48055 6
+48187 48149 48177 48021 48453 48209
+22023 4
+22113 22053 48361 22019
+22087 2
+22075 22071
+48091 5
+48029 48209 48187 48031 48259
+12037 3
+12129 12045 12077
+48089 5
+48239 48285 48481 48015 48149
+22101 4
+22045 22007 22109 22099
+12001 8
+12075 12041 12019 12107 12007 12083 12125 12023
+12041 6
+12029 12001 12075 12023 12067 12121
+22057 6
+22051 22109 22007 22089 22093 22095
+48019 6
+48463 48325 48029 48259 48385 48265
+22075 3
+22051 22087 22071
+48071 4
+48167 48245 48201 48291
+48187 6
+48493 48029 48177 48055 48209 48091
+12107 7
+12083 12035 12127 12109 12019 12001 12007
+12029 4
+12075 12041 12067 12123
+48157 5
+48481 48039 48201 48015 48473
+48177 8
+48255 48493 48285 48123 48149 48021 48187 48055
+48029 7
+48325 48187 48493 48013 48091 48019 48259
+48325 6
+48163 48507 48463 48029 48013 48019
+12035 3
+12127 12109 12107
+22045 6
+22007 22101 22099 22047 22113 22055
+48285 6
+48469 48123 48089 48239 48149 48177
+48481 6
+48239 48039 48321 48157 48089 48015
+48463 8
+48323 48507 48271 48325 48163 48019 48385 48137
+48271 5
+48323 48463 48507 48137 48465
+48039 5
+48321 48167 48201 48481 48157
+12075 5
+12083 12017 12001 12029 12041
+48167 3
+48071 48039 48201
+12083 7
+12119 12017 12127 12069 12107 12075 12001
+48493 5
+48013 48255 48177 48187 48029
+12127 7
+12095 12117 12069 12009 12083 12035 12107
+48123 5
+48175 48255 48469 48285 48177
+22109 3
+22057 22101 22007
+12069 7
+12119 12095 12097 12105 12117 12127 12083
+48239 6
+48057 48469 48321 48481 48089 48285
+48013 8
+48311 48283 48163 48255 48493 48297 48325 48029
+48321 4
+48057 48039 48239 48481
+48255 7
+48297 48123 48175 48025 48177 48013 48493
+22051 4
+22075 22057 22071 22089
+48469 6
+48391 48175 48057 48239 48285 48123
+48507 6
+48127 48323 48163 48325 48463 48271
+48163 7
+48127 48283 48013 48311 48325 48507 48463
+48323 5
+48127 48479 48507 48463 48271
+12017 4
+12119 12083 12053 12075
+12119 6
+12101 12053 12069 12105 12083 12017
+48175 5
+48025 48469 48391 48123 48255
+12117 4
+12009 12127 12095 12069
+48297 7
+48249 48131 48311 48409 48025 48255 48013
+12095 6
+12097 12105 12009 12127 12117 12069
+48025 5
+48391 48409 48175 48297 48255
+12053 3
+12101 12119 12017
+48283 6
+48479 48127 48311 48131 48013 48163
+48311 6
+48479 48297 48131 48013 48283 48163
+48127 5
+48283 48479 48163 48507 48323
+12009 5
+12097 12061 12095 12117 12127
+48391 6
+48007 48409 48057 48469 48025 48175
+12101 5
+12057 12103 12105 12119 12053
+12105 10
+12081 12049 12057 12093 12055 12097 12095 12101 12119 12069
+12097 7
+12055 12061 12093 12009 12095 12105 12069
+48057 5
+48007 48321 48239 48391 48469
+48479 7
+48505 48131 48247 48311 48283 48127 48323
+12103 2
+12057 12101
+12057 5
+12081 12105 12049 12101 12103
+48409 6
+48249 48007 48391 48355 48025 48297
+48007 3
+48057 48391 48409
+48131 7
+48247 48249 48047 48297 48479 48311 48283
+48249 6
+48047 48273 48355 48409 48297 48131
+12061 4
+12111 12093 12097 12009
+48355 3
+48273 48249 48409
+12049 5
+12027 12081 12055 12105 12057
+12055 7
+12015 12027 12093 12043 12097 12049 12105
+12081 5
+12115 12049 12027 12105 12057
+12093 9
+12051 12043 12085 12099 12111 12061 12055 12097 12105
+48273 4
+48261 48047 48355 48249
+12111 3
+12085 12061 12093
+12115 3
+12015 12027 12081
+48247 5
+48427 48505 48047 48131 48479
+12027 6
+12015 12055 12043 12049 12115 12081
+48505 3
+48247 48427 48479
+48047 7
+48427 48261 48215 48273 48249 48247 48131
+12085 5
+12051 12099 12043 12111 12093
+12043 8
+12071 12051 12015 12085 12099 12093 12027 12055
+48261 4
+48215 48489 48273 48047
+12015 6
+12071 12043 12051 12055 12027 12115
+12099 5
+12011 12051 12085 12043 12093
+12051 8
+12021 12071 12099 12011 12085 12093 12043 12015
+48427 4
+48215 48047 48247 48505
+48215 5
+48061 48489 48261 48427 48047
+12071 4
+12051 12021 12043 12015
+48489 3
+48061 48215 48261
+12021 5
+12011 12025 12087 12051 12071
+48061 2
+48489 48215
+12011 4
+12025 12099 12021 12051
+12025 3
+12087 12011 12021
+12087 2
+12025 12021
+51019 8
+51161 51163 51143 51680 51009 51031 51067 51023
+51710 3
+37053 37029 51800
+51081 5
+51175 37131 51183 51025 51053
+51059 9
+51153 24033 24017 11001 51510 51013 51610 24031 51107
+51175 8
+51800 37073 37091 51093 51181 51081 37131 51183
+51083 7
+51117 37077 51037 37145 37033 51143 51031
+51163 8
+51125 51009 51019 51015 51530 51023 51005 51017
+51161 7
+51770 51019 51023 51067 51045 51063 51121
+51199 4
+51650 51700 51830 51095
+51153 4
+51179 51059 51107 51061
diff --git a/pysal/examples/spat-sym-us.mat b/pysal/examples/spat-sym-us.mat
new file mode 100644
index 0000000..678f0c0
Binary files /dev/null and b/pysal/examples/spat-sym-us.mat differ
diff --git a/pysal/examples/spat-sym-us.wk1 b/pysal/examples/spat-sym-us.wk1
new file mode 100644
index 0000000..9c9bd51
Binary files /dev/null and b/pysal/examples/spat-sym-us.wk1 differ
diff --git a/pysal/examples/spdep_listw2WB_columbus b/pysal/examples/spdep_listw2WB_columbus
new file mode 100644
index 0000000..958bada
--- /dev/null
+++ b/pysal/examples/spdep_listw2WB_columbus
@@ -0,0 +1,59 @@
+list(adj = c(2, 3, 1, 3, 4, 1, 2, 4, 5, 2, 3, 5, 8, 3, 4, 6, 
+8, 9, 11, 15, 5, 9, 8, 12, 13, 14, 4, 5, 7, 11, 12, 13, 5, 6, 
+10, 15, 20, 22, 25, 26, 9, 17, 20, 22, 5, 8, 12, 15, 16, 7, 8, 
+11, 13, 14, 16, 7, 8, 12, 14, 7, 12, 13, 16, 18, 19, 5, 9, 11, 
+16, 25, 26, 11, 12, 14, 15, 18, 24, 25, 10, 20, 23, 14, 16, 19, 
+24, 14, 18, 24, 9, 10, 17, 22, 23, 27, 32, 33, 35, 40, 24, 30, 
+34, 9, 10, 20, 26, 27, 28, 17, 20, 32, 16, 18, 19, 21, 25, 29, 
+30, 9, 15, 16, 24, 26, 28, 29, 9, 15, 22, 25, 28, 29, 20, 22, 
+28, 33, 22, 25, 26, 27, 29, 33, 35, 37, 38, 24, 25, 26, 28, 30, 
+37, 38, 21, 24, 29, 37, 34, 36, 20, 23, 40, 41, 20, 27, 28, 35, 
+21, 31, 36, 42, 20, 28, 33, 38, 40, 43, 44, 31, 34, 39, 42, 46, 
+28, 29, 30, 38, 43, 45, 28, 29, 35, 37, 43, 44, 36, 46, 20, 32, 
+35, 41, 47, 32, 40, 47, 34, 36, 35, 37, 38, 44, 45, 48, 35, 38, 
+43, 48, 49, 37, 43, 48, 49, 36, 39, 40, 41, 43, 44, 45, 49, 44, 
+45, 48), weights = c(0.5, 0.5, 0.333333333333333, 0.333333333333333, 
+0.333333333333333, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 
+0.25, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.25, 
+0.25, 0.25, 0.25, 0.2, 0.2, 0.2, 0.2, 0.2, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.25, 0.25, 0.25, 0.25, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.333333333333333, 0.333333333333333, 
+0.333333333333333, 0.25, 0.25, 0.25, 0.25, 0.333333333333333, 
+0.333333333333333, 0.333333333333333, 0.1, 0.1, 0.1, 0.1, 0.1, 
+0.1, 0.1, 0.1, 0.1, 0.1, 0.333333333333333, 0.333333333333333, 
+0.333333333333333, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.333333333333333, 
+0.333333333333333, 0.333333333333333, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.25, 0.25, 0.25, 0.25, 
+0.111111111111111, 0.111111111111111, 0.111111111111111, 0.111111111111111, 
+0.111111111111111, 0.111111111111111, 0.111111111111111, 0.111111111111111, 
+0.111111111111111, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.25, 0.25, 0.25, 0.25, 0.5, 0.5, 0.25, 0.25, 0.25, 0.25, 0.25, 
+0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.142857142857143, 0.142857142857143, 
+0.142857142857143, 0.142857142857143, 0.2, 0.2, 0.2, 0.2, 0.2, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.5, 0.5, 0.2, 0.2, 0.2, 0.2, 0.2, 0.333333333333333, 0.333333333333333, 
+0.333333333333333, 0.5, 0.5, 0.166666666666667, 0.166666666666667, 
+0.166666666666667, 0.166666666666667, 0.166666666666667, 0.166666666666667, 
+0.2, 0.2, 0.2, 0.2, 0.2, 0.25, 0.25, 0.25, 0.25, 0.5, 0.5, 0.5, 
+0.5, 0.25, 0.25, 0.25, 0.25, 0.333333333333333, 0.333333333333333, 
+0.333333333333333), num = c(2, 3, 4, 4, 7, 2, 4, 6, 8, 4, 5, 
+6, 4, 6, 6, 7, 3, 4, 3, 10, 3, 6, 3, 7, 7, 6, 4, 9, 7, 4, 2, 
+4, 4, 4, 7, 5, 6, 6, 2, 5, 3, 2, 6, 5, 4, 2, 2, 4, 3))
\ No newline at end of file
diff --git a/pysal/examples/spi_download.csv b/pysal/examples/spi_download.csv
new file mode 100644
index 0000000..a547490
--- /dev/null
+++ b/pysal/examples/spi_download.csv
@@ -0,0 +1,66 @@
+"Per capita personal income 2/","FIPS","AreaName","1969  ","1970  ","1971  ","1972  ","1973  ","1974  ","1975  ","1976  ","1977  ","1978  ","1979  ","1980  ","1981  ","1982  ","1983  ","1984  ","1985  ","1986  ","1987  ","1988  ","1989  ","1990  ","1991  ","1992  ","1993  ","1994  ","1995  ","1996  ","1997  ","1998  ","1999  ","2000  ","2001  ","2002  ","2003  ","2004  ","2005  ","2006  ","2007  ","2008  " 
+"400","00","United States 3/",3836 ,4084 ,4340 ,4717 ,5230 ,5708 ,6172 ,6754 ,7402 ,8243 ,9138 ,10091 ,11209 ,11901 ,12583 ,13807 ,14637 ,15338 ,16137 ,17244 ,18402 ,19354 ,19818 ,20799 ,21385 ,22297 ,23262 ,24442 ,25654 ,27258 ,28333 ,30318 ,31149 ,31470 ,32284 ,33899 ,35447 ,37728 ,39430 ,40208 
+"400","01","Alabama",2734 ,2962 ,3206 ,3526 ,3943 ,4336 ,4766 ,5313 ,5794 ,6464 ,7139 ,7825 ,8659 ,9152 ,9772 ,10752 ,11504 ,12080 ,12735 ,13639 ,14776 ,15618 ,16337 ,17264 ,17766 ,18656 ,19551 ,20245 ,21118 ,22217 ,22961 ,24070 ,25097 ,25816 ,26753 ,28405 ,29889 ,31484 ,32883 ,33768 
+"400","02","Alaska 3/",4769 ,5248 ,5583 ,5940 ,6805 ,8130 ,10666 ,12109 ,12388 ,12495 ,13199 ,14975 ,16528 ,18819 ,18843 ,19395 ,20104 ,19673 ,19244 ,19848 ,21525 ,22594 ,23092 ,23706 ,24478 ,25186 ,25778 ,26179 ,27197 ,27943 ,28538 ,30534 ,32274 ,33160 ,33543 ,34898 ,36812 ,38898 ,41153 ,44039 
+"400","04","Arizona",3495 ,3829 ,4131 ,4473 ,4904 ,5301 ,5535 ,6061 ,6615 ,7532 ,8509 ,9484 ,10582 ,10882 ,11630 ,12803 ,13636 ,14335 ,14896 ,15583 ,16281 ,16806 ,17253 ,17762 ,18371 ,19385 ,20164 ,21159 ,22231 ,23722 ,24583 ,26261 ,26937 ,27192 ,27859 ,29567 ,31563 ,33498 ,34413 ,34335 
+"400","05","Arkansas",2616 ,2840 ,3088 ,3409 ,3978 ,4368 ,4668 ,5157 ,5657 ,6469 ,7020 ,7521 ,8508 ,8947 ,9463 ,10486 ,11128 ,11625 ,12023 ,12855 ,13727 ,14402 ,15103 ,16204 ,16692 ,17496 ,18260 ,19170 ,19846 ,20798 ,21556 ,22578 ,23883 ,24299 ,25475 ,26905 ,27981 ,29573 ,31646 ,32397 
+"400","06","California",4532 ,4801 ,5027 ,5451 ,5944 ,6557 ,7136 ,7835 ,8572 ,9572 ,10719 ,11928 ,13144 ,13742 ,14517 ,15913 ,16777 ,17526 ,18447 ,19515 ,20448 ,21380 ,21734 ,22439 ,22744 ,23448 ,24498 ,25788 ,27063 ,29195 ,30679 ,33394 ,33869 ,34006 ,34922 ,36830 ,38670 ,41404 ,43221 ,43641 
+"400","08","Colorado",3687 ,4040 ,4399 ,4774 ,5283 ,5837 ,6322 ,6880 ,7535 ,8485 ,9502 ,10714 ,12078 ,12904 ,13538 ,14669 ,15267 ,15713 ,16205 ,17072 ,18398 ,19377 ,20123 ,21102 ,22152 ,23237 ,24575 ,25964 ,27402 ,29174 ,30919 ,33979 ,35305 ,35032 ,35160 ,36649 ,38539 ,40912 ,42444 ,42985 
+"400","09","Connecticut",4821 ,5071 ,5295 ,5692 ,6226 ,6794 ,7252 ,7878 ,8684 ,9656 ,10855 ,12321 ,13780 ,14849 ,15737 ,17496 ,18635 ,19895 ,21573 ,23557 ,25397 ,26198 ,26430 ,28287 ,29051 ,29891 ,31366 ,32835 ,34877 ,37226 ,38718 ,41921 ,43614 ,43346 ,43730 ,46417 ,48485 ,52702 ,55609 ,56272 
+"400","10","Delaware",4406 ,4594 ,4888 ,5297 ,5858 ,6336 ,6740 ,7345 ,7892 ,8624 ,9482 ,10756 ,11827 ,12684 ,13468 ,14748 ,15900 ,16589 ,17604 ,18819 ,20559 ,21209 ,22073 ,22500 ,22885 ,23487 ,24409 ,25808 ,26574 ,28397 ,29072 ,31007 ,32410 ,33212 ,33879 ,35753 ,37062 ,39168 ,40068 ,40519 
+"400","11","District of Columbia",4483 ,4970 ,5497 ,6026 ,6470 ,7252 ,8034 ,8728 ,9639 ,10367 ,11224 ,12218 ,13381 ,14570 ,15379 ,16986 ,18051 ,18932 ,20046 ,22157 ,23843 ,26015 ,27333 ,28694 ,29883 ,30804 ,31291 ,32981 ,34807 ,36503 ,37093 ,40485 ,44870 ,45442 ,47529 ,51458 ,55268 ,60080 ,63881 ,66119 
+"400","12","Florida",3658 ,3998 ,4282 ,4696 ,5217 ,5591 ,5901 ,6360 ,6972 ,7841 ,8731 ,9921 ,11101 ,11682 ,12640 ,13718 ,14643 ,15391 ,16193 ,17291 ,18734 ,19437 ,19776 ,20474 ,21197 ,21919 ,23014 ,24050 ,24919 ,26453 ,27329 ,29079 ,29834 ,30530 ,31364 ,33659 ,35769 ,38308 ,39204 ,39267 
+"400","13","Georgia",3149 ,3379 ,3650 ,4023 ,4481 ,4854 ,5157 ,5687 ,6200 ,6951 ,7656 ,8408 ,9393 ,10041 ,10857 ,12148 ,13052 ,13917 ,14679 ,15721 ,16658 ,17563 ,18110 ,19139 ,19866 ,20945 ,22023 ,23340 ,24287 ,25680 ,26772 ,28531 ,29205 ,29272 ,29683 ,30639 ,32176 ,33473 ,34650 ,34893 
+"400","15","Hawaii 3/",4532 ,5077 ,5319 ,5671 ,6128 ,6911 ,7396 ,7880 ,8338 ,9111 ,10098 ,11394 ,12235 ,12780 ,13883 ,14909 ,15591 ,16288 ,17118 ,18496 ,20285 ,21818 ,22763 ,24014 ,24566 ,24847 ,25160 ,25253 ,25892 ,26546 ,27467 ,29073 ,29506 ,30533 ,31520 ,33787 ,35851 ,38520 ,40907 ,42055 
+"400","16","Idaho",3264 ,3539 ,3739 ,4133 ,4686 ,5353 ,5573 ,6100 ,6479 ,7253 ,7796 ,8637 ,9378 ,9622 ,10331 ,10988 ,11497 ,11774 ,12428 ,13437 ,14632 ,15603 ,16015 ,17063 ,18110 ,18865 ,19665 ,20525 ,20961 ,22234 ,23269 ,24684 ,25656 ,26029 ,26472 ,28453 ,29642 ,31668 ,32905 ,33074 
+"400","17","Illinois",4333 ,4568 ,4867 ,5262 ,5892 ,6452 ,7008 ,7620 ,8359 ,9222 ,10140 ,10980 ,12150 ,12868 ,13420 ,14760 ,15524 ,16304 ,17236 ,18551 ,19672 ,20835 ,21148 ,22553 ,23068 ,24181 ,25382 ,26806 ,28130 ,29746 ,30619 ,32636 ,33183 ,33696 ,34569 ,35957 ,37168 ,39549 ,41569 ,42347 
+"400","18","Indiana",3692 ,3791 ,4086 ,4439 ,5083 ,5423 ,5837 ,6504 ,7167 ,7950 ,8726 ,9353 ,10287 ,10673 ,11184 ,12402 ,13065 ,13739 ,14522 ,15424 ,16668 ,17454 ,17865 ,19099 ,19885 ,20973 ,21644 ,22655 ,23607 ,25169 ,25899 ,27461 ,28054 ,28534 ,29588 ,30645 ,31302 ,32881 ,33756 ,34605 
+"400","19","Iowa",3665 ,3878 ,4025 ,4498 ,5409 ,5603 ,6231 ,6596 ,7267 ,8379 ,8994 ,9573 ,10840 ,11236 ,11529 ,12815 ,13370 ,13993 ,14786 ,15206 ,16484 ,17350 ,17700 ,18789 ,18700 ,20367 ,21006 ,22787 ,23747 ,24898 ,25539 ,27295 ,27900 ,28832 ,29444 ,31674 ,32306 ,33853 ,35699 ,37402 
+"400","20","Kansas",3555 ,3824 ,4152 ,4619 ,5275 ,5710 ,6206 ,6716 ,7279 ,8020 ,9119 ,9939 ,11197 ,12056 ,12475 ,13629 ,14312 ,14957 ,15567 ,16207 ,17008 ,18034 ,18605 ,19710 ,20371 ,21235 ,21870 ,23255 ,24504 ,26032 ,26826 ,28479 ,29670 ,29759 ,30822 ,31918 ,33130 ,35756 ,37389 ,38820 
+"400","21","Kentucky",2964 ,3176 ,3383 ,3705 ,4132 ,4595 ,4940 ,5457 ,6075 ,6758 ,7603 ,8113 ,8992 ,9552 ,9861 ,11035 ,11503 ,11819 ,12485 ,13553 ,14530 ,15360 ,16142 ,17150 ,17554 ,18308 ,18978 ,19982 ,21021 ,22244 ,23032 ,24786 ,25336 ,25838 ,26348 ,27518 ,28557 ,30129 ,31206 ,32076 
+"400","22","Louisiana",2886 ,3089 ,3310 ,3576 ,3977 ,4493 ,4958 ,5549 ,6107 ,6912 ,7744 ,8767 ,9991 ,10561 ,10871 ,11595 ,12024 ,11998 ,12219 ,13029 ,13963 ,15171 ,15931 ,16857 ,17559 ,18641 ,19367 ,20155 ,21088 ,22119 ,22458 ,23570 ,25373 ,25948 ,26708 ,28066 ,30100 ,33750 ,35363 ,36424 
+"400","23","Maine",3138 ,3413 ,3590 ,3858 ,4308 ,4747 ,5026 ,5698 ,6116 ,6700 ,7412 ,8333 ,9164 ,9901 ,10548 ,11628 ,12462 ,13406 ,14434 ,15593 ,16683 ,17211 ,17457 ,18214 ,18690 ,19552 ,20372 ,21507 ,22566 ,24171 ,25151 ,26697 ,28204 ,28898 ,29939 ,31474 ,32022 ,33735 ,35078 ,36457 
+"400","24","Maryland",4199 ,4558 ,4881 ,5281 ,5805 ,6368 ,6890 ,7534 ,8174 ,9030 ,9971 ,11164 ,12335 ,13334 ,14281 ,15701 ,16935 ,17932 ,19067 ,20468 ,21733 ,22681 ,23282 ,24112 ,24805 ,25780 ,26618 ,27689 ,29000 ,30742 ,32216 ,34681 ,36272 ,37164 ,38212 ,40625 ,42601 ,45121 ,47050 ,48378 
+"400","25","Massachusetts",4185 ,4472 ,4743 ,5102 ,5541 ,6011 ,6453 ,6993 ,7611 ,8422 ,9371 ,10570 ,11744 ,12892 ,13942 ,15639 ,16798 ,18003 ,19397 ,21127 ,22095 ,22797 ,23314 ,24422 ,25182 ,26393 ,27662 ,29279 ,30911 ,33006 ,34671 ,38213 ,39500 ,39512 ,40161 ,42123 ,43897 ,47330 ,49885 ,51254 
+"400","26","Michigan",4147 ,4198 ,4501 ,4970 ,5552 ,5926 ,6307 ,7092 ,7950 ,8800 ,9629 ,10291 ,11075 ,11522 ,12284 ,13588 ,14685 ,15520 ,15964 ,16894 ,18072 ,18719 ,19129 ,20179 ,21046 ,22593 ,23428 ,24279 ,25349 ,26903 ,27858 ,29392 ,29977 ,30188 ,31214 ,31650 ,32265 ,33198 ,34188 ,34949 
+"400","27","Minnesota",3775 ,4050 ,4270 ,4626 ,5421 ,5824 ,6231 ,6725 ,7538 ,8420 ,9312 ,10229 ,11258 ,12015 ,12649 ,14250 ,15023 ,15810 ,16737 ,17351 ,18744 ,19710 ,20129 ,21306 ,21601 ,23003 ,24144 ,25871 ,27095 ,29273 ,30562 ,32598 ,33345 ,34076 ,35289 ,37079 ,37991 ,40015 ,41764 ,43037 
+"400","28","Mississippi",2403 ,2628 ,2855 ,3194 ,3597 ,3920 ,4207 ,4746 ,5233 ,5768 ,6491 ,7005 ,7842 ,8231 ,8570 ,9377 ,9857 ,10174 ,10799 ,11566 ,12499 ,13117 ,13749 ,14651 ,15426 ,16512 ,17176 ,18079 ,18880 ,19947 ,20555 ,21556 ,22819 ,23148 ,23999 ,25169 ,26836 ,28010 ,29549 ,30399 
+"400","29","Missouri",3569 ,3855 ,4118 ,4453 ,4935 ,5280 ,5759 ,6313 ,6986 ,7753 ,8632 ,9306 ,10383 ,11110 ,11808 ,13011 ,13836 ,14471 ,15139 ,15926 ,16928 ,17582 ,18320 ,19327 ,19951 ,21035 ,21832 ,22901 ,24104 ,25419 ,26218 ,27892 ,28624 ,29266 ,30239 ,31435 ,32278 ,34062 ,35308 ,36631 
+"400","30","Montana",3294 ,3624 ,3790 ,4350 ,5000 ,5367 ,5814 ,6202 ,6622 ,7655 ,8192 ,9038 ,10187 ,10628 ,11054 ,11617 ,11762 ,12350 ,12848 ,13241 ,14569 ,15346 ,16250 ,16859 ,17787 ,17989 ,18546 ,19261 ,20033 ,21459 ,22045 ,23470 ,25315 ,25685 ,27000 ,28613 ,30141 ,32204 ,33948 ,34644 
+"400","31","Nebraska",3571 ,3793 ,4119 ,4525 ,5259 ,5452 ,6182 ,6445 ,6961 ,8030 ,8646 ,9155 ,10579 ,11361 ,11749 ,13040 ,13756 ,14232 ,14947 ,15915 ,16790 ,17948 ,18523 ,19403 ,19836 ,21024 ,22008 ,23853 ,24359 ,25859 ,27017 ,28600 ,29906 ,30329 ,32141 ,33279 ,34331 ,35726 ,37908 ,39150 
+"400","32","Nevada",4530 ,4932 ,5218 ,5553 ,6093 ,6479 ,7029 ,7731 ,8519 ,9718 ,10639 ,11679 ,12711 ,13104 ,13600 ,14501 ,15310 ,15992 ,16713 ,18052 ,19165 ,20042 ,20777 ,22099 ,22833 ,23892 ,24914 ,26239 ,27118 ,28624 ,29650 ,30985 ,31186 ,31336 ,32710 ,35350 ,38231 ,39376 ,41145 ,41182 
+"400","33","New Hampshire",3745 ,3883 ,4091 ,4411 ,4863 ,5262 ,5602 ,6249 ,6866 ,7730 ,8686 ,9816 ,10985 ,11979 ,13015 ,14455 ,15663 ,16819 ,18088 ,19361 ,20235 ,20236 ,21056 ,21861 ,22311 ,23642 ,24845 ,26649 ,27546 ,29664 ,31036 ,34089 ,34716 ,35126 ,35699 ,37612 ,38412 ,40999 ,42831 ,43623 
+"400","34","New Jersey",4500 ,4813 ,5113 ,5513 ,6027 ,6561 ,7053 ,7699 ,8439 ,9359 ,10372 ,11676 ,12986 ,13997 ,15027 ,16506 ,17571 ,18618 ,19952 ,21763 ,23235 ,24354 ,24754 ,26270 ,26799 ,27593 ,29022 ,30613 ,32326 ,34212 ,35360 ,38666 ,39680 ,39964 ,40504 ,42406 ,43994 ,47655 ,50265 ,51358 
+"400","35","New Mexico",2920 ,3189 ,3416 ,3748 ,4119 ,4551 ,5050 ,5520 ,6059 ,6802 ,7545 ,8331 ,9286 ,9916 ,10434 ,11233 ,11959 ,12217 ,12638 ,13227 ,14009 ,14823 ,15577 ,16260 ,17039 ,17772 ,18617 ,19289 ,19968 ,21059 ,21461 ,22752 ,24796 ,25063 ,25773 ,27300 ,28931 ,30587 ,32163 ,33430 
+"400","36","New York",4573 ,4868 ,5166 ,5528 ,5963 ,6474 ,6970 ,7469 ,8132 ,8928 ,9819 ,10985 ,12260 ,13321 ,14267 ,15727 ,16761 ,17833 ,18978 ,20720 ,22202 ,23710 ,23685 ,24693 ,25089 ,25807 ,27106 ,28497 ,30012 ,31416 ,32625 ,34629 ,35458 ,35417 ,36165 ,38398 ,40678 ,43973 ,47612 ,48753 
+"400","37","North Carolina",3046 ,3273 ,3500 ,3890 ,4353 ,4734 ,5048 ,5581 ,6041 ,6744 ,7401 ,8183 ,9142 ,9711 ,10500 ,11771 ,12592 ,13393 ,14241 ,15341 ,16454 ,17194 ,17691 ,18886 ,19704 ,20630 ,21615 ,22714 ,23945 ,25301 ,26326 ,27916 ,28394 ,28479 ,28979 ,30586 ,32066 ,33640 ,34952 ,35344 
+"400","38","North Dakota",3099 ,3257 ,3715 ,4412 ,6207 ,6136 ,6390 ,6206 ,6421 ,8095 ,8290 ,7894 ,10300 ,11009 ,11417 ,12268 ,12728 ,13025 ,13589 ,12653 ,14371 ,15866 ,16167 ,17639 ,17696 ,19156 ,19004 ,21279 ,20854 ,23177 ,23502 ,25625 ,26699 ,27369 ,29761 ,30339 ,32353 ,33602 ,36695 ,39870 
+"400","39","Ohio",3911 ,4088 ,4322 ,4688 ,5209 ,5724 ,6101 ,6751 ,7492 ,8283 ,9172 ,10022 ,10922 ,11470 ,12173 ,13437 ,14249 ,14873 ,15529 ,16549 ,17672 ,18638 ,19013 ,20025 ,20676 ,21818 ,22653 ,23545 ,24912 ,26418 ,27293 ,28694 ,29280 ,29855 ,30698 ,31617 ,32498 ,34093 ,35307 ,36021 
+"400","40","Oklahoma",3204 ,3475 ,3710 ,4016 ,4515 ,4976 ,5493 ,5974 ,6570 ,7343 ,8395 ,9487 ,10943 ,11816 ,11737 ,12618 ,13171 ,13312 ,13455 ,14166 ,15192 ,16077 ,16434 ,17269 ,17772 ,18427 ,18973 ,19936 ,20899 ,21949 ,22757 ,24606 ,26228 ,26232 ,26929 ,28810 ,30492 ,33280 ,34336 ,35985 
+"400","41","Oregon",3675 ,3927 ,4197 ,4605 ,5109 ,5701 ,6185 ,6898 ,7526 ,8409 ,9295 ,10086 ,10802 ,11116 ,11849 ,12811 ,13429 ,14059 ,14724 ,15776 ,16956 ,17895 ,18469 ,19201 ,20077 ,21219 ,22531 ,23751 ,24854 ,26016 ,27016 ,28719 ,29238 ,29761 ,30549 ,31598 ,32488 ,34623 ,35712 ,36297 
+"400","42","Pennsylvania",3804 ,4069 ,4289 ,4678 ,5155 ,5692 ,6182 ,6796 ,7473 ,8262 ,9142 ,10040 ,11100 ,11880 ,12448 ,13468 ,14318 ,15017 ,15857 ,17067 ,18412 ,19433 ,20171 ,21050 ,21655 ,22355 ,23226 ,24384 ,25566 ,27367 ,28348 ,30111 ,30704 ,31506 ,32427 ,33852 ,34978 ,37326 ,39058 ,40140 
+"400","44","Rhode Island",3847 ,4098 ,4285 ,4613 ,4955 ,5385 ,5851 ,6402 ,6975 ,7636 ,8497 ,9645 ,10733 ,11540 ,12389 ,13629 ,14538 ,15445 ,16387 ,17909 ,19389 ,19821 ,19981 ,20820 ,21600 ,22199 ,23364 ,24299 ,25621 ,26945 ,27741 ,29485 ,31166 ,32158 ,33469 ,35090 ,36233 ,38392 ,40219 ,41368 
+"400","45","South Carolina",2821 ,3055 ,3265 ,3594 ,4016 ,4451 ,4730 ,5255 ,5668 ,6301 ,6988 ,7736 ,8606 ,9078 ,9790 ,10900 ,11590 ,12194 ,12939 ,13906 ,14931 ,15844 ,16256 ,17010 ,17651 ,18579 ,19384 ,20359 ,21287 ,22573 ,23550 ,25082 ,25653 ,26080 ,26704 ,27933 ,29270 ,31031 ,32065 ,32666 
+"400","46","South Dakota",3030 ,3286 ,3564 ,4089 ,5175 ,5187 ,5706 ,5600 ,6352 ,7302 ,8059 ,8054 ,9410 ,9946 ,10293 ,11621 ,11898 ,12452 ,13133 ,13631 ,14653 ,16075 ,16666 ,17740 ,18248 ,19503 ,19610 ,21672 ,22085 ,23736 ,24816 ,26428 ,27870 ,28073 ,30452 ,32175 ,33150 ,33767 ,36489 ,38661 
+"400","47","Tennessee",2957 ,3176 ,3439 ,3800 ,4284 ,4685 ,5026 ,5570 ,6089 ,6862 ,7555 ,8227 ,9120 ,9696 ,10293 ,11413 ,12152 ,12895 ,13754 ,14783 ,15718 ,16574 ,17242 ,18527 ,19331 ,20283 ,21339 ,22136 ,23031 ,24462 ,25370 ,26692 ,27535 ,28143 ,29026 ,30297 ,31360 ,32986 ,34287 ,34976 
+"400","48","Texas",3364 ,3628 ,3840 ,4175 ,4659 ,5170 ,5738 ,6347 ,6943 ,7856 ,8832 ,9870 ,11320 ,11965 ,12348 ,13377 ,14110 ,14182 ,14453 ,15245 ,16165 ,17260 ,17763 ,18765 ,19413 ,20161 ,21070 ,22260 ,23812 ,25376 ,26399 ,28504 ,29166 ,28935 ,29581 ,31073 ,33172 ,35275 ,36829 ,37774 
+"400","49","Utah",3105 ,3389 ,3649 ,3971 ,4316 ,4738 ,5173 ,5755 ,6344 ,7055 ,7792 ,8492 ,9347 ,9953 ,10506 ,11371 ,11926 ,12322 ,12652 ,13162 ,13941 ,14847 ,15479 ,16135 ,16845 ,17775 ,18765 ,19899 ,21001 ,22188 ,22943 ,24519 ,25536 ,25648 ,25830 ,26827 ,28599 ,30320 ,31739 ,31944 
+"400","50","Vermont",3380 ,3625 ,3848 ,4163 ,4528 ,4855 ,5203 ,5747 ,6153 ,6979 ,7756 ,8599 ,9650 ,10324 ,10930 ,11977 ,12867 ,13731 ,14755 ,15822 ,17195 ,17643 ,17869 ,18941 ,19446 ,20255 ,21057 ,22106 ,23168 ,24921 ,26268 ,28184 ,29482 ,30013 ,31013 ,32713 ,33416 ,36021 ,37717 ,38686 
+"400","51","Virginia",3560 ,3792 ,4091 ,4486 ,4971 ,5485 ,5960 ,6549 ,7193 ,8025 ,8950 ,10107 ,11227 ,12095 ,12993 ,14298 ,15284 ,16188 ,17191 ,18442 ,19614 ,20312 ,20953 ,21842 ,22596 ,23534 ,24360 ,25354 ,26695 ,28199 ,29617 ,31641 ,33263 ,33776 ,35029 ,36912 ,38980 ,41367 ,43275 ,44224 
+"400","53","Washington",4085 ,4189 ,4361 ,4713 ,5284 ,5892 ,6535 ,7165 ,7797 ,8820 ,9847 ,10810 ,11834 ,12435 ,13144 ,13972 ,14619 ,15422 ,16090 ,17055 ,18405 ,19637 ,20583 ,21581 ,22139 ,22981 ,23778 ,25280 ,26749 ,28821 ,30521 ,32407 ,32950 ,33107 ,33869 ,35986 ,36773 ,39623 ,42020 ,42857 
+"400","54","West Virginia",2792 ,3109 ,3369 ,3673 ,4009 ,4438 ,4973 ,5468 ,6026 ,6667 ,7373 ,8066 ,8767 ,9340 ,9575 ,10355 ,10851 ,11212 ,11619 ,12532 ,13398 ,14436 ,15086 ,16081 ,16549 ,17269 ,17817 ,18567 ,19373 ,20472 ,21049 ,22174 ,23610 ,24388 ,24916 ,25784 ,26684 ,28722 ,30144 ,31641 
+"400","55","Wisconsin",3747 ,3981 ,4241 ,4601 ,5127 ,5619 ,6090 ,6679 ,7403 ,8247 ,9197 ,10085 ,10973 ,11573 ,12026 ,13112 ,13719 ,14424 ,15182 ,15953 ,17192 ,17986 ,18494 ,19674 ,20398 ,21550 ,22387 ,23509 ,24777 ,26619 ,27652 ,29140 ,30102 ,30809 ,31656 ,32736 ,33689 ,35665 ,37008 ,37767 
+"400","56","Wyoming",3587 ,3910 ,4257 ,4692 ,5389 ,6150 ,6721 ,7224 ,8152 ,9366 ,10515 ,11668 ,12808 ,13349 ,12703 ,13416 ,14137 ,14244 ,14287 ,14821 ,16382 ,17910 ,18589 ,19344 ,20065 ,20741 ,21358 ,22233 ,23774 ,25496 ,27192 ,29280 ,31319 ,32082 ,33929 ,36274 ,39464 ,44700 ,46741 ,48608 
+"400","91","New England",4175 ,4438 ,4674 ,5025 ,5477 ,5954 ,6376 ,6954 ,7586 ,8407 ,9381 ,10598 ,11800 ,12833 ,13770 ,15342 ,16440 ,17592 ,18958 ,20612 ,21848 ,22462 ,22867 ,24077 ,24773 ,25804 ,27048 ,28521 ,30087 ,32128 ,33581 ,36603 ,37979 ,38113 ,38788 ,40842 ,42391 ,45652 ,48027 ,49146 
+"400","92","Mideast",4308 ,4606 ,4889 ,5272 ,5742 ,6273 ,6771 ,7346 ,8026 ,8845 ,9758 ,10874 ,12081 ,13048 ,13901 ,15241 ,16257 ,17209 ,18303 ,19874 ,21301 ,22542 ,22899 ,23939 ,24460 ,25217 ,26357 ,27691 ,29125 ,30776 ,31932 ,34183 ,35133 ,35509 ,36317 ,38317 ,40137 ,43156 ,45859 ,47001 
+"400","93","Great Lakes",4034 ,4195 ,4473 ,4867 ,5447 ,5918 ,6360 ,7026 ,7780 ,8609 ,9483 ,10263 ,11215 ,11773 ,12394 ,13661 ,14479 ,15208 ,15921 ,16955 ,18117 ,19021 ,19407 ,20585 ,21273 ,22491 ,23404 ,24476 ,25699 ,27294 ,28187 ,29819 ,30441 ,30922 ,31843 ,32824 ,33717 ,35430 ,36793 ,37566 
+"400","94","Plains",3592 ,3849 ,4104 ,4514 ,5265 ,5566 ,6073 ,6492 ,7154 ,8074 ,8886 ,9540 ,10757 ,11451 ,11975 ,13279 ,13984 ,14628 ,15375 ,15986 ,17121 ,18048 ,18589 ,19674 ,20071 ,21325 ,22119 ,23661 ,24692 ,26299 ,27231 ,29018 ,29896 ,30496 ,31667 ,33154 ,34096 ,35926 ,37647 ,39115 
+"400","95","Southeast",3078 ,3325 ,3580 ,3941 ,4406 ,4825 ,5185 ,5713 ,6255 ,7009 ,7778 ,8629 ,9638 ,10230 ,10918 ,12024 ,12794 ,13439 ,14183 ,15229 ,16353 ,17186 ,17757 ,18709 ,19403 ,20305 ,21233 ,22230 ,23215 ,24561 ,25481 ,27050 ,27996 ,28477 ,29255 ,30858 ,32514 ,34516 ,35800 ,36336 
+"400","96","Southwest",3325 ,3599 ,3826 ,4159 ,4633 ,5119 ,5635 ,6206 ,6794 ,7676 ,8649 ,9672 ,11050 ,11684 ,12057 ,13070 ,13792 ,13971 ,14277 ,15035 ,15930 ,16902 ,17394 ,18284 ,18917 ,19693 ,20536 ,21639 ,22997 ,24467 ,25407 ,27372 ,28227 ,28133 ,28793 ,30366 ,32378 ,34499 ,35892 ,36745 
+"400","97","Rocky Mountain",3441 ,3765 ,4053 ,4451 ,4964 ,5494 ,5930 ,6461 ,7049 ,7950 ,8789 ,9787 ,10909 ,11542 ,12055 ,12962 ,13497 ,13907 ,14359 ,15087 ,16278 ,17248 ,17932 ,18792 ,19729 ,20614 ,21672 ,22829 ,23993 ,25528 ,26836 ,29111 ,30419 ,30447 ,30818 ,32289 ,34061 ,36312 ,37799 ,38275 
+"400","98","Far West 3/",4412 ,4671 ,4898 ,5308 ,5815 ,6435 ,7037 ,7734 ,8434 ,9412 ,10506 ,11654 ,12796 ,13395 ,14162 ,15425 ,16241 ,16974 ,17823 ,18888 ,19930 ,20913 ,21381 ,22154 ,22556 ,23307 ,24315 ,25582 ,26834 ,28805 ,30236 ,32678 ,33170 ,33368 ,34270 ,36176 ,37869 ,40481 ,42331 ,42845 
+"Source: Regional Economic Information System, Bureau of Economic Analysis, U.S. Department of Commerce"
+"=HYPERLINK(""http://www.bea.gov/regional/docs/footnotes.cfm?tablename=SA1-3"",""SA1-3 Footnotes"")","http://www.bea.gov/regional/docs/footnotes.cfm?tablename=SA1-3"
+"Regional Economic Information System"
+"Bureau of Economic Analysis"
+"'October 2009'"
diff --git a/pysal/examples/stata_full.txt b/pysal/examples/stata_full.txt
new file mode 100644
index 0000000..b4bc27c
--- /dev/null
+++ b/pysal/examples/stata_full.txt
@@ -0,0 +1,57 @@
+56
+1 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 .125 .125 0 0
+2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 .125 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0
+3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 .125 0 0 0 0 0 .125 0 0 0 0
+4 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0
+5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+6 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 .125 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0
+7 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 .125 0 0
+8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125
+9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+10 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+11 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+12 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+13 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 0
+14 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+15 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+16 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 .125 0 0 .125 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0
+18 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 .125 0
+19 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125
+20 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0
+21 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0
+22 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+23 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0
+24 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 .125 0 .125 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+25 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+26 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 .125
+27 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 0
+28 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 .125 0 0 .125 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0
+29 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+30 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 .125 0 .125 0 .125 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0
+31 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+32 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125 .125 .125 0 0 0 0 0 0 0 0 .125 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0
+33 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 .125 .125 0 0 .125 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+34 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 0 0
+35 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 .125 0 0 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0
+36 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 0 0 0 0 0 .125 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+37 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 .125 0 0 0 0 0 0 0 0 0 0
+38 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 .125 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125
+39 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 .125 0 .125 0 .125 .125 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+40 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0
+41 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0
+42 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0
+43 0 0 .125 .125 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0
+44 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 .125
+45 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0
+46 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 .125 .125 0 0 0
+47 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+48 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 .125 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+49 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 .125 0 0 .125 0 .125 0 .125 0 0 0 0 0 .125 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+50 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+51 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0
+52 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 .125 0 0 0
+53 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 .125 0 .125 0 0
+54 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0
+55 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 .125 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125
+56 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 0 .125 0 0 0 0 0 .125 0 0 0 0 0 0 0 0 0 0 .125 0
diff --git a/pysal/examples/stata_sparse.txt b/pysal/examples/stata_sparse.txt
new file mode 100644
index 0000000..5d50281
--- /dev/null
+++ b/pysal/examples/stata_sparse.txt
@@ -0,0 +1,57 @@
+56
+1 7 45 51 53 54
+2 19 28 29 32 38 48
+3 43 46 52
+4 6 43
+5
+6 4 30 34 37 43 49
+7 1 53 54
+8 16 22 38 44 56
+9
+10
+11
+12
+13 18 26 50 55
+14
+15
+16 8 22 29 32 38 40
+17 20 42 48
+18 13 44 55
+19 2 20 26 38 48 56
+20 17 19 26 48
+21 45
+22 8 16 31 40
+23 25 27 41 49
+24 30 33 35 39
+25 23 27
+26 13 19 20 50 55 56
+27 23 25 34 47 49
+28 2 32 36 39 42 48
+29 2 16 32 38
+30 6 24 32 35 37 39 49
+31 22 40 41
+32 2 16 28 29 30 39 40 49
+33 24 35 36 39 42
+34 6 27 47 49
+35 24 30 33 37 46
+36 28 33 39 42
+37 6 30 35 43 46
+38 2 8 16 19 29 44 56
+39 24 28 30 32 33 36
+40 16 22 31 32 41 49
+41 23 31 40 49
+42 17 28 33 36 48
+43 3 4 6 37 46
+44 8 18 38 55 56
+45 1 21 51
+46 3 35 37 43 52 53
+47 27 34
+48 2 17 19 20 28 42
+49 6 23 27 30 32 34 40 41
+50 13 26
+51 1 45 53
+52 3 46 53
+53 1 7 46 51 52 54
+54 1 7 53
+55 13 18 26 44 56
+56 8 19 26 38 44 55
diff --git a/pysal/examples/states48.gal b/pysal/examples/states48.gal
new file mode 100644
index 0000000..ed8fbfa
--- /dev/null
+++ b/pysal/examples/states48.gal
@@ -0,0 +1,97 @@
+48
+0 4
+7 8 21 39
+1 5
+3 4 25 28 41
+2 6
+15 21 22 33 39 40
+3 3
+1 25 34
+4 7
+1 13 24 28 33 41 47
+5 3
+18 29 36
+6 3
+17 27 35
+7 2
+0 8
+8 5
+0 7 30 37 39
+9 6
+23 25 34 41 44 47
+10 5
+11 12 14 22 46
+11 4
+10 14 19 32
+12 6
+10 20 22 24 38 46
+13 4
+4 22 24 33
+14 7
+10 11 22 32 39 43 45
+15 3
+2 21 40
+16 1
+26
+17 4
+6 35 43 45
+18 5
+5 26 29 36 42
+19 3
+11 32 46
+20 4
+12 31 38 46
+21 4
+0 2 15 39
+22 8
+2 10 12 13 14 24 33 39
+23 4
+9 31 38 47
+24 6
+4 12 13 22 38 47
+25 5
+1 3 9 34 41
+26 3
+16 18 42
+27 3
+6 29 35
+28 5
+1 4 33 40 41
+29 5
+5 18 27 35 42
+30 4
+8 37 39 43
+31 3
+20 23 38
+32 5
+11 14 19 35 45
+33 6
+2 4 13 22 28 40
+34 4
+3 9 25 44
+35 6
+6 17 27 29 32 45
+36 2
+5 18
+37 2
+8 30
+38 6
+12 20 23 24 31 47
+39 8
+0 2 8 14 21 22 30 43
+40 4
+2 15 28 33
+41 6
+1 4 9 25 28 47
+42 3
+18 26 29
+43 5
+14 17 30 39 45
+44 2
+9 34
+45 5
+14 17 32 35 43
+46 4
+10 12 19 20
+47 6
+4 9 23 24 38 41
diff --git a/pysal/examples/stl.gal b/pysal/examples/stl.gal
new file mode 100644
index 0000000..be30409
--- /dev/null
+++ b/pysal/examples/stl.gal
@@ -0,0 +1,157 @@
+78
+1 3
+7 3 6
+2 3
+10 8 5
+3 3
+7 4 1
+4 4
+9 5 3 7
+5 4
+10 4 9 2
+6 5
+16 12 11 7 1
+7 8
+19 9 11 18 1 6 3 4
+8 3
+15 10 2
+9 7
+20 19 13 10 7 4 5
+10 9
+17 15 9 13 20 21 5 2 8
+11 4
+18 16 6 7
+12 3
+16 14 6
+13 3
+20 9 10
+14 3
+22 16 12
+15 4
+23 10 17 8
+16 8
+28 27 18 22 14 12 6 11
+17 6
+30 26 23 21 10 15
+18 7
+33 32 19 16 27 11 7
+19 6
+24 20 18 33 7 9
+20 6
+24 21 19 9 13 10
+21 6
+35 26 24 20 17 10
+22 4
+29 28 14 16
+23 5
+31 25 17 30 15
+24 5
+35 33 21 19 20
+25 3
+42 31 23
+26 5
+34 30 21 35 17
+27 7
+41 39 32 28 36 16 18
+28 5
+29 36 27 22 16
+29 4
+38 36 22 28
+30 6
+43 34 31 26 17 23
+31 6
+44 42 43 30 23 25
+32 4
+33 27 41 18
+33 8
+46 40 35 32 41 18 24 19
+34 5
+43 35 45 26 30
+35 7
+45 37 33 21 24 34 26
+36 6
+47 39 38 29 28 27
+37 6
+51 45 40 46 49 35
+38 4
+48 47 29 36
+39 6
+52 50 41 47 36 27
+40 3
+46 37 33
+41 6
+50 46 39 27 33 32
+42 4
+53 31 44 25
+43 8
+61 59 54 44 45 34 30 31
+44 5
+54 53 43 31 42
+45 7
+60 59 51 37 35 43 34
+46 7
+49 50 57 41 33 40 37
+47 7
+56 55 52 48 38 36 39
+48 3
+55 47 38
+49 5
+63 51 46 57 37
+50 6
+58 57 52 39 41 46
+51 6
+64 60 49 63 37 45
+52 6
+62 58 56 47 39 50
+53 4
+65 54 44 42
+54 5
+65 61 43 44 53
+55 3
+56 48 47
+56 4
+62 55 47 52
+57 7
+67 63 66 58 50 49 46
+58 5
+66 52 62 50 57
+59 6
+69 61 60 70 45 43
+60 5
+70 64 51 45 59
+61 6
+72 65 59 69 43 54
+62 5
+68 66 56 52 58
+63 5
+64 57 67 49 51
+64 6
+71 70 63 67 51 60
+65 4
+61 72 54 53
+66 6
+73 67 62 68 58 57
+67 7
+76 75 71 66 57 64 63
+68 3
+73 62 66
+69 6
+77 72 70 74 59 61
+70 7
+74 71 78 64 60 69 59
+71 5
+78 75 67 64 70
+72 4
+77 69 61 65
+73 3
+76 66 68
+74 4
+77 78 70 69
+75 4
+78 76 67 71
+76 3
+73 67 75
+77 3
+74 69 72
+78 4
+75 71 74 70
diff --git a/pysal/examples/stl_hom.csv b/pysal/examples/stl_hom.csv
new file mode 100644
index 0000000..f21c203
--- /dev/null
+++ b/pysal/examples/stl_hom.csv
@@ -0,0 +1,79 @@
+WKT,NAME,STATE_NAME,STATE_FIPS,CNTY_FIPS,FIPS,FIPSNO,HR7984,HR8488,HR8893,HC7984,HC8488,HC8893,PO7984,PO8488,PO8893,PE77,PE82,PE87,RDAC80,RDAC85,RDAC90
+"POLYGON ((-89.585220336914062 39.978794097900391,-89.581146240234375 40.094867706298828,-89.603988647460938 40.095306396484375,-89.60589599609375 40.136119842529297,-89.6103515625 40.3251953125,-89.269027709960938 40.329566955566406,-89.268562316894531 40.285579681396484,-89.154655456542969 40.285774230957031,-89.152763366699219 40.054969787597656,-89.151618957519531 39.919403076171875,-89.224777221679688 39.918678283691406,-89.411857604980469 39.918041229248047,-89.412437438964844 39.9 [...]
+"POLYGON ((-90.921539306640625 39.847461700439453,-90.922317504882812 39.765838623046875,-91.373420715332031 39.761272430419922,-91.3817138671875 39.80377197265625,-91.449188232421875 39.863048553466797,-91.45098876953125 39.885242462158203,-91.434051513671875 39.901828765869141,-91.430389404296875 39.921836853027344,-91.447242736816406 39.946063995361328,-91.487289428710938 40.005752563476562,-91.504005432128906 40.06671142578125,-91.516128540039062 40.134544372558594,-91.50654602050781 [...]
+"POLYGON ((-89.997596740722656 39.906204223632812,-90.000251770019531 40.114761352539062,-89.965545654296875 40.140285491943359,-89.924041748046875 40.140884399414062,-89.892723083496094 40.133277893066406,-89.879554748535156 40.144657135009766,-89.865646362304688 40.130641937255859,-89.845245361328125 40.140674591064453,-89.828346252441406 40.126659393310547,-89.802490234375 40.128086090087891,-89.778472900390625 40.136756896972656,-89.756202697753906 40.132270812988281,-89.717750549316 [...]
+"POLYGON ((-90.585357666015625 39.880741119384766,-90.542747497558594 39.917827606201172,-90.514976501464844 39.989692687988281,-90.442146301269531 40.020187377929688,-90.427772521972656 40.069713592529297,-90.40264892578125 40.078514099121094,-90.38873291015625 40.119422912597656,-90.353958129882812 40.127822875976562,-90.314056396484375 40.109951019287109,-90.289535522460938 40.067028045654297,-90.272071838378906 40.063060760498047,-90.257720947265625 40.0699462890625,-90.2377548217773 [...]
+"POLYGON ((-90.577041625976562 39.845447540283203,-90.87890625 39.842494964599609,-90.921539306640625 39.847461700439453,-90.923110961914062 40.108623504638672,-90.698799133300781 40.106422424316406,-90.712196350097656 40.080902099609375,-90.679664611816406 40.076221466064453,-90.682174682617188 40.045818328857422,-90.645256042480469 40.0289306640625,-90.613410949707031 40.027854919433594,-90.609687805175781 40.020179748535156,-90.622215270996094 40.015083312988281,-90.607040405273438 40 [...]
+"POLYGON ((-89.032203674316406 39.656520843505859,-89.14691162109375 39.656898498535156,-89.149192810058594 39.801521301269531,-89.191635131835938 39.816139221191406,-89.223388671875 39.811679840087891,-89.224777221679688 39.918678283691406,-89.151618957519531 39.919403076171875,-89.152763366699219 40.054969787597656,-88.755012512207031 40.059139251708984,-88.748764038085938 39.794776916503906,-88.761344909667969 39.793941497802734,-88.763114929199219 39.738189697265625,-88.8145675659179 [...]
+"POLYGON ((-89.703964233398438 39.528034210205078,-89.929985046386719 39.527008056640625,-89.928977966308594 39.558292388916016,-89.986045837402344 39.704959869384766,-89.998001098632812 39.877189636230469,-89.997596740722656 39.906204223632812,-89.775184631347656 39.908252716064453,-89.769828796386719 39.919143676757812,-89.710472106933594 39.920162200927734,-89.705833435058594 39.976844787597656,-89.585220336914062 39.978794097900391,-89.4927978515625 39.980186462402344,-89.49520111083 [...]
+"POLYGON ((-91.850715637207031 39.661178588867188,-91.848159790039062 39.94964599609375,-91.447242736816406 39.946063995361328,-91.430389404296875 39.921836853027344,-91.434051513671875 39.901828765869141,-91.45098876953125 39.885242462158203,-91.449188232421875 39.863048553466797,-91.3817138671875 39.80377197265625,-91.373420715332031 39.761272430419922,-91.367088317871094 39.724639892578125,-91.317665100097656 39.685916900634766,-91.721794128417969 39.686203002929688,-91.72276306152343 [...]
+"POLYGON ((-90.154800415039062 39.525581359863281,-90.303291320800781 39.524715423583984,-90.300384521484375 39.639423370361328,-90.341606140136719 39.640064239501953,-90.341934204101562 39.667713165283203,-90.375999450683594 39.667934417724609,-90.375862121582031 39.754978179931641,-90.484756469726562 39.755531311035156,-90.483451843261719 39.79180908203125,-90.608596801757812 39.793941497802734,-90.588485717773438 39.809986114501953,-90.577041625976562 39.845447540283203,-90.5853576660 [...]
+"POLYGON ((-91.2032470703125 39.600021362304688,-91.317665100097656 39.685916900634766,-91.367088317871094 39.724639892578125,-91.373420715332031 39.761272430419922,-90.922317504882812 39.765838623046875,-90.921539306640625 39.847461700439453,-90.87890625 39.842494964599609,-90.577041625976562 39.845447540283203,-90.588485717773438 39.809986114501953,-90.608596801757812 39.793941497802734,-90.646682739257812 39.704738616943359,-90.640907287597656 39.679855346679688,-90.612258911132812 39 [...]
+"POLYGON ((-89.539955139160156 39.528648376464844,-89.540618896484375 39.645149230957031,-89.489250183105469 39.646518707275391,-89.490440368652344 39.68414306640625,-89.4366455078125 39.686393737792969,-89.435997009277344 39.748046875,-89.408485412597656 39.742588043212891,-89.332466125488281 39.764270782470703,-89.283317565917969 39.793663024902344,-89.261077880859375 39.820823669433594,-89.223388671875 39.811679840087891,-89.191635131835938 39.816139221191406,-89.149192810058594 39.80 [...]
+"POLYGON ((-88.472915649414062 39.451450347900391,-88.590293884277344 39.451000213623047,-88.589958190917969 39.477745056152344,-88.604248046875 39.478302001953125,-88.604682922363281 39.491451263427734,-88.624359130859375 39.491134643554688,-88.624168395996094 39.506999969482422,-88.646820068359375 39.507160186767578,-88.648399353027344 39.524848937988281,-88.719940185546875 39.527130126953125,-88.720550537109375 39.581531524658203,-88.813041687011719 39.583431243896484,-88.815338134765 [...]
+"POLYGON ((-90.5877685546875 39.525283813476562,-90.582412719726562 39.565677642822266,-90.612258911132812 39.643844604492188,-90.640907287597656 39.679855346679688,-90.646682739257812 39.704738616943359,-90.608596801757812 39.793941497802734,-90.483451843261719 39.79180908203125,-90.484756469726562 39.755531311035156,-90.375862121582031 39.754978179931641,-90.375999450683594 39.667934417724609,-90.341934204101562 39.667713165283203,-90.341606140136719 39.640064239501953,-90.300384521484 [...]
+"POLYGON ((-88.013847351074219 39.379283905029297,-88.472190856933594 39.37664794921875,-88.472915649414062 39.451450347900391,-88.474266052246094 39.650478363037109,-88.058303833007812 39.65771484375,-88.059532165527344 39.685836791992188,-87.968612670898438 39.688838958740234,-87.965187072753906 39.484779357910156,-88.014053344726562 39.485370635986328,-88.013847351074219 39.379283905029297))",Coles,Illinois,17,029,17029,           17029,         3.789541,           3.059402,           [...]
+"POLYGON ((-91.444122314453125 39.321300506591797,-91.713165283203125 39.327243804931641,-91.723670959472656 39.340206146240234,-91.715087890625 39.604248046875,-91.722267150878906 39.604576110839844,-91.722763061523438 39.659435272216797,-91.721794128417969 39.686203002929688,-91.317665100097656 39.685916900634766,-91.2032470703125 39.600021362304688,-91.465652465820312 39.456977844238281,-91.444122314453125 39.321300506591797))",Ralls,Missouri,29,173,29173,           29173,         0.0 [...]
+"POLYGON ((-88.810005187988281 39.214427947998047,-89.142524719238281 39.21673583984375,-89.138893127441406 39.349987030029297,-89.031814575195312 39.349178314208984,-89.032203674316406 39.656520843505859,-88.815338134765625 39.655979156494141,-88.813041687011719 39.583431243896484,-88.720550537109375 39.581531524658203,-88.719940185546875 39.527130126953125,-88.648399353027344 39.524848937988281,-88.646820068359375 39.507160186767578,-88.624168395996094 39.506999969482422,-88.6243591308 [...]
+"POLYGON ((-91.417518615722656 39.147624969482422,-91.444122314453125 39.321300506591797,-91.465652465820312 39.456977844238281,-91.2032470703125 39.600021362304688,-91.15618896484375 39.552593231201172,-91.093612670898438 39.528926849365234,-91.064384460449219 39.473983764648438,-91.036338806152344 39.444412231445312,-90.947891235351562 39.400585174560547,-90.850494384765625 39.350452423095703,-90.779342651367188 39.296802520751953,-90.738082885742188 39.247810363769531,-90.732337951660 [...]
+"POLYGON ((-89.702468872070312 38.996799468994141,-89.710609436035156 39.354412078857422,-89.704071044921875 39.354877471923828,-89.703964233398438 39.528034210205078,-89.539955139160156 39.528648376464844,-89.537483215332031 39.349597930908203,-89.138893127441406 39.349987030029297,-89.142524719238281 39.21673583984375,-89.255943298339844 39.216102600097656,-89.257766723632812 39.025283813476562,-89.594169616699219 39.028202056884766,-89.592948913574219 38.998291015625,-89.6456375122070 [...]
+"POLYGON ((-90.151832580566406 38.997974395751953,-90.152389526367188 39.258148193359375,-90.154800415039062 39.525581359863281,-89.929985046386719 39.527008056640625,-89.703964233398438 39.528034210205078,-89.704071044921875 39.354877471923828,-89.710609436035156 39.354412078857422,-89.702468872070312 38.996799468994141,-90.151832580566406 38.997974395751953))",Macoupin,Illinois,17,117,17117,           17117,         1.014034,           1.637640,          2.085136,3,4,6,295848,244254,28 [...]
+"POLYGON ((-90.152389526367188 39.258148193359375,-90.204612731933594 39.251968383789062,-90.205551147460938 39.225673675537109,-90.317192077636719 39.224990844726562,-90.317848205566406 39.177394866943359,-90.4923095703125 39.175216674804688,-90.506370544433594 39.161960601806641,-90.519172668457031 39.185878753662109,-90.568428039550781 39.185012817382812,-90.586715698242188 39.177600860595703,-90.582313537597656 39.160869598388672,-90.608352661132812 39.117576599121094,-90.61427307128 [...]
+"POLYGON ((-90.608352661132812 39.117576599121094,-90.611167907714844 39.107578277587891,-90.576240539550781 39.031734466552734,-90.575263977050781 39.005451202392578,-90.550468444824219 38.969856262207031,-90.531951904296875 38.957775115966797,-90.488288879394531 38.967193603515625,-90.469841003417969 38.959178924560547,-90.530426025390625 38.891609191894531,-90.570327758789062 38.871326446533203,-90.627212524414062 38.880794525146484,-90.668876647949219 38.935253143310547,-90.706069946 [...]
+"POLYGON ((-88.010856628417969 39.177513122558594,-88.36407470703125 39.174442291259766,-88.469703674316406 39.175361633300781,-88.471542358398438 39.213001251220703,-88.472190856933594 39.37664794921875,-88.013847351074219 39.379283905029297,-88.010856628417969 39.177513122558594))",Cumberland,Illinois,17,035,17035,           17035,         3.032738,           1.850310,         10.855743,2,1,7,65947,54045,64482,        3.248611,        3.147752,     2.636248,    -0.618608,      -0.61806 [...]
+"POLYGON ((-92.316963195800781 39.249309539794922,-92.314231872558594 39.347320556640625,-91.723670959472656 39.340206146240234,-91.713165283203125 39.327243804931641,-91.444122314453125 39.321300506591797,-91.417518615722656 39.147624969482422,-91.638282775878906 39.148555755615234,-91.643470764160156 39.062778472900391,-92.111885070800781 39.066963195800781,-92.104530334472656 39.242141723632812,-92.316963195800781 39.249309539794922))",Audrain,Missouri,29,007,29007,           29007,   [...]
+"POLYGON ((-90.151832580566406 38.997974395751953,-90.279121398925781 38.997692108154297,-90.2789306640625 38.924716949462891,-90.319740295410156 38.924907684326172,-90.413070678710938 38.962329864501953,-90.469841003417969 38.959178924560547,-90.488288879394531 38.967193603515625,-90.531951904296875 38.957775115966797,-90.550468444824219 38.969856262207031,-90.575263977050781 39.005451202392578,-90.576240539550781 39.031734466552734,-90.611167907714844 39.107578277587891,-90.60835266113 [...]
+"POLYGON ((-92.395118713378906 38.736011505126953,-92.409675598144531 38.760608673095703,-92.392578125 38.790939331054688,-92.393402099609375 38.811775207519531,-92.432884216308594 38.823997497558594,-92.474708557128906 38.864265441894531,-92.499984741210938 38.918071746826172,-92.566192626953125 38.968196868896484,-92.565650939941406 39.002174377441406,-92.432899475097656 39.250255584716797,-92.316963195800781 39.249309539794922,-92.104530334472656 39.242141723632812,-92.111885070800781 [...]
+"POLYGON ((-90.96160888671875 38.871410369873047,-91.116539001464844 38.874469757080078,-91.11834716796875 38.929298400878906,-91.200668334960938 38.933181762695312,-91.199104309082031 38.9930419921875,-91.26904296875 38.996143341064453,-91.264923095703125 39.143535614013672,-91.193122863769531 39.143173217773438,-91.186759948730469 39.226673126220703,-90.732337951660156 39.224746704101562,-90.718193054199219 39.195873260498047,-90.71673583984375 39.144210815429688,-90.690399169921875 39 [...]
+"POLYGON ((-89.257766723632812 39.025283813476562,-89.255943298339844 39.216102600097656,-89.142524719238281 39.21673583984375,-88.810005187988281 39.214427947998047,-88.810455322265625 38.917091369628906,-88.701034545898438 38.917793273925781,-88.702590942382812 38.830322265625,-89.143257141113281 38.825576782226562,-89.144851684570312 38.741279602050781,-89.262840270996094 38.742015838623047,-89.264930725097656 39.007617950439453,-89.256050109863281 39.008510589599609,-89.2577667236328 [...]
+"POLYGON ((-88.365158081054688 38.915172576904297,-88.701034545898438 38.917793273925781,-88.810455322265625 38.917091369628906,-88.810005187988281 39.214427947998047,-88.471542358398438 39.213001251220703,-88.469703674316406 39.175361633300781,-88.36407470703125 39.174442291259766,-88.365158081054688 38.915172576904297))",Effingham,Illinois,17,049,17049,           17049,         1.070223,           0.000000,          1.571158,2,0,3,186877,156669,190942,        4.849288,        6.511668, [...]
+"POLYGON ((-87.955390930175781 38.855442047119141,-88.262115478515625 38.853900909423828,-88.365455627441406 38.858062744140625,-88.365158081054688 38.915172576904297,-88.36407470703125 39.174442291259766,-88.010856628417969 39.177513122558594,-87.955650329589844 39.177749633789062,-87.955390930175781 38.855442047119141))",Jasper,Illinois,17,079,17079,           17079,         0.000000,           1.804761,          3.127590,0,1,2,68404,55409,63947,        5.745876,        2.505821,     4 [...]
+"POLYGON ((-91.42498779296875 38.713211059570312,-91.493743896484375 38.703960418701172,-91.561454772949219 38.678821563720703,-91.593994140625 38.682807922363281,-91.653541564941406 38.704471588134766,-91.643470764160156 39.062778472900391,-91.638282775878906 39.148555755615234,-91.417518615722656 39.147624969482422,-91.264923095703125 39.143535614013672,-91.26904296875 38.996143341064453,-91.275390625 38.843738555908203,-91.418510437011719 38.848403930664062,-91.42498779296875 38.71321 [...]
+"POLYGON ((-92.219749450683594 38.638427734375,-92.193374633789062 38.658500671386719,-92.199501037597656 38.680587768554688,-92.224853515625 38.69635009765625,-92.2191162109375 38.7164306640625,-92.187454223632812 38.737979888916016,-92.143646240234375 38.8155517578125,-92.16339111328125 38.870895385742188,-92.1585693359375 38.884601593017578,-92.170272827148438 38.897499084472656,-92.155441284179688 38.928653717041016,-92.137191772460938 39.061893463134766,-92.111885070800781 39.066963 [...]
+"POLYGON ((-89.605598449707031 38.741294860839844,-89.606971740722656 38.871822357177734,-89.644203186035156 38.871788024902344,-89.645637512207031 38.996425628662109,-89.592948913574219 38.998291015625,-89.594169616699219 39.028202056884766,-89.257766723632812 39.025283813476562,-89.256050109863281 39.008510589599609,-89.264930725097656 39.007617950439453,-89.262840270996094 38.742015838623047,-89.605598449707031 38.741294860839844))",Bond,Illinois,17,005,17005,           17005,         [...]
+"POLYGON ((-90.121726989746094 38.800510406494141,-90.113121032714844 38.830467224121094,-90.1328125 38.853031158447266,-90.243927001953125 38.914508819580078,-90.2789306640625 38.924716949462891,-90.279121398925781 38.997692108154297,-90.151832580566406 38.997974395751953,-89.702468872070312 38.996799468994141,-89.645637512207031 38.996425628662109,-89.644203186035156 38.871788024902344,-89.606971740722656 38.871822357177734,-89.605598449707031 38.741294860839844,-89.60430908203125 38.6 [...]
+"POLYGON ((-90.968650817871094 38.546318054199219,-91.013717651367188 38.562995910644531,-91.060562133789062 38.606834411621094,-91.088890075683594 38.609649658203125,-91.142280578613281 38.600337982177734,-91.204971313476562 38.611728668212891,-91.225311279296875 38.625041961669922,-91.247871398925781 38.656909942626953,-91.296379089355469 38.688400268554688,-91.334465026855469 38.702346801757812,-91.375076293945312 38.699016571044922,-91.42498779296875 38.713211059570312,-91.4185104370 [...]
+"POLYGON ((-90.737709045410156 38.634517669677734,-90.783836364746094 38.577388763427734,-90.802215576171875 38.584445953369141,-90.822776794433594 38.581962585449219,-90.91204833984375 38.540630340576172,-90.968650817871094 38.546318054199219,-90.96160888671875 38.871410369873047,-90.958541870117188 38.895469665527344,-90.939468383789062 38.887535095214844,-90.880401611328125 38.890472412109375,-90.813392639160156 38.879413604736328,-90.805099487304688 38.911231994628906,-90.78813171386 [...]
+"POLYGON ((-88.7020263671875 38.611400604248047,-88.702590942382812 38.830322265625,-88.701034545898438 38.917793273925781,-88.365158081054688 38.915172576904297,-88.365455627441406 38.858062744140625,-88.262115478515625 38.853900909423828,-88.261642456054688 38.742389678955078,-88.288833618164062 38.739486694335938,-88.27716064453125 38.661411285400391,-88.293960571289062 38.643444061279297,-88.286087036132812 38.620704650878906,-88.265701293945312 38.606903076171875,-88.7020263671875 3 [...]
+"POLYGON ((-90.739524841308594 38.463615417480469,-90.737709045410156 38.634517669677734,-90.688613891601562 38.6585693359375,-90.680091857910156 38.678142547607422,-90.6396484375 38.693027496337891,-90.602333068847656 38.682033538818359,-90.547653198242188 38.692028045654297,-90.533950805664062 38.723419189453125,-90.490196228027344 38.759128570556641,-90.452125549316406 38.826511383056641,-90.43328857421875 38.831188201904297,-90.403091430664062 38.825519561767578,-90.360061645507812 3 [...]
+"POLYGON ((-87.958946228027344 38.575351715087891,-88.149078369140625 38.576217651367188,-88.147964477539062 38.60430908203125,-88.265701293945312 38.606903076171875,-88.286087036132812 38.620704650878906,-88.293960571289062 38.643444061279297,-88.27716064453125 38.661411285400391,-88.288833618164062 38.739486694335938,-88.261642456054688 38.742389678955078,-88.262115478515625 38.853900909423828,-87.955390930175781 38.855442047119141,-87.917572021484375 38.855419158935547,-87.91657257080 [...]
+"POLYGON ((-89.145423889160156 38.501522064208984,-89.144851684570312 38.741279602050781,-89.143257141113281 38.825576782226562,-88.702590942382812 38.830322265625,-88.7020263671875 38.611400604248047,-88.703521728515625 38.474075317382812,-89.144378662109375 38.474323272705078,-89.145423889160156 38.501522064208984))",Marion,Illinois,17,121,17121,           17121,         3.770739,           3.702093,          3.190047,10,8,8,265200,216094,250780,        3.860683,        3.610808,     4 [...]
+"POLYGON ((-90.26123046875 38.532768249511719,-90.31646728515625 38.580005645751953,-90.302742004394531 38.670291900634766,-90.239692687988281 38.730060577392578,-90.19219970703125 38.760704040527344,-90.171195983886719 38.786655426025391,-90.121726989746094 38.800510406494141,-90.135177612304688 38.785484313964844,-90.163398742675781 38.773097991943359,-90.196571350097656 38.723964691162109,-90.202239990234375 38.700363159179688,-90.183578491210938 38.658771514892578,-90.183708190917969 [...]
+"POLYGON ((-89.144851684570312 38.741279602050781,-89.145423889160156 38.501522064208984,-89.265357971191406 38.50860595703125,-89.297721862792969 38.5023193359375,-89.357093811035156 38.512825012207031,-89.398292541503906 38.488391876220703,-89.430625915527344 38.493400573730469,-89.457084655761719 38.486160278320312,-89.481193542480469 38.466224670410156,-89.524101257324219 38.480274200439453,-89.542320251464844 38.473468780517578,-89.572303771972656 38.481159210205078,-89.618721008300 [...]
+"POLYGON ((-91.954345703125 38.5972900390625,-92.028793334960938 38.553165435791016,-92.012504577636719 38.507251739501953,-92.034919738769531 38.475055694580078,-92.074729919433594 38.470149993896484,-92.109535217285156 38.45672607421875,-92.143455505371094 38.468242645263672,-92.167549133300781 38.467723846435547,-92.159446716308594 38.438880920410156,-92.128013610839844 38.414619445800781,-92.124382019042969 38.395198822021484,-92.137992858886719 38.381759643554688,-92.178337097167969 [...]
+"POLYGON ((-91.377273559570312 38.210758209228516,-91.540191650390625 38.213146209716797,-91.542747497558594 38.157341003417969,-91.638160705566406 38.157077789306641,-91.652229309082031 38.157737731933594,-91.651405334472656 38.289676666259766,-91.653564453125 38.703544616699219,-91.653541564941406 38.704471588134766,-91.593994140625 38.682807922363281,-91.561454772949219 38.678821563720703,-91.493743896484375 38.703960418701172,-91.42498779296875 38.713211059570312,-91.375076293945312  [...]
+"POLYGON ((-92.195640563964844 38.288467407226562,-92.196113586425781 38.333343505859375,-92.1925048828125 38.362895965576172,-92.178337097167969 38.376808166503906,-92.137992858886719 38.381759643554688,-92.124382019042969 38.395198822021484,-92.128013610839844 38.414619445800781,-92.159446716308594 38.438880920410156,-92.167549133300781 38.467723846435547,-92.143455505371094 38.468242645263672,-92.109535217285156 38.45672607421875,-92.074729919433594 38.470149993896484,-92.034919738769 [...]
+"POLYGON ((-90.782661437988281 38.207981109619141,-91.102394104003906 38.2042236328125,-91.34429931640625 38.204006195068359,-91.377708435058594 38.204860687255859,-91.377273559570312 38.210758209228516,-91.369621276855469 38.416683197021484,-91.375076293945312 38.699016571044922,-91.334465026855469 38.702346801757812,-91.296379089355469 38.688400268554688,-91.247871398925781 38.656909942626953,-91.225311279296875 38.625041961669922,-91.204971313476562 38.611728668212891,-91.142280578613 [...]
+"POLYGON ((-89.904197692871094 38.223079681396484,-89.930282592773438 38.276473999023438,-89.923301696777344 38.285110473632812,-89.910957336425781 38.279712677001953,-89.917572021484375 38.309150695800781,-90.0313720703125 38.311885833740234,-90.031501770019531 38.329559326171875,-90.145423889160156 38.408786773681641,-90.146163940429688 38.426914215087891,-90.265785217285156 38.518688201904297,-90.26123046875 38.532768249511719,-90.240943908691406 38.56280517578125,-90.183708190917969  [...]
+"POLYGON ((-88.153724670410156 38.259429931640625,-88.374153137207031 38.256660461425781,-88.707603454589844 38.259716033935547,-88.703521728515625 38.474075317382812,-88.7020263671875 38.611400604248047,-88.265701293945312 38.606903076171875,-88.147964477539062 38.60430908203125,-88.149078369140625 38.576217651367188,-88.153724670410156 38.259429931640625))",Wayne,Illinois,17,191,17191,           17191,         4.509258,           2.208554,          0.969791,5,2,1,110883,90557,103115,   [...]
+"POLYGON ((-88.153724670410156 38.259429931640625,-88.149078369140625 38.576217651367188,-87.958946228027344 38.575351715087891,-87.949729919433594 38.538066864013672,-87.963546752929688 38.496536254882812,-87.952728271484375 38.451976776123047,-87.959526062011719 38.436199188232422,-87.9515380859375 38.424766540527344,-87.979057312011719 38.401084899902344,-87.97894287109375 38.377967834472656,-87.963676452636719 38.350124359130859,-87.960708618164062 38.295692443847656,-87.988426208496 [...]
+"POLYGON ((-89.904197692871094 38.223079681396484,-90.040107727050781 38.223464965820312,-90.038887023925781 38.13690185546875,-90.207527160644531 38.088905334472656,-90.254058837890625 38.122169494628906,-90.289634704589844 38.166816711425781,-90.336715698242188 38.188713073730469,-90.364768981933594 38.234298706054688,-90.369346618652344 38.323558807373047,-90.358688354492188 38.365329742431641,-90.339607238769531 38.390846252441406,-90.301841735839844 38.427356719970703,-90.2657852172 [...]
+"POLYGON ((-89.597236633300781 38.216907501220703,-89.714385986328125 38.219024658203125,-89.709686279296875 38.418910980224609,-89.669769287109375 38.427585601806641,-89.664512634277344 38.442092895507812,-89.645706176757812 38.440757751464844,-89.626335144042969 38.449390411376953,-89.618721008300781 38.466167449951172,-89.572303771972656 38.481159210205078,-89.542320251464844 38.473468780517578,-89.524101257324219 38.480274200439453,-89.481193542480469 38.466224670410156,-89.457084655 [...]
+"POLYGON ((-90.638938903808594 38.080215454101562,-90.657737731933594 38.085933685302734,-90.656219482421875 38.100906372070312,-90.684783935546875 38.095195770263672,-90.687408447265625 38.112846374511719,-90.782661437988281 38.207981109619141,-90.740684509277344 38.393348693847656,-90.739524841308594 38.463615417480469,-90.690193176269531 38.465919494628906,-90.684532165527344 38.443305969238281,-90.669242858886719 38.442546844482422,-90.658111572265625 38.481632232666016,-90.613876342 [...]
+"POLYGON ((-88.708427429199219 38.129184722900391,-89.133186340332031 38.1275634765625,-89.151893615722656 38.129432678222656,-89.150909423828125 38.213733673095703,-89.144378662109375 38.474323272705078,-88.703521728515625 38.474075317382812,-88.707603454589844 38.259716033935547,-88.708427429199219 38.129184722900391))",Jefferson,Illinois,17,081,17081,           17081,         3.999876,          10.015550,          4.941533,9,19,11,225007,189705,222603,        3.855739,        3.964437 [...]
+"POLYGON ((-92.404434204101562 38.020709991455078,-92.516227722167969 38.024806976318359,-92.517822265625 38.035198211669922,-92.555259704589844 38.049690246582031,-92.558685302734375 38.061847686767578,-92.575721740722656 38.063690185546875,-92.575897216796875 38.095882415771484,-92.589996337890625 38.097339630126953,-92.589363098144531 38.110054016113281,-92.599922180175781 38.110694885253906,-92.599800109863281 38.135639190673828,-92.610389709472656 38.136730194091797,-92.608802795410 [...]
+"POLYGON ((-92.188056945800781 38.017032623291016,-92.182304382324219 38.164070129394531,-92.199317932128906 38.165054321289062,-92.195640563964844 38.288467407226562,-91.651405334472656 38.289676666259766,-91.652229309082031 38.157737731933594,-91.638160705566406 38.157077789306641,-91.639785766601562 38.051868438720703,-91.903495788574219 38.053779602050781,-91.923751831054688 38.047489166259766,-91.933311462402344 38.035964965820312,-91.958641052246094 38.041805267333984,-91.974563598 [...]
+"POLYGON ((-88.37646484375 37.914005279541016,-88.374153137207031 38.256660461425781,-88.153724670410156 38.259429931640625,-87.988426208496094 38.259773254394531,-87.980018615722656 38.241085052490234,-87.986007690429688 38.234813690185547,-87.977928161621094 38.200714111328125,-87.932289123535156 38.171131134033203,-87.931991577148438 38.157527923583984,-87.950569152832031 38.136913299560547,-87.973503112792969 38.131759643554688,-88.018547058105469 38.103302001953125,-88.0123291015625 [...]
+"POLYGON ((-88.37646484375 37.914005279541016,-88.708442687988281 37.909805297851562,-88.708427429199219 38.129184722900391,-88.707603454589844 38.259716033935547,-88.374153137207031 38.256660461425781,-88.37646484375 37.914005279541016))",Hamilton,Illinois,17,065,17065,           17065,         3.595829,           2.235586,          3.905411,2,1,2,55620,44731,51211,        3.672357,        2.415505,     2.094327,    -0.013288,       0.096903,      0.229475
+"POLYGON ((-90.207527160644531 38.088905334472656,-90.038887023925781 38.13690185546875,-90.040107727050781 38.223464965820312,-89.904197692871094 38.223079681396484,-89.714385986328125 38.219024658203125,-89.597236633300781 38.216907501220703,-89.60272216796875 37.954021453857422,-89.667251586914062 37.839733123779297,-89.685874938964844 37.828826904296875,-89.691055297851562 37.804794311523438,-89.728446960449219 37.840991973876953,-89.851715087890625 37.905063629150391,-89.86104583740 [...]
+"POLYGON ((-89.133186340332031 38.1275634765625,-89.140304565429688 38.107643127441406,-89.1234130859375 38.093090057373047,-89.1400146484375 38.047359466552734,-89.145538330078125 37.991172790527344,-89.179008483886719 37.949115753173828,-89.60272216796875 37.954021453857422,-89.597236633300781 38.216907501220703,-89.150909423828125 38.213733673095703,-89.151893615722656 38.129432678222656,-89.133186340332031 38.1275634765625))",Perry,Illinois,17,145,17145,           17145,         3.79 [...]
+"POLYGON ((-91.110374450683594 37.739475250244141,-91.155738830566406 37.737960815429688,-91.155967712402344 37.696247100830078,-91.164108276367188 37.696136474609375,-91.320724487304688 37.701606750488281,-91.31866455078125 37.783241271972656,-91.535430908203125 37.787075042724609,-91.53155517578125 38.154808044433594,-91.542747497558594 38.157341003417969,-91.540191650390625 38.213146209716797,-91.377273559570312 38.210758209228516,-91.377708435058594 38.204860687255859,-91.34429931640 [...]
+"POLYGON ((-90.649925231933594 37.735160827636719,-91.110374450683594 37.739475250244141,-91.102394104003906 38.2042236328125,-90.782661437988281 38.207981109619141,-90.687408447265625 38.112846374511719,-90.684783935546875 38.095195770263672,-90.656219482421875 38.100906372070312,-90.657737731933594 38.085933685302734,-90.638938903808594 38.080215454101562,-90.649925231933594 37.735160827636719))",Washington,Missouri,29,221,29221,           29221,        11.022928,           8.400979,   [...]
+"POLYGON ((-92.030677795410156 38.011772155761719,-91.974563598632812 38.011104583740234,-91.958641052246094 38.041805267333984,-91.933311462402344 38.035964965820312,-91.923751831054688 38.047489166259766,-91.903495788574219 38.053779602050781,-91.639785766601562 38.051868438720703,-91.638160705566406 38.157077789306641,-91.542747497558594 38.157341003417969,-91.53155517578125 38.154808044433594,-91.535430908203125 37.787075042724609,-91.816192626953125 37.787055969238281,-91.8102722167 [...]
+"POLYGON ((-88.708442687988281 37.909805297851562,-88.709480285644531 37.867202758789062,-89.154891967773438 37.865646362304688,-89.155082702636719 37.949047088623047,-89.179008483886719 37.949115753173828,-89.145538330078125 37.991172790527344,-89.1400146484375 38.047359466552734,-89.1234130859375 38.093090057373047,-89.140304565429688 38.107643127441406,-89.133186340332031 38.1275634765625,-88.708427429199219 38.129184722900391,-88.708442687988281 37.909805297851562))",Franklin,Illinoi [...]
+"POLYGON ((-90.116325378417969 37.672393798828125,-90.160240173339844 37.706611633300781,-90.202995300292969 37.676002502441406,-90.463249206542969 37.880016326904297,-90.3250732421875 37.986186981201172,-90.415153503417969 38.045375823974609,-90.329277038574219 38.099925994873047,-90.297019958496094 38.091983795166016,-90.254058837890625 38.122169494628906,-90.207527160644531 38.088905334472656,-90.134712219238281 38.053951263427734,-90.119338989257812 38.032272338867188,-90.04192352294 [...]
+"POLYGON ((-90.152122497558594 37.643196105957031,-90.539215087890625 37.642776489257812,-90.653701782226562 37.641746520996094,-90.649925231933594 37.735160827636719,-90.638938903808594 38.080215454101562,-90.609596252441406 38.073234558105469,-90.617973327636719 38.047321319580078,-90.612892150878906 38.020626068115234,-90.624412536621094 38.009639739990234,-90.602691650390625 38.002586364746094,-90.415153503417969 38.045375823974609,-90.3250732421875 37.986186981201172,-90.46324920654 [...]
+"POLYGON ((-92.249320983886719 37.607109069824219,-92.249122619628906 37.648834228515625,-92.409805297851562 37.712673187255859,-92.4080810546875 37.861454010009766,-92.404434204101562 38.020709991455078,-92.188056945800781 38.017032623291016,-92.030677795410156 38.011772155761719,-92.029937744140625 37.785987854003906,-92.022674560546875 37.777976989746094,-92.031288146972656 37.604129791259766,-92.249320983886719 37.607109069824219))",Pulaski,Missouri,29,169,29169,           29169,     [...]
+"POLYGON ((-89.1531982421875 37.604103088378906,-89.459335327148438 37.606403350830078,-89.461090087890625 37.583286285400391,-89.476776123046875 37.570144653320312,-89.524971008300781 37.571956634521484,-89.51336669921875 37.615928649902344,-89.519180297851562 37.650375366210938,-89.513374328613281 37.679840087890625,-89.521522521972656 37.694797515869141,-89.581436157226562 37.706104278564453,-89.666458129882812 37.745452880859375,-89.675857543945312 37.783969879150391,-89.691055297851 [...]
+"POLYGON ((-90.152122497558594 37.643196105957031,-90.116325378417969 37.672393798828125,-90.007438659667969 37.819301605224609,-89.937873840332031 37.878044128417969,-89.900550842285156 37.875904083251953,-89.866813659667969 37.891876220703125,-89.861045837402344 37.905487060546875,-89.851715087890625 37.905063629150391,-89.728446960449219 37.840991973876953,-89.691055297851562 37.804794311523438,-89.675857543945312 37.783969879150391,-89.666458129882812 37.745452880859375,-89.581436157 [...]
+"POLYGON ((-88.712860107421875 37.605228424072266,-89.046318054199219 37.603298187255859,-89.1531982421875 37.604103088378906,-89.154891967773438 37.865646362304688,-88.709480285644531 37.867202758789062,-88.712860107421875 37.605228424072266))",Williamson,Illinois,17,199,17199,           17199,         5.497033,           6.842239,          3.146192,19,20,11,345641,292302,349629,        2.439993,        3.482278,     3.390967,    -0.572489,      -0.284405,     -0.023560
+"POLYGON ((-91.654685974121094 37.421852111816406,-91.760848999023438 37.424907684326172,-91.764305114746094 37.595333099365234,-91.8201904296875 37.59881591796875,-91.818557739257812 37.714012145996094,-91.810272216796875 37.746814727783203,-91.816192626953125 37.787055969238281,-91.535430908203125 37.787075042724609,-91.31866455078125 37.783241271972656,-91.320724487304688 37.701606750488281,-91.164108276367188 37.696136474609375,-91.164642333984375 37.590499877929688,-91.3209381103515 [...]
+"POLYGON ((-90.539215087890625 37.642776489257812,-90.543220520019531 37.596954345703125,-90.554252624511719 37.596408843994141,-90.555435180664062 37.312156677246094,-90.560089111328125 37.273578643798828,-90.742828369140625 37.271846771240234,-90.755569458007812 37.273075103759766,-90.750823974609375 37.369235992431641,-90.778640747070312 37.370307922363281,-90.777984619140625 37.601970672607422,-91.005767822265625 37.604354858398438,-91.110031127929688 37.589874267578125,-91.164642333 [...]
+"POLYGON ((-90.222129821777344 37.311878204345703,-90.555435180664062 37.312156677246094,-90.554252624511719 37.596408843994141,-90.543220520019531 37.596954345703125,-90.539215087890625 37.642776489257812,-90.152122497558594 37.643196105957031,-90.1494140625 37.599239349365234,-90.149795532226562 37.311836242675781,-90.222129821777344 37.311878204345703))",Madison,Missouri,29,123,29123,           29123,         3.092146,           5.436063,          2.980271,2,3,2,64680,55187,67108,     [...]
+"POLYGON ((-92.090667724609375 37.058235168457031,-92.25982666015625 37.061748504638672,-92.249061584472656 37.255203247070312,-92.257232666015625 37.257282257080078,-92.2520751953125 37.477806091308594,-92.249320983886719 37.607109069824219,-92.031288146972656 37.604129791259766,-91.8201904296875 37.59881591796875,-91.764305114746094 37.595333099365234,-91.760848999023438 37.424907684326172,-91.654685974121094 37.421852111816406,-91.6669921875 37.04888916015625,-92.090667724609375 37.05 [...]
+"POLYGON ((-89.468742370605469 37.339408874511719,-89.435737609863281 37.355716705322266,-89.427574157714844 37.411018371582031,-89.453620910644531 37.45318603515625,-89.494781494140625 37.491725921630859,-89.524971008300781 37.571956634521484,-89.476776123046875 37.570144653320312,-89.461090087890625 37.583286285400391,-89.459335327148438 37.606403350830078,-89.1531982421875 37.604103088378906,-89.046318054199219 37.603298187255859,-89.0496826171875 37.33721923828125,-89.245895385742188 [...]
+"POLYGON ((-90.742828369140625 37.271846771240234,-90.743385314941406 37.166652679443359,-90.758384704589844 37.165592193603516,-90.760856628417969 37.141082763671875,-90.783943176269531 37.140842437744141,-90.788192749023438 37.052379608154297,-90.971664428710938 37.057086944580078,-90.971343994140625 37.099258422851562,-91.024444580078125 37.099964141845703,-91.027008056640625 37.140739440917969,-91.040885925292969 37.141468048095703,-91.043159484863281 37.167739868164062,-91.075447082 [...]
+"POLYGON ((-89.869422912597656 37.131675720214844,-89.960624694824219 37.131359100341797,-89.964256286621094 37.065155029296875,-89.995964050292969 37.063217163085938,-89.997604370117188 37.049606323242188,-90.114639282226562 37.048614501953125,-90.114952087402344 37.086696624755859,-90.221672058105469 37.086109161376953,-90.222129821777344 37.311878204345703,-90.149795532226562 37.311836242675781,-90.1494140625 37.599239349365234,-89.865379333496094 37.601329803466797,-89.86942291259765 [...]
+"POLYGON ((-89.865379333496094 37.601329803466797,-89.809608459472656 37.601028442382812,-89.773551940917969 37.589332580566406,-89.733505249023438 37.598934173583984,-89.721290588378906 37.593063354492188,-89.710273742675781 37.599430084228516,-89.691093444824219 37.595382690429688,-89.685264587402344 37.586776733398438,-89.641731262207031 37.600433349609375,-89.633583068847656 37.590927124023438,-89.609199523925781 37.596843719482422,-89.591163635253906 37.574195861816406,-89.524971008 [...]
+"POLYGON ((-91.6669921875 37.04888916015625,-91.654685974121094 37.421852111816406,-91.223251342773438 37.412868499755859,-91.182647705078125 37.411170959472656,-91.181083679199219 37.316432952880859,-91.161956787109375 37.314884185791016,-91.16180419921875 37.256397247314453,-91.12933349609375 37.252304077148438,-91.130210876464844 37.239597320556641,-91.138282775878906 37.238578796386719,-91.13690185546875 37.201873779296875,-91.097038269042969 37.202407836914062,-91.095329284667969 37 [...]
+"POLYGON ((-90.680656433105469 36.925613403320312,-90.697952270507812 36.926803588867188,-90.699165344238281 36.966693878173828,-90.718193054199219 36.967864990234375,-90.7186279296875 36.994609832763672,-90.739395141601562 36.9962158203125,-90.74029541015625 37.050163269042969,-90.788192749023438 37.052379608154297,-90.783943176269531 37.140842437744141,-90.760856628417969 37.141082763671875,-90.758384704589844 37.165592193603516,-90.743385314941406 37.166652679443359,-90.74282836914062 [...]
diff --git a/pysal/examples/stl_hom.dbf b/pysal/examples/stl_hom.dbf
new file mode 100644
index 0000000..b3cde10
Binary files /dev/null and b/pysal/examples/stl_hom.dbf differ
diff --git a/pysal/examples/stl_hom.html b/pysal/examples/stl_hom.html
new file mode 100644
index 0000000..e37239d
--- /dev/null
+++ b/pysal/examples/stl_hom.html
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
+      "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <title>SAL Data Sets - St Louis Region Homicides</title>
+</head>
+
+<body>
+
+<h1>St Louis Region Homicides</h1>
+
+<h2>Data provided "as is," no warranties</h2>
+
+<h2>Description</h2>
+
+<p>Homicides and selected socio-economic characteristics
+   for counties surrounding St Louis, MO. Data aggregated
+   for three time
+   periods: 1979-84 (steady decline in homicides),
+   1984-88 (stable period), and 1988-93 (steady increase
+   in homicides).</p>
+
+<p>Type = polygon shape file, unprojected, lat-lon</p>
+
+<p>Observations = 78</p>
+
+<p>Variables = 21</p> 
+
+
+<h2>Source</h2>
+
+<p>S. Messner, L. Anselin, D. Hawkins, G. Deane, S. Tolnay,
+   R. Baller (2000). An Atlas of the Spatial Patterning of
+   County-Level Homicide, 1960-1990. Pittsburgh, PA, 
+   <a href="http://www.ncovr.heinz.cmu.edu" target="_blank">National
+   Consortium on Violence Research (NCOVR)</a>.</p>
+
+<h2>Variables</h2>
+
+<table>
+<thead>
+   <tr>
+      <th>Variable</th>
+      <th>Description</th>
+   </tr>
+</thead>
+<tbody>
+   <tr>
+      <td>NAME</td>
+      <td>county name</td>
+   </tr>
+   <tr>
+      <td>STATE_NAME</td>
+      <td>state name</td>
+   </tr>
+   <tr>
+      <td>STATE_FIPS</td>
+      <td>state fips code (character)</td>
+   </tr>
+   <tr>
+      <td>CNTY_FIPS</td>
+      <td>county fips code (character)</td>
+   </tr>
+   <tr>
+      <td>FIPS</td>
+      <td>combined state and county fips code (character)</td>
+   </tr>
+   <tr>
+      <td>FIPSNO</td>
+      <td>fips code as numeric variable</td>
+   </tr>
+   <tr>
+      <td>HR7984</td>
+      <td>homicide rate per 100,000 (1979-84)</td>
+   </tr>
+   <tr>
+      <td>HR8488</td>
+      <td>homicide rate per 100,000 (1984-88)</td>
+   </tr>
+   <tr>
+      <td>HR8893</td>
+      <td>homicide rate per 100,000 (1988-93)</td>
+   </tr>
+   <tr>
+      <td>HC7984</td>
+      <td>homicide count (1979-84)</td>
+   </tr>
+   <tr>
+      <td>HC8488</td>
+      <td>homicide count (1984-88)</td>
+   </tr>
+   <tr>
+      <td>HC8893</td>
+      <td>homicide count (1988-93)</td>
+   </tr>
+   <tr>
+      <td>PO7984</td>
+      <td>population total (1979-84)</td>
+   </tr>
+   <tr>
+      <td>PO8488</td>
+      <td>population total (1984-88)</td>
+   </tr>
+   <tr>
+      <td>PO8893</td>
+      <td>population total (1988-93)</td>
+   </tr>
+   <tr>
+      <td>PE77</td>
+      <td>police expenditures per capita, 1977</td>
+   </tr>
+   <tr>
+      <td>PE82</td>
+      <td>police expenditures per capita, 1982</td>
+   </tr>
+   <tr>
+      <td>PE87</td>
+      <td>police expenditures per capita, 1987</td>
+   </tr>
+   <tr>
+      <td>RDAC80</td>
+      <td>resource deprivation/affluence composite variable, 1980</td>
+   </tr>
+   <tr>
+      <td>RDAC85</td>
+      <td>resource deprivation/affluence composite variable, 1985</td>
+   </tr>
+   <tr>
+      <td>RDAC90</td>
+      <td>resource deprivation/affluence composite variable, 1990</td>
+   </tr>
+
+</tbody>
+</table>
+
+<br />
+<hr />
+<p>Prepared by <a href="mailto:anselin at uiuc.edu">Luc Anselin</a></p>
+<p><a href="http://sal.agecon.uiuc.edu">UIUC-ACE Spatial Analysis Laboratory</a></p>
+<p>Last updated June 16, 2003</p>
+</body>
+</html>
diff --git a/pysal/examples/stl_hom.shp b/pysal/examples/stl_hom.shp
new file mode 100644
index 0000000..577420f
Binary files /dev/null and b/pysal/examples/stl_hom.shp differ
diff --git a/pysal/examples/stl_hom.shx b/pysal/examples/stl_hom.shx
new file mode 100644
index 0000000..81eda1d
Binary files /dev/null and b/pysal/examples/stl_hom.shx differ
diff --git a/pysal/examples/stl_hom.txt b/pysal/examples/stl_hom.txt
new file mode 100644
index 0000000..cf94cc8
--- /dev/null
+++ b/pysal/examples/stl_hom.txt
@@ -0,0 +1,80 @@
+78,4
+"FIPSNO","HR8488","HR8893","HC8488"
+17107,1.290722,1.624458,2
+17001,2.655839,2.255492,9
+17129,1.742433,1.46789,1
+17017,1.437029,2.484256,1
+17009,0,0,0
+17115,6.036815,9.048673,37
+17167,5.441418,6.029489,48
+29127,0,1.800385,0
+17137,2.166249,4.581251,4
+17149,3.298008,3.790607,3
+17021,1.696334,1.447436,3
+17139,1.401227,1.191966,1
+17171,0,0,0
+17029,3.059402,1.608017,8
+29173,4.551143,1.949812,2
+17173,1.74703,0.74509,2
+29163,3.68062,4.173318,3
+17135,1.897425,3.783252,3
+17117,1.63764,2.085136,4
+17061,2.545112,2.176302,2
+17013,3.631346,6.309347,1
+17035,1.85031,10.855743,1
+29007,0.804887,4.211354,1
+17083,2.944756,0.80481,3
+29019,3.377637,3.215331,18
+29113,5.406741,2.833664,7
+17051,0.925198,1.59204,1
+17049,0,1.571158,0
+17079,1.804761,3.12759,1
+29139,3.567161,4.416896,2
+29027,3.769152,3.017486,6
+17005,2.575594,9.924245,2
+17119,6.715531,7.973957,83
+29219,7.870032,5.005464,7
+29183,2.305501,2.463891,21
+17025,0,0,0
+29189,5.860835,7.377974,289
+17159,1.147276,1.003875,1
+17121,3.702093,3.190047,8
+29510,36.000126,45.905406,763
+17027,0.592031,2.447597,1
+29051,6.140481,1.294958,19
+29073,4.395604,5.933098,3
+29151,1.648397,4.133997,1
+29071,4.457906,4.298311,17
+17163,20.15847,27.483827,268
+17191,2.208554,0.969791,2
+17047,2.502127,0,1
+17133,0.951095,2.934466,1
+17189,0,4.456427,0
+29099,4.159251,4.629264,33
+17081,10.01555,4.941533,19
+29131,3.941081,3.990041,4
+29125,7.53712,2.064324,3
+17193,2.269143,3.040253,2
+17065,2.235586,3.905411,1
+17157,4.534839,4.332839,8
+17145,1.821245,3.894111,2
+29055,9.644545,6.828794,9
+29221,8.400979,3.263947,8
+29161,4.036001,3.282163,7
+17055,7.543185,3.295762,16
+29186,5.125642,7.249679,4
+29187,4.428286,3.041846,10
+29169,3.429187,1.618018,7
+17077,2.610668,4.910801,8
+29157,7.23493,1.991457,6
+17199,6.842239,3.146192,20
+29065,1.42849,7.26665,1
+29093,9.329751,3.110904,5
+29123,5.436063,2.980271,3
+29215,6.577835,3.866767,7
+17181,4.392081,1.868408,4
+29179,5.856001,12.577034,2
+29017,5.763799,7.803599,3
+29031,2.651026,3.47149,8
+29203,7.79403,4.334822,3
+29223,0,8.451537,0
diff --git a/pysal/examples/stl_hom.wkt b/pysal/examples/stl_hom.wkt
new file mode 100644
index 0000000..a436b5b
--- /dev/null
+++ b/pysal/examples/stl_hom.wkt
@@ -0,0 +1,78 @@
+POLYGON ((-89.585220336914062 39.978794097900391,-89.581146240234375 40.094867706298828,-89.603988647460938 40.095306396484375,-89.60589599609375 40.136119842529297,-89.6103515625 40.3251953125,-89.269027709960938 40.329566955566406,-89.268562316894531 40.285579681396484,-89.154655456542969 40.285774230957031,-89.152763366699219 40.054969787597656,-89.151618957519531 39.919403076171875,-89.224777221679688 39.918678283691406,-89.411857604980469 39.918041229248047,-89.412437438964844 39.93 [...]
+POLYGON ((-90.921539306640625 39.847461700439453,-90.922317504882812 39.765838623046875,-91.373420715332031 39.761272430419922,-91.3817138671875 39.80377197265625,-91.449188232421875 39.863048553466797,-91.45098876953125 39.885242462158203,-91.434051513671875 39.901828765869141,-91.430389404296875 39.921836853027344,-91.447242736816406 39.946063995361328,-91.487289428710938 40.005752563476562,-91.504005432128906 40.06671142578125,-91.516128540039062 40.134544372558594,-91.506546020507812 [...]
+POLYGON ((-89.997596740722656 39.906204223632812,-90.000251770019531 40.114761352539062,-89.965545654296875 40.140285491943359,-89.924041748046875 40.140884399414062,-89.892723083496094 40.133277893066406,-89.879554748535156 40.144657135009766,-89.865646362304688 40.130641937255859,-89.845245361328125 40.140674591064453,-89.828346252441406 40.126659393310547,-89.802490234375 40.128086090087891,-89.778472900390625 40.136756896972656,-89.756202697753906 40.132270812988281,-89.7177505493164 [...]
+POLYGON ((-90.585357666015625 39.880741119384766,-90.542747497558594 39.917827606201172,-90.514976501464844 39.989692687988281,-90.442146301269531 40.020187377929688,-90.427772521972656 40.069713592529297,-90.40264892578125 40.078514099121094,-90.38873291015625 40.119422912597656,-90.353958129882812 40.127822875976562,-90.314056396484375 40.109951019287109,-90.289535522460938 40.067028045654297,-90.272071838378906 40.063060760498047,-90.257720947265625 40.0699462890625,-90.23775482177734 [...]
+POLYGON ((-90.577041625976562 39.845447540283203,-90.87890625 39.842494964599609,-90.921539306640625 39.847461700439453,-90.923110961914062 40.108623504638672,-90.698799133300781 40.106422424316406,-90.712196350097656 40.080902099609375,-90.679664611816406 40.076221466064453,-90.682174682617188 40.045818328857422,-90.645256042480469 40.0289306640625,-90.613410949707031 40.027854919433594,-90.609687805175781 40.020179748535156,-90.622215270996094 40.015083312988281,-90.607040405273438 40. [...]
+POLYGON ((-89.032203674316406 39.656520843505859,-89.14691162109375 39.656898498535156,-89.149192810058594 39.801521301269531,-89.191635131835938 39.816139221191406,-89.223388671875 39.811679840087891,-89.224777221679688 39.918678283691406,-89.151618957519531 39.919403076171875,-89.152763366699219 40.054969787597656,-88.755012512207031 40.059139251708984,-88.748764038085938 39.794776916503906,-88.761344909667969 39.793941497802734,-88.763114929199219 39.738189697265625,-88.81456756591796 [...]
+POLYGON ((-89.703964233398438 39.528034210205078,-89.929985046386719 39.527008056640625,-89.928977966308594 39.558292388916016,-89.986045837402344 39.704959869384766,-89.998001098632812 39.877189636230469,-89.997596740722656 39.906204223632812,-89.775184631347656 39.908252716064453,-89.769828796386719 39.919143676757812,-89.710472106933594 39.920162200927734,-89.705833435058594 39.976844787597656,-89.585220336914062 39.978794097900391,-89.4927978515625 39.980186462402344,-89.495201110839 [...]
+POLYGON ((-91.850715637207031 39.661178588867188,-91.848159790039062 39.94964599609375,-91.447242736816406 39.946063995361328,-91.430389404296875 39.921836853027344,-91.434051513671875 39.901828765869141,-91.45098876953125 39.885242462158203,-91.449188232421875 39.863048553466797,-91.3817138671875 39.80377197265625,-91.373420715332031 39.761272430419922,-91.367088317871094 39.724639892578125,-91.317665100097656 39.685916900634766,-91.721794128417969 39.686203002929688,-91.722763061523438 [...]
+POLYGON ((-90.154800415039062 39.525581359863281,-90.303291320800781 39.524715423583984,-90.300384521484375 39.639423370361328,-90.341606140136719 39.640064239501953,-90.341934204101562 39.667713165283203,-90.375999450683594 39.667934417724609,-90.375862121582031 39.754978179931641,-90.484756469726562 39.755531311035156,-90.483451843261719 39.79180908203125,-90.608596801757812 39.793941497802734,-90.588485717773438 39.809986114501953,-90.577041625976562 39.845447540283203,-90.58535766601 [...]
+POLYGON ((-91.2032470703125 39.600021362304688,-91.317665100097656 39.685916900634766,-91.367088317871094 39.724639892578125,-91.373420715332031 39.761272430419922,-90.922317504882812 39.765838623046875,-90.921539306640625 39.847461700439453,-90.87890625 39.842494964599609,-90.577041625976562 39.845447540283203,-90.588485717773438 39.809986114501953,-90.608596801757812 39.793941497802734,-90.646682739257812 39.704738616943359,-90.640907287597656 39.679855346679688,-90.612258911132812 39. [...]
+POLYGON ((-89.539955139160156 39.528648376464844,-89.540618896484375 39.645149230957031,-89.489250183105469 39.646518707275391,-89.490440368652344 39.68414306640625,-89.4366455078125 39.686393737792969,-89.435997009277344 39.748046875,-89.408485412597656 39.742588043212891,-89.332466125488281 39.764270782470703,-89.283317565917969 39.793663024902344,-89.261077880859375 39.820823669433594,-89.223388671875 39.811679840087891,-89.191635131835938 39.816139221191406,-89.149192810058594 39.801 [...]
+POLYGON ((-88.472915649414062 39.451450347900391,-88.590293884277344 39.451000213623047,-88.589958190917969 39.477745056152344,-88.604248046875 39.478302001953125,-88.604682922363281 39.491451263427734,-88.624359130859375 39.491134643554688,-88.624168395996094 39.506999969482422,-88.646820068359375 39.507160186767578,-88.648399353027344 39.524848937988281,-88.719940185546875 39.527130126953125,-88.720550537109375 39.581531524658203,-88.813041687011719 39.583431243896484,-88.8153381347656 [...]
+POLYGON ((-90.5877685546875 39.525283813476562,-90.582412719726562 39.565677642822266,-90.612258911132812 39.643844604492188,-90.640907287597656 39.679855346679688,-90.646682739257812 39.704738616943359,-90.608596801757812 39.793941497802734,-90.483451843261719 39.79180908203125,-90.484756469726562 39.755531311035156,-90.375862121582031 39.754978179931641,-90.375999450683594 39.667934417724609,-90.341934204101562 39.667713165283203,-90.341606140136719 39.640064239501953,-90.3003845214843 [...]
+POLYGON ((-88.013847351074219 39.379283905029297,-88.472190856933594 39.37664794921875,-88.472915649414062 39.451450347900391,-88.474266052246094 39.650478363037109,-88.058303833007812 39.65771484375,-88.059532165527344 39.685836791992188,-87.968612670898438 39.688838958740234,-87.965187072753906 39.484779357910156,-88.014053344726562 39.485370635986328,-88.013847351074219 39.379283905029297))
+POLYGON ((-91.444122314453125 39.321300506591797,-91.713165283203125 39.327243804931641,-91.723670959472656 39.340206146240234,-91.715087890625 39.604248046875,-91.722267150878906 39.604576110839844,-91.722763061523438 39.659435272216797,-91.721794128417969 39.686203002929688,-91.317665100097656 39.685916900634766,-91.2032470703125 39.600021362304688,-91.465652465820312 39.456977844238281,-91.444122314453125 39.321300506591797))
+POLYGON ((-88.810005187988281 39.214427947998047,-89.142524719238281 39.21673583984375,-89.138893127441406 39.349987030029297,-89.031814575195312 39.349178314208984,-89.032203674316406 39.656520843505859,-88.815338134765625 39.655979156494141,-88.813041687011719 39.583431243896484,-88.720550537109375 39.581531524658203,-88.719940185546875 39.527130126953125,-88.648399353027344 39.524848937988281,-88.646820068359375 39.507160186767578,-88.624168395996094 39.506999969482422,-88.62435913085 [...]
+POLYGON ((-91.417518615722656 39.147624969482422,-91.444122314453125 39.321300506591797,-91.465652465820312 39.456977844238281,-91.2032470703125 39.600021362304688,-91.15618896484375 39.552593231201172,-91.093612670898438 39.528926849365234,-91.064384460449219 39.473983764648438,-91.036338806152344 39.444412231445312,-90.947891235351562 39.400585174560547,-90.850494384765625 39.350452423095703,-90.779342651367188 39.296802520751953,-90.738082885742188 39.247810363769531,-90.7323379516601 [...]
+POLYGON ((-89.702468872070312 38.996799468994141,-89.710609436035156 39.354412078857422,-89.704071044921875 39.354877471923828,-89.703964233398438 39.528034210205078,-89.539955139160156 39.528648376464844,-89.537483215332031 39.349597930908203,-89.138893127441406 39.349987030029297,-89.142524719238281 39.21673583984375,-89.255943298339844 39.216102600097656,-89.257766723632812 39.025283813476562,-89.594169616699219 39.028202056884766,-89.592948913574219 38.998291015625,-89.64563751220703 [...]
+POLYGON ((-90.151832580566406 38.997974395751953,-90.152389526367188 39.258148193359375,-90.154800415039062 39.525581359863281,-89.929985046386719 39.527008056640625,-89.703964233398438 39.528034210205078,-89.704071044921875 39.354877471923828,-89.710609436035156 39.354412078857422,-89.702468872070312 38.996799468994141,-90.151832580566406 38.997974395751953))
+POLYGON ((-90.152389526367188 39.258148193359375,-90.204612731933594 39.251968383789062,-90.205551147460938 39.225673675537109,-90.317192077636719 39.224990844726562,-90.317848205566406 39.177394866943359,-90.4923095703125 39.175216674804688,-90.506370544433594 39.161960601806641,-90.519172668457031 39.185878753662109,-90.568428039550781 39.185012817382812,-90.586715698242188 39.177600860595703,-90.582313537597656 39.160869598388672,-90.608352661132812 39.117576599121094,-90.614273071289 [...]
+POLYGON ((-90.608352661132812 39.117576599121094,-90.611167907714844 39.107578277587891,-90.576240539550781 39.031734466552734,-90.575263977050781 39.005451202392578,-90.550468444824219 38.969856262207031,-90.531951904296875 38.957775115966797,-90.488288879394531 38.967193603515625,-90.469841003417969 38.959178924560547,-90.530426025390625 38.891609191894531,-90.570327758789062 38.871326446533203,-90.627212524414062 38.880794525146484,-90.668876647949219 38.935253143310547,-90.7060699462 [...]
+POLYGON ((-88.010856628417969 39.177513122558594,-88.36407470703125 39.174442291259766,-88.469703674316406 39.175361633300781,-88.471542358398438 39.213001251220703,-88.472190856933594 39.37664794921875,-88.013847351074219 39.379283905029297,-88.010856628417969 39.177513122558594))
+POLYGON ((-92.316963195800781 39.249309539794922,-92.314231872558594 39.347320556640625,-91.723670959472656 39.340206146240234,-91.713165283203125 39.327243804931641,-91.444122314453125 39.321300506591797,-91.417518615722656 39.147624969482422,-91.638282775878906 39.148555755615234,-91.643470764160156 39.062778472900391,-92.111885070800781 39.066963195800781,-92.104530334472656 39.242141723632812,-92.316963195800781 39.249309539794922))
+POLYGON ((-90.151832580566406 38.997974395751953,-90.279121398925781 38.997692108154297,-90.2789306640625 38.924716949462891,-90.319740295410156 38.924907684326172,-90.413070678710938 38.962329864501953,-90.469841003417969 38.959178924560547,-90.488288879394531 38.967193603515625,-90.531951904296875 38.957775115966797,-90.550468444824219 38.969856262207031,-90.575263977050781 39.005451202392578,-90.576240539550781 39.031734466552734,-90.611167907714844 39.107578277587891,-90.608352661132 [...]
+POLYGON ((-92.395118713378906 38.736011505126953,-92.409675598144531 38.760608673095703,-92.392578125 38.790939331054688,-92.393402099609375 38.811775207519531,-92.432884216308594 38.823997497558594,-92.474708557128906 38.864265441894531,-92.499984741210938 38.918071746826172,-92.566192626953125 38.968196868896484,-92.565650939941406 39.002174377441406,-92.432899475097656 39.250255584716797,-92.316963195800781 39.249309539794922,-92.104530334472656 39.242141723632812,-92.111885070800781  [...]
+POLYGON ((-90.96160888671875 38.871410369873047,-91.116539001464844 38.874469757080078,-91.11834716796875 38.929298400878906,-91.200668334960938 38.933181762695312,-91.199104309082031 38.9930419921875,-91.26904296875 38.996143341064453,-91.264923095703125 39.143535614013672,-91.193122863769531 39.143173217773438,-91.186759948730469 39.226673126220703,-90.732337951660156 39.224746704101562,-90.718193054199219 39.195873260498047,-90.71673583984375 39.144210815429688,-90.690399169921875 39. [...]
+POLYGON ((-89.257766723632812 39.025283813476562,-89.255943298339844 39.216102600097656,-89.142524719238281 39.21673583984375,-88.810005187988281 39.214427947998047,-88.810455322265625 38.917091369628906,-88.701034545898438 38.917793273925781,-88.702590942382812 38.830322265625,-89.143257141113281 38.825576782226562,-89.144851684570312 38.741279602050781,-89.262840270996094 38.742015838623047,-89.264930725097656 39.007617950439453,-89.256050109863281 39.008510589599609,-89.25776672363281 [...]
+POLYGON ((-88.365158081054688 38.915172576904297,-88.701034545898438 38.917793273925781,-88.810455322265625 38.917091369628906,-88.810005187988281 39.214427947998047,-88.471542358398438 39.213001251220703,-88.469703674316406 39.175361633300781,-88.36407470703125 39.174442291259766,-88.365158081054688 38.915172576904297))
+POLYGON ((-87.955390930175781 38.855442047119141,-88.262115478515625 38.853900909423828,-88.365455627441406 38.858062744140625,-88.365158081054688 38.915172576904297,-88.36407470703125 39.174442291259766,-88.010856628417969 39.177513122558594,-87.955650329589844 39.177749633789062,-87.955390930175781 38.855442047119141))
+POLYGON ((-91.42498779296875 38.713211059570312,-91.493743896484375 38.703960418701172,-91.561454772949219 38.678821563720703,-91.593994140625 38.682807922363281,-91.653541564941406 38.704471588134766,-91.643470764160156 39.062778472900391,-91.638282775878906 39.148555755615234,-91.417518615722656 39.147624969482422,-91.264923095703125 39.143535614013672,-91.26904296875 38.996143341064453,-91.275390625 38.843738555908203,-91.418510437011719 38.848403930664062,-91.42498779296875 38.713211 [...]
+POLYGON ((-92.219749450683594 38.638427734375,-92.193374633789062 38.658500671386719,-92.199501037597656 38.680587768554688,-92.224853515625 38.69635009765625,-92.2191162109375 38.7164306640625,-92.187454223632812 38.737979888916016,-92.143646240234375 38.8155517578125,-92.16339111328125 38.870895385742188,-92.1585693359375 38.884601593017578,-92.170272827148438 38.897499084472656,-92.155441284179688 38.928653717041016,-92.137191772460938 39.061893463134766,-92.111885070800781 39.0669631 [...]
+POLYGON ((-89.605598449707031 38.741294860839844,-89.606971740722656 38.871822357177734,-89.644203186035156 38.871788024902344,-89.645637512207031 38.996425628662109,-89.592948913574219 38.998291015625,-89.594169616699219 39.028202056884766,-89.257766723632812 39.025283813476562,-89.256050109863281 39.008510589599609,-89.264930725097656 39.007617950439453,-89.262840270996094 38.742015838623047,-89.605598449707031 38.741294860839844))
+POLYGON ((-90.121726989746094 38.800510406494141,-90.113121032714844 38.830467224121094,-90.1328125 38.853031158447266,-90.243927001953125 38.914508819580078,-90.2789306640625 38.924716949462891,-90.279121398925781 38.997692108154297,-90.151832580566406 38.997974395751953,-89.702468872070312 38.996799468994141,-89.645637512207031 38.996425628662109,-89.644203186035156 38.871788024902344,-89.606971740722656 38.871822357177734,-89.605598449707031 38.741294860839844,-89.60430908203125 38.66 [...]
+POLYGON ((-90.968650817871094 38.546318054199219,-91.013717651367188 38.562995910644531,-91.060562133789062 38.606834411621094,-91.088890075683594 38.609649658203125,-91.142280578613281 38.600337982177734,-91.204971313476562 38.611728668212891,-91.225311279296875 38.625041961669922,-91.247871398925781 38.656909942626953,-91.296379089355469 38.688400268554688,-91.334465026855469 38.702346801757812,-91.375076293945312 38.699016571044922,-91.42498779296875 38.713211059570312,-91.41851043701 [...]
+POLYGON ((-90.737709045410156 38.634517669677734,-90.783836364746094 38.577388763427734,-90.802215576171875 38.584445953369141,-90.822776794433594 38.581962585449219,-90.91204833984375 38.540630340576172,-90.968650817871094 38.546318054199219,-90.96160888671875 38.871410369873047,-90.958541870117188 38.895469665527344,-90.939468383789062 38.887535095214844,-90.880401611328125 38.890472412109375,-90.813392639160156 38.879413604736328,-90.805099487304688 38.911231994628906,-90.788131713867 [...]
+POLYGON ((-88.7020263671875 38.611400604248047,-88.702590942382812 38.830322265625,-88.701034545898438 38.917793273925781,-88.365158081054688 38.915172576904297,-88.365455627441406 38.858062744140625,-88.262115478515625 38.853900909423828,-88.261642456054688 38.742389678955078,-88.288833618164062 38.739486694335938,-88.27716064453125 38.661411285400391,-88.293960571289062 38.643444061279297,-88.286087036132812 38.620704650878906,-88.265701293945312 38.606903076171875,-88.7020263671875 38 [...]
+POLYGON ((-90.739524841308594 38.463615417480469,-90.737709045410156 38.634517669677734,-90.688613891601562 38.6585693359375,-90.680091857910156 38.678142547607422,-90.6396484375 38.693027496337891,-90.602333068847656 38.682033538818359,-90.547653198242188 38.692028045654297,-90.533950805664062 38.723419189453125,-90.490196228027344 38.759128570556641,-90.452125549316406 38.826511383056641,-90.43328857421875 38.831188201904297,-90.403091430664062 38.825519561767578,-90.360061645507812 38 [...]
+POLYGON ((-87.958946228027344 38.575351715087891,-88.149078369140625 38.576217651367188,-88.147964477539062 38.60430908203125,-88.265701293945312 38.606903076171875,-88.286087036132812 38.620704650878906,-88.293960571289062 38.643444061279297,-88.27716064453125 38.661411285400391,-88.288833618164062 38.739486694335938,-88.261642456054688 38.742389678955078,-88.262115478515625 38.853900909423828,-87.955390930175781 38.855442047119141,-87.917572021484375 38.855419158935547,-87.916572570800 [...]
+POLYGON ((-89.145423889160156 38.501522064208984,-89.144851684570312 38.741279602050781,-89.143257141113281 38.825576782226562,-88.702590942382812 38.830322265625,-88.7020263671875 38.611400604248047,-88.703521728515625 38.474075317382812,-89.144378662109375 38.474323272705078,-89.145423889160156 38.501522064208984))
+POLYGON ((-90.26123046875 38.532768249511719,-90.31646728515625 38.580005645751953,-90.302742004394531 38.670291900634766,-90.239692687988281 38.730060577392578,-90.19219970703125 38.760704040527344,-90.171195983886719 38.786655426025391,-90.121726989746094 38.800510406494141,-90.135177612304688 38.785484313964844,-90.163398742675781 38.773097991943359,-90.196571350097656 38.723964691162109,-90.202239990234375 38.700363159179688,-90.183578491210938 38.658771514892578,-90.183708190917969  [...]
+POLYGON ((-89.144851684570312 38.741279602050781,-89.145423889160156 38.501522064208984,-89.265357971191406 38.50860595703125,-89.297721862792969 38.5023193359375,-89.357093811035156 38.512825012207031,-89.398292541503906 38.488391876220703,-89.430625915527344 38.493400573730469,-89.457084655761719 38.486160278320312,-89.481193542480469 38.466224670410156,-89.524101257324219 38.480274200439453,-89.542320251464844 38.473468780517578,-89.572303771972656 38.481159210205078,-89.6187210083007 [...]
+POLYGON ((-91.954345703125 38.5972900390625,-92.028793334960938 38.553165435791016,-92.012504577636719 38.507251739501953,-92.034919738769531 38.475055694580078,-92.074729919433594 38.470149993896484,-92.109535217285156 38.45672607421875,-92.143455505371094 38.468242645263672,-92.167549133300781 38.467723846435547,-92.159446716308594 38.438880920410156,-92.128013610839844 38.414619445800781,-92.124382019042969 38.395198822021484,-92.137992858886719 38.381759643554688,-92.178337097167969  [...]
+POLYGON ((-91.377273559570312 38.210758209228516,-91.540191650390625 38.213146209716797,-91.542747497558594 38.157341003417969,-91.638160705566406 38.157077789306641,-91.652229309082031 38.157737731933594,-91.651405334472656 38.289676666259766,-91.653564453125 38.703544616699219,-91.653541564941406 38.704471588134766,-91.593994140625 38.682807922363281,-91.561454772949219 38.678821563720703,-91.493743896484375 38.703960418701172,-91.42498779296875 38.713211059570312,-91.375076293945312 3 [...]
+POLYGON ((-92.195640563964844 38.288467407226562,-92.196113586425781 38.333343505859375,-92.1925048828125 38.362895965576172,-92.178337097167969 38.376808166503906,-92.137992858886719 38.381759643554688,-92.124382019042969 38.395198822021484,-92.128013610839844 38.414619445800781,-92.159446716308594 38.438880920410156,-92.167549133300781 38.467723846435547,-92.143455505371094 38.468242645263672,-92.109535217285156 38.45672607421875,-92.074729919433594 38.470149993896484,-92.0349197387695 [...]
+POLYGON ((-90.782661437988281 38.207981109619141,-91.102394104003906 38.2042236328125,-91.34429931640625 38.204006195068359,-91.377708435058594 38.204860687255859,-91.377273559570312 38.210758209228516,-91.369621276855469 38.416683197021484,-91.375076293945312 38.699016571044922,-91.334465026855469 38.702346801757812,-91.296379089355469 38.688400268554688,-91.247871398925781 38.656909942626953,-91.225311279296875 38.625041961669922,-91.204971313476562 38.611728668212891,-91.1422805786132 [...]
+POLYGON ((-89.904197692871094 38.223079681396484,-89.930282592773438 38.276473999023438,-89.923301696777344 38.285110473632812,-89.910957336425781 38.279712677001953,-89.917572021484375 38.309150695800781,-90.0313720703125 38.311885833740234,-90.031501770019531 38.329559326171875,-90.145423889160156 38.408786773681641,-90.146163940429688 38.426914215087891,-90.265785217285156 38.518688201904297,-90.26123046875 38.532768249511719,-90.240943908691406 38.56280517578125,-90.183708190917969 3 [...]
+POLYGON ((-88.153724670410156 38.259429931640625,-88.374153137207031 38.256660461425781,-88.707603454589844 38.259716033935547,-88.703521728515625 38.474075317382812,-88.7020263671875 38.611400604248047,-88.265701293945312 38.606903076171875,-88.147964477539062 38.60430908203125,-88.149078369140625 38.576217651367188,-88.153724670410156 38.259429931640625))
+POLYGON ((-88.153724670410156 38.259429931640625,-88.149078369140625 38.576217651367188,-87.958946228027344 38.575351715087891,-87.949729919433594 38.538066864013672,-87.963546752929688 38.496536254882812,-87.952728271484375 38.451976776123047,-87.959526062011719 38.436199188232422,-87.9515380859375 38.424766540527344,-87.979057312011719 38.401084899902344,-87.97894287109375 38.377967834472656,-87.963676452636719 38.350124359130859,-87.960708618164062 38.295692443847656,-87.9884262084960 [...]
+POLYGON ((-89.904197692871094 38.223079681396484,-90.040107727050781 38.223464965820312,-90.038887023925781 38.13690185546875,-90.207527160644531 38.088905334472656,-90.254058837890625 38.122169494628906,-90.289634704589844 38.166816711425781,-90.336715698242188 38.188713073730469,-90.364768981933594 38.234298706054688,-90.369346618652344 38.323558807373047,-90.358688354492188 38.365329742431641,-90.339607238769531 38.390846252441406,-90.301841735839844 38.427356719970703,-90.26578521728 [...]
+POLYGON ((-89.597236633300781 38.216907501220703,-89.714385986328125 38.219024658203125,-89.709686279296875 38.418910980224609,-89.669769287109375 38.427585601806641,-89.664512634277344 38.442092895507812,-89.645706176757812 38.440757751464844,-89.626335144042969 38.449390411376953,-89.618721008300781 38.466167449951172,-89.572303771972656 38.481159210205078,-89.542320251464844 38.473468780517578,-89.524101257324219 38.480274200439453,-89.481193542480469 38.466224670410156,-89.4570846557 [...]
+POLYGON ((-90.638938903808594 38.080215454101562,-90.657737731933594 38.085933685302734,-90.656219482421875 38.100906372070312,-90.684783935546875 38.095195770263672,-90.687408447265625 38.112846374511719,-90.782661437988281 38.207981109619141,-90.740684509277344 38.393348693847656,-90.739524841308594 38.463615417480469,-90.690193176269531 38.465919494628906,-90.684532165527344 38.443305969238281,-90.669242858886719 38.442546844482422,-90.658111572265625 38.481632232666016,-90.6138763427 [...]
+POLYGON ((-88.708427429199219 38.129184722900391,-89.133186340332031 38.1275634765625,-89.151893615722656 38.129432678222656,-89.150909423828125 38.213733673095703,-89.144378662109375 38.474323272705078,-88.703521728515625 38.474075317382812,-88.707603454589844 38.259716033935547,-88.708427429199219 38.129184722900391))
+POLYGON ((-92.404434204101562 38.020709991455078,-92.516227722167969 38.024806976318359,-92.517822265625 38.035198211669922,-92.555259704589844 38.049690246582031,-92.558685302734375 38.061847686767578,-92.575721740722656 38.063690185546875,-92.575897216796875 38.095882415771484,-92.589996337890625 38.097339630126953,-92.589363098144531 38.110054016113281,-92.599922180175781 38.110694885253906,-92.599800109863281 38.135639190673828,-92.610389709472656 38.136730194091797,-92.6088027954101 [...]
+POLYGON ((-92.188056945800781 38.017032623291016,-92.182304382324219 38.164070129394531,-92.199317932128906 38.165054321289062,-92.195640563964844 38.288467407226562,-91.651405334472656 38.289676666259766,-91.652229309082031 38.157737731933594,-91.638160705566406 38.157077789306641,-91.639785766601562 38.051868438720703,-91.903495788574219 38.053779602050781,-91.923751831054688 38.047489166259766,-91.933311462402344 38.035964965820312,-91.958641052246094 38.041805267333984,-91.9745635986 [...]
+POLYGON ((-88.37646484375 37.914005279541016,-88.374153137207031 38.256660461425781,-88.153724670410156 38.259429931640625,-87.988426208496094 38.259773254394531,-87.980018615722656 38.241085052490234,-87.986007690429688 38.234813690185547,-87.977928161621094 38.200714111328125,-87.932289123535156 38.171131134033203,-87.931991577148438 38.157527923583984,-87.950569152832031 38.136913299560547,-87.973503112792969 38.131759643554688,-88.018547058105469 38.103302001953125,-88.0123291015625  [...]
+POLYGON ((-88.37646484375 37.914005279541016,-88.708442687988281 37.909805297851562,-88.708427429199219 38.129184722900391,-88.707603454589844 38.259716033935547,-88.374153137207031 38.256660461425781,-88.37646484375 37.914005279541016))
+POLYGON ((-90.207527160644531 38.088905334472656,-90.038887023925781 38.13690185546875,-90.040107727050781 38.223464965820312,-89.904197692871094 38.223079681396484,-89.714385986328125 38.219024658203125,-89.597236633300781 38.216907501220703,-89.60272216796875 37.954021453857422,-89.667251586914062 37.839733123779297,-89.685874938964844 37.828826904296875,-89.691055297851562 37.804794311523438,-89.728446960449219 37.840991973876953,-89.851715087890625 37.905063629150391,-89.861045837402 [...]
+POLYGON ((-89.133186340332031 38.1275634765625,-89.140304565429688 38.107643127441406,-89.1234130859375 38.093090057373047,-89.1400146484375 38.047359466552734,-89.145538330078125 37.991172790527344,-89.179008483886719 37.949115753173828,-89.60272216796875 37.954021453857422,-89.597236633300781 38.216907501220703,-89.150909423828125 38.213733673095703,-89.151893615722656 38.129432678222656,-89.133186340332031 38.1275634765625))
+POLYGON ((-91.110374450683594 37.739475250244141,-91.155738830566406 37.737960815429688,-91.155967712402344 37.696247100830078,-91.164108276367188 37.696136474609375,-91.320724487304688 37.701606750488281,-91.31866455078125 37.783241271972656,-91.535430908203125 37.787075042724609,-91.53155517578125 38.154808044433594,-91.542747497558594 38.157341003417969,-91.540191650390625 38.213146209716797,-91.377273559570312 38.210758209228516,-91.377708435058594 38.204860687255859,-91.344299316406 [...]
+POLYGON ((-90.649925231933594 37.735160827636719,-91.110374450683594 37.739475250244141,-91.102394104003906 38.2042236328125,-90.782661437988281 38.207981109619141,-90.687408447265625 38.112846374511719,-90.684783935546875 38.095195770263672,-90.656219482421875 38.100906372070312,-90.657737731933594 38.085933685302734,-90.638938903808594 38.080215454101562,-90.649925231933594 37.735160827636719))
+POLYGON ((-92.030677795410156 38.011772155761719,-91.974563598632812 38.011104583740234,-91.958641052246094 38.041805267333984,-91.933311462402344 38.035964965820312,-91.923751831054688 38.047489166259766,-91.903495788574219 38.053779602050781,-91.639785766601562 38.051868438720703,-91.638160705566406 38.157077789306641,-91.542747497558594 38.157341003417969,-91.53155517578125 38.154808044433594,-91.535430908203125 37.787075042724609,-91.816192626953125 37.787055969238281,-91.81027221679 [...]
+POLYGON ((-88.708442687988281 37.909805297851562,-88.709480285644531 37.867202758789062,-89.154891967773438 37.865646362304688,-89.155082702636719 37.949047088623047,-89.179008483886719 37.949115753173828,-89.145538330078125 37.991172790527344,-89.1400146484375 38.047359466552734,-89.1234130859375 38.093090057373047,-89.140304565429688 38.107643127441406,-89.133186340332031 38.1275634765625,-88.708427429199219 38.129184722900391,-88.708442687988281 37.909805297851562))
+POLYGON ((-90.116325378417969 37.672393798828125,-90.160240173339844 37.706611633300781,-90.202995300292969 37.676002502441406,-90.463249206542969 37.880016326904297,-90.3250732421875 37.986186981201172,-90.415153503417969 38.045375823974609,-90.329277038574219 38.099925994873047,-90.297019958496094 38.091983795166016,-90.254058837890625 38.122169494628906,-90.207527160644531 38.088905334472656,-90.134712219238281 38.053951263427734,-90.119338989257812 38.032272338867188,-90.041923522949 [...]
+POLYGON ((-90.152122497558594 37.643196105957031,-90.539215087890625 37.642776489257812,-90.653701782226562 37.641746520996094,-90.649925231933594 37.735160827636719,-90.638938903808594 38.080215454101562,-90.609596252441406 38.073234558105469,-90.617973327636719 38.047321319580078,-90.612892150878906 38.020626068115234,-90.624412536621094 38.009639739990234,-90.602691650390625 38.002586364746094,-90.415153503417969 38.045375823974609,-90.3250732421875 37.986186981201172,-90.463249206542 [...]
+POLYGON ((-92.249320983886719 37.607109069824219,-92.249122619628906 37.648834228515625,-92.409805297851562 37.712673187255859,-92.4080810546875 37.861454010009766,-92.404434204101562 38.020709991455078,-92.188056945800781 38.017032623291016,-92.030677795410156 38.011772155761719,-92.029937744140625 37.785987854003906,-92.022674560546875 37.777976989746094,-92.031288146972656 37.604129791259766,-92.249320983886719 37.607109069824219))
+POLYGON ((-89.1531982421875 37.604103088378906,-89.459335327148438 37.606403350830078,-89.461090087890625 37.583286285400391,-89.476776123046875 37.570144653320312,-89.524971008300781 37.571956634521484,-89.51336669921875 37.615928649902344,-89.519180297851562 37.650375366210938,-89.513374328613281 37.679840087890625,-89.521522521972656 37.694797515869141,-89.581436157226562 37.706104278564453,-89.666458129882812 37.745452880859375,-89.675857543945312 37.783969879150391,-89.6910552978515 [...]
+POLYGON ((-90.152122497558594 37.643196105957031,-90.116325378417969 37.672393798828125,-90.007438659667969 37.819301605224609,-89.937873840332031 37.878044128417969,-89.900550842285156 37.875904083251953,-89.866813659667969 37.891876220703125,-89.861045837402344 37.905487060546875,-89.851715087890625 37.905063629150391,-89.728446960449219 37.840991973876953,-89.691055297851562 37.804794311523438,-89.675857543945312 37.783969879150391,-89.666458129882812 37.745452880859375,-89.5814361572 [...]
+POLYGON ((-88.712860107421875 37.605228424072266,-89.046318054199219 37.603298187255859,-89.1531982421875 37.604103088378906,-89.154891967773438 37.865646362304688,-88.709480285644531 37.867202758789062,-88.712860107421875 37.605228424072266))
+POLYGON ((-91.654685974121094 37.421852111816406,-91.760848999023438 37.424907684326172,-91.764305114746094 37.595333099365234,-91.8201904296875 37.59881591796875,-91.818557739257812 37.714012145996094,-91.810272216796875 37.746814727783203,-91.816192626953125 37.787055969238281,-91.535430908203125 37.787075042724609,-91.31866455078125 37.783241271972656,-91.320724487304688 37.701606750488281,-91.164108276367188 37.696136474609375,-91.164642333984375 37.590499877929688,-91.32093811035156 [...]
+POLYGON ((-90.539215087890625 37.642776489257812,-90.543220520019531 37.596954345703125,-90.554252624511719 37.596408843994141,-90.555435180664062 37.312156677246094,-90.560089111328125 37.273578643798828,-90.742828369140625 37.271846771240234,-90.755569458007812 37.273075103759766,-90.750823974609375 37.369235992431641,-90.778640747070312 37.370307922363281,-90.777984619140625 37.601970672607422,-91.005767822265625 37.604354858398438,-91.110031127929688 37.589874267578125,-91.1646423339 [...]
+POLYGON ((-90.222129821777344 37.311878204345703,-90.555435180664062 37.312156677246094,-90.554252624511719 37.596408843994141,-90.543220520019531 37.596954345703125,-90.539215087890625 37.642776489257812,-90.152122497558594 37.643196105957031,-90.1494140625 37.599239349365234,-90.149795532226562 37.311836242675781,-90.222129821777344 37.311878204345703))
+POLYGON ((-92.090667724609375 37.058235168457031,-92.25982666015625 37.061748504638672,-92.249061584472656 37.255203247070312,-92.257232666015625 37.257282257080078,-92.2520751953125 37.477806091308594,-92.249320983886719 37.607109069824219,-92.031288146972656 37.604129791259766,-91.8201904296875 37.59881591796875,-91.764305114746094 37.595333099365234,-91.760848999023438 37.424907684326172,-91.654685974121094 37.421852111816406,-91.6669921875 37.04888916015625,-92.090667724609375 37.058 [...]
+POLYGON ((-89.468742370605469 37.339408874511719,-89.435737609863281 37.355716705322266,-89.427574157714844 37.411018371582031,-89.453620910644531 37.45318603515625,-89.494781494140625 37.491725921630859,-89.524971008300781 37.571956634521484,-89.476776123046875 37.570144653320312,-89.461090087890625 37.583286285400391,-89.459335327148438 37.606403350830078,-89.1531982421875 37.604103088378906,-89.046318054199219 37.603298187255859,-89.0496826171875 37.33721923828125,-89.245895385742188  [...]
+POLYGON ((-90.742828369140625 37.271846771240234,-90.743385314941406 37.166652679443359,-90.758384704589844 37.165592193603516,-90.760856628417969 37.141082763671875,-90.783943176269531 37.140842437744141,-90.788192749023438 37.052379608154297,-90.971664428710938 37.057086944580078,-90.971343994140625 37.099258422851562,-91.024444580078125 37.099964141845703,-91.027008056640625 37.140739440917969,-91.040885925292969 37.141468048095703,-91.043159484863281 37.167739868164062,-91.0754470825 [...]
+POLYGON ((-89.869422912597656 37.131675720214844,-89.960624694824219 37.131359100341797,-89.964256286621094 37.065155029296875,-89.995964050292969 37.063217163085938,-89.997604370117188 37.049606323242188,-90.114639282226562 37.048614501953125,-90.114952087402344 37.086696624755859,-90.221672058105469 37.086109161376953,-90.222129821777344 37.311878204345703,-90.149795532226562 37.311836242675781,-90.1494140625 37.599239349365234,-89.865379333496094 37.601329803466797,-89.869422912597656 [...]
+POLYGON ((-89.865379333496094 37.601329803466797,-89.809608459472656 37.601028442382812,-89.773551940917969 37.589332580566406,-89.733505249023438 37.598934173583984,-89.721290588378906 37.593063354492188,-89.710273742675781 37.599430084228516,-89.691093444824219 37.595382690429688,-89.685264587402344 37.586776733398438,-89.641731262207031 37.600433349609375,-89.633583068847656 37.590927124023438,-89.609199523925781 37.596843719482422,-89.591163635253906 37.574195861816406,-89.5249710083 [...]
+POLYGON ((-91.6669921875 37.04888916015625,-91.654685974121094 37.421852111816406,-91.223251342773438 37.412868499755859,-91.182647705078125 37.411170959472656,-91.181083679199219 37.316432952880859,-91.161956787109375 37.314884185791016,-91.16180419921875 37.256397247314453,-91.12933349609375 37.252304077148438,-91.130210876464844 37.239597320556641,-91.138282775878906 37.238578796386719,-91.13690185546875 37.201873779296875,-91.097038269042969 37.202407836914062,-91.095329284667969 37. [...]
+POLYGON ((-90.680656433105469 36.925613403320312,-90.697952270507812 36.926803588867188,-90.699165344238281 36.966693878173828,-90.718193054199219 36.967864990234375,-90.7186279296875 36.994609832763672,-90.739395141601562 36.9962158203125,-90.74029541015625 37.050163269042969,-90.788192749023438 37.052379608154297,-90.783943176269531 37.140842437744141,-90.760856628417969 37.141082763671875,-90.758384704589844 37.165592193603516,-90.743385314941406 37.166652679443359,-90.742828369140625 [...]
diff --git a/pysal/examples/stl_hom_rook.gal b/pysal/examples/stl_hom_rook.gal
new file mode 100644
index 0000000..7d8e0f8
--- /dev/null
+++ b/pysal/examples/stl_hom_rook.gal
@@ -0,0 +1,157 @@
+0 78 stl_hom POLY_ID_OG
+1 3
+7 3 6
+2 3
+10 8 5
+3 3
+7 4 1
+4 4
+9 5 3 7
+5 4
+10 4 9 2
+6 5
+16 12 11 7 1
+7 8
+19 9 11 18 1 6 3 4
+8 3
+15 10 2
+9 7
+20 19 13 10 7 4 5
+10 9
+17 15 9 13 20 21 5 2 8
+11 4
+18 16 6 7
+12 3
+16 14 6
+13 3
+20 9 10
+14 3
+22 16 12
+15 4
+23 10 17 8
+16 8
+28 27 18 22 14 12 6 11
+17 6
+30 26 23 21 10 15
+18 7
+33 32 19 16 27 11 7
+19 6
+24 20 18 33 7 9
+20 6
+24 21 19 9 13 10
+21 6
+35 26 24 20 17 10
+22 4
+29 28 14 16
+23 5
+31 25 17 30 15
+24 5
+35 33 21 19 20
+25 3
+42 31 23
+26 5
+34 30 21 35 17
+27 7
+41 39 32 28 36 16 18
+28 5
+29 36 27 22 16
+29 4
+38 36 22 28
+30 6
+43 34 31 26 17 23
+31 6
+44 42 43 30 23 25
+32 4
+33 27 41 18
+33 8
+46 40 35 32 41 18 24 19
+34 5
+43 35 45 26 30
+35 7
+45 37 33 21 24 34 26
+36 6
+47 39 38 29 28 27
+37 6
+51 45 40 46 49 35
+38 4
+48 47 29 36
+39 6
+52 50 41 47 36 27
+40 3
+46 37 33
+41 6
+50 46 39 27 33 32
+42 4
+53 31 44 25
+43 8
+61 59 54 44 45 34 30 31
+44 5
+54 53 43 31 42
+45 7
+60 59 51 37 35 43 34
+46 7
+49 50 57 41 33 40 37
+47 7
+56 55 52 48 38 36 39
+48 3
+55 47 38
+49 5
+63 51 46 57 37
+50 6
+58 57 52 39 41 46
+51 6
+64 60 49 63 37 45
+52 6
+62 58 56 47 39 50
+53 4
+65 54 44 42
+54 5
+65 61 43 44 53
+55 3
+56 48 47
+56 4
+62 55 47 52
+57 7
+67 63 66 58 50 49 46
+58 5
+66 52 62 50 57
+59 6
+69 61 60 70 45 43
+60 5
+70 64 51 45 59
+61 6
+72 65 59 69 43 54
+62 5
+68 66 56 52 58
+63 5
+64 57 67 49 51
+64 6
+71 70 63 67 51 60
+65 4
+61 72 54 53
+66 6
+73 67 62 68 58 57
+67 7
+76 75 71 66 57 64 63
+68 3
+73 62 66
+69 6
+77 72 70 74 59 61
+70 7
+74 71 78 64 60 69 59
+71 5
+78 75 67 64 70
+72 4
+77 69 61 65
+73 3
+76 66 68
+74 4
+77 78 70 69
+75 4
+78 76 67 71
+76 3
+73 67 75
+77 3
+74 69 72
+78 4
+75 71 74 70
diff --git a/pysal/examples/street_net_pts.dbf b/pysal/examples/street_net_pts.dbf
new file mode 100644
index 0000000..622c92c
Binary files /dev/null and b/pysal/examples/street_net_pts.dbf differ
diff --git a/pysal/examples/street_net_pts.prj b/pysal/examples/street_net_pts.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/street_net_pts.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/street_net_pts.qpj b/pysal/examples/street_net_pts.qpj
new file mode 100644
index 0000000..5fbc831
--- /dev/null
+++ b/pysal/examples/street_net_pts.qpj
@@ -0,0 +1 @@
+GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
diff --git a/pysal/examples/street_net_pts.shp b/pysal/examples/street_net_pts.shp
new file mode 100644
index 0000000..977293c
Binary files /dev/null and b/pysal/examples/street_net_pts.shp differ
diff --git a/pysal/examples/street_net_pts.shx b/pysal/examples/street_net_pts.shx
new file mode 100644
index 0000000..841b1c2
Binary files /dev/null and b/pysal/examples/street_net_pts.shx differ
diff --git a/pysal/examples/taz.dbf b/pysal/examples/taz.dbf
new file mode 100644
index 0000000..29e6756
Binary files /dev/null and b/pysal/examples/taz.dbf differ
diff --git a/pysal/examples/taz.shp b/pysal/examples/taz.shp
new file mode 100644
index 0000000..df6d3c6
Binary files /dev/null and b/pysal/examples/taz.shp differ
diff --git a/pysal/examples/taz.shx b/pysal/examples/taz.shx
new file mode 100644
index 0000000..c3d9972
Binary files /dev/null and b/pysal/examples/taz.shx differ
diff --git a/pysal/examples/us48.dbf b/pysal/examples/us48.dbf
new file mode 100644
index 0000000..a289e04
Binary files /dev/null and b/pysal/examples/us48.dbf differ
diff --git a/pysal/examples/us48.shp b/pysal/examples/us48.shp
new file mode 100644
index 0000000..1d032dd
Binary files /dev/null and b/pysal/examples/us48.shp differ
diff --git a/pysal/examples/us48.shx b/pysal/examples/us48.shx
new file mode 100644
index 0000000..aa683fc
Binary files /dev/null and b/pysal/examples/us48.shx differ
diff --git a/pysal/examples/usjoin.csv b/pysal/examples/usjoin.csv
new file mode 100644
index 0000000..24ab198
--- /dev/null
+++ b/pysal/examples/usjoin.csv
@@ -0,0 +1,49 @@
+"Name","STATE_FIPS",1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009
+"Alabama",1,323,267,224,162,166,211,217,251,267,244,252,281,375,518,658,738,784,754,805,881,833,909,1045,1106,1161,1139,1273,1356,1421,1468,1526,1558,1587,1667,1758,1890,2030,2169,2294,2516,2748,2979,3225,3544,3960,4351,4765,5323,5817,6500,7199,7892,8712,9185,9783,10800,11583,12202,12912,13842,14899,15832,16536,17462,17991,18860,19683,20329,21129,22123,22987,23471,24467,25161,26065,27665,29097,30634,31988,32819,32274
+"Arizona",4,600,520,429,321,308,362,416,462,504,478,490,505,638,917,1004,1050,1124,1117,1186,1324,1305,1367,1623,1716,1716,1696,1752,1850,1893,1885,1974,2059,2103,2167,2204,2310,2412,2587,2754,3092,3489,3843,4145,4487,4929,5329,5528,6074,6642,7586,8604,9590,10658,10945,11654,12885,13808,14463,15130,15795,16568,17211,17563,18131,18756,19774,20634,21611,22781,24133,25189,25578,26232,26469,27106,28753,30671,32552,33470,33445,32077
+"Arkansas",5,310,228,215,157,157,187,207,247,256,231,249,260,342,481,559,687,740,756,741,886,813,847,957,1027,1066,1074,1176,1230,1244,1314,1412,1405,1511,1574,1656,1777,1886,2094,2219,2409,2619,2849,3096,3415,3985,4376,4655,5155,5670,6509,7088,7586,8564,8952,9476,10560,11264,11734,12184,13016,13813,14509,15255,16425,16995,17750,18546,19442,20229,21260,22244,22257,23532,23929,25074,26465,27512,29041,31070,31800,31493
+"California",6,991,887,749,580,546,603,660,771,795,771,781,844,1013,1286,1549,1583,1583,1671,1693,1763,1744,1877,2080,2207,2249,2227,2379,2495,2579,2596,2740,2823,2880,3004,3102,3274,3417,3663,3878,4207,4540,4815,5034,5451,5947,6553,7091,7815,8570,9618,10846,12029,13205,13774,14491,15927,16909,17628,18625,19713,20765,21889,22024,22722,22927,23473,24496,25563,26759,28280,29910,32275,32750,32900,33801,35663,37463,40169,41943,42377,40902
+"Colorado",8,634,578,471,354,353,368,444,542,532,506,516,545,649,895,1039,1065,1189,1211,1359,1454,1430,1521,1796,1880,1813,1773,1869,1960,2104,2157,2261,2340,2417,2471,2539,2638,2800,2982,3142,3381,3686,4055,4413,4791,5310,5864,6321,6895,7567,8539,9596,10809,12141,12945,13570,14751,15416,15772,16417,17285,18548,19703,20487,21447,22526,23498,24865,26231,27950,29860,31546,32949,34228,33963,34092,35543,37388,39662,41165,41719,40093
+"Connecticut",9,1024,921,801,620,583,653,706,806,860,769,836,918,1145,1418,1593,1599,1565,1580,1702,1720,1663,1891,2158,2298,2393,2350,2477,2684,2810,2731,2832,2926,3042,3175,3250,3401,3583,3874,4195,4443,4847,5090,5300,5697,6241,6813,7239,7885,8712,9720,10971,12439,13865,14903,15799,17580,18763,20038,21895,24007,25797,26736,26863,28635,29602,30532,31947,33472,35596,37452,39300,40640,42279,42021,42398,45009,47022,51133,53930,54528,52736
+"Delaware",10,1032,857,775,590,564,645,701,868,949,795,899,1027,1164,1291,1464,1504,1526,1565,1664,1685,1805,2075,2155,2244,2341,2306,2507,2749,2645,2651,2724,2805,2815,2933,3049,3210,3468,3610,3785,4079,4421,4608,4892,5303,5871,6347,6729,7349,7913,8658,9549,10803,11873,12727,13529,14816,16056,16781,17933,19312,20930,21636,22342,23094,23823,24530,25391,26640,27405,29571,30778,31255,32664,33463,34123,35998,37297,39358,40251,40698,40135
+"Florida",12,518,470,398,319,288,348,376,450,487,460,495,522,609,787,1009,1114,1174,1169,1167,1201,1208,1304,1388,1474,1567,1562,1673,1791,1836,1887,2010,2023,2039,2113,2200,2348,2498,2685,2909,3249,3659,4006,4286,4703,5235,5616,5895,6376,7010,7921,8879,10049,11195,11789,12637,13764,14705,15423,16415,17593,19045,19855,20189,20661,21652,22340,23512,24616,25722,26930,27780,28145,28852,29499,30277,32462,34460,36934,37781,37808,36565
+"Georgia",13,347,307,256,200,204,244,268,302,313,290,309,337,419,567,725,831,879,845,887,988,969,1065,1204,1280,1326,1302,1423,1499,1523,1583,1660,1698,1744,1844,1961,2082,2258,2462,2655,2891,3162,3394,3666,4038,4497,4867,5152,5694,6221,6989,7722,8474,9435,10054,10849,12185,13143,13990,14820,15876,16803,17738,18289,19333,20129,21170,22230,23586,24547,26134,27340,27940,28596,28660,29060,29995,31498,32739,33895,34127,33086
+"Idaho",16,507,503,374,274,227,403,399,475,423,426,437,463,596,914,1025,1095,1135,1203,1278,1345,1277,1329,1497,1644,1550,1559,1596,1728,1783,1817,1891,1898,1975,2092,2168,2250,2554,2591,2755,2910,3269,3558,3761,4150,4709,5382,5571,6116,6510,7319,7894,8735,9405,9621,10315,11069,11647,11968,12611,13548,14803,15866,16195,17236,18258,18846,19630,20353,20830,21923,22835,24180,25124,25485,25912,27846,29003,30954,32168,32322,30987
+"Illinois",17,948,807,671,486,437,505,573,650,731,648,704,751,892,1036,1258,1386,1465,1534,1639,1816,1685,1831,2026,2091,2209,2177,2270,2452,2522,2511,2642,2700,2789,2902,2981,3131,3360,3599,3785,4045,4354,4580,4874,5266,5903,6464,6986,7623,8385,9277,10243,11077,12250,12771,13289,14682,15508,16284,17289,18461,19634,20756,21320,22764,23386,24440,25643,27005,28347,29974,31145,32259,32808,33325,34205,35599,36825,39220,41238,42049,40933
+"Indiana",18,607,514,438,310,294,359,421,481,547,472,518,551,724,913,1138,1198,1251,1204,1313,1456,1361,1524,1711,1778,1943,1805,1907,2008,2042,2015,2131,2209,2246,2399,2486,2617,2856,3041,3153,3401,3714,3810,4105,4455,5100,5440,5830,6516,7199,8006,8814,9449,10355,10698,11203,12445,13143,13821,14664,15616,16770,17625,18055,19269,20112,21153,21845,22775,23748,25182,26143,27011,27590,28059,29089,30126,30768,32305,33151,33978,33174
+"Iowa",19,581,510,400,297,253,269,425,393,523,458,475,501,609,835,1024,1003,1092,1245,1217,1642,1356,1532,1638,1725,1657,1788,1670,1758,1939,1990,2026,2061,2180,2284,2432,2549,2833,3067,3093,3312,3652,3862,4005,4473,5398,5596,6192,6580,7283,8438,9114,9671,10968,11227,11485,12798,13395,14020,14899,15315,16562,17380,17859,18939,18929,20498,21181,22713,23798,24844,25615,26723,27315,28232,28835,31027,31656,33177,35008,36726,35983
+"Kansas",20,532,467,401,266,250,287,362,387,428,383,382,425,552,851,1045,1169,1165,1136,1310,1352,1299,1463,1602,1827,1739,1791,1753,1821,1911,2098,2106,2165,2227,2305,2384,2516,2689,2894,3032,3268,3548,3816,4145,4613,5274,5717,6186,6721,7307,8082,9240,10038,11248,11989,12373,13602,14330,14904,15583,16331,17093,18182,18832,19955,20510,21352,21889,23121,24355,25687,26824,27816,28979,29067,30109,31181,32367,34934,36546,37983,37036
+"Kentucky",21,393,325,291,211,205,233,265,294,341,297,305,320,395,538,701,767,803,826,865,995,938,990,1153,1237,1305,1289,1346,1437,1491,1543,1604,1633,1731,1822,1906,1972,2134,2329,2501,2718,2971,3184,3391,3715,4145,4607,4933,5462,6095,6784,7640,8231,9110,9589,9859,11062,11558,11995,12782,13570,14602,15484,16241,17320,17815,18514,19215,20155,21215,22353,23237,24294,24816,25297,25777,26891,27881,29392,30443,31302,31250
+"Louisiana",22,414,355,318,241,227,265,290,330,353,348,360,364,449,592,786,876,889,833,885,1019,1074,1117,1210,1278,1343,1339,1397,1502,1616,1635,1685,1690,1741,1806,1909,2003,2139,2331,2527,2749,2901,3106,3328,3593,3994,4510,4956,5557,6135,6951,7813,8833,10037,10558,10865,11628,12121,12028,12266,13113,13997,15223,16076,16968,17717,18779,19541,20254,21209,22352,22847,23334,25116,25683,26434,27776,29785,33438,34986,35730,35151
+"Maine",23,601,576,491,377,371,416,430,506,510,471,495,526,631,857,1102,1102,1079,1134,1169,1240,1183,1195,1319,1438,1449,1448,1581,1669,1720,1797,1850,1919,1903,1983,2049,2212,2383,2539,2658,2872,3140,3423,3594,3864,4319,4764,5019,5708,6142,6751,7497,8408,9231,9873,10551,11665,12533,13463,14595,15813,16886,17479,17662,18350,18810,19531,20240,21293,22305,23529,24603,25623,27068,27731,28727,30201,30721,32340,33620,34906,35268
+"Maryland",24,768,712,638,512,466,523,548,618,665,633,663,710,870,1117,1287,1324,1312,1315,1355,1500,1496,1642,1815,1944,2017,1938,2047,2184,2267,2258,2324,2407,2507,2638,2722,2881,3055,3284,3516,3831,4209,4573,4894,5291,5822,6382,6878,7536,8191,9062,10035,11230,12403,13246,14228,15693,16961,17966,19216,20626,22001,23023,23571,24358,25104,26046,26896,27844,29222,30850,32465,33872,35430,36293,37309,39651,41555,43990,45827,47040,47159
+"Massachusetts",25,906,836,759,613,559,609,643,714,732,672,724,779,901,1073,1262,1299,1348,1396,1435,1512,1480,1656,1817,1895,1947,1930,2071,2194,2296,2321,2430,2511,2605,2734,2799,2932,3101,3331,3583,3903,4207,4486,4748,5106,5551,6024,6439,6994,7636,8480,9472,10673,11830,12803,13859,15549,16720,17954,19504,21334,22458,23223,23749,24876,25664,26841,28051,29618,31332,33394,35551,37992,39247,39238,39869,41792,43520,46893,49361,50607,49590
+"Michigan",26,790,657,540,394,347,453,530,619,685,572,625,680,829,1050,1354,1391,1325,1329,1466,1563,1527,1718,1896,1985,2202,2076,2238,2275,2309,2246,2358,2438,2427,2592,2728,2949,3215,3450,3554,3906,4145,4194,4501,4966,5552,5926,6279,7084,7957,8834,9701,10369,11125,11462,12243,13576,14734,15573,16130,17198,18276,19022,19318,20278,21390,22862,23975,24447,25570,26807,28113,29612,30196,30410,31446,31890,32516,33452,34441,35215,34280
+"Minnesota",27,599,552,457,363,308,358,451,472,540,494,517,524,615,797,947,1006,1110,1190,1270,1451,1328,1437,1582,1633,1711,1724,1790,1838,1930,2016,2065,2155,2236,2331,2460,2540,2781,2997,3182,3463,3779,4053,4275,4628,5431,5838,6216,6729,7559,8471,9409,10320,11320,11992,12594,14255,15093,15881,16899,17592,18966,20011,20489,21698,22068,23467,24583,26267,27548,29503,30793,32101,32835,33553,34744,36505,37400,39367,41059,42299,40920
+"Mississippi",28,286,202,175,127,131,174,177,229,224,201,205,215,307,439,533,629,629,611,670,802,705,770,851,906,940,928,1045,1051,1063,1156,1245,1237,1322,1362,1499,1557,1688,1839,2001,2197,2408,2641,2867,3208,3613,3936,4205,4757,5259,5806,6549,7076,7901,8301,8615,9463,9922,10293,10913,11695,12540,13164,13806,14711,15468,16549,17185,18044,18885,20013,20688,20993,22222,22540,23365,24501,26120,27276,28772,29591,29318
+"Missouri",29,621,561,491,365,334,367,420,466,508,475,504,519,640,806,963,1068,1130,1191,1224,1376,1327,1427,1554,1659,1737,1726,1818,1905,1949,2044,2126,2156,2215,2330,2427,2533,2738,2895,3067,3370,3561,3843,4107,4443,4937,5282,5733,6306,6991,7787,8713,9390,10457,11035,11716,12960,13868,14505,15250,16086,17083,17751,18560,19542,20295,21267,22094,23099,24252,25403,26376,27445,28156,28771,29702,30847,31644,33354,34558,35775,35106
+"Montana",30,592,501,382,339,298,364,476,475,512,517,533,569,713,901,1150,1183,1206,1308,1484,1642,1412,1654,1806,1823,1810,1771,1894,1929,1983,2068,2023,2075,2025,2363,2330,2366,2548,2730,2805,2955,3284,3625,3789,4355,5012,5380,5794,6200,6636,7721,8299,9143,10244,10672,11045,11705,11900,12465,12996,13362,14623,15524,16509,17114,18072,18129,18764,19383,20167,21324,22019,22569,24342,24699,25963,27517,28987,30942,32625,33293,32699
+"Nebraska",31,596,521,413,307,275,259,409,396,415,405,400,442,551,822,1019,1091,1186,1186,1274,1558,1346,1560,1641,1761,1676,1753,1650,1675,1936,2025,2022,2125,2134,2292,2341,2392,2656,2890,2990,3167,3572,3796,4121,4527,5269,5465,6168,6453,6993,8120,8784,9272,10685,11228,11601,12968,13743,14215,15035,15984,16878,18088,18766,19688,20167,21168,22196,24045,24590,25861,27049,27829,29098,29499,31262,32371,33395,34753,36880,38128,37057
+"Nevada",32,868,833,652,550,495,546,658,843,762,780,861,895,982,1558,1511,1483,1611,1757,1770,1758,1795,1991,2211,2397,2447,2415,2527,2488,2562,2594,2749,2890,2957,3184,3174,3209,3299,3471,3660,4114,4520,4946,5227,5557,6114,6490,7009,7719,8550,9780,10765,11780,12780,12986,13465,14435,15332,16027,16886,18180,19568,20674,21283,22694,23465,24635,25808,27142,28201,29806,31022,30529,30718,30849,32182,34757,37555,38652,40326,40332,38009
+"New Hampshire",33,686,647,558,427,416,476,498,537,565,533,561,579,708,851,976,1054,1111,1150,1217,1291,1274,1348,1508,1577,1653,1703,1829,1900,2007,2004,2124,2197,2281,2392,2427,2552,2708,2963,3162,3441,3744,3896,4102,4423,4880,5279,5592,6258,6892,7786,8781,9915,11079,11906,13041,14534,15819,16974,18371,19759,20635,20713,21326,22154,22521,23820,25008,26042,27607,29679,31114,33332,33940,34335,34892,36758,37536,39997,41720,42461,41882
+"New Jersey",34,918,847,736,587,523,573,625,709,747,697,749,820,957,1167,1429,1554,1582,1526,1571,1648,1624,1802,2000,2114,2232,2219,2300,2448,2551,2525,2660,2764,2830,2990,3064,3224,3414,3652,3892,4237,4525,4835,5127,5521,6043,6576,7037,7703,8462,9408,10469,11778,13057,13999,15036,16549,17652,18711,20230,22142,23595,24766,25153,26597,27101,27885,29277,30795,32372,34310,35551,36983,37959,38240,38768,40603,42142,45668,48172,49233,48123
+"New Mexico",35,410,334,289,208,211,247,292,343,362,338,357,378,474,636,773,881,942,929,1013,1124,1145,1204,1346,1423,1444,1462,1543,1626,1733,1825,1895,1884,1952,2006,2054,2134,2250,2386,2490,2709,2921,3197,3431,3761,4137,4568,5045,5527,6087,6847,7619,8402,9334,9894,10367,11215,11999,12226,12686,13322,14085,14960,15744,16425,17226,17946,18852,19478,20233,21178,21853,22203,24193,24446,25128,26606,28180,29778,31320,32585,32197
+"New York",36,1152,1035,881,676,626,680,722,808,838,789,825,869,996,1169,1384,1538,1644,1697,1725,1771,1729,1858,2002,2057,2144,2175,2295,2416,2522,2553,2695,2788,2866,2980,3070,3254,3422,3657,3923,4295,4603,4887,5179,5538,5980,6492,6955,7477,8153,8979,9927,11095,12364,13344,14188,15739,16734,17827,19031,20604,21966,23315,23942,25199,25589,26359,27721,29266,30480,32236,33890,34547,35371,35332,36077,38312,40592,43892,47514,48692,46844
+"North Carolina",37,332,292,248,187,208,253,271,297,324,295,315,324,422,572,693,766,823,866,900,1003,969,1077,1192,1230,1274,1293,1368,1436,1424,1505,1581,1629,1689,1797,1877,2009,2143,2363,2525,2754,3051,3285,3510,3899,4365,4743,5039,5584,6058,6780,7461,8247,9184,9690,10480,11788,12649,13444,14325,15461,16539,17367,17879,19120,20042,20931,21938,22940,24188,25454,26003,27194,27650,27726,28208,29769,31209,32692,33966,34340,33564
+"North Dakota",38,382,311,187,176,146,180,272,234,326,282,319,355,529,665,966,1031,1046,1088,1494,1483,1211,1360,1444,1318,1336,1364,1490,1556,1598,1847,1679,1821,1653,2320,2142,2112,2463,2507,2592,2719,3052,3214,3669,4377,6172,6120,6334,6184,6427,8136,8398,8095,10342,10990,11386,12307,12811,13126,13565,12745,14357,15880,16270,17692,17830,19033,19084,21166,20798,22767,23313,25068,26118,26770,29109,29676,31644,32856,35882,39009,38672
+"Ohio",39,771,661,563,400,385,455,516,593,648,561,615,658,821,1021,1253,1312,1340,1310,1401,1539,1456,1608,1835,1916,2024,1965,2087,2182,2243,2177,2314,2391,2405,2515,2604,2754,2948,3181,3309,3616,3934,4101,4328,4691,5218,5733,6087,6753,7511,8326,9251,10103,10982,11485,12167,13449,14295,14933,15675,16739,17825,18792,19217,20242,20999,22063,22887,23613,24913,26164,27152,28400,28966,29522,30345,31240,32097,33643,34814,35521,35018
+"Oklahoma",40,455,368,301,216,222,252,298,321,376,346,349,374,432,626,782,947,970,952,1028,1143,1166,1144,1290,1401,1475,1458,1516,1593,1657,1794,1857,1916,1947,1994,2055,2196,2361,2517,2702,2948,3198,3477,3711,4020,4524,4986,5475,5974,6586,7387,8485,9580,11003,11817,11725,12687,13265,13288,13464,14257,15265,16214,16721,17526,18085,18730,19394,20151,21106,22199,22953,23517,25059,25059,25719,27516,29122,31753,32781,34378,33708
+"Oregon",41,668,607,505,379,358,439,458,548,556,531,571,609,818,1118,1381,1389,1357,1379,1504,1646,1604,1657,1830,1912,1916,1867,1978,2070,2055,2106,2244,2283,2349,2457,2541,2685,2852,3033,3201,3448,3677,3940,4212,4625,5135,5726,6181,6913,7556,8476,9415,10196,10862,11128,11832,12866,13547,14162,14911,16062,17222,18253,18806,19558,20404,21421,22668,23649,24845,25958,27023,28350,28866,29387,30172,31217,32108,34212,35279,35899,35210
+"Pennsylvania",42,772,712,600,449,417,482,517,601,636,563,601,650,776,951,1146,1250,1280,1289,1364,1436,1405,1552,1713,1789,1894,1827,1915,2063,2174,2164,2240,2301,2334,2439,2511,2662,2830,3040,3246,3507,3815,4077,4294,4683,5168,5704,6170,6800,7493,8305,9225,10151,11184,11887,12455,13512,14445,15186,16142,17323,18725,19823,20505,21550,22211,22864,23738,24838,26092,27358,28605,29539,30085,30840,31709,33069,34131,36375,38003,39008,38827
+"Rhode Island",44,874,788,711,575,559,600,645,711,731,672,720,750,934,1149,1201,1274,1278,1369,1459,1433,1378,1553,1717,1765,1855,1847,1958,1998,2024,2067,2183,2234,2329,2457,2552,2691,2870,3113,3336,3595,3865,4114,4295,4625,4972,5405,5844,6411,7004,7693,8595,9742,10815,11605,12439,13717,14685,15587,16651,18271,19657,20194,20363,21257,22137,22762,24046,25123,26631,28012,29377,29685,31378,32374,33690,35318,36461,38610,40421,41542,41283
+"South Carolina",45,271,243,205,159,175,211,229,258,273,250,276,310,394,545,648,732,753,780,793,911,873,925,1115,1196,1233,1166,1229,1260,1286,1316,1392,1437,1498,1602,1669,1787,1958,2176,2340,2570,2827,3064,3274,3603,4029,4459,4720,5257,5684,6334,7044,7794,8651,9071,9775,10910,11666,12258,13056,14045,14834,16050,16409,17165,17805,18686,19473,20403,21385,22544,23545,24321,24871,25279,25875,27057,28337,29990,30958,31510,30835
+"South Dakota",46,426,366,241,189,129,184,309,244,323,320,345,361,475,757,846,979,1086,1132,1270,1526,1116,1283,1497,1327,1436,1457,1342,1419,1669,1740,1564,1870,1863,2092,2020,2000,2278,2500,2577,2773,2995,3256,3538,4065,5163,5178,5667,5591,6351,7347,8158,8142,9451,9915,10195,11619,11942,12486,13217,13807,14767,16238,16961,17966,18565,19607,19848,21736,22275,23797,25045,26115,27531,27727,30072,31765,32726,33320,35998,38188,36499
+"Tennessee",47,378,325,277,198,204,245,264,304,334,300,311,340,436,561,728,864,912,872,892,965,951,1028,1122,1178,1276,1274,1327,1422,1476,1512,1598,1618,1690,1771,1855,1965,2128,2332,2473,2741,2967,3189,3451,3808,4298,4696,5017,5574,6108,6895,7618,8319,9196,9695,10276,11453,12247,12995,13909,14910,15883,16821,17503,18840,19741,20696,21800,22450,23324,24576,25574,26239,27059,27647,28501,29734,30764,32314,33578,34243,33512
+"Texas",48,479,412,348,266,257,294,326,372,418,404,417,438,528,719,944,1045,1058,1047,1147,1214,1300,1363,1488,1564,1598,1630,1697,1784,1856,1871,1948,1955,2021,2079,2155,2284,2433,2630,2840,3105,3373,3646,3861,4192,4683,5194,5738,6362,6979,7912,8929,9957,11391,11961,12303,13396,14196,14165,14486,15324,16323,17458,18150,19146,19825,20590,21526,22557,24242,25803,26858,27871,28519,28295,28929,30392,32448,34489,36020,36969,35674
+"Utah",49,551,498,369,305,298,310,389,463,444,444,458,480,594,880,1126,1049,1121,1094,1180,1257,1262,1348,1544,1596,1604,1573,1666,1758,1862,1888,1970,2035,2091,2230,2281,2386,2494,2605,2721,2900,3103,3391,3658,3979,4326,4743,5150,5739,6328,7041,7786,8464,9290,9807,10333,11233,11846,12248,12638,13156,13977,14996,15661,16354,17031,17912,18858,19955,21156,22294,23288,23907,24899,25010,25192,26169,27905,29582,31009,31253,30107
+"Vermont",50,634,576,474,365,338,383,414,471,485,457,491,516,643,775,932,953,1035,1090,1127,1192,1125,1169,1334,1380,1432,1456,1524,1655,1720,1732,1832,1923,1994,2072,2128,2266,2456,2729,2885,3128,3388,3634,3856,4176,4548,4869,5192,5753,6179,7036,7853,8702,9717,10287,10968,12048,12994,13842,14992,16197,17517,18055,18218,19293,19785,20553,21359,22295,23362,24803,25889,26901,28140,28651,29609,31240,31920,34394,36018,36940,36752
+"Virginia",51,434,384,370,284,285,320,350,390,423,390,426,467,582,785,843,900,950,1002,1015,1148,1132,1257,1420,1510,1533,1554,1637,1712,1738,1784,1885,1936,2005,2121,2218,2403,2563,2746,2967,3252,3558,3795,4092,4486,4972,5484,5934,6534,7192,8040,8995,10176,11291,12075,12936,14298,15286,16237,17332,18556,19780,20538,21092,21965,22773,23709,24456,25495,26768,28343,29789,31162,32747,33235,34451,36285,38304,40644,42506,43409,43211
+"Washington",53,741,658,534,402,376,443,490,569,599,582,614,658,864,1196,1469,1527,1419,1401,1504,1624,1595,1721,1874,1973,2066,2077,2116,2172,2262,2281,2380,2436,2535,2680,2735,2858,3078,3385,3566,3850,4097,4205,4381,4731,5312,5919,6533,7181,7832,8887,9965,10913,11903,12431,13124,14021,14738,15522,16300,17270,18670,20026,20901,21917,22414,23119,23878,25287,26817,28632,30392,31528,32053,32206,32934,34984,35738,38477,40782,41588,40619
+"West Virginia",54,460,408,356,257,259,313,337,390,418,370,388,407,498,613,739,820,888,925,1032,1110,1023,1056,1182,1247,1276,1225,1318,1477,1594,1551,1597,1625,1671,1762,1853,1974,2126,2271,2417,2573,2798,3117,3378,3682,4026,4457,4974,5479,6053,6703,7432,8172,8866,9439,9626,10417,10936,11464,11950,12708,13529,14579,15219,16118,16724,17413,17913,18566,19388,20246,20966,21915,23333,24103,24626,25484,26374,28379,29769,31265,31843
+"Wisconsin",55,673,588,469,362,333,380,461,518,551,507,513,547,672,866,1053,1109,1182,1211,1296,1434,1387,1506,1736,1799,1839,1774,1875,1990,2060,2075,2225,2258,2304,2412,2458,2616,2789,3025,3180,3441,3751,3983,4238,4595,5130,5622,6061,6670,7417,8292,9281,10161,11006,11592,12046,13182,13845,14530,15358,16201,17299,18160,18711,19872,20639,21699,22573,23554,24790,26245,27390,28232,29161,29838,30657,31703,32625,34535,35839,36594,35676
+"Wyoming",56,675,585,476,374,371,411,496,551,607,561,587,602,779,944,1150,1224,1258,1362,1506,1610,1647,1719,1955,1912,1932,1858,1913,2011,2132,2172,2278,2312,2387,2502,2535,2588,2743,2906,3121,3315,3584,3919,4269,4709,5410,6172,6701,7212,8152,9384,10572,11753,12879,13251,12723,13493,14242,14004,14194,14968,16383,17996,18867,19550,20287,20957,21514,22098,23820,24927,26396,27230,29122,29828,31544,33721,36683,41548,43453,45177,42504
diff --git a/pysal/examples/virginia.dbf b/pysal/examples/virginia.dbf
new file mode 100644
index 0000000..7ed087e
Binary files /dev/null and b/pysal/examples/virginia.dbf differ
diff --git a/pysal/examples/virginia.gal b/pysal/examples/virginia.gal
new file mode 100644
index 0000000..c8ccf1c
--- /dev/null
+++ b/pysal/examples/virginia.gal
@@ -0,0 +1,273 @@
+0 136 virginia POLY_ID
+1 4
+7 5 4 3
+2 4
+9 8 6 3
+3 4
+8 7 2 1
+4 1
+1
+5 4
+16 15 7 1
+6 6
+14 12 11 10 9 2
+7 6
+16 13 8 5 3 1
+8 7
+19 13 9 21 2 3 7
+9 6
+21 18 17 6 2 8
+10 3
+14 11 6
+11 2
+10 6
+12 1
+6
+13 5
+20 16 8 19 7
+14 2
+10 6
+15 6
+25 23 24 31 16 5
+16 6
+24 20 13 7 5 15
+17 2
+18 9
+18 2
+9 17
+19 6
+26 20 21 27 8 13
+20 5
+24 19 26 13 16
+21 7
+33 29 27 28 9 19 8
+22 2
+32 23
+23 8
+41 39 38 34 32 31 15 22
+24 5
+31 20 26 16 15
+25 1
+15
+26 6
+36 31 19 27 24 20
+27 7
+44 36 33 29 21 26 19
+28 4
+35 33 30 21
+29 2
+21 27
+30 4
+43 37 35 28
+31 9
+56 45 41 40 36 24 26 23 15
+32 4
+47 23 39 22
+33 7
+44 35 46 48 27 28 21
+34 1
+23
+35 6
+46 37 57 30 28 33
+36 6
+49 45 44 27 26 31
+37 4
+51 43 30 35
+38 1
+23
+39 9
+64 59 55 54 52 47 41 23 32
+40 1
+31
+41 6
+68 52 56 31 23 39
+42 1
+69
+43 3
+51 37 30
+44 7
+60 49 48 63 33 27 36
+45 5
+58 56 36 49 31
+46 7
+76 63 48 66 57 35 33
+47 6
+62 55 53 50 39 32
+48 4
+63 46 44 33
+49 7
+61 58 44 60 67 36 45
+50 1
+47
+51 2
+37 43
+52 6
+77 75 64 68 41 39
+53 1
+47
+54 1
+39
+55 5
+78 62 64 39 47
+56 6
+79 68 58 45 31 41
+57 3
+66 46 35
+58 6
+79 61 72 56 49 45
+59 1
+39
+60 7
+67 65 61 63 73 44 49
+61 5
+72 60 67 49 58
+62 5
+81 78 74 55 47
+63 6
+76 73 46 48 60 44
+64 9
+105 99 82 78 77 75 52 39 55
+65 2
+67 60
+66 4
+76 71 57 46
+67 11
+96 93 91 86 72 73 84 60 65 61 49
+68 6
+94 77 79 56 41 52
+69 1
+42
+70 3
+104 89 83
+71 1
+66
+72 6
+90 79 93 67 61 58
+73 7
+95 86 84 76 63 67 60
+74 4
+97 88 81 62
+75 3
+77 52 64
+76 7
+92 80 98 66 46 63 73
+77 7
+105 68 94 111 64 75 52
+78 8
+106 87 85 81 64 99 55 62
+79 7
+107 94 72 90 58 56 68
+80 5
+109 101 98 92 76
+81 6
+102 97 106 78 74 62
+82 1
+64
+83 4
+113 104 88 70
+84 8
+108 96 95 93 91 86 73 67
+85 1
+78
+86 3
+73 84 67
+87 1
+78
+88 5
+113 110 97 74 83
+89 3
+100 104 70
+90 5
+112 107 93 72 79
+91 3
+96 84 67
+92 2
+76 80
+93 8
+121 112 96 108 84 67 90 72
+94 6
+111 122 107 79 68 77
+95 5
+114 108 103 84 73
+96 4
+84 93 91 67
+97 7
+118 110 102 81 106 74 88
+98 3
+109 80 76
+99 6
+127 125 106 105 64 78
+100 5
+124 123 116 89 104
+101 2
+109 80
+102 2
+81 97
+103 4
+134 120 114 95
+104 7
+124 119 113 83 70 100 89
+105 6
+135 127 111 77 64 99
+106 6
+125 118 99 78 97 81
+107 5
+122 90 112 79 94
+108 5
+121 114 95 84 93
+109 3
+101 80 98
+110 5
+129 113 118 97 88
+111 5
+132 122 94 105 77
+112 5
+122 121 90 93 107
+113 6
+119 110 129 88 83 104
+114 6
+134 121 103 120 108 95
+115 3
+128 117 126
+116 1
+100
+117 2
+126 115
+118 6
+133 129 106 125 97 110
+119 5
+136 124 113 129 104
+120 4
+128 126 103 114
+121 5
+130 108 114 93 112
+122 4
+112 107 111 94
+123 2
+124 100
+124 4
+119 104 100 123
+125 4
+127 106 99 118
+126 4
+128 117 120 115
+127 4
+131 105 99 125
+128 3
+115 126 120
+129 5
+133 110 118 119 113
+130 1
+121
+131 1
+127
+132 1
+111
+133 2
+118 129
+134 2
+103 114
+135 1
+105
+136 1
+119
diff --git a/pysal/examples/virginia.prj b/pysal/examples/virginia.prj
new file mode 100644
index 0000000..a30c00a
--- /dev/null
+++ b/pysal/examples/virginia.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
\ No newline at end of file
diff --git a/pysal/examples/virginia.shp b/pysal/examples/virginia.shp
new file mode 100644
index 0000000..5d72561
Binary files /dev/null and b/pysal/examples/virginia.shp differ
diff --git a/pysal/examples/virginia.shx b/pysal/examples/virginia.shx
new file mode 100644
index 0000000..e58f4a9
Binary files /dev/null and b/pysal/examples/virginia.shx differ
diff --git a/pysal/examples/virginia_rook.gal b/pysal/examples/virginia_rook.gal
new file mode 100644
index 0000000..08f0381
--- /dev/null
+++ b/pysal/examples/virginia_rook.gal
@@ -0,0 +1,273 @@
+0 136 virginia POLY_ID
+1 4
+7 5 4 3
+2 4
+9 8 6 3
+3 4
+8 7 2 1
+4 1
+1
+5 4
+16 15 7 1
+6 6
+14 12 11 10 9 2
+7 6
+16 13 8 5 3 1
+8 7
+19 13 9 21 2 3 7
+9 6
+21 18 17 6 2 8
+10 3
+14 11 6
+11 2
+10 6
+12 1
+6
+13 5
+20 16 8 19 7
+14 2
+10 6
+15 6
+25 23 24 31 16 5
+16 6
+24 20 13 7 5 15
+17 2
+18 9
+18 2
+9 17
+19 6
+26 20 21 27 8 13
+20 5
+24 19 26 13 16
+21 7
+33 29 27 28 9 19 8
+22 2
+32 23
+23 8
+41 39 38 34 32 31 15 22
+24 5
+31 20 26 16 15
+25 1
+15
+26 6
+36 31 19 27 24 20
+27 7
+44 36 33 29 21 26 19
+28 4
+35 33 30 21
+29 2
+21 27
+30 4
+43 37 35 28
+31 9
+56 45 41 40 36 24 26 23 15
+32 4
+47 23 39 22
+33 7
+44 35 46 48 27 28 21
+34 1
+23
+35 6
+46 37 57 30 28 33
+36 6
+49 45 44 27 26 31
+37 4
+51 43 30 35
+38 1
+23
+39 9
+64 59 55 54 52 47 41 23 32
+40 1
+31
+41 6
+68 52 56 31 23 39
+42 1
+69
+43 3
+51 37 30
+44 7
+60 49 48 63 33 27 36
+45 5
+58 56 36 49 31
+46 7
+76 63 48 66 57 35 33
+47 6
+62 55 53 50 39 32
+48 4
+63 46 44 33
+49 6
+61 58 44 60 36 45
+50 1
+47
+51 2
+37 43
+52 6
+77 75 64 68 41 39
+53 1
+47
+54 1
+39
+55 5
+78 62 64 39 47
+56 6
+79 68 58 45 31 41
+57 3
+66 46 35
+58 6
+79 61 72 56 49 45
+59 1
+39
+60 6
+67 65 63 73 44 49
+61 4
+72 67 49 58
+62 5
+81 78 74 55 47
+63 6
+76 73 46 48 60 44
+64 9
+105 99 82 78 77 75 52 39 55
+65 2
+67 60
+66 4
+76 71 57 46
+67 10
+96 93 91 86 72 73 84 60 65 61
+68 6
+94 77 79 56 41 52
+69 1
+42
+70 3
+104 89 83
+71 1
+66
+72 6
+90 79 93 67 61 58
+73 6
+95 84 76 63 67 60
+74 4
+97 88 81 62
+75 3
+77 52 64
+76 7
+92 80 98 66 46 63 73
+77 7
+105 68 94 111 64 75 52
+78 8
+106 87 85 81 64 99 55 62
+79 7
+107 94 72 90 58 56 68
+80 5
+109 101 98 92 76
+81 6
+102 97 106 78 74 62
+82 1
+64
+83 4
+113 104 88 70
+84 7
+108 96 95 93 86 73 67
+85 1
+78
+86 2
+84 67
+87 1
+78
+88 5
+113 110 97 74 83
+89 3
+100 104 70
+90 5
+112 107 93 72 79
+91 2
+96 67
+92 2
+76 80
+93 8
+121 112 96 108 84 67 90 72
+94 6
+111 122 107 79 68 77
+95 5
+114 108 103 84 73
+96 4
+84 93 91 67
+97 7
+118 110 102 81 106 74 88
+98 3
+109 80 76
+99 6
+127 125 106 105 64 78
+100 5
+124 123 116 89 104
+101 1
+80
+102 2
+81 97
+103 4
+134 120 114 95
+104 7
+124 119 113 83 70 100 89
+105 6
+135 127 111 77 64 99
+106 6
+125 118 99 78 97 81
+107 5
+122 90 112 79 94
+108 5
+121 114 95 84 93
+109 2
+80 98
+110 5
+129 113 118 97 88
+111 5
+132 122 94 105 77
+112 5
+122 121 90 93 107
+113 6
+119 110 129 88 83 104
+114 6
+134 121 103 120 108 95
+115 2
+117 126
+116 1
+100
+117 2
+126 115
+118 6
+133 129 106 125 97 110
+119 5
+136 124 113 129 104
+120 4
+128 126 103 114
+121 5
+130 108 114 93 112
+122 4
+112 107 111 94
+123 2
+124 100
+124 4
+119 104 100 123
+125 4
+127 106 99 118
+126 4
+128 117 120 115
+127 4
+131 105 99 125
+128 2
+126 120
+129 5
+133 110 118 119 113
+130 1
+121
+131 1
+127
+132 1
+111
+133 2
+118 129
+134 2
+103 114
+135 1
+105
+136 1
+119
diff --git a/pysal/examples/wmat.dat b/pysal/examples/wmat.dat
new file mode 100644
index 0000000..c604f0c
--- /dev/null
+++ b/pysal/examples/wmat.dat
@@ -0,0 +1,232 @@
+    2.0000    1.0000    0.2500
+    5.0000    1.0000    0.5000
+    6.0000    1.0000    0.2500
+    1.0000    2.0000    0.3333
+    3.0000    2.0000    0.1667
+    6.0000    2.0000    0.2500
+    7.0000    2.0000    0.1250
+    2.0000    3.0000    0.2500
+    4.0000    3.0000    0.2500
+    7.0000    3.0000    0.1250
+   37.0000    3.0000    0.1429
+   38.0000    3.0000    0.2000
+   39.0000    3.0000    0.2500
+    3.0000    4.0000    0.1667
+   37.0000    4.0000    0.1429
+   39.0000    4.0000    0.2500
+   40.0000    4.0000    0.1667
+    1.0000    5.0000    0.3333
+    6.0000    5.0000    0.2500
+    1.0000    6.0000    0.3333
+    2.0000    6.0000    0.2500
+    5.0000    6.0000    0.5000
+    7.0000    6.0000    0.1250
+    2.0000    7.0000    0.2500
+    3.0000    7.0000    0.1667
+    6.0000    7.0000    0.2500
+    8.0000    7.0000    0.5000
+    9.0000    7.0000    0.1667
+   18.0000    7.0000    0.1429
+   36.0000    7.0000    0.1250
+   38.0000    7.0000    0.2000
+    7.0000    8.0000    0.1250
+   18.0000    8.0000    0.1429
+    7.0000    9.0000    0.1250
+   18.0000    9.0000    0.1429
+   20.0000    9.0000    0.2000
+   32.0000    9.0000    0.1429
+   36.0000    9.0000    0.1250
+   38.0000    9.0000    0.2000
+   11.0000   10.0000    0.3333
+   17.0000   10.0000    0.1000
+   18.0000   10.0000    0.1429
+   19.0000   10.0000    0.1667
+   10.0000   11.0000    0.2500
+   12.0000   11.0000    0.3333
+   17.0000   11.0000    0.1000
+   11.0000   12.0000    0.3333
+   13.0000   12.0000    0.2500
+   17.0000   12.0000    0.1000
+   12.0000   13.0000    0.3333
+   14.0000   13.0000    0.3333
+   16.0000   13.0000    0.2000
+   17.0000   13.0000    0.1000
+   13.0000   14.0000    0.2500
+   15.0000   14.0000    0.5000
+   16.0000   14.0000    0.2000
+   14.0000   15.0000    0.3333
+   16.0000   15.0000    0.2000
+   13.0000   16.0000    0.2500
+   14.0000   16.0000    0.3333
+   15.0000   16.0000    0.5000
+   17.0000   16.0000    0.1000
+   23.0000   16.0000    0.1429
+   10.0000   17.0000    0.2500
+   11.0000   17.0000    0.3333
+   12.0000   17.0000    0.3333
+   13.0000   17.0000    0.2500
+   16.0000   17.0000    0.2000
+   18.0000   17.0000    0.1429
+   19.0000   17.0000    0.1667
+   21.0000   17.0000    0.2500
+   22.0000   17.0000    0.2500
+   23.0000   17.0000    0.1429
+    7.0000   18.0000    0.1250
+    8.0000   18.0000    0.5000
+    9.0000   18.0000    0.1667
+   10.0000   18.0000    0.2500
+   17.0000   18.0000    0.1000
+   19.0000   18.0000    0.1667
+   20.0000   18.0000    0.2000
+   10.0000   19.0000    0.2500
+   17.0000   19.0000    0.1000
+   18.0000   19.0000    0.1429
+   20.0000   19.0000    0.2000
+   21.0000   19.0000    0.2500
+   31.0000   19.0000    0.1111
+    9.0000   20.0000    0.1667
+   18.0000   20.0000    0.1429
+   19.0000   20.0000    0.1667
+   31.0000   20.0000    0.1111
+   32.0000   20.0000    0.1429
+   17.0000   21.0000    0.1000
+   19.0000   21.0000    0.1667
+   22.0000   21.0000    0.2500
+   31.0000   21.0000    0.1111
+   17.0000   22.0000    0.1000
+   21.0000   22.0000    0.2500
+   23.0000   22.0000    0.1429
+   31.0000   22.0000    0.1111
+   16.0000   23.0000    0.2000
+   17.0000   23.0000    0.1000
+   22.0000   23.0000    0.2500
+   24.0000   23.0000    0.1667
+   25.0000   23.0000    0.2000
+   29.0000   23.0000    0.1667
+   31.0000   23.0000    0.1111
+   23.0000   24.0000    0.1429
+   25.0000   24.0000    0.2000
+   29.0000   24.0000    0.1667
+   30.0000   24.0000    0.1667
+   31.0000   24.0000    0.1111
+   33.0000   24.0000    0.1667
+   23.0000   25.0000    0.1429
+   24.0000   25.0000    0.1667
+   26.0000   25.0000    0.3333
+   27.0000   25.0000    0.2500
+   29.0000   25.0000    0.1667
+   25.0000   26.0000    0.2000
+   27.0000   26.0000    0.2500
+   28.0000   26.0000    0.2500
+   25.0000   27.0000    0.2000
+   26.0000   27.0000    0.3333
+   28.0000   27.0000    0.2500
+   29.0000   27.0000    0.1667
+   26.0000   28.0000    0.3333
+   27.0000   28.0000    0.2500
+   29.0000   28.0000    0.1667
+   30.0000   28.0000    0.1667
+   23.0000   29.0000    0.1429
+   24.0000   29.0000    0.1667
+   25.0000   29.0000    0.2000
+   27.0000   29.0000    0.2500
+   28.0000   29.0000    0.2500
+   30.0000   29.0000    0.1667
+   24.0000   30.0000    0.1667
+   28.0000   30.0000    0.2500
+   29.0000   30.0000    0.1667
+   31.0000   30.0000    0.1111
+   33.0000   30.0000    0.1667
+   34.0000   30.0000    0.2000
+   19.0000   31.0000    0.1667
+   20.0000   31.0000    0.2000
+   21.0000   31.0000    0.2500
+   22.0000   31.0000    0.2500
+   23.0000   31.0000    0.1429
+   24.0000   31.0000    0.1667
+   30.0000   31.0000    0.1667
+   32.0000   31.0000    0.1429
+   33.0000   31.0000    0.1667
+    9.0000   32.0000    0.1667
+   20.0000   32.0000    0.2000
+   31.0000   32.0000    0.1111
+   33.0000   32.0000    0.1667
+   34.0000   32.0000    0.2000
+   35.0000   32.0000    0.1429
+   36.0000   32.0000    0.1250
+   24.0000   33.0000    0.1667
+   30.0000   33.0000    0.1667
+   31.0000   33.0000    0.1111
+   32.0000   33.0000    0.1429
+   34.0000   33.0000    0.2000
+   35.0000   33.0000    0.1429
+   30.0000   34.0000    0.1667
+   32.0000   34.0000    0.1429
+   33.0000   34.0000    0.1667
+   35.0000   34.0000    0.1429
+   43.0000   34.0000    0.3333
+   32.0000   35.0000    0.1429
+   33.0000   35.0000    0.1667
+   34.0000   35.0000    0.2000
+   36.0000   35.0000    0.1250
+   41.0000   35.0000    0.3333
+   42.0000   35.0000    0.2000
+   43.0000   35.0000    0.3333
+    7.0000   36.0000    0.1250
+    9.0000   36.0000    0.1667
+   32.0000   36.0000    0.1429
+   35.0000   36.0000    0.1429
+   37.0000   36.0000    0.1429
+   38.0000   36.0000    0.2000
+   40.0000   36.0000    0.1667
+   42.0000   36.0000    0.2000
+    3.0000   37.0000    0.1667
+    4.0000   37.0000    0.2500
+   36.0000   37.0000    0.1250
+   38.0000   37.0000    0.2000
+   39.0000   37.0000    0.2500
+   40.0000   37.0000    0.1667
+   42.0000   37.0000    0.2000
+    3.0000   38.0000    0.1667
+    7.0000   38.0000    0.1250
+    9.0000   38.0000    0.1667
+   36.0000   38.0000    0.1250
+   37.0000   38.0000    0.1429
+    3.0000   39.0000    0.1667
+    4.0000   39.0000    0.2500
+   37.0000   39.0000    0.1429
+   40.0000   39.0000    0.1667
+    4.0000   40.0000    0.2500
+   36.0000   40.0000    0.1250
+   37.0000   40.0000    0.1429
+   39.0000   40.0000    0.2500
+   41.0000   40.0000    0.3333
+   42.0000   40.0000    0.2000
+   35.0000   41.0000    0.1429
+   40.0000   41.0000    0.1667
+   42.0000   41.0000    0.2000
+   35.0000   42.0000    0.1429
+   36.0000   42.0000    0.1250
+   37.0000   42.0000    0.1429
+   40.0000   42.0000    0.1667
+   41.0000   42.0000    0.3333
+   34.0000   43.0000    0.2000
+   35.0000   43.0000    0.1429
+   44.0000   43.0000    0.2500
+   43.0000   44.0000    0.3333
+   45.0000   44.0000    0.5000
+   46.0000   44.0000    0.2000
+   49.0000   44.0000    0.5000
+   44.0000   45.0000    0.2500
+   46.0000   45.0000    0.2000
+   44.0000   46.0000    0.2500
+   45.0000   46.0000    0.5000
+   47.0000   46.0000    0.5000
+   48.0000   46.0000    0.5000
+   49.0000   46.0000    0.5000
+   46.0000   47.0000    0.2000
+   48.0000   47.0000    0.5000
+   46.0000   48.0000    0.2000
+   47.0000   48.0000    0.5000
+   44.0000   49.0000    0.2500
+   46.0000   49.0000    0.2000
\ No newline at end of file
diff --git a/pysal/examples/wmat.mtx b/pysal/examples/wmat.mtx
new file mode 100644
index 0000000..b6e8eb5
--- /dev/null
+++ b/pysal/examples/wmat.mtx
@@ -0,0 +1,237 @@
+%%MatrixMarket matrix coordinate real general
+%==================================================
+% This is a test file generated from wmat.dat file.
+% ==================================================
+49    49    232	
+2	  1	  0.2500
+5	  1	  0.5000
+6	  1	  0.2500
+1	  2	  0.3333
+3	  2	  0.1667
+6	  2	  0.2500
+7	  2	  0.1250
+2	  3	  0.2500 
+4	  3	  0.2500 
+7	  3	  0.1250
+37	  3	  0.1429
+38	  3	  0.2000
+39	  3	  0.2500 
+3	  4	  0.1667
+37	  4	  0.1429
+39	  4	  0.2500
+40	  4	  0.1667 
+1	  5	  0.3333 
+6	  5	  0.2500 
+1	  6	  0.3333 
+2	  6	  0.2500 
+5	  6	  0.5000 
+7	  6	  0.1250 
+2	  7	  0.2500 
+3	  7	  0.1667 
+6	  7	  0.2500 
+8	  7	  0.5000 
+9	  7	  0.1667
+18	  7	  0.1429
+36	  7	  0.1250
+38	  7	  0.2000 
+7	  8	  0.1250
+18	  8	  0.1429 
+7	  9	  0.1250
+18	  9	  0.1429
+20	  9	  0.2000
+32	  9	  0.1429
+36	  9	  0.1250
+38	  9	  0.2000
+11	 10	  0.3333
+17	 10	  0.1000
+18	 10	  0.1429
+19	 10	  0.1667
+10	 11	  0.2500
+12	 11	  0.3333
+17	 11	  0.1000
+11	 12	  0.3333
+13	 12	  0.2500
+17	 12	  0.1000
+12	 13	  0.3333
+14	 13	  0.3333
+16	 13	  0.2000
+17	 13	  0.1000
+13	 14	  0.2500
+15	 14	  0.5000
+16	 14	  0.2000
+14	 15	  0.3333
+16	 15	  0.2000
+13	 16	  0.2500
+14	 16	  0.3333
+15	 16	  0.5000
+17	 16	  0.1000
+23	 16	  0.1429
+10	 17	  0.2500
+11	 17	  0.3333
+12	 17	  0.3333
+13	 17	  0.2500
+16	 17	  0.2000
+18	 17	  0.1429
+19	 17	  0.1667
+21	 17	  0.2500
+22	 17	  0.2500
+23	 17	  0.1429 
+7	 18	  0.1250 
+8	 18	  0.5000 
+9	 18	  0.1667
+10	 18	  0.2500
+17	 18	  0.1000
+19	 18	  0.1667
+20	 18	  0.2000
+10	 19	  0.2500
+17	 19	  0.1000
+18	 19	  0.1429
+20	 19	  0.2000
+21	 19	  0.2500
+31	 19	  0.1111 
+9	 20	  0.1667
+18	 20	  0.1429
+19	 20	  0.1667
+31	 20	  0.1111
+32	 20	  0.1429
+17	 21	  0.1000
+19	 21	  0.1667
+22	 21	  0.2500
+31	 21	  0.1111
+17	 22	  0.1000
+21	 22	  0.2500
+23	 22	  0.1429
+31	 22	  0.1111
+16	 23	  0.2000
+17	 23	  0.1000
+22	 23	  0.2500
+24	 23	  0.1667
+25	 23	  0.2000
+29	 23	  0.1667
+31	 23	  0.1111
+23	 24	  0.1429
+25	 24	  0.2000
+29	 24	  0.1667
+30	 24	  0.1667
+31	 24	  0.1111
+33	 24	  0.1667
+23	 25	  0.1429
+24	 25	  0.1667
+26	 25	  0.3333
+27	 25	  0.2500
+29	 25	  0.1667
+25	 26	  0.2000
+27	 26	  0.2500
+28	 26	  0.2500
+25	 27	  0.2000
+26	 27	  0.3333
+28	 27	  0.2500
+29	 27	  0.1667
+26	 28	  0.3333
+27	 28	  0.2500
+29	 28	  0.1667
+30	 28	  0.1667
+23	 29	  0.1429
+24	 29	  0.1667
+25	 29	  0.2000
+27	 29	  0.2500
+28	 29	  0.2500
+30	 29	  0.1667
+24	 30	  0.1667
+28	 30	  0.2500
+29	 30	  0.1667
+31	 30	  0.1111
+33	 30	  0.1667
+34	 30	  0.2000
+19	 31	  0.1667
+20	 31	  0.2000
+21	 31	  0.2500
+22	 31	  0.2500
+23	 31	  0.1429
+24	 31	  0.1667
+30	 31	  0.1667
+32	 31	  0.1429
+33	 31	  0.1667 
+9	 32	  0.1667
+20	 32	  0.2000
+31	 32	  0.1111
+33	 32	  0.1667
+34	 32	  0.2000
+35	 32	  0.1429
+36	 32	  0.1250
+24	 33	  0.1667
+30	 33	  0.1667
+31	 33	  0.1111
+32	 33	  0.1429
+34	 33	  0.2000
+35	 33	  0.1429
+30	 34	  0.1667
+32	 34	  0.1429
+33	 34	  0.1667
+35	 34	  0.1429
+43	 34	  0.3333
+32	 35	  0.1429
+33	 35	  0.1667
+34	 35	  0.2000
+36	 35	  0.1250
+41	 35	  0.3333
+42	 35	  0.2000
+43	 35	  0.3333 
+7	 36	  0.1250 
+9	 36	  0.1667
+32	 36	  0.1429
+35	 36	  0.1429
+37	 36	  0.1429
+38	 36	  0.2000
+40	 36	  0.1667
+42	 36	  0.2000 
+3	 37	  0.1667 
+4	 37	  0.2500
+36	 37	  0.1250
+38	 37	  0.2000
+39	 37	  0.2500
+40	 37	  0.1667
+42	 37	  0.2000 
+3	 38	  0.1667 
+7	 38	  0.1250 
+9	 38	  0.1667
+36	 38	  0.1250
+37	 38	  0.1429 
+3	 39	  0.1667 
+4	 39	  0.2500
+37	 39	  0.1429
+40	 39	  0.1667 
+4	 40	  0.2500
+36	 40	  0.1250
+37	 40	  0.1429
+39	 40	  0.2500
+41	 40	  0.3333
+42	 40	  0.2000
+35	 41	  0.1429
+40	 41	  0.1667
+42	 41	  0.2000
+35	 42	  0.1429
+36	 42	  0.1250
+37	 42	  0.1429
+40	 42	  0.1667
+41	 42	  0.3333
+34	 43	  0.2000
+35	 43	  0.1429
+44	 43	  0.2500
+43	 44	  0.3333
+45	 44	  0.5000
+46	 44	  0.2000
+49	 44	  0.5000
+44	 45	  0.2500
+46	 45	  0.2000
+44	 46	  0.2500
+45	 46	  0.5000
+47	 46	  0.5000
+48	 46	  0.5000
+49	 46	  0.5000
+46	 47	  0.2000
+48	 47	  0.5000
+46	 48	  0.2000
+47	 48	  0.5000
+44	 49	  0.2500
+46	 49	  0.2000
\ No newline at end of file
diff --git a/pysal/inequality/__init__.py b/pysal/inequality/__init__.py
new file mode 100644
index 0000000..5e43131
--- /dev/null
+++ b/pysal/inequality/__init__.py
@@ -0,0 +1,8 @@
+"""
+:mod:`inequality` --- Spatial Inequality Analysis
+=================================================
+
+"""
+
+import theil
+import gini
diff --git a/pysal/inequality/_indices.py b/pysal/inequality/_indices.py
new file mode 100644
index 0000000..1c47758
--- /dev/null
+++ b/pysal/inequality/_indices.py
@@ -0,0 +1,579 @@
+'''
+Diversity indices as suggested in Nijkamp & Poot (2013)
+'''
+
+import itertools
+import numpy as np
+
+def abundance(x):
+    '''
+    Abundance index
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Abundance index
+    '''
+    xs = x.sum(axis=0)
+    return np.sum([1 for i in xs if i>0])
+
+def margalev_md(x):
+    '''
+    Margalev MD index
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Margalev MD index
+    '''
+    a = abundance(x)
+    return (a - 1.) / np.log(x.sum())
+
+def menhinick_mi(x):
+    '''
+    Menhinick MI index
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Menhinick MI index
+    '''
+    a = abundance(x)
+    return (a - 1.) / np.sqrt(x.sum())
+
+def simpson_so(x):
+    '''
+    Simpson diversity index SO
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Simpson diversity index SO
+    '''
+    xs0 = x.sum(axis=0)
+    xs = x.sum()
+    num = (xs0 * (xs0 - 1.)).sum()
+    den = xs * (xs - 1.)
+    return num / den
+
+def simpson_sd(x):
+    '''
+    Simpson diversity index SD
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Simpson diversity index SD
+    '''
+    return 1. - simpson_so(x)
+
+def herfindahl_hd(x):
+    '''
+    Herfindahl index HD
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Herfindahl index HD
+    '''
+    pgs = x.sum(axis=0)
+    p = pgs.sum()
+    return ((pgs * 1. / p)**2).sum()
+
+def fractionalization_gs(x):
+    '''
+    Fractionalization Gini-Simpson index GS
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Fractionalization Gini-Simpson index GS
+    '''
+    return 1. - herfindahl_hd(x)
+
+def polarization(x):
+    return 'Not implemented'
+
+def shannon_se(x):
+    '''
+    Shannon index SE
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Shannon index SE
+    '''
+    pgs = x.sum(axis=0)
+    p = pgs.sum()
+    ratios = pgs * 1. / p
+    return - (ratios * np.log(ratios)).sum()
+
+def gini_gi(x):
+    '''
+    Gini GI index
+
+    NOTE: based on 3rd eq. of "Calculation" in:
+
+            http://en.wikipedia.org/wiki/Gini_coefficient
+
+         Returns same value as `gini` method in the R package `reldist` (see
+         http://rss.acs.unt.edu/Rdoc/library/reldist/html/gini.html) if every
+         category has at least one observation
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Gini GI index
+    '''
+    ys = x.sum(axis=0)
+    return _gini(ys)
+
+def gini_gi_m(x):
+    '''
+    Gini GI index (equivalent to `gini_gi`, not vectorized)
+
+    NOTE: based on Wolfram Mathworld formula in:
+
+            http://mathworld.wolfram.com/GiniCoefficient.html
+
+         Returns same value as `gini_gi`.
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Gini GI index
+    '''
+    xs = x.sum(axis=0)
+    num = np.sum([np.abs(xi - xj) for xi, xj in itertools.permutations(xs, 2)])
+    den = 2. * xs.shape[0]**2 * np.mean(xs)
+    return num / den
+
+def _gini(ys):
+    '''
+    Gini for a single row to be used both by `gini_gi` and `gini_gig`
+    '''
+    n = ys.flatten().shape[0]
+    ys.sort()
+    num = 2. * ((np.arange(n)+1) * ys).sum()
+    den = n * ys.sum()
+    return (num / den) - ((n + 1.) / n)
+
+def hoover_hi(x):
+    '''
+    Hoover index HI
+
+    NOTE: based on
+
+            http://en.wikipedia.org/wiki/Hoover_index
+
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : float
+              Hoover HI index
+    '''
+    es = x.sum(axis=0)
+    e_total = es.sum()
+    a_total = es.shape[0]
+    s = np.abs((es*1./e_total) - (1./a_total)).sum()
+    return s / 2.
+
+def similarity_w_wd(x, tau):
+    '''
+    Similarity weighted diversity
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    tau     : array
+              k x k array where tau_ij represents dissimilarity between group
+              i and group j. Diagonal elements are assumed to be one.
+
+    Returns
+    -------
+    a       : float
+              Similarity weighted diversity index
+    '''
+    pgs = x.sum(axis=0)
+    pgs = pgs * 1. / pgs.sum()
+    s = sum([pgs[i] * pgs[j] * tau[i, j] for i,j in \
+            itertools.product(np.arange(pgs.shape[0]), repeat=2)])
+    return 1. - s
+
+def segregation_gsg(x):
+    '''
+    Segregation index GS
+
+    This is a Duncan&Duncan index of a group against the rest combined
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              Array with GSg indices for the k groups
+    '''
+    pgs = x.sum(axis=0)
+    pas = x.sum(axis=1)
+    p = pgs.sum()
+    first = (x.T * 1. / pgs[:, None]).T
+    paMpga = pas[:, None] - x
+    pMpg = p - pgs
+    second = paMpga * 1. / pMpg[None, :]
+    return 0.5 * (np.abs(first - second)).sum(axis=0)
+
+def modified_segregation_msg(x):
+    '''
+    Modified segregation index GS
+
+    This is a modified version of GSg index as used by Van Mourik et al. (1989)
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              Array with MSg indices for the k groups
+    '''
+    pgs = x.sum(axis=0)
+    p = pgs.sum()
+    ms_inds = segregation_gsg(x) # To be updated in loop below
+    for gi in np.arange(x.shape[1]):
+        pg = pgs[gi]
+        pgp = pg * 1. / p
+        ms_inds[gi] = 2. * pgp * (1. - pgp) * ms_inds[gi]
+    return ms_inds
+
+def isolation_isg(x):
+    '''
+    Isolation index IS
+
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              Array with ISg indices for the k groups
+    '''
+    ws = x * 1. / x.sum(axis=0)
+    pgapa = (x.T * 1. / x.sum(axis=1)).T
+    pgp = x.sum(axis=0) * 1. / x.sum()
+    return (ws * pgapa / pgp).sum(axis=0)
+
+def gini_gig(x):
+    '''
+    Gini GI index
+
+    NOTE: based on Wolfram Mathworld formula in:
+
+            http://mathworld.wolfram.com/GiniCoefficient.html
+
+         Returns same value as `gini_gi`.
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              Gini GI index for every group k
+    '''
+    return np.apply_along_axis(_gini, 0, x)
+
+def ellison_glaeser_egg(x, hs=None):
+    '''
+    Ellison and Glaeser (1997) [1]_ index of concentration. Implemented as in
+    equation (5) of original reference
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per area) and k columns
+              (one per industry). Each cell indicates employment figures for
+              area n and industry k
+    hs      : array
+              [Optional] Array of dimension (k,) containing the Herfindahl
+              indices of each industry's plant sizes. If not passed, it is
+              assumed every plant contains one and only one worker and thus
+              H_k = 1 / P_k, where P_k is the total employment in k
+
+    Returns
+    -------
+    a       : array
+              EG index for every group k
+
+    References
+    ----------
+
+    .. [1] Ellison, G. and Glaeser, E. L. "Geographic Concentration in U.S.
+    Manufacturing Industries: A Dartboard Approach". Journal of Political
+    Economy. 105: 889-927
+
+    '''
+    industry_totals = x.sum(axis=0)
+    if hs==None:
+        hs = 1. / industry_totals
+    xs = x.sum(axis=1) * 1. / x.sum()
+    part = 1. - (xs**2).sum()
+    eg_inds = np.zeros(x.shape[1])
+    for gi in np.arange(x.shape[1]):
+        ss = x[:, gi] * 1. / industry_totals[gi]
+        g = ((ss - xs)**2).sum()
+        h = hs[gi]
+        eg_inds[gi] = (g - part * h) / (part * (1. - h))
+    return eg_inds
+
+def ellison_glaeser_egg_pop(x):
+    '''
+    Ellison and Glaeser (1997) [1]_ index of concentration. Implemented to be
+    computed with data about people (segregation/diversity) rather than as
+    industry concentration, following Mare et al (2012) [2]_
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              EG index for every group k
+
+    References
+    ----------
+
+    .. [1] Ellison, G. and Glaeser, E. L. "Geographic Concentration in U.S.
+    Manufacturing Industries: A Dartboard Approach". Journal of Political
+    Economy. 105: 889-927
+
+    .. [2] Mare, D., Pinkerton, R., Poot, J. and Coleman, A. (2012)
+    Residential Sorting Across Auckland Neighbourhoods. Mimeo. Wellington:
+    Motu Economic and Public Policy Research.
+
+    '''
+    pas = x.sum(axis=1)
+    pgs = x.sum(axis=0)
+    p = pas.sum()
+    pap = pas * 1. / p
+    opg = 1./ pgs
+    oopg = 1. - opg
+    eg_inds = np.zeros(x.shape[1])
+    for g in np.arange(x.shape[1]):
+        pgas = x[:, g]
+        pg = pgs[g]
+        num1n = (((pgas * 1. / pg) - (pas * 1. / p))**2).sum()
+        num1d = 1. - ((pas * 1. / p)**2).sum()
+        num2 = opg[g]
+        den = oopg[g]
+        eg_inds[g] = ((num1n / num1d) - num2) / den
+    return eg_inds
+
+def maurel_sedillot_msg(x, hs=None):
+    '''
+    Maurel and Sedillot (1999) [1]_ index of concentration. Implemented as in
+    equation (7) of original reference
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    hs      : array
+              [Optional] Array of dimension (k,) containing the Herfindahl
+              indices of each industry's plant sizes. If not passed, it is
+              assumed every plant contains one and only one worker and thus
+              H_k = 1 / P_k, where P_k is the total employment in k
+
+    Returns
+    -------
+    a       : array
+              MS index for every group k
+
+    References
+    ----------
+
+    .. [1] Maurel, F. and Sedillot, B. (1999). "A Measure of the Geographic
+    Concentration in French Manufacturing Industries". Regional Science
+    and Urban Economics 29: 575-604
+    
+    '''
+    industry_totals = x.sum(axis=0)
+    if hs==None:
+        hs = 1. / industry_totals
+    x2s = np.sum((x.sum(axis=1) * 1. / x.sum())**2)
+    ms_inds = np.zeros(x.shape[1])
+    for gi in np.arange(x.shape[1]):
+        s2s = np.sum((x[:, gi] * 1. / industry_totals[gi])**2)
+        h = hs[gi]
+        num = ((s2s - x2s) / (1. - x2s)) - h
+        den = 1. - h
+        ms_inds[gi] = num / den
+    return ms_inds
+
+def maurel_sedillot_msg_pop(x):
+    '''
+    Maurel and Sedillot (1999) [1]_ index of concentration. Implemented to be
+    computed with data about people (segregation/diversity) rather than as
+    industry concentration, following Mare et al (2012) [2]_
+
+    ...
+
+    Arguments
+    ---------
+    x       : array
+              N x k array containing N rows (one per neighborhood) and k columns
+              (one per cultural group)
+    Returns
+    -------
+    a       : array
+              MS index for every group k
+
+    References
+    ----------
+
+    .. [1] Maurel, F. and Sedillot, B. (1999). "A Measure of the Geographic
+    Concentration in French Manufacturing Industries". Regional Science
+    and Urban Economics 29: 575-604
+
+    .. [2] Mare, D., Pinkerton, R., Poot, J. and Coleman, A. (2012)
+    Residential Sorting Across Auckland Neighbourhoods. Mimeo. Wellington:
+    Motu Economic and Public Policy Research.
+    
+    '''
+    pas = x.sum(axis=1)
+    pgs = x.sum(axis=0)
+    p = pas.sum()
+    pap = pas * 1. / p
+    eg_inds = np.zeros(x.shape[1])
+    for g in np.arange(x.shape[1]):
+        pgas = x[:, g]
+        pg = pgs[g]
+        num1n = ((pgas * 1. / pg)**2 - (pas * 1. / p)**2).sum()
+        num1d = 1. - ((pas * 1. / p)**2).sum()
+        num2 = 1. / pg
+        den = 1. - (1. / pg)
+        eg_inds[g] = ((num1n / num1d) - num2) / den
+    return eg_inds
+
+if __name__=='__main__':
+    np.random.seed(1)
+    x = np.round(np.random.random((10, 3)) * 100).astype(int)
+    #x[:, 2] = 0
+    ids = [abundance, \
+            margalev_md, \
+            menhinick_mi, \
+            simpson_so, \
+            simpson_sd, \
+            fractionalization_gs, \
+            herfindahl_hd, \
+            shannon_se, \
+            gini_gi, \
+            gini_gi_m, \
+            hoover_hi, \
+            segregation_gsg, \
+            modified_segregation_msg, \
+            isolation_isg, \
+            gini_gig, \
+            ellison_glaeser_egg, \
+            ellison_glaeser_egg_pop, \
+            maurel_sedillot_msg, \
+            maurel_sedillot_msg_pop, \
+            ]
+    res = [(f_i.func_name, f_i(x)) for f_i in ids]
+    print '\nIndices'
+    for r in res:
+        print r[1], '\t', r[0]
+
+    tau = np.random.random((x.shape[1], x.shape[1]))
+    for i in range(tau.shape[0]):
+        tau[i, i] = 1.
+    print similarity_w_wd(x, tau)
+
diff --git a/pysal/inequality/gini.py b/pysal/inequality/gini.py
new file mode 100644
index 0000000..46d65f8
--- /dev/null
+++ b/pysal/inequality/gini.py
@@ -0,0 +1,167 @@
+"""
+Gini based Inequality Metrics
+"""
+
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+#from pysal.common import *
+import numpy as np
+from scipy.stats import norm as NORM
+
+__all__ = ['Gini', 'Gini_Spatial']
+
+
+class Gini:
+    """
+    Classic Gini coefficient in absolute deviation form
+
+    Parameters
+    ----------
+
+    y : array (n,1)
+       attribute
+
+    Attributes
+    ----------
+
+    g : float
+       Gini coefficient
+
+    """
+
+    def __init__(self, x):
+
+        x.shape = (x.shape[0],)
+        d = np.abs(np.array([x - xi for xi in x]))
+        n = len(x)
+        xbar = x.mean()
+        den = xbar * 2 * n**2
+        dtotal = d.sum()
+        self.g = dtotal/den
+
+
+class Gini_Spatial:
+    """
+    Spatial Gini coefficient
+
+    Provides for computationally based inference regarding the contribution of
+    spatial neighbor pairs to overall inequality across a set of regions. [1]_
+
+    Parameters
+    ----------
+
+    y : array (n,1)
+       attribute
+
+    w : binary spatial weights object
+
+    permutations : int (default = 99)
+       number of permutations for inference
+
+    Attributes
+    ----------
+
+    g : float
+       Gini coefficient
+
+    wg : float
+       Neighbor inequality component (geographic inequality)
+
+    wcg : float
+       Non-neighbor inequality component (geographic complement inequality)
+
+    wcg_share : float
+       Share of inequality in non-neighbor component
+
+    If Permutations > 0
+
+    p_sim : float
+       pseudo p-value for spatial gini
+
+    e_wcg : float
+       expected value of non-neighbor inequality component (level) from permutations
+
+    s_wcg : float
+           standard deviation non-neighbor inequality component (level) from permutations
+
+    z_wcg : float
+           z-value non-neighbor inequality component (level) from permutations
+
+    p_z_sim : float
+             pseudo p-value based on a standard normal approximation of permutation-based values
+
+
+    Examples
+    --------
+    >>> import pysal
+    >>> import numpy as np
+
+    Use data from the 32 Mexican States, Decade frequency 1940-2010
+
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+
+    Define regime neighbors
+
+    >>> regimes=np.array(f.by_col('hanson98'))
+    >>> w = pysal.block_weights(regimes)
+    >>> np.random.seed(12345)
+    >>> gs = pysal.inequality.gini.Gini_Spatial(y[:,0],w)
+    >>> gs.p_sim
+    0.01
+    >>> gs.wcg
+    4353856.0
+    >>> gs.e_wcg
+    1067629.2525252525
+    >>> gs.s_wcg
+    95869.167798782844
+    >>> gs.z_wcg
+    34.2782442252145
+    >>> gs.p_z_sim
+    0.0
+
+    Thus, the amount of inequality between pairs of states that are not in the
+    same regime (neighbors) is significantly higher than what is expected
+    under the null of random spatial inequality.
+
+
+    References
+    ----------
+
+    .. [1] Rey, S.J. and R. Smith (2012) "A spatial decomposition of the Gini
+        coefficient." Letters in Spatial and Resource Sciences. DOI 10.1007/s12076-012-0086-z
+
+    """
+    def __init__(self, x, w, permutations=99):
+        x.shape = (x.shape[0],)
+        d = np.abs(np.array([x - xi for xi in x]))
+        n = len(x)
+        xbar = x.mean()
+        den = xbar * 2 * n**2
+        wg = w.sparse.multiply(d).sum()
+        self.wg = wg  # spatial inequality component
+        dtotal = d.sum()
+        wcg = dtotal - wg  # complement to spatial inequality component
+        self.wcg = wcg
+        self.g = dtotal / den
+        self.wcg_share = wcg / dtotal
+        self.dtotal = dtotal
+        self.den = den
+
+        if permutations:
+            ids = np.arange(n)
+            wcgp = np.zeros((permutations, 1))
+            for perm in xrange(permutations):
+                # permute rows/cols of d
+                np.random.shuffle(ids)
+                wcgp[perm] = w.sparse.multiply(d[ids, :][:, ids]).sum()
+            above = wcgp >= self.wcg
+            larger = above.sum()
+            if (permutations - larger) < larger:
+                larger = permutations - larger
+            self.p_sim = (larger + 1.) / (permutations + 1.)
+            self.e_wcg = wcgp.mean()
+            self.s_wcg = wcgp.std()
+            self.z_wcg = (self.wcg - self.e_wcg) / self.s_wcg
+            self.p_z_sim = 1.0 - NORM.cdf(self.z_wcg)
diff --git a/pysal/inequality/tests/__init__.py b/pysal/inequality/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/inequality/tests/test_theil.py b/pysal/inequality/tests/test_theil.py
new file mode 100644
index 0000000..f6266d5
--- /dev/null
+++ b/pysal/inequality/tests/test_theil.py
@@ -0,0 +1,42 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.inequality.theil import *
+
+
+class test_Theil(unittest.TestCase):
+
+    def test___init__(self):
+        # theil = Theil(y)
+        f = pysal.open(pysal.examples.get_path("mexico.csv"))
+        vnames = ["pcgdp%d" % dec for dec in range(1940, 2010, 10)]
+        y = np.transpose(np.array([f.by_col[v] for v in vnames]))
+        theil_y = Theil(y)
+        np.testing.assert_almost_equal(theil_y.T, np.array([0.20894344, 0.15222451, 0.10472941, 0.10194725, 0.09560113, 0.10511256, 0.10660832]))
+
+
+class test_TheilD(unittest.TestCase):
+    def test___init__(self):
+        # theil_d = TheilD(y, partition)
+        f = pysal.open(pysal.examples.get_path("mexico.csv"))
+        vnames = ["pcgdp%d" % dec for dec in range(1940, 2010, 10)]
+        y = np.transpose(np.array([f.by_col[v] for v in vnames]))
+        regimes = np.array(f.by_col('hanson98'))
+        theil_d = TheilD(y, regimes)
+        np.testing.assert_almost_equal(theil_d.bg, np.array([0.0345889, 0.02816853, 0.05260921, 0.05931219, 0.03205257, 0.02963731, 0.03635872]))
+
+
+class test_TheilDSim(unittest.TestCase):
+    def test___init__(self):
+        f = pysal.open(pysal.examples.get_path("mexico.csv"))
+        vnames = ["pcgdp%d" % dec for dec in range(1940, 2010, 10)]
+        y = np.transpose(np.array([f.by_col[v] for v in vnames]))
+        regimes = np.array(f.by_col('hanson98'))
+        np.random.seed(10)
+        theil_ds = TheilDSim(y, regimes, 999)
+        np.testing.assert_almost_equal(theil_ds.bg_pvalue, np.array(
+            [0.4, 0.344, 0.001, 0.001, 0.034, 0.072, 0.032]))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/inequality/theil.py b/pysal/inequality/theil.py
new file mode 100644
index 0000000..31d7ced
--- /dev/null
+++ b/pysal/inequality/theil.py
@@ -0,0 +1,201 @@
+"""Theil Inequality metrics
+
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+from pysal.common import *
+import numpy as np
+__all__ = ['Theil', 'TheilD', 'TheilDSim']
+
+SMALL = np.finfo('float').tiny
+
+
+class Theil:
+    """
+    Classic Theil measure of inequality
+
+        .. math::
+
+            T = \sum_{i=1}^n \left( \\frac{y_i}{\sum_{i=1}^n y_i} \ln \left[ N \\frac{y_i}{\sum_{i=1}^n y_i}\\right] \\right)
+
+    Parameters
+    ----------
+    y   : array (n,t) or (n,)
+          with n taken as the observations across which inequality is
+          calculated.  If y is (n,) then a scalar inequality value is
+          determined. If y is (n,t) then an array of inequality values are
+          determined, one value for each column in y.
+
+    Attributes
+    ----------
+
+    T   : array (t,) or (1,)
+          Theil's T for each column of y
+
+    Notes
+    -----
+    This computation involves natural logs. To prevent ln[0] from occurring, a
+    small value is added to each element of y before beginning the computation.
+
+    Examples
+    --------
+    >>> import pysal
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> theil_y=Theil(y)
+    >>> theil_y.T
+    array([ 0.20894344,  0.15222451,  0.10472941,  0.10194725,  0.09560113,
+            0.10511256,  0.10660832])
+    """
+
+    def __init__(self, y):
+
+        n = len(y)
+        y = y + SMALL * (y == 0)  # can't have 0 values
+        yt = y.sum(axis=0)
+        s = y / (yt * 1.0)
+        lns = np.log(n * s)
+        slns = s * lns
+        t = sum(slns)
+        self.T = t
+
+
+class TheilD:
+    """Decomposition of Theil's T based on partitioning of
+    observations into exhaustive and mutually exclusive groups
+
+    Parameters
+    ----------
+    y         : array  (n,t) or (n, )
+                with n taken as the observations across which inequality is
+                calculated If y is (n,) then a scalar inequality value is
+                determined. If y is (n,t) then an array of inequality values are
+                determined, one value for each column in y.
+    partition : array (n, )
+                elements indicating which partition each observation belongs
+                to. These are assumed to be exhaustive.
+
+    Attributes
+    ----------
+    T  : array (n,t) or (n,)
+         global inequality T
+    bg : array (n,t) or (n,)
+         between group inequality
+    wg : array (n,t) or (n,)
+         within group inequality
+
+    Examples
+    --------
+    >>> import pysal
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> regimes=np.array(f.by_col('hanson98'))
+    >>> theil_d=TheilD(y,regimes)
+    >>> theil_d.bg
+    array([ 0.0345889 ,  0.02816853,  0.05260921,  0.05931219,  0.03205257,
+            0.02963731,  0.03635872])
+    >>> theil_d.wg
+    array([ 0.17435454,  0.12405598,  0.0521202 ,  0.04263506,  0.06354856,
+            0.07547525,  0.0702496 ])
+   """
+    def __init__(self, y, partition):
+        groups = np.unique(partition)
+        T = Theil(y).T
+        ytot = y.sum(axis=0)
+
+        #group totals
+        gtot = np.array([y[partition == gid].sum(axis=0) for gid in groups])
+        mm = np.dot
+
+        if ytot.size == 1:  # y is 1-d
+            sg = gtot / (ytot * 1.)
+            sg.shape = (sg.size, 1)
+        else:
+            sg = mm(gtot, np.diag(1. / ytot))
+        ng = np.array([sum(partition == gid) for gid in groups])
+        ng.shape = (ng.size,)  # ensure ng is 1-d
+        n = y.shape[0]
+        # between group inequality
+        bg = np.multiply(sg, np.log(mm(np.diag(n * 1. / ng), sg))).sum(axis=0)
+        self.T = T
+        self.bg = bg
+        self.wg = T - bg
+
+
+class TheilDSim:
+    """Random permutation based inference on Theil's inequality decomposition.
+
+    Provides for computationally based inference regarding the inequality
+    decomposition using random spatial permutations. [1]_
+
+    Parameters
+    ----------
+    y            : array  (n,t) or (n, )
+                   with n taken as the observations across which inequality is
+                   calculated If y is (n,) then a scalar inequality value is
+                   determined. If y is (n,t) then an array of inequality values are
+                   determined, one value for each column in y.
+    partition    : array (n, )
+                   elements indicating which partition each observation belongs
+                   to. These are assumed to be exhaustive.
+    permutations : int
+                   Number of random spatial permutations for computationally
+                   based inference on the decomposition.
+
+    Attributes
+    ----------
+
+    observed   : array (n,t) or (n,)
+                 TheilD instance for the observed data.
+
+    bg         : array (permutations+1,t)
+                 between group inequality
+
+    bg_pvalue  : array (t,1)
+                 p-value for the between group measure.  Measures the
+                 percentage of the realized values that were greater than
+                 or equal to the observed bg value. Includes the observed
+                 value.
+
+    wg         : array (size=permutations+1)
+                 within group inequality. Depending on the shape of y, 1 or 2-dimensional.
+
+    Examples
+    --------
+    >>> import pysal
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> regimes=np.array(f.by_col('hanson98'))
+    >>> np.random.seed(10)
+    >>> theil_ds=TheilDSim(y,regimes,999)
+    >>> theil_ds.bg_pvalue
+    array([ 0.4  ,  0.344,  0.001,  0.001,  0.034,  0.072,  0.032])
+
+    References
+    ----------
+    .. [1] Rey, S.J. (2004) "Spatial analysis of regional economic growth,
+       inequality and change," in  M.F. Goodchild and D.G. Jannelle (eds.)
+       Spatially Integrated Social Science. Oxford University Press: Oxford.
+       Pages 280-299.
+
+    """
+    def __init__(self, y, partition, permutations=99):
+
+        observed = TheilD(y, partition)
+        bg_ct = observed.bg == observed.bg  # already have one extreme value
+        bg_ct = bg_ct * 1.0
+        results = [observed]
+        for perm in range(permutations):
+            yp = np.random.permutation(y)
+            t = TheilD(yp, partition)
+            bg_ct += (1.0 * t.bg >= observed.bg)
+            results.append(t)
+        self.results = results
+        self.T = observed.T
+        self.bg_pvalue = bg_ct / (permutations * 1.0 + 1)
+        self.bg = np.array([r.bg for r in results])
+        self.wg = np.array([r.wg for r in results])
+
diff --git a/pysal/meta/akern1.wmd b/pysal/meta/akern1.wmd
new file mode 100644
index 0000000..61b6d7e
--- /dev/null
+++ b/pysal/meta/akern1.wmd
@@ -0,0 +1,15 @@
+{
+    "input1": {
+        "data1": {
+        "type": "shp",
+        "uri": "../examples/columbus.shp"
+        }
+    },
+    "weight_type": "akernel",
+    "transform": "O",
+    "parameters": {
+        "function": "triangular",
+        "bandwidths": null,
+        "k": 2
+    }
+}
diff --git a/pysal/meta/chain.wmd b/pysal/meta/chain.wmd
new file mode 100644
index 0000000..89100f1
--- /dev/null
+++ b/pysal/meta/chain.wmd
@@ -0,0 +1,14 @@
+
+{
+    "input1": {
+        "type": "prov",
+        "uri": "http://toae.org/pub/wrook1.wmd"
+    },
+    "weight_type": "higher_order",
+    "transform": "O",
+    "parameters": {
+        "order":2,
+        "lower":0
+    }
+}
+
diff --git a/pysal/meta/chain2.wmd b/pysal/meta/chain2.wmd
new file mode 100644
index 0000000..0ac918c
--- /dev/null
+++ b/pysal/meta/chain2.wmd
@@ -0,0 +1,14 @@
+
+{
+    "input1": {
+        "type": "prov",
+        "uri": "chain.wmd"
+    },
+    "weight_type": "higher_order",
+    "transform": "O",
+    "parameters": {
+        "order":2,
+        "lower":0
+    }
+}
+
diff --git a/pysal/meta/kernel.wmd b/pysal/meta/kernel.wmd
new file mode 100644
index 0000000..ef3369a
--- /dev/null
+++ b/pysal/meta/kernel.wmd
@@ -0,0 +1,17 @@
+{
+    "input1": {
+        "data1":{
+            "type": "shp",
+            "uri": "../examples/columbus.shp"}
+    },
+    "weight_type": "kernel",
+    "transform": "O",
+    "parameters": {
+        "function": "triangular",
+        "bandwidths": null,
+        "k": 2
+    }
+}
+
+
+
diff --git a/pysal/meta/knn.wmd b/pysal/meta/knn.wmd
new file mode 100644
index 0000000..f5091ec
--- /dev/null
+++ b/pysal/meta/knn.wmd
@@ -0,0 +1,14 @@
+{
+    "input1": {
+        "type": "shp",
+        "uri": "../examples/columbus.shp"
+    },
+    "weight_type": "knn",
+    "transform": "O",
+    "parameters": {
+        "p": 2,
+        "k": 2,
+        "radius": null,
+        "id_variable": null
+    }
+}
diff --git a/pysal/meta/taz_block.wmd b/pysal/meta/taz_block.wmd
new file mode 100644
index 0000000..ed45335
--- /dev/null
+++ b/pysal/meta/taz_block.wmd
@@ -0,0 +1,11 @@
+
+{
+    "input1": {
+        "data1":{
+            "type": "dbf",
+            "uri": "http://toae.org/pub/taz.dbf"}
+    },
+    "weight_type": "block",
+    "parameters": {"block_variable": "CNTY"},
+    "transform": "O"
+}
diff --git a/pysal/meta/taz_intersection.wmd b/pysal/meta/taz_intersection.wmd
new file mode 100644
index 0000000..e25704b
--- /dev/null
+++ b/pysal/meta/taz_intersection.wmd
@@ -0,0 +1,16 @@
+
+{
+    "input1":{
+        "data1": {
+                "type": "prov",
+                "uri": "taz_rook.wmd"},
+        "data2": {
+                "type": "prov",
+                "uri": "taz_block.wmd"}
+    },
+    "weight_type": "intersection",
+    "transform": "O",
+    "parameters": {
+    }
+}
+
diff --git a/pysal/meta/taz_rook.wmd b/pysal/meta/taz_rook.wmd
new file mode 100644
index 0000000..06a0e32
--- /dev/null
+++ b/pysal/meta/taz_rook.wmd
@@ -0,0 +1,10 @@
+
+{
+    "input1": {
+        "data1":{
+            "type": "shp",
+            "uri": "http://toae.org/pub/taz.shp"}
+    },
+    "weight_type": "rook",
+    "transform": "O"
+}
diff --git a/pysal/meta/wmd.py b/pysal/meta/wmd.py
new file mode 100644
index 0000000..3cef3dd
--- /dev/null
+++ b/pysal/meta/wmd.py
@@ -0,0 +1,476 @@
+"""
+Weights Meta Data
+
+Prototyping meta data functions and classes for weights provenance
+
+
+Based on Anselin, L., S.J. Rey and W. Li (2014) "Metadata and provenance for
+spatial analysis: the case of spatial weights." International Journal of
+Geographical Information Science.  DOI:10.1080/13658816.2014.917313
+
+
+
+TODO
+----
+
+- Document each public function with working doctest
+- Abstract the test files as they currently assume location is source
+  directory
+- have wmd_reader take either a wmd file or a wmd dictionary/object
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu>, Wenwen Li <wenwen at asu.edu>"
+import pysal as ps
+import io, json
+import httplib
+from urlparse import urlparse
+import urllib2 as urllib
+import copy
+import numpy as np
+
+def wmd_reader(fileName):
+    """
+
+    Examples
+    --------
+    >>> import wmd
+    >>> wr = wmd.wmd_reader('w1rook.wmd')
+    wmd_reader failed:  w1rook.wmd
+    >>> wr = wmd.wmd_reader('wrook1.wmd')
+    >>> wr.neighbors[2]
+    [0, 1, 3, 4]
+    """
+
+    try:
+        meta_data = _uri_reader(fileName)
+    except:
+        try:
+            with open(fileName, 'r') as fp:
+                meta_data = json.load(fp)
+                global fullmeta
+                fullmeta = {}
+                fullmeta['root'] =  copy.deepcopy(meta_data)
+                w = _wmd_parser(meta_data)
+                return w
+        except:
+            print 'wmd_reader failed: ', fileName
+
+class WMD(ps.W):
+    """Weights Meta Data Class"""
+    def __init__(self, neighbors=None,  weights=None, id_order=None):
+        self.meta_data = {}
+        super(WMD, self).__init__(neighbors, weights, id_order)
+
+    # override transform property to record any post-instantiation
+    # transformations in meta data
+
+    @ps.W.transform.setter
+    def transform(self, value):
+        super(WMD, WMD).transform.__set__(self, value)
+        self.meta_data['transform'] = self._transform
+
+    def write(self, fileName, data=False):
+        """
+
+        Examples
+        --------
+        >>> import wmd
+        >>> wr = wmd.wmd_reader('w1rook.wmd')
+        wmd_reader failed:  w1rook.wmd
+        >>> wr = wmd.wmd_reader('wrook1.wmd')
+        >>> wr.write('wr1.wmd')
+        >>> wr1 = wmd.wmd_reader('wr1.wmd')
+        >>> wr.neighbors[2]
+        [0, 1, 3, 4]
+        >>> wr1.neighbors[2]
+        [0, 1, 3, 4]
+        >>>
+
+        """
+        _wmd_writer(self, fileName, data=data)
+
+######################### Private functions #########################
+    
+def _wmd_writer(wmd_object, fileName, data=False):
+    try:
+        with open(fileName, 'w') as f:
+            if data:
+                wmd_object.meta_data['data'] = {}
+                wmd_object.meta_data['data']['weights'] = wmd_object.weights
+                wmd_object.meta_data['data']['neighbors'] = wmd_object.neighbors
+            json.dump(wmd_object.meta_data,
+                    f,
+                    indent=4,
+                    separators=(',', ': '))
+    except:
+        print 'wmd_writer failed.'
+
+def _block(arg_dict):
+    """
+    General handler for block weights
+
+
+    Examples
+    --------
+    >>> w = wmd_reader('taz_block.wmd')
+    >>> w.n
+    4109
+    >>> w.meta_data
+    {'root': {u'input1': {u'data1': {u'type': u'dbf', u'uri': u'http://toae.org/pub/taz.dbf'}}, u'weight_type': u'block', u'transform': u'O', u'parameters': {u'block_variable': u'CNTY'}}}
+
+    """
+    input1 = arg_dict['input1']
+    for key in input1:
+        input1 = input1[key]
+        break
+    uri = input1['uri']
+    weight_type = arg_dict['weight_type'].lower()
+    file_name = uri
+
+    var_name = arg_dict['parameters']['block_variable']
+    dbf = ps.open(uri)
+    block = np.array(dbf.by_col(var_name))
+    dbf.close()
+    w = ps.weights.util.block_weights(block)
+    w = WMD(w.neighbors, w.weights)
+    w.meta_data = {}
+    w.meta_data['input1'] = {"type": 'dbf', 'uri': uri}
+    w.meta_data['transform'] = w.transform
+    w.meta_data['weight_type'] = weight_type
+    w.meta_data['parameters'] = {'block_variable':var_name}
+
+    return w
+
+def _contiguity(arg_dict):
+    """
+    General handler for building contiguity weights from shapefiles
+
+    Examples
+    --------
+
+    >>> w = wmd_reader('wrook1.wmd')
+    >>> w.n
+    49
+    >>> w.meta_data
+    {'root': {u'input1': {u'data1': {u'type': u'shp', u'uri': u'http://toae.org/pub/columbus.shp'}}, u'weight_type': u'rook', u'transform': u'O'}}
+    """
+    input1 = arg_dict['input1']
+    for key in input1:
+        input1 = input1[key]
+        break
+    uri = input1['uri']
+    weight_type = arg_dict['weight_type']
+    weight_type = weight_type.lower()
+    if weight_type == 'rook':
+        w = ps.rook_from_shapefile(uri)
+    elif weight_type == 'queen':
+        w = ps.queen_from_shapefile(uri)
+    else:
+        print "Unsupported contiguity criterion: ",weight_type
+        return None
+    if 'parameters' in arg_dict:
+        order = arg_dict['parameters'].get('order',1) # default to 1st order
+        lower = arg_dict['parameters'].get('lower',0) # default to exclude lower orders
+        if order > 1:
+            w_orig = w
+            w = ps.higher_order(w,order)
+            if lower:
+                for o in xrange(order-1,1,-1):
+                    w = ps.weights.w_union(ps.higher_order(w_orig,o), w)
+                w = ps.weights.w_union(w, w_orig)
+        parameters = arg_dict['parameters']
+    else:
+        parameters = {'lower': 0, 'order':1 }
+    w = WMD(w.neighbors, w.weights)
+    w.meta_data = {}
+    w.meta_data["input1"] = {"type": 'shp', 'uri':uri}
+    w.meta_data["transform"] = w.transform
+    w.meta_data["weight_type"] =  weight_type
+    w.meta_data['parameters'] = parameters
+    return w
+
+def _kernel(arg_dict):
+    """
+    General handler for building kernel based weights from shapefiles
+
+    Examples
+    --------
+
+    >>> w = wmd_reader('kernel.wmd')
+    >>> w.n
+    49
+    >>> w.meta_data
+    {'root': {u'input1': {u'data1': {u'type': u'shp', u'uri': u'../examples/columbus.shp'}}, u'weight_type': u'kernel', u'transform': u'O', u'parameters': {u'function': u'triangular', u'bandwidths': None, u'k': 2}}}
+
+
+
+    """
+    input1 = arg_dict['input1']['data1']
+    uri = input1['uri']
+    weight_type = arg_dict['weight_type']
+    weight_type = weight_type.lower()
+    k = 2
+    bandwidths = None
+    function = 'triangular'
+    if 'parameters' in arg_dict:
+        k = arg_dict['parameters'].get('k',k) # set default to 2
+        bandwidths = arg_dict['parameters'].get('bandwidths',bandwidths)
+        function = arg_dict['parameters'].get('function', function)
+    else:
+        parameters = {}
+        parameters['k'] = k
+        parameters['bandwidths'] = bandwidths
+        parameters['function'] = function
+        arg_dict['parameters'] = parameters
+
+
+    if weight_type == 'akernel':
+        # adaptive kernel
+        w = ps.adaptive_kernelW_from_shapefile(uri, bandwidths = bandwidths,
+                k=k, function = function)
+    elif weight_type == 'kernel':
+        w = ps.kernelW_from_shapefile(uri, k=k, function = function)
+    else:
+        print "Unsupported kernel: ",weight_type
+        return None
+    w = WMD(w.neighbors, w.weights)
+    w.meta_data = {}
+    w.meta_data["input1"] = {"type": 'shp', 'uri':uri}
+    w.meta_data["transform"] = w.transform
+    w.meta_data["weight_type"] =  weight_type
+    w.meta_data['parameters'] = arg_dict['parameters']
+    return w
+
+def _distance(arg_dict):
+    """
+    General handler for distance based weights obtained from shapefiles
+    """
+    input1 = arg_dict['input1']
+    uri = input1['uri']
+    weight_type = arg_dict['weight_type']
+    weight_type = weight_type.lower()
+    k = 2
+    id_variable = None
+    p = 2
+    radius = None
+    if 'parameters' in arg_dict:
+        k = arg_dict['parameters'].get('k',k) # set default to 2
+        id_variable = arg_dict['parameters'].get('id_variable', id_variable)
+        p = arg_dict['parameters'].get('p',p)
+        radius = arg_dict['parameters'].get('radius', radius)
+
+    else:
+        parameters = {}
+        parameters['k'] = 2
+        parameters['id_variable'] = None
+        parameters['radius'] = None
+        parameters['p'] = 2
+        arg_dict['parameters'] = parameters
+
+    if weight_type == 'knn':
+        w = ps.knnW_from_shapefile(uri,k=k,p=p,idVariable=id_variable,
+                radius=radius)
+        w = WMD(w.neighbors, w.weights)
+        w.meta_data = {}
+        w.meta_data["input1"] = {"type": 'shp', 'uri':uri}
+        w.meta_data["weight_type"] =  'knn'
+        w.meta_data["transform"] = w.transform
+        w.meta_data['parameters'] = arg_dict['parameters']
+        return w
+
+def _higher_order(arg_dict):
+    wmd = arg_dict['wmd']
+    order = 2
+    if 'parameters' in arg_dict:
+        order = arg_dict['parameters'].get('order', order)
+    else:
+        parameters = {}
+        parameters['order'] = order
+        arg_dict['parameters'] = parameters
+
+
+    w = ps.higher_order(wmd, order)
+    w = WMD(w.neighbors, w.weights)
+    w.meta_data = {}
+    w.meta_data['input1'] = arg_dict['input1']
+    w.meta_data['parameters'] = arg_dict['parameters']
+
+    return w
+
+def _intersection(arg_dict):
+    #wmd = arg_dict['wmd']
+    w1 = arg_dict['input1']['data1']['uri']
+    w2 = arg_dict['input1']['data2']['uri']
+    w = ps.w_intersection(w1,w2)
+    w = WMD(w.neighbors, w.weights)
+    return w
+
+def _geojsonf(arg_dict):
+    """
+    Handler for local geojson files
+    """
+    input1 = arg_dict['input1']
+    uri = input1['uri']
+    weight_type = arg_dict['weight_type']
+    weight_type = weight_type.lower()
+    id_variable = None
+
+    if weight_type == 'queen_geojsonf':
+        w = ps.weights.user.queen_from_geojsonf(uri)
+        w.meta_data = {}
+        w.meta_data["input1"] = {"type": 'geojsonf', 'uri':uri}
+        w.meta_data["weight_type"] =  'queen'
+        w.meta_data["transform"] = w.transform
+        return w
+
+# wrapper dict that maps specific weights types to a handler function that
+# builds the specific weights instance
+WEIGHT_TYPES = {}
+WEIGHT_TYPES['rook'] = _contiguity
+WEIGHT_TYPES['queen'] = _contiguity
+WEIGHT_TYPES['akernel'] = _kernel
+WEIGHT_TYPES['kernel'] = _kernel
+WEIGHT_TYPES['knn'] = _distance
+WEIGHT_TYPES['higher_order'] = _higher_order
+WEIGHT_TYPES['block'] = _block
+WEIGHT_TYPES['intersection'] = _intersection
+#WEIGHT_TYPES['queen_geojsonf'] = geojsonf
+#WEIGHT_TYPES['geojsons'] = geojsons
+
+
+def _uri_reader(uri):
+    j = json.load(urllib.urlopen(uri))
+    return j
+
+def _wmd_read_only(fileName):
+    try:
+        meta_data = _uri_reader(fileName)
+    except:
+        try:
+            with open(fileName, 'r') as fp:
+                meta_data = json.load(fp)
+                return meta_data
+        except:
+            print '_wmd_read_only failed: ', fileName
+
+def _wmd_parser(wmd_object):
+    if 'root' in wmd_object:
+        wmd_object = wmd_object['root']
+    weight_type = wmd_object['weight_type'].lower()
+    for key in wmd_object['input1']:
+        #print key
+        if wmd_object['input1'][key]['type'] == 'prov':
+            #      call wmd_reader
+            uri = wmd_object['input1'][key]['uri']
+
+            meta_data = _wmd_read_only(uri)
+            fullmeta[uri] = copy.deepcopy(meta_data) #add full metadata
+            wmd = _wmd_parser(meta_data)
+            wmd_object['input1'][key]['uri'] = wmd
+        else:
+            # handle distributed files
+            uri = wmd_object['input1'][key]['uri']
+            try:
+                tmp = open(uri)
+                #print ' tmp: ', tmp
+                wmd_object['input1'][key]['uri'] = uri
+            except:
+                _download_shapefiles(uri)
+                uri = uri.split("/")[-1]
+                wmd_object['input1'][key]['uri'] = uri # use local copy
+
+    if weight_type in WEIGHT_TYPES:
+        #print weight_type
+        wmd  = WEIGHT_TYPES[weight_type](wmd_object)
+        wmd.meta_data = fullmeta
+    else:
+        print 'Unsupported weight type: ', weight_type
+
+    return wmd
+
+def _download_shapefiles(file_name):
+    file_parts = file_name.split("/")
+    file_prefix = file_parts[-1].split(".")[0]
+    exts = [ ".shp", ".dbf", ".shx" ]
+    for ext in exts:
+        # rebuild url
+        file_name = file_prefix + ext
+        file_parts[-1] = file_name
+        new_url = "/".join(file_parts)
+        #print file_name, new_url
+        u = urllib.urlopen(new_url)
+        f = open(file_name, 'wb')
+        meta = u.info()
+        file_size = int(meta.getheaders("Content-Length")[0])
+        #print "Downloading: %s Bytes: %s" % (file_name, file_size)
+        file_size_dl = 0
+        block_sz = 8192
+        while True:
+            bf = u.read(block_sz)
+            if not bf:
+                break
+            file_size_dl += len(bf)
+            f.write(bf)
+            status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. /
+                    file_size)
+            status = status + chr(8)* (len(status)+1)
+        #print status, f.close()
+
+if __name__ == '__main__':
+
+    # distributed file
+    w1 = wmd_reader("wrook1.wmd")
+
+##    # order
+##    w1o = wmd_reader('wrooko1.wmd')
+##    w2o = wmd_reader('wrooko2.wmd')
+##    w2ol = wmd_reader('wrooko2l.wmd')
+##
+##    # kernels
+    ak1 = wmd_reader('akern1.wmd')
+    kern = wmd_reader('kernel.wmd')
+##
+##    # knn
+##    knn = wmd_reader('knn.wmd')
+##
+##
+##
+##    # moran workflow
+##    import pysal as ps
+
+
+    # geojson
+    #wj = wmd_reader("wgeojson.wmd")
+
+
+    # here we test chaining
+#    r1 = wmd_reader('chain2inputs.wmd')
+#    print "full metadata is listed below: \n", fullmeta
+    # r2 = wmd_reader('chain2.wmd')
+
+    taz_int = wmd_reader("taz_intersection.wmd")
+
+
+
+    ## intersection between queen and block weights
+    #import numpy as np
+    #w = ps.lat2W(4,4)
+    #block_variable = np.ones((w.n,1))
+    #block_variable[:8] = 0
+    #w_block = ps.weights.util.block_weights(block_variable)
+
+    #w_intersection = ps.w_intersection(w, w_block)
+
+
+    ## with Columbus example using EW as the block and queen
+    #dbf = ps.open("columbus.dbf")
+    #ew = np.array(dbf.by_col("EW"))
+    #dbf.close()
+    #w_ew = ps.weights.util.block_weights(ew)
+    #wr = ps.rook_from_shapefile("columbus.shp")
+    #w_int = ps.w_intersection(w_ew, wr)
+
+
+    #blk = wmd_reader('block2.wmd')
+
+    #taz_int = wmd_reader("http://spatial.csf.asu.edu/taz_intersection.wmd")
+
diff --git a/pysal/meta/wrook1.wmd b/pysal/meta/wrook1.wmd
new file mode 100644
index 0000000..eebfd13
--- /dev/null
+++ b/pysal/meta/wrook1.wmd
@@ -0,0 +1,9 @@
+{
+    "input1": {
+        "data1":{
+            "type": "shp",
+            "uri": "http://toae.org/pub/columbus.shp"}
+    },
+    "weight_type": "rook",
+    "transform": "O"
+}
\ No newline at end of file
diff --git a/pysal/meta/wrooko1.wmd b/pysal/meta/wrooko1.wmd
new file mode 100644
index 0000000..a2a0159
--- /dev/null
+++ b/pysal/meta/wrooko1.wmd
@@ -0,0 +1,12 @@
+{
+    "input1": {
+        "type": "shp",
+        "uri": "../examples/columbus.shp"
+    },
+    "weight_type": "rook",
+    "transform": "O",
+    "parameters": {
+        "order":1,
+        "lower":0
+    }
+}
diff --git a/pysal/meta/wrooko2.wmd b/pysal/meta/wrooko2.wmd
new file mode 100644
index 0000000..5a838ea
--- /dev/null
+++ b/pysal/meta/wrooko2.wmd
@@ -0,0 +1,12 @@
+{
+    "input1": {
+        "type": "shp",
+        "uri": "../examples/columbus.shp"
+    },
+    "weight_type": "rook",
+    "transform": "O",
+    "parameters": {
+        "order":2,
+        "lower":0
+    }
+}
diff --git a/pysal/meta/wrooko2l.wmd b/pysal/meta/wrooko2l.wmd
new file mode 100644
index 0000000..58b16a6
--- /dev/null
+++ b/pysal/meta/wrooko2l.wmd
@@ -0,0 +1,12 @@
+{
+    "input1": {
+        "type": "shp",
+        "uri": "../examples/columbus.shp"
+    },
+    "weight_type": "rook",
+    "transform": "O",
+    "parameters": {
+        "order":2,
+        "lower":1
+    }
+}
diff --git a/pysal/network/Network Usage.ipynb b/pysal/network/Network Usage.ipynb
new file mode 100644
index 0000000..95aeaec
--- /dev/null
+++ b/pysal/network/Network Usage.ipynb	
@@ -0,0 +1,644 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:db645dee5bfbe11d1d339edf10b272aa72bf2d40c4f62e2ec987e6dad1f6e73d"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "%pylab inline\n",
+      "import pysal as ps\n",
+      "\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Populating the interactive namespace from numpy and matplotlib\n"
+       ]
+      }
+     ],
+     "prompt_number": 1
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ntw = ps.Network(ps.examples.get_path('geodanet/streets.shp'))\n",
+      "\n",
+      "#Snap a point pattern to the network\n",
+      "ntw.snapobservations(ps.examples.get_path('geodanet/crimes.shp'), 'crimes', attribute=True)\n",
+      "ntw.snapobservations(ps.examples.get_path('geodanet/schools.shp'), 'schools', attribute=False)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 2
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "####A network is composed of a single topological representation of a road and $n$ point patterns which are snapped to the network."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ntw.pointpatterns"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 3,
+       "text": [
+        "{'crimes': <pysal.network.network.PointPattern instance at 0x10a552d88>,\n",
+        " 'schools': <pysal.network.network.PointPattern instance at 0x10a5621b8>}"
+       ]
+      }
+     ],
+     "prompt_number": 3
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "dir(ntw.pointpatterns['crimes'])"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 4,
+       "text": [
+        "['__doc__',\n",
+        " '__init__',\n",
+        " '__module__',\n",
+        " 'dist_to_node',\n",
+        " 'npoints',\n",
+        " 'obs_to_edge',\n",
+        " 'obs_to_node',\n",
+        " 'points',\n",
+        " 'snapped_coordinates']"
+       ]
+      }
+     ],
+     "prompt_number": 4
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "#####Attributes for every point pattern\n",
+      "\n",
+      "1. dist_to_node dict keyed by pointid with the value being a dict in the form {node: distance to node, node: distance to node}\n",
+      "2. obs_to_edge dict keyed by edge with the value being a dict in the form {pointID:(x-coord, y-coord), pointID:(x-coord, y-coord), ... }\n",
+      "3. obs_to_node\n",
+      "4. points geojson like representation of the point pattern.  Includes properties if read with attributes=True\n",
+      "5. snapped_coordinates dict keyed by pointid with the value being (x-coord, y-coord)"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "####Counts per edge are important, but should not be precomputed since we have different representations of the network (digitized and graph currently).  (Relatively) Uniform segmentation still needs to be done."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge,\n",
+      "                            graph=False)\n",
+      "sum(counts.values()) / float(len(counts.keys()))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 5,
+       "text": [
+        "2.6822429906542058"
+       ]
+      }
+     ],
+     "prompt_number": 5
+    },
+    {
+     "cell_type": "heading",
+     "level": 3,
+     "metadata": {},
+     "source": [
+      "Segmentation"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "n200 = ntw.segment_edges(200.0)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 6
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "counts = n200.count_per_edge(n200.pointpatterns['crimes'].obs_to_edge, graph=False)\n",
+      "sum(counts.values()) / float(len(counts.keys()))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 7,
+       "text": [
+        "2.0354609929078014"
+       ]
+      }
+     ],
+     "prompt_number": 7
+    },
+    {
+     "cell_type": "heading",
+     "level": 4,
+     "metadata": {},
+     "source": [
+      "Visualization of the shapefile derived, unsegmented network with nodes in a larger, semi-opaque form and the distance segmented network with small, fully opaque nodes."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import networkx as nx\n",
+      "figsize(10,10)\n",
+      "\n",
+      "g = nx.Graph()\n",
+      "for e in ntw.edges:\n",
+      "    g.add_edge(*e)\n",
+      "for n, p in ntw.node_coords.iteritems():\n",
+      "    g.node[n] = p\n",
+      "nx.draw(g, ntw.node_coords, node_size=300, alpha=0.5)\n",
+      "\n",
+      "g = nx.Graph()\n",
+      "for e in n200.edges:\n",
+      "    g.add_edge(*e)\n",
+      "for n, p in n200.node_coords.iteritems():\n",
+      "    g.node[n] = p\n",
+      "nx.draw(g, n200.node_coords, node_size=25, alpha=1.0)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAxkAAALxCAYAAAAwg2ukAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xt8FOW9P/DPzu4mkw25kCsJEhRCYENIQIjc6qWtCgrq\nsT2ttlYukYtgLVRROafntD22WGgjoscqIoSLl9ZfPW0VVPBSqxYEIYRcSCCEm4FcyP22O7s7O/P7\nIzvLXmYvSWZ3B/y+X6+8lM3uM8/MPtmdz/M884xGFEURhBBCCCGEEKIQJtIVIIQQQgghhFxdKGQQ\nQgghhBBCFEUhgxBCCCGEEKIoChmEEEIIIYQQRVHIIIQQQgghhCiKQgYhhBBCCCFEURQyCCGEEEII\nIYqikEEIIYQQQghRFIUMQgghhBBCiKIoZBBCCCGEEEIURSGDEEIIIYQQoigKGYQQQgghhBBFUcgg\nhBBCCCGEK [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10ae232d0>"
+       ]
+      }
+     ],
+     "prompt_number": 8
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "#####Moran's I using the digitized network"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "#Binary Adjacency\n",
+      "#ntw.contiguityweights(graph=False)\n",
+      "\n",
+      "w = ntw.contiguityweights(graph=False)\n",
+      "\n",
+      "\n",
+      "#Build the y vector\n",
+      "#edges = ntw.w.neighbors.keys()\n",
+      "edges = w.neighbors.keys()\n",
+      "y = np.zeros(len(edges))\n",
+      "for i, e in enumerate(edges):\n",
+      "    if e in counts.keys():\n",
+      "        y[i] = counts[e]\n",
+      "\n",
+      "#Moran's I\n",
+      "#res = ps.esda.moran.Moran(y, ntw.w, permutations=99)\n",
+      "res = ps.esda.moran.Moran(y, w, permutations=99)\n",
+      "print dir(res)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "['EI', 'EI_sim', 'I', 'VI_norm', 'VI_rand', 'VI_sim', '_Moran__calc', '_Moran__moments', '__doc__', '__init__', '__module__', 'n', 'p_norm', 'p_rand', 'p_sim', 'p_z_sim', 'permutations', 'seI_norm', 'seI_rand', 'seI_sim', 'sim', 'w', 'y', 'z', 'z2ss', 'z_norm', 'z_rand', 'z_sim']\n"
+       ]
+      }
+     ],
+     "prompt_number": 9
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "####Moran's I using the graph representation to generate the W\n",
+      "\n",
+      "Note that we have to regenerate the counts per edge, since the graph will have less edges."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge, graph=True)\n",
+      "\n",
+      "#Binary Adjacency\n",
+      "#ntw.contiguityweights(graph=True)\n",
+      "w = ntw.contiguityweights(graph=True)\n",
+      "#Build the y vector\n",
+      "#edges = ntw.w.neighbors.keys()\n",
+      "edges = w.neighbors.keys()\n",
+      "y = np.zeros(len(edges))\n",
+      "for i, e in enumerate(edges):\n",
+      "    if e in counts.keys():\n",
+      "        y[i] = counts[e]\n",
+      "\n",
+      "#Moran's I\n",
+      "#res = ps.esda.moran.Moran(y, ntw.w, permutations=99)\n",
+      "res = ps.esda.moran.Moran(y, w, permutations=99)\n",
+      "\n",
+      "\n",
+      "print dir(res)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "['EI', 'EI_sim', 'I', 'VI_norm', 'VI_rand', 'VI_sim', '_Moran__calc', '_Moran__moments', '__doc__', '__init__', '__module__', 'n', 'p_norm', 'p_rand', 'p_sim', 'p_z_sim', 'permutations', 'seI_norm', 'seI_rand', 'seI_sim', 'sim', 'w', 'y', 'z', 'z2ss', 'z_norm', 'z_rand', 'z_sim']\n"
+       ]
+      }
+     ],
+     "prompt_number": 10
+    },
+    {
+     "cell_type": "heading",
+     "level": 4,
+     "metadata": {},
+     "source": [
+      "Moran's I using the segmented network and intensities instead of counts"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "#Binary Adjacency\n",
+      "#n200.contiguityweights(graph=False)\n",
+      "w = n200.contiguityweights(graph=False)\n",
+      "\n",
+      "\n",
+      "#Compute the counts\n",
+      "counts = n200.count_per_edge(n200.pointpatterns['crimes'].obs_to_edge, graph=False)\n",
+      "\n",
+      "#Build the y vector and convert from raw counts to intensities\n",
+      "#edges = n200.w.neighbors.keys()\n",
+      "edges = w.neighbors.keys()\n",
+      "y = np.zeros(len(edges))\n",
+      "for i, e in enumerate(edges):\n",
+      "    if e in counts.keys():\n",
+      "        length = n200.edge_lengths[e]\n",
+      "        y[i] = counts[e] / length\n",
+      "      \n",
+      "#Moran's I\n",
+      "#res = ps.esda.moran.Moran(y, n200.w, permutations=99)\n",
+      "res = ps.esda.moran.Moran(y, w, permutations=99)\n",
+      "\n",
+      "\n",
+      "print dir(res)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "['EI', 'EI_sim', 'I', 'VI_norm', 'VI_rand', 'VI_sim', '_Moran__calc', '_Moran__moments', '__doc__', '__init__', '__module__', 'n', 'p_norm', 'p_rand', 'p_sim', 'p_z_sim', 'permutations', 'seI_norm', 'seI_rand', 'seI_sim', 'sim', 'w', 'y', 'z', 'z2ss', 'z_norm', 'z_rand', 'z_sim']\n"
+       ]
+      }
+     ],
+     "prompt_number": 11
+    },
+    {
+     "cell_type": "heading",
+     "level": 3,
+     "metadata": {},
+     "source": [
+      "Timings for distance based methods, e.g. G-function"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import time\n",
+      "t1 = time.time()\n",
+      "n0 = ntw.allneighbordistances(ntw.pointpatterns['crimes'])\n",
+      "print time.time()-t1"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "1.19567084312\n"
+       ]
+      }
+     ],
+     "prompt_number": 12
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import time\n",
+      "t1 = time.time()\n",
+      "n1 = n200.allneighbordistances(n200.pointpatterns['crimes'])\n",
+      "print time.time()-t1"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "11.2402291298\n"
+       ]
+      }
+     ],
+     "prompt_number": 13
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Note that the first time these methods are called, the underlying node-to-node shortest path distance matrix has to be calculated. Subsequent calls will not require this, and will be much faster:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import time\n",
+      "t1 = time.time()\n",
+      "n0 = ntw.allneighbordistances(ntw.pointpatterns['crimes'])\n",
+      "print time.time()-t1"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "0.136250972748\n"
+       ]
+      }
+     ],
+     "prompt_number": 14
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import time\n",
+      "t1 = time.time()\n",
+      "n1 = n200.allneighbordistances(n200.pointpatterns['crimes'])\n",
+      "print time.time()-t1"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "0.160092830658\n"
+       ]
+      }
+     ],
+     "prompt_number": 15
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "####Simulate a point pattern on the network\n",
+      "\n",
+      "Need to supply a count of the number of points and a distribution (default is uniform).  Generally this will not be called by the user, since the simulation will be used for Monte Carlo permutation."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "npts = ntw.pointpatterns['crimes'].npoints\n",
+      "sim = ntw.simulate_observations(npts)\n",
+      "sim"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 16,
+       "text": [
+        "<pysal.network.network.SimulatedPointPattern instance at 0x1208e2878>"
+       ]
+      }
+     ],
+     "prompt_number": 16
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "####Create a nearest neighbor matrix using the crimes point pattern\n",
+      "\n",
+      "Right now, both the G and K functions generate a full distance matrix.  This is because I know that the full generation is correct, and I believe that the truncated generation, e.g. nearest neighbor, has a bug."
+     ]
+    },
+    {
+     "cell_type": "heading",
+     "level": 3,
+     "metadata": {},
+     "source": [
+      "G-function"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "gres = ps.NetworkG(ntw,\n",
+      "                         ntw.pointpatterns['crimes'],\n",
+      "                         permutations = 99)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 18
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "figsize(5,5)\n",
+      "plot(gres.xaxis, gres.observed, 'b-', linewidth=1.5, label='Observed')\n",
+      "plot(gres.xaxis, gres.upperenvelope, 'r--', label='Upper')\n",
+      "plot(gres.xaxis, gres.lowerenvelope, 'k--', label='Lower')\n",
+      "legend(loc='best')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 19,
+       "text": [
+        "<matplotlib.legend.Legend at 0x10bb0d690>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAUEAAAE4CAYAAADFI0E4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4lFX2wPHvTeghdEFIKIKIoLCAiHSC0kRlXSyAiuAq\nAuqK67MLgqtmFQR/LmV3QZdVioWiK8WGgpQg0sEAggSk915CTT2/P+6QThKSmXmnnM/zzGNm5ua9\nZ0I8ue+tRkRQSqlgFeJ0AEop5SRNgkqpoKZJUCkV1DQJKqWCmiZBpVRQ0ySolApqeSZBY8wUY8wx\nY8wvuZT5lzHmN2PMJmNME/eGqJRSnpOfluBUoOu13jTGdANuFpG6wLPA+26KTSmlPC7PJCgiy4Ez\nuRTpDnzkKrsGKGeMqeKe8JRSyrPc0ScYARzI8PwgEOmG6yqllMe5a2DEZHmua/GUUn6hiBuucQio\nnuF5pOu1T [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10aedb510>"
+       ]
+      }
+     ],
+     "prompt_number": 19
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "###K-function"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kres = ps.NetworkK(ntw,\n",
+      "                         ntw.pointpatterns['crimes'],\n",
+      "                         permutations=99)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stderr",
+       "text": [
+        "/Users/serge/Dropbox/p/pysal/src/pysal/pysal/network/analysis.py:142: RuntimeWarning: invalid value encountered in less_equal\n",
+        "  y[i] = len(nearest[nearest <= s])\n"
+       ]
+      }
+     ],
+     "prompt_number": 20
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "figsize(5,5)\n",
+      "plot(kres.xaxis, kres.observed, 'b-', linewidth=1.5, label='Observed')\n",
+      "plot(kres.xaxis, kres.upperenvelope, 'r--', label='Upper')\n",
+      "plot(kres.xaxis, kres.lowerenvelope, 'k--', label='Lower')\n",
+      "legend(loc='best')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "metadata": {},
+       "output_type": "pyout",
+       "prompt_number": 21,
+       "text": [
+        "<matplotlib.legend.Legend at 0x12143c7d0>"
+       ]
+      },
+      {
+       "metadata": {},
+       "output_type": "display_data",
+       "png": "iVBORw0KGgoAAAANSUhEUgAAAUcAAAE+CAYAAAAeZNJiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcjWX/wPHPNQzGTlMY+5asD/GgbFOWNIRHC4qSNknb\n01MoZRKp/EiKIokSiqylbDWIrDFkCDFkN2XJDLOd6/fHdUxnzpwZM+acc5/l+369zmvOue9r7vM9\nxnznuq9Vaa0RQgiRWYjVAQghhC+S5CiEEC5IchRCCBckOQohhAuSHIUQwgVJjkII4YLXkqNSarpS\n6pRSalcuyo5XSm23P35TSp31RoxCCHGF8tY4R6VUG+Ai8JnWumEevm8w0Fhr/ajHghNCCCdeqzlq\nrdcBmWqASqmaSqnvlFJblVJrlVJ1XHzr/cAcrwQphBB2BS1+/6nAE1rrA0qpFsBkoP2Vk0qpqkA1\n4AdrwhNCB [...]
+       "text": [
+        "<matplotlib.figure.Figure at 0x10aeb22d0>"
+       ]
+      }
+     ],
+     "prompt_number": 21
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 15
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/pysal/network/__init__.py b/pysal/network/__init__.py
new file mode 100644
index 0000000..610cb77
--- /dev/null
+++ b/pysal/network/__init__.py
@@ -0,0 +1,7 @@
+"""
+:mod:`network` --- Network spatial analysis
+===========================================
+
+"""
+
+import network
diff --git a/pysal/network/analysis.py b/pysal/network/analysis.py
new file mode 100644
index 0000000..2e63291
--- /dev/null
+++ b/pysal/network/analysis.py
@@ -0,0 +1,186 @@
+import numpy as np
+
+
class NetworkBase(object):
    """
    Base class for network-constrained point-pattern statistics (G, K, F).

    The constructor computes the observed statistic, generates permuted
    (simulated) point patterns to build an empirical distribution, and
    derives upper/lower significance envelopes. Subclasses must provide
    ``computeobserved`` and ``computepermutations``.

    Parameters
    ----------
    ntw : Network
        Network on which the point pattern is constrained.
    pointpattern : PointPattern
        Observed point pattern snapped to the network.
    nsteps : int
        Number of distance bands at which the statistic is evaluated.
    permutations : int
        Number of simulated point patterns used to build the envelopes.
    threshold : float
        Two-sided significance level used to scale the envelopes.
    distirbution : str
        Simulation distribution name, 'uniform' or 'poisson'.
        (The misspelled name is kept for interface compatibility.)
    lowerbound, upperbound : float or None
        Distance range of the statistic; inferred from the data when None.
    """

    def __init__(self, ntw, pointpattern, nsteps=10,
                 permutations=99, threshold=0.5,
                 distirbution='poisson',
                 lowerbound=None, upperbound=None):
        self.ntw = ntw
        self.pointpattern = pointpattern
        self.nsteps = nsteps
        self.permutations = permutations
        self.threshold = threshold

        self.distirbution = distirbution
        self.validatedistribution()

        # One row of simulated statistic values per permutation.
        self.sim = np.empty((permutations, nsteps))
        self.npts = self.pointpattern.npoints

        self.lowerbound = lowerbound
        self.upperbound = upperbound

        # Compute the statistic for the observed and permuted patterns.
        self.computeobserved()
        self.computepermutations()

        # Compute the envelope vectors from the simulations.
        self.computeenvelope()

    def validatedistribution(self):
        """Raise AssertionError if the requested distribution is unsupported."""
        valid_distributions = ['uniform', 'poisson']
        assert self.distirbution in valid_distributions, \
            "Distribution not in {}".format(valid_distributions)

    def computeenvelope(self):
        """Scale the pointwise extremes of the simulations into envelopes."""
        upper = 1.0 - self.threshold / 2.0
        lower = self.threshold / 2.0

        self.upperenvelope = np.nanmax(self.sim, axis=0) * upper
        self.lowerenvelope = np.nanmin(self.sim, axis=0) * lower

    def setbounds(self, nearest):
        """Default the distance range to [0, max(nearest)] when unset."""
        if self.lowerbound is None:
            self.lowerbound = 0
        if self.upperbound is None:
            self.upperbound = np.nanmax(nearest)
+
class NetworkG(NetworkBase):
    """
    Network-constrained G statistic (nearest-neighbor distance
    distribution). The observed statistic and simulation envelope are
    driven by the NetworkBase constructor.
    """

    def computeobserved(self):
        # Nearest-neighbor distance for every observed point.
        distances = self.ntw.allneighbordistances(self.pointpattern)
        nearest = np.nanmin(distances, axis=1)
        self.setbounds(nearest)
        self.xaxis, self.observed = gfunction(nearest,
                                              self.lowerbound,
                                              self.upperbound,
                                              nsteps=self.nsteps)

    def computepermutations(self):
        # Fill one row of self.sim per simulated point pattern.
        for i in xrange(self.permutations):
            randompattern = self.ntw.simulate_observations(
                self.npts, distribution=self.distirbution)
            nearest = np.nanmin(
                self.ntw.allneighbordistances(randompattern), axis=1)
            _, self.sim[i] = gfunction(nearest,
                                       self.lowerbound,
                                       self.upperbound,
                                       nsteps=self.nsteps)
+
+
class NetworkK(NetworkBase):
    """
    Network-constrained K function.

    The intensity ``lam`` is estimated as the number of points divided by
    the total length of all network edges.
    """

    def computeobserved(self):
        nearest = self.ntw.allneighbordistances(self.pointpattern)
        self.setbounds(nearest)

        # Global intensity: points per unit of total network length.
        total_length = np.sum(np.array(self.ntw.edge_lengths.values()))
        self.lam = self.npts / total_length
        self.xaxis, self.observed = kfunction(nearest,
                                              self.upperbound,
                                              self.lam,
                                              nsteps=self.nsteps)

    def computepermutations(self):
        # Fill one row of self.sim per simulated point pattern.
        for i in xrange(self.permutations):
            randompattern = self.ntw.simulate_observations(
                self.npts, distribution=self.distirbution)
            nearest = self.ntw.allneighbordistances(randompattern)
            _, self.sim[i] = kfunction(nearest,
                                       self.upperbound,
                                       self.lam,
                                       nsteps=self.nsteps)
+
class NetworkF(NetworkBase):
    """
    Network-constrained F function (empty-space function).

    Measures nearest-neighbor distances from a simulated reference point
    pattern to the observed point pattern, which requires computing a
    distance matrix between two distinct patterns.
    """

    def computeobserved(self):
        # Random reference pattern with the same number of points.
        self.fsim = self.ntw.simulate_observations(self.npts)
        # Nearest-neighbor distances from the simulated to the observed.
        nearest = np.nanmin(
            self.ntw.allneighbordistances(self.fsim, self.pointpattern),
            axis=1)
        self.setbounds(nearest)
        observedx, observedy = ffunction(nearest, self.lowerbound,
                                         self.upperbound,
                                         nsteps=self.nsteps, npts=self.npts)
        self.observed = observedy
        self.xaxis = observedx

    def computepermutations(self):
        for p in xrange(self.permutations):
            # BUG FIX: was ``self.distribution``; NetworkBase stores the
            # attribute as ``self.distirbution``, so the old name raised
            # AttributeError at runtime.
            sim = self.ntw.simulate_observations(
                self.npts, distribution=self.distirbution)
            nearest = np.nanmin(
                self.ntw.allneighbordistances(sim, self.fsim), axis=1)
            simx, simy = ffunction(nearest, self.lowerbound, self.upperbound,
                                   self.npts, nsteps=self.nsteps)
            self.sim[p] = simy
+
def kfunction(nearest, upperbound, intensity, nsteps=10):
    """
    Compute a network-constrained K function.

    Parameters
    ----------
    nearest : ndarray
        Vector of nearest-neighbor distances.
    upperbound : float
        Maximum distance band.
    intensity : float
        Point intensity used to scale the cumulative counts.
    nsteps : int
        Number of distance bands.

    Returns
    -------
    x : ndarray
        Distance band endpoints (0 through upperbound).
    y : ndarray
        Cumulative count of distances <= each band, scaled by 1/intensity.
    """
    # The previous implementation computed an unused ``nobs`` local.
    x = np.linspace(0, upperbound, nsteps)
    y = np.empty(len(x))
    for i, s in enumerate(x):
        # Count observations whose nearest neighbor is within distance s.
        y[i] = len(nearest[nearest <= s])
    y *= (intensity ** -1)
    return x, y
+
def ffunction(nearest, lowerbound, upperbound, npts, nsteps=10):
    """
    Compute a network-constrained F function.

    Parameters
    ----------
    nearest : ndarray
        Vector of nearest-neighbor distances from a reference pattern to
        the observed pattern.
    lowerbound, upperbound : float
        Range of distance bands.
    npts : int
        Number of points; denominator of the cumulative fraction.
    nsteps : int
        Number of distance bands.

    Returns
    -------
    x, y : ndarray
        Distance bands and the cumulative fraction of distances <= each
        band.
    """
    # The previous implementation computed an unused ``nobs`` local and
    # special-cased cnt == 0, which already yields 0.0.
    x = np.linspace(lowerbound, upperbound, nsteps)
    nearest = np.sort(nearest)
    y = np.empty(len(x))
    for i, r in enumerate(x):
        y[i] = len(nearest[nearest <= r]) / float(npts)
    return x, y
+
def gfunction(nearest, lowerbound, upperbound, nsteps=10):
    """
    Compute a G function: the cumulative distribution of nearest-neighbor
    distances.

    Parameters
    ----------
    nearest : ndarray
        Vector of nearest-neighbor distances.
    lowerbound, upperbound : float
        Range of distance bands.
    nsteps : int
        Number of distance bands.

    Returns
    -------
    x, y : ndarray
        Distance bands and the fraction of observations whose nearest
        neighbor lies within each band.

    Notes
    -----
    The original docstring documented parameters this function does not
    take (permutations, threshold, envelope, poisson) and omitted the
    distance bounds; it has been corrected.
    """
    nobs = len(nearest)
    x = np.linspace(lowerbound, upperbound, nsteps)
    nearest = np.sort(nearest)

    y = np.empty(len(x))
    for i, r in enumerate(x):
        cnt = len(nearest[nearest <= r])
        y[i] = cnt / float(nobs) if cnt > 0 else 0.0
    return x, y
diff --git a/pysal/network/network.py b/pysal/network/network.py
new file mode 100644
index 0000000..61ea8b0
--- /dev/null
+++ b/pysal/network/network.py
@@ -0,0 +1,1162 @@
+from collections import defaultdict, OrderedDict
+import math
+import os
+import cPickle
+import copy
+
+import numpy as np
+import pysal as ps
+from pysal.weights.util import get_ids
+
+from analysis import NetworkG, NetworkK, NetworkF
+import util
+
+__all__ = ["Network", "PointPattern", "NetworkG", "NetworkK", "NetworkF"  ]
+
+
+class Network:
+
+    """
+    Spatially constrained network representation and analytical functionality.
+
+    Parameters
+    -----------
+    in_shp : string
+             A topoligically correct input shapefile
+
+    Attributes
+    ----------
+    in_shp : string
+             input shapefile name
+
+    adjacencylist : list
+                    of lists storing node adjacency
+
+    nodes : dict
+            key are tuple of node coords and value is the node ID
+
+    edge_lengths : dict
+                   key is a tuple of sorted node IDs representing an edge
+                   value is the length
+
+    pointpatterns : dict
+                    key is a string name of the pattern
+                    value is a point pattern class instance
+
+    node_coords : dict
+                  key is th node ID and value are the (x,y) coordinates
+                  inverse to nodes
+
+    edges : list
+            of edges, where each edge is a sorted tuple of node IDs
+
+    node_list : list
+                node IDs
+
+    alldistances : dict
+                   key is the node ID
+                   value is a list of all distances from the source to all destinations
+
+    Examples
+    --------
+
+    Instantiate an instance of a network
+
+    >>> ntw = network.Network(ps.examples.get_path('geodanet/streets.shp'))
+
+    Snap point observations to the network with attribute information
+
+    >>> ntw.snapobservations(ps.examples.get_path('geodanet/crimes.shp'), 'crimes', attribute=True)
+
+    And without attribute information
+
+    >>> ntw.snapobservations(ps.examples.get_path('geodanet/schools.shp'), 'schools', attribute=False)
+
+    """
+
    def __init__(self, in_shp=None):
        # When in_shp is None an empty, uninitialized Network is created
        # (presumably for rebuilding state from serialized data — TODO confirm).
        if in_shp:
            self.in_shp = in_shp

            # node id -> list of adjacent node ids
            self.adjacencylist = defaultdict(list)
            # (x, y) vertex tuple -> node id
            self.nodes = {}
            # sorted (node id, node id) edge tuple -> length
            self.edge_lengths = {}
            self.edges = []

            # pattern name -> PointPattern; populated by snapobservations()
            self.pointpatterns = {}

            self._extractnetwork()
            # node id -> (x, y); the inverse mapping of self.nodes
            self.node_coords = dict((value, key) for key, value in self.nodes.iteritems())

            #This is a spatial representation of the network.
            self.edges = sorted(self.edges)

            #Extract the graph
            self.extractgraph()

            self.node_list = sorted(self.nodes.values())
+
+    def _extractnetwork(self):
+        """
+        Used internally, to extract a network from a polyline shapefile
+        """
+        nodecount = 0
+        shps = ps.open(self.in_shp)
+        for shp in shps:
+            vertices = shp.vertices
+            for i, v in enumerate(vertices[:-1]):
+                try:
+                    vid = self.nodes[v]
+                except:
+                    self.nodes[v] = vid = nodecount
+                    nodecount += 1
+                try:
+                    nvid = self.nodes[vertices[i+1]]
+                except:
+                    self.nodes[vertices[i+1]] = nvid = nodecount
+                    nodecount += 1
+
+                self.adjacencylist[vid].append(nvid)
+                self.adjacencylist[nvid].append(vid)
+
+                #Sort the edges so that mono-directional keys can be stored.
+                edgenodes = sorted([vid, nvid])
+                edge = tuple(edgenodes)
+                self.edges.append(edge)
+                length = util.compute_length(v, vertices[i+1])
+                self.edge_lengths[edge] = length
+
    def extractgraph(self):
        """
        Using the existing network representation, create a graph based representation,
        by removing all nodes with neighbor incidence of two.  That is, we assume these
        nodes are bridges between nodes with higher incidence.
        """
        self.graphedges = []
        self.edge_to_graph = {}
        self.graph_lengths = {}

        #Find all nodes with cardinality 2
        segment_nodes = []
        for k, v in self.adjacencylist.iteritems():
            #len(v) == 1 #cul-de-sac
            #len(v) == 2 #bridge segment
            #len(v) > 2 #intersection
            if len(v) == 2:
                segment_nodes.append(k)

        #Start with a copy of the spatial representation and iteratively
        # remove edges deemed to be segments
        self.graphedges = copy.deepcopy(self.edges)
        self.graph_lengths = copy.deepcopy(self.edge_lengths)
        self.graph_to_edges = {}  #Mapping all the edges contained within a single graph represented edge

        # Group adjacent degree-2 nodes into maximal chains ("bridges").
        # NOTE(review): segment_nodes is mutated while being iterated; this
        # relies on chain members consumed by the inner while-loop no longer
        # being visited by the outer for-loop — confirm this is intended.
        bridges = []
        for s in segment_nodes:
            bridge = [s]
            neighbors = self._yieldneighbor(s, segment_nodes, bridge)
            while neighbors:
                cnode = neighbors.pop()
                segment_nodes.remove(cnode)
                bridge.append(cnode)
                newneighbors = self._yieldneighbor(cnode, segment_nodes, bridge)
                neighbors += newneighbors
            bridges.append(bridge)

        for bridge in bridges:
            if len(bridge) == 1:
                # A single degree-2 node: replace its two incident edges with
                # one new edge joining its two neighbors.
                n = self.adjacencylist[bridge[0]]
                newedge = tuple(sorted([n[0], n[1]]))
                #Identify the edges to be removed
                e1 = tuple(sorted([bridge[0], n[0]]))
                e2 = tuple(sorted([bridge[0], n[1]]))
                #Remove from the graph
                self.graphedges.remove(e1)
                self.graphedges.remove(e2)
                #Remove from the edge lengths
                length_e1 = self.edge_lengths[e1]
                length_e2 = self.edge_lengths[e2]
                self.graph_lengths.pop(e1, None)
                self.graph_lengths.pop(e2, None)
                self.graph_lengths[newedge] = length_e1 + length_e2
                #Update the pointers
                self.graph_to_edges[e1] = newedge
                self.graph_to_edges[e2] = newedge
            else:
                # A chain of degree-2 nodes: collapse the whole chain into a
                # single edge between the chain's two exterior endpoints.
                cumulative_length = 0
                startend = {}   # chain endpoint -> exterior neighbor
                redundant = set([])  # edges interior to the chain, to be removed
                for b in bridge:
                    for n in self.adjacencylist[b]:
                        if n not in bridge:
                            startend[b] = n
                        else:
                            redundant.add(tuple(sorted([b,n])))

                newedge = tuple(sorted(startend.values()))
                for k, v in startend.iteritems():
                    redundant.add(tuple(sorted([k,v])))

                for r in redundant:
                    self.graphedges.remove(r)
                    cumulative_length += self.edge_lengths[r]
                    self.graph_lengths.pop(r, None)
                    self.graph_to_edges[r] = newedge
                self.graph_lengths[newedge] = cumulative_length

            self.graphedges.append(newedge)
        self.graphedges = sorted(self.graphedges)
+
+    def _yieldneighbor(self, node, segment_nodes, bridge):
+        """
+        Used internally, this method traverses a bridge segement
+        to find the source and destination nodes.
+        """
+        n = []
+        for i in self.adjacencylist[node]:
+            if i in segment_nodes and i not in bridge:
+                n.append(i)
+        return n
+
+    def contiguityweights(self, graph=True, weightings=None):
+        """
+        Create a contiguity based W object
+
+        Parameters
+        ----------
+        graph : boolean
+                {True, False } controls whether the W is generated using the spatial
+                representation or the graph representation
+
+        weightings : dict
+                     of lists of weightings for each edge
+
+        Returns
+        -------
+         : W
+           A PySAL W Object representing the binary adjacency of the network
+
+        Examples
+        --------
+        >>> w = ntw.contiguityweights(graph=False)
+
+        Using the W object, access to ESDA functionality is provided.  First,
+        a vector of attributes is created for all edges with observations.
+
+        >>> w = ntw.contiguityweights(graph=False)
+        >>> edges = w.neighbors.keys()
+        >>> y = np.zeros(len(edges))
+        >>> for i, e in enumerate(edges):
+        >>>     if e in counts.keys():
+        >>>         y[i] = counts[e]
+
+        Next, a standard call ot Moran is made and the result placed into `res`
+
+        >>> res = ps.esda.moran.Moran(y, ntw.w, permutations=99)
+
+        """
+
+        neighbors = {}
+        neighbors = OrderedDict()
+
+        if graph:
+            edges = self.graphedges
+        else:
+            edges = self.edges
+
+        if weightings:
+            weights = {}
+        else:
+            weights = None
+
+        for key in edges:
+            neighbors[key] = []
+            if weightings:
+                weights[key] = []
+
+            for neigh in edges:
+                if key == neigh:
+                    continue
+                if key[0] == neigh[0] or key[0] == neigh[1] or key[1] == neigh[0] or key[1] == neigh[1]:
+                    neighbors[key].append(neigh)
+                    if weightings:
+                        weights[key].append(weightings[neigh])
+                #TODO: Add a break condition - everything is sorted, so we know when we have stepped beyond a possible neighbor.
+                #if key[1] > neigh[1]:  #NOT THIS
+                    #break
+
+        return ps.weights.W(neighbors, weights=weights)
+
+    def distancebandweights(self, threshold):
+        """
+        Create distance based weights
+        """
+        try:
+            hasattr(self.alldistances)
+        except:
+            self.node_distance_matrix()
+
+        neighbor_query = np.where(self.distancematrix < threshold)
+        neighbors = defaultdict(list)
+        for i, n in enumerate(neighbor_query[0]):
+            neigh = neighbor_query[1][i]
+            if n != neigh:
+                neighbors[n].append(neighbor_query[1][i])
+
+        self.w = ps.weights.W(neighbors)
+
+    def snapobservations(self, shapefile, name, idvariable=None, attribute=None):
+        """
+        Snap a point pattern shapefile to this network object.  The point pattern
+        is the stored in the network.pointpattern['key'] attribute of the network
+        object.
+
+        Parameters
+        ----------
+        shapefile : str
+                    The PATH to the shapefile
+
+        name : str
+               Name to be assigned to the point dataset
+
+        idvariable : str
+                     Column name to be used as ID variable
+
+        attribute : bool
+                    Defines whether attributes should be extracted
+
+        Returns
+        -------
+
+        """
+
+        self.pointpatterns[name] = PointPattern(shapefile, idvariable=idvariable, attribute=attribute)
+        self._snap_to_edge(self.pointpatterns[name])
+
+    def compute_distance_to_nodes(self, x, y, edge):
+        """
+        Given an observation on a network edge, return the distance to the two
+        nodes that bound that end.
+
+        Parameters
+        ----------
+        x : float
+            x-coordinate of the snapped point
+
+        y : float
+            y-coordiante of the snapped point
+
+        edge : tuple
+              (node0, node1) representation of the network edge
+
+        Returns
+        -------
+        d1 : float
+             the distance to node0, always the node with the lesser id
+
+        d2 : float
+             the distance to node1, always the node with the greater id
+        """
+
+        d1 = util.compute_length((x,y), self.node_coords[edge[0]])
+        d2 = util.compute_length((x,y), self.node_coords[edge[1]])
+        return d1, d2
+
    def _snap_to_edge(self, pointpattern):
        """
        Used internally to snap point observations to network edges.

        Parameters
        -----------
        pointpattern : obj
                       PySAL Point Pattern Object

        Returns
        -------
        obs_to_edge : dict
                      with edge as key and list of points as value

        edge_to_obs : dict
                      with point id as key and edge tuple as value

        dist_to_node : dict
                       with edge as key and tuple of distances to nodes as value
        """

        obs_to_edge = {}
        dist_to_node = {}

        pointpattern.snapped_coordinates = {}

        for pt_index, point in pointpattern.points.iteritems():
            x0 = point['coordinates'][0]
            y0 = point['coordinates'][1]

            # distance -> candidate index; used to rank edges by proximity
            d = {}
            # candidate index -> (xi, xi1, yi, yi1, k, edge)
            vectors = {}
            c = 0

            #Components of this for loop can be pre computed and cached, like denom to distance =
            for edge in self.edges:
                xi = self.node_coords[edge[0]][0]
                yi = self.node_coords[edge[0]][1]
                xi1 = self.node_coords[edge[1]][0]
                yi1 = self.node_coords[edge[1]][1]

                # Perpendicular distance from the observation to the infinite
                # line through the edge's two nodes.
                # NOTE(review): denom is zero if the edge's nodes coincide,
                # which would raise ZeroDivisionError — confirm degenerate
                # edges cannot occur.
                num = ((yi1 - yi)*(x0-xi)-(xi1-xi)*(y0-yi))
                denom = ((yi1-yi)**2 + (xi1-xi)**2)
                k = num / float(denom)
                distance = abs(num) / math.sqrt(((yi1-yi)**2 + (xi1-xi)**2))
                vectors[c] = (xi, xi1, yi, yi1,k,edge)
                d[distance] = c
                c += 1

            # Candidate edges in increasing perpendicular-distance order;
            # SortedEdges supports next_key() lookups used below.
            min_dist = SortedEdges(sorted(d.items()))

            for dist, vector_id in min_dist.iteritems():
                value = vectors[vector_id]
                xi = value[0]
                xi1 = value[1]
                yi = value[2]
                yi1 = value[3]
                k = value[4]
                edge = value[5]

                #Okabe Method
                x = x0 - k * (yi1 - yi)
                y = y0 + k * (xi1 - xi)

                #Compute the distance from the new point to the nodes
                d1, d2 = self.compute_distance_to_nodes(x, y, edge)

                # NOTE(review): Python precedence parses this as
                # (xi <= x <= xi1) or ((xi1 <= x <= xi) and (yi <= y <= yi1))
                # or (yi1 <= y <= yi); the intent appears to be
                # (x between xi and xi1) and (y between yi and yi1) — confirm.
                if xi <= x <= xi1 or xi1 <= x <= xi and yi <= y <= yi1 or yi1 <=y <= yi:
                    #print "{} intersections edge {} at {}".format(pt_index, edge, (x,y))
                    #We are assuming undirected - this should never be true.
                    if edge not in obs_to_edge.keys():
                        obs_to_edge[edge] = {pt_index: (x,y)}
                    else:
                        obs_to_edge[edge][pt_index] =  (x,y)
                    dist_to_node[pt_index] = {edge[0]:d1, edge[1]:d2}
                    pointpattern.snapped_coordinates[pt_index] = (x,y)

                    break
                else:
                    #either pi or pi+1 are the nearest point on that edge.
                    #If this point is closer than the next distance, we can break, the
                    # observation intersects the node with the shorter
                    # distance.
                    pi = (xi, yi)
                    pi1 = (xi1, yi1)
                    p0 = (x0,y0)
                    #Maybe this call to ps.cg should go as well - as per the call in the class above
                    dist_pi = ps.cg.standalone.get_points_dist(p0, pi)
                    dist_pi1 = ps.cg.standalone.get_points_dist(p0, pi1)

                    if dist_pi < dist_pi1:
                        node_dist = dist_pi
                        (x,y) = pi
                    else:
                        node_dist = dist_pi1
                        (x,y) = pi1

                    d1, d2 = self.compute_distance_to_nodes(x, y, edge)

                    # Snap to the nearer node only if no remaining candidate
                    # edge could be closer than that node.
                    if node_dist < min_dist.next_key(dist):
                        if edge not in obs_to_edge.keys():
                            obs_to_edge[edge] = {pt_index: (x, y)}
                        else:
                            obs_to_edge[edge][pt_index] =  (x, y)
                        dist_to_node[pt_index] = {edge[0]:d1, edge[1]:d2}
                        pointpattern.snapped_coordinates[pt_index] = (x,y)
                        break

        # Invert the edge mapping: each bounding node of an edge maps to the
        # ids of the points snapped onto that edge.
        obs_to_node = defaultdict(list)
        for k, v in obs_to_edge.iteritems():
            keys = v.keys()
            obs_to_node[k[0]] = keys
            obs_to_node[k[1]] = keys

        pointpattern.obs_to_edge = obs_to_edge
        pointpattern.dist_to_node = dist_to_node
        pointpattern.obs_to_node = obs_to_node
+
+    def count_per_edge(self, obs_on_network, graph=True):
+        """
+        Compute the counts per edge.
+
+        Parameters
+        ----------
+        obs_on_network : dict
+                         of observations on the network
+                         {(edge): {pt_id: (coords)}} or {edge: [(coord), (coord), (coord)]}
+        Returns
+        -------
+        counts: dict {(edge):count}
+
+        Example
+        -------
+
+        Note that this passes the obs_to_edge attribute of a point pattern
+        snapped to the network.
+
+        >>> counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge,
+                            graph=False)
+
+        """
+        counts = {}
+        if graph:
+            for key, observations in obs_on_network.iteritems():
+                cnt = len(observations)
+                if key in self.graph_to_edges.keys():
+                    key = self.graph_to_edges[key]
+                try:
+                    counts[key] += cnt
+                except:
+                    counts[key] = cnt
+        else:
+            for key in obs_on_network.iterkeys():
+                counts[key] = len(obs_on_network[key])
+        return counts
+
+    def _newpoint_coords(self, edge, distance):
+        """
+        Used internally to compute new point coordinates during snapping
+        """
+        x1 = self.node_coords[edge[0]][0]
+        y1 = self.node_coords[edge[0]][1]
+        x2 = self.node_coords[edge[1]][0]
+        y2 = self.node_coords[edge[1]][1]
+        m = (y2 - y1) / (x2 - x1)
+        if x1 > x2:
+            x0 = x1 - distance / math.sqrt(1 + m**2)
+        elif x1 < x2:
+            x0 = x1 + distance / math.sqrt(1 + m**2)
+        y0 = m * (x0 - x1) + y1
+        return x0, y0
+
+    def simulate_observations(self, count, distribution='uniform'):
+        """
+        Generate a simulated point pattern on the network.
+
+        Parameters
+        ----------
+        count : integer
+                number of points to create or mean of the distribution
+                if not 'uniform'
+
+        distribution : string
+                       {'uniform', 'poisson'} distribution of random points
+
+        Returns
+        -------
+        random_pts : dict
+                     key is the edge tuple
+                     value is a list of new point coordinates
+
+        Example
+        -------
+
+        >>> npts = ntw.pointpatterns['crimes'].npoints
+        >>> sim = ntw.simulate_observations(npts)
+        >>> sim
+        <network.SimulatedPointPattern instance at 0x1133d8710>
+
+        """
+        simpts = SimulatedPointPattern()
+
+        #Cumulative Network Length
+        edges = []
+        lengths = np.zeros(len(self.edge_lengths))
+        for i, key in enumerate(self.edge_lengths.iterkeys()):
+            edges.append(key)
+            lengths[i] = self.edge_lengths[key]
+        stops = np.cumsum(lengths)
+        totallength = stops[-1]
+
+        if distribution is 'uniform':
+            nrandompts = np.random.uniform(0, totallength, size=(count,))
+        elif distribution is 'poisson':
+            nrandompts = np.random.uniform(0, totallength, size=(np.random.poisson(count),))
+
+        for i, r in enumerate(nrandompts):
+            idx = np.where(r < stops)[0][0]
+            assignment_edge = edges[idx]
+            distance_from_start = stops[idx] - r
+            #Populate the coordinates dict
+            x0, y0 = self._newpoint_coords(assignment_edge, distance_from_start)
+            simpts.snapped_coordinates[i] = (x0, y0)
+            simpts.obs_to_node[assignment_edge[0]].append(i)
+            simpts.obs_to_node[assignment_edge[1]].append(i)
+
+            #Populate the distance to node
+            simpts.dist_to_node[i] = {assignment_edge[0] : distance_from_start,
+                    assignment_edge[1] : self.edge_lengths[edges[idx]] - distance_from_start}
+
+            simpts.points = simpts.snapped_coordinates
+
+        return simpts
+
+    def enum_links_node(self, v0):
+        """
+        Returns the edges (links) around node
+
+        Parameters
+        -----------
+        v0 : int
+             node id
+
+        Returns
+        -------
+        links : list
+                list of tuple edge adjacent to the node
+        """
+        links = []
+        neighbornodes =  self.adjacencylist[v0]
+        for n in neighbornodes:
+            links.append(tuple(sorted([n, v0])))
+        return links
+
    def node_distance_matrix(self):
        """
        Compute shortest-path distances between all pairs of network nodes.

        Side effects
        ------------
        Sets self.distancematrix, an (n, n) array of shortest-path lengths,
        and self.alldistances, a dict keyed by node id whose value is a
        (distances, predecessor tree) tuple from Dijkstra's algorithm.
        """
        self.alldistances = {}
        nnodes = len(self.node_list)
        self.distancematrix = np.empty((nnodes, nnodes))
        for node in self.node_list:
            # n=inf: run Dijkstra until all reachable nodes are settled.
            distance, pred = util.dijkstra(self, self.edge_lengths, node, n=float('inf'))
            pred = np.array(pred)
            tree = util.generatetree(pred)
            self.alldistances[node] = (distance, tree)
            self.distancematrix[node] = distance
+
    def allneighbordistances(self, sourcepattern, destpattern=None):
        """
        Compute either all distances between i and j in a single point pattern
        or all distances between each i from a source pattern and all j
        from a destination pattern

        Parameters
        ----------
        sourcepattern : str
                        The key of a point pattern snapped to the network.

        destpattern :str
                    (Optional) The key of a point pattern snapped to the network.

        Returns
        -------
        nearest : array (n,n)
                  An array or shape n,n storing distances between all points

        """

        # Lazily compute and cache the node-to-node shortest path matrix.
        if not hasattr(self,'alldistances'):
            self.node_distance_matrix()



        # NOTE(review): the code below indexes `nearest` directly with point
        # ids, so it assumes ids are 0..n-1; it also assumes .keys() and
        # .values() of dist_to_node enumerate in matching order — confirm.
        src_indices = sourcepattern.points.keys()
        nsource_pts = len(src_indices)
        dist_to_node = sourcepattern.dist_to_node
        if destpattern == None:
            destpattern = sourcepattern
        dest_indices = destpattern.points.keys()
        ndest_pts = len(dest_indices)

        searchpts = copy.deepcopy(dest_indices)
        nearest  = np.empty((nsource_pts, ndest_pts))
        nearest[:] = np.inf

        # point id -> (node0, node1) of the edge the point is snapped to
        searchnodes = {}
        for s in searchpts:
            e1, e2 = dist_to_node[s].keys()
            searchnodes[s] = (e1, e2)

        for p1 in src_indices:
            #Get the source nodes and dist to source nodes
            source1, source2 = searchnodes[p1]
            set1 = set(searchnodes[p1])
            # distance from node1 to p, distance from node2 to p
            sdist1, sdist2 = dist_to_node[p1].values()

            # Distances are symmetric, so each pair is visited once.
            searchpts.remove(p1)
            for p2 in searchpts:
                dest1, dest2 = searchnodes[p2]
                set2 = set(searchnodes[p2])
                if set1 == set2: #same edge
                    # Both points snapped to the same edge: euclidean distance
                    # between the snapped coordinates is the network distance.
                    x1,y1 = sourcepattern.snapped_coordinates[p1]
                    x2,y2 = destpattern.snapped_coordinates[p2]
                    xd = x1-x2
                    yd = y1-y2
                    nearest[p1,p2] = np.sqrt(xd*xd + yd*yd)
                    nearest[p2,p1] = nearest[p1,p2]

                else:
                    # Otherwise the shortest route passes through one of the
                    # four (source node, destination node) combinations.
                    ddist1, ddist2 = dist_to_node[p2].values()
                    d11 = self.alldistances[source1][0][dest1]
                    d21 = self.alldistances[source2][0][dest1]
                    d12 = self.alldistances[source1][0][dest2]
                    d22 = self.alldistances[source2][0][dest2]

                    # find shortest distance from path passing through each of two origin nodes
                    # to first destination node
                    sd_1 = d11 + sdist1
                    sd_21 = d21 + sdist2
                    if sd_1 > sd_21:
                        sd_1 = sd_21
                    # now add point to node one distance on destination edge
                    len_1 = sd_1 + ddist1


                    # repeat but now for paths entering at second node of second edge
                    sd_2 = d12 + sdist1
                    sd_22 = d22 + sdist2
                    b = 0
                    if sd_2 > sd_22:
                        sd_2 = sd_22
                        b = 1
                    len_2 = sd_2 + ddist2

                    # now find shortest length path between the point 1 on edge 1 and
                    # point 2 on edge 2, and assign
                    sp_12 = len_1
                    if len_1 > len_2:
                        sp_12 = len_2
                    nearest[p1, p2] =  sp_12
                    nearest[p2, p1] = sp_12
                    #print p1,p2, sp_12
        # Self-distance is undefined, not zero, by convention here.
        np.fill_diagonal(nearest, np.nan)
        return nearest
+
+    def nearestneighbordistances(self, sourcepattern, destpattern=None):
+        """
+        Compute the interpattern nearest neighbor distances or the intrapattern
+        nearest neight distances between a source pattern and a destination pattern.
+
+        Parameters
+        ----------
+        sourcepattern   str The key of a point pattern snapped to the network.
+
+        destpattern      str (Optional) The key of a point pattern snapped to the network.
+
+        Returns
+        -------
+        nearest         ndarray (n,2) With column[:,0] containing the id of the nearest
+                        neighbor and column [:,1] containing the distance.
+        """
+
+        if not sourcepattern in self.pointpatterns.keys():
+            print "Key Error: Available point patterns are {}".format(self.pointpatterns.key())
+            return
+
+        if not hasattr(self,'alldistances'):
+            self.node_distance_matrix()
+
+        print sourcepattern
+        pt_indices = self.pointpatterns[sourcepattern].points.keys()
+        dist_to_node = self.pointpatterns[sourcepattern].dist_to_node
+        nearest = np.zeros((len(pt_indices), 2), dtype=np.float32)
+        nearest[:,1] = np.inf
+
+        if destpattern == None:
+            destpattern = sourcepattern
+
+        searchpts = copy.deepcopy(pt_indices)
+
+
+        searchnodes = {}
+        for s in searchpts:
+            e1, e2 = dist_to_node[s].keys()
+            searchnodes[s] = (e1, e2)
+
+        for p1 in pt_indices:
+            #Get the source nodes and dist to source nodes
+            source1, source2 = searchnodes[p1]
+            sdist1, sdist2 = dist_to_node[p1].values()
+
+            searchpts.remove(p1)
+            for p2 in searchpts:
+                dest1, dest2 = searchnodes[p2]
+                ddist1, ddist2 = dist_to_node[p2].values()
+                source1_to_dest1 = sdist1 + self.alldistances[source1][0][dest1] + ddist1
+                source1_to_dest2 = sdist1 + self.alldistances[source1][0][dest2] + ddist2
+                source2_to_dest1 = sdist2 + self.alldistances[source2][0][dest1] + ddist1
+                source2_to_dest2 = sdist2 + self.alldistances[source2][0][dest2] + ddist2
+
+
+                if source1_to_dest1 < nearest[p1, 1]:
+                    nearest[p1, 0] = p2
+                    nearest[p1, 1] = source1_to_dest1
+                if source1_to_dest1 < nearest[p2, 1]:
+                    nearest[p2, 0] = p1
+                    nearest[p2, 1] = source1_to_dest1
+
+                if source1_to_dest2 < nearest[p1, 1]:
+                    nearest[p1, 0] = p2
+                    nearest[p1, 1] = source1_to_dest2
+                if source1_to_dest1 < nearest[p2, 1]:
+                    nearest[p2, 0] = p1
+                    nearest[p2, 1] = source1_to_dest2
+
+                if source2_to_dest1 < nearest[p1, 1]:
+                    nearest[p1, 0] = p2
+                    nearest[p1, 1] = source2_to_dest1
+                if source2_to_dest1 < nearest[p2, 1]:
+                    nearest[p2, 0] = p1
+                    nearest[p2, 1] = source2_to_dest1
+
+                if source2_to_dest2 < nearest[p1, 1]:
+                    nearest[p1, 0] = p2
+                    nearest[p1, 1] = source2_to_dest2
+                if source2_to_dest2 < nearest[p2, 1]:
+                    nearest[p2, 0] = p1
+                    nearest[p2, 1] = source2_to_dest2
+
+        return nearest
+
    def NetworkF(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.2, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained F-Function

        Parameters
        ----------
        pointpattern : object
                       A PySAL point pattern object

        nsteps : int
                 The number of steps at which the count of the nearest
                 neighbors is computed

        permutations : int
                       The number of permutations to perform (default 99)

        threshold : float
                    The level at which significance is computed.  0.5 would be 97.5% and 2.5%

        distribution : str
                       The distribution from which random points are sampled: uniform or poisson

        lowerbound : float
                     The lower bound at which the F-function is computed. (default 0)

        upperbound : float
                     The upper bound at which the F-function is computed.
                     Defaults to the maximum observed nearest neighbor distance.

        Returns
        -------
        NetworkF : object
                   A network F class instance

        """
        # Thin wrapper delegating to analysis.NetworkF with this network.
        return NetworkF(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
+
    def NetworkG(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.5, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained G-Function

        Parameters
        ----------
        pointpattern : object
                       A PySAL point pattern object

        nsteps : int
                 The number of steps at which the count of the nearest
                 neighbors is computed

        permutations : int
                       The number of permutations to perform (default 99)

        threshold : float
                    The level at which significance is computed.  0.5 would be 97.5% and 2.5%

        distribution : str
                       The distribution from which random points are sampled: uniform or poisson

        lowerbound : float
                     The lower bound at which the G-function is computed. (default 0)

        upperbound : float
                     The upper bound at which the G-function is computed.
                     Defaults to the maximum observed nearest neighbor distance.

        Returns
        -------
        NetworkG : object
                   A network G class object

        """

        # Thin wrapper delegating to analysis.NetworkG with this network.
        return NetworkG(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
+
    def NetworkK(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.5, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained K-Function

        Parameters
        ----------
        pointpattern : object
                       A PySAL point pattern object

        nsteps : int
                 The number of steps at which the count of the nearest
                 neighbors is computed

        permutations : int
                       The number of permutations to perform (default 99)

        threshold : float
                    The level at which significance is computed.
                    0.5 would be 97.5% and 2.5%

        distribution : str
                       The distribution from which random points are
                       sampled: uniform or poisson

        lowerbound : float
                     The lower bound at which the K-function is computed.
                     (default 0)

        upperbound : float
                     The upper bound at which the K-function is computed.
                     Defaults to the maximum observed nearest neighbor
                     distance.

        Returns
        -------
        NetworkK : object
                   A network K class object

        """
        return NetworkK(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
+
    def segment_edges(self, distance):
        """
        Segment all of the edges in the network at a fixed distance.

        Every edge longer than ``distance`` is split into consecutive
        pieces of length ``distance`` (the final piece holds the
        remainder); new nodes are inserted at each split point.  Edges
        shorter than ``distance`` are left untouched.

        Parameters
        -----------
        distance : float
                   The distance at which edges are split

        Returns
        -------
        sn : object
             A new PySAL Network object; self is not modified.

        Example
        -------

        >>> n200 = ntw.segment_edges(200.0)

        """

        #Build the result as a deep copy so the original network is
        # never mutated.
        sn = Network()
        sn.adjacencylist = copy.deepcopy(self.adjacencylist)
        sn.edge_lengths = copy.deepcopy(self.edge_lengths)
        sn.edges = set(copy.deepcopy(self.edges))
        sn.node_coords = copy.deepcopy(self.node_coords)
        sn.node_list = copy.deepcopy(self.node_list)
        sn.nodes = copy.deepcopy(self.nodes)
        sn.pointpatterns = copy.deepcopy(self.pointpatterns)
        sn.in_shp = self.in_shp

        #New node ids are allocated above the current maximum
        current_node_id = max(self.nodes.values())

        newedges = set()
        removeedges = set()
        for e in sn.edges:
            length = sn.edge_lengths[e]
            interval = distance

            totallength = 0
            currentstart = startnode = e[0]
            endnode = e[1]

            #If the edge will be segmented, remove the
            # current edge from the adjacency list
            if interval < length:
                sn.adjacencylist[e[0]].remove(e[1])
                sn.adjacencylist[e[1]].remove(e[0])
                sn.edge_lengths.pop(e, None)
                removeedges.add(e)
            else:
                continue

            #Walk along the edge, inserting a node every `interval` units
            while totallength < length:
                currentstop = current_node_id
                if totallength + interval > length:
                    #Final (remainder) piece terminates at the original
                    # end node; no new node is created.
                    currentstop = endnode
                    interval = length - totallength
                    totallength = length
                else:
                    current_node_id += 1
                    currentstop = current_node_id
                    totallength += interval

                    #Compute the new node coordinate
                    newx, newy = self._newpoint_coords(e, totallength)

                    #Update node_list
                    if currentstop not in sn.node_list:
                        sn.node_list.append(currentstop)

                    #Update nodes and node_coords
                    sn.node_coords[currentstop] = newx, newy
                    sn.nodes[(newx, newy)] = currentstop

                #Update the adjacencylist
                sn.adjacencylist[currentstart].append(currentstop)
                sn.adjacencylist[currentstop].append(currentstart)


                #Add the new edge to the edge dict
                #Iterating over this, so we need to add after iterating
                newedges.add(tuple(sorted([currentstart, currentstop])))

                #Modify edge_lengths
                sn.edge_lengths[tuple(sorted([currentstart, currentstop]))] = interval

                #Increment the start to the stop
                currentstart = currentstop

        sn.edges.update(newedges)
        sn.edges.difference_update(removeedges)
        sn.edges = list(sn.edges)
        #Update the point pattern snapping
        # NOTE(review): itervalues() is Python-2 only.
        for instance in sn.pointpatterns.itervalues():
            sn._snap_to_edge(instance)

        return sn
+
+    def savenetwork(self, filename):
+        """
+        Save a network to disk as a binary file
+
+        Parameters
+        ----------
+        filename : str
+                   The filename where the network should be saved.
+                   This should be a full PATH or the file is saved
+                   whereever this method is called from.
+
+        Example
+        --------
+
+        >>> ntw.savenetwork('mynetwork.pkl')
+
+        """
+        with open(filename, 'wb') as networkout:
+            cPickle.dump(self, networkout, protocol=2)
+
+    @staticmethod
+    def loadnetwork(filename):
+        with open(filename, 'rb') as networkin:
+            self = cPickle.load(networkin)
+
+        return self
+
+
class PointPattern():
    """
    A stub point pattern class used to store a point pattern.
    This class is monkey patched with network specific attributes
    when the points are snapped to a network.

    In the future this class may be replaced with a generic point
    pattern class.

    Parameters
    ----------
    shapefile : string
                input shapefile

    idvariable : string
                 field in the shapefile to use as an idvariable

    attribute : boolean
                {False, True} A flag to indicate whether all attributes
                are tagged to this class.

    Attributes
    ----------
    points : dict
             key is the point id
             value is a dict with 'coordinates' and 'properties'

    npoints : integer
              the number of points


    """
    def __init__(self, shapefile, idvariable=None, attribute=False):
        self.points = {}
        self.npoints = 0

        if idvariable:
            ids = get_ids(shapefile, idvariable)
        else:
            ids = None

        pts = ps.open(shapefile)

        #Open the companion dbf only when attributes were requested
        if attribute:
            dbname = os.path.splitext(shapefile)[0] + '.dbf'
            db = ps.open(dbname)
        else:
            db = None

        for i, pt in enumerate(pts):
            #Key by the user-supplied id when available, else by position;
            # the original 4-way if/elif chain collapses to two choices.
            key = ids[i] if ids else i
            properties = db[i] if db else None
            self.points[key] = {'coordinates': pt, 'properties': properties}

        pts.close()
        if db:
            db.close()
        #len(dict) rather than len(dict.keys()) - same value, no copy
        self.npoints = len(self.points)
+
+
class SimulatedPointPattern():
    """
    Struct style class mirroring the Point Pattern Class for simulated
    points.

    If the PointPattern class gains methods, it may make sense to make
    this a child of that class.

    This class is not intended to be used by the external user.

    Attributes
    ----------
    npoints : int
              number of simulated points (0 until populated)
    obs_to_edge : dict
                  observation to network edge lookup
    obs_to_node : defaultdict(list)
                  observation to network node lookup
    dist_to_node : dict
                   distances from each observation to edge end nodes
    snapped_coordinates : dict
                          coordinates of each point snapped to the network
    """
    def __init__(self):
        #Start empty; attributes are populated when simulated points
        # are snapped to a network.
        self.snapped_coordinates = {}
        self.dist_to_node = {}
        self.obs_to_node = defaultdict(list)
        self.obs_to_edge = {}
        self.npoints = 0
+
+
class SortedEdges(OrderedDict):
    #An OrderedDict of edges that supports walking keys in insertion order.
    def next_key(self, key):
        # Reach into OrderedDict's name-mangled internals: __map maps a
        # key to its [prev, next, key] doubly-linked-list node, so [1] is
        # the successor node and next[2] its key.
        # NOTE(review): this depends on the pure-Python OrderedDict
        # implementation (CPython 2); the C implementation in later
        # Pythons does not expose these attributes - confirm before
        # porting.
        next = self._OrderedDict__map[key][1]
        if next is self._OrderedDict__root:
            raise ValueError("{!r} is the last key.".format(key))
        return next[2]
    def first_key(self):
        # Iteration yields keys in insertion order, so the first yielded
        # key is the oldest; an empty dict falls through to the raise.
        for key in self: return key
        raise ValueError("No sorted edges remain.")
diff --git a/pysal/network/util.py b/pysal/network/util.py
new file mode 100644
index 0000000..52f77a7
--- /dev/null
+++ b/pysal/network/util.py
@@ -0,0 +1,155 @@
+from collections import OrderedDict
+import math
+import operator
+
+import numpy as np
+
+
def nearestneighborsearch(obs_to_node, alldistances, endnode, dist):
    """
    Given a node on a network which is tagged to an observation, find the
    nearest node which also has one or more observations.

    Parameters
    ----------
    obs_to_node : dict
                  key: node id, value: list of observation ids tagged to
                  that node
    alldistances : dict
                   key: node id, value: sequence whose first element maps
                   other node ids to network distance, sorted ascending
                   by distance
    endnode : int
              the network node to search from
    dist : float
           distance from the observation to endnode; added onto the
           node-to-node distance

    Returns
    -------
    nearest_obs : id of the nearest observation found
    nearest_node : node id that observation is tagged to
    nearest_node_distance : total network distance to that observation
    """
    searching = True
    #sorted dict of nodes by distance
    for k, v in alldistances[endnode][0].iteritems():
        #List of the neighbors tagged to the node
        possibleneighbors = obs_to_node[k]
        if possibleneighbors:
            for n in possibleneighbors:
                # NOTE(review): this compares an observation id (n) to a
                # distance (v); confirm whether the intent was to skip a
                # specific observation instead.
                if n == v:
                    continue
                else:
                    # Later list entries overwrite earlier ones, so the
                    # last qualifying observation at this node wins.
                    nearest_obs = n
                    nearest_node = k
                    nearest_node_distance = v + dist
                    searching = False
        if searching == False:
            break

    # NOTE(review): if no tagged node is ever found, the three names
    # below are unbound and this raises UnboundLocalError.
    return nearest_obs, nearest_node, nearest_node_distance
+
def compute_length(v0, v1):
    """
    Compute the euclidean distance between two points.

    Parameters
    ----------
    v0 : sequence
         point in the form x, y
    v1 : sequence
         point in the form x, y

    Returns
    --------
    float
        Euclidean distance between v0 and v1
    """
    #math.hypot is more robust against overflow/underflow than the
    # explicit sqrt of summed squares.
    return math.hypot(v0[0] - v1[0], v0[1] - v1[1])
+
+
def get_neighbor_distances(ntw, v0, l):
    """
    Collect the cost of every edge incident to a node.

    Parameters
    ----------
    ntw : object
          PySAL network object
    v0 : int
         node whose neighbors are sought
    l : dict
        mapping of edge (node pair) to cost, e.g. length

    Returns
    -------
    neighbors : dict
                key: neighboring node id, value: cost of the connecting
                edge
    """
    neighbors = {}
    for edge in ntw.enum_links_node(v0):
        #The neighbor is whichever endpoint of the edge is not v0
        other = edge[1] if edge[0] == v0 else edge[0]
        neighbors[other] = l[edge]
    return neighbors
+
+
def generatetree(pred):
    """
    Rebuild traversal paths from a predecessor list.

    Parameters
    ----------
    pred : sequence
           predecessor node id for each node (-1 marks the root or an
           unreached node)

    Returns
    -------
    tree : dict
           key: node id (> 0) appearing in pred, value: list of node ids
           walking from that node back toward the root
    """
    tree = {}
    for node in pred:
        #Walk the predecessor chain; ids <= 0 terminate the walk
        path = [node]
        current = node
        while current > 0:
            current = pred[current]
            if current > 0:
                path.append(current)
        #Only nodes with a positive id get an entry in the tree
        if node > 0:
            tree[node] = path
    return tree
+
+
def cumulativedistances(distance, tree):
    """
    Sum, for each destination in a shortest-path tree, the distances
    along its path, ordered ascending by cumulative distance.

    Parameters
    ----------
    distance : array
               per-node distance values, indexable by a list of node ids
               (e.g. a numpy array)
    tree : dict
           key: destination node id, value: list of node ids on its path

    Returns
    -------
    OrderedDict
        destination node id -> cumulative distance, sorted ascending by
        distance
    """
    distances = {}
    #dict.items() (instead of the Python-2-only iteritems) behaves
    # identically on Python 2 and keeps the helper portable.
    for node, path in tree.items():
        distances[node] = np.sum(distance[path])
    return OrderedDict(sorted(distances.items(), key=operator.itemgetter(1)))
+
+
def dijkstra(ntw, cost, node, n=float('inf')):
    """
    Compute the shortest path between a start node and
    all other nodes in the network.

    Parameters
    ----------
    ntw : object
          PySAL network object
    cost : dict
           cost per edge to travel, e.g. distance
    node : int
           start node ID
    n : int
        break point to stop iteration and return n neighbors
        (currently unused; kept for interface compatibility)

    Returns
    -------
    distance : list
               distances from node to all other nodes (inf if unreached)
    pred : numpy.ndarray
           preceding node id on the traversal route (-1 when none)
    """
    v0 = node
    #All nodes start unreached; the source is at distance 0
    distance = [float('inf') for x in ntw.node_list]
    distance[ntw.node_list.index(v0)] = 0
    pred = [-1 for x in ntw.node_list]
    unsettled = set()
    unsettled.add(v0)
    while len(unsettled) > 0:
        #Select the unsettled node with the smallest tentative distance;
        # min keeps the first minimum in iteration order, matching the
        # original explicit scan.
        v = min(unsettled, key=lambda candidate: distance[candidate])
        unsettled.remove(v)
        #Relax every edge incident to the current node
        neighbors = get_neighbor_distances(ntw, v, cost)
        for v1, indiv_cost in neighbors.items():
            if distance[v1] > distance[v] + indiv_cost:
                distance[v1] = distance[v] + indiv_cost
                pred[v1] = v
                unsettled.add(v1)
    #dtype=int: the np.int alias was removed in modern numpy releases
    return distance, np.array(pred, dtype=int)
+
+
def shortest_path(ntw, cost, start, end):
    """
    Trace the shortest path between two nodes on the network.

    Parameters
    ----------
    ntw : object
          PySAL network object
    cost : dict
           cost per edge to travel, e.g. distance
    start : int
            start node id
    end : int
          destination node id

    Returns
    -------
    tuple
        node ids from end back to start along the shortest route
    """
    distance, pred = dijkstra(ntw, cost, start)
    #Walk the predecessor chain backwards from the destination
    path = [end]
    node = pred[end]
    while node != start:
        path.append(node)
        node = pred[node]
    path.append(start)
    return tuple(path)
+
def nearest_neighbor_search(pt_indices, dist_to_node, obs_to_node, alldistances, snappedcoords):
    """
    Find, for each observation, the nearest other observation on the
    network.

    Parameters
    ----------
    pt_indices : list
                 observation (point) ids to search from
    dist_to_node : dict
                   key: point id, value: dict of snapped-edge end node ->
                   distance to that node
    obs_to_node : dict
                  key: node id, value: list of observation ids at that node
    alldistances : dict
                   key: node id, value: sequence whose first element maps
                   other node ids to network distance, sorted ascending
    snappedcoords : dict
                    key: point id, value: snapped coordinates (not used
                    in the computation)

    Returns
    -------
    nearest : numpy.ndarray
              one row per point: [nearest observation id, distance]
    """
    #One row per query point.  The original allocated shape (2, n) but
    # wrote nearest[i, 0]/nearest[i, 1], which raises IndexError for any
    # number of points other than two; (n, 2) matches the writes.
    nearest = np.empty((len(pt_indices), 2))

    for i, p1 in enumerate(pt_indices):
        #NOTE(review): assumes each dist_to_node entry holds exactly the
        # two end nodes of the snapped edge and that keys()/values()
        # iterate in matching order (true for back-to-back queries on a
        # plain dict).
        dist1, dist2 = dist_to_node[p1].values()
        endnode1, endnode2 = dist_to_node[p1].keys()

        #Candidate nearest neighbor reached via each edge end node
        nearest_obs1, nearest_node1, nearest_node_distance1 = nearestneighborsearch(obs_to_node, alldistances, endnode1, dist1)
        nearest_obs2, nearest_node2, nearest_node_distance2 = nearestneighborsearch(obs_to_node, alldistances, endnode2, dist2)

        #Keep whichever route yields the shorter total distance
        if nearest_node_distance2 <= nearest_node_distance1:
            nearest[i,0] = nearest_obs2
            nearest[i,1] = nearest_node_distance2
        else:
            nearest[i,0] = nearest_obs1
            nearest[i,1] = nearest_node_distance1

    return nearest
diff --git a/pysal/region/__init__.py b/pysal/region/__init__.py
new file mode 100644
index 0000000..1300f39
--- /dev/null
+++ b/pysal/region/__init__.py
@@ -0,0 +1,9 @@
+"""
+:mod:`region` -- Regionalization
+================================
+
+"""
+
+from maxp import *
+from randomregion import *
+from components import *
diff --git a/pysal/region/components.py b/pysal/region/components.py
new file mode 100644
index 0000000..7fb6398
--- /dev/null
+++ b/pysal/region/components.py
@@ -0,0 +1,160 @@
+"""
+Checking for connected components in a graph.
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu>"
+
+
+__all__ = ["check_contiguity"]
+
+from operator import lt
+
+
def is_component(w, ids):
    """Check if the set of ids form a single connected component

    Parameters
    ----------

    w   : spatial weights object

    ids : list
          identifiers of units that are tested to be a single connected
          component


    Returns
    -------

    True    : if the list of ids represents a single connected component

    False   : if the list of ids forms more than a single connected
              component

    """

    component_count = 0
    marks = {unit: 0 for unit in ids}
    stack = []
    for unit in ids:
        #A still-unmarked unit starts a new component
        if marks[unit] == 0:
            component_count += 1
            stack.append(unit)
            if component_count > 1:
                return False
        #Depth-first traversal marking everything reachable within ids
        while stack:
            current = stack.pop()
            marks[current] = component_count
            linked = [neighbor for neighbor in w.neighbors[current]
                      if neighbor in ids]
            for neighbor in linked:
                if marks[neighbor] == 0 and neighbor not in stack:
                    stack.append(neighbor)
    return True
+
+
def check_contiguity(w, neighbors, leaver):
    """Check if contiguity is maintained if leaver is removed from neighbors


    Parameters
    ----------

    w           : spatial weights object
                  simple contiguity based weights
    neighbors   : list
                  nodes that are to be checked if they form a single
                  connected component
    leaver      : id
                  a member of neighbors to check for removal


    Returns
    -------

    True        : if removing leaver from neighbors does not break
                  contiguity of the remaining set
    False       : if removing leaver from neighbors breaks contiguity

    Example
    -------

    Setup imports and a 25x25 spatial weights matrix on a 5x5 square
    region, then test removing various areas from a subset of the
    region's areas.

    >>> import pysal
    >>> w = pysal.lat2W(5, 5)
    >>> pysal.region.check_contiguity(w,[0,1,2,3,4],4)
    True
    >>> pysal.region.check_contiguity(w,[0,1,2,3,4],3)
    False
    >>> pysal.region.check_contiguity(w,[0,1,2,3,4],0)
    True
    >>> pysal.region.check_contiguity(w,[0,1,2,3,4],1)
    False
    >>>
    """

    #Work on a copy so the caller's list is not mutated
    remaining = list(neighbors)
    remaining.remove(leaver)
    return is_component(w, remaining)
+
+
class Graph(object):
    #A simple weighted graph supporting threshold-filtered
    # connected-component extraction.
    def __init__(self, undirected=True):
        # nodes: all node ids seen by add_edge
        self.nodes = set()
        # edges: adjacency map, node -> {neighbor: weight}
        self.edges = {}
        # cluster_lookup: node -> cluster id, copied into subgraphs
        self.cluster_lookup = {}
        # no_link: shared bookkeeping, referenced (not copied) by subgraphs
        self.no_link = {}
        # undirected: when True, add_edge mirrors every edge
        self.undirected = undirected

    def add_edge(self, n1, n2, w):
        # Register both endpoints and the weighted edge; mirror it for
        # undirected graphs.
        self.nodes.add(n1)
        self.nodes.add(n2)
        self.edges.setdefault(n1, {}).update({n2: w})
        if self.undirected:
            self.edges.setdefault(n2, {}).update({n1: w})

    def connected_components(self, threshold=0.9, op=lt):
        # Partition the graph into connected components, traversing only
        # edges whose weight satisfies op(weight, threshold) (default:
        # weight < threshold).  Returns a list of Graph subgraphs, or
        # None (with a warning) for directed graphs.
        if not self.undirected:
            warn = "Warning, connected _components not "
            warn += "defined for a directed graph"
            print warn
            return None
        else:
            nodes = set(self.nodes)
            components, visited = [], set()
            while len(nodes) > 0:
                connected, visited = self.dfs(
                    nodes.pop(), visited, threshold, op)
                connected = set(connected)
                for node in connected:
                    if node in nodes:
                        nodes.remove(node)
                # Build a subgraph restricted to this component's nodes
                # and the qualifying edges among them.
                subgraph = Graph()
                subgraph.nodes = connected
                subgraph.no_link = self.no_link
                for s in subgraph.nodes:
                    for k, v in self.edges.get(s, {}).iteritems():
                        if k in subgraph.nodes:
                            subgraph.edges.setdefault(s, {}).update({k: v})
                    if s in self.cluster_lookup:
                        subgraph.cluster_lookup[s] = self.cluster_lookup[s]
                components.append(subgraph)
            return components

    def dfs(self, v, visited, threshold, op=lt, first=None):
        # Recursive depth-first search from v over edges passing the
        # op/threshold filter; returns (reached nodes, updated visited).
        # NOTE(review): recursion depth grows with component size; very
        # large components could exceed the interpreter recursion limit.
        aux = [v]
        visited.add(v)
        if first is None:
            first = v
        for i in (n for n, w in self.edges.get(v, {}).iteritems()
                  if op(w, threshold) and n not in visited):
            x, y = self.dfs(i, visited, threshold, op, first)
            aux.extend(x)
            visited = visited.union(y)
        return aux, visited
diff --git a/pysal/region/maxp.py b/pysal/region/maxp.py
new file mode 100644
index 0000000..b097ae5
--- /dev/null
+++ b/pysal/region/maxp.py
@@ -0,0 +1,588 @@
+"""
+Max p regionalization
+
+Heuristically form the maximum number (p) of regions given a set of n
+areas and a floor constraint.
+"""
+
+__author__ = "Serge Rey <srey at asu.edu>, David Folch <david.folch at asu.edu>"
+
+
+import pysal
+from components import check_contiguity
+import copy
+import random
+import numpy as np
+#from pysal.common import *
+from pysal.region import randomregion as RR
+
+__all__ = ["Maxp", "Maxp_LISA"]
+
+LARGE = 10 ** 6
+MAX_ATTEMPTS = 100
+
+
+class Maxp:
+    """Try to find the maximum number of regions for a set of areas such that
+    each region combines contiguous areas that satisfy a given threshold
+    constraint.
+
+
+    Parameters
+    ----------
+
+    w               : W
+                      spatial weights object
+
+    z               : array
+                      n*m array of observations on m attributes across n
+                      areas. This is used to calculate intra-regional
+                      homogeneity
+    floor           : int
+                      a minimum bound for a variable that has to be
+                      obtained in each region
+    floor_variable  : array
+                      n*1 vector of observations on variable for the floor
+    initial         : int
+                      number of initial solutions to generate
+    verbose         : binary
+                      if true debugging information is printed
+    seeds           : list
+                      ids of observations to form initial seeds. If
+                      len(ids) is less than the number of observations, the
+                      complementary ids are added to the end of seeds. Thus
+                      the specified seeds get priority in the solution
+
+    Attributes
+    ----------
+
+    area2region     : dict
+                      mapping of areas to region. key is area id, value is
+                      region id
+    regions         : list
+                      list of lists of regions (each list has the ids of areas
+                      in that region)
+    p               : int
+                      number of regions
+    swap_iterations : int
+                      number of swap iterations
+    total_moves     : int
+                      number of moves into internal regions
+
+    Examples
+    --------
+
+    Setup imports and set seeds for random number generators to ensure the
+    results are identical for each run.
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+
+    Setup a spatial weights matrix describing the connectivity of a square
+    community with 100 areas.  Generate two random data attributes for each
+    area in the community (a 100x2 array) called z. p is the data vector used to
+    compute the floor for a region, and floor is the floor value; in this case
+    p is simply a vector of ones and the floor is set to three. This means
+    that each region will contain at least three areas.  In other cases the
+    floor may be computed based on a minimum population count for example.
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> w = pysal.lat2W(10,10)
+    >>> z = np.random.random_sample((w.n,2))
+    >>> p = np.ones((w.n,1), float)
+    >>> floor = 3
+    >>> solution = pysal.region.Maxp(w, z, floor, floor_variable=p, initial=100)
+    >>> solution.p
+    29
+    >>> min([len(region) for region in solution.regions])
+    3
+    >>> solution.regions[0]
+    [4, 14, 5, 24, 3]
+    >>>
+
+    """
    def __init__(self, w, z, floor, floor_variable,
                 verbose=False, initial=100, seeds=[]):
        # NOTE(review): the mutable default seeds=[] is shared across
        # calls; it is only read here, but confirm before ever mutating.

        self.w = w
        self.z = z
        self.floor = floor
        self.floor_variable = floor_variable
        self.verbose = verbose
        self.seeds = seeds
        # Build a first feasible solution; sets self.p to 0 on failure.
        self.initial_solution()
        if not self.p:
            self.feasible = False
        else:
            self.feasible = True
            best_val = self.objective_function()
            self.current_regions = copy.copy(self.regions)
            self.current_area2region = copy.copy(self.area2region)
            self.initial_wss = []
            self.attempts = 0
            # Restart `initial` times and keep the lowest-wss solution.
            for i in range(initial):
                self.initial_solution()
                if self.p:
                    val = self.objective_function()
                    self.initial_wss.append(val)
                    if self.verbose:
                        print 'initial solution: ', i, val, best_val
                    if val < best_val:
                        self.current_regions = copy.copy(self.regions)
                        self.current_area2region = copy.copy(self.area2region)
                        best_val = val
                    self.attempts += 1
            self.regions = copy.copy(self.current_regions)
            self.p = len(self.regions)
            self.area2region = self.current_area2region
            if verbose:
                print "smallest region ifs: ", min([len(region) for region in self.regions])
                # NOTE(review): this assigns the string 'wait' to the name
                # raw_input rather than calling raw_input('wait'); it does
                # not pause execution - confirm the intent.
                raw_input='wait'

            # Local improvement phase: move areas between regions.
            self.swap()
+
    def initial_solution(self):
        """
        Construct an initial feasible solution by growing regions from
        candidate seeds and then assigning leftover enclaves to adjacent
        regions.

        On success sets self.regions, self.area2region, self.enclaves and
        self.p; sets self.p to 0 when no feasible solution is found
        within MAX_ATTEMPTS tries.
        """
        self.p = 0
        solving = True
        attempts = 0
        while solving and attempts <= MAX_ATTEMPTS:
            regions = []
            enclaves = []
            #Candidate order: random permutation, or user seeds first
            if not self.seeds:
                candidates = copy.copy(self.w.id_order)
                candidates = np.random.permutation(candidates)
                candidates = candidates.tolist()
            else:
                seeds = copy.copy(self.seeds)
                nonseeds = [i for i in self.w.id_order if i not in seeds]
                candidates = seeds
                candidates.extend(nonseeds)
            while candidates:
                seed = candidates.pop(0)
                # try to grow it till threshold constraint is satisfied
                region = [seed]
                building_region = True
                while building_region:
                    # check if floor is satisfied
                    if self.check_floor(region):
                        regions.append(region)
                        building_region = False
                    else:
                        #Collect unassigned neighbors of the growing region
                        potential = []
                        for area in region:
                            neighbors = self.w.neighbors[area]
                            neighbors = [neigh for neigh in neighbors if neigh in candidates]
                            neighbors = [neigh for neigh in neighbors if neigh not in region]
                            neighbors = [neigh for neigh in neighbors if neigh not in potential]
                            potential.extend(neighbors)
                        if potential:
                            # add a random neighbor
                            neigID = random.randint(0, len(potential) - 1)
                            neigAdd = potential.pop(neigID)
                            region.append(neigAdd)
                            # remove it from candidates
                            candidates.remove(neigAdd)
                        else:
                            #No room to grow: the region becomes an enclave
                            enclaves.extend(region)
                            building_region = False
            # check to see if any regions were made before going to enclave stage
            if regions:
                feasible = True
            else:
                attempts += 1
                break
            self.enclaves = enclaves[:]
            #Map each assigned area to its region id
            a2r = {}
            for r, region in enumerate(regions):
                for area in region:
                    a2r[area] = r
            encCount = len(enclaves)
            encAttempts = 0
            while enclaves and encAttempts != encCount:
                enclave = enclaves.pop(0)
                neighbors = self.w.neighbors[enclave]
                neighbors = [neighbor for neighbor in neighbors if neighbor not in enclaves]
                candidates = []
                for neighbor in neighbors:
                    region = a2r[neighbor]
                    if region not in candidates:
                        candidates.append(region)
                if candidates:
                    # add enclave to random region
                    regID = random.randint(0, len(candidates) - 1)
                    rid = candidates[regID]
                    regions[rid].append(enclave)
                    a2r[enclave] = rid
                    # structure to loop over enclaves until no more joining is possible
                    encCount = len(enclaves)
                    encAttempts = 0
                    feasible = True
                else:
                    # put back on que, no contiguous regions yet
                    enclaves.append(enclave)
                    encAttempts += 1
                    feasible = False
            if feasible:
                solving = False
                self.regions = regions
                self.area2region = a2r
                self.p = len(regions)
            else:
                if attempts == MAX_ATTEMPTS:
                    print 'No initial solution found'
                    self.p = 0
                attempts += 1
+
    def swap(self):
        """
        Improve the current solution by moving areas between adjacent
        regions whenever a move lowers the objective function, while
        preserving contiguity and the floor constraint.

        Iterates until a full pass makes no moves; sets
        self.swap_iterations and self.total_moves.
        """
        swapping = True
        swap_iteration = 0
        if self.verbose:
            print 'Initial solution, objective function: ', self.objective_function()
        total_moves = 0
        self.k = len(self.regions)
        #Only regions changed in the previous pass are revisited
        changed_regions = [1] * self.k
        nr = range(self.k)
        while swapping:
            moves_made = 0
            regionIds = [r for r in nr if changed_regions[r]]
            # NOTE(review): np.random.permutation returns a new array; the
            # result is discarded here, so regionIds keeps its original
            # order - confirm whether shuffling was intended.
            np.random.permutation(regionIds)
            changed_regions = [0] * self.k
            swap_iteration += 1
            for seed in regionIds:
                local_swapping = True
                local_attempts = 0
                while local_swapping:
                    local_moves = 0
                    # get neighbors
                    members = self.regions[seed]
                    neighbors = []
                    for member in members:
                        candidates = self.w.neighbors[member]
                        candidates = [candidate for candidate in candidates if candidate not in members]
                        candidates = [candidate for candidate in candidates if candidate not in neighbors]
                        neighbors.extend(candidates)
                    #Keep only neighbors whose removal from their current
                    # region leaves it contiguous and above the floor
                    candidates = []
                    for neighbor in neighbors:
                        block = copy.copy(self.regions[self.area2region[
                            neighbor]])
                        if check_contiguity(self.w, block, neighbor):
                            block.remove(neighbor)
                            fv = self.check_floor(block)
                            if fv:
                                candidates.append(neighbor)
                    # find the best local move
                    if not candidates:
                        local_swapping = False
                    else:
                        nc = len(candidates)
                        moves = np.zeros([nc, 1], float)
                        best = None
                        cv = 0.0
                        for area in candidates:
                            #Objective change from moving `area` into seed
                            current_internal = self.regions[seed]
                            current_outter = self.regions[self.area2region[
                                area]]
                            current = self.objective_function([current_internal, current_outter])
                            new_internal = copy.copy(current_internal)
                            new_outter = copy.copy(current_outter)
                            new_internal.append(area)
                            new_outter.remove(area)
                            new = self.objective_function([new_internal,
                                                           new_outter])
                            change = new - current
                            if change < cv:
                                best = area
                                cv = change
                        if best:
                            # make the move
                            area = best
                            old_region = self.area2region[area]
                            self.regions[old_region].remove(area)
                            self.area2region[area] = seed
                            self.regions[seed].append(area)
                            moves_made += 1
                            changed_regions[seed] = 1
                            changed_regions[old_region] = 1
                        else:
                            # no move improves the solution
                            local_swapping = False
                    local_attempts += 1
                    if self.verbose:
                        print 'swap_iteration: ', swap_iteration, 'moves_made: ', moves_made
                        print 'number of regions: ', len(self.regions)
                        print 'number of changed regions: ', sum(
                            changed_regions)
                        print 'internal region: ', seed, 'local_attempts: ', local_attempts
                        print 'objective function: ', self.objective_function()
                        print 'smallest region size: ',min([len(region) for region in self.regions])
            total_moves += moves_made
            #A pass with no moves terminates the search
            if moves_made == 0:
                swapping = False
                self.swap_iterations = swap_iteration
                self.total_moves = total_moves
            if self.verbose:
                print 'moves_made: ', moves_made
                print 'objective function: ', self.objective_function()
+
+    def check_floor(self, region):
+        selectionIDs = [self.w.id_order.index(i) for i in region]
+        cv = sum(self.floor_variable[selectionIDs])
+        if cv >= self.floor:
+            #print len(selectionIDs)
+            return True
+        else:
+            return False
+
+    def objective_function(self, solution=None):
+        # solution is a list of lists of region ids [[1,7,2],[0,4,3],...] such
+        # that the first region has areas 1,7,2 the second region 0,4,3 and so
+        # on. solution does not have to be exhaustive
+        if not solution:
+            solution = self.regions
+        wss = 0
+        for region in solution:
+            selectionIDs = [self.w.id_order.index(i) for i in region]
+            m = self.z[selectionIDs, :]
+            var = m.var(axis=0)
+            wss += sum(np.transpose(var)) * len(region)
+        return wss
+
+    def inference(self, nperm=99):
+        """Compare the within sum of squares for the solution against
+        simulated solutions where areas are randomly assigned to regions that
+        maintain the cardinality of the original solution.
+
+        Parameters
+        ----------
+
+        nperm       : int
+                      number of random permutations for calculation of
+                      pseudo-p_values
+
+        Attributes
+        ----------
+
+        pvalue      : float
+                      pseudo p_value
+
+        Examples
+        --------
+
+        Setup is the same as shown above except using a 5x5 community.
+
+        >>> import random
+        >>> import numpy as np
+        >>> import pysal
+        >>> random.seed(100)
+        >>> np.random.seed(100)
+        >>> w=pysal.weights.lat2W(5,5)
+        >>> z=np.random.random_sample((w.n,2))
+        >>> p=np.ones((w.n,1),float)
+        >>> floor=3
+        >>> solution=pysal.region.Maxp(w,z,floor,floor_variable=p,initial=100)
+
+        Set nperm to 9 meaning that 9 random regions are computed and used for
+        the computation of a pseudo-p-value for the actual Max-p solution. In
+        empirical work this would typically be set much higher, e.g. 999 or
+        9999.
+
+        >>> solution.inference(nperm=9)
+        >>> solution.pvalue
+        0.2
+
+        """
+        ids = self.w.id_order
+        num_regions = len(self.regions)
+        wsss = np.zeros(nperm + 1)
+        self.wss = self.objective_function()
+        cards = [len(i) for i in self.regions]
+        sim_solutions = RR.Random_Regions(ids, num_regions,
+                                          cardinality=cards, permutations=nperm)
+        cv = 1
+        c = 1
+        for solution in sim_solutions.solutions_feas:
+            wss = self.objective_function(solution.regions)
+            wsss[c] = wss
+            if wss <= self.wss:
+                cv += 1
+            c += 1
+        self.pvalue = cv / (1. + len(sim_solutions.solutions_feas))
+        self.wss_perm = wsss
+        self.wss_perm[0] = self.wss
+
+    def cinference(self, nperm=99, maxiter=1000):
+        """Compare the within sum of squares for the solution against
+        conditional simulated solutions where areas are randomly assigned to
+        regions that maintain the cardinality of the original solution and
+        respect contiguity relationships.
+
+        Parameters
+        ----------
+
+        nperm       : int
+                      number of random permutations for calculation of
+                      pseudo-p_values
+
+        maxiter     : int
+                      maximum number of attempts to find each permutation
+
+        Attributes
+        ----------
+
+        pvalue      : float
+                      pseudo p_value
+
+        feas_sols   : int
+                      number of feasible solutions found
+
+        Notes
+        -----
+
+        It is possible for the number of feasible solutions (feas_sols) to be
+        less than the number of permutations requested (nperm); an exception
+        is raised if this occurs.
+
+        Examples
+        --------
+
+        Setup is the same as shown above except using a 5x5 community.
+
+        >>> import random
+        >>> import numpy as np
+        >>> import pysal
+        >>> random.seed(100)
+        >>> np.random.seed(100)
+        >>> w=pysal.weights.lat2W(5,5)
+        >>> z=np.random.random_sample((w.n,2))
+        >>> p=np.ones((w.n,1),float)
+        >>> floor=3
+        >>> solution=pysal.region.Maxp(w,z,floor,floor_variable=p,initial=100)
+
+        Set nperm to 9 meaning that 9 random regions are computed and used for
+        the computation of a pseudo-p-value for the actual Max-p solution. In
+        empirical work this would typically be set much higher, e.g. 999 or
+        9999.
+
+        >>> solution.cinference(nperm=9, maxiter=100)
+        >>> solution.cpvalue
+        0.1
+
+        """
+        ids = self.w.id_order
+        num_regions = len(self.regions)
+        wsss = np.zeros(nperm + 1)
+        self.cwss = self.objective_function()
+        cards = [len(i) for i in self.regions]
+        sim_solutions = RR.Random_Regions(ids, num_regions,
+                                          cardinality=cards, contiguity=self.w,
+                                          maxiter=maxiter, permutations=nperm)
+        self.cfeas_sols = len(sim_solutions.solutions_feas)
+        if self.cfeas_sols < nperm:
+            raise Exception('not enough feasible solutions found')
+        cv = 1
+        c = 1
+        for solution in sim_solutions.solutions_feas:
+            wss = self.objective_function(solution.regions)
+            wsss[c] = wss
+            if wss <= self.cwss:
+                cv += 1
+            c += 1
+        self.cpvalue = cv / (1. + self.cfeas_sols)
+        self.cwss_perm = wsss
+        self.cwss_perm[0] = self.cwss
+
+
+class Maxp_LISA(Maxp):
+    """Max-p regionalization using LISA seeds
+
+    Parameters
+    ----------
+
+    w              : W
+                     spatial weights object
+    z              : array
+                     nxk array of n observations on k variables used to
+                     measure similarity between areas within the regions.
+    y              : array
+                     nx1 array used to calculate the LISA statistics and
+                     to set the initial seed order
+    floor          : float
+                     value that each region must obtain on floor_variable
+    floor_variable : array
+                     nx1 array of values for regional floor threshold
+    initial        : int
+                     number of initial feasible solutions to generate
+                     prior to swapping
+
+    Attributes
+    ----------
+
+    area2region     : dict
+                      mapping of areas to region. key is area id, value is
+                      region id
+    regions         : list
+                      list of lists of regions (each list has the ids of areas
+                      in that region)
+    swap_iterations : int
+                      number of swap iterations
+    total_moves     : int
+                      number of moves into internal regions
+
+
+    Notes
+    -----
+
+    We sort the observations based on the value of the LISAs. This
+    ordering then gives the priority for seeds forming the p regions. The
+    initial priority seeds are not guaranteed to be separated in the final
+    solution.
+
+    Examples
+    --------
+
+    Setup imports and set seeds for random number generators to ensure the
+    results are identical for each run.
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+
+    Setup a spatial weights matrix describing the connectivity of a square
+    community with 100 areas.  Generate two random data attributes for each area
+    in the community (a 100x2 array) called z. p is the data vector used to
+    compute the floor for a region, and floor is the floor value; in this case
+    p is simply a vector of ones and the floor is set to three. This means
+    that each region will contain at least three areas.  In other cases the
+    floor may be computed based on a minimum population count for example.
+
+    >>> w=pysal.lat2W(10,10)
+    >>> z=np.random.random_sample((w.n,2))
+    >>> p=np.ones(w.n)
+    >>> mpl=pysal.region.Maxp_LISA(w,z,p,floor=3,floor_variable=p)
+    >>> mpl.p
+    31
+    >>> mpl.regions[0]
+    [99, 89, 98]
+
+    """
+    def __init__(self, w, z, y, floor, floor_variable, initial=100):
+
+        lis = pysal.Moran_Local(y, w)
+        ids = np.argsort(lis.Is)
+        ids = ids[range(w.n - 1, -1, -1)]
+        ids = ids.tolist()
+        mp = Maxp.__init__(
+            self, w, z, floor=floor, floor_variable=floor_variable,
+            initial=initial, seeds=ids)
+
diff --git a/pysal/region/randomregion.py b/pysal/region/randomregion.py
new file mode 100644
index 0000000..7431567
--- /dev/null
+++ b/pysal/region/randomregion.py
@@ -0,0 +1,527 @@
+"""
+Generate random regions
+
+Randomly form regions given various types of constraints on cardinality and
+composition.
+"""
+
+__author__ = "David Folch dfolch at asu.edu, Serge Rey srey at asu.edu"
+
+import numpy as np
+from pysal.common import copy
+
+__all__ = ["Random_Regions", "Random_Region"]
+
+
+class Random_Regions:
+    """Generate a list of Random_Region instances.
+
+    Parameters
+    ----------
+
+    area_ids        : list
+                      IDs indexing the areas to be grouped into regions (must
+                      be in the same order as spatial weights matrix if this
+                      is provided)
+
+    num_regions     : integer
+                      number of regions to generate (if None then this is
+                      chosen randomly from 2 to n where n is the number of
+                      areas)
+
+    cardinality     : list
+                      list containing the number of areas to assign to regions
+                      (if num_regions is also provided then len(cardinality)
+                      must equal num_regions; if cardinality=None then a list
+                      of length num_regions will be generated randomly)
+
+    contiguity      : W
+                      spatial weights object (if None then contiguity will be
+                      ignored)
+
+    maxiter         : int
+                      maximum number of attempts (for each permutation) at finding
+                      a feasible solution (only affects contiguity constrained
+                      regions)
+
+    compact         : boolean
+                      attempt to build compact regions (only affects
+                      contiguity constrained regions)
+
+    max_swaps       : int
+                      maximum number of swaps to find a feasible solution
+                      (only affects contiguity constrained regions)
+
+    permutations    : int
+                      number of Random_Region instances to generate
+
+    Attributes
+    ----------
+
+    solutions       : list
+                      list of length permutations containing all Random_Region instances generated
+
+    solutions_feas  : list
+                      list of the Random_Region instances that resulted in feasible solutions
+
+    Examples
+    --------
+
+    Setup the data
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> nregs = 13
+    >>> cards = range(2,14) + [10]
+    >>> w = pysal.lat2W(10,10,rook=False)
+    >>> ids = w.id_order
+
+    Unconstrained
+
+    >>> random.seed(10)
+    >>> np.random.seed(10)
+    >>> t0 = pysal.region.Random_Regions(ids, permutations=2)
+    >>> t0.solutions[0].regions[0]
+    [19, 14, 43, 37, 66, 3, 79, 41, 38, 68, 2, 1, 60]
+
+    Cardinality and contiguity constrained (num_regions implied)
+
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t1 = pysal.region.Random_Regions(ids, num_regions=nregs, cardinality=cards, contiguity=w, permutations=2)
+    >>> t1.solutions[0].regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+
+    Cardinality constrained (num_regions implied)
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t2 = pysal.region.Random_Regions(ids, num_regions=nregs, cardinality=cards, permutations=2)
+    >>> t2.solutions[0].regions[0]
+    [37, 62]
+
+    Number of regions and contiguity constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t3 = pysal.region.Random_Regions(ids, num_regions=nregs, contiguity=w, permutations=2)
+    >>> t3.solutions[0].regions[1]
+    [71, 72, 70, 93, 51, 91, 85, 74, 63, 73, 61, 62, 82]
+
+    Cardinality and contiguity constrained
+
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t4 = pysal.region.Random_Regions(ids, cardinality=cards, contiguity=w, permutations=2)
+    >>> t4.solutions[0].regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+
+    Number of regions constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t5 = pysal.region.Random_Regions(ids, num_regions=nregs, permutations=2)
+    >>> t5.solutions[0].regions[0]
+    [37, 62, 26, 41, 35, 25, 36]
+
+    Cardinality constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t6 = pysal.region.Random_Regions(ids, cardinality=cards, permutations=2)
+    >>> t6.solutions[0].regions[0]
+    [37, 62]
+
+    Contiguity constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t7 = pysal.region.Random_Regions(ids, contiguity=w, permutations=2)
+    >>> t7.solutions[0].regions[1]
+    [62, 52, 51, 50]
+
+    """
+    def __init__(
+        self, area_ids, num_regions=None, cardinality=None, contiguity=None,
+        maxiter=100, compact=False, max_swaps=1000000, permutations=99):
+
+        solutions = []
+        for i in range(permutations):
+            solutions.append(Random_Region(area_ids, num_regions, cardinality,
+                                           contiguity, maxiter, compact, max_swaps))
+        self.solutions = solutions
+        self.solutions_feas = []
+        for i in solutions:
+            if i.feasible == True:
+                self.solutions_feas.append(i)
+
+
+class Random_Region:
+    """Randomly combine a given set of areas into two or more regions based
+    on various constraints.
+
+
+    Parameters
+    ----------
+
+    area_ids        : list
+                      IDs indexing the areas to be grouped into regions (must
+                      be in the same order as spatial weights matrix if this
+                      is provided)
+
+    num_regions     : integer
+                      number of regions to generate (if None then this is
+                      chosen randomly from 2 to n where n is the number of
+                      areas)
+
+    cardinality     : list
+                      list containing the number of areas to assign to regions
+                      (if num_regions is also provided then len(cardinality)
+                      must equal num_regions; if cardinality=None then a list
+                      of length num_regions will be generated randomly)
+
+    contiguity      : W
+                      spatial weights object (if None then contiguity will be
+                      ignored)
+
+    maxiter         : int
+                      maximum number of attempts at finding a feasible solution
+                      (only affects contiguity constrained regions)
+
+    compact         : boolean
+                      attempt to build compact regions (only affects
+                      contiguity constrained regions)
+
+    max_swaps       : int
+                      maximum number of swaps to find a feasible solution
+                      (only affects contiguity constrained regions)
+
+    Attributes
+    ----------
+
+    feasible        : boolean
+                      if True then solution was found
+
+    regions         : list
+                      list of lists of regions (each list has the ids of areas
+                      in that region)
+
+    Examples
+    --------
+
+    Setup the data
+
+    >>> import random
+    >>> import numpy as np
+    >>> import pysal
+    >>> nregs = 13
+    >>> cards = range(2,14) + [10]
+    >>> w = pysal.weights.lat2W(10,10,rook=False)
+    >>> ids = w.id_order
+
+    Unconstrained
+
+    >>> random.seed(10)
+    >>> np.random.seed(10)
+    >>> t0 = pysal.region.Random_Region(ids)
+    >>> t0.regions[0]
+    [19, 14, 43, 37, 66, 3, 79, 41, 38, 68, 2, 1, 60]
+
+    Cardinality and contiguity constrained (num_regions implied)
+
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t1 = pysal.region.Random_Region(ids, num_regions=nregs, cardinality=cards, contiguity=w)
+    >>> t1.regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+
+    Cardinality constrained (num_regions implied)
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t2 = pysal.region.Random_Region(ids, num_regions=nregs, cardinality=cards)
+    >>> t2.regions[0]
+    [37, 62]
+
+    Number of regions and contiguity constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t3 = pysal.region.Random_Region(ids, num_regions=nregs, contiguity=w)
+    >>> t3.regions[1]
+    [71, 72, 70, 93, 51, 91, 85, 74, 63, 73, 61, 62, 82]
+
+    Cardinality and contiguity constrained
+
+    >>> random.seed(60)
+    >>> np.random.seed(60)
+    >>> t4 = pysal.region.Random_Region(ids, cardinality=cards, contiguity=w)
+    >>> t4.regions[0]
+    [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77]
+
+    Number of regions constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t5 = pysal.region.Random_Region(ids, num_regions=nregs)
+    >>> t5.regions[0]
+    [37, 62, 26, 41, 35, 25, 36]
+
+    Cardinality constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t6 = pysal.region.Random_Region(ids, cardinality=cards)
+    >>> t6.regions[0]
+    [37, 62]
+
+    Contiguity constrained
+
+    >>> random.seed(100)
+    >>> np.random.seed(100)
+    >>> t7 = pysal.region.Random_Region(ids, contiguity=w)
+    >>> t7.regions[0]
+    [37, 27, 36, 17]
+
+    """
+    def __init__(
+        self, area_ids, num_regions=None, cardinality=None, contiguity=None,
+                    maxiter=1000, compact=False, max_swaps=1000000):
+
+        self.n = len(area_ids)
+        ids = copy.copy(area_ids)
+        self.ids = list(np.random.permutation(ids))
+        self.area_ids = area_ids
+        self.regions = []
+        self.feasible = True
+
+        # tests for input argument consistency
+        if cardinality:
+            if self.n != sum(cardinality):
+                self.feasible = False
+                raise Exception('number of areas does not match cardinality')
+        if contiguity:
+            if area_ids != contiguity.id_order:
+                self.feasible = False
+                raise Exception('order of area_ids must match order in contiguity')
+        if num_regions and cardinality:
+            if num_regions != len(cardinality):
+                self.feasible = False
+                raise Exception('number of regions does not match cardinality')
+
+        # dispatches the appropriate algorithm
+        if num_regions and cardinality and contiguity:
+            # conditioning on cardinality and contiguity (number of regions implied)
+            self.build_contig_regions(num_regions, cardinality, contiguity,
+                                      maxiter, compact, max_swaps)
+        elif num_regions and cardinality:
+            # conditioning on cardinality (number of regions implied)
+            region_breaks = self.cards2breaks(cardinality)
+            self.build_noncontig_regions(num_regions, region_breaks)
+        elif num_regions and contiguity:
+            # conditioning on number of regions and contiguity
+            cards = self.get_cards(num_regions)
+            self.build_contig_regions(num_regions, cards, contiguity,
+                                      maxiter, compact, max_swaps)
+        elif cardinality and contiguity:
+            # conditioning on cardinality and contiguity
+            num_regions = len(cardinality)
+            self.build_contig_regions(num_regions, cardinality, contiguity,
+                                      maxiter, compact, max_swaps)
+        elif num_regions:
+            # conditioning on number of regions only
+            region_breaks = self.get_region_breaks(num_regions)
+            self.build_noncontig_regions(num_regions, region_breaks)
+        elif cardinality:
+            # conditioning on cardinality only
+            num_regions = len(cardinality)
+            region_breaks = self.cards2breaks(cardinality)
+            self.build_noncontig_regions(num_regions, region_breaks)
+        elif contiguity:
+            # conditioning on contiguity only
+            num_regions = self.get_num_regions()
+            cards = self.get_cards(num_regions)
+            self.build_contig_regions(num_regions, cards, contiguity,
+                                      maxiter, compact, max_swaps)
+        else:
+            # unconditioned
+            num_regions = self.get_num_regions()
+            region_breaks = self.get_region_breaks(num_regions)
+            self.build_noncontig_regions(num_regions, region_breaks)
+
+    def get_num_regions(self):
+        return np.random.random_integers(2, self.n)
+
+    def get_region_breaks(self, num_regions):
+        region_breaks = set([])
+        while len(region_breaks) < num_regions - 1:
+            region_breaks.add(np.random.random_integers(1, self.n - 1))
+        region_breaks = list(region_breaks)
+        region_breaks.sort()
+        return region_breaks
+
+    def get_cards(self, num_regions):
+        region_breaks = self.get_region_breaks(num_regions)
+        cards = []
+        start = 0
+        for i in region_breaks:
+            cards.append(i - start)
+            start = i
+        cards.append(self.n - start)
+        return cards
+
+    def cards2breaks(self, cards):
+        region_breaks = []
+        break_point = 0
+        for i in cards:
+            break_point += i
+            region_breaks.append(break_point)
+        region_breaks.pop()
+        return region_breaks
+
+    def build_noncontig_regions(self, num_regions, region_breaks):
+        start = 0
+        for i in region_breaks:
+            self.regions.append(self.ids[start:i])
+            start = i
+        self.regions.append(self.ids[start:])
+
+    def grow_compact(self, w, test_card, region, candidates, potential):
+        # try to build a compact region by exhausting all existing
+        # potential areas before adding new potential areas
+        add_areas = []
+        while potential and len(region) < test_card:
+            pot_index = np.random.random_integers(0, len(potential) - 1)
+            add_area = potential[pot_index]
+            region.append(add_area)
+            candidates.remove(add_area)
+            potential.remove(add_area)
+            add_areas.append(add_area)
+        for i in add_areas:
+            potential.extend([j for j in w.neighbors[i]
+                                 if j not in region and
+                                    j not in potential and
+                                    j in candidates])
+        return region, candidates, potential
+
+    def grow_free(self, w, test_card, region, candidates, potential):
+        # increment potential areas after each new area is
+        # added to the region (faster than the grow_compact)
+        pot_index = np.random.random_integers(0, len(potential) - 1)
+        add_area = potential[pot_index]
+        region.append(add_area)
+        candidates.remove(add_area)
+        potential.remove(add_area)
+        potential.extend([i for i in w.neighbors[add_area]
+                             if i not in region and
+                                i not in potential and
+                                i in candidates])
+        return region, candidates, potential
+
+    def build_contig_regions(self, num_regions, cardinality, w,
+                                maxiter, compact, max_swaps):
+        if compact:
+            grow_region = self.grow_compact
+        else:
+            grow_region = self.grow_free
+        iter = 0
+        while iter < maxiter:
+
+            # regionalization setup
+            regions = []
+            size_pre = 0
+            counter = -1
+            area2region = {}
+            self.feasible = False
+            swap_count = 0
+            cards = copy.copy(cardinality)
+            cards.sort()  # try to build largest regions first (pop from end of list)
+            candidates = copy.copy(self.ids)  # these are already shuffled
+
+            # begin building regions
+            while candidates and swap_count < max_swaps:
+                # setup test to determine if swapping is needed
+                if size_pre == len(regions):
+                    counter += 1
+                else:
+                    counter = 0
+                    size_pre = len(regions)
+                # test if swapping is needed
+                if counter == len(candidates):
+
+                    # start swapping
+                    # swapping simply changes the candidate list
+                    swap_in = None   # area to become new candidate
+                    while swap_in is None:  # PEP8 E711
+                        swap_count += 1
+                        swap_out = candidates.pop(0)  # area to remove from candidates
+                        swap_neighs = copy.copy(w.neighbors[swap_out])
+                        swap_neighs = list(np.random.permutation(swap_neighs))
+                        # select area to add to candidates (i.e. remove from an existing region)
+                        for i in swap_neighs:
+                            if i not in candidates:
+                                join = i  # area linking swap_in to swap_out
+                                swap_index = area2region[join]
+                                swap_region = regions[swap_index]
+                                swap_region = list(np.random.permutation(swap_region))
+                                for j in swap_region:
+                                    if j != join:  # leave the join area to ensure regional connectivity
+                                        swap_in = j
+                                        break
+                            if swap_in is not None:  # PEP8 E711
+                                break
+                        else:
+                            candidates.append(swap_out)
+                    # swapping cleanup
+                    regions[swap_index].remove(swap_in)
+                    regions[swap_index].append(swap_out)
+                    area2region.pop(swap_in)
+                    area2region[swap_out] = swap_index
+                    candidates.append(swap_in)
+                    counter = 0
+
+                # setup to build a single region
+                building = True
+                seed = candidates.pop(0)
+                region = [seed]
+                potential = [i for i in w.neighbors[seed] if i in candidates]
+                test_card = cards.pop()
+
+                # begin building single region
+                while building and len(region) < test_card:
+                    if potential:
+                        region, candidates, potential = grow_region(
+                            w, test_card,
+                                        region, candidates, potential)
+                    else:
+                        # not enough potential neighbors to reach test_card size
+                        building = False
+                        cards.append(test_card)
+                        if len(region) in cards:
+                            # constructed region matches another candidate region size
+                            cards.remove(len(region))
+                        else:
+                            # constructed region doesn't match a candidate region size
+                            candidates.extend(region)
+                            region = []
+
+                # cleanup when successful region built
+                if region:
+                    regions.append(region)
+                    region_index = len(regions) - 1
+                    for i in region:
+                        area2region[i] = region_index   # area2region needed for swapping
+
+            # handling of regionalization result
+            if len(regions) < num_regions:
+                # regionalization failed
+                self.ids = list(np.random.permutation(self.ids))
+                regions = []
+                iter += 1
+            else:
+                # regionalization successful
+                self.feasible = True
+                iter = maxiter
+        self.regions = regions
+
diff --git a/pysal/region/tests/__init__.py b/pysal/region/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/region/tests/test_components.py b/pysal/region/tests/test_components.py
new file mode 100644
index 0000000..48b7662
--- /dev/null
+++ b/pysal/region/tests/test_components.py
@@ -0,0 +1,25 @@
+
+import unittest
+import pysal
+
+
+class Test_Components(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.lat2W(5, 5)
+
+    def test_check_contiguity(self):
+        result = pysal.region.check_contiguity(self.w, [0, 1, 2, 3, 4], 4)
+        self.assertEquals(result, True)
+        result = pysal.region.check_contiguity(self.w, [0, 1, 2, 3, 4], 3)
+        self.assertEquals(result, False)
+        result = pysal.region.check_contiguity(self.w, [0, 1, 2, 3, 4], 0)
+        self.assertEquals(result, True)
+        result = pysal.region.check_contiguity(self.w, [0, 1, 2, 3, 4], 1)
+        self.assertEquals(result, False)
+
+
+suite = unittest.TestLoader().loadTestsFromTestCase(Test_Components)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/region/tests/test_maxp.py b/pysal/region/tests/test_maxp.py
new file mode 100644
index 0000000..18a66cf
--- /dev/null
+++ b/pysal/region/tests/test_maxp.py
@@ -0,0 +1,56 @@
+
+import unittest
+import pysal
+import numpy as np
+import random
+
+
class Test_Maxp(unittest.TestCase):
    """Regression tests for the Maxp regionalization heuristic.

    Expected values are pinned to the RNG seeds set in setUp; they verify
    reproducibility rather than optimality of the heuristic's output.
    """

    def setUp(self):
        # Seed both RNGs used by the stochastic heuristic so every test
        # sees the same pseudo-random stream.
        random.seed(100)
        np.random.seed(100)

    def test_Maxp(self):
        w = pysal.lat2W(10, 10)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        # `assertEquals` is a deprecated alias; use `assertEqual`.
        self.assertEqual(solution.p, 29)
        self.assertEqual(solution.regions[0], [4, 14, 5, 24, 3])

    def test_inference(self):
        w = pysal.weights.lat2W(5, 5)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        solution.inference(nperm=9)
        self.assertAlmostEqual(solution.pvalue, 0.2, 10)

    def test_cinference(self):
        w = pysal.weights.lat2W(5, 5)
        z = np.random.random_sample((w.n, 2))
        p = np.ones((w.n, 1), float)
        floor = 3
        solution = pysal.region.Maxp(
            w, z, floor, floor_variable=p, initial=100)
        solution.cinference(nperm=9, maxiter=100)
        self.assertAlmostEqual(solution.cpvalue, 0.1, 10)

    def test_Maxp_LISA(self):
        w = pysal.lat2W(10, 10)
        z = np.random.random_sample((w.n, 2))
        p = np.ones(w.n)
        mpl = pysal.region.Maxp_LISA(w, z, p, floor=3, floor_variable=p)
        self.assertEqual(mpl.p, 31)
        self.assertEqual(mpl.regions[0], [99, 89, 98])
+
+
# Module-level suite so external test runners can collect these tests.
suite = unittest.TestLoader().loadTestsFromTestCase(Test_Maxp)

# Run the suite when this file is executed directly.
if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/region/tests/test_randomregion.py b/pysal/region/tests/test_randomregion.py
new file mode 100644
index 0000000..8410fee
--- /dev/null
+++ b/pysal/region/tests/test_randomregion.py
@@ -0,0 +1,177 @@
+
+import unittest
+import pysal
+import numpy as np
+import random
+
+
class Test_Random_Regions(unittest.TestCase):
    """Regression tests for pysal.region.Random_Region(s).

    Expected region memberships are pinned to the RNG seeds set before
    each construction; they verify reproducibility of the stochastic
    region generators, not any optimality property.
    """

    def setUp(self):
        self.nregs = 13
        # Python 2 list concatenation: range() returns a list here.
        self.cards = range(2, 14) + [10]
        self.w = pysal.lat2W(10, 10, rook=False)
        self.ids = self.w.id_order

    def _seed(self, value):
        # Seed both RNGs used by the generators, in the same order the
        # original tests did, so the pinned expectations still hold.
        random.seed(value)
        np.random.seed(value)

    def _check_region(self, region, expected):
        # The leading elements of `region` must match the expected ids.
        for i in range(len(expected)):
            self.assertEqual(region[i], expected[i])

    def _check_feasible(self, t0):
        # Every solution should also appear among the feasible solutions.
        for i in range(len(t0.solutions)):
            self.assertEqual(t0.solutions_feas[i], t0.solutions[i])

    def test_Random_Regions(self):
        self._seed(10)
        t0 = pysal.region.Random_Regions(self.ids, permutations=2)
        self._check_region(t0.solutions[0].regions[0],
                           [19, 14, 43, 37, 66, 3, 79, 41, 38, 68, 2, 1, 60])
        self._check_feasible(t0)

        self._seed(60)
        t0 = pysal.region.Random_Regions(self.ids, num_regions=self.nregs,
                                         cardinality=self.cards, contiguity=self.w, permutations=2)
        self._check_region(t0.solutions[0].regions[0],
                           [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77])
        self._check_feasible(t0)

        self._seed(100)
        t0 = pysal.region.Random_Regions(self.ids, num_regions=self.nregs,
                                         cardinality=self.cards, permutations=2)
        self._check_region(t0.solutions[0].regions[0], [37, 62])
        self._check_feasible(t0)

        self._seed(100)
        t0 = pysal.region.Random_Regions(self.ids,
                                         num_regions=self.nregs, contiguity=self.w, permutations=2)
        self._check_region(t0.solutions[0].regions[1],
                           [71, 72, 70, 93, 51, 91, 85, 74, 63, 73, 61, 62, 82])
        self._check_feasible(t0)

        self._seed(60)
        t0 = pysal.region.Random_Regions(self.ids,
                                         cardinality=self.cards, contiguity=self.w, permutations=2)
        self._check_region(t0.solutions[0].regions[0],
                           [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77])
        self._check_feasible(t0)

        self._seed(100)
        t0 = pysal.region.Random_Regions(
            self.ids, num_regions=self.nregs, permutations=2)
        self._check_region(t0.solutions[0].regions[0],
                           [37, 62, 26, 41, 35, 25, 36])
        self._check_feasible(t0)

        self._seed(100)
        t0 = pysal.region.Random_Regions(
            self.ids, cardinality=self.cards, permutations=2)
        self._check_region(t0.solutions[0].regions[0], [37, 62])
        self._check_feasible(t0)

        self._seed(100)
        t0 = pysal.region.Random_Regions(
            self.ids, contiguity=self.w, permutations=2)
        self._check_region(t0.solutions[0].regions[1], [62, 52, 51, 50])
        self._check_feasible(t0)

    def test_Random_Region(self):
        self._seed(10)
        t0 = pysal.region.Random_Region(self.ids)
        self._check_region(t0.regions[0],
                           [19, 14, 43, 37, 66, 3, 79, 41, 38, 68, 2, 1, 60])
        self.assertEqual(t0.feasible, True)

        self._seed(60)
        t0 = pysal.region.Random_Region(self.ids, num_regions=self.nregs,
                                        cardinality=self.cards, contiguity=self.w)
        self._check_region(t0.regions[0],
                           [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77])
        self.assertEqual(t0.feasible, True)

        self._seed(100)
        t0 = pysal.region.Random_Region(
            self.ids, num_regions=self.nregs, cardinality=self.cards)
        self._check_region(t0.regions[0], [37, 62])
        self.assertEqual(t0.feasible, True)

        self._seed(100)
        t0 = pysal.region.Random_Region(
            self.ids, num_regions=self.nregs, contiguity=self.w)
        self._check_region(t0.regions[1],
                           [71, 72, 70, 93, 51, 91, 85, 74, 63, 73, 61, 62, 82])
        self.assertEqual(t0.feasible, True)

        self._seed(60)
        t0 = pysal.region.Random_Region(
            self.ids, cardinality=self.cards, contiguity=self.w)
        self._check_region(t0.regions[0],
                           [88, 97, 98, 89, 99, 86, 78, 59, 49, 69, 68, 79, 77])
        self.assertEqual(t0.feasible, True)

        self._seed(100)
        t0 = pysal.region.Random_Region(self.ids, num_regions=self.nregs)
        self._check_region(t0.regions[0], [37, 62, 26, 41, 35, 25, 36])
        self.assertEqual(t0.feasible, True)

        self._seed(100)
        t0 = pysal.region.Random_Region(self.ids, cardinality=self.cards)
        self._check_region(t0.regions[0], [37, 62])
        self.assertEqual(t0.feasible, True)

        self._seed(100)
        t0 = pysal.region.Random_Region(self.ids, contiguity=self.w)
        self._check_region(t0.regions[0], [37, 27, 36, 17])
        self.assertEqual(t0.feasible, True)
+
+
# Module-level suite so external test runners can collect these tests.
suite = unittest.TestLoader().loadTestsFromTestCase(Test_Random_Regions)

# Run the suite when this file is executed directly.
if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/spatial_dynamics/__init__.py b/pysal/spatial_dynamics/__init__.py
new file mode 100644
index 0000000..23fc1f2
--- /dev/null
+++ b/pysal/spatial_dynamics/__init__.py
@@ -0,0 +1,10 @@
"""
:mod:`spatial_dynamics` --- Spatial Dynamics and Mobility
=========================================================

"""

# Re-export the public API of each submodule at the package level
# (Python 2 implicit relative imports; each submodule defines __all__).
from markov import *
from rank import *
from directional import *
from interaction import *
diff --git a/pysal/spatial_dynamics/directional.py b/pysal/spatial_dynamics/directional.py
new file mode 100644
index 0000000..43854bb
--- /dev/null
+++ b/pysal/spatial_dynamics/directional.py
@@ -0,0 +1,187 @@
+"""
+Directional Analysis of Dynamic LISAs
+
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu"
+
+__all__ = ['rose']
+
+import numpy as np
+import pysal
+
+
def _angle_counts(Y, w, cuts):
    """Histogram of LISA movement directions for Y against its spatial lag.

    Movement vectors run from the first to the last column of Y (dx) and
    of its spatial lag (dy); their angles are mapped into [0, 2*pi) and
    binned by the sector boundaries in `cuts`.
    """
    wY = pysal.lag_spatial(w, Y)
    dx = Y[:, -1] - Y[:, 0]
    dy = wY[:, -1] - wY[:, 0]
    theta = np.arctan2(dy, dx)
    # arctan2 returns (-pi, pi]; shift negative angles into [0, 2*pi)
    utheta = np.where(theta < 0.0, 2 * np.pi + theta, theta)
    counts, _ = np.histogram(utheta, cuts)
    return counts


def rose(Y, w, k=8, permutations=0):
    """
    Calculation of rose diagram for local indicators of spatial association

    Parameters
    ----------

    Y: array (n,2)
       variable observed on n spatial units over 2 time periods

    w: spatial weights object

    k: int
       number of circular sectors in rose diagram

    permutations: int
       number of random spatial permutations for calculation of pseudo
       p-values

    Returns
    -------

    results: dictionary (keys defined below)

    counts:  array (k,1)
        number of vectors with angular movement falling in each sector

    cuts: array (k,1)
        intervals defining circular sectors (in radians)

    random_counts: array (permutations,k)
        counts from random permutations

    pvalues: array (kx1)
        one sided (upper tail) pvalues for observed counts

    Notes
    -----
    Based on Rey, Murray, and Anselin (2011) [1]_

    Examples
    --------

    Constructing data for illustration of directional LISA analytics.
    Data is for the 48 lower US states over the period 1969-2009 and
    includes per capita income normalized to the national average.

    Load comma delimited data file in and convert to a numpy array

    >>> f=open(pysal.examples.get_path("spi_download.csv"),'r')
    >>> lines=f.readlines()
    >>> f.close()
    >>> lines=[line.strip().split(",") for line in lines]
    >>> names=[line[2] for line in lines[1:-5]]
    >>> data=np.array([map(int,line[3:]) for line in lines[1:-5]])

    Bottom of the file has regional data which we don't need for this example
    so we will subset only those records that match a state name

    >>> sids=range(60)
    >>> out=['"United States 3/"',
    ...      '"Alaska 3/"',
    ...      '"District of Columbia"',
    ...      '"Hawaii 3/"',
    ...      '"New England"',
    ...      '"Mideast"',
    ...      '"Great Lakes"',
    ...      '"Plains"',
    ...      '"Southeast"',
    ...      '"Southwest"',
    ...      '"Rocky Mountain"',
    ...      '"Far West 3/"']
    >>> snames=[name for name in names if name not in out]
    >>> sids=[names.index(name) for name in snames]
    >>> states=data[sids,:]
    >>> us=data[0]
    >>> years=np.arange(1969,2009)

    Now we convert state incomes to express them relative to the national
    average

    >>> rel=states/(us*1.)

    Create our contiguity matrix from an external GAL file and row standardize
    the resulting weights

    >>> gal=pysal.open(pysal.examples.get_path('states48.gal'))
    >>> w=gal.read()
    >>> w.transform='r'

    Take the first and last year of our income data as the interval to do the
    directional analysis

    >>> Y=rel[:,[0,-1]]

    Set the random seed generator which is used in the permutation based
    inference for the rose diagram so that we can replicate our example
    results

    >>> np.random.seed(100)

    Call the rose function to construct the directional histogram for the
    dynamic LISA statistics. We will use four circular sectors for our
    histogram

    >>> r4=rose(Y,w,k=4,permutations=999)

    What are the cut-offs for our histogram - in radians

    >>> r4['cuts']
    array([ 0.        ,  1.57079633,  3.14159265,  4.71238898,  6.28318531])

    How many vectors fell in each sector

    >>> r4['counts']
    array([32,  5,  9,  2])

    What are the pseudo-pvalues for these counts based on 999 random spatial
    permutations of the state income data

    >>> r4['pvalues']
    array([ 0.02 ,  0.001,  0.001,  0.001])

    Repeat the exercise but now for 8 rather than 4 sectors

    >>> r8=rose(Y,w,permutations=999)
    >>> r8['counts']
    array([19, 13,  3,  2,  7,  2,  1,  1])
    >>> r8['pvalues']
    array([ 0.445,  0.042,  0.079,  0.003,  0.005,  0.1  ,  0.269,  0.002])

    References
    ----------

    .. [1] Rey, S.J., A.T. Murray and L. Anselin. 2011. "Visualizing
        regional income distribution dynamics." Letters in Spatial and Resource Sciences, 4: 81-90.

    """
    results = {}
    # k equal-width circular sectors covering [0, 2*pi]
    sw = 2 * np.pi / k
    cuts = np.arange(0.0, 2 * np.pi + sw, sw)
    counts = _angle_counts(Y, w, cuts)
    results['counts'] = counts
    results['cuts'] = cuts
    if permutations:
        n = Y.shape[0]
        ids = np.arange(n)
        all_counts = np.zeros((permutations, k))
        for i in range(permutations):
            # spatially permute the observations and re-bin the angles
            rid = np.random.permutation(ids)
            all_counts[i, :] = _angle_counts(Y[rid, :], w, cuts)
        # two-sided extremeness folded into a one-sided (upper tail) pseudo
        # p-value: take the rarer tail for each sector
        larger = (all_counts >= counts).sum(axis=0)
        p_l = permutations - larger
        extreme = np.where(p_l < larger, p_l, larger)
        p = (extreme + 1.) / (permutations + 1.)
        results['pvalues'] = p
        results['random_counts'] = all_counts

    return results
diff --git a/pysal/spatial_dynamics/ergodic.py b/pysal/spatial_dynamics/ergodic.py
new file mode 100644
index 0000000..138a614
--- /dev/null
+++ b/pysal/spatial_dynamics/ergodic.py
@@ -0,0 +1,179 @@
+"""
+Summary measures for ergodic Markov chains
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu>"
+
+__all__ = ['steady_state', 'fmpt', 'var_fmpt']
+
+import numpy as np
+import numpy.linalg as la
+
+
def steady_state(P):
    """
    Steady state probability vector for a regular Markov transition
    matrix P.

    Parameters
    ----------

    P  : matrix (kxk)
         an ergodic Markov transition probability matrix

    Returns
    -------

    implicit : matrix (kx1)
               steady state distribution

    Examples
    --------
    Taken from Kemeny and Snell.  Land of Oz example where the states are
    Rain, Nice and Snow, so there is 25 percent chance that if it
    rained in Oz today, it will snow tomorrow, while if it snowed today in
    Oz there is a 50 percent chance of snow again tomorrow and a 25
    percent chance of a nice day (nice, like when the witch with the monkeys
    is melting).

    >>> import numpy as np
    >>> p=np.matrix([[.5, .25, .25],[.5,0,.5],[.25,.25,.5]])
    >>> steady_state(p)
    matrix([[ 0.4],
            [ 0.2],
            [ 0.4]])

    Thus, the long run distribution for Oz is to have 40 percent of the
    days classified as Rain, 20 percent as Nice, and 40 percent as Snow
    (states are mutually exclusive).

    """
    # left eigenvectors of P are right eigenvectors of P transposed
    values, vectors = la.eig(P.transpose())

    # for a regular chain the dominant eigenvalue is 1; locate it
    dominant = values.tolist().index(max(values))

    # the matching eigenvector, normalized to sum to one, is the
    # stationary distribution
    pi = vectors[:, dominant]
    return pi / sum(pi)
+
+
def fmpt(P):
    """
    Matrix of first mean passage times for an ergodic transition
    probability matrix.

    Parameters
    ----------

    P    : matrix (kxk)
           an ergodic Markov transition probability matrix

    Returns
    -------

    M    : matrix (kxk)
           elements are the expected value for the number of intervals
           required for  a chain starting in state i to first enter state j
           If i=j then this is the recurrence time.

    Examples
    --------

    >>> import numpy as np
    >>> p=np.matrix([[.5, .25, .25],[.5,0,.5],[.25,.25,.5]])
    >>> fm=fmpt(p)
    >>> fm
    matrix([[ 2.5       ,  4.        ,  3.33333333],
            [ 2.66666667,  5.        ,  2.66666667],
            [ 3.33333333,  4.        ,  2.5       ]])


    Thus, if it is raining today in Oz we can expect a nice day to come
    along in another 4 days, on average, and snow to hit in 3.33 days. We can
    expect another rainy day in 2.5 days. If it is nice today in Oz, we would
    experience a change in the weather (either rain or snow) in 2.67 days from
    today. (That wicked witch can only die once so I reckon that is the
    ultimate absorbing state).

    Notes
    -----

    Uses formulation (and examples on p. 218) in Kemeny and Snell (1976).
    The `*` operations below are matrix products (P is expected to be a
    numpy.matrix).

    References
    ----------

    .. [1] Kemeny, John, G. and J. Laurie Snell (1976) Finite Markov
         Chains. Springer-Verlag. Berlin

    """
    pi = steady_state(P)
    k = pi.shape[0]
    # A: limiting matrix — every row equals the steady-state distribution
    A = np.zeros_like(P)
    for col in range(k):
        A[:, col] = pi
    A = A.transpose()
    eye = np.identity(k)
    # Z: fundamental matrix of the chain (Kemeny & Snell)
    Z = la.inv(eye - P + A)
    ones = np.ones_like(Z)
    diag_A = np.diag(A)
    diag_A = diag_A + (diag_A == 0)  # guard against division by zero
    D = np.diag(1. / diag_A)
    Zdg = np.diag(np.diag(Z))
    M = (eye - Z + ones * Zdg) * D
    return M
+
+
def var_fmpt(P):
    """
    Variances of first mean passage times for an ergodic transition
    probability matrix.

    Parameters
    ----------

    P    : matrix (kxk)
           an ergodic Markov transition probability matrix

    Returns
    -------

    implic : matrix (kxk)
             elements are the variances for the number of intervals
             required for  a chain starting in state i to first enter state j

    Examples
    --------

    >>> import numpy as np
    >>> p=np.matrix([[.5, .25, .25],[.5,0,.5],[.25,.25,.5]])
    >>> vfm=var_fmpt(p)
    >>> vfm
    matrix([[  5.58333333,  12.        ,   6.88888889],
            [  6.22222222,  12.        ,   6.22222222],
            [  6.88888889,  12.        ,   5.58333333]])

    Notes
    -----

    Uses formulation (and examples on p. 83) in Kemeny and Snell (1976).
    The `*` operations below are matrix products (P is expected to be a
    numpy.matrix).

    """
    # approximate the limiting matrix by a high matrix power
    A = P ** 1000
    k = A.shape[1]
    eye = np.identity(k)
    # Z: fundamental matrix of the chain
    Z = la.inv(eye - P + A)
    ones = np.ones_like(Z)
    D = np.diag(1. / np.diag(A))
    Zdg = np.diag(np.diag(Z))
    M = (eye - Z + ones * Zdg) * D
    ZM = Z * M
    ZMdg = np.diag(np.diag(ZM))
    W = M * (2 * Zdg * D - eye) + 2 * (ZM - ones * ZMdg)
    # second moment minus squared mean (elementwise)
    return W - np.multiply(M, M)
+
diff --git a/pysal/spatial_dynamics/interaction.ipynb b/pysal/spatial_dynamics/interaction.ipynb
new file mode 100644
index 0000000..cbf4296
--- /dev/null
+++ b/pysal/spatial_dynamics/interaction.ipynb
@@ -0,0 +1,4151 @@
+{
+ "metadata": {
+  "name": "interaction"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import interaction"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 256
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import pysal as ps"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 257
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "events = interaction.SpaceTimeEvents(ps.examples.get_path(\"burkitt\"), 'T')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 258
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_t = ps.cg.KDTree(events.time)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 259
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_t"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 260,
+       "text": [
+        "<scipy.spatial.kdtree.KDTree at 0x7fc57c1009d0>"
+       ]
+      }
+     ],
+     "prompt_number": 260
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_t = kd_t.query_pairs(5)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 261
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_t"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 262,
+       "text": [
+        "set([(118, 119),\n",
+        "     (175, 176),\n",
+        "     (69, 70),\n",
+        "     (174, 175),\n",
+        "     (107, 108),\n",
+        "     (128, 129),\n",
+        "     (30, 31),\n",
+        "     (150, 151),\n",
+        "     (99, 101),\n",
+        "     (113, 114),\n",
+        "     (25, 26),\n",
+        "     (70, 71),\n",
+        "     (168, 169),\n",
+        "     (84, 85),\n",
+        "     (43, 44),\n",
+        "     (150, 152),\n",
+        "     (153, 154),\n",
+        "     (56, 57),\n",
+        "     (100, 101),\n",
+        "     (125, 126),\n",
+        "     (99, 100),\n",
+        "     (98, 100),\n",
+        "     (139, 141),\n",
+        "     (115, 116),\n",
+        "     (81, 82),\n",
+        "     (116, 118),\n",
+        "     (158, 159),\n",
+        "     (139, 140),\n",
+        "     (117, 119),\n",
+        "     (184, 186),\n",
+        "     (162, 163),\n",
+        "     (41, 42),\n",
+        "     (178, 179),\n",
+        "     (46, 47),\n",
+        "     (167, 168),\n",
+        "     (122, 123),\n",
+        "     (51, 52),\n",
+        "     (48, 49),\n",
+        "     (137, 138),\n",
+        "     (116, 119),\n",
+        "     (102, 103),\n",
+        "     (117, 118),\n",
+        "     (35, 36),\n",
+        "     (69, 71),\n",
+        "     (151, 152),\n",
+        "     (185, 186),\n",
+        "     (116, 117),\n",
+        "     (140, 141),\n",
+        "     (184, 185),\n",
+        "     (98, 99),\n",
+        "     (143, 144),\n",
+        "     (155, 156)])"
+       ]
+      }
+     ],
+     "prompt_number": 262
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_s = ps.cg.KDTree(events.space)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 263
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_s = kd_s.query_pairs(20)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 264
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_s"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 265,
+       "text": [
+        "set([(143, 183),\n",
+        "     (18, 121),\n",
+        "     (93, 158),\n",
+        "     (38, 124),\n",
+        "     (21, 28),\n",
+        "     (152, 165),\n",
+        "     (118, 142),\n",
+        "     (28, 162),\n",
+        "     (56, 79),\n",
+        "     (1, 64),\n",
+        "     (57, 163),\n",
+        "     (94, 167),\n",
+        "     (59, 131),\n",
+        "     (69, 75),\n",
+        "     (119, 165),\n",
+        "     (47, 119),\n",
+        "     (106, 170),\n",
+        "     (33, 154),\n",
+        "     (52, 98),\n",
+        "     (126, 169),\n",
+        "     (36, 177),\n",
+        "     (38, 149),\n",
+        "     (15, 111),\n",
+        "     (49, 58),\n",
+        "     (87, 128),\n",
+        "     (65, 95),\n",
+        "     (35, 123),\n",
+        "     (127, 138),\n",
+        "     (37, 158),\n",
+        "     (114, 173),\n",
+        "     (116, 118),\n",
+        "     (8, 87),\n",
+        "     (46, 164),\n",
+        "     (85, 87),\n",
+        "     (32, 77),\n",
+        "     (88, 154),\n",
+        "     (109, 177),\n",
+        "     (147, 185),\n",
+        "     (77, 163),\n",
+        "     (21, 37),\n",
+        "     (4, 35),\n",
+        "     (5, 187),\n",
+        "     (25, 162),\n",
+        "     (28, 185),\n",
+        "     (104, 122),\n",
+        "     (16, 38),\n",
+        "     (54, 87),\n",
+        "     (57, 164),\n",
+        "     (94, 174),\n",
+        "     (4, 178),\n",
+        "     (79, 85),\n",
+        "     (27, 115),\n",
+        "     (47, 124),\n",
+        "     (51, 121),\n",
+        "     (16, 181),\n",
+        "     (35, 147),\n",
+        "     (71, 138),\n",
+        "     (130, 165),\n",
+        "     (82, 170),\n",
+        "     (45, 54),\n",
+        "     (137, 173),\n",
+        "     (139, 177),\n",
+        "     (30, 111),\n",
+        "     (14, 165),\n",
+        "     (125, 179),\n",
+        "     (1, 176),\n",
+        "     (28, 159),\n",
+        "     (31, 111),\n",
+        "     (25, 58),\n",
+        "     (45, 71),\n",
+        "     (138, 180),\n",
+        "     (63, 163),\n",
+        "     (109, 170),\n",
+        "     (146, 180),\n",
+        "     (38, 110),\n",
+        "     (21, 46),\n",
+        "     (15, 156),\n",
+        "     (5, 164),\n",
+        "     (44, 129),\n",
+        "     (47, 148),\n",
+        "     (157, 186),\n",
+        "     (128, 172),\n",
+        "     (148, 167),\n",
+        "     (4, 185),\n",
+        "     (5, 53),\n",
+        "     (34, 167),\n",
+        "     (119, 187),\n",
+        "     (36, 79),\n",
+        "     (179, 181),\n",
+        "     (35, 152),\n",
+        "     (37, 63),\n",
+        "     (58, 148),\n",
+        "     (24, 28),\n",
+        "     (136, 139),\n",
+        "     (45, 47),\n",
+        "     (155, 172),\n",
+        "     (66, 84),\n",
+        "     (31, 108),\n",
+        "     (14, 172),\n",
+        "     (37, 128),\n",
+        "     (99, 108),\n",
+        "     (16, 120),\n",
+        "     (62, 152),\n",
+        "     (65, 148),\n",
+        "     (140, 143),\n",
+        "     (166, 170),\n",
+        "     (5, 173),\n",
+        "     (25, 172),\n",
+        "     (63, 123),\n",
+        "     (56, 139),\n",
+        "     (148, 174),\n",
+        "     (26, 85),\n",
+        "     (51, 71),\n",
+        "     (16, 163),\n",
+        "     (16, 93),\n",
+        "     (109, 122),\n",
+        "     (56, 122),\n",
+        "     (20, 96),\n",
+        "     (58, 159),\n",
+        "     (6, 87),\n",
+        "     (136, 178),\n",
+        "     (14, 183),\n",
+        "     (108, 131),\n",
+        "     (127, 161),\n",
+        "     (17, 27),\n",
+        "     (8, 76),\n",
+        "     (46, 185),\n",
+        "     (138, 166),\n",
+        "     (85, 98),\n",
+        "     (51, 174),\n",
+        "     (87, 137),\n",
+        "     (146, 166),\n",
+        "     (166, 173),\n",
+        "     (112, 171),\n",
+        "     (5, 150),\n",
+        "     (43, 111),\n",
+        "     (20, 112),\n",
+        "     (11, 70),\n",
+        "     (33, 53),\n",
+        "     (56, 146),\n",
+        "     (148, 181),\n",
+        "     (131, 151),\n",
+        "     (5, 39),\n",
+        "     (8, 170),\n",
+        "     (7, 146),\n",
+        "     (119, 137),\n",
+        "     (103, 158),\n",
+        "     (160, 176),\n",
+        "     (109, 115),\n",
+        "     (19, 119),\n",
+        "     (150, 181),\n",
+        "     (136, 185),\n",
+        "     (135, 163),\n",
+        "     (37, 178),\n",
+        "     (162, 164),\n",
+        "     (24, 153),\n",
+        "     (65, 102),\n",
+        "     (28, 110),\n",
+        "     (138, 161),\n",
+        "     (49, 93),\n",
+        "     (51, 183),\n",
+        "     (90, 102),\n",
+        "     (146, 161),\n",
+        "     (21, 57),\n",
+        "     (4, 7),\n",
+        "     (5, 159),\n",
+        "     (26, 52),\n",
+        "     (7, 122),\n",
+        "     (48, 105),\n",
+        "     (36, 88),\n",
+        "     (21, 180),\n",
+        "     (94, 138),\n",
+        "     (131, 156),\n",
+        "     (7, 139),\n",
+        "     (27, 109),\n",
+        "     (106, 137),\n",
+        "     (51, 85),\n",
+        "     (35, 143),\n",
+        "     (18, 173),\n",
+        "     (76, 123),\n",
+        "     (6, 65),\n",
+        "     (88, 162),\n",
+        "     (10, 12),\n",
+        "     (141, 149),\n",
+        "     (128, 153),\n",
+        "     (3, 103),\n",
+        "     (4, 127),\n",
+        "     (23, 120),\n",
+        "     (8, 122),\n",
+        "     (120, 148),\n",
+        "     (140, 180),\n",
+        "     (87, 159),\n",
+        "     (146, 152),\n",
+        "     (57, 88),\n",
+        "     (76, 140),\n",
+        "     (22, 176),\n",
+        "     (43, 93),\n",
+        "     (44, 101),\n",
+        "     (3, 24),\n",
+        "     (46, 58),\n",
+        "     (177, 185),\n",
+        "     (124, 128),\n",
+        "     (61, 92),\n",
+        "     (64, 160),\n",
+        "     (27, 39),\n",
+        "     (49, 174),\n",
+        "     (51, 90),\n",
+        "     (16, 164),\n",
+        "     (35, 180),\n",
+        "     (18, 164),\n",
+        "     (112, 156),\n",
+        "     (76, 114),\n",
+        "     (24, 120),\n",
+        "     (25, 110),\n",
+        "     (6, 56),\n",
+        "     (63, 185),\n",
+        "     (85, 140),\n",
+        "     (159, 161),\n",
+        "     (14, 136),\n",
+        "     (91, 160),\n",
+        "     (37, 164),\n",
+        "     (40, 88),\n",
+        "     (39, 128),\n",
+        "     (128, 179),\n",
+        "     (85, 125),\n",
+        "     (87, 148),\n",
+        "     (43, 120),\n",
+        "     (73, 144),\n",
+        "     (146, 147),\n",
+        "     (76, 139),\n",
+        "     (44, 92),\n",
+        "     (7, 104),\n",
+        "     (47, 177),\n",
+        "     (177, 178),\n",
+        "     (124, 159),\n",
+        "     (52, 172),\n",
+        "     (73, 97),\n",
+        "     (36, 115),\n",
+        "     (5, 18),\n",
+        "     (47, 143),\n",
+        "     (119, 148),\n",
+        "     (107, 144),\n",
+        "     (88, 103),\n",
+        "     (0, 73),\n",
+        "     (19, 138),\n",
+        "     (58, 179),\n",
+        "     (25, 119),\n",
+        "     (79, 172),\n",
+        "     (27, 185),\n",
+        "     (85, 149),\n",
+        "     (50, 89),\n",
+        "     (14, 147),\n",
+        "     (18, 46),\n",
+        "     (110, 135),\n",
+        "     (38, 53),\n",
+        "     (28, 150),\n",
+        "     (4, 109),\n",
+        "     (95, 148),\n",
+        "     (8, 104),\n",
+        "     (120, 162),\n",
+        "     (28, 123),\n",
+        "     (122, 138),\n",
+        "     (140, 170),\n",
+        "     (51, 138),\n",
+        "     (87, 173),\n",
+        "     (90, 115),\n",
+        "     (145, 177),\n",
+        "     (18, 65),\n",
+        "     (76, 130),\n",
+        "     (5, 138),\n",
+        "     (99, 182),\n",
+        "     (157, 179),\n",
+        "     (53, 158),\n",
+        "     (36, 122),\n",
+        "     (128, 164),\n",
+        "     (78, 142),\n",
+        "     (6, 155),\n",
+        "     (172, 183),\n",
+        "     (10, 86),\n",
+        "     (52, 90),\n",
+        "     (18, 150),\n",
+        "     (54, 183),\n",
+        "     (19, 147),\n",
+        "     (40, 183),\n",
+        "     (58, 186),\n",
+        "     (82, 123),\n",
+        "     (50, 80),\n",
+        "     (88, 163),\n",
+        "     (14, 154),\n",
+        "     (35, 51),\n",
+        "     (93, 174),\n",
+        "     (23, 135),\n",
+        "     (64, 92),\n",
+        "     (104, 137),\n",
+        "     (36, 147),\n",
+        "     (73, 130),\n",
+        "     (57, 115),\n",
+        "     (95, 186),\n",
+        "     (98, 138),\n",
+        "     (27, 138),\n",
+        "     (167, 187),\n",
+        "     (47, 71),\n",
+        "     (52, 178),\n",
+        "     (128, 157),\n",
+        "     (37, 125),\n",
+        "     (93, 112),\n",
+        "     (94, 102),\n",
+        "     (4, 138),\n",
+        "     (98, 125),\n",
+        "     (6, 146),\n",
+        "     (153, 174),\n",
+        "     (63, 179),\n",
+        "     (47, 52),\n",
+        "     (65, 71),\n",
+        "     (125, 138),\n",
+        "     (6, 128),\n",
+        "     (19, 152),\n",
+        "     (3, 175),\n",
+        "     (25, 65),\n",
+        "     (6, 45),\n",
+        "     (18, 90),\n",
+        "     (50, 75),\n",
+        "     (88, 170),\n",
+        "     (161, 180),\n",
+        "     (35, 56),\n",
+        "     (75, 81),\n",
+        "     (38, 39),\n",
+        "     (3, 67),\n",
+        "     (42, 170),\n",
+        "     (45, 143),\n",
+        "     (3, 32),\n",
+        "     (53, 127),\n",
+        "     (114, 127),\n",
+        "     (115, 119),\n",
+        "     (7, 87),\n",
+        "     (104, 185),\n",
+        "     (52, 185),\n",
+        "     (36, 104),\n",
+        "     (37, 102),\n",
+        "     (42, 115),\n",
+        "     (6, 149),\n",
+        "     (62, 118),\n",
+        "     (102, 153),\n",
+        "     (30, 31),\n",
+        "     (31, 151),\n",
+        "     (112, 184),\n",
+        "     (21, 141),\n",
+        "     (77, 96),\n",
+        "     (6, 36),\n",
+        "     (45, 119),\n",
+        "     (114, 139),\n",
+        "     (3, 20),\n",
+        "     (18, 27),\n",
+        "     (38, 94),\n",
+        "     (39, 164),\n",
+        "     (97, 165),\n",
+        "     (23, 149),\n",
+        "     (32, 131),\n",
+        "     (34, 186),\n",
+        "     (70, 144),\n",
+        "     (79, 140),\n",
+        "     (46, 82),\n",
+        "     (7, 76),\n",
+        "     (47, 85),\n",
+        "     (123, 140),\n",
+        "     (36, 71),\n",
+        "     (52, 128),\n",
+        "     (17, 98),\n",
+        "     (71, 179),\n",
+        "     (28, 36),\n",
+        "     (6, 140),\n",
+        "     (27, 98),\n",
+        "     (47, 58),\n",
+        "     (123, 125),\n",
+        "     (178, 183),\n",
+        "     (34, 93),\n",
+        "     (54, 152),\n",
+        "     (19, 166),\n",
+        "     (40, 172),\n",
+        "     (114, 143),\n",
+        "     (65, 164),\n",
+        "     (28, 40),\n",
+        "     (32, 43),\n",
+        "     (107, 116),\n",
+        "     (35, 38),\n",
+        "     (110, 163),\n",
+        "     (39, 93),\n",
+        "     (65, 85),\n",
+        "     (131, 168),\n",
+        "     (148, 162),\n",
+        "     (53, 65),\n",
+        "     (76, 166),\n",
+        "     (149, 163),\n",
+        "     (8, 147),\n",
+        "     (83, 108),\n",
+        "     (121, 161),\n",
+        "     (47, 82),\n",
+        "     (106, 183),\n",
+        "     (24, 51),\n",
+        "     (5, 127),\n",
+        "     (153, 158),\n",
+        "     (20, 67),\n",
+        "     (137, 143),\n",
+        "     (27, 71),\n",
+        "     (87, 104),\n",
+        "     (125, 149),\n",
+        "     (127, 145),\n",
+        "     (54, 147),\n",
+        "     (112, 182),\n",
+        "     (21, 159),\n",
+        "     (24, 162),\n",
+        "     (23, 58),\n",
+        "     (79, 137),\n",
+        "     (109, 114),\n",
+        "     (28, 39),\n",
+        "     (85, 178),\n",
+        "     (35, 47),\n",
+        "     (39, 90),\n",
+        "     (116, 130),\n",
+        "     (7, 45),\n",
+        "     (29, 64),\n",
+        "     (34, 141),\n",
+        "     (70, 130),\n",
+        "     (36, 54),\n",
+        "     (94, 179),\n",
+        "     (8, 45),\n",
+        "     (149, 172),\n",
+        "     (8, 154),\n",
+        "     (13, 50),\n",
+        "     (121, 170),\n",
+        "     (123, 154),\n",
+        "     (52, 150),\n",
+        "     (71, 161),\n",
+        "     (19, 71),\n",
+        "     (24, 58),\n",
+        "     (62, 107),\n",
+        "     (30, 112),\n",
+        "     (125, 174),\n",
+        "     (35, 71),\n",
+        "     (19, 180),\n",
+        "     (40, 154),\n",
+        "     (59, 93),\n",
+        "     (23, 51),\n",
+        "     (138, 145),\n",
+        "     (85, 187),\n",
+        "     (14, 121),\n",
+        "     (147, 165),\n",
+        "     (28, 173),\n",
+        "     (32, 168),\n",
+        "     (96, 99),\n",
+        "     (70, 133),\n",
+        "     (83, 184),\n",
+        "     (149, 181),\n",
+        "     (79, 121),\n",
+        "     (26, 119),\n",
+        "     (7, 187),\n",
+        "     (121, 179),\n",
+        "     (123, 147),\n",
+        "     (106, 185),\n",
+        "     (16, 65),\n",
+        "     (36, 140),\n",
+        "     (19, 76),\n",
+        "     (51, 128),\n",
+        "     (88, 173),\n",
+        "     (27, 28),\n",
+        "     (137, 185),\n",
+        "     (66, 113),\n",
+        "     (125, 167),\n",
+        "     (15, 184),\n",
+        "     (59, 151),\n",
+        "     (5, 174),\n",
+        "     (21, 161),\n",
+        "     (28, 53),\n",
+        "     (138, 152),\n",
+        "     (21, 174),\n",
+        "     (18, 127),\n",
+        "     (147, 170),\n",
+        "     (4, 62),\n",
+        "     (116, 144),\n",
+        "     (7, 35),\n",
+        "     (28, 164),\n",
+        "     (122, 187),\n",
+        "     (5, 16),\n",
+        "     (32, 175),\n",
+        "     (34, 159),\n",
+        "     (57, 153),\n",
+        "     (53, 54),\n",
+        "     (8, 136),\n",
+        "     (121, 180),\n",
+        "     (36, 139),\n",
+        "     (19, 85),\n",
+        "     (20, 77),\n",
+        "     (6, 104),\n",
+        "     (82, 185),\n",
+        "     (34, 95),\n",
+        "     (31, 112),\n",
+        "     (87, 119),\n",
+        "     (127, 140),\n",
+        "     (37, 148),\n",
+        "     (30, 184),\n",
+        "     (23, 65),\n",
+        "     (65, 128),\n",
+        "     (46, 162),\n",
+        "     (163, 186),\n",
+        "     (88, 148),\n",
+        "     (109, 183),\n",
+        "     (38, 125),\n",
+        "     (21, 27),\n",
+        "     (152, 166),\n",
+        "     (58, 149),\n",
+        "     (33, 40),\n",
+        "     (34, 150),\n",
+        "     (57, 162),\n",
+        "     (94, 164),\n",
+        "     (4, 180),\n",
+        "     (8, 143),\n",
+        "     (154, 178),\n",
+        "     (46, 124),\n",
+        "     (123, 161),\n",
+        "     (88, 125),\n",
+        "     (51, 115),\n",
+        "     (71, 140),\n",
+        "     (36, 178),\n",
+        "     (150, 174),\n",
+        "     (63, 150),\n",
+        "     (139, 187),\n",
+        "     (125, 185),\n",
+        "     (35, 122),\n",
+        "     (39, 57),\n",
+        "     (40, 143),\n",
+        "     (114, 170),\n",
+        "     (23, 94),\n",
+        "     (65, 153),\n",
+        "     (140, 154),\n",
+        "     (57, 58),\n",
+        "     (21, 36),\n",
+        "     (115, 185),\n",
+        "     (152, 173),\n",
+        "     (48, 78),\n",
+        "     (104, 123),\n",
+        "     (12, 92),\n",
+        "     (16, 39),\n",
+        "     (83, 99),\n",
+        "     (53, 174),\n",
+        "     (148, 153),\n",
+        "     (121, 148),\n",
+        "     (98, 178),\n",
+        "     (117, 129),\n",
+        "     (7, 166),\n",
+        "     (84, 129),\n",
+        "     (47, 127),\n",
+        "     (123, 166),\n",
+        "     (106, 146),\n",
+        "     (33, 146),\n",
+        "     (123, 136),\n",
+        "     (35, 146),\n",
+        "     (56, 109),\n",
+        "     (37, 53),\n",
+        "     (38, 141),\n",
+        "     (150, 161),\n",
+        "     (63, 159),\n",
+        "     (137, 172),\n",
+        "     (154, 180),\n",
+        "     (30, 108),\n",
+        "     (14, 170),\n",
+        "     (125, 178),\n",
+        "     (15, 151),\n",
+        "     (127, 178),\n",
+        "     (111, 131),\n",
+        "     (114, 165),\n",
+        "     (25, 57),\n",
+        "     (46, 172),\n",
+        "     (49, 131),\n",
+        "     (140, 145),\n",
+        "     (14, 85),\n",
+        "     (90, 138),\n",
+        "     (18, 104),\n",
+        "     (88, 123),\n",
+        "     (152, 180),\n",
+        "     (43, 112),\n",
+        "     (99, 111),\n",
+        "     (7, 14),\n",
+        "     (63, 125),\n",
+        "     (27, 94),\n",
+        "     (105, 116),\n",
+        "     (124, 125),\n",
+        "     (39, 167),\n",
+        "     (57, 172),\n",
+        "     (94, 150),\n",
+        "     (41, 42),\n",
+        "     (5, 52),\n",
+        "     (51, 65),\n",
+        "     (179, 180),\n",
+        "     (35, 155),\n",
+        "     (19, 104),\n",
+        "     (76, 79),\n",
+        "     (133, 144),\n",
+        "     (136, 140),\n",
+        "     (102, 161),\n",
+        "     (158, 168),\n",
+        "     (14, 173),\n",
+        "     (35, 104),\n",
+        "     (127, 187),\n",
+        "     (40, 125),\n",
+        "     (39, 159),\n",
+        "     (85, 88),\n",
+        "     (150, 168),\n",
+        "     (141, 161),\n",
+        "     (70, 73),\n",
+        "     (71, 85),\n",
+        "     (146, 172),\n",
+        "     (20, 184),\n",
+        "     (21, 54),\n",
+        "     (5, 172),\n",
+        "     (118, 144),\n",
+        "     (33, 51),\n",
+        "     (16, 21),\n",
+        "     (34, 65),\n",
+        "     (56, 140),\n",
+        "     (94, 153),\n",
+        "     (26, 82),\n",
+        "     (123, 180),\n",
+        "     (33, 172),\n",
+        "     (159, 179),\n",
+        "     (17, 42),\n",
+        "     (109, 121),\n",
+        "     (56, 123),\n",
+        "     (37, 39),\n",
+        "     (3, 131),\n",
+        "     (150, 179),\n",
+        "     (63, 141),\n",
+        "     (12, 176),\n",
+        "     (21, 138),\n",
+        "     (43, 168),\n",
+        "     (183, 185),\n",
+        "     (39, 148),\n",
+        "     (120, 135),\n",
+        "     (121, 127),\n",
+        "     (84, 117),\n",
+        "     (32, 83),\n",
+        "     (82, 154),\n",
+        "     (71, 82),\n",
+        "     (109, 155),\n",
+        "     (21, 63),\n",
+        "     (5, 149),\n",
+        "     (43, 110),\n",
+        "     (26, 42),\n",
+        "     (33, 52),\n",
+        "     (21, 181),\n",
+        "     (53, 185),\n",
+        "     (56, 147),\n",
+        "     (94, 128),\n",
+        "     (5, 38),\n",
+        "     (119, 136),\n",
+        "     (27, 106),\n",
+        "     (103, 153),\n",
+        "     (106, 143),\n",
+        "     (52, 71),\n",
+        "     (35, 137),\n",
+        "     (18, 179),\n",
+        "     (58, 167),\n",
+        "     (13, 75),\n",
+        "     (6, 79),\n",
+        "     (82, 148),\n",
+        "     (63, 138),\n",
+        "     (40, 82),\n",
+        "     (175, 182),\n",
+        "     (31, 93),\n",
+        "     (43, 59),\n",
+        "     (111, 158),\n",
+        "     (3, 31),\n",
+        "     (45, 177),\n",
+        "     (143, 147),\n",
+        "     (18, 85),\n",
+        "     (74, 182),\n",
+        "     (21, 56),\n",
+        "     (80, 81),\n",
+        "     (43, 103),\n",
+        "     (26, 53),\n",
+        "     (28, 158),\n",
+        "     (12, 64),\n",
+        "     (34, 181),\n",
+        "     (3, 77),\n",
+        "     (73, 118),\n",
+        "     (37, 88),\n",
+        "     (58, 63),\n",
+        "     (5, 47),\n",
+        "     (6, 183),\n",
+        "     (7, 138),\n",
+        "     (27, 36),\n",
+        "     (103, 150),\n",
+        "     (18, 170),\n",
+        "     (19, 127),\n",
+        "     (76, 116),\n",
+        "     (18, 187),\n",
+        "     (21, 38),\n",
+        "     (164, 172),\n",
+        "     (124, 149),\n",
+        "     (38, 56),\n",
+        "     (8, 123),\n",
+        "     (120, 149),\n",
+        "     (28, 102),\n",
+        "     (85, 115),\n",
+        "     (33, 85),\n",
+        "     (143, 152),\n",
+        "     (16, 167),\n",
+        "     (1, 12),\n",
+        "     (3, 135),\n",
+        "     (21, 65),\n",
+        "     (115, 154),\n",
+        "     (25, 134),\n",
+        "     (7, 114),\n",
+        "     (173, 187),\n",
+        "     (28, 149),\n",
+        "     (50, 134),\n",
+        "     (56, 161),\n",
+        "     (37, 65),\n",
+        "     (5, 40),\n",
+        "     (104, 121),\n",
+        "     (30, 32),\n",
+        "     (52, 85),\n",
+        "     (35, 183),\n",
+        "     (79, 166),\n",
+        "     (101, 117),\n",
+        "     (85, 139),\n",
+        "     (31, 67),\n",
+        "     (14, 137),\n",
+        "     (144, 152),\n",
+        "     (18, 52),\n",
+        "     (6, 40),\n",
+        "     (38, 51),\n",
+        "     (3, 111),\n",
+        "     (97, 152),\n",
+        "     (149, 167),\n",
+        "     (18, 82),\n",
+        "     (65, 119),\n",
+        "     (28, 125),\n",
+        "     (85, 124),\n",
+        "     (140, 172),\n",
+        "     (14, 56),\n",
+        "     (18, 71),\n",
+        "     (165, 180),\n",
+        "     (58, 94),\n",
+        "     (5, 128),\n",
+        "     (173, 180),\n",
+        "     (51, 53),\n",
+        "     (53, 148),\n",
+        "     (36, 124),\n",
+        "     (121, 128),\n",
+        "     (51, 57),\n",
+        "     (6, 161),\n",
+        "     (172, 185),\n",
+        "     (30, 43),\n",
+        "     (39, 149),\n",
+        "     (28, 58),\n",
+        "     (76, 106),\n",
+        "     (23, 24),\n",
+        "     (63, 161),\n",
+        "     (83, 168),\n",
+        "     (85, 148),\n",
+        "     (35, 128),\n",
+        "     (18, 47),\n",
+        "     (37, 172),\n",
+        "     (97, 145),\n",
+        "     (42, 185),\n",
+        "     (120, 163),\n",
+        "     (47, 125),\n",
+        "     (122, 139),\n",
+        "     (87, 172),\n",
+        "     (44, 84),\n",
+        "     (27, 180),\n",
+        "     (47, 185),\n",
+        "     (30, 131),\n",
+        "     (51, 58),\n",
+        "     (52, 180),\n",
+        "     (15, 32),\n",
+        "     (73, 105),\n",
+        "     (36, 123),\n",
+        "     (77, 156),\n",
+        "     (97, 115),\n",
+        "     (6, 152),\n",
+        "     (27, 37),\n",
+        "     (137, 146),\n",
+        "     (16, 135),\n",
+        "     (54, 180),\n",
+        "     (19, 146),\n",
+        "     (76, 97),\n",
+        "     (79, 180),\n",
+        "     (63, 174),\n",
+        "     (49, 65),\n",
+        "     (50, 81),\n",
+        "     (106, 114),\n",
+        "     (14, 155),\n",
+        "     (18, 38),\n",
+        "     (38, 45),\n",
+        "     (3, 93),\n",
+        "     (97, 170),\n",
+        "     (51, 149),\n",
+        "     (28, 115),\n",
+        "     (122, 146),\n",
+        "     (67, 184),\n",
+        "     (150, 163),\n",
+        "     (51, 146),\n",
+        "     (90, 123),\n",
+        "     (54, 109),\n",
+        "     (165, 166),\n",
+        "     (25, 153),\n",
+        "     (21, 150),\n",
+        "     (124, 174),\n",
+        "     (52, 179),\n",
+        "     (36, 98),\n",
+        "     (94, 103),\n",
+        "     (4, 139),\n",
+        "     (102, 168),\n",
+        "     (6, 147),\n",
+        "     (62, 76),\n",
+        "     (137, 155),\n",
+        "     (103, 186),\n",
+        "     (35, 170),\n",
+        "     (54, 143),\n",
+        "     (19, 155),\n",
+        "     (114, 154),\n",
+        "     (149, 150),\n",
+        "     (23, 46),\n",
+        "     (82, 115),\n",
+        "     (85, 166),\n",
+        "     (36, 57),\n",
+        "     (141, 159),\n",
+        "     (18, 33),\n",
+        "     (110, 150),\n",
+        "     (75, 80),\n",
+        "     (39, 174),\n",
+        "     (23, 159),\n",
+        "     (157, 163),\n",
+        "     (57, 123),\n",
+        "     (76, 177),\n",
+        "     (114, 124),\n",
+        "     (47, 65),\n",
+        "     (46, 87),\n",
+        "     (47, 79),\n",
+        "     (54, 56),\n",
+        "     (16, 102),\n",
+        "     (128, 149),\n",
+        "     (2, 9),\n",
+        "     (94, 110),\n",
+        "     (24, 38),\n",
+        "     (6, 138),\n",
+        "     (27, 51),\n",
+        "     (154, 172),\n",
+        "     (122, 127),\n",
+        "     (103, 179),\n",
+        "     (34, 67),\n",
+        "     (59, 175),\n",
+        "     (37, 163),\n",
+        "     (23, 39),\n",
+        "     (65, 162),\n",
+        "     (88, 178),\n",
+        "     (18, 24),\n",
+        "     (110, 153),\n",
+        "     (19, 45),\n",
+        "     (40, 53),\n",
+        "     (88, 115),\n",
+        "     (23, 148),\n",
+        "     (138, 183),\n",
+        "     (28, 65),\n",
+        "     (3, 112),\n",
+        "     (87, 187),\n",
+        "     (53, 71),\n",
+        "     (54, 127),\n",
+        "     (114, 119),\n",
+        "     (115, 127),\n",
+        "     (79, 109),\n",
+        "     (26, 27),\n",
+        "     (7, 79),\n",
+        "     (46, 94),\n",
+        "     (123, 143),\n",
+        "     (24, 128),\n",
+        "     (17, 97),\n",
+        "     (71, 178),\n",
+        "     (19, 56),\n",
+        "     (77, 135),\n",
+        "     (119, 123),\n",
+        "     (66, 101),\n",
+        "     (75, 89),\n",
+        "     (103, 168),\n",
+        "     (178, 180),\n",
+        "     (18, 128),\n",
+        "     (34, 135),\n",
+        "     (32, 120),\n",
+        "     (40, 173),\n",
+        "     (114, 140),\n",
+        "     (60, 176),\n",
+        "     (25, 82),\n",
+        "     (6, 28),\n",
+        "     (18, 178),\n",
+        "     (36, 103),\n",
+        "     (88, 185),\n",
+        "     (166, 178),\n",
+        "     (91, 132),\n",
+        "     (93, 184),\n",
+        "     (28, 88),\n",
+        "     (163, 168),\n",
+        "     (58, 125),\n",
+        "     (21, 119),\n",
+        "     (149, 162),\n",
+        "     (170, 172),\n",
+        "     (79, 106),\n",
+        "     (26, 98),\n",
+        "     (83, 111),\n",
+        "     (106, 180),\n",
+        "     (124, 187),\n",
+        "     (17, 90),\n",
+        "     (36, 159),\n",
+        "     (112, 131),\n",
+        "     (42, 98),\n",
+        "     (61, 113),\n",
+        "     (62, 97),\n",
+        "     (103, 161),\n",
+        "     (52, 63),\n",
+        "     (40, 148),\n",
+        "     (21, 158),\n",
+        "     (24, 163),\n",
+        "     (23, 53),\n",
+        "     (79, 136),\n",
+        "     (45, 104),\n",
+        "     (85, 177),\n",
+        "     (88, 128),\n",
+        "     (14, 119),\n",
+        "     (90, 172),\n",
+        "     (35, 46),\n",
+        "     (20, 151),\n",
+        "     (39, 85),\n",
+        "     (11, 41),\n",
+        "     (8, 155),\n",
+        "     (154, 166),\n",
+        "     (46, 103),\n",
+        "     (42, 109),\n",
+        "     (88, 172),\n",
+        "     (6, 127),\n",
+        "     (137, 183),\n",
+        "     (19, 183),\n",
+        "     (21, 167),\n",
+        "     (79, 145),\n",
+        "     (65, 181),\n",
+        "     (28, 63),\n",
+        "     (70, 107),\n",
+        "     (71, 123),\n",
+        "     (113, 129),\n",
+        "     (39, 82),\n",
+        "     (23, 163),\n",
+        "     (8, 51),\n",
+        "     (63, 88),\n",
+        "     (47, 122),\n",
+        "     (122, 177),\n",
+        "     (120, 141),\n",
+        "     (16, 51),\n",
+        "     (72, 80),\n",
+        "     (53, 82),\n",
+        "     (61, 84),\n",
+        "     (21, 121),\n",
+        "     (149, 180),\n",
+        "     (52, 120),\n",
+        "     (121, 178),\n",
+        "     (123, 146),\n",
+        "     (106, 166),\n",
+        "     (52, 158),\n",
+        "     (19, 79),\n",
+        "     (38, 153),\n",
+        "     (150, 157),\n",
+        "     (136, 145),\n",
+        "     (9, 101),\n",
+        "     (139, 140),\n",
+        "     (82, 159),\n",
+        "     (56, 114),\n",
+        "     (34, 103),\n",
+        "     (35, 79),\n",
+        "     (114, 185),\n",
+        "     (25, 37),\n",
+        "     (49, 159),\n",
+        "     (32, 49),\n",
+        "     (16, 128),\n",
+        "     (135, 186),\n",
+        "     (18, 124),\n",
+        "     (110, 181),\n",
+        "     (147, 173),\n",
+        "     (115, 170),\n",
+        "     (116, 145),\n",
+        "     (14, 146),\n",
+        "     (63, 65),\n",
+        "     (150, 186),\n",
+        "     (36, 37),\n",
+        "     (2, 117),\n",
+        "     (94, 162),\n",
+        "     (115, 146),\n",
+        "     (82, 164),\n",
+        "     (8, 137),\n",
+        "     (121, 187),\n",
+        "     (47, 104),\n",
+        "     (88, 119),\n",
+        "     (51, 109),\n",
+        "     (52, 57),\n",
+        "     (36, 180),\n",
+        "     (77, 171),\n",
+        "     (136, 152),\n",
+        "     (30, 99),\n",
+        "     (127, 143),\n",
+        "     (46, 51),\n",
+        "     (25, 46),\n",
+        "     (46, 163),\n",
+        "     (172, 187),\n",
+        "     (88, 149),\n",
+        "     (141, 181),\n",
+        "     (90, 185),\n",
+        "     (143, 177),\n",
+        "     (18, 119),\n",
+        "     (147, 178),\n",
+        "     (21, 26),\n",
+        "     (95, 103),\n",
+        "     (116, 152),\n",
+        "     (47, 128),\n",
+        "     (53, 164),\n",
+        "     (92, 129),\n",
+        "     (79, 138),\n",
+        "     (46, 125),\n",
+        "     (71, 143),\n",
+        "     (36, 179),\n",
+        "     (58, 128),\n",
+        "     (8, 140),\n",
+        "     (5, 82),\n",
+        "     (27, 33),\n",
+        "     (137, 170),\n",
+        "     (154, 183),\n",
+        "     (31, 120),\n",
+        "     (127, 180),\n",
+        "     (21, 178),\n",
+        "     (59, 99),\n",
+        "     (138, 139),\n",
+        "     (140, 155),\n",
+        "     (65, 149),\n",
+        "     (141, 174),\n",
+        "     (103, 120),\n",
+        "     (71, 102),\n",
+        "     (90, 170),\n",
+        "     (146, 187),\n",
+        "     (147, 187),\n",
+        "     (20, 171),\n",
+        "     (21, 35),\n",
+        "     (8, 40),\n",
+        "     (63, 119),\n",
+        "     (47, 137),\n",
+        "     (120, 125),\n",
+        "     (16, 24),\n",
+        "     (53, 173),\n",
+        "     (54, 85),\n",
+        "     (57, 170),\n",
+        "     (94, 172),\n",
+        "     (79, 87),\n",
+        "     (8, 183),\n",
+        "     (104, 139),\n",
+        "     (123, 185),\n",
+        "     (106, 147),\n",
+        "     (51, 123),\n",
+        "     (83, 163),\n",
+        "     (37, 52),\n",
+        "     (77, 93),\n",
+        "     (63, 158),\n",
+        "     (10, 22),\n",
+        "     (92, 113),\n",
+        "     (77, 168),\n",
+        "     (40, 119),\n",
+        "     (21, 187),\n",
+        "     (138, 178),\n",
+        "     (85, 94),\n",
+        "     (140, 146),\n",
+        "     (51, 162),\n",
+        "     (141, 167),\n",
+        "     (3, 158),\n",
+        "     (146, 178),\n",
+        "     (166, 177),\n",
+        "     (8, 47),\n",
+        "     (48, 70),\n",
+        "     (29, 60),\n",
+        "     (58, 161),\n",
+        "     (34, 169),\n",
+        "     (36, 82),\n",
+        "     (57, 179),\n",
+        "     (148, 161),\n",
+        "     (5, 51),\n",
+        "     (106, 154),\n",
+        "     (33, 170),\n",
+        "     (16, 174),\n",
+        "     (109, 127),\n",
+        "     (36, 161),\n",
+        "     (6, 82),\n",
+        "     (82, 163),\n",
+        "     (14, 178),\n",
+        "     (155, 179),\n",
+        "     (39, 158),\n",
+        "     (24, 141),\n",
+        "     (23, 111),\n",
+        "     (8, 71),\n",
+        "     (64, 100),\n",
+        "     (46, 180),\n",
+        "     (85, 103),\n",
+        "     (32, 93),\n",
+        "     (90, 146),\n",
+        "     (146, 173),\n",
+        "     (38, 103),\n",
+        "     (21, 53),\n",
+        "     (4, 19),\n",
+        "     (25, 178),\n",
+        "     (99, 151),\n",
+        "     (104, 106),\n",
+        "     (36, 94),\n",
+        "     (37, 85),\n",
+        "     (57, 180),\n",
+        "     (94, 158),\n",
+        "     (67, 112),\n",
+        "     (153, 163),\n",
+        "     (8, 165),\n",
+        "     (119, 178),\n",
+        "     (156, 184),\n",
+        "     (123, 183),\n",
+        "     (23, 93),\n",
+        "     (52, 121),\n",
+        "     (17, 41),\n",
+        "     (18, 185),\n",
+        "     (56, 124),\n",
+        "     (37, 38),\n",
+        "     (58, 157),\n",
+        "     (6, 85),\n",
+        "     (136, 180),\n",
+        "     (135, 164),\n",
+        "     (24, 148),\n",
+        "     (45, 183),\n",
+        "     (33, 90),\n",
+        "     (87, 139),\n",
+        "     (109, 154),\n",
+        "     (76, 152),\n",
+        "     (5, 148),\n",
+        "     (4, 140),\n",
+        "     (44, 113),\n",
+        "     (7, 127),\n",
+        "     (63, 98),\n",
+        "     (48, 116),\n",
+        "     (47, 164),\n",
+        "     (37, 94),\n",
+        "     (5, 37),\n",
+        "     (153, 164),\n",
+        "     (8, 172),\n",
+        "     (7, 140),\n",
+        "     (10, 176),\n",
+        "     (106, 140),\n",
+        "     (33, 164),\n",
+        "     (16, 162),\n",
+        "     (35, 136),\n",
+        "     (19, 121),\n",
+        "     (58, 164),\n",
+        "     (93, 153),\n",
+        "     (25, 98),\n",
+        "     (82, 149),\n",
+        "     (63, 181),\n",
+        "     (139, 166),\n",
+        "     (159, 167),\n",
+        "     (36, 173),\n",
+        "     (39, 125),\n",
+        "     (111, 153),\n",
+        "     (3, 96),\n",
+        "     (4, 114),\n",
+        "     (23, 125),\n",
+        "     (28, 104),\n",
+        "     (49, 181),\n",
+        "     (51, 185),\n",
+        "     (14, 47),\n",
+        "     (34, 49),\n",
+        "     (143, 146),\n",
+        "     (109, 147),\n",
+        "     (1, 10),\n",
+        "     (57, 85),\n",
+        "     (21, 71),\n",
+        "     (48, 107),\n",
+        "     (47, 173),\n",
+        "     (30, 175),\n",
+        "     (30, 49),\n",
+        "     (56, 155),\n",
+        "     (37, 71),\n",
+        "     (4, 144),\n",
+        "     (5, 46),\n",
+        "     (6, 180),\n",
+        "     (51, 87),\n",
+        "     (35, 177),\n",
+        "     (37, 40),\n",
+        "     (38, 174),\n",
+        "     (26, 63),\n",
+        "     (6, 71),\n",
+        "     (63, 178),\n",
+        "     (157, 169),\n",
+        "     (161, 174),\n",
+        "     (18, 58),\n",
+        "     (37, 185),\n",
+        "     (38, 57),\n",
+        "     (28, 148),\n",
+        "     (4, 121),\n",
+        "     (26, 138),\n",
+        "     (65, 125),\n",
+        "     (28, 103),\n",
+        "     (98, 154),\n",
+        "     (85, 114),\n",
+        "     (14, 54),\n",
+        "     (143, 155),\n",
+        "     (109, 140),\n",
+        "     (57, 94),\n",
+        "     (4, 8),\n",
+        "     (7, 109),\n",
+        "     (46, 56),\n",
+        "     (47, 170),\n",
+        "     (157, 181),\n",
+        "     (52, 167),\n",
+        "     (53, 138),\n",
+        "     (93, 99),\n",
+        "     (131, 135),\n",
+        "     (88, 164),\n",
+        "     (9, 44),\n",
+        "     (111, 151),\n",
+        "     (161, 178),\n",
+        "     (18, 162),\n",
+        "     (54, 187),\n",
+        "     (38, 161),\n",
+        "     (31, 184),\n",
+        "     (20, 30),\n",
+        "     (85, 138),\n",
+        "     (161, 167),\n",
+        "     (18, 53),\n",
+        "     (37, 162),\n",
+        "     (40, 90),\n",
+        "     (62, 130),\n",
+        "     (120, 157),\n",
+        "     (40, 71),\n",
+        "     (85, 123),\n",
+        "     (140, 173),\n",
+        "     (33, 109),\n",
+        "     (159, 174),\n",
+        "     (57, 103),\n",
+        "     (58, 95),\n",
+        "     (7, 106),\n",
+        "     (47, 179),\n",
+        "     (51, 52),\n",
+        "     (52, 174),\n",
+        "     (36, 125),\n",
+        "     (2, 61),\n",
+        "     (78, 133),\n",
+        "     (6, 166),\n",
+        "     (103, 135),\n",
+        "     (54, 178),\n",
+        "     (19, 140),\n",
+        "     (40, 178),\n",
+        "     (24, 65),\n",
+        "     (43, 175),\n",
+        "     (10, 60),\n",
+        "     (85, 147),\n",
+        "     (14, 145),\n",
+        "     (180, 185),\n",
+        "     (40, 65),\n",
+        "     (97, 144),\n",
+        "     (8, 106),\n",
+        "     (38, 95),\n",
+        "     (45, 155),\n",
+        "     (122, 136),\n",
+        "     (32, 112),\n",
+        "     (33, 102),\n",
+        "     (145, 183),\n",
+        "     (1, 29),\n",
+        "     (15, 67),\n",
+        "     (58, 102),\n",
+        "     (21, 82),\n",
+        "     (22, 64),\n",
+        "     (43, 77),\n",
+        "     (99, 184),\n",
+        "     (121, 139),\n",
+        "     (63, 146),\n",
+        "     (58, 168),\n",
+        "     (52, 181),\n",
+        "     (19, 36),\n",
+        "     (94, 125),\n",
+        "     (64, 176),\n",
+        "     (172, 177),\n",
+        "     (137, 145),\n",
+        "     (31, 163),\n",
+        "     (18, 148),\n",
+        "     (40, 185),\n",
+        "     (130, 144),\n",
+        "     (77, 108),\n",
+        "     (43, 164),\n",
+        "     (79, 183),\n",
+        "     (106, 115),\n",
+        "     (14, 152),\n",
+        "     (35, 53),\n",
+        "     (18, 39),\n",
+        "     (33, 71),\n",
+        "     (122, 147),\n",
+        "     (157, 164),\n",
+        "     (33, 127),\n",
+        "     (1, 22),\n",
+        "     (95, 164),\n",
+        "     (121, 140),\n",
+        "     (104, 180),\n",
+        "     (96, 112),\n",
+        "     (56, 183),\n",
+        "     (55, 129),\n",
+        "     (61, 101),\n",
+        "     (137, 154),\n",
+        "     (47, 54),\n",
+        "     (157, 159),\n",
+        "     (31, 168),\n",
+        "     (35, 173),\n",
+        "     (18, 143),\n",
+        "     (54, 140),\n",
+        "     (19, 154),\n",
+        "     (114, 155),\n",
+        "     (6, 35),\n",
+        "     (45, 124),\n",
+        "     (47, 109),\n",
+        "     (102, 119),\n",
+        "     (107, 118),\n",
+        "     (106, 122),\n",
+        "     (3, 108),\n",
+        "     (23, 158),\n",
+        "     (122, 154),\n",
+        "     (51, 154),\n",
+        "     (53, 125),\n",
+        "     (149, 159),\n",
+        "     (97, 116),\n",
+        "     (83, 96),\n",
+        "     (121, 149),\n",
+        "     (104, 187),\n",
+        "     (123, 137),\n",
+        "     (16, 103),\n",
+        "     (71, 180),\n",
+        "     (128, 150),\n",
+        "     (24, 39),\n",
+        "     (53, 57),\n",
+        "     (6, 139),\n",
+        "     (62, 116),\n",
+        "     (119, 125),\n",
+        "     (102, 159),\n",
+        "     (47, 63),\n",
+        "     (114, 146),\n",
+        "     (6, 26),\n",
+        "     (65, 161),\n",
+        "     (102, 110),\n",
+        "     (159, 172),\n",
+        "     (88, 179),\n",
+        "     (14, 106),\n",
+        "     (18, 25),\n",
+        "     (110, 158),\n",
+        "     (40, 54),\n",
+        "     (23, 151),\n",
+        "     (65, 82),\n",
+        "     (122, 165),\n",
+        "     (145, 154),\n",
+        "     (54, 124),\n",
+        "     (96, 162),\n",
+        "     (78, 118),\n",
+        "     (44, 66),\n",
+        "     (27, 154),\n",
+        "     (46, 95),\n",
+        "     (47, 87),\n",
+        "     (16, 94),\n",
+        "     (71, 173),\n",
+        "     (36, 145),\n",
+        "     (24, 46),\n",
+        "     (9, 113),\n",
+        "     (119, 122),\n",
+        "     (102, 150),\n",
+        "     (139, 152),\n",
+        "     (123, 127),\n",
+        "     (31, 158),\n",
+        "     (15, 175),\n",
+        "     (40, 174),\n",
+        "     (21, 148),\n",
+        "     (24, 93),\n",
+        "     (23, 63),\n",
+        "     (82, 98),\n",
+        "     (65, 186),\n",
+        "     (24, 52),\n",
+        "     (85, 183),\n",
+        "     (13, 69),\n",
+        "     (14, 109),\n",
+        "     (90, 162),\n",
+        "     (35, 40),\n",
+        "     (30, 67),\n",
+        "     (110, 161),\n",
+        "     (38, 87),\n",
+        "     (42, 154),\n",
+        "     (116, 133),\n",
+        "     (7, 54),\n",
+        "     (63, 85),\n",
+        "     (122, 172),\n",
+        "     (32, 156),\n",
+        "     (87, 179),\n",
+        "     (145, 147),\n",
+        "     (54, 119),\n",
+        "     (149, 161),\n",
+        "     (170, 173),\n",
+        "     (7, 71),\n",
+        "     (46, 102),\n",
+        "     (124, 180),\n",
+        "     (16, 85),\n",
+        "     (71, 170),\n",
+        "     (50, 72),\n",
+        "     (6, 7),\n",
+        "     (24, 53),\n",
+        "     (5, 125),\n",
+        "     (53, 149),\n",
+        "     (139, 145),\n",
+        "     (31, 135),\n",
+        "     (127, 147),\n",
+        "     (54, 145),\n",
+        "     (19, 177),\n",
+        "     (40, 149),\n",
+        "     (4, 119),\n",
+        "     (24, 164),\n",
+        "     (25, 90),\n",
+        "     (79, 139),\n",
+        "     (65, 179),\n",
+        "     (28, 33),\n",
+        "     (138, 148),\n",
+        "     (143, 165),\n",
+        "     (110, 168),\n",
+        "     (3, 184),\n",
+        "     (15, 131),\n",
+        "     (116, 140),\n",
+        "     (9, 129),\n",
+        "     (63, 82),\n",
+        "     (122, 183),\n",
+        "     (141, 186),\n",
+        "     (102, 174),\n",
+        "     (65, 103),\n",
+        "     (143, 170),\n",
+        "     (79, 114),\n",
+        "     (26, 106),\n",
+        "     (27, 104),\n",
+        "     (124, 179),\n",
+        "     (77, 184),\n",
+        "     (42, 106),\n",
+        "     (6, 57),\n",
+        "     (6, 124),\n",
+        "     (62, 105),\n",
+        "     (67, 126),\n",
+        "     (125, 172),\n",
+        "     (91, 100),\n",
+        "     (112, 175),\n",
+        "     (95, 120),\n",
+        "     (46, 150),\n",
+        "     (85, 185),\n",
+        "     (32, 59),\n",
+        "     (14, 127),\n",
+        "     (71, 122),\n",
+        "     (110, 179),\n",
+        "     (38, 65),\n",
+        "     (21, 23),\n",
+        "     (116, 139),\n",
+        "     (7, 36),\n",
+        "     (18, 159),\n",
+        "     (67, 156),\n",
+        "     (16, 52),\n",
+        "     (72, 81),\n",
+        "     (36, 63),\n",
+        "     (57, 150),\n",
+        "     (21, 120),\n",
+        "     (149, 179),\n",
+        "     (79, 123),\n",
+        "     (121, 177),\n",
+        "     (51, 103),\n",
+        "     (52, 159),\n",
+        "     (6, 119),\n",
+        "     (136, 146),\n",
+        "     (139, 143),\n",
+        "     (23, 25),\n",
+        "     (39, 63),\n",
+        "     (92, 101),\n",
+        "     (6, 51),\n",
+        "     (25, 36),\n",
+        "     (65, 141),\n",
+        "     (46, 153),\n",
+        "     (49, 158),\n",
+        "     (3, 43),\n",
+        "     (18, 125),\n",
+        "     (110, 186),\n",
+        "     (147, 172),\n",
+        "     (122, 185),\n",
+        "     (23, 52),\n",
+        "     (34, 157),\n",
+        "     (53, 90),\n",
+        "     (36, 38),\n",
+        "     (57, 159),\n",
+        "     (148, 149),\n",
+        "     (151, 168),\n",
+        "     (8, 138),\n",
+        "     (7, 178),\n",
+        "     (46, 123),\n",
+        "     (128, 187),\n",
+        "     (123, 170),\n",
+        "     (33, 134),\n",
+        "     (52, 102),\n",
+        "     (18, 181),\n",
+        "     (26, 125),\n",
+        "     (5, 104),\n",
+        "     (31, 49),\n",
+        "     (102, 178),\n",
+        "     (30, 96),\n",
+        "     (35, 119),\n",
+        "     (40, 138),\n",
+        "     (114, 177),\n",
+        "     (46, 120),\n",
+        "     (49, 151),\n",
+        "     (88, 150),\n",
+        "     (90, 134),\n",
+        "     (71, 104),\n",
+        "     (153, 168),\n",
+        "     (21, 25),\n",
+        "     (115, 178),\n",
+        "     (71, 125),\n",
+        "     (48, 73),\n",
+        "     (32, 184),\n",
+        "     (33, 46),\n",
+        "     (34, 148),\n",
+        "     (36, 45),\n",
+        "     (98, 185),\n",
+        "     (94, 168),\n",
+        "     (8, 177),\n",
+        "     (27, 127),\n",
+        "     (119, 166),\n",
+        "     (88, 127),\n",
+        "     (30, 59),\n",
+        "     (56, 104),\n",
+        "     (88, 94),\n",
+        "     (82, 174),\n",
+        "     (35, 124),\n",
+        "     (127, 183),\n",
+        "     (102, 158),\n",
+        "     (23, 88),\n",
+        "     (26, 172),\n",
+        "     (7, 47),\n",
+        "     (143, 185),\n",
+        "     (39, 120),\n",
+        "     (82, 180),\n",
+        "     (7, 19),\n",
+        "     (28, 180),\n",
+        "     (163, 174),\n",
+        "     (76, 122),\n",
+        "     (16, 25),\n",
+        "     (53, 172),\n",
+        "     (56, 128),\n",
+        "     (156, 171),\n",
+        "     (47, 121),\n",
+        "     (106, 144),\n",
+        "     (51, 122),\n",
+        "     (16, 168),\n",
+        "     (15, 96),\n",
+        "     (36, 187),\n",
+        "     (37, 51),\n",
+        "     (20, 93),\n",
+        "     (150, 167),\n",
+        "     (6, 88),\n",
+        "     (63, 153),\n",
+        "     (102, 172),\n",
+        "     (139, 178),\n",
+        "     (135, 153),\n",
+        "     (31, 96),\n",
+        "     (159, 181),\n",
+        "     (24, 135),\n",
+        "     (25, 63),\n",
+        "     (26, 183),\n",
+        "     (46, 178),\n",
+        "     (140, 147),\n",
+        "     (71, 94),\n",
+        "     (18, 102),\n",
+        "     (57, 65),\n",
+        "     (5, 161),\n",
+        "     (82, 110),\n",
+        "     (7, 8),\n",
+        "     (63, 127),\n",
+        "     (28, 179),\n",
+        "     (98, 172),\n",
+        "     (128, 161),\n",
+        "     (53, 181),\n",
+        "     (57, 178),\n",
+        "     (94, 148),\n",
+        "     (24, 186),\n",
+        "     (119, 180),\n",
+        "     (123, 177),\n",
+        "     (3, 120),\n",
+        "     (36, 162),\n",
+        "     (19, 106),\n",
+        "     (24, 31),\n",
+        "     (135, 174),\n",
+        "     (155, 161),\n",
+        "     (158, 174),\n",
+        "     (127, 165),\n",
+        "     (40, 127),\n",
+        "     (39, 153),\n",
+        "     (59, 112),\n",
+        "     (23, 110),\n",
+        "     (33, 94),\n",
+        "     (69, 72),\n",
+        "     (46, 181),\n",
+        "     (85, 102),\n",
+        "     (51, 170),\n",
+        "     (87, 141),\n",
+        "     (71, 87),\n",
+        "     (146, 170),\n",
+        "     (26, 143),\n",
+        "     (21, 52),\n",
+        "     (26, 33),\n",
+        "     (28, 138),\n",
+        "     (65, 120),\n",
+        "     (16, 23),\n",
+        "     (38, 63),\n",
+        "     (36, 90),\n",
+        "     (94, 159),\n",
+        "     (59, 171),\n",
+        "     (22, 29),\n",
+        "     (6, 187),\n",
+        "     (8, 166),\n",
+        "     (106, 130),\n",
+        "     (33, 162),\n",
+        "     (14, 180),\n",
+        "     (108, 111),\n",
+        "     (109, 119),\n",
+        "     (32, 111),\n",
+        "     (20, 99),\n",
+        "     (24, 102),\n",
+        "     (155, 166),\n",
+        "     (17, 185),\n",
+        "     (183, 187),\n",
+        "     (39, 150),\n",
+        "     (4, 116),\n",
+        "     (24, 149),\n",
+        "     (23, 103),\n",
+        "     (8, 79),\n",
+        "     (121, 125),\n",
+        "     (138, 165),\n",
+        "     (51, 179),\n",
+        "     (87, 138),\n",
+        "     (90, 154),\n",
+        "     (18, 88),\n",
+        "     (146, 165),\n",
+        "     (39, 103),\n",
+        "     (43, 96),\n",
+        "     (26, 40),\n",
+        "     (27, 170),\n",
+        "     (46, 47),\n",
+        "     (47, 167),\n",
+        "     (124, 141),\n",
+        "     (36, 65),\n",
+        "     (4, 170),\n",
+        "     (5, 36),\n",
+        "     (6, 178),\n",
+        "     (8, 173),\n",
+        "     (7, 143),\n",
+        "     (119, 138),\n",
+        "     (135, 168),\n",
+        "     (52, 65),\n",
+        "     (35, 139),\n",
+        "     (37, 46),\n",
+        "     (38, 180),\n",
+        "     (76, 127),\n",
+        "     (82, 146),\n",
+        "     (63, 180),\n",
+        "     (127, 152),\n",
+        "     (27, 46),\n",
+        "     (86, 160),\n",
+        "     (17, 178),\n",
+        "     (39, 52),\n",
+        "     (164, 167),\n",
+        "     (40, 109),\n",
+        "     (3, 99),\n",
+        "     (149, 153),\n",
+        "     (138, 172),\n",
+        "     (85, 104),\n",
+        "     (33, 82),\n",
+        "     (109, 146),\n",
+        "     (76, 144),\n",
+        "     (26, 51),\n",
+        "     (7, 119),\n",
+        "     (46, 54),\n",
+        "     (47, 172),\n",
+        "     (52, 164),\n",
+        "     (73, 116),\n",
+        "     (128, 180),\n",
+        "     (4, 145),\n",
+        "     (5, 45),\n",
+        "     (161, 179),\n",
+        "     (16, 148),\n",
+        "     (17, 26),\n",
+        "     (54, 161),\n",
+        "     (58, 172),\n",
+        "     (132, 160),\n",
+        "     (155, 180),\n",
+        "     (154, 177),\n",
+        "     (85, 128),\n",
+        "     (12, 160),\n",
+        "     (161, 173),\n",
+        "     (164, 174),\n",
+        "     (55, 66),\n",
+        "     (4, 122),\n",
+        "     (25, 27),\n",
+        "     (65, 124),\n",
+        "     (140, 183),\n",
+        "     (143, 154),\n",
+        "     (109, 139),\n",
+        "     (46, 187),\n",
+        "     (76, 143),\n",
+        "     (173, 185),\n",
+        "     (46, 57),\n",
+        "     (30, 151),\n",
+        "     (36, 119),\n",
+        "     (4, 152),\n",
+        "     (6, 172),\n",
+        "     (119, 152),\n",
+        "     (29, 176),\n",
+        "     (52, 87),\n",
+        "     (35, 185),\n",
+        "     (181, 186),\n",
+        "     (76, 109),\n",
+        "     (25, 115),\n",
+        "     (6, 63),\n",
+        "     (136, 170),\n",
+        "     (85, 137),\n",
+        "     (31, 77),\n",
+        "     (14, 143),\n",
+        "     (23, 162),\n",
+        "     (10, 64),\n",
+        "     (37, 161),\n",
+        "     (4, 97),\n",
+        "     (25, 28),\n",
+        "     (26, 146),\n",
+        "     (45, 161),\n",
+        "     (28, 127),\n",
+        "     (85, 122),\n",
+        "     (90, 119),\n",
+        "     (165, 178),\n",
+        "     (96, 135),\n",
+        "     (145, 170),\n",
+        "     (173, 178),\n",
+        "     (47, 178),\n",
+        "     (177, 183),\n",
+        "     (53, 146),\n",
+        "     (56, 170),\n",
+        "     (26, 47),\n",
+        "     (167, 174),\n",
+        "     (98, 102),\n",
+        "     (9, 84),\n",
+        "     (119, 145),\n",
+        "     (52, 94),\n",
+        "     (54, 179),\n",
+        "     (19, 143),\n",
+        "     (40, 179),\n",
+        "     (6, 54),\n",
+        "     (82, 127),\n",
+        "     (104, 109),\n",
+        "     (170, 178),\n",
+        "     (85, 146),\n",
+        "     (43, 184),\n",
+        "     (21, 149),\n",
+        "     (144, 145),\n",
+        "     (18, 45),\n",
+        "     (38, 40),\n",
+        "     (4, 104),\n",
+        "     (122, 137),\n",
+        "     (140, 165),\n",
+        "     (51, 143),\n",
+        "     (54, 104),\n",
+        "     (58, 103),\n",
+        "     (96, 158),\n",
+        "     (25, 150),\n",
+        "     (104, 178),\n",
+        "     (121, 138),\n",
+        "     (47, 187),\n",
+        "     (56, 177),\n",
+        "     (94, 98),\n",
+        "     (5, 24),\n",
+        "     (57, 185),\n",
+        "     (172, 178),\n",
+        "     (58, 141),\n",
+        "     (16, 110),\n",
+        "     (15, 83),\n",
+        "     (18, 149),\n",
+        "     (54, 138),\n",
+        "     (130, 145),\n",
+        "     (25, 125),\n",
+        "     (111, 162),\n",
+        "     (102, 125),\n",
+        "     (85, 155),\n",
+        "     (13, 89),\n",
+        "     (107, 130),\n",
+        "     (18, 36),\n",
+        "     (110, 141),\n",
+        "     (93, 171),\n",
+        "     (39, 179),\n",
+        "     (32, 99),\n",
+        "     (8, 18),\n",
+        "     (62, 165),\n",
+        "     (45, 147),\n",
+        "     (67, 186),\n",
+        "     (51, 148),\n",
+        "     (87, 167),\n",
+        "     (58, 110),\n",
+        "     (95, 167),\n",
+        "     (170, 177),\n",
+        "     (25, 159),\n",
+        "     (27, 143),\n",
+        "     (121, 147),\n",
+        "     (126, 156),\n",
+        "     (39, 43),\n",
+        "     (174, 186),\n",
+        "     (6, 145),\n",
+        "     (10, 92),\n",
+        "     (80, 89),\n",
+        "     (31, 171),\n",
+        "     (35, 172),\n",
+        "     (104, 127),\n",
+        "     (40, 161),\n",
+        "     (114, 152),\n",
+        "     (57, 63),\n",
+        "     (23, 40),\n",
+        "     (45, 123),\n",
+        "     (15, 43),\n",
+        "     (85, 164),\n",
+        "     (106, 123),\n",
+        "     (141, 157),\n",
+        "     (3, 151),\n",
+        "     (110, 148),\n",
+        "     (93, 164),\n",
+        "     (39, 168),\n",
+        "     (23, 153),\n",
+        "     (64, 86),\n",
+        "     (45, 140),\n",
+        "     (122, 155),\n",
+        "     (33, 119),\n",
+        "     (14, 19),\n",
+        "     (53, 124),\n",
+        "     (54, 122),\n",
+        "     (165, 173),\n",
+        "     (114, 122),\n",
+        "     (149, 158),\n",
+        "     (118, 133),\n",
+        "     (46, 85),\n",
+        "     (12, 29),\n",
+        "     (124, 167),\n",
+        "     (16, 88),\n",
+        "     (126, 135),\n",
+        "     (71, 183),\n",
+        "     (24, 40),\n",
+        "     (98, 115),\n",
+        "     (6, 136),\n",
+        "     (27, 53),\n",
+        "     (125, 128),\n",
+        "     (35, 85),\n",
+        "     (114, 147),\n",
+        "     (6, 27),\n",
+        "     (115, 134),\n",
+        "     (85, 173),\n",
+        "     (88, 180),\n",
+        "     (141, 150),\n",
+        "     (96, 184),\n",
+        "     (110, 159),\n",
+        "     (20, 131),\n",
+        "     (39, 161),\n",
+        "     (23, 150),\n",
+        "     (7, 56),\n",
+        "     (120, 186),\n",
+        "     (87, 181),\n",
+        "     (76, 170),\n",
+        "     (96, 163),\n",
+        "     (95, 149),\n",
+        "     (16, 95),\n",
+        "     (71, 172),\n",
+        "     (36, 146),\n",
+        "     (137, 139),\n",
+        "     (139, 155),\n",
+        "     (36, 114),\n",
+        "     (125, 153),\n",
+        "     (114, 138),\n",
+        "     (24, 94),\n",
+        "     (6, 18),\n",
+        "     (106, 109),\n",
+        "     (14, 114),\n",
+        "     (147, 152),\n",
+        "     (39, 94),\n",
+        "     (4, 76),\n",
+        "     (28, 90),\n",
+        "     (122, 173),\n",
+        "     (145, 146),\n",
+        "     (98, 146),\n",
+        "     (27, 146),\n",
+        "     (121, 166),\n",
+        "     (104, 170),\n",
+        "     (106, 178),\n",
+        "     (124, 181),\n",
+        "     (52, 138),\n",
+        "     (36, 153),\n",
+        "     (5, 124),\n",
+        "     (6, 122),\n",
+        "     (137, 140),\n",
+        "     (67, 120),\n",
+        "     (103, 163),\n",
+        "     (125, 146),\n",
+        "     (127, 146),\n",
+        "     (40, 150),\n",
+        "     (43, 131),\n",
+        "     (6, 21),\n",
+        "     (82, 90),\n",
+        "     (138, 149),\n",
+        "     (49, 95),\n",
+        "     (70, 118),\n",
+        "     (71, 124),\n",
+        "     (33, 88),\n",
+        "     (23, 164),\n",
+        "     (8, 14),\n",
+        "     (122, 180),\n",
+        "     (53, 87),\n",
+        "     (54, 79),\n",
+        "     (3, 23),\n",
+        "     (46, 110),\n",
+        "     (104, 145),\n",
+        "     (76, 104),\n",
+        "     (125, 148),\n",
+        "     (36, 128),\n",
+        "     (6, 125),\n",
+        "     (66, 117),\n",
+        "     (16, 43),\n",
+        "     (127, 155),\n",
+        "     (19, 185),\n",
+        "     (60, 64),\n",
+        "     (79, 147),\n",
+        "     (82, 85),\n",
+        "     (20, 32),\n",
+        "     (28, 57),\n",
+        "     (70, 105),\n",
+        "     (143, 173),\n",
+        "     (147, 166),\n",
+        "     (63, 90),\n",
+        "     (29, 86),\n",
+        "     (94, 163),\n",
+        "     (32, 171),\n",
+        "     (16, 53),\n",
+        "     (36, 56),\n",
+        "     (57, 149),\n",
+        "     (94, 185),\n",
+        "     (95, 128),\n",
+        "     (79, 122),\n",
+        "     (7, 180),\n",
+        "     (104, 152),\n",
+        "     (51, 102),\n",
+        "     (3, 15),\n",
+        "     (15, 20),\n",
+        "     (71, 155),\n",
+        "     (36, 143),\n",
+        "     (38, 159),\n",
+        "     (5, 110),\n",
+        "     (61, 129),\n",
+        "     (136, 147),\n",
+        "     (148, 163),\n",
+        "     (27, 102),\n",
+        "     (87, 123),\n",
+        "     (125, 164),\n",
+        "     (162, 174),\n",
+        "     (114, 183),\n",
+        "     (24, 179),\n",
+        "     (79, 152),\n",
+        "     (46, 158),\n",
+        "     (37, 159),\n",
+        "     (49, 157),\n",
+        "     (128, 148),\n",
+        "     (14, 71),\n",
+        "     (71, 114),\n",
+        "     (38, 121),\n",
+        "     (115, 172),\n",
+        "     (5, 181),\n",
+        "     (23, 186),\n",
+        "     (51, 88),\n",
+        "     (16, 186),\n",
+        "     (72, 89),\n",
+        "     (36, 39),\n",
+        "     (57, 158),\n",
+        "     (148, 150),\n",
+        "     (24, 37),\n",
+        "     (8, 139),\n",
+        "     (7, 173),\n",
+        "     (121, 185),\n",
+        "     (123, 173),\n",
+        "     (88, 121),\n",
+        "     (52, 103),\n",
+        "     (38, 150),\n",
+        "     (5, 87),\n",
+        "     (136, 154),\n",
+        "     (96, 108),\n",
+        "     (18, 103),\n",
+        "     (127, 137),\n",
+        "     (96, 120),\n",
+        "     (60, 86),\n",
+        "     (26, 162),\n",
+        "     (46, 161),\n",
+        "     (84, 92),\n",
+        "     (49, 150),\n",
+        "     (141, 179),\n",
+        "     (3, 156),\n",
+        "     (109, 180),\n",
+        "     (147, 180),\n",
+        "     (21, 24),\n",
+        "     (53, 110),\n",
+        "     (8, 35),\n",
+        "     (104, 119),\n",
+        "     (34, 149),\n",
+        "     (53, 162),\n",
+        "     (36, 46),\n",
+        "     (148, 157),\n",
+        "     (171, 184),\n",
+        "     (4, 183),\n",
+        "     (8, 178),\n",
+        "     (7, 170),\n",
+        "     (119, 161),\n",
+        "     (33, 158),\n",
+        "     (52, 110),\n",
+        "     (71, 137),\n",
+        "     (102, 170),\n",
+        "     (14, 166),\n",
+        "     (35, 127),\n",
+        "     (0, 11),\n",
+        "     (162, 168),\n",
+        "     (25, 53),\n",
+        "     (26, 173),\n",
+        "     (65, 158),\n",
+        "     (88, 158),\n",
+        "     (53, 58),\n",
+        "     (109, 173),\n",
+        "     (146, 185),\n",
+        "     (79, 187),\n",
+        "     (21, 33),\n",
+        "     (95, 110),\n",
+        "     (5, 167),\n",
+        "     (7, 18),\n",
+        "     (47, 139),\n",
+        "     (65, 163),\n",
+        "     (3, 30),\n",
+        "     (36, 85),\n",
+        "     (148, 164),\n",
+        "     (59, 156),\n",
+        "     (26, 109),\n",
+        "     (8, 185),\n",
+        "     (27, 119),\n",
+        "     (123, 187),\n",
+        "     (106, 145),\n",
+        "     (51, 125),\n",
+        "     (15, 99),\n",
+        "     (36, 164),\n",
+        "     (38, 128),\n",
+        "     (24, 25),\n",
+        "     (150, 164),\n",
+        "     (53, 159),\n",
+        "     (83, 131),\n",
+        "     (67, 93),\n",
+        "     (30, 83),\n",
+        "     (56, 173),\n",
+        "     (40, 121),\n",
+        "     (21, 185),\n",
+        "     (60, 100),\n",
+        "     (26, 180),\n",
+        "     (46, 179),\n",
+        "     (42, 134),\n",
+        "     (51, 164),\n",
+        "     (109, 166),\n",
+        "     (166, 183),\n",
+        "     (152, 183),\n",
+        "     (4, 130),\n",
+        "     (26, 39),\n",
+        "     (40, 52),\n",
+        "     (12, 86),\n",
+        "     (23, 95),\n",
+        "     (33, 63),\n",
+        "     (88, 161),\n",
+        "     (53, 180),\n",
+        "     (56, 136),\n",
+        "     (1, 86),\n",
+        "     (2, 92),\n",
+        "     (94, 149),\n",
+        "     (4, 165),\n",
+        "     (7, 152),\n",
+        "     (119, 183),\n",
+        "     (106, 152),\n",
+        "     (56, 119),\n",
+        "     (19, 109),\n",
+        "     (38, 187),\n",
+        "     (136, 143),\n",
+        "     (102, 164),\n",
+        "     (139, 170),\n",
+        "     (5, 103),\n",
+        "     (35, 109),\n",
+        "     (120, 131),\n",
+        "     (121, 123),\n",
+        "     (138, 187),\n",
+        "     (51, 173),\n",
+        "     (14, 35),\n",
+        "     (18, 94),\n",
+        "     (120, 181),\n",
+        "     (21, 51),\n",
+        "     (115, 136),\n",
+        "     (26, 46),\n",
+        "     (63, 103),\n",
+        "     (30, 163),\n",
+        "     (56, 143),\n",
+        "     (4, 172),\n",
+        "     (5, 58),\n",
+        "     (82, 161),\n",
+        "     (7, 145),\n",
+        "     (119, 140),\n",
+        "     (103, 157),\n",
+        "     (52, 123),\n",
+        "     (36, 170),\n",
+        "     (19, 114),\n",
+        "     (24, 103),\n",
+        "     (57, 125),\n",
+        "     (34, 163),\n",
+        "     (30, 93),\n",
+        "     (16, 34),\n",
+        "     (14, 187),\n",
+        "     (127, 173),\n",
+        "     (6, 53),\n",
+        "     (40, 103),\n",
+        "     (28, 154),\n",
+        "     (24, 150),\n",
+        "     (23, 102),\n",
+        "     (26, 134),\n",
+        "     (121, 124),\n",
+        "     (85, 110),\n",
+        "     (51, 178),\n",
+        "     (71, 79),\n",
+        "     (109, 152),\n",
+        "     (57, 82),\n",
+        "     (76, 154),\n",
+        "     (39, 102),\n",
+        "     (162, 178),\n",
+        "     (5, 146),\n",
+        "     (25, 185),\n",
+        "     (7, 121),\n",
+        "     (48, 118),\n",
+        "     (47, 166),\n",
+        "     (105, 107),\n",
+        "     (6, 14),\n",
+        "     (5, 35),\n",
+        "     (6, 179),\n",
+        "     (26, 88),\n",
+        "     (27, 90),\n",
+        "     (106, 138),\n",
+        "     (16, 158),\n",
+        "     (35, 138),\n",
+        "     (18, 174),\n",
+        "     (19, 123),\n",
+        "     (38, 181),\n",
+        "     (58, 162),\n",
+        "     (26, 71),\n",
+        "     (157, 174),\n",
+        "     (105, 133),\n",
+        "     (127, 170),\n",
+        "     (55, 68),\n",
+        "     (40, 110),\n",
+        "     (93, 175),\n",
+        "     (24, 157),\n",
+        "     (167, 186),\n",
+        "     (8, 119),\n",
+        "     (138, 173),\n",
+        "     (85, 119),\n",
+        "     (140, 185),\n",
+        "     (14, 45),\n",
+        "     (90, 98),\n",
+        "     (109, 145),\n",
+        "     (76, 145),\n",
+        "     (5, 155),\n",
+        "     (171, 175),\n",
+        "     (27, 162),\n",
+        "     (28, 153),\n",
+        "     (128, 181),\n",
+        "     (59, 184),\n",
+        "     (4, 146),\n",
+        "     (6, 170),\n",
+        "     (87, 140),\n",
+        "     (16, 149),\n",
+        "     (35, 179),\n",
+        "     (54, 166),\n",
+        "     (38, 172),\n",
+        "     (82, 138),\n",
+        "     (155, 183),\n",
+        "     (85, 143),\n",
+        "     (175, 184),\n",
+        "     (161, 172),\n",
+        "     (16, 150),\n",
+        "     (17, 170),\n",
+        "     (18, 56),\n",
+        "     (37, 167),\n",
+        "     (40, 85),\n",
+        "     (4, 123),\n",
+        "     (42, 178),\n",
+        "     (25, 26),\n",
+        "     (26, 136),\n",
+        "     (33, 106),\n",
+        "     (87, 155),\n",
+        "     (90, 109),\n",
+        "     (53, 103),\n",
+        "     (109, 138),\n",
+        "     (76, 136),\n",
+        "     (3, 153),\n",
+        "     (31, 156),\n",
+        "     (47, 180),\n",
+        "     (52, 161),\n",
+        "     (32, 96),\n",
+        "     (5, 21),\n",
+        "     (6, 173),\n",
+        "     (27, 40),\n",
+        "     (119, 155),\n",
+        "     (51, 94),\n",
+        "     (34, 58),\n",
+        "     (54, 185),\n",
+        "     (19, 137),\n",
+        "     (38, 167),\n",
+        "     (77, 120),\n",
+        "     (49, 120),\n",
+        "     (23, 28),\n",
+        "     (85, 136),\n",
+        "     (49, 67),\n",
+        "     (14, 140),\n",
+        "     (18, 51),\n",
+        "     (110, 128),\n",
+        "     (38, 54),\n",
+        "     (162, 163),\n",
+        "     (120, 159),\n",
+        "     (122, 143),\n",
+        "     (85, 121),\n",
+        "     (165, 177),\n",
+        "     (21, 87),\n",
+        "     (152, 154),\n",
+        "     (43, 67),\n",
+        "     (173, 177),\n",
+        "     (46, 65),\n",
+        "     (128, 167),\n",
+        "     (124, 155),\n",
+        "     (36, 127),\n",
+        "     (37, 119),\n",
+        "     (94, 120),\n",
+        "     (172, 180),\n",
+        "     (35, 161),\n",
+        "     (18, 155),\n",
+        "     (112, 151),\n",
+        "     (40, 180),\n",
+        "     (63, 162),\n",
+        "     (85, 145),\n",
+        "     (65, 159),\n",
+        "     (180, 187),\n",
+        "     (153, 161),\n",
+        "     (39, 181),\n",
+        "     (26, 154),\n",
+        "     (28, 119),\n",
+        "     (140, 166),\n",
+        "     (90, 127),\n",
+        "     (57, 110),\n",
+        "     (170, 183),\n",
+        "     (25, 149),\n",
+        "     (121, 137),\n",
+        "     (51, 63),\n",
+        "     (17, 115),\n",
+        "     (56, 178),\n",
+        "     (4, 143),\n",
+        "     (5, 7),\n",
+        "     (172, 179),\n",
+        "     (172, 173),\n",
+        "     (49, 163),\n",
+        "     (125, 141),\n",
+        "     (35, 166),\n",
+        "     (18, 146),\n",
+        "     (54, 139),\n",
+        "     (40, 187),\n",
+        "     (3, 49),\n",
+        "     (41, 134),\n",
+        "     (79, 177),\n",
+        "     (82, 119),\n",
+        "     (96, 175),\n",
+        "     (85, 154),\n",
+        "     (107, 133),\n",
+        "     (18, 37),\n",
+        "     (23, 131),\n",
+        "     (8, 19),\n",
+        "     (45, 146),\n",
+        "     (122, 145),\n",
+        "     (33, 125),\n",
+        "     (110, 125),\n",
+        "     (57, 119),\n",
+        "     (154, 170),\n",
+        "     (98, 134),\n",
+        "     (25, 158),\n",
+        "     (83, 93),\n",
+        "     (121, 146),\n",
+        "     (71, 185),\n",
+        "     (36, 109),\n",
+        "     (19, 47),\n",
+        "     (24, 34),\n",
+        "     (27, 63),\n",
+        "     (137, 152),\n",
+        "     (87, 121),\n",
+        "     (18, 141),\n",
+        "     (40, 162),\n",
+        "     (21, 128),\n",
+        "     (77, 99),\n",
+        "     (23, 43),\n",
+        "     (33, 39),\n",
+        "     (45, 122),\n",
+        "     (66, 129),\n",
+        "     (88, 174),\n",
+        "     (18, 28),\n",
+        "     (110, 149),\n",
+        "     (93, 163),\n",
+        "     (116, 177),\n",
+        "     (8, 26),\n",
+        "     (45, 139),\n",
+        "     (122, 152),\n",
+        "     (96, 111),\n",
+        "     (53, 123),\n",
+        "     (54, 123),\n",
+        "     (114, 123),\n",
+        "     (149, 157),\n",
+        "     (170, 185),\n",
+        "     (99, 168),\n",
+        "     (121, 155),\n",
+        "     (123, 139),\n",
+        "     (17, 109),\n",
+        "     (36, 148),\n",
+        "     (37, 98),\n",
+        "     (37, 181),\n",
+        "     (6, 137),\n",
+        "     (27, 52),\n",
+        "     (119, 127),\n",
+        "     (47, 57),\n",
+        "     (99, 175),\n",
+        "     (125, 159),\n",
+        "     (19, 165),\n",
+        "     (24, 88),\n",
+        "     (23, 32),\n",
+        "     (81, 89),\n",
+        "     (65, 167),\n",
+        "     (85, 172),\n",
+        "     (31, 32),\n",
+        "     (14, 104),\n",
+        "     (18, 23),\n",
+        "     (40, 56),\n",
+        "     (167, 179),\n",
+        "     (99, 131),\n",
+        "     (104, 140),\n",
+        "     (67, 171),\n",
+        "     (32, 135),\n",
+        "     (87, 180),\n",
+        "     (145, 152),\n",
+        "     (54, 114),\n",
+        "     (39, 49),\n",
+        "     (52, 82),\n",
+        "     (26, 102),\n",
+        "     (56, 71),\n",
+        "     (93, 131),\n",
+        "     (5, 114),\n",
+        "     (61, 117),\n",
+        "     (100, 176),\n",
+        "     (137, 138),\n",
+        "     (102, 148),\n",
+        "     (139, 154),\n",
+        "     (0, 41),\n",
+        "     (19, 170),\n",
+        "     (21, 146),\n",
+        "     (59, 67),\n",
+        "     (24, 95),\n",
+        "     (23, 57),\n",
+        "     (6, 19),\n",
+        "     (46, 138),\n",
+        "     (102, 103),\n",
+        "     (110, 167),\n",
+        "     (147, 155),\n",
+        "     (38, 85),\n",
+        "     (23, 174),\n",
+        "     (51, 153),\n",
+        "     (63, 87),\n",
+        "     (122, 170),\n",
+        "     (57, 138),\n",
+        "     (58, 120),\n",
+        "     (39, 46),\n",
+        "     (95, 157),\n",
+        "     (79, 119),\n",
+        "     (83, 112),\n",
+        "     (48, 62),\n",
+        "     (47, 94),\n",
+        "     (43, 99),\n",
+        "     (71, 164),\n",
+        "     (36, 154),\n",
+        "     (58, 174),\n",
+        "     (42, 97),\n",
+        "     (5, 123),\n",
+        "     (6, 123),\n",
+        "     (27, 173),\n",
+        "     (139, 147),\n",
+        "     (103, 162),\n",
+        "     (52, 58),\n",
+        "     (96, 168),\n",
+        "     (25, 88),\n",
+        "     (46, 141),\n",
+        "     (138, 146),\n",
+        "     (14, 122),\n",
+        "     (71, 127),\n",
+        "     (110, 174),\n",
+        "     (28, 82),\n",
+        "     (16, 63),\n",
+        "     (21, 125),\n",
+        "     (98, 170),\n",
+        "     (26, 104),\n",
+        "     (154, 165),\n",
+        "     (104, 146),\n",
+        "     (33, 138),\n",
+        "     (52, 146),\n",
+        "     (56, 85),\n",
+        "     (26, 127),\n",
+        "     (150, 153),\n",
+        "     (6, 114),\n",
+        "     (137, 180),\n",
+        "     (49, 110),\n",
+        "     (87, 125),\n",
+        "     (125, 170),\n",
+        "     (127, 154),\n",
+        "     (40, 158),\n",
+        "     (21, 164),\n",
+        "     (25, 33),\n",
+        "     (79, 146),\n",
+        "     (65, 138),\n",
+        "     (46, 148),\n",
+        "     (88, 138),\n",
+        "     (90, 178),\n",
+        "     (143, 172),\n",
+        "     (28, 174),\n",
+        "     (20, 43),\n",
+        "     (93, 135),\n",
+        "     (38, 71),\n",
+        "     (3, 163),\n",
+        "     (8, 54),\n",
+        "     (93, 111),\n",
+        "     (34, 128),\n",
+        "     (54, 71),\n",
+        "     (57, 148),\n",
+        "     (26, 115),\n",
+        "     (7, 183),\n",
+        "     (121, 183),\n",
+        "     (98, 119),\n",
+        "     (106, 165),\n",
+        "     (52, 153),\n",
+        "     (71, 154),\n",
+        "     (36, 136),\n",
+        "     (77, 175),\n",
+        "     (102, 185),\n",
+        "     (87, 122),\n",
+        "     (125, 163),\n",
+        "     (1, 160),\n",
+        "     (114, 180),\n",
+        "     (79, 155),\n",
+        "     (45, 87),\n",
+        "     (46, 159),\n",
+        "     (70, 97),\n",
+        "     (71, 109),\n",
+        "     (18, 123),\n",
+        "     (3, 168),\n",
+        "     (5, 180),\n",
+        "     (23, 181),\n",
+        "     (67, 135),\n",
+        "     (32, 163),\n",
+        "     (25, 168),\n",
+        "     (53, 88),\n",
+        "     (94, 161),\n",
+        "     (149, 186),\n",
+        "     (26, 122),\n",
+        "     (7, 172),\n",
+        "     (46, 121),\n",
+        "     (123, 172),\n",
+        "     (106, 172),\n",
+        "     (51, 110),\n",
+        "     (126, 171),\n",
+        "     (71, 147),\n",
+        "     (36, 183),\n",
+        "     (90, 94),\n",
+        "     (41, 68),\n",
+        "     (42, 90),\n",
+        "     (136, 155),\n",
+        "     (63, 149),\n",
+        "     (137, 166),\n",
+        "     (35, 121),\n",
+        "     (127, 136),\n",
+        "     (25, 51),\n",
+        "     (8, 85),\n",
+        "     (138, 143),\n",
+        "     (14, 79),\n",
+        "     (143, 178),\n",
+        "     (18, 114),\n",
+        "     (147, 183),\n",
+        "     (20, 175),\n",
+        "     (21, 39),\n",
+        "     (152, 170),\n",
+        "     (8, 36),\n",
+        "     (47, 141),\n",
+        "     (16, 36),\n",
+        "     (53, 161),\n",
+        "     (36, 47),\n",
+        "     (148, 158),\n",
+        "     (151, 163),\n",
+        "     (7, 165),\n",
+        "     (47, 114),\n",
+        "     (123, 165),\n",
+        "     (51, 119),\n",
+        "     (16, 179),\n",
+        "     (35, 145),\n",
+        "     (71, 136),\n",
+        "     (150, 162),\n",
+        "     (82, 172),\n",
+        "     (45, 56),\n",
+        "     (158, 162),\n",
+        "     (125, 181),\n",
+        "     (127, 177),\n",
+        "     (92, 117),\n",
+        "     (37, 153),\n",
+        "     (40, 115),\n",
+        "     (114, 166),\n",
+        "     (25, 52),\n",
+        "     (26, 170),\n",
+        "     (65, 157),\n",
+        "     (88, 159),\n",
+        "     (143, 187),\n",
+        "     (109, 172),\n",
+        "     (152, 177),\n",
+        "     (20, 135),\n",
+        "     (47, 138),\n",
+        "     (33, 37),\n",
+        "     (53, 170),\n",
+        "     (2, 66),\n",
+        "     (39, 111),\n",
+        "     (154, 185),\n",
+        "     (119, 185),\n",
+        "     (47, 123),\n",
+        "     (103, 110),\n",
+        "     (49, 171),\n",
+        "     (58, 150),\n",
+        "     (93, 156),\n",
+        "     (5, 88),\n",
+        "     (136, 137),\n",
+        "     (102, 162),\n",
+        "     (139, 180),\n",
+        "     (159, 186),\n",
+        "     (21, 110),\n",
+        "     (40, 122),\n",
+        "     (65, 150),\n",
+        "     (138, 177),\n",
+        "     (49, 135),\n",
+        "     (141, 164),\n",
+        "     (71, 88),\n",
+        "     (109, 165),\n",
+        "     (146, 177),\n",
+        "     (57, 71),\n",
+        "     (166, 180),\n",
+        "     (25, 174),\n",
+        "     (26, 36),\n",
+        "     (63, 121),\n",
+        "     (47, 147),\n",
+        "     (16, 18),\n",
+        "     (53, 179),\n",
+        "     (56, 137),\n",
+        "     (148, 172),\n",
+        "     (4, 166),\n",
+        "     (53, 121),\n",
+        "     (7, 155),\n",
+        "     (52, 125),\n",
+        "     (0, 107),\n",
+        "     (36, 172),\n",
+        "     (37, 58),\n",
+        "     (77, 83),\n",
+        "     (5, 65),\n",
+        "     (82, 158),\n",
+        "     (63, 128),\n",
+        "     (139, 173),\n",
+        "     (14, 177),\n",
+        "     (39, 58),\n",
+        "     (164, 179),\n",
+        "     (95, 169),\n",
+        "     (45, 187),\n",
+        "     (121, 122),\n",
+        "     (51, 172),\n",
+        "     (87, 143),\n",
+        "     (57, 161),\n",
+        "     (44, 117),\n",
+        "     (63, 102),\n",
+        "     (11, 68),\n",
+        "     (43, 135),\n",
+        "     (37, 82),\n",
+        "     (2, 84),\n",
+        "     (4, 173),\n",
+        "     (5, 57),\n",
+        "     (6, 185),\n",
+        "     (26, 94),\n",
+        "     (119, 143),\n",
+        "     (15, 112),\n",
+        "     (18, 180),\n",
+        "     (56, 127),\n",
+        "     (38, 179),\n",
+        "     (24, 110),\n",
+        "     (67, 175),\n",
+        "     (136, 183),\n",
+        "     (49, 103),\n",
+        "     (21, 102),\n",
+        "     (127, 172),\n",
+        "     (164, 186),\n",
+        "     (40, 104),\n",
+        "     (97, 109),\n",
+        "     (45, 180),\n",
+        "     (84, 113),\n",
+        "     (85, 109),\n",
+        "     (51, 54),\n",
+        "     (153, 179),\n",
+        "     (58, 65),\n",
+        "     (27, 172),\n",
+        "     (47, 161),\n",
+        "     (30, 171),\n",
+        "     (49, 186),\n",
+        "     (93, 110),\n",
+        "     (97, 107),\n",
+        "     (7, 137),\n",
+        "     (103, 149),\n",
+        "     (106, 139),\n",
+        "     (33, 185),\n",
+        "     (16, 159),\n",
+        "     (54, 172),\n",
+        "     (19, 122),\n",
+        "     (58, 163),\n",
+        "     (25, 103),\n",
+        "     (133, 142),\n",
+        "     (135, 158),\n",
+        "     (155, 177),\n",
+        "     (97, 130),\n",
+        "     (24, 158),\n",
+        "     (45, 173),\n",
+        "     (138, 170),\n",
+        "     (39, 110),\n",
+        "     (146, 154),\n",
+        "     (57, 90),\n",
+        "     (76, 146),\n",
+        "     (53, 104),\n",
+        "     (46, 52),\n",
+        "     (177, 187),\n",
+        "     (93, 103),\n",
+        "     (4, 147),\n",
+        "     (27, 82),\n",
+        "     (33, 178),\n",
+        "     (40, 94),\n",
+        "     (35, 178),\n",
+        "     (79, 165),\n",
+        "     (136, 165),\n",
+        "     (135, 151),\n",
+        "     (14, 138),\n",
+        "     (144, 165),\n",
+        "     (18, 57),\n",
+        "     (164, 168),\n",
+        "     (111, 163),\n",
+        "     (8, 127),\n",
+        "     (45, 166),\n",
+        "     (28, 98),\n",
+        "     (85, 127),\n",
+        "     (140, 177),\n",
+        "     (43, 150),\n",
+        "     (159, 162),\n",
+        "     (90, 106),\n",
+        "     (53, 102),\n",
+        "     (109, 137),\n",
+        "     (165, 183),\n",
+        "     (76, 137),\n",
+        "     (95, 135),\n",
+        "     (25, 138),\n",
+        "     (173, 183),\n",
+        "     (46, 63),\n",
+        "     (47, 183),\n",
+        "     (52, 162),\n",
+        "     (128, 141),\n",
+        "     (93, 96),\n",
+        "     (4, 154),\n",
+        "     (98, 109),\n",
+        "     (15, 30),\n",
+        "     (119, 154),\n",
+        "     (16, 141),\n",
+        "     (35, 187),\n",
+        "     (18, 161),\n",
+        "     (19, 136),\n",
+        "     (38, 164),\n",
+        "     (58, 181),\n",
+        "     (24, 125),\n",
+        "     (23, 31),\n",
+        "     (136, 172),\n",
+        "     (63, 164),\n",
+        "     (101, 113),\n",
+        "     (86, 176),\n",
+        "     (161, 164),\n",
+        "     (153, 162),\n",
+        "     (111, 168),\n",
+        "     (3, 83),\n",
+        "     (116, 165),\n",
+        "     (28, 121),\n",
+        "     (122, 140),\n",
+        "     (49, 164),\n",
+        "     (33, 98),\n",
+        "     (87, 147),\n",
+        "     (109, 130),\n",
+        "     (95, 179),\n",
+        "     (152, 155),\n",
+        "     (30, 156),\n",
+        "     (124, 148),\n",
+        "     (56, 172),\n",
+        "     (128, 159),\n",
+        "     (62, 70),\n",
+        "     (119, 147),\n",
+        "     (27, 57),\n",
+        "     (103, 128),\n",
+        "     (52, 88),\n",
+        "     (54, 177),\n",
+        "     (19, 145),\n",
+        "     (77, 112),\n",
+        "     (6, 52),\n",
+        "     (82, 125),\n",
+        "     (159, 163),\n",
+        "     (106, 119),\n",
+        "     (37, 168),\n",
+        "     (38, 46),\n",
+        "     (4, 106),\n",
+        "     (43, 49),\n",
+        "     (8, 109),\n",
+        "     (120, 167),\n",
+        "     (163, 164),\n",
+        "     (33, 123),\n",
+        "     (37, 110),\n",
+        "     (145, 180),\n",
+        "     (165, 185),\n",
+        "     (21, 95),\n",
+        "     (170, 180),\n",
+        "     (25, 148),\n",
+        "     (27, 136),\n",
+        "     (101, 129),\n",
+        "     (121, 136),\n",
+        "     (58, 82),\n",
+        "     (30, 135),\n",
+        "     (88, 98),\n",
+        "     (53, 153),\n",
+        "     (56, 179),\n",
+        "     (4, 136),\n",
+        "     (5, 6),\n",
+        "     (62, 73),\n",
+        "     (100, 132),\n",
+        "     (15, 93),\n",
+        "     (54, 136),\n",
+        "     (43, 153),\n",
+        "     (6, 47),\n",
+        "     (63, 170),\n",
+        "     (102, 115),\n",
+        "     (85, 153),\n",
+        "     (34, 174),\n",
+        "     (35, 54),\n",
+        "     (95, 150),\n",
+        "     (120, 174),\n",
+        "     (51, 150),\n",
+        "     (87, 161),\n",
+        "     (73, 133),\n",
+        "     (21, 88),\n",
+        "     (95, 161),\n",
+        "     (7, 85),\n",
+        "     (121, 145),\n",
+        "     (104, 183),\n",
+        "     (16, 125),\n",
+        "     (36, 110),\n",
+        "     (37, 120),\n",
+        "     (40, 102),\n",
+        "     (47, 51),\n",
+        "     (87, 88),\n",
+        "     (18, 138),\n",
+        "     (24, 82),\n",
+        "     (43, 158),\n",
+        "     (6, 38),\n",
+        "     (45, 121),\n",
+        "     (85, 162),\n",
+        "     (13, 80),\n",
+        "     (38, 88),\n",
+        "     (8, 27),\n",
+        "     (157, 167),\n",
+        "     (45, 138),\n",
+        "     (51, 159),\n",
+        "     (145, 166),\n",
+        "     (57, 127),\n",
+        "     (76, 173),\n",
+        "     (95, 174),\n",
+        "     (61, 66),\n",
+        "     (27, 134),\n",
+        "     (121, 154),\n",
+        "     (123, 138),\n",
+        "     (124, 161),\n",
+        "     (65, 88),\n",
+        "     (10, 160),\n",
+        "     (36, 149),\n",
+        "     (67, 77),\n",
+        "     (47, 56),\n",
+        "     (178, 185),\n",
+        "     (52, 53),\n",
+        "     (35, 87),\n",
+        "     (54, 154),\n",
+        "     (40, 170),\n",
+        "     (114, 145),\n",
+        "     (59, 77),\n",
+        "     (43, 151),\n",
+        "     (82, 102),\n",
+        "     (45, 114),\n",
+        "     (28, 46),\n",
+        "     (141, 148),\n",
+        "     (82, 153),\n",
+        "     (35, 36),\n",
+        "     (110, 157),\n",
+        "     (40, 57),\n",
+        "     (39, 163),\n",
+        "     (82, 162),\n",
+        "     (65, 87),\n",
+        "     (169, 186),\n",
+        "     (87, 183),\n",
+        "     (57, 128),\n",
+        "     (145, 173),\n",
+        "     (44, 61),\n",
+        "     (104, 165),\n",
+        "     (12, 22),\n",
+        "     (67, 131),\n",
+        "     (52, 141),\n",
+        "     (15, 59),\n",
+        "     (77, 131),\n",
+        "     (24, 49),\n",
+        "     (102, 149),\n",
+        "     (103, 164),\n",
+        "     (86, 100),\n",
+        "     (96, 151),\n",
+        "     (19, 173),\n",
+        "     (114, 136),\n",
+        "     (79, 143),\n",
+        "     (28, 37),\n",
+        "     (85, 180),\n",
+        "     (137, 147),\n",
+        "     (3, 59),\n",
+        "     (35, 45),\n",
+        "     (110, 164),\n",
+        "     (147, 154),\n",
+        "     (39, 88),\n",
+        "     (97, 177),\n",
+        "     (7, 51),\n",
+        "     (16, 57),\n",
+        "     (36, 52),\n",
+        "     (149, 174),\n",
+        "     (151, 158),\n",
+        "     (8, 152),\n",
+        "     (104, 172),\n",
+        "     (123, 152),\n",
+        "     (88, 102),\n",
+        "     (52, 148),\n",
+        "     (71, 167),\n",
+        "     (36, 155),\n",
+        "     (69, 81),\n",
+        "     (137, 178),\n",
+        "     (139, 146),\n",
+        "     (108, 168),\n",
+        "     (19, 178),\n",
+        "     (24, 167),\n",
+        "     (23, 49),\n",
+        "     (82, 88),\n",
+        "     (46, 146),\n",
+        "     (138, 147),\n",
+        "     (14, 123),\n",
+        "     (70, 116),\n",
+        "     (143, 166),\n",
+        "     (7, 40),\n",
+        "     (122, 178),\n",
+        "     (53, 85),\n",
+        "     (36, 51),\n",
+        "     (92, 176),\n",
+        "     (57, 146),\n",
+        "     (21, 124),\n",
+        "     (79, 127),\n",
+        "     (7, 185),\n",
+        "     (121, 173),\n",
+        "     (104, 147),\n",
+        "     (123, 145),\n",
+        "     (47, 88),\n",
+        "     (24, 63),\n",
+        "     (150, 158),\n",
+        "     (137, 187),\n",
+        "     (87, 124),\n",
+        "     (16, 157),\n",
+        "     (19, 187),\n",
+        "     (40, 159),\n",
+        "     (17, 33),\n",
+        "     (24, 174),\n",
+        "     (174, 181),\n",
+        "     (46, 149),\n",
+        "     (138, 154),\n",
+        "     (71, 119),\n",
+        "     (40, 46),\n",
+        "     (113, 117),\n",
+        "     (28, 170),\n",
+        "     (53, 94),\n",
+        "     (36, 58),\n",
+        "     (98, 162),\n",
+        "     (154, 173),\n",
+        "     (119, 173),\n",
+        "     (46, 119),\n",
+        "     (104, 154),\n",
+        "     (51, 104),\n",
+        "     (71, 149),\n",
+        "     (36, 137),\n",
+        "     (38, 157),\n",
+        "     (28, 178),\n",
+        "     (158, 164),\n",
+        "     (5, 141),\n",
+        "     (135, 156),\n",
+        "     (125, 162),\n",
+        "     (15, 135),\n",
+        "     (83, 151),\n",
+        "     (37, 150),\n",
+        "     (21, 172),\n",
+        "     (24, 181),\n",
+        "     (79, 154),\n",
+        "     (33, 57),\n",
+        "     (88, 146),\n",
+        "     (143, 180),\n",
+        "     (109, 185),\n",
+        "     (147, 177),\n",
+        "     (39, 71),\n",
+        "     (5, 179),\n",
+        "     (51, 147),\n",
+        "     (28, 161),\n",
+        "     (104, 114),\n",
+        "     (163, 181),\n",
+        "     (128, 174),\n",
+        "     (33, 42),\n",
+        "     (16, 46),\n",
+        "     (53, 167),\n",
+        "     (2, 113),\n",
+        "     (131, 184),\n",
+        "     (26, 123),\n",
+        "     (27, 123),\n",
+        "     (119, 170),\n",
+        "     (174, 179),\n",
+        "     (106, 173),\n",
+        "     (15, 31),\n",
+        "     (71, 146),\n",
+        "     (38, 148),\n",
+        "     (5, 85),\n",
+        "     (6, 109),\n",
+        "     (82, 178),\n",
+        "     (63, 148),\n",
+        "     (137, 165),\n",
+        "     (139, 185),\n",
+        "     (87, 114),\n",
+        "     (127, 139),\n",
+        "     (55, 101),\n",
+        "     (111, 120),\n",
+        "     (114, 172),\n",
+        "     (45, 79),\n",
+        "     (46, 167),\n",
+        "     (138, 140),\n",
+        "     (140, 152),\n",
+        "     (88, 153),\n",
+        "     (14, 76),\n",
+        "     (53, 63),\n",
+        "     (109, 178),\n",
+        "     (166, 187),\n",
+        "     (25, 163),\n",
+        "     (47, 140),\n",
+        "     (16, 37),\n",
+        "     (36, 40),\n",
+        "     (148, 159),\n",
+        "     (4, 177),\n",
+        "     (8, 180),\n",
+        "     (156, 175),\n",
+        "     (104, 136),\n",
+        "     (71, 139),\n",
+        "     (5, 94),\n",
+        "     (135, 181),\n",
+        "     (66, 92),\n",
+        "     (158, 163),\n",
+        "     (65, 94),\n",
+        "     (125, 180),\n",
+        "     (39, 65),\n",
+        "     (62, 144),\n",
+        "     (46, 174),\n",
+        "     (84, 101),\n",
+        "     (32, 67),\n",
+        "     (51, 161),\n",
+        "     (14, 87),\n",
+        "     (53, 56),\n",
+        "     (146, 183),\n",
+        "     (21, 47),\n",
+        "     (25, 164),\n",
+        "     (47, 149),\n",
+        "     (105, 118),\n",
+        "     (33, 36),\n",
+        "     (16, 28),\n",
+        "     (36, 87),\n",
+        "     (57, 174),\n",
+        "     (5, 54),\n",
+        "     (8, 187),\n",
+        "     (104, 143),\n",
+        "     (51, 127),\n",
+        "     (52, 119),\n",
+        "     (71, 128),\n",
+        "     (36, 166),\n",
+        "     (5, 71),\n",
+        "     (120, 153),\n",
+        "     (136, 138),\n",
+        "     (155, 173),\n",
+        "     (102, 163),\n",
+        "     (139, 183),\n",
+        "     (105, 144),\n",
+        "     (127, 185),\n",
+        "     (164, 181),\n",
+        "     (40, 123),\n",
+        "     (23, 82),\n",
+        "     (26, 178),\n",
+        "     (26, 185),\n",
+        "     (85, 90),\n",
+        "     (21, 40),\n",
+        "     (26, 28),\n",
+        "     (152, 185),\n",
+        "     (26, 37),\n",
+        "     (63, 120),\n",
+        "     (47, 146),\n",
+        "     (49, 77),\n",
+        "     (53, 178),\n",
+        "     (56, 138),\n",
+        "     (131, 175),\n",
+        "     (5, 63),\n",
+        "     (53, 128),\n",
+        "     (7, 154),\n",
+        "     (119, 177),\n",
+        "     (123, 178),\n",
+        "     (179, 187),\n",
+        "     (109, 123),\n",
+        "     (56, 121),\n",
+        "     (37, 57),\n",
+        "     (58, 158),\n",
+        "     (58, 88),\n",
+        "     (136, 177),\n",
+        "     (135, 171),\n",
+        "     (10, 29),\n",
+        "     (27, 125),\n",
+        "     (139, 172),\n",
+        "     (127, 166),\n",
+        "     (37, 138),\n",
+        "     (40, 98),\n",
+        "     (39, 53),\n",
+        "     (71, 177),\n",
+        "     (65, 110),\n",
+        "     (138, 185),\n",
+        "     (1, 60),\n",
+        "     (58, 71),\n",
+        "     (166, 172),\n",
+        "     (115, 138),\n",
+        "     (43, 108),\n",
+        "     (33, 153),\n",
+        "     (47, 155),\n",
+        "     (53, 187),\n",
+        "     (9, 61),\n",
+        "     (148, 180),\n",
+        "     (5, 56),\n",
+        "     (153, 167),\n",
+        "     (7, 147),\n",
+        "     (20, 156),\n",
+        "     (103, 159),\n",
+        "     (16, 153),\n",
+        "     (109, 116),\n",
+        "     (54, 170),\n",
+        "     (19, 116),\n",
+        "     (58, 153),\n",
+        "     (93, 151),\n",
+        "     (82, 150),\n",
+        "     (155, 187),\n",
+        "     (139, 165),\n",
+        "     (105, 130),\n",
+        "     (31, 83),\n",
+        "     (14, 185),\n",
+        "     (23, 161),\n",
+        "     (37, 179),\n",
+        "     (97, 136),\n",
+        "     (23, 96),\n",
+        "     (8, 114),\n",
+        "     (62, 133),\n",
+        "     (45, 179),\n",
+        "     (122, 123),\n",
+        "     (16, 161),\n",
+        "     (51, 180),\n",
+        "     (143, 145),\n",
+        "     (18, 87),\n",
+        "     (21, 58),\n",
+        "     (4, 6),\n",
+        "     (7, 123),\n",
+        "     (63, 110),\n",
+        "     (30, 168),\n",
+        "     (125, 158),\n",
+        "     (37, 90),\n",
+        "     (2, 44),\n",
+        "     (148, 179),\n",
+        "     (97, 106),\n",
+        "     (6, 177),\n",
+        "     (7, 136),\n",
+        "     (103, 148),\n",
+        "     (106, 136),\n",
+        "     (51, 82),\n",
+        "     (35, 140),\n",
+        "     (18, 172),\n",
+        "     (54, 173),\n",
+        "     (130, 136),\n",
+        "     (25, 102),\n",
+        "     (47, 159),\n",
+        "     (158, 159),\n",
+        "     (34, 120),\n",
+        "     (18, 63),\n",
+        "     (38, 58),\n",
+        "     (108, 151),\n",
+        "     (24, 159),\n",
+        "     (8, 121),\n",
+        "     (45, 172),\n",
+        "     (48, 144),\n",
+        "     (140, 187),\n",
+        "     (159, 164),\n",
+        "     (109, 143),\n",
+        "     (146, 155),\n",
+        "     (76, 147),\n",
+        "     (20, 59),\n",
+        "     (46, 53),\n",
+        "     (21, 179),\n",
+        "     (53, 141),\n",
+        "     (46, 128),\n",
+        "     (148, 186),\n",
+        "     (167, 181),\n",
+        "     (27, 85),\n",
+        "     (151, 184),\n",
+        "     (103, 141),\n",
+        "     (18, 167),\n",
+        "     (25, 111),\n",
+        "     (136, 166),\n",
+        "     (135, 150),\n",
+        "     (30, 77),\n",
+        "     (36, 102),\n",
+        "     (14, 139),\n",
+        "     (180, 183),\n",
+        "     (18, 54),\n",
+        "     (40, 87),\n",
+        "     (97, 154),\n",
+        "     (140, 178),\n",
+        "     (87, 149),\n",
+        "     (145, 185),\n",
+        "     (109, 136),\n",
+        "     (57, 98),\n",
+        "     (96, 131),\n",
+        "     (95, 181),\n",
+        "     (43, 83),\n",
+        "     (26, 57),\n",
+        "     (28, 146),\n",
+        "     (120, 151),\n",
+        "     (52, 163),\n",
+        "     (53, 150),\n",
+        "     (56, 166),\n",
+        "     (94, 119),\n",
+        "     (4, 155),\n",
+        "     (98, 106),\n",
+        "     (27, 42),\n",
+        "     (119, 149),\n",
+        "     (40, 63),\n",
+        "     (18, 158),\n",
+        "     (19, 139),\n",
+        "     (130, 154),\n",
+        "     (93, 150),\n",
+        "     (79, 173),\n",
+        "     (136, 173),\n",
+        "     (63, 167),\n",
+        "     (85, 150),\n",
+        "     (161, 187),\n",
+        "     (166, 185),\n",
+        "     (37, 174),\n",
+        "     (38, 52),\n",
+        "     (116, 166),\n",
+        "     (26, 145),\n",
+        "     (48, 142),\n",
+        "     (87, 146),\n",
+        "     (145, 178),\n",
+        "     (21, 85),\n",
+        "     (27, 178),\n",
+        "     (0, 68),\n",
+        "     (46, 71),\n",
+        "     (58, 85),\n",
+        "     (177, 180),\n",
+        "     (51, 56),\n",
+        "     (52, 170),\n",
+        "     (73, 107),\n",
+        "     (36, 121),\n",
+        "     (19, 35),\n",
+        "     (93, 120),\n",
+        "     (22, 60),\n",
+        "     (5, 28),\n",
+        "     (6, 154),\n",
+        "     (119, 146),\n",
+        "     (18, 153),\n",
+        "     (77, 111),\n",
+        "     (43, 163),\n",
+        "     (79, 170),\n",
+        "     (63, 172),\n",
+        "     (83, 175),\n",
+        "     (120, 150),\n",
+        "     (85, 159),\n",
+        "     (106, 116),\n",
+        "     (17, 154),\n",
+        "     (18, 40),\n",
+        "     (6, 46),\n",
+        "     (38, 47),\n",
+        "     (120, 168),\n",
+        "     (48, 133),\n",
+        "     (21, 162),\n",
+        "     (90, 125),\n",
+        "     (53, 119),\n",
+        "     (110, 120),\n",
+        "     (2, 129),\n",
+        "     (21, 94),\n",
+        "     (8, 145),\n",
+        "     (121, 143),\n",
+        "     (104, 177),\n",
+        "     (124, 172),\n",
+        "     (56, 180),\n",
+        "     (77, 151),\n",
+        "     (4, 137),\n",
+        "     (9, 66),\n",
+        "     (47, 53),\n",
+        "     (31, 175),\n",
+        "     (0, 70),\n",
+        "     (54, 137),\n",
+        "     (45, 127),\n",
+        "     (106, 127),\n",
+        "     (161, 181),\n",
+        "     (18, 35),\n",
+        "     (182, 184),\n",
+        "     (93, 168),\n",
+        "     (39, 172),\n",
+        "     (23, 157),\n",
+        "     (104, 138),\n",
+        "     (33, 115),\n",
+        "     (145, 172),\n",
+        "     (153, 181),\n",
+        "     (76, 183),\n",
+        "     (21, 103),\n",
+        "     (69, 80),\n",
+        "     (17, 106),\n",
+        "     (71, 187),\n",
+        "     (56, 187),\n",
+        "     (37, 103),\n",
+        "     (5, 14),\n",
+        "     (6, 148),\n",
+        "     (14, 36),\n",
+        "     (36, 185),\n",
+        "     (40, 164),\n",
+        "     (23, 37),\n",
+        "     (20, 83),\n",
+        "     (65, 172),\n",
+        "     (85, 161),\n",
+        "     (50, 69),\n",
+        "     (18, 26),\n",
+        "     (40, 51),\n",
+        "     (15, 77),\n",
+        "     (97, 166),\n",
+        "     (45, 137),\n",
+        "     (28, 71),\n",
+        "     (122, 166),\n",
+        "     (18, 110),\n",
+        "     (51, 158),\n",
+        "     (145, 165),\n",
+        "     (54, 121),\n",
+        "     (165, 170),\n",
+        "     (114, 121),\n",
+        "     (22, 86),\n",
+        "     (27, 153),\n",
+        "     (46, 88),\n",
+        "     (69, 89),\n",
+        "     (31, 99),\n",
+        "     (36, 150),\n",
+        "     (19, 54),\n",
+        "     (24, 43),\n",
+        "     (5, 119),\n",
+        "     (6, 143),\n",
+        "     (119, 121),\n",
+        "     (103, 174),\n",
+        "     (131, 163),\n",
+        "     (54, 155),\n",
+        "     (20, 31),\n",
+        "     (28, 128),\n",
+        "     (23, 34),\n",
+        "     (82, 103),\n",
+        "     (28, 47),\n",
+        "     (85, 170),\n",
+        "     (13, 72),\n",
+        "     (18, 21),\n",
+        "     (110, 162),\n",
+        "     (93, 186),\n",
+        "     (40, 58),\n",
+        "     (39, 162),\n",
+        "     (28, 94),\n",
+        "     (1, 100),\n",
+        "     (76, 165),\n",
+        "     (39, 51),\n",
+        "     (149, 164),\n",
+        "     (79, 104),\n",
+        "     (8, 146),\n",
+        "     (104, 166),\n",
+        "     (16, 82),\n",
+        "     (153, 159),\n",
+        "     (9, 117),\n",
+        "     (102, 138),\n",
+        "     (103, 167),\n",
+        "     (125, 150),\n",
+        "     (15, 171),\n",
+        "     (54, 146),\n",
+        "     (19, 172),\n",
+        "     (40, 146),\n",
+        "     (114, 137),\n",
+        "     (24, 161),\n",
+        "     (25, 85),\n",
+        "     (4, 14),\n",
+        "     (82, 94),\n",
+        "     (28, 38),\n",
+        "     (85, 179),\n",
+        "     (31, 43),\n",
+        "     (103, 125),\n",
+        "     (17, 134),\n",
+        "     (128, 186),\n",
+        "     (4, 79),\n",
+        "     (26, 90),\n",
+        "     (23, 168),\n",
+        "     (31, 59),\n",
+        "     (28, 85),\n",
+        "     (27, 88),\n",
+        "     (16, 58),\n",
+        "     (119, 139),\n",
+        "     (36, 53),\n",
+        "     (94, 178),\n",
+        "     (39, 40),\n",
+        "     (95, 159),\n",
+        "     (104, 173),\n",
+        "     (123, 155),\n",
+        "     (106, 177),\n",
+        "     (120, 158),\n",
+        "     (52, 149),\n",
+        "     (71, 166),\n",
+        "     (88, 90),\n",
+        "     (24, 57),\n",
+        "     (5, 121),\n",
+        "     (6, 121),\n",
+        "     (137, 177),\n",
+        "     (31, 131),\n",
+        "     (34, 110),\n",
+        "     (40, 153),\n",
+        "     (21, 153),\n",
+        "     (24, 168),\n",
+        "     (25, 94),\n",
+        "     (6, 8),\n",
+        "     (120, 164),\n",
+        "     (71, 121),\n",
+        "     (37, 47),\n",
+        "     (4, 54),\n",
+        "     (116, 136),\n",
+        "     (99, 112),\n",
+        "     (63, 94),\n",
+        "     (28, 172),\n",
+        "     (32, 151),\n",
+        "     (16, 49),\n",
+        "     (94, 181),\n",
+        "     (21, 123),\n",
+        "     (57, 102),\n",
+        "     (121, 172),\n",
+        "     (88, 110),\n",
+        "     (51, 98),\n",
+        "     (71, 159),\n",
+        "     (56, 87),\n",
+        "     (38, 155),\n",
+        "     (150, 159),\n",
+        "     (100, 160),\n",
+        "     (135, 157),\n",
+        "     (87, 127),\n",
+        "     (40, 128),\n",
+        "     (114, 187),\n",
+        "     (25, 39),\n",
+        "     (33, 58),\n",
+        "     (28, 52),\n",
+        "     (138, 155),\n",
+        "     (49, 153),\n",
+        "     (72, 75),\n",
+        "     (40, 47),\n",
+        "     (103, 181),\n",
+        "     (8, 56),\n",
+        "     (63, 71),\n",
+        "     (65, 174),\n",
+        "     (16, 40),\n",
+        "     (57, 154),\n",
+        "     (95, 141),\n",
+        "     (151, 175),\n",
+        "     (7, 177),\n",
+        "     (119, 172),\n",
+        "     (104, 155),\n",
+        "     (12, 60),\n",
+        "     (71, 148),\n",
+        "     (36, 138),\n",
+        "     (13, 81),\n",
+        "     (125, 161),\n",
+        "     (35, 114),\n",
+        "     (37, 149),\n",
+        "     (114, 178),\n",
+        "     (25, 40),\n",
+        "     (45, 85),\n",
+        "     (28, 51)])"
+       ]
+      }
+     ],
+     "prompt_number": 265
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_joint = neighbors_s.intersection(neighbors_t)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 266
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neighbors_joint"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 267,
+       "text": [
+        "set([(41, 42),\n",
+        "     (137, 138),\n",
+        "     (122, 123),\n",
+        "     (116, 118),\n",
+        "     (51, 52),\n",
+        "     (102, 103),\n",
+        "     (162, 163),\n",
+        "     (25, 26),\n",
+        "     (158, 159),\n",
+        "     (46, 47),\n",
+        "     (30, 31),\n",
+        "     (139, 140),\n",
+        "     (35, 36)])"
+       ]
+      }
+     ],
+     "prompt_number": 267
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "#matches knox doc test 13 joint pairs"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 268
+    },
+    {
+     "cell_type": "heading",
+     "level": 2,
+     "metadata": {},
+     "source": [
+      "Experiment with artificial data "
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "n = 100"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 269
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "space = np.random.random((n,2)) # points in the unit square"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 270
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_s = ps.cg.KDTree(space)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 271
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neigh_s = kd_s.query_pairs(0.10) # pairs within 0.10 of each other"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 272
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "len(neigh_s)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 273,
+       "text": [
+        "130"
+       ]
+      }
+     ],
+     "prompt_number": 273
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "times = np.random.random((n,1)) # time from 0 to 1"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 274
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_t = ps.cg.KDTree(times)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 275
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neigh_t = kd_t.query_pairs(0.01) # pairs within .01 units of time of each other"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 276
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "len(neigh_t)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 277,
+       "text": [
+        "94"
+       ]
+      }
+     ],
+     "prompt_number": 277
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joint = neigh_s.intersection(neigh_t) # neighbors in both space and time"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 278
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "npairs = len(joint) # number of pairs"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 279
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# use the number and ids of time neighbors\n",
+      "time_ids = np.array([ pair for pair in neigh_t])\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 280
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ids = np.arange(n)\n",
+      "permutations = 99\n",
+      "larger = 0\n",
+      "for permutation in xrange(permutations):\n",
+      "    np.random.shuffle(ids)\n",
+      "    random_time = np.zeros((len(time_ids),2),int)\n",
+      "    random_time[:,0] = ids[time_ids[:,0]]\n",
+      "    random_time[:,1] = ids[time_ids[:,1]]\n",
+      "    random_time.sort(axis=1)\n",
+      "    random_time = set([tuple(row) for row in random_time])\n",
+      "    random_joint = random_time.intersection(neigh_s)\n",
+      "    if len(random_joint) >= npairs:\n",
+      "        larger += 1\n",
+      "    \n",
+      "    "
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 281
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "print \"Number as large as observed: \",larger\n",
+      "if (permutations - larger) < larger:\n",
+      "    larger = permutations - larger\n",
+      "\n",
+      "p_sim = (larger + 1.) / (permutations + 1.)\n",
+      "print \"p-value: \", p_sim"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Number as large as observed:  52\n",
+        "p-value:  0.48\n"
+       ]
+      }
+     ],
+     "prompt_number": 282
+    },
+    {
+     "cell_type": "heading",
+     "level": 2,
+     "metadata": {},
+     "source": [
+      "With Burkitt data"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "events = interaction.SpaceTimeEvents(ps.examples.get_path(\"burkitt\"), 'T')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 283
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_t = ps.cg.KDTree(events.time)\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 284
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neigh_t = kd_t.query_pairs(5) # five units for time threshold"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 285
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "kd_s = ps.cg.KDTree(events.space)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 286
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "neigh_s = kd_s.query_pairs(20) # 20 unit threshold for space"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 287
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joint = neigh_s.intersection(neigh_t)\n",
+      "npairs = len(joint)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 288
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "npairs"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 289,
+       "text": [
+        "13"
+       ]
+      }
+     ],
+     "prompt_number": 289
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "np.random.seed(12345)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 290
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# use the number and ids of time neighbors\n",
+      "time_ids = np.array([ pair for pair in neigh_t])\n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 291
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ids = np.arange(len(events.time))\n",
+      "permutations = 99\n",
+      "larger = 0\n",
+      "joints = np.zeros((permutations,1), int)\n",
+      "for permutation in xrange(permutations):\n",
+      "    np.random.shuffle(ids)\n",
+      "    random_time = np.zeros((len(time_ids),2),int)\n",
+      "    random_time[:,0] = ids[time_ids[:,0]]\n",
+      "    random_time[:,1] = ids[time_ids[:,1]]\n",
+      "    random_time.sort(axis=1)\n",
+      "    random_time = set([tuple(row) for row in random_time])\n",
+      "    random_joint = random_time.intersection(neigh_s)\n",
+      "    nrj = len(random_joint)\n",
+      "    joints[permutation] = nrj\n",
+      "    if nrj >= npairs:\n",
+      "        larger += 1\n",
+      "    \n",
+      "    "
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 292
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "print \"Number as large as observed: \",larger\n",
+      "if (permutations - larger) < larger:\n",
+      "    larger = permutations - larger\n",
+      "\n",
+      "p_sim = (larger + 1.) / (permutations + 1.)\n",
+      "print \"p-value: \", p_sim"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "stream",
+       "stream": "stdout",
+       "text": [
+        "Number as large as observed:  19\n",
+        "p-value:  0.2\n"
+       ]
+      }
+     ],
+     "prompt_number": 293
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 293
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joints.mean()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 294,
+       "text": [
+        "10.04040404040404"
+       ]
+      }
+     ],
+     "prompt_number": 294
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joints.min()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 295,
+       "text": [
+        "2"
+       ]
+      }
+     ],
+     "prompt_number": 295
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joints.max()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 296,
+       "text": [
+        "17"
+       ]
+      }
+     ],
+     "prompt_number": 296
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "joints.var()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 297,
+       "text": [
+        "8.9074584226099311"
+       ]
+      }
+     ],
+     "prompt_number": 297
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/pysal/spatial_dynamics/interaction.py b/pysal/spatial_dynamics/interaction.py
new file mode 100644
index 0000000..c2a16a0
--- /dev/null
+++ b/pysal/spatial_dynamics/interaction.py
@@ -0,0 +1,641 @@
+"""
+Methods for identifying space-time interaction in spatio-temporal event
+data.
+"""
+__author__ = "Nicholas Malizia <nmalizia at asu.edu>", "Sergio J. Rey \
+<srey at asu.edu>", "Philip Stephens <philip.stephens at asu.edu"
+
+import pysal
+import numpy as np
+import scipy.stats as stats
+import pysal.weights.Distance as Distance
+from pysal import cg
+from pysal.spatial_dynamics import util
+from datetime import date
+
+__all__ = ['SpaceTimeEvents', 'knox', 'mantel', 'jacquez', 'modified_knox']
+
+
+class SpaceTimeEvents:
+    """
+    Method for reformatting event data stored in a shapefile for use in
+    calculating metrics of spatio-temporal interaction.
+
+    Parameters
+    ----------
+    path            : string
+                      the path to the appropriate shapefile, including the
+                      file name, but excluding the extension
+    time_col        : string
+                      column header in the DBF file indicating the column
+                      containing the time stamp
+    infer_timestamp : boolean
+                      if the column containing the timestamp is formatted as
+                      calendar dates, try to coerce them into Python datetime objects
+
+    Attributes
+    ----------
+    n               : int
+                      number of events
+    x               : array
+                      n x 1 array of the x coordinates for the events
+    y               : array
+                      n x 1 array of the y coordinates for the events
+    t               : array
+                      n x 1 array of the temporal coordinates for the events
+    space           : array
+                      n x 2 array of the spatial coordinates (x,y) for the
+                      events
+    time            : array
+                      n x 2 array of the temporal coordinates (t,1) for the
+                      events, the second column is a vector of ones
+
+    Examples
+    --------
+
+    Read in the example shapefile data, ensuring to omit the file
+    extension. In order to successfully create the event data the .dbf file
+    associated with the shapefile should have a column of values that are a
+    timestamp for the events. This timestamp may be a numerical value
+    or a date. Date inference was added in version 1.6.
+
+    >>> path = pysal.examples.get_path("burkitt")
+
+    Create an instance of SpaceTimeEvents from a shapefile, where the
+    temporal information is stored in a column named "T".
+
+    >>> events = SpaceTimeEvents(path,'T')
+
+    See how many events are in the instance.
+
+    >>> events.n
+    188
+
+    Check the spatial coordinates of the first event.
+
+    >>> events.space[0]
+    array([ 300.,  302.])
+
+    Check the time of the first event.
+
+    >>> events.t[0]
+    array([ 413.])
+
+    Calculate the time difference between the first two events.
+
+    >>> events.t[1] - events.t[0]
+    array([ 59.])
+
+    New, in 1.6, date support:
+
+    Now, create an instance of SpaceTimeEvents from a shapefile, where the
+    temporal information is stored in a column named "DATE".
+
+    >>> events = SpaceTimeEvents(path,'DATE')
+
+    See how many events are in the instance.
+
+    >>> events.n
+    188
+
+    Check the spatial coordinates of the first event.
+
+    >>> events.space[0]
+    array([ 300.,  302.])
+
+    Check the time of the first event. Note that this value is equivalent to
+    413 days after January 1, 1900.
+
+    >>> events.t[0][0]
+    datetime.date(1901, 2, 16)
+
+    Calculate the time difference between the first two events.
+
+    >>> (events.t[1][0] - events.t[0][0]).days
+    59
+
+    """
+    def __init__(self, path, time_col, infer_timestamp=False):
+        # open the paired .shp/.dbf files that make up the shapefile
+        shp = pysal.open(path + '.shp')
+        dbf = pysal.open(path + '.dbf')
+
+        # extract the spatial coordinates from the shapefile
+        x = [coords[0] for coords in shp]
+        y = [coords[1] for coords in shp]
+
+        self.n = n = len(shp)
+        x = np.array(x)
+        y = np.array(y)
+        # reshape to n x 1 column vectors so they can be stacked side by side
+        self.x = np.reshape(x, (n, 1))
+        self.y = np.reshape(y, (n, 1))
+        self.space = np.hstack((self.x, self.y))
+
+        # extract the temporal information from the database
+        if infer_timestamp:
+            col = dbf.by_col(time_col)
+            if isinstance(col[0], date):
+                # convert calendar dates to integer day offsets measured
+                # from the earliest event in the column
+                day1 = min(col)
+                col = [(d - day1).days for d in col]
+                t = np.array(col)
+            else:
+                # fall back to the raw column values when the first entry
+                # is not a datetime.date
+                print("Unable to parse your time column as Python datetime \
+                      objects, proceeding as integers.")
+                t = np.array(col)
+        else:
+            t = np.array(dbf.by_col(time_col))
+        # the appended column of ones gives the (t, 1) form documented above
+        line = np.ones((n, 1))
+        self.t = np.reshape(t, (n, 1))
+        self.time = np.hstack((self.t, line))
+
+        # close open objects
+        dbf.close()
+        shp.close()
+
+
+def knox(s_coords, t_coords, delta, tau, permutations=99, debug=False):
+    """
+    Knox test for spatio-temporal interaction. [1]_
+
+    Parameters
+    ----------
+    s_coords        : array
+                      nx2 spatial coordinates
+    t_coords        : array
+                      nx1 temporal coordinates
+    delta           : float
+                      threshold for proximity in space
+    tau             : float
+                      threshold for proximity in time
+    permutations    : int
+                      the number of permutations used to establish pseudo-
+                      significance (default is 99)
+    debug           : bool
+                      if true, debugging information is printed (currently
+                      unused by this implementation)
+
+    Returns
+    -------
+    knox_result     : dictionary
+                      contains the statistic (stat) for the test and the
+                      associated p-value (pvalue)
+    stat            : float
+                      value of the knox test for the dataset
+    pvalue          : float
+                      pseudo p-value associated with the statistic
+    counts          : int
+                      count of space time neighbors (NOTE: not included in
+                      the returned dictionary; only stat and pvalue are set)
+
+    References
+    ----------
+    .. [1] E. Knox. 1964. The detection of space-time
+       interactions. Journal of the Royal Statistical Society. Series C
+       (Applied Statistics), 13(1):25-30.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+
+    Read in the example data and create an instance of SpaceTimeEvents.
+
+    >>> path = pysal.examples.get_path("burkitt")
+    >>> events = SpaceTimeEvents(path,'T')
+
+    Set the random seed generator. This is used by the permutation based
+    inference to replicate the pseudo-significance of our example results -
+    the end-user will normally omit this step.
+
+    >>> np.random.seed(100)
+
+    Run the Knox test with distance and time thresholds of 20 and 5,
+    respectively. This counts the events that are closer than 20 units in
+    space, and 5 units in time.
+
+    >>> result = knox(events.space, events.t, delta=20, tau=5, permutations=99)
+
+    Next, we examine the results. First, we call the statistic from the
+    results dictionary. This reports that there are 13 events close
+    in both space and time, according to our threshold definitions.
+
+    >>> result['stat'] == 13
+    True
+
+    Next, we look at the pseudo-significance of this value, calculated by
+    permuting the timestamps and rerunning the statistics. In this case,
+    the results indicate there is likely no space-time interaction between
+    the events.
+
+    >>> print("%2.2f"%result['pvalue'])
+    0.17
+
+    """
+
+    # Do a kdtree on space first as the number of ties (identical points) is
+    # likely to be lower for space than time.
+
+    kd_s = pysal.cg.KDTree(s_coords)
+    neigh_s = kd_s.query_pairs(delta)
+    # squared threshold avoids a sqrt in the squared-difference test below
+    tau2 = tau * tau
+    ids = np.array(list(neigh_s))
+
+    # For the neighboring pairs in space, determine which are also time
+    # neighbors
+
+    d_t = (t_coords[ids[:, 0]] - t_coords[ids[:, 1]]) ** 2
+    n_st = sum(d_t <= tau2)
+
+    knox_result = {'stat': n_st[0]}
+
+    if permutations:
+        joint = np.zeros((permutations, 1), int)
+        for p in xrange(permutations):
+            # NOTE(review): np.random.shuffle permutes t_coords in place, so
+            # the caller's array is mutated by the inference loop — confirm
+            # this side effect is intended.
+            np.random.shuffle(t_coords)
+            d_t = (t_coords[ids[:, 0]] - t_coords[ids[:, 1]]) ** 2
+            joint[p] = np.sum(d_t <= tau2)
+
+        larger = sum(joint >= n_st[0])
+        # fold to the smaller tail when more than half of the permutation
+        # counts exceed the observed statistic
+        if (permutations - larger) < larger:
+            larger = permutations - larger
+        p_sim = (larger + 1.) / (permutations + 1.)
+        knox_result['pvalue'] = p_sim
+    return knox_result
+
+
+def mantel(s_coords, t_coords, permutations=99, scon=1.0, spow=-1.0, tcon=1.0, tpow=-1.0):
+    """
+    Standardized Mantel test for spatio-temporal interaction. [2]_
+
+    Parameters
+    ----------
+    s_coords        : array
+                      nx2 spatial coordinates
+
+    t_coords        : array
+                      nx1 temporal coordinates
+
+    permutations    : int
+                      the number of permutations used to establish pseudo-
+                      significance (default is 99)
+
+    scon            : float
+                      constant added to spatial distances
+
+    spow            : float
+                      value for power transformation for spatial distances
+
+    tcon            : float
+                      constant added to temporal distances
+
+    tpow            : float
+                      value for power transformation for temporal distances
+
+
+    Returns
+    -------
+    mantel_result   : dictionary
+                      contains the statistic (stat) for the test and the
+                      associated p-value (pvalue)
+    stat            : float
+                      value of the Mantel test for the dataset
+    pvalue          : float
+                      pseudo p-value associated with the statistic
+
+    References
+    ----------
+    .. [2] N. Mantel. 1967. The detection of disease clustering and a
+        generalized regression approach. Cancer Research, 27(2):209-220.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+
+    Read in the example data and create an instance of SpaceTimeEvents.
+
+    >>> path = pysal.examples.get_path("burkitt")
+    >>> events = SpaceTimeEvents(path,'T')
+
+    Set the random seed generator. This is used by the permutation based
+    inference to replicate the pseudo-significance of our example results -
+    the end-user will normally omit this step.
+
+    >>> np.random.seed(100)
+
+    The standardized Mantel test is a measure of matrix correlation between
+    the spatial and temporal distance matrices of the event dataset. The
+    following example runs the standardized Mantel test without a constant
+    or transformation; however, as recommended by Mantel (1967) [2]_, these
+    should be added by the user. This can be done by adjusting the constant
+    and power parameters.
+
+    >>> result = mantel(events.space, events.t, 99, scon=1.0, spow=-1.0, tcon=1.0, tpow=-1.0)
+
+    Next, we examine the result of the test.
+
+    >>> print("%6.6f"%result['stat'])
+    0.048368
+
+    Finally, we look at the pseudo-significance of this value, calculated by
+    permuting the timestamps and rerunning the statistic for each of the 99
+    permutations. According to these parameters, the results indicate
+    space-time interaction between the events.
+
+    >>> print("%2.2f"%result['pvalue'])
+    0.01
+
+
+    """
+
+    t = t_coords
+    s = s_coords
+    n = len(t)
+
+    # calculate the spatial and temporal distance matrices for the events
+    distmat = cg.distance_matrix(s)
+    timemat = cg.distance_matrix(t)
+
+    # calculate the transformed standardized statistic: apply the constant
+    # and power transforms to the off-diagonal distances (util.get_lower —
+    # presumably the lower triangle of each matrix; verify against
+    # pysal.spatial_dynamics.util), then take their Pearson correlation
+    # (pearsonr returns (r, p); only the coefficient r is kept)
+    timevec = (util.get_lower(timemat) + tcon) ** tpow
+    distvec = (util.get_lower(distmat) + scon) ** spow
+    stat = stats.pearsonr(timevec, distvec)[0].sum()
+
+    # return the results (if no inference)
+    if not permutations:
+        return stat
+
+    # loop for generating a random distribution to assess significance:
+    # only the time matrix is permuted; distvec stays fixed
+    dist = []
+    for i in range(permutations):
+        trand = util.shuffle_matrix(timemat, range(n))
+        timevec = (util.get_lower(trand) + tcon) ** tpow
+        m = stats.pearsonr(timevec, distvec)[0].sum()
+        dist.append(m)
+
+    ## establish the pseudo significance of the observed statistic:
+    ## one-sided count of permuted statistics >= the observed value
+    distribution = np.array(dist)
+    greater = np.ma.masked_greater_equal(distribution, stat)
+    count = np.ma.count_masked(greater)
+    pvalue = (count + 1.0) / (permutations + 1.0)
+
+    # report the results
+    mantel_result = {'stat': stat, 'pvalue': pvalue}
+    return mantel_result
+
+
+def jacquez(s_coords, t_coords, k, permutations=99):
+    """
+    Jacquez k nearest neighbors test for spatio-temporal interaction. [3]_
+
+    Parameters
+    ----------
+    s_coords        : array
+                      nx2 spatial coordinates
+
+    t_coords        : array
+                      nx1 temporal coordinates
+
+    k               : int
+                      the number of nearest neighbors to be searched
+
+    permutations    : int
+                      the number of permutations used to establish pseudo-
+                      significance (default is 99)
+
+    Returns
+    -------
+    jacquez_result  : dictionary
+                      contains the statistic (stat) for the test and the
+                      associated p-value (pvalue)
+    stat            : float
+                      value of the Jacquez k nearest neighbors test for the
+                      dataset
+    pvalue          : float
+                      p-value associated with the statistic (normally
+                      distributed with k-1 df; this implementation computes
+                      it by permutation rather than the normal approximation)
+
+    References
+    ----------
+    .. [3] G. Jacquez. 1996. A k nearest neighbour test for space-time
+       interaction. Statistics in Medicine, 15(18):1935-1949.
+
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+
+    Read in the example data and create an instance of SpaceTimeEvents.
+
+    >>> path = pysal.examples.get_path("burkitt")
+    >>> events = SpaceTimeEvents(path,'T')
+
+    The Jacquez test counts the number of events that are k nearest
+    neighbors in both time and space. The following runs the Jacquez test
+    on the example data and reports the resulting statistic. In this case,
+    there are 13 instances where events are nearest neighbors in both space
+    and time.
+
+    # turning off as kdtree changes from scipy < 0.12 return 13
+    #>>> np.random.seed(100)
+    #>>> result = jacquez(events.space, events.t ,k=3,permutations=99)
+    #>>> print result['stat']
+    #12
+
+    The significance of this can be assessed by calling the p-
+    value from the results dictionary, as shown below. Again, no
+    space-time interaction is observed.
+
+    #>>> result['pvalue'] < 0.01
+    #False
+
+    """
+    time = t_coords
+    space = s_coords
+    n = len(time)
+
+    # calculate the nearest neighbors in space and time separately
+    knnt = Distance.knnW(time, k)
+    knns = Distance.knnW(space, k)
+
+    nnt = knnt.neighbors
+    nns = knns.neighbors
+    knn_sum = 0
+
+    # determine which events are nearest neighbors in both space and time:
+    # for each event, count the overlap of its k temporal and k spatial
+    # neighbor sets and accumulate the total
+    for i in range(n):
+        t_neighbors = nnt[i]
+        s_neighbors = nns[i]
+        check = set(t_neighbors)
+        inter = check.intersection(s_neighbors)
+        count = len(inter)
+        knn_sum += count
+
+    stat = knn_sum
+
+    # return the results (if no inference)
+    if not permutations:
+        return stat
+
+    # loop for generating a random distribution to assess significance:
+    # the time coordinates are permuted and the temporal k-NN recomputed,
+    # while the spatial neighbor sets (nns) stay fixed
+    dist = []
+    for p in range(permutations):
+        j = 0
+        trand = np.random.permutation(time)
+        knnt = Distance.knnW(trand, k)
+        nnt = knnt.neighbors
+        for i in range(n):
+            t_neighbors = nnt[i]
+            s_neighbors = nns[i]
+            check = set(t_neighbors)
+            inter = check.intersection(s_neighbors)
+            count = len(inter)
+            j += count
+
+        dist.append(j)
+
+    # establish the pseudo significance of the observed statistic:
+    # one-sided count of permuted statistics >= the observed value
+    distribution = np.array(dist)
+    greater = np.ma.masked_greater_equal(distribution, stat)
+    count = np.ma.count_masked(greater)
+    pvalue = (count + 1.0) / (permutations + 1.0)
+
+    # report the results
+    jacquez_result = {'stat': stat, 'pvalue': pvalue}
+    return jacquez_result
+
+
+def modified_knox(s_coords, t_coords, delta, tau, permutations=99):
+    """
+    Baker's modified Knox test for spatio-temporal interaction. [4]_
+
+    Parameters
+    ----------
+    s_coords        : array
+                      nx2 spatial coordinates
+    t_coords        : array
+                      nx1 temporal coordinates
+    delta           : float
+                      threshold for proximity in space
+    tau             : float
+                      threshold for proximity in time
+    permutations    : int
+                      the number of permutations used to establish pseudo-
+                      significance (default is 99)
+
+    Returns
+    -------
+    modknox_result  : dictionary
+                      contains the statistic (stat) for the test and the
+                      associated p-value (pvalue)
+    stat            : float
+                      value of the modified knox test for the dataset
+    pvalue          : float
+                      pseudo p-value associated with the statistic
+
+    References
+    ----------
+    .. [4] R.D. Baker. Identifying space-time disease clusters. Acta Tropica,
+       91(3):291-299, 2004
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+
+    Read in the example data and create an instance of SpaceTimeEvents.
+
+    >>> path = pysal.examples.get_path("burkitt")
+    >>> events = SpaceTimeEvents(path, 'T')
+
+    Set the random seed generator. This is used by the permutation based
+    inference to replicate the pseudo-significance of our example results -
+    the end-user will normally omit this step.
+
+    >>> np.random.seed(100)
+
+    Run the modified Knox test with distance and time thresholds of 20 and 5,
+    respectively. This counts the events that are closer than 20 units in
+    space, and 5 units in time.
+
+    >>> result = modified_knox(events.space, events.t, delta=20, tau=5, permutations=99)
+
+    Next, we examine the results. First, we call the statistic from the
+    results dictionary. This reports the difference between the observed
+    and expected Knox statistic.
+
+    >>> print("%2.8f" % result['stat'])
+    2.81016043
+
+    Next, we look at the pseudo-significance of this value, calculated by
+    permuting the timestamps and rerunning the statistics. In this case,
+    the results indicate there is likely no space-time interaction.
+
+    >>> print("%2.2f" % result['pvalue'])
+    0.11
+
+    """
+    s = s_coords
+    t = t_coords
+    n = len(t)
+
+    # calculate the spatial and temporal distance matrices for the events
+    sdistmat = cg.distance_matrix(s)
+    tdistmat = cg.distance_matrix(t)
+
+    # identify events within thresholds (0/1 indicator matrices)
+    spacmat = np.ones((n, n))
+    spacbin = sdistmat <= delta
+    spacmat = spacmat * spacbin
+    timemat = np.ones((n, n))
+    timebin = tdistmat <= tau
+    timemat = timemat * timebin
+
+    # calculate the observed (original) statistic; subtracting n removes
+    # the diagonal self-pairs, which always satisfy both thresholds
+    knoxmat = timemat * spacmat
+    obsstat = (knoxmat.sum() - n)
+
+    # calculate the expected value
+    ssumvec = np.reshape((spacbin.sum(axis=0) - 1), (n, 1))
+    tsumvec = np.reshape((timebin.sum(axis=0) - 1), (n, 1))
+    expstat = (ssumvec * tsumvec).sum()
+
+    # calculate the modified stat
+    stat = (obsstat - (expstat / (n - 1.0))) / 2.0
+
+    # return results (if no inference)
+    if not permutations:
+        return stat
+    distribution = []
+
+    # loop for generating a random distribution to assess significance:
+    # the temporal distance matrix is shuffled while space stays fixed
+    for p in range(permutations):
+        rtdistmat = util.shuffle_matrix(tdistmat, range(n))
+        timemat = np.ones((n, n))
+        timebin = rtdistmat <= tau
+        timemat = timemat * timebin
+
+        # calculate the observed knox again
+        knoxmat = timemat * spacmat
+        obsstat = (knoxmat.sum() - n)
+
+        # calculate the expected value again
+        # NOTE(review): ssumvec is loop-invariant (spacbin never changes)
+        # and is recomputed on every iteration
+        ssumvec = np.reshape((spacbin.sum(axis=0) - 1), (n, 1))
+        tsumvec = np.reshape((timebin.sum(axis=0) - 1), (n, 1))
+        expstat = (ssumvec * tsumvec).sum()
+
+        # calculate the modified stat
+        tempstat = (obsstat - (expstat / (n - 1.0))) / 2.0
+        distribution.append(tempstat)
+
+    # establish the pseudo significance of the observed statistic:
+    # one-sided count of permuted statistics >= the observed value
+    distribution = np.array(distribution)
+    greater = np.ma.masked_greater_equal(distribution, stat)
+    count = np.ma.count_masked(greater)
+    pvalue = (count + 1.0) / (permutations + 1.0)
+
+    # return results
+    modknox_result = {'stat': stat, 'pvalue': pvalue}
+    return modknox_result
+
+# Run the module's doctests when executed directly.
+if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
diff --git a/pysal/spatial_dynamics/markov.py b/pysal/spatial_dynamics/markov.py
new file mode 100644
index 0000000..d5dce5e
--- /dev/null
+++ b/pysal/spatial_dynamics/markov.py
@@ -0,0 +1,1548 @@
+"""
+Markov based methods for spatial dynamics
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+from pysal.spatial_dynamics.ergodic import fmpt, steady_state
+from scipy import stats
+import pysal
+from operator import gt
+
+__all__ = ["Markov", "LISA_Markov", "Spatial_Markov", "kullback",
+           "prais", "shorrock", "homogeneity"]
+
# TT predefines the LISA transition types:
# TT[i, j] is the transition type from quadrant i (period 0) to
# quadrant j (period 1); the 16 types run 1..16 in row-major order.
# Row/column 0 are unused padding so the quadrant labels 1..4 can be
# used to index TT directly (one-offset).
TT = np.zeros((5, 5), int)
TT[1:, 1:] = np.arange(1, 17).reshape(4, 4)
+
# MOVE_TYPES maps a LISA transition, filtered on the significance of the
# LISA end points, to an integer code in 1..64.
# A key is (quadrant_0, quadrant_1, sig_0, sig_1) where True indicates a
# significant LISA in that period.
# e.g. a key of (1, 3, True, False) indicates a significant LISA located in
# quadrant 1 in period 0 moved to quadrant 3 in period 1 but was not
# significant in quadrant 3.
# Codes 1-16 cover (True, True) moves, 17-32 (True, False),
# 33-48 (False, True) and 49-64 (False, False), each ordered row-major
# by quadrant pair.

MOVE_TYPES = {}
c = 1
cases = (True, False)
sig_keys = [(i, j) for i in cases for j in cases]

# distinct loop variables: the original inner loops shadowed the
# enumerate index `i`, which only worked because the offset was read
# before the shadowing occurred
for offset, sig_key in enumerate(sig_keys):
    c = 1 + offset * 16
    for q0 in range(1, 5):
        for q1 in range(1, 5):
            MOVE_TYPES[(q0, q1, sig_key[0], sig_key[1])] = c
            c += 1
+
+
class Markov:
    """
    Classic Markov transition matrices

    Parameters
    ----------
    class_ids    : array (n, t)
                   One row per observation, one column recording the state of each
                   observation, with as many columns as time periods
    classes      : array (k), optional
                   All different classes (bins) of the matrix; if omitted
                   the classes observed in class_ids are used

    Attributes
    ----------
    p            : matrix (k, k)
                   transition probability matrix

    steady_state : matrix (k, 1)
                   ergodic distribution

    transitions  : matrix (k, k)
                   count of transitions between each state i and j

    Examples
    --------
    >>> c = np.array([['b','a','c'],['c','c','a'],['c','b','c'],['a','a','b'],['a','b','c']])
    >>> m = Markov(c)
    >>> m.classes
    array(['a', 'b', 'c'], 
          dtype='|S1')
    >>> m.p
    matrix([[ 0.25      ,  0.5       ,  0.25      ],
            [ 0.33333333,  0.        ,  0.66666667],
            [ 0.33333333,  0.33333333,  0.33333333]])
    >>> m.steady_state
    matrix([[ 0.30769231],
            [ 0.28846154],
            [ 0.40384615]])

    US nominal per capita income 48 states 81 years 1929-2009

    >>> import pysal
    >>> f = pysal.open(pysal.examples.get_path("usjoin.csv"))
    >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)])

    set classes to quintiles for each year

    >>> q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
    >>> m = Markov(q5)
    >>> m.transitions
    array([[ 729.,   71.,    1.,    0.,    0.],
           [  72.,  567.,   80.,    3.,    0.],
           [   0.,   81.,  631.,   86.,    2.],
           [   0.,    3.,   86.,  573.,   56.],
           [   0.,    0.,    1.,   57.,  741.]])
    >>> m.p
    matrix([[ 0.91011236,  0.0886392 ,  0.00124844,  0.        ,  0.        ],
            [ 0.09972299,  0.78531856,  0.11080332,  0.00415512,  0.        ],
            [ 0.        ,  0.10125   ,  0.78875   ,  0.1075    ,  0.0025    ],
            [ 0.        ,  0.00417827,  0.11977716,  0.79805014,  0.07799443],
            [ 0.        ,  0.        ,  0.00125156,  0.07133917,  0.92740926]])
    >>> m.steady_state
    matrix([[ 0.20774716],
            [ 0.18725774],
            [ 0.20740537],
            [ 0.18821787],
            [ 0.20937187]])

    Relative incomes

    >>> pci = pci.transpose()
    >>> rpci = pci/(pci.mean(axis=0))
    >>> rq = pysal.Quantiles(rpci.flatten()).yb
    >>> rq.shape = (48,81)
    >>> mq = Markov(rq)
    >>> mq.transitions
    array([[ 707.,   58.,    7.,    1.,    0.],
           [  50.,  629.,   80.,    1.,    1.],
           [   4.,   79.,  610.,   73.,    2.],
           [   0.,    7.,   72.,  650.,   37.],
           [   0.,    0.,    0.,   48.,  724.]])
    >>> mq.steady_state
    matrix([[ 0.17957376],
            [ 0.21631443],
            [ 0.21499942],
            [ 0.21134662],
            [ 0.17776576]])

    """
    def __init__(self, class_ids, classes=None):
        # Default is None rather than a shared mutable [] (the pylint
        # "dangerous default value" warning the original carried); an
        # omitted or empty argument falls back to the observed classes,
        # so callers passing classes=[] see unchanged behavior.
        if classes is not None and len(classes):
            self.classes = classes
        else:
            self.classes = np.unique(class_ids)

        n, t = class_ids.shape
        k = len(self.classes)
        js = range(t - 1)

        # count transitions between each pair of consecutive periods
        classIds = self.classes.tolist()
        transitions = np.zeros((k, k))
        for state_0 in js:
            state_1 = state_0 + 1
            state_0 = class_ids[:, state_0]
            state_1 = class_ids[:, state_1]
            initial = np.unique(state_0)
            for i in initial:
                ending = state_1[state_0 == i]
                uending = np.unique(ending)
                row = classIds.index(i)
                for j in uending:
                    col = classIds.index(j)
                    transitions[row, col] += sum(ending == j)
        self.transitions = transitions
        # row-standardize the counts; (row_sum == 0) guards empty rows
        # against division by zero
        row_sum = transitions.sum(axis=1)
        p = np.dot(np.diag(1 / (row_sum + (row_sum == 0))), transitions)
        self.p = np.matrix(p)

        # steady_state vector
        v, d = la.eig(np.transpose(self.p))
        # for a regular P maximum eigenvalue will be 1
        mv = max(v)
        # find its position
        i = v.tolist().index(mv)
        # normalize eigenvector corresponding to the eigenvalue 1
        self.steady_state = d[:, i] / sum(d[:, i])
+
+class Spatial_Markov:
+    """
+    Markov transitions conditioned on the value of the spatial lag
+
+    Parameters
+    ----------
+
+    y               : array (n,t)
+                      One row per observation, one column per state of each
+                      observation, with as many columns as time periods
+
+    w               : spatial weights object
+
+    k               : integer
+                      number of classes (quantiles)
+
+    permutations    : int
+                      number of permutations (default=0) for use in randomization
+                      based inference
+
+    fixed           : boolean
+                      If true, quantiles are taken over the entire n*t
+                      pooled series. If false, quantiles are taken each
+                      time period over n.
+    variable_name   : string
+                      name of variable
+
+    Attributes
+    ----------
+    p               : matrix (k, k)
+                      transition probability matrix for a-spatial Markov
+    s               : matrix (k, 1)
+                      ergodic distribution for a-spatial Markov
+    transitions     : matrix (k, k)
+                      counts of transitions between each state i and j
+                      for a-spatial Markov
+    T               : matrix (k, k, k)
+                      counts of transitions for each conditional Markov
+                      T[0] is the matrix of transitions for observations with
+                      lags in the 0th quantile, T[k-1] is the transitions for
+                      the observations with lags in the k-1th
+    P               : matrix(k, k, k)
+                      transition probability matrix for spatial Markov
+                      first dimension is the conditioned on the lag
+    S               : matrix(k, k)
+                      steady state distributions for spatial Markov
+                      each row is a conditional steady_state
+    F               : matrix(k, k, k)
+                      first mean passage times
+                      first dimension is conditioned on the lag
+    shtest          : list (k elements)
+                      each element of the list is a tuple for a multinomial
+                      difference test between the steady state distribution from
+                      a conditional distribution versus the overall steady state
+                      distribution, first element of the tuple is the chi2 value,
+                      second its p-value and the third the degrees of freedom
+    chi2            : list (k elements)
+                      each element of the list is a tuple for a chi-squared test
+                      of the difference between the conditional transition
+                      matrix against the overall transition matrix, first
+                      element of the tuple is the chi2 value, second its
+                      p-value and the third the degrees of freedom
+    x2              : float
+                      sum of the chi2 values for each of the conditional tests
+                      has an asymptotic chi2 distribution with k(k-1)(k-1)
+                      degrees of freedom under the null that transition
+                      probabilities are spatially homogeneous
+                      (see chi2 above)
+    x2_dof          : int
+                      degrees of freedom for homogeneity test
+    x2_pvalue       : float
+                      pvalue for homogeneity test based on analytic
+                      distribution
+    x2_rpvalue       : float (if permutations>0)
+                      pseudo p-value for x2 based on random spatial permutations
+                      of the rows of the original transitions
+    x2_realizations : array (permutations,1)
+                      the values of x2 for the random permutations
+    Q               : float
+                      Chi-square test of homogeneity across lag classes based
+                      on Bickenbach and Bode (2003) [3]_
+    Q_p_value       : float
+                      p-value for Q
+    LR              : float
+                      Likelihood ratio statistic for homogeneity across lag
+                      classes based on Bickenback and Bode (2003) [3]_
+    LR_p_value      : float
+                      p-value for LR
+    dof_hom         : int
+                      degrees of freedom for LR and Q, corrected for 0 cells.
+
+    Notes
+    -----
+    Based on  Rey (2001) [1]_
+
+    The shtest and chi2 tests should be used with caution as they are based on
+    classic theory assuming random transitions. The x2 based test is
+    preferable since it simulates the randomness under the null. It is an
+    experimental test requiring further analysis.
+
+
+    Examples
+    --------
+    >>> import pysal as ps
+    >>> f = ps.open(ps.examples.get_path("usjoin.csv"))
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)])
+    >>> pci = pci.transpose()
+    >>> rpci = pci/(pci.mean(axis=0))
+    >>> w = ps.open(ps.examples.get_path("states48.gal")).read()
+    >>> w.transform = 'r'
+    >>> sm = ps.Spatial_Markov(rpci, w, fixed=True, k=5, variable_name='rpci')
+    >>> for p in sm.P:
+    ...     print p
+    ...
+    [[ 0.96341463  0.0304878   0.00609756  0.          0.        ]
+     [ 0.06040268  0.83221477  0.10738255  0.          0.        ]
+     [ 0.          0.14        0.74        0.12        0.        ]
+     [ 0.          0.03571429  0.32142857  0.57142857  0.07142857]
+     [ 0.          0.          0.          0.16666667  0.83333333]]
+    [[ 0.79831933  0.16806723  0.03361345  0.          0.        ]
+     [ 0.0754717   0.88207547  0.04245283  0.          0.        ]
+     [ 0.00537634  0.06989247  0.8655914   0.05913978  0.        ]
+     [ 0.          0.          0.06372549  0.90196078  0.03431373]
+     [ 0.          0.          0.          0.19444444  0.80555556]]
+    [[ 0.84693878  0.15306122  0.          0.          0.        ]
+     [ 0.08133971  0.78947368  0.1291866   0.          0.        ]
+     [ 0.00518135  0.0984456   0.79274611  0.0984456   0.00518135]
+     [ 0.          0.          0.09411765  0.87058824  0.03529412]
+     [ 0.          0.          0.          0.10204082  0.89795918]]
+    [[ 0.8852459   0.09836066  0.          0.01639344  0.        ]
+     [ 0.03875969  0.81395349  0.13953488  0.          0.00775194]
+     [ 0.0049505   0.09405941  0.77722772  0.11881188  0.0049505 ]
+     [ 0.          0.02339181  0.12865497  0.75438596  0.09356725]
+     [ 0.          0.          0.          0.09661836  0.90338164]]
+    [[ 0.33333333  0.66666667  0.          0.          0.        ]
+     [ 0.0483871   0.77419355  0.16129032  0.01612903  0.        ]
+     [ 0.01149425  0.16091954  0.74712644  0.08045977  0.        ]
+     [ 0.          0.01036269  0.06217617  0.89637306  0.03108808]
+     [ 0.          0.          0.          0.02352941  0.97647059]]
+
+
+    The probability of a poor state remaining poor is 0.963 if their
+    neighbors are in the 1st quintile and 0.798 if their neighbors are
+    in the 2nd quintile. The probability of a rich economy remaining
+    rich is 0.976 if their neighbors are in the 5th quintile, but if their
+    neighbors are in the 4th quintile this drops to 0.903.
+
+    The Q  and likelihood ratio statistics are both significant indicating
+    the dynamics are not homogeneous across the lag classes:
+
+    >>> "%.3f"%sm.LR
+    '170.659'
+    >>> "%.3f"%sm.Q
+    '200.624'
+    >>> "%.3f"%sm.LR_p_value
+    '0.000'
+    >>> "%.3f"%sm.Q_p_value
+    '0.000'
+    >>> sm.dof_hom
+    60
+
+    The long run distribution for states with poor (rich) neighbors has
+    0.435 (0.018) of the values in the first quintile, 0.263 (0.200) in
+    the second quintile, 0.204 (0.190) in the third, 0.0684 (0.255) in the
+    fourth and 0.029 (0.337) in the fifth quintile.
+
+    >>> sm.S
+    array([[ 0.43509425,  0.2635327 ,  0.20363044,  0.06841983,  0.02932278],
+           [ 0.13391287,  0.33993305,  0.25153036,  0.23343016,  0.04119356],
+           [ 0.12124869,  0.21137444,  0.2635101 ,  0.29013417,  0.1137326 ],
+           [ 0.0776413 ,  0.19748806,  0.25352636,  0.22480415,  0.24654013],
+           [ 0.01776781,  0.19964349,  0.19009833,  0.25524697,  0.3372434 ]])
+
+    States with incomes in the first quintile with neighbors in the
+    first quintile return to the first quartile after 2.298 years, after
+    leaving the first quintile. They enter the fourth quintile after
+    80.810 years after leaving the first quintile, on average.
+    Poor states within neighbors in the fourth quintile return to the
+    first quintile, on average, after 12.88 years, and would enter the
+    fourth quintile after 28.473 years.
+
+    >>> for f in sm.F:
+    ...     print f
+    ...
+    [[   2.29835259   28.95614035   46.14285714   80.80952381  279.42857143]
+     [  33.86549708    3.79459555   22.57142857   57.23809524  255.85714286]
+     [  43.60233918    9.73684211    4.91085714   34.66666667  233.28571429]
+     [  46.62865497   12.76315789    6.25714286   14.61564626  198.61904762]
+     [  52.62865497   18.76315789   12.25714286    6.           34.1031746 ]]
+    [[   7.46754205    9.70574606   25.76785714   74.53116883  194.23446197]
+     [  27.76691978    2.94175577   24.97142857   73.73474026  193.4380334 ]
+     [  53.57477715   28.48447637    3.97566318   48.76331169  168.46660482]
+     [  72.03631562   46.94601483   18.46153846    4.28393653  119.70329314]
+     [  77.17917276   52.08887197   23.6043956     5.14285714   24.27564033]]
+    [[   8.24751154    6.53333333   18.38765432   40.70864198  112.76732026]
+     [  47.35040872    4.73094099   11.85432099   34.17530864  106.23398693]
+     [  69.42288828   24.76666667    3.794921     22.32098765   94.37966594]
+     [  83.72288828   39.06666667   14.3           3.44668119   76.36702977]
+     [  93.52288828   48.86666667   24.1           9.8           8.79255406]]
+    [[  12.87974382   13.34847151   19.83446328   28.47257282   55.82395142]
+     [  99.46114206    5.06359731   10.54545198   23.05133495   49.68944423]
+     [ 117.76777159   23.03735526    3.94436301   15.0843986    43.57927247]
+     [ 127.89752089   32.4393006    14.56853107    4.44831643   31.63099455]
+     [ 138.24752089   42.7893006    24.91853107   10.35          4.05613474]]
+    [[  56.2815534     1.5          10.57236842   27.02173913  110.54347826]
+     [  82.9223301     5.00892857    9.07236842   25.52173913  109.04347826]
+     [  97.17718447   19.53125       5.26043557   21.42391304  104.94565217]
+     [ 127.1407767    48.74107143   33.29605263    3.91777427   83.52173913]
+     [ 169.6407767    91.24107143   75.79605263   42.5           2.96521739]]
+
+
+    References
+    ----------
+
+    .. [3] Bickenbach, F. and E. Bode (2003) "Evaluating the Markov property in studies of economic convergence. International Regional Science Review: 3, 363-392.
+
+    .. [1] Rey, S. (2001) "Spatial empirics for economic growth and convergence." Geographical Analysis, 33: 194-214.
+
+
+   
+    """
+    def __init__(self, y, w, k=4, permutations=0, fixed=False,
+                 variable_name=None):
+
+        self.y = y
+        rows, cols = y.shape
+        self.cols = cols
+        npm = np.matrix
+        npa = np.array
+        self.fixed = fixed
+        self.variable_name = variable_name
+        if fixed:
+            yf = y.flatten()
+            yb = pysal.Quantiles(yf, k=k).yb
+            yb.shape = (rows, cols)
+            classes = yb
+        else:
+            classes = npa([pysal.Quantiles(y[:, i], k=k)
+                           .yb for i in np.arange(cols)]).transpose()
+        classic = Markov(classes)
+        self.classes = classes
+        self.p = classic.p
+        self.s = classic.steady_state
+        self.transitions = classic.transitions
+        T, P, ss, F = self._calc(y, w, classes, k=k)
+        self.T = T
+        self.P = P
+        self.S = ss
+        self.F = F
+        self.shtest = self._mn_test()
+        self.chi2 = self._chi2_test()
+        self.x2 = sum([c[0] for c in self.chi2])
+        dof = k * (k - 1) * (k - 1)
+        self.x2_pvalue = 1 - stats.chi2.cdf(self.x2, dof)
+        self.x2_dof = dof
+        self.k = k
+
+        # bickenbach and bode tests
+        ht = homogeneity(self.T)
+        self.Q = ht.Q
+        self.Q_p_value = ht.Q_p_value
+        self.LR = ht.LR
+        self.LR_p_value = ht.LR_p_value
+        self.dof_hom = ht.dof
+
+
+        if permutations:
+            nrp = np.random.permutation
+            rp = range(permutations)
+            counter = 0
+            x2_realizations = np.zeros((permutations, 1))
+            x2ss = []
+            for perm in range(permutations):
+                T, P, ss, F = self._calc(nrp(y), w, classes, k=k)
+                x2 = [chi2(T[i], self.transitions)[0] for i in range(k)]
+                x2s = sum(x2)
+                x2_realizations[perm] = x2s
+                if x2s >= self.x2:
+                    counter += 1
+            self.x2_rpvalue = (counter + 1.0) / (permutations + 1.)
+            self.x2_realizations = x2_realizations
+
+    def _calc(self, y, w, classes, k):
+        # lag markov
+        ly = pysal.lag_spatial(w, y)
+        npm = np.matrix
+        npa = np.array
+        if self.fixed:
+            l_classes = pysal.Quantiles(ly.flatten(), k=k).yb
+            l_classes.shape = ly.shape
+        else:
+            l_classes = npa([pysal.Quantiles(
+                ly[:, i], k=k).yb for i in np.arange(self.cols)])
+            l_classes = l_classes.transpose()
+        l_classic = Markov(l_classes)
+        T = np.zeros((k, k, k))
+        n, t = y.shape
+        for t1 in range(t - 1):
+            t2 = t1 + 1
+            for i in range(n):
+                T[l_classes[i, t1], classes[i, t1], classes[i, t2]] += 1
+
+        P = np.zeros_like(T)
+        F = np.zeros_like(T)  # fmpt
+        ss = np.zeros_like(T[0])
+        for i, mat in enumerate(T):
+            row_sum = mat.sum(axis=1)
+            row_sum = row_sum + (row_sum == 0)
+            p_i = np.matrix(np.diag(1. / row_sum) * np.matrix(mat))
+            #print i
+            #print mat
+            #print p_i
+            ss[i] = steady_state(p_i).transpose()
+            try:
+                F[i] = fmpt(p_i)
+            except:
+                #pylint; "No exception type(s) specified"
+                print "Singlular fmpt matrix for class ", i
+            P[i] = p_i
+        return T, P, ss, F
+
+    def _mn_test(self):
+        """
+        helper to calculate tests of differences between steady state
+        distributions from the conditional and overall distributions.
+        """
+        n, t = self.y.shape
+        nt = n * (t - 1)
+        n0, n1, n2 = self.T.shape
+        rn = range(n0)
+        mat = [self._ssmnp_test(
+            self.s, self.S[i], self.T[i].sum()) for i in rn]
+        return mat
+
+    def _ssmnp_test(self, p1, p2, nt):
+        """
+        Steady state multinomial probability difference test
+
+        Arguments
+        ---------
+
+        p1       :  array (k, 1)
+                    first steady state probability distribution
+
+        p1       :  array (k, 1)
+                    second steady state probability distribution
+
+        nt       :  int
+                    number of transitions to base the test on
+
+
+        Returns
+        -------
+
+        implicit : tuple (3 elements)
+                   (chi2 value, pvalue, degrees of freedom)
+
+        """
+        p1 = np.array(p1)
+        k, c = p1.shape
+        p1.shape = (k, )
+        o = nt * p2
+        e = nt * p1
+        d = np.multiply((o - e), (o - e))
+        d = d / e
+        chi2 = d.sum()
+        pvalue = 1 - stats.chi2.cdf(chi2, k - 1)
+        return (chi2, pvalue, k - 1)
+
+    def _chi2_test(self):
+        """
+        helper to calculate tests of differences between the conditional
+        transition matrices and the overall transitions matrix.
+        """
+        n, t = self.y.shape
+        n0, n1, n2 = self.T.shape
+        rn = range(n0)
+        mat = [chi2(self.T[i], self.transitions) for i in rn]
+        return mat
+
+    def summary(self, file_name=None):
+        class_names = ["C%d"%i for i in range(self.k)]
+        regime_names = ["LAG%d"%i for i in range(self.k)]
+        ht = homogeneity(self.T, class_names=class_names,
+            regime_names=regime_names)
+        title = "Spatial Markov Test"
+        if self.variable_name:
+            title = title + ": " + self.variable_name
+        if file_name:
+            ht.summary(file_name=file_name, title=title)
+        else:
+            ht.summary(title=title)
+
+
+
def chi2(T1, T2):
    """
    chi-squared test of difference between two transition matrices.

    Parameters
    ----------

    T1   : matrix (k, k)
           matrix of transitions (counts); the observed transitions

    T2   : matrix (k, k)
           matrix of transitions (counts) whose row-standardized
           probabilities form the null

    Returns
    -------

    implicit : tuple (3 elements)
               (chi2 value, pvalue, degrees of freedom)

    Notes
    -----

    The second matrix is used to form the probabilities under the null.
    Marginal sums from the first matrix are distributed across these
    probabilities under the null. In other words the observed transitions
    are taken from T1 while the expected transitions are formed as follows

    .. math::

            E_{i,j} = \sum_j T1_{i,j} * T2_{i,j}/\sum_j T2_{i,j}

    Degrees of freedom are corrected for any rows in either T1 or T2 that
    have zero total transitions.
    """
    null_totals = T2.sum(axis=1)
    obs_totals = T1.sum(axis=1)
    null_nz = null_totals > 0
    obs_nz = obs_totals > 0
    # degrees of freedom corrected for empty rows in either matrix
    dof = (sum(obs_nz) - 1) * (sum(null_nz) - 1)
    # NOTE(review): this adds 1 to the *non-empty* row totals of T2 before
    # row-standardizing (the long-standing, doctested behavior); the
    # analogous guard elsewhere in this module uses (total == 0) instead --
    # confirm which form is intended before changing it.
    null_totals = null_totals + null_nz
    p_null = np.diag(1 / null_totals) * np.matrix(T2)
    expected = np.diag(obs_totals) * np.matrix(p_null)
    resid = T1 - expected
    resid = np.multiply(resid, resid)
    expected = expected + (expected == 0)  # avoid 0/0 cells
    stat = (resid / expected).sum()
    pvalue = 1 - stats.chi2.cdf(stat, dof)
    return stat, pvalue, dof
+
+
+class LISA_Markov(Markov):
+    """
+    Markov for Local Indicators of Spatial Association
+
+    Parameters
+    ----------
+
+    y  : array (n,t)
+         n cross-sectional units observed over t time periods
+    w  : weights instance
+    permutations : int
+                   number of permutations used to determine LISA significance
+                   default = 0
+    significance_level : float
+                         significance level (two-sided) for filtering significant LISA end
+                         points in a transition
+                         default = 0.05
+    geoda_quads    : boolean (default=False)
+                     If True use GeoDa scheme: HH=1, LL=2, LH=3, HL=4
+                     If False use PySAL Scheme: HH=1, LH=2, LL=3, HL=4
+
+    Attributes
+    ----------
+    chi_2        : tuple (3 elements)
+                   chi square test statistic
+                   p-value
+                   degrees of freedom
+                   for test that dynamics of y are independent of dynamics of wy
+    classes      : array (4, 1)
+                   1=HH, 2=LH, 3=LL, 4=HL (own, lag) 
+                   1=HH, 2=LL, 3=LH, 4=HL (own, lag) (if geoda_quads=True)
+    expected_t   : array (4, 4)
+                   expected number of transitions under the null that dynamics
+                   of y are independent of dynamics of wy
+    move_types   : matrix (n, t-1)
+                   integer values indicating which type of LISA transition
+                   occurred (q1 is quadrant in period 1, q2 is quadrant in
+                   period 2)
+
+    .. Table:: Move Types
+
+                   ==  ==     ========
+                   q1  q2     move_type
+                   ==  ==     ========
+                   1   1      1
+                   1   2      2
+                   1   3      3
+                   1   4      4
+                   2   1      5
+                   2   2      6
+                   2   3      7
+                   2   4      8
+                   3   1      9
+                   3   2      10
+                   3   3      11
+                   3   4      12
+                   4   1      13
+                   4   2      14
+                   4   3      15
+                   4   4      16
+                   ==  ==     ========
+
+    p            : matrix (k, k)
+                   transition probability matrix
+    p_values     : (if permutations > 0)
+                   matrix (n, t)
+                   LISA p-values for each end point
+    significant_moves    : (if permutations > 0)
+                       matrix (n, t-1)
+                       integer values indicating the type and significance of a LISA
+                       transition. st = 1 if significant in period t, else
+                       st=0
+
+    .. Table:: Significant Moves
+
+                       ===============  ===================
+                       (s1,s2)          move_type
+                       ===============  ===================
+                       (1,1)            [1, 16]
+                       (1,0)            [17, 32]
+                       (0,1)            [33, 48]
+                       (0,0)            [49, 64]
+                       ===============  ===================
+
+
+                       == ==  ==  ==  =========
+                       q1 q2  s1  s2  move_type
+                       == ==  ==  ==  =========
+                        1  1   1   1   1
+                        1  2   1   1   2
+                        1  3   1   1   3
+                        1  4   1   1   4
+                        2  1   1   1   5
+                        2  2   1   1   6
+                        2  3   1   1   7
+                        2  4   1   1   8
+                        3  1   1   1   9
+                        3  2   1   1   10
+                        3  3   1   1   11
+                        3  4   1   1   12
+                        4  1   1   1   13
+                        4  2   1   1   14
+                        4  3   1   1   15
+                        4  4   1   1   16
+                        1  1   1   0   17
+                        1  2   1   0   18
+                        .  .   .   .    .
+                        .  .   .   .    .
+                        4  3   1   0   31
+                        4  4   1   0   32
+                        1  1   0   1   33
+                        1  2   0   1   34
+                        .  .   .   .    .
+                        .  .   .   .    .
+                        4  3   0   1   47
+                        4  4   0   1   48
+                        1  1   0   0   49
+                        1  2   0   0   50
+                        .  .   .   .    .
+                        .  .   .   .    .
+                        4  3   0   0   63
+                        4  4   0   0   64
+                       == ==  ==  ==  =========
+
+    steady_state : matrix (k, 1)
+                   ergodic distribution
+    transitions  : matrix (4, 4)
+                   count of transitions between each state i and j
+    spillover    : binary array (n, 1)
+                   locations that were not part of a cluster in period 1 but
+                   joined a prexisting cluster in period 2
+
+    Examples
+    --------
+
+    >>> import pysal as ps
+    >>> import numpy as np
+    >>> f = ps.open(ps.examples.get_path("usjoin.csv"))
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)]).transpose()
+    >>> w = ps.open(ps.examples.get_path("states48.gal")).read()
+    >>> lm = ps.LISA_Markov(pci,w)
+    >>> lm.classes
+    array([1, 2, 3, 4])
+    >>> lm.steady_state
+    matrix([[ 0.28561505],
+            [ 0.14190226],
+            [ 0.40493672],
+            [ 0.16754598]])
+    >>> lm.transitions
+    array([[  1.08700000e+03,   4.40000000e+01,   4.00000000e+00,
+              3.40000000e+01],
+           [  4.10000000e+01,   4.70000000e+02,   3.60000000e+01,
+              1.00000000e+00],
+           [  5.00000000e+00,   3.40000000e+01,   1.42200000e+03,
+              3.90000000e+01],
+           [  3.00000000e+01,   1.00000000e+00,   4.00000000e+01,
+              5.52000000e+02]])
+    >>> lm.p
+    matrix([[ 0.92985458,  0.03763901,  0.00342173,  0.02908469],
+            [ 0.07481752,  0.85766423,  0.06569343,  0.00182482],
+            [ 0.00333333,  0.02266667,  0.948     ,  0.026     ],
+            [ 0.04815409,  0.00160514,  0.06420546,  0.88603531]])
+    >>> lm.move_types
+    array([[11, 11, 11, ..., 11, 11, 11],
+           [ 6,  6,  6, ...,  6,  7, 11],
+           [11, 11, 11, ..., 11, 11, 11],
+           ..., 
+           [ 6,  6,  6, ...,  6,  6,  6],
+           [ 1,  1,  1, ...,  6,  6,  6],
+           [16, 16, 16, ..., 16, 16, 16]])
+
+    Now consider only moves with one, or both, of the LISA end points being
+    significant
+
+    >>> np.random.seed(10)
+    >>> lm_random = pysal.LISA_Markov(pci, w, permutations=99)
+    >>> lm_random.significant_moves
+    array([[11, 11, 11, ..., 59, 59, 59],
+           [54, 54, 54, ..., 54, 55, 59],
+           [11, 11, 11, ..., 11, 59, 59],
+           ..., 
+           [54, 54, 54, ..., 54, 54, 54],
+           [49, 49, 49, ..., 54, 54, 54],
+           [64, 64, 64, ..., 64, 64, 64]])
+
+    Any value less than 49 indicates at least one of the LISA end points was
+    significant. So for example, the first spatial unit experienced a
+    transition of type 11 (LL, LL)  during the first three and last three
+    intervals (according to lm.move_types), however, the last three of these
+    transitions involved insignificant LISAs in both the start and ending year
+    of each transition.
+
+    Test whether the moves of y are independent of the moves of wy
+
+    >>> "Chi2: %8.3f, p: %5.2f, dof: %d" % lm.chi_2
+    'Chi2:  162.475, p:  0.00, dof: 9'
+
+    Actual transitions of LISAs
+
+    >>> lm.transitions
+    array([[  1.08700000e+03,   4.40000000e+01,   4.00000000e+00,
+              3.40000000e+01],
+           [  4.10000000e+01,   4.70000000e+02,   3.60000000e+01,
+              1.00000000e+00],
+           [  5.00000000e+00,   3.40000000e+01,   1.42200000e+03,
+              3.90000000e+01],
+           [  3.00000000e+01,   1.00000000e+00,   4.00000000e+01,
+              5.52000000e+02]])
+
+    Expected transitions of LISAs under the null y and wy are moving
+    independently of one another
+
+    >>> lm.expected_t
+    array([[  1.12328098e+03,   1.15377356e+01,   3.47522158e-01,
+              3.38337644e+01],
+           [  3.50272664e+00,   5.28473882e+02,   1.59178880e+01,
+              1.05503814e-01],
+           [  1.53878082e-01,   2.32163556e+01,   1.46690710e+03,
+              9.72266513e+00],
+           [  9.60775143e+00,   9.86856346e-02,   6.23537392e+00,
+              6.07058189e+02]])
+
+    If the LISA classes are to be defined according to GeoDa, the `geoda_quads`
+    option has to be set to true
+
+    >>> lm.q[0:5,0]
+    array([3, 2, 3, 1, 4])
+    >>> lm = ps.LISA_Markov(pci,w, geoda_quads=True)
+    >>> lm.q[0:5,0]
+    array([2, 3, 2, 1, 4])
+
+    """
+    def __init__(self, y, w, permutations=0,
+                 significance_level=0.05, geoda_quads=False):
+        y = y.transpose()
+        pml = pysal.Moran_Local
+        gq = geoda_quads
+
+        #################################################################
+        # have to optimize conditional spatial permutations over a
+        # time series - this is a place holder for the foreclosure paper
+        ml = [pml(yi, w, permutations=permutations, geoda_quads=gq) for yi in y]
+        #################################################################
+
+        q = np.array([mli.q for mli in ml]).transpose()
+        classes = np.arange(1, 5)  # no guarantee all 4 quadrants are visited
+        Markov.__init__(self, q, classes)
+        self.q = q
+        self.w = w
+        n, k = q.shape
+        k -= 1
+        self.significance_level = significance_level
+        move_types = np.zeros((n, k), int)
+        sm = np.zeros((n, k), int)
+        self.significance_level = significance_level
+        if permutations > 0:
+            p = np.array([mli.p_z_sim for mli in ml]).transpose()
+            self.p_values = p
+            pb = p <= significance_level
+        else:
+            pb = np.zeros_like(y.T)
+        for t in range(k):
+            origin = q[:, t]
+            dest = q[:, t + 1]
+            p_origin = pb[:, t]
+            p_dest = pb[:, t]
+            for r in range(n):
+                move_types[r, t] = TT[origin[r], dest[r]]
+                key = (origin[r], dest[r], p_origin[r], p_dest[r])
+                sm[r, t] = MOVE_TYPES[key]
+        if permutations > 0:
+            self.significant_moves = sm
+        self.move_types = move_types
+
+        # null of own and lag moves being independent
+
+        ybar = y.mean(axis=0)
+        r = y / ybar
+        ylag = np.array([pysal.lag_spatial(w, yt) for yt in y])
+        rlag = ylag / ybar
+        rc = r < 1.
+        rlagc = rlag < 1.
+        markov_y = pysal.Markov(rc)
+        markov_ylag = pysal.Markov(rlagc)
+        A = np.matrix([[1, 0, 0, 0],
+                       [0, 0, 1, 0],
+                       [0, 0, 0, 1],
+                       [0, 1, 0, 0]])
+
+        kp = A * np.kron(markov_y.p, markov_ylag.p) * A.T
+        trans = self.transitions.sum(axis=1)
+        t1 = np.diag(trans) * kp
+        t2 = self.transitions
+        t1 = t1.getA()
+        self.chi_2 = pysal.spatial_dynamics.markov.chi2(t1, t2)
+        self.expected_t = t1
+        self.permutations = permutations
+
+    def spillover(self, quadrant=1, neighbors_on=False):
+        """
+        Detect spillover locations for diffusion in LISA Markov
+
+        Parameters
+        ----------
+        quadrant : int
+                   which quadrant in the scatterplot should form the core of a
+                   cluster
+
+        neighbors_on : binary
+                   If false then only the 1st order neighbors of a core
+                   location are included in the cluster.
+                   If true, neighbors of cluster core 1st order neighbors are
+                   included in the cluster
+
+        Returns
+        -------
+        dictionary : two key - value pairs
+                    'components' - array (n, t)
+                    values are integer ids (starting at 1) indicating which
+                    component/cluster observation i in period t belonged to
+                    'spillover' - array (n, t-1)
+                    binary values indicating if the location was a spill-over
+                    location that became a new member of a previously existing
+                    cluster
+
+        Examples
+        --------
+
+        >>> f = pysal.open(pysal.examples.get_path("usjoin.csv"))
+        >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)]).transpose()
+        >>> w = pysal.open(pysal.examples.get_path("states48.gal")).read()
+        >>> np.random.seed(10)
+        >>> lm_random = pysal.LISA_Markov(pci, w, permutations=99)
+        >>> r = lm_random.spillover()
+        >>> r['components'][:,12]
+        array([ 0.,  1.,  0.,  1.,  0.,  2.,  2.,  0.,  0.,  0.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  2.,  2.,  0.,  0.,  0.,  0.,  0.,  0.,  1.,
+                2.,  2.,  0.,  2.,  0.,  0.,  0.,  0.,  1.,  2.,  2.,  0.,  0.,
+                0.,  0.,  0.,  2.,  0.,  0.,  0.,  0.,  0.])
+        >>> r['components'][:,13]
+        array([ 0.,  2.,  0.,  2.,  0.,  1.,  1.,  0.,  0.,  2.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  1.,  1.,  0.,  0.,  0.,  0.,  0.,  0.,  2.,
+                0.,  1.,  0.,  1.,  0.,  0.,  0.,  0.,  2.,  1.,  1.,  0.,  0.,
+                0.,  0.,  2.,  1.,  0.,  2.,  0.,  0.,  0.])
+        >>> r['spill_over'][:,12]
+        array([ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  1.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,
+                0.,  0.,  1.,  0.,  0.,  1.,  0.,  0.,  0.])
+
+        Including neighbors of core neighbors
+
+        >>> rn = lm_random.spillover(neighbors_on=True)
+        >>> rn['components'][:,12]
+        array([ 0.,  2.,  0.,  2.,  2.,  1.,  1.,  0.,  0.,  2.,  0.,  0.,  0.,
+                0.,  0.,  0.,  1.,  1.,  1.,  0.,  0.,  0.,  0.,  0.,  0.,  2.,
+                1.,  1.,  2.,  1.,  0.,  0.,  1.,  0.,  2.,  1.,  1.,  0.,  0.,
+                0.,  0.,  2.,  1.,  1.,  2.,  1.,  0.,  0.])
+        >>> rn["components"][:,13]
+        array([ 0.,  2.,  0.,  2.,  2.,  1.,  1.,  0.,  0.,  2.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  1.,  1.,  0.,  0.,  0.,  0.,  2.,  0.,  2.,
+                1.,  1.,  2.,  1.,  0.,  0.,  1.,  0.,  2.,  1.,  1.,  0.,  0.,
+                0.,  0.,  2.,  1.,  1.,  2.,  1.,  0.,  2.])
+        >>> rn["spill_over"][:,12]
+        array([ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  1.,  0.,  0.,
+                0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,
+                0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  1.])
+
+
+        """
+        n, k = self.q.shape
+        if self.permutations:
+            spill_over = np.zeros((n, k - 1))
+            components = np.zeros((n, k))
+            i2id = {}  # handle string keys
+            for key in self.w.neighbors.keys():
+                id = self.w.id2i[key]  # pylint "redefining built-in 'id'
+                i2id[id] = key
+            sig_lisas = (self.q == quadrant) \
+                * (self.p_values <= self.significance_level)
+            sig_ids = [np.nonzero(
+                sig_lisas[:, i])[0].tolist() for i in range(k)]
+
+            neighbors = self.w.neighbors
+            for t in range(k - 1):
+                s1 = sig_ids[t]
+                s2 = sig_ids[t + 1]
+                g1 = pysal.region.components.Graph(undirected=True)
+                for i in s1:
+                    for neighbor in neighbors[i2id[i]]:
+                        g1.add_edge(i2id[i], neighbor, 1.0)
+                        if neighbors_on:
+                            for nn in neighbors[neighbor]:
+                                g1.add_edge(neighbor, nn, 1.0)
+                components1 = g1.connected_components(op=gt)
+                components1 = [list(c.nodes) for c in components1]
+                g2 = pysal.region.components.Graph(undirected=True)
+                for i in s2:
+                    for neighbor in neighbors[i2id[i]]:
+                        g2.add_edge(i2id[i], neighbor, 1.0)
+                        if neighbors_on:
+                            for nn in neighbors[neighbor]:
+                                g2.add_edge(neighbor, nn, 1.0)
+                components2 = g2.connected_components(op=gt)
+                components2 = [list(c.nodes) for c in components2]
+                c2 = []
+                c1 = []
+                for c in components2:
+                    c2.extend(c)
+                for c in components1:
+                    c1.extend(c)
+
+                new_ids = [j for j in c2 if j not in c1]
+                spill_ids = []
+                for j in new_ids:
+                    # find j's component in period 2
+                    cj = [c for c in components2 if j in c][0]
+                    # for members of j's component in period 2, check if they belonged to
+                    # any components in period 1
+                    for i in cj:
+                        if i in c1:
+                            spill_ids.append(j)
+                            break
+                for spill_id in spill_ids:
+                    id = self.w.id2i[spill_id]
+                    spill_over[id, t] = 1
+                for c, component in enumerate(components1):
+                    for i in component:
+                        ii = self.w.id2i[i]
+                        components[ii, t] = c + 1
+            results = {}
+            results['components'] = components
+            results['spill_over'] = spill_over
+            return results
+
+        else:
+            return None
+
+
+def kullback(F):
+    """
+    Kullback information based test of Markov Homogeneity
+
+    Parameters
+    ----------
+    F : array (s, r, r)
+       Values are transitions (not probabilities) for
+       s strata
+       r initial states
+       r terminal states
+
+    Returns
+    -------
+
+    Results : Dictionary (key - value)
+
+        Conditional homogeneity - (float) test statistic for homogeneity of
+        transition probabilities across strata
+
+        Conditional homogeneity pvalue - (float) p-value for test statistic
+
+        Conditional homogeneity dof - (int) degrees of freedom =  r(s-1)(r-1)
+
+    Notes
+    -----
+
+    Based on  Kullback, Kupperman and Ku (1962) [2]_
+    Example below is taken from Table 9.2 
+
+    Examples
+    --------
+
+    >>> s1 = np.array([
+    ...         [ 22, 11, 24,  2,  2,  7],
+    ...         [ 5, 23, 15,  3, 42,  6],
+    ...         [ 4, 21, 190, 25, 20, 34],
+    ...         [0, 2, 14, 56, 14, 28],
+    ...         [32, 15, 20, 10, 56, 14],
+    ...         [5, 22, 31, 18, 13, 134]
+    ...     ])
+    >>> s2 = np.array([
+    ...     [3, 6, 9, 3, 0, 8],
+    ...     [1, 9, 3, 12, 27, 5],
+    ...     [2, 9, 208, 32, 5, 18],
+    ...     [0, 14, 32, 108, 40, 40],
+    ...     [22, 14, 9, 26, 224, 14],
+    ...     [1, 5, 13, 53, 13, 116]
+    ...     ])
+    >>>
+    >>> F = np.array([s1, s2])
+    >>> res = kullback(F)
+    >>> "%8.3f"%res['Conditional homogeneity']
+    ' 160.961'
+    >>> "%d"%res['Conditional homogeneity dof']
+    '30'
+    >>> "%3.1f"%res['Conditional homogeneity pvalue']
+    '0.0'
+
+    References
+    ----------
+    .. [2] Kullback, S. Kupperman, M. and H.H. Ku. (1962) "Tests for contingency tables and Markov chains", Technometrics: 4, 573--608.
+
+    """
+
+    F1 = F == 0
+    F1 = F + F1
+    FLF = F * np.log(F1)
+    T1 = 2 * FLF.sum()
+
+    FdJK = F.sum(axis=0)
+    FdJK1 = FdJK + (FdJK == 0)
+    FdJKLFdJK = FdJK * np.log(FdJK1)
+    T2 = 2 * FdJKLFdJK.sum()
+
+    FdJd = F.sum(axis=0).sum(axis=1)
+    FdJd1 = FdJd + (FdJd == 0)
+    T3 = 2 * (FdJd * np.log(FdJd1)).sum()
+
+    FIJd = F[:, :].sum(axis=1)
+    FIJd1 = FIJd + (FIJd == 0)
+    T4 = 2 * (FIJd * np.log(FIJd1)).sum()
+
+    FIdd = F.sum(axis=1).sum(axis=1)
+    T5 = 2 * (FIdd * np.log(FIdd)).sum()
+
+    T6 = F.sum()
+    T6 = 2 * T6 * np.log(T6)
+
+    s, r, r1 = F.shape
+    chom = T1 - T4 - T2 + T3
+    cdof = r * (s - 1) * (r - 1)
+    results = {}
+    results['Conditional homogeneity'] = chom
+    results['Conditional homogeneity dof'] = cdof
+    results['Conditional homogeneity pvalue'] = 1 - stats.chi2.cdf(chom, cdof)
+    return results
+
+
+def prais(pmat):
+    """
+    Prais conditional mobility measure
+
+    Parameters
+    ----------
+
+    pmat : kxk matrix
+          Markov probability transition matrix
+
+    Returns
+    -------
+
+    pr : 1xk matrix
+          Conditional mobility measures for each of the k classes.
+
+    Notes
+    -----
+
+    Prais' conditional mobility measure for a class is defined as:
+
+    .. math::
+
+            pr_i = 1 -  p_{i,i}
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> f = pysal.open(pysal.examples.get_path("usjoin.csv"))
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)])
+    >>> q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+    >>> m = pysal.Markov(q5)
+    >>> m.transitions
+    array([[ 729.,   71.,    1.,    0.,    0.],
+           [  72.,  567.,   80.,    3.,    0.],
+           [   0.,   81.,  631.,   86.,    2.],
+           [   0.,    3.,   86.,  573.,   56.],
+           [   0.,    0.,    1.,   57.,  741.]])
+    >>> m.p
+    matrix([[ 0.91011236,  0.0886392 ,  0.00124844,  0.        ,  0.        ],
+            [ 0.09972299,  0.78531856,  0.11080332,  0.00415512,  0.        ],
+            [ 0.        ,  0.10125   ,  0.78875   ,  0.1075    ,  0.0025    ],
+            [ 0.        ,  0.00417827,  0.11977716,  0.79805014,  0.07799443],
+            [ 0.        ,  0.        ,  0.00125156,  0.07133917,  0.92740926]])
+    >>> pysal.spatial_dynamics.markov.prais(m.p)
+    matrix([[ 0.08988764,  0.21468144,  0.21125   ,  0.20194986,  0.07259074]])
+
+    """
+    pr = (pmat.sum(axis=1) - np.diag(pmat))[0]
+    return pr
+
+
+def shorrock(pmat):
+    """
+    Shorrock's mobility measure
+
+    Parameters
+    ----------
+
+    pmat : kxk matrix
+          Markov probability transition matrix
+
+    Returns
+    -------
+
+    sh : scalar
+          Shorrock mobility measure
+
+
+
+    Notes
+    -----
+
+    Shorrock's mobility measure is defined as
+
+    .. math::
+
+         sh = (k  - \sum_{j=1}^{k} p_{j,j})/(k - 1)
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> f = pysal.open(pysal.examples.get_path("usjoin.csv"))
+    >>> pci = np.array([f.by_col[str(y)] for y in range(1929,2010)])
+    >>> q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+    >>> m = pysal.Markov(q5)
+    >>> m.transitions
+    array([[ 729.,   71.,    1.,    0.,    0.],
+           [  72.,  567.,   80.,    3.,    0.],
+           [   0.,   81.,  631.,   86.,    2.],
+           [   0.,    3.,   86.,  573.,   56.],
+           [   0.,    0.,    1.,   57.,  741.]])
+    >>> m.p
+    matrix([[ 0.91011236,  0.0886392 ,  0.00124844,  0.        ,  0.        ],
+            [ 0.09972299,  0.78531856,  0.11080332,  0.00415512,  0.        ],
+            [ 0.        ,  0.10125   ,  0.78875   ,  0.1075    ,  0.0025    ],
+            [ 0.        ,  0.00417827,  0.11977716,  0.79805014,  0.07799443],
+            [ 0.        ,  0.        ,  0.00125156,  0.07133917,  0.92740926]])
+    >>> pysal.spatial_dynamics.markov.shorrock(m.p)
+    0.19758992000997844
+
+    """
+    t = np.trace(pmat)
+    k = pmat.shape[1]
+    sh = (k - t) / (k - 1)
+    return sh
+
+def homogeneity(transition_matrices, regime_names=[], class_names=[], \
+                     title="Markov Homogeneity Test"):
+    """
+    Test for homogeneity of Markov transition probabilities across regimes.
+
+    Parameters
+    ----------
+
+    transition_matrices: list of transition matrices for regimes
+                         all matrices must have same size (r,c)
+                         r is the number of rows in
+                         the transition matrix and c is the number of columns
+                         in the transition matrix.
+
+    regime_names: sequence
+                Labels for the regimes
+
+    class_names: sequence
+                Labels for the classes/states of the Markov chain
+
+    title: string
+            name of test
+
+    Returns
+    ------- 
+
+    implicit: an instance of Homogeneity_Results
+    """
+
+    return Homogeneity_Results(transition_matrices, regime_names=regime_names,
+                                 class_names= class_names,
+                                 title=title)
+
+class Homogeneity_Results:
+    """
+    Wrapper class to present homogeneity results
+ 
+    Parameters
+    ----------
+
+    transition_matrices: list of transition matrices for regimes
+                         all matrices must have same size (r,c)
+                         r is the number of rows in
+                         the transition matrix and c is the number of columns
+                         in the transition matrix.
+                         
+    regime_names: sequence
+                Labels for the regimes
+
+    class_names: sequence
+                Labels for the classes/states of the Markov chain
+    title: string
+           Title of the table
+
+    Notes
+    -----
+    Degrees of freedom adjustment follow the approach in Bickenbach and Bode (2003) [3]_
+
+    Examples
+    --------
+    See Spatial_Markov above.
+
+    """
+
+    def __init__(self, transition_matrices, regime_names=[], class_names = [],
+            title="Markov Homogeneity Test"):
+        self._homogeneity(transition_matrices)
+        self.regime_names=regime_names
+        self.class_names = class_names
+        self.title = title
+
+    def _homogeneity(self, transition_matrices):
+        # form null transition probability matrix
+        M = np.array(transition_matrices)
+        m,r,k = M.shape
+        self.k = k
+        B = np.zeros((r,m))
+        T = M.sum(axis=0)
+        self.t_total = T.sum()
+        n_i = T.sum(axis=1)
+        A_i = (T>0).sum(axis=1)
+        A_im = np.zeros((r,m))
+        p_ij = np.dot(np.diag(1./(n_i + (n_i==0)*1.)), T)
+        den = p_ij + 1. * (p_ij==0)
+        b_i = np.zeros_like(A_i)
+        p_ijm = np.zeros_like(M)
+        # get dimensions
+        m, n_rows, n_cols = M.shape
+        m = 0
+        Q = 0.0
+        LR = 0.0
+        lr_table = np.zeros_like(M)
+        q_table = np.zeros_like(M)
+        
+        for nijm in M:
+            nim = nijm.sum(axis=1)
+            B[:,m] = 1.*(nim>0)
+            b_i = b_i + 1. * (nim>0)
+            p_ijm[m] = np.dot(np.diag(1./(nim + (nim==0)*1.)),nijm)
+            num = (p_ijm[m]-p_ij)**2
+            ratio = num / den
+            qijm = np.dot(np.diag(nim), ratio)
+            q_table[m] = qijm
+            Q = Q + qijm.sum()
+            # only use nonzero pijm in lr test
+            mask = (nijm > 0) * (p_ij > 0)
+            A_im[:,m] = (nijm>0).sum(axis=1)
+            unmask = 1.0 * (mask==0)
+            ratio = (mask * p_ijm[m] + unmask) / (mask * p_ij + unmask)
+            lr = nijm * np.log(ratio)
+            LR = LR + lr.sum()
+            lr_table[m] = 2 * lr
+            m += 1
+        # b_i is the number of regimes that have non-zero observations in row i
+        # A_i is the number of non-zero elements in row i of the aggregated
+        # transition matrix
+        self.dof = int(((b_i-1) * (A_i-1)).sum())
+        self.Q = Q
+        self.Q_p_value = 1 - stats.chi2.cdf(self.Q, self.dof)
+        self.LR = LR * 2.
+        self.LR_p_value = 1 - stats.chi2.cdf(self.LR, self.dof)
+        self.A = A_i
+        self.A_im = A_im
+        self.B = B
+        self.b_i = b_i
+        self.LR_table = lr_table
+        self.Q_table = q_table
+        self.m = m
+        self.p_h0 = p_ij
+        self.p_h1 = p_ijm
+
+    def summary(self, file_name=None, title="Markov Homogeneity Test"):
+        regime_names = ["%d"%i for i in range(self.m)]
+        if self.regime_names:
+            regime_names = self.regime_names
+        cols = ["P(%s)"%str(regime) for regime in regime_names]
+        if not self.class_names:
+            self.class_names = range(self.k)
+
+        max_col = max([len(col) for col in cols])
+        col_width = max([5, max_col]) #probabilities have 5 chars
+        n_tabs = self.k
+        width = n_tabs * 4 + (self.k+1)*col_width
+        lead = "-"* width
+        head = title.center(width)
+        contents = [lead,head,lead]
+        l = "Number of regimes: %d" % int(self.m)
+        k = "Number of classes: %d" % int(self.k)
+        r = "Regime names: "
+        r += ", ".join(regime_names)
+        t = "Number of transitions: %d" % int(self.t_total)
+        contents.append(k)
+        contents.append(t)
+        contents.append(l)
+        contents.append(r)
+        contents.append(lead)
+        h = "%7s %20s %20s"%('Test', 'LR', 'Chi-2')
+        contents.append(h)
+        stat = "%7s %20.3f %20.3f"%('Stat.', self.LR, self.Q)
+        contents.append(stat)
+        stat = "%7s %20d %20d"%('DOF', self.dof, self.dof)
+        contents.append(stat)
+        stat = "%7s %20.3f %20.3f"%('p-value', self.LR_p_value,
+            self.Q_p_value)
+        contents.append(stat)
+        print "\n".join(contents)
+        print lead
+
+        cols = ["P(%s)"%str(regime) for regime in self.regime_names]
+        if not self.class_names:
+            self.class_names = range(self.k)
+        cols.extend(["%s"%str(cname) for cname in self.class_names])
+
+        max_col = max([len(col) for col in cols])
+        col_width = max([5, max_col]) #probabilities have 5 chars
+        p0 = []
+        line0 = [  '{s: <{w}}'.format(s="P(H0)",w=col_width)   ]
+        line0.extend([ '{s: >{w}}'.format(s=cname,w=col_width) for cname in self.class_names])
+        print "    ".join(line0)
+        p0.append("&".join(line0))
+        for i,row in enumerate(self.p_h0):
+            line = ["%*s"%(col_width, str(self.class_names[i]))]
+            line.extend(["%*.3f"%(col_width,v) for v in row])
+            print  "    ".join(line)
+            p0.append("&".join(line))
+        pmats = [p0]
+
+        print lead
+        for r, p1 in enumerate(self.p_h1):
+            p0 = []
+            line0 = [  '{s: <{w}}'.format(s="P(%s)"%regime_names[r],w=col_width)   ]
+            line0.extend([ '{s: >{w}}'.format(s=cname,w=col_width) for cname in self.class_names])
+            print "    ".join(line0)
+            p0.append("&".join(line0))
+            for i,row in enumerate(p1):
+                line = ["%*s"%(col_width, str(self.class_names[i]))]
+                line.extend(["%*.3f"%(col_width,v) for v in row])
+                print  "    ".join(line)
+                p0.append("&".join(line))
+            pmats.append(p0) 
+            print lead
+
+        if file_name:
+            k = self.k
+            ks = str(k+1)
+            with open(file_name, 'w') as f:
+                c = []
+                fmt = "r"*(k+1)
+                s="\\begin{tabular}{|%s|}\\hline\n"%fmt
+                s+= "\\multicolumn{%s}{|c|}{%s}"%(ks,title)
+                c.append(s)
+                s = "Number of classes: %d"%int(self.k)
+                c.append("\\hline\\multicolumn{%s}{|l|}{%s}"%(ks,s))
+                s = "Number of transitions: %d"%int(self.t_total)
+                c.append("\\multicolumn{%s}{|l|}{%s}"%(ks,s))
+                s = "Number of regimes: %d"%int(self.m)
+                c.append("\\multicolumn{%s}{|l|}{%s}"%(ks,s))
+                s = "Regime names: "
+                s += ", ".join(regime_names)
+                c.append("\\multicolumn{%s}{|l|}{%s}"%(ks,s))
+                s = "\\hline\\multicolumn{2}{|l}{%s}"%("Test")
+                s += "&\\multicolumn{2}{r}{LR}&\\multicolumn{2}{r|}{Q}"
+                c.append(s)
+                s = "Stat."
+                s = "\\multicolumn{2}{|l}{%s}"%(s)
+                s += "&\\multicolumn{2}{r}{%.3f}"%self.LR
+                s += "&\\multicolumn{2}{r|}{%.3f}"%self.Q
+                c.append(s)
+                s = "\\multicolumn{2}{|l}{%s}"%("DOF")
+                s += "&\\multicolumn{2}{r}{%d}"%int(self.dof)
+                s += "&\\multicolumn{2}{r|}{%d}"%int(self.dof)
+                c.append(s)
+                s = "\\multicolumn{2}{|l}{%s}"%("p-value")
+                s += "&\\multicolumn{2}{r}{%.3f}"%self.LR_p_value
+                s += "&\\multicolumn{2}{r|}{%.3f}"%self.Q_p_value
+                c.append(s)
+                s1 =  "\\\\\n".join(c)
+                s1 += "\\\\\n"
+                c = []
+                for mat in pmats:
+                    c.append("\\hline\n")
+                    for row in mat:
+                        c.append(row+"\\\\\n")
+                c.append("\\hline\n")
+                c.append("\\end{tabular}")
+                s2 = "".join(c)
+                f.write(s1+s2)
+
diff --git a/pysal/spatial_dynamics/rank.py b/pysal/spatial_dynamics/rank.py
new file mode 100644
index 0000000..2db2af0
--- /dev/null
+++ b/pysal/spatial_dynamics/rank.py
@@ -0,0 +1,443 @@
+"""
+Rank and spatial rank mobility measures
+"""
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+#from pysal.common import *
+from scipy.stats.mstats import rankdata
+from scipy.special import erfc
+import pysal
+import numpy as np
+import scipy as sp
+from numpy.random import permutation as NRP
+
+__all__ = ['SpatialTau', 'Tau', 'Theta', ]
+
+
+class Theta:
+    """
+    Regime mobility measure
+
+    For sequence of time periods Theta measures the extent to which rank
+    changes for a variable measured over n locations are in the same direction
+    within mutually exclusive and exhaustive partitions (regimes) of the n locations.
+
+    Theta is defined as the sum of the absolute sum of rank changes within
+    the regimes over the sum of all absolute rank changes.
+
+
+    Parameters
+    ----------
+    y            : array (n,k) with k>=2
+                   successive columns of y are later moments in time (years,
+                   months,etc)
+    regime       : array (n,)
+                   values corresponding to which regime each observation belongs to
+    permutations : int
+                   number of random spatial permutations to generate for
+                   computationally based inference
+
+    Attributes
+    ----------
+    ranks        : array
+                   ranks of the original y array (by columns)
+    regimes      : array
+                   the original regimes array
+    total        : array (k-1,)
+                   the total number of rank changes for each of the k periods
+    max_total    : int
+                   the theoretical maximum number of rank changes for n
+                   observations
+    theta        : array (k-1,)
+                   the theta statistic for each of the k-1 intervals
+    permutations : int
+                   the number of permutations
+    pvalue_left  : float
+                   p-value for test that observed theta is significantly lower
+                   than its expectation under complete spatial randomness
+    pvalue_right : float
+                   p-value for test that observed theta is significantly
+                   greater than its expectation under complete spatial randomness
+
+
+    References
+    ----------
+    Rey, S.J. (2004) "Spatial dependence in the evolution of regional income
+    distributions," in A. Getis, J. Mur and H.Zoeller (eds). Spatial
+    Econometrics and Spatial Statistics. Palgrave, London, pp. 194-213.
+
+
+    Examples
+    --------
+    >>> import pysal
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> regime=np.array(f.by_col['esquivel99'])
+    >>> np.random.seed(10)
+    >>> t=Theta(y,regime,999)
+    >>> t.theta
+    array([[ 0.41538462,  0.28070175,  0.61363636,  0.62222222,  0.33333333,
+             0.47222222]])
+    >>> t.pvalue_left
+    array([ 0.307,  0.077,  0.823,  0.552,  0.045,  0.735])
+    >>> t.total
+    array([ 130.,  114.,   88.,   90.,   90.,   72.])
+    >>> t.max_total
+    512
+    >>>
+    """
+    def __init__(self, y, regime, permutations=999):
+        ranks = rankdata(y, axis=0)
+        self.ranks = ranks
+        n, k = y.shape
+        ranks_d = ranks[:, range(1, k)] - ranks[:, range(k - 1)]
+        self.ranks_d = ranks_d
+        regimes = sp.unique(regime)
+        self.regimes = regimes
+        self.total = sum(abs(ranks_d))
+        self.max_total = sum([abs(i - n + i - 1) for i in range(1, n + 1)])
+        self._calc(regime)
+        self.theta = self._calc(regime)
+        self.permutations = permutations
+        if permutations:
+            np.perm = np.random.permutation
+            sim = np.array([self._calc(
+                np.perm(regime)) for i in xrange(permutations)])
+            self.theta.shape = (1, len(self.theta))
+            sim = np.concatenate((self.theta, sim))
+            self.sim = sim
+            den = permutations + 1.
+            self.pvalue_left = (sim <= sim[0]).sum(axis=0) / den
+            self.pvalue_right = (sim > sim[0]).sum(axis=0) / den
+            self.z = (sim[0] - sim.mean(axis=0)) / sim.std(axis=0)
+
+    def _calc(self, regime):
+        within = [abs(
+            sum(self.ranks_d[regime == reg])) for reg in self.regimes]
+        return np.array(sum(within) / self.total)
+
+
+class Tau:
+    """
+    Kendall's Tau is based on a comparison of the number of pairs of n
+    observations that have concordant ranks between two variables.
+
+    Parameters
+    ----------
+    x            : array (n,)
+                   first variable
+    y            : array (n,)
+                   second variable
+
+    Attributes
+    ----------
+    tau          : float
+                   The classic Tau statistic
+
+    tau_p       : float
+                  asymptotic p-value
+
+    Notes
+    -----
+
+    Modification of algorithm suggested by Christensen (2005).
+    PySAL implementation uses a list based representation of a binary tree for
+    the accumulation of the concordance measures. Ties are handled by this
+    implementation (in other words, if there are ties in either x, or y, or
+    both, the calculation returns Tau_b, if no ties classic Tau is returned.)
+
+    References
+    ----------
+
+    Christensen, D. (2005) Fast algorithms for the calculation of
+    Kendall's tau. Computational Statistics, 20: 51-62.
+
+
+    Examples
+    --------
+
+    # from scipy example
+
+    >>> from scipy.stats import kendalltau
+    >>> x1 = [12, 2, 1, 12, 2]
+    >>> x2 = [1, 4, 7, 1, 0]
+    >>> kt = Tau(x1,x2)
+    >>> kt.tau
+    -0.47140452079103173
+    >>> kt.tau_p
+    0.24821309157521476
+    >>> skt = kendalltau(x1,x2)
+    >>> skt
+    (-0.47140452079103173, 0.24821309157521476)
+
+    """
+
+    def __init__(self, x, y):
+        res = self._calc(x, y)
+        self.tau = res[0]
+        self.tau_p = res[1]
+        self.concordant = res[2]
+        self.discordant = res[3]
+        self.extraX = res[4]
+        self.extraY = res[5]
+
+    def _calc(self, x, y):
+        """
+        List based implementation of binary tree algorithm for concordance
+        measure after Christensen (2005).
+
+        """
+        x = np.array(x)
+        y = np.array(y)
+        n = len(y)
+        perm = range(n)
+        perm.sort(key=lambda a: (x[a], y[a]))
+        vals = y[perm]
+        ExtraY = 0
+        ExtraX = 0
+        ACount = 0
+        BCount = 0
+        CCount = 0
+        DCount = 0
+        ECount = 0
+        DCount = 0
+        Concordant = 0
+        Discordant = 0
+        # ids for left child
+        li = [None] * (n - 1)
+        # ids for right child
+        ri = [None] * (n - 1)
+        # number of left descendants for a node
+        ld = np.zeros(n)
+        # number of values equal to value i
+        nequal = np.zeros(n)
+
+        for i in range(1, n):
+            NumBefore = 0
+            NumEqual = 1
+            root = 0
+            x0 = x[perm[i - 1]]
+            y0 = y[perm[i - 1]]
+            x1 = x[perm[i]]
+            y1 = y[perm[i]]
+            if x0 != x1:
+                DCount = 0
+                ECount = 1
+            else:
+                if y0 == y1:
+                    ECount += 1
+                else:
+                    DCount += ECount
+                    ECount = 1
+            root = 0
+            inserting = True
+            while inserting:
+                current = y[perm[i]]
+                if current > y[perm[root]]:
+                    # right branch
+                    NumBefore += 1 + ld[root] + nequal[root]
+                    if ri[root] is None:
+                        # insert as right child to root
+                        ri[root] = i
+                        inserting = False
+                    else:
+                        root = ri[root]
+                elif current < y[perm[root]]:
+                    # increment number of left descendants
+                    ld[root] += 1
+                    if li[root] is None:
+                        # insert as left child to root
+                        li[root] = i
+                        inserting = False
+                    else:
+                        root = li[root]
+                elif current == y[perm[root]]:
+                    NumBefore += ld[root]
+                    NumEqual += nequal[root] + 1
+                    nequal[root] += 1
+                    inserting = False
+
+            ACount = NumBefore - DCount
+            BCount = NumEqual - ECount
+            CCount = i - (ACount + BCount + DCount + ECount - 1)
+            ExtraY += DCount
+            ExtraX += BCount
+            Concordant += ACount
+            Discordant += CCount
+
+        cd = Concordant + Discordant
+        num = Concordant - Discordant
+        tau = num / np.sqrt((cd + ExtraX) * (cd + ExtraY))
+        v = (4. * n + 10) / (9. * n * (n - 1))
+        z = tau / np.sqrt(v)
+        pval = erfc(np.abs(z) / 1.4142136)  # follow scipy
+        return tau, pval, Concordant, Discordant, ExtraX, ExtraY
+
+
+class SpatialTau:
+    """
+    Spatial version of Kendall's rank correlation statistic
+
+    Kendall's Tau is based on a comparison of the number of pairs of n
+    observations that have concordant ranks between two variables. The spatial
+    Tau decomposes these pairs into those that are spatial neighbors and those
+    that are not, and examines whether the rank correlation is different
+    between the two sets relative to what would be expected under spatial randomness.
+
+    Parameters
+    ----------
+    x            : array (n,)
+                   first variable
+    y            : array (n,)
+                   second variable
+    w            : W
+                   spatial weights object
+    permutations : int
+                   number of random spatial permutations for computationally
+                   based inference
+
+    Attributes
+    ----------
+    tau          : float
+                   The classic Tau statistic
+    tau_spatial  : float
+                   Value of Tau for pairs that are spatial neighbors
+    taus         : array (permutations x 1)
+                   Values of simulated tau_spatial values under random spatial permutations in both periods. (Same permutation used for start and ending period).
+    pairs_spatial : int
+                    Number of spatial pairs
+    concordant   : float
+                   Number of concordant pairs
+    concordant_spatial : float
+                   Number of concordant pairs that are spatial neighbors
+    extraX       : float
+                   Number of extra X pairs
+    extraY       : float
+                   Number of extra Y pairs
+    discordant   : float
+                   Number of discordant pairs
+    discordant_spatial   : float
+                   Number of discordant pairs that are spatial neighbors
+    tau_spatial_psim : float
+                   pseudo p-value for observed tau_spatial under the null of spatial randomness (if permutations>0)
+
+    Notes
+    -----
+
+    Algorithm has two stages. The first calculates classic Tau using a list
+    based implementation of the algorithm from Christensen (2005). Second
+    stage calculates concordance measures for neighboring pairs of locations
+    using a modification of the algorithm from Press et al (2007). See Rey
+    (2014) for details.
+
+    References
+    ----------
+
+    Christensen, D. (2005) "Fast algorithms for the calculation of
+    Kendall's tau." Computational Statistics, 20: 51-62.
+
+    Press, W.H, S. A Teukolsky, W.T. Vetterling and B. P. Flannery (2007).
+    Numerical Recipes: The Art of Scientific Computing. Cambridge. Pg 752.
+
+    Rey, S.J. (2004) "Spatial dependence in the evolution of regional income
+    distributions," in A. Getis, J. Mur and H.Zoeller (eds). Spatial
+    Econometrics and Spatial Statistics. Palgrave, London, pp. 194-213.
+
+    Rey, S.J. (2014) "Fast algorithms for calculation of a space-time
+    concordance measure." Computational Statistics. Forthcoming.
+
+
+    Examples
+    --------
+    >>> import pysal 
+    >>> import numpy as np
+    >>> f=pysal.open(pysal.examples.get_path("mexico.csv"))
+    >>> vnames=["pcgdp%d"%dec for dec in range(1940,2010,10)]
+    >>> y=np.transpose(np.array([f.by_col[v] for v in vnames]))
+    >>> regime=np.array(f.by_col['esquivel99'])
+    >>> w=pysal.weights.block_weights(regime)
+    >>> np.random.seed(12345)
+    >>> res=[pysal.SpatialTau(y[:,i],y[:,i+1],w,99) for i in range(6)]
+    >>> for r in res:
+    ...     ev = r.taus.mean()
+    ...     "%8.3f %8.3f %8.3f"%(r.tau_spatial, ev, r.tau_spatial_psim)
+    ...
+    '   0.397    0.659    0.010'
+    '   0.492    0.706    0.010'
+    '   0.651    0.772    0.020'
+    '   0.714    0.752    0.210'
+    '   0.683    0.705    0.270'
+    '   0.810    0.819    0.280'
+    """
+
+    def __init__(self, x, y, w, permutations=0):
+
+        # NOTE(review): this mutates the caller's weights object by forcing a
+        # binary transform; side effect persists after construction
+        w.transform = 'b'
+        self.n = len(x)
+        # stage one: classic (aspatial) Tau over all pairs
+        res = Tau(x, y)
+        self.tau = res.tau
+        self.tau_p = res.tau_p
+        self.concordant = res.concordant
+        self.discordant = res.discordant
+        self.extraX = res.extraX
+        self.extraY = res.extraY
+        # stage two: concordance restricted to neighboring pairs
+        res = self._calc(x, y, w)
+        self.tau_spatial = res[0]
+        # each neighbor pair appears twice in s0 for symmetric binary weights
+        self.pairs_spatial = int(w.s0 / 2.)
+        self.concordant_spatial = res[1]
+        self.discordant_spatial = res[2]
+
+        if permutations > 0:
+            taus = np.zeros(permutations)
+            ids = np.arange(self.n)
+            # same permutation applied to both x and y (joint relabelling)
+            for r in xrange(permutations):
+                rids = np.random.permutation(ids)
+                taus[r] = self._calc(x[rids], y[rids], w)[0]
+            self.taus = taus
+            self.tau_spatial_psim = pseudop(taus, self.tau_spatial,
+                                            permutations)
+
+    def _calc(self, x, y, w):
+        # n1/n2: pair counts untied in x / in y; iS: running concordance sum;
+        # gc: concordant neighbor pairs
+        n1 = n2 = iS = gc = 0
+        # record of visited (i, j) pairs; not used beyond bookkeeping here
+        ijs = {}
+        for i in w.id_order:
+            xi = x[i]
+            yi = y[i]
+            for j in w.neighbors[i]:
+                # visit each symmetric neighbor pair once
+                if i < j:
+                    ijs[(i, j)] = (i, j)
+                    xj = x[j]
+                    yj = y[j]
+                    dx = xi - xj
+                    dy = yi - yj
+                    dxdy = dx * dy
+                    if dxdy != 0:
+                        # untied in both variables: concordant if same sign
+                        n1 += 1
+                        n2 += 1
+                        if dxdy > 0.0:
+                            gc += 1
+                            iS += 1
+                        else:
+                            iS -= 1
+                    else:
+                        # ties: count the variable(s) that still differ
+                        if dx != 0.0:
+                            n1 += 1
+                        if dy != 0.0:
+                            n2 += 1
+        # tau_b-style normalization over neighbor pairs
+        tau_g = iS / (np.sqrt(n1) * np.sqrt(n2))
+        gd = gc - iS
+        return [tau_g, gc, gd]
+
+
+def pseudop(sim, observed, nperm):
+    # Two-sided pseudo p-value for a permutation test: the proportion of
+    # simulated values at least as extreme as the observed one (with the
+    # observed value counted once via the +1), folded to the smaller tail.
+    above = sim >= observed
+    larger = above.sum()
+    psim = (larger + 1.) / (nperm + 1.)
+    if psim > 0.5:
+        # observed falls in the lower tail; report that tail instead
+        psim = (nperm - larger + 1.) / (nperm + 1.)
+    return psim
+
diff --git a/pysal/spatial_dynamics/tests/__init__.py b/pysal/spatial_dynamics/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/spatial_dynamics/tests/test_directional.py b/pysal/spatial_dynamics/tests/test_directional.py
new file mode 100644
index 0000000..73d9716
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_directional.py
@@ -0,0 +1,62 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import directional
+import numpy as np
+
+
+class Rose_Tester(unittest.TestCase):
+    # Checks directional.rose (4-sector rose diagram of relative movement)
+    # against pinned cut points, counts and pseudo p-values for US state
+    # per-capita income relative to the national figure, 1969 vs 2008.
+    def setUp(self):
+        f = open(pysal.examples.get_path("spi_download.csv"), 'r')
+        lines = f.readlines()
+        f.close()
+        lines = [line.strip().split(",") for line in lines]
+        names = [line[2] for line in lines[1:-5]]
+        data = np.array([map(int, line[3:]) for line in lines[1:-5]])
+        sids = range(60)
+        # drop national/regional aggregates so only the 48 states remain
+        out = ['"United States 3/"',
+               '"Alaska 3/"',
+               '"District of Columbia"',
+               '"Hawaii 3/"',
+               '"New England"',
+               '"Mideast"',
+               '"Great Lakes"',
+               '"Plains"',
+               '"Southeast"',
+               '"Southwest"',
+               '"Rocky Mountain"',
+               '"Far West 3/"']
+        snames = [name for name in names if name not in out]
+        sids = [names.index(name) for name in snames]
+        states = data[sids, :]
+        us = data[0]
+        years = np.arange(1969, 2009)
+        # incomes relative to the US average
+        rel = states / (us * 1.)
+        gal = pysal.open(pysal.examples.get_path('states48.gal'))
+        self.w = gal.read()
+        self.w.transform = 'r'
+        # first and last year only
+        self.Y = rel[:, [0, -1]]
+
+    def test_rose(self):
+        k = 4
+        np.random.seed(100)
+        r4 = directional.rose(self.Y, self.w, k, permutations=999)
+        # quadrant cut points on [0, 2*pi]
+        exp = [0., 1.57079633, 3.14159265, 4.71238898, 6.28318531]
+        obs = list(r4['cuts'])
+        for i in range(k + 1):
+            self.assertAlmostEqual(exp[i], obs[i])
+        # NOTE(review): assertEquals is a deprecated alias of assertEqual
+        self.assertEquals(list(r4['counts']), [32, 5, 9, 2])
+        exp = [0.02, 0.001, 0.001, 0.001]
+        obs = list(r4['pvalues'])
+        for i in range(k):
+            self.assertAlmostEqual(exp[i], obs[i])
+
+
+# Build an explicit suite so the module can be run directly as a script.
+suite = unittest.TestSuite()
+test_classes = [Rose_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/spatial_dynamics/tests/test_ergodic.py b/pysal/spatial_dynamics/tests/test_ergodic.py
new file mode 100644
index 0000000..643f16c
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_ergodic.py
@@ -0,0 +1,55 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import ergodic
+import numpy as np
+
+
+class SteadyState_Tester(unittest.TestCase):
+    # ergodic.steady_state on a 3x3 transition matrix with a known
+    # stationary distribution.
+    def setUp(self):
+        self.p = np.matrix([[.5, .25, .25], [.5, 0, .5], [.25, .25, .5]])
+
+    def test_steady_state(self):
+        obs = ergodic.steady_state(self.p).tolist()
+        exp = np.matrix([[0.4], [0.2], [0.4]]).tolist()
+        k = self.p.shape[0]
+        for i in range(k):
+            self.assertAlmostEqual(exp[i][0], obs[i][0])
+
+
+class Fmpt_Tester(unittest.TestCase):
+    # ergodic.fmpt (first mean passage times) against pinned values for the
+    # same 3x3 chain used by SteadyState_Tester.
+    def setUp(self):
+        self.p = np.matrix([[.5, .25, .25], [.5, 0, .5], [.25, .25, .5]])
+
+    def test_fmpt(self):
+        k = self.p.shape[0]
+        obs = ergodic.fmpt(self.p).flatten().tolist()[0]
+        exp = np.matrix([[2.5, 4., 3.33333333], [2.66666667, 5.,
+                                                 2.66666667], [3.33333333, 4., 2.5]])
+        exp = exp.flatten().tolist()[0]
+        # NOTE(review): only the first k of k*k entries are compared here;
+        # presumably intentional spot-check, but verify coverage is adequate
+        for i in range(k):
+            self.assertAlmostEqual(exp[i], obs[i])
+
+
+class VarFmpt_Tester(unittest.TestCase):
+    # ergodic.var_fmpt (variances of first mean passage times) against
+    # pinned values for the same 3x3 chain.
+    def setUp(self):
+        self.p = np.matrix([[.5, .25, .25], [.5, 0, .5], [.25, .25, .5]])
+
+    def test_var_fmpt(self):
+        k = self.p.shape[0]
+        obs = ergodic.var_fmpt(self.p).flatten().tolist()[0]
+        exp = np.matrix([[5.58333333, 12., 6.88888889], [6.22222222,
+                                                         12., 6.22222222], [6.88888889, 12., 5.58333333]])
+        exp = exp.flatten().tolist()[0]
+        for i in range(k):
+            self.assertAlmostEqual(exp[i], obs[i])
+
+
+# Build an explicit suite so the module can be run directly as a script.
+suite = unittest.TestSuite()
+test_classes = [SteadyState_Tester, Fmpt_Tester, VarFmpt_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/spatial_dynamics/tests/test_interaction.py b/pysal/spatial_dynamics/tests/test_interaction.py
new file mode 100644
index 0000000..83673c0
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_interaction.py
@@ -0,0 +1,80 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import interaction
+import numpy as np
+import scipy
+
+# Minor version of scipy, used to branch on behavior changes below.
+# NOTE(review): comparing only the minor component breaks once the major
+# version changes (e.g. scipy 1.0); consider a proper version comparison.
+scp_version = int(scipy.version.version.split(".")[1])
+
+
+class SpaceTimeEvents_Tester(unittest.TestCase):
+    # SpaceTimeEvents construction from the burkitt example: event count,
+    # first spatial coordinate pair and first time value.
+    def setUp(self):
+        self.path = pysal.examples.get_path("burkitt")
+
+    def test_SpaceTimeEvents(self):
+        events = interaction.SpaceTimeEvents(self.path, 'T')
+        self.assertEquals(events.n, 188)
+        self.assertEquals(list(events.space[0]), [300., 302.])
+        self.assertEquals(list(events.t[0]), [413])
+
+
+class Knox_Tester(unittest.TestCase):
+    # Knox space-time interaction statistic on the burkitt events.
+    def setUp(self):
+        path = pysal.examples.get_path("burkitt")
+        self.events = interaction.SpaceTimeEvents(path, 'T')
+
+    def test_knox(self):
+        result = interaction.knox(
+            self.events.space,
+            self.events.t, delta=20, tau=5, permutations=1)
+        self.assertEquals(result['stat'], 13.0)
+
+
+class Mantel_Tester(unittest.TestCase):
+    # Mantel space-time interaction statistic on the burkitt events.
+    def setUp(self):
+        path = pysal.examples.get_path("burkitt")
+        self.events = interaction.SpaceTimeEvents(path, 'T')
+
+    def test_mantel(self):
+        # NOTE(review): uses events.time while sibling tests use events.t;
+        # presumably SpaceTimeEvents exposes both — verify in interaction.py
+        result = interaction.mantel(self.events.space,
+                self.events.time, 1, scon=0.0, spow=1.0, tcon=0.0, tpow=1.0)
+        self.assertAlmostEquals(result['stat'], 0.014154, 6)
+
+
+class Jacquez_Tester(unittest.TestCase):
+    # Jacquez k-nearest-neighbor space-time statistic; the expected value
+    # depends on the installed scipy version (KD-tree tie handling changed).
+    def setUp(self):
+        path = pysal.examples.get_path("burkitt")
+        self.events = interaction.SpaceTimeEvents(path, 'T')
+
+    def test_jacquez(self):
+        result = interaction.jacquez(self.events.space,
+                self.events.t, k=3, permutations=1)
+        if scp_version > 11:
+            self.assertEquals(result['stat'], 12)
+        else:
+            self.assertEquals(result['stat'], 13)
+            
+
+
+class ModifiedKnox_Tester(unittest.TestCase):
+    # Baker's modified Knox statistic on the burkitt events.
+    def setUp(self):
+        path = pysal.examples.get_path("burkitt")
+        self.events = interaction.SpaceTimeEvents(path, 'T')
+
+    def test_modified_knox(self):
+        result = interaction.modified_knox(
+            self.events.space,
+            self.events.t, delta=20, tau=5, permutations=1)
+        self.assertAlmostEquals(result['stat'], 2.810160, 6)
+
+
+# Build an explicit suite so the module can be run directly as a script.
+suite = unittest.TestSuite()
+test_classes = [SpaceTimeEvents_Tester, Knox_Tester, Mantel_Tester,
+                Jacquez_Tester, ModifiedKnox_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
diff --git a/pysal/spatial_dynamics/tests/test_markov.py b/pysal/spatial_dynamics/tests/test_markov.py
new file mode 100644
index 0000000..8e10daf
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_markov.py
@@ -0,0 +1,190 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import markov
+import numpy as np
+
+
+class test_Markov(unittest.TestCase):
+    # Classic Markov chain over quintile classes of US state incomes:
+    # transition counts, transition probabilities, and steady state.
+    def test___init__(self):
+        # markov = Markov(class_ids, classes)
+        import pysal
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+        q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+        m = pysal.Markov(q5)
+        expected = np.array([[729., 71., 1., 0., 0.],
+                             [72., 567., 80., 3., 0.],
+                             [0., 81., 631., 86., 2.],
+                             [0., 3., 86., 573., 56.],
+                             [0., 0., 1., 57., 741.]])
+        np.testing.assert_array_equal(m.transitions, expected)
+        expected = np.matrix([[0.91011236, 0.0886392,
+                               0.00124844, 0., 0.],
+                              [0.09972299, 0.78531856, 0.11080332, 0.00415512,
+                                  0.],
+                              [0., 0.10125, 0.78875, 0.1075, 0.0025],
+                              [0., 0.00417827, 0.11977716, 0.79805014,
+                                  0.07799443],
+                              [0., 0., 0.00125156, 0.07133917, 0.92740926]])
+        np.testing.assert_array_almost_equal(m.p.getA(), expected.getA())
+        expected = np.matrix([[0.20774716],
+                              [0.18725774],
+                              [0.20740537],
+                              [0.18821787],
+                              [0.20937187]]).getA()
+        np.testing.assert_array_almost_equal(m.steady_state.getA(), expected)
+
+
+class test_Spatial_Markov(unittest.TestCase):
+    # Spatial Markov on relative incomes with row-standardized weights;
+    # pins the conditional steady-state distributions S.
+    def test___init__(self):
+        import pysal
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+        pci = pci.transpose()
+        rpci = pci / (pci.mean(axis=0))
+        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
+        w.transform = 'r'
+        sm = pysal.Spatial_Markov(rpci, w, fixed=True, k=5)
+        S = np.array(
+            [[0.43509425, 0.2635327, 0.20363044, 0.06841983, 0.02932278],
+            [0.13391287, 0.33993305, 0.25153036, 0.23343016, 0.04119356],
+            [0.12124869, 0.21137444, 0.2635101, 0.29013417, 0.1137326],
+            [0.0776413, 0.19748806, 0.25352636, 0.22480415, 0.24654013],
+            [0.01776781, 0.19964349, 0.19009833, 0.25524697, 0.3372434]])
+        np.testing.assert_array_almost_equal(S, sm.S)
+
+
+class test_chi2(unittest.TestCase):
+    # Chi-square homogeneity tests of the Spatial Markov: per-lag chi2
+    # triples (stat, p-value, dof) and the shtest results.
+    def test_chi2(self):
+        import pysal
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+        pci = pci.transpose()
+        rpci = pci / (pci.mean(axis=0))
+        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
+        w.transform = 'r'
+        sm = pysal.Spatial_Markov(rpci, w, fixed=True, k=5)
+        chi = np.matrix([[4.06139105e+01, 6.32961385e-04, 1.60000000e+01],
+                         [5.55485793e+01, 2.88879565e-06, 1.60000000e+01],
+                         [1.77772638e+01, 3.37100315e-01, 1.60000000e+01],
+                         [4.00925436e+01, 7.54729084e-04, 1.60000000e+01],
+                         [4.68588786e+01, 7.16364084e-05, 1.60000000e+01]]).getA()
+        obs = np.matrix(sm.chi2).getA()
+        np.testing.assert_array_almost_equal(obs, chi)
+        obs = np.matrix(
+            [[4.61209613e+02, 0.00000000e+00, 4.00000000e+00],
+             [1.48140694e+02, 0.00000000e+00, 4.00000000e+00],
+             [6.33129261e+01, 5.83089133e-13, 4.00000000e+00],
+             [7.22778509e+01, 7.54951657e-15, 4.00000000e+00],
+             [2.32659201e+02, 0.00000000e+00, 4.00000000e+00]])
+        np.testing.assert_array_almost_equal(obs.getA(),
+                                             np.matrix(sm.shtest).getA())
+
+
+class test_LISA_Markov(unittest.TestCase):
+    # LISA Markov construction; only the class labels are asserted — the
+    # remaining expectations live in the (unexecuted) docstring below.
+    def test___init__(self):
+        import numpy as np
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array(
+            [f.by_col[str(y)] for y in range(1929, 2010)]).transpose()
+        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
+        lm = pysal.LISA_Markov(pci, w)
+        obs = np.array([1, 2, 3, 4])
+        np.testing.assert_array_almost_equal(obs, lm.classes)
+        # NOTE(review): the doctest-style block below is a string inside a
+        # test method, so it is never run by unittest or doctest; consider
+        # converting these expectations into real assertions.
+        """
+        >>> lm.steady_state
+        matrix([[ 0.28561505],
+                [ 0.14190226],
+                [ 0.40493672],
+                [ 0.16754598]])
+        >>> lm.transitions
+        array([[  1.08700000e+03,   4.40000000e+01,   4.00000000e+00,
+                  3.40000000e+01],
+               [  4.10000000e+01,   4.70000000e+02,   3.60000000e+01,
+                  1.00000000e+00],
+               [  5.00000000e+00,   3.40000000e+01,   1.42200000e+03,
+                  3.90000000e+01],
+               [  3.00000000e+01,   1.00000000e+00,   4.00000000e+01,
+                  5.52000000e+02]])
+        >>> lm.p
+        matrix([[ 0.92985458,  0.03763901,  0.00342173,  0.02908469],
+                [ 0.07481752,  0.85766423,  0.06569343,  0.00182482],
+                [ 0.00333333,  0.02266667,  0.948     ,  0.026     ],
+                [ 0.04815409,  0.00160514,  0.06420546,  0.88603531]])
+        >>> lm.move_types
+        array([[ 11.,  11.,  11., ...,  11.,  11.,  11.],
+               [  6.,   6.,   6., ...,   6.,   7.,  11.],
+               [ 11.,  11.,  11., ...,  11.,  11.,  11.],
+               ...,
+               [  6.,   6.,   6., ...,   6.,   6.,   6.],
+               [  1.,   1.,   1., ...,   6.,   6.,   6.],
+               [ 16.,  16.,  16., ...,  16.,  16.,  16.]])
+        >>> np.random.seed(10)
+        >>> lm_random = pysal.LISA_Markov(pci, w, permutations=99)
+        >>> lm_random.significant_moves
+        array([[11, 11, 11, ..., 59, 59, 59],
+               [54, 54, 54, ..., 54, 55, 59],
+               [11, 11, 11, ..., 11, 59, 59],
+               ...,
+               [54, 54, 54, ..., 54, 54, 54],
+               [49, 49, 49, ..., 54, 54, 54],
+               [64, 64, 64, ..., 64, 64, 64]])
+
+        """
+
+
+class test_kullback(unittest.TestCase):
+    # Kullback information-theoretic homogeneity test on two 6x6
+    # transition-count matrices with pinned statistic, dof and p-value.
+    def test___init__(self):
+        import numpy as np
+        s1 = np.array([
+                      [22, 11, 24, 2, 2, 7],
+                      [5, 23, 15, 3, 42, 6],
+                      [4, 21, 190, 25, 20, 34],
+                      [0, 2, 14, 56, 14, 28],
+                      [32, 15, 20, 10, 56, 14],
+                      [5, 22, 31, 18, 13, 134]
+                      ])
+        s2 = np.array([
+            [3, 6, 9, 3, 0, 8],
+            [1, 9, 3, 12, 27, 5],
+            [2, 9, 208, 32, 5, 18],
+            [0, 14, 32, 108, 40, 40],
+            [22, 14, 9, 26, 224, 14],
+            [1, 5, 13, 53, 13, 116]
+        ])
+
+        F = np.array([s1, s2])
+        res = markov.kullback(F)
+        np.testing.assert_array_almost_equal(160.96060031170782,
+                                             res['Conditional homogeneity'])
+        np.testing.assert_array_almost_equal(30,
+                                             res['Conditional homogeneity dof'])
+        np.testing.assert_array_almost_equal(0.0,
+                                             res['Conditional homogeneity pvalue'])
+
+
+class test_prais(unittest.TestCase):
+    # Prais conditional mobility index on the quintile Markov chain.
+    def test___init__(self):
+        import numpy as np
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+        q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+        m = pysal.Markov(q5)
+        res = np.matrix([[0.08988764, 0.21468144,
+                          0.21125, 0.20194986, 0.07259074]])
+        np.testing.assert_array_almost_equal(markov.prais(m.p), res)
+
+
+class test_shorrock(unittest.TestCase):
+    # Shorrock's mobility index on the quintile Markov chain.
+    def test___init__(self):
+        import numpy as np
+        f = pysal.open(pysal.examples.get_path('usjoin.csv'))
+        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
+        q5 = np.array([pysal.Quantiles(y).yb for y in pci]).transpose()
+        m = pysal.Markov(q5)
+        np.testing.assert_array_almost_equal(markov.shorrock(m.p),
+                                             0.19758992000997844)
+
+
+# Run all TestCase classes in this module when executed as a script.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spatial_dynamics/tests/test_rank.py b/pysal/spatial_dynamics/tests/test_rank.py
new file mode 100644
index 0000000..05ff929
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_rank.py
@@ -0,0 +1,73 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import rank
+import numpy as np
+
+
class Theta_Tester(unittest.TestCase):
    def setUp(self):
        # Mexican state per-capita GDP by decade plus regime classification.
        f = pysal.open(pysal.examples.get_path('mexico.csv'))
        columns = ["pcgdp%d" % dec for dec in range(1940, 2010, 10)]
        self.y = np.transpose(np.array([f.by_col[c] for c in columns]))
        self.regime = np.array(f.by_col['esquivel99'])

    def test_Theta(self):
        np.random.seed(10)
        result = rank.Theta(self.y, self.regime, 999)
        k = self.y.shape[1]
        expected_theta = [0.41538462, 0.28070175, 0.61363636, 0.62222222,
                          0.33333333, 0.47222222]
        observed_theta = result.theta.tolist()
        for idx in range(k - 1):
            self.assertAlmostEqual(expected_theta[idx], observed_theta[0][idx])
        expected_p = [0.307, 0.077, 0.823, 0.552, 0.045, 0.735]
        observed_p = result.pvalue_left.tolist()
        for idx in range(k - 1):
            self.assertAlmostEqual(expected_p[idx], observed_p[idx])
        expected_total = [130., 114., 88., 90., 90., 72.]
        observed_total = result.total.tolist()
        for idx in range(k - 1):
            self.assertAlmostEqual(expected_total[idx], observed_total[idx])
        self.assertEqual(result.max_total, 512)
+
+
class SpatialTau_Tester(unittest.TestCase):
    def setUp(self):
        # Spatial weights are block weights built from the regime labels.
        f = pysal.open(pysal.examples.get_path('mexico.csv'))
        columns = ["pcgdp%d" % dec for dec in range(1940, 2010, 10)]
        self.y = np.transpose(np.array([f.by_col[c] for c in columns]))
        self.w = pysal.weights.block_weights(np.array(f.by_col['esquivel99']))

    def test_SpatialTau(self):
        np.random.seed(12345)
        k = self.y.shape[1]
        results = [rank.SpatialTau(self.y[:, i], self.y[:, i + 1],
                                   self.w, 99) for i in range(k - 1)]
        # (tau_spatial, mean of permuted taus, pseudo p-value) per period.
        expected = [(0.397, 0.659, 0.010),
                    (0.492, 0.706, 0.010),
                    (0.651, 0.772, 0.020),
                    (0.714, 0.752, 0.210),
                    (0.683, 0.705, 0.270),
                    (0.810, 0.819, 0.280)]
        for res, (tau, ev_tau, p) in zip(results, expected):
            self.assertAlmostEqual(tau, res.tau_spatial, 3)
            self.assertAlmostEqual(ev_tau, res.taus.mean(), 3)
            self.assertAlmostEqual(p, res.tau_spatial_psim, 3)
+
+
class Tau_Tester(unittest.TestCase):
    def test_Tau(self):
        # Kendall's tau between two short integer series with ties.
        series_a = [12, 2, 1, 12, 2]
        series_b = [1, 4, 7, 1, 0]
        result = rank.Tau(series_a, series_b)
        self.assertAlmostEqual(result.tau, -0.47140452079103173, 5)
        self.assertAlmostEqual(result.tau_p, 0.24821309157521476, 5)
+
+
# Aggregate all test cases into one suite so the module can be executed
# directly as a script (see the __main__ guard below) in addition to
# being picked up by automated test discovery.
suite = unittest.TestSuite()
test_classes = [Theta_Tester, SpatialTau_Tester, Tau_Tester]
for i in test_classes:
    a = unittest.TestLoader().loadTestsFromTestCase(i)
    suite.addTest(a)

if __name__ == '__main__':
    # Run the suite with the default text-mode runner.
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/spatial_dynamics/tests/test_util.py b/pysal/spatial_dynamics/tests/test_util.py
new file mode 100644
index 0000000..1284d8d
--- /dev/null
+++ b/pysal/spatial_dynamics/tests/test_util.py
@@ -0,0 +1,41 @@
+import unittest
+import pysal
+from pysal.spatial_dynamics import util
+import numpy as np
+
+
class ShuffleMatrix_Tester(unittest.TestCase):
    def setUp(self):
        # 4 x 4 matrix holding 0..15 in row-major order.
        self.X = np.arange(16)
        self.X.shape = (4, 4)

    def test_shuffle_matrix(self):
        np.random.seed(10)
        shuffled = util.shuffle_matrix(self.X, range(4)).flatten().tolist()
        expected = [10, 8, 11, 9, 2, 0, 3, 1, 14, 12, 15, 13, 6, 4, 7, 5]
        for want, got in zip(expected, shuffled):
            self.assertEqual(want, got)
+
+
class GetLower_Tester(unittest.TestCase):
    def setUp(self):
        # 4 x 4 matrix holding 0..15 in row-major order.
        self.X = np.arange(16)
        self.X.shape = (4, 4)

    def test_get_lower(self):
        np.random.seed(10)
        lower = util.get_lower(self.X).flatten().tolist()
        expected = [4, 8, 9, 12, 13, 14]
        for want, got in zip(expected, lower):
            self.assertEqual(want, got)
+
+
# Aggregate all test cases into one suite so the module can be executed
# directly as a script (see the __main__ guard below) in addition to
# being picked up by automated test discovery.
suite = unittest.TestSuite()
test_classes = [ShuffleMatrix_Tester, GetLower_Tester]
for i in test_classes:
    a = unittest.TestLoader().loadTestsFromTestCase(i)
    suite.addTest(a)

if __name__ == '__main__':
    # Run the suite with the default text-mode runner.
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/spatial_dynamics/util.py b/pysal/spatial_dynamics/util.py
new file mode 100644
index 0000000..91af545
--- /dev/null
+++ b/pysal/spatial_dynamics/util.py
@@ -0,0 +1,79 @@
+"""
+Utilities for the spatial dynamics module.
+"""
+import numpy as np
+
+__all__ = ['shuffle_matrix', 'get_lower']
+
+
def shuffle_matrix(X, ids):
    """
    Random permutation of rows and columns of a matrix

    Parameters
    ----------
    X   : array (k,k)
          array to be permutated
    ids : sequence of length k
          row/column indices to permute, e.g. range(k)

    Returns
    -------
    X   : array (k,k)
          with rows and columns randomly shuffled

    Examples
    --------
    >>> X=np.arange(16)
    >>> X.shape=(4,4)
    >>> np.random.seed(10)
    >>> shuffle_matrix(X,range(4))
    array([[10,  8, 11,  9],
           [ 2,  0,  3,  1],
           [14, 12, 15, 13],
           [ 6,  4,  7,  5]])

    """
    # Copy into a list so that any sequence is accepted (a Python 3
    # range cannot be shuffled in place) and so the caller's sequence
    # is not mutated as a side effect.
    ids = list(ids)
    np.random.shuffle(ids)
    # Apply the same permutation to rows and to columns.
    return X[ids, :][:, ids]
+
+
def get_lower(matrix):
    """
    Flattens the lower part of an n x n matrix into an n*(n-1)/2 x 1 vector.

    Parameters
    ----------
    matrix          : numpy array
                      a distance matrix (n x n)

    Returns
    -------
    lowvec          : numpy array
                      the lower half of the distance matrix flattened into
                      a vector of length n*(n-1)/2

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> test = np.array([[0,1,2,3],[1,0,1,2],[2,1,0,1],[4,2,1,0]])
    >>> lower = get_lower(test)
    >>> lower
    array([[1],
           [2],
           [1],
           [4],
           [2],
           [1]])

    """
    n = matrix.shape[0]
    # Indices of the strictly lower triangle, in the same row-major order
    # the original double loop produced: (1,0), (2,0), (2,1), ...
    rows, cols = np.tril_indices(n, k=-1)
    # Floor division keeps the length an int under true division, where
    # n * (n - 1) / 2 would be a float and break np.reshape.
    veclen = n * (n - 1) // 2
    lowvec = matrix[rows, cols].reshape(veclen, 1)
    return lowvec
+
diff --git a/pysal/spreg/__init__.py b/pysal/spreg/__init__.py
new file mode 100644
index 0000000..0587fab
--- /dev/null
+++ b/pysal/spreg/__init__.py
@@ -0,0 +1,20 @@
+from ols import *
+from diagnostics import *
+from diagnostics_sp import *
+from user_output import *
+from twosls import *
+from twosls_sp import *
+from error_sp import *
+from error_sp_het import *
+from error_sp_hom import *
+from ols_regimes import *
+from twosls_regimes import *
+from twosls_sp_regimes import *
+from error_sp_regimes import *
+from error_sp_het_regimes import *
+from error_sp_hom_regimes import *
+from probit import *
+from ml_lag import *
+from ml_lag_regimes import *
+from ml_error import *
+from ml_error_regimes import *
diff --git a/pysal/spreg/diagnostics.py b/pysal/spreg/diagnostics.py
new file mode 100644
index 0000000..2a184d7
--- /dev/null
+++ b/pysal/spreg/diagnostics.py
@@ -0,0 +1,1424 @@
+"""
+Diagnostics for regression estimations. 
+        
+"""
+__author__ = "Luc Anselin luc.anselin at asu.edu, Nicholas Malizia nicholas.malizia at asu.edu "
+
+import pysal
+from pysal.common import *
+import scipy.sparse as SP
+from math import sqrt
+from utils import spmultiply, sphstack, spmin, spmax
+
+
+__all__ = [
+    "f_stat", "t_stat", "r2", "ar2", "se_betas", "log_likelihood", "akaike", "schwarz",
+    "condition_index", "jarque_bera", "breusch_pagan", "white", "koenker_bassett", "vif", "likratiotest"]
+
+
def f_stat(reg):
    """
    F-statistic and p-value for the joint significance of a regression.
    (For two stage least squares see f_stat_tsls.)

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      k, n, utu, predy and mean_y

    Returns
    -------
    fs_result       : tuple
                      (value of the F statistic, associated p-value)

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.

    Examples
    --------
    With an OLS regression ``reg`` on the Columbus data
    (CRIME on INC and HOVAL)::

        >>> testresult = diagnostics.f_stat(reg)     # doctest: +SKIP
        ('28.385629224695', '0.000000009341')

    """
    k = reg.k                # regressors, constant included
    n = reg.n                # observations
    # Explained (U) and residual (Q) sums of squares.
    Q = reg.utu
    U = np.sum((reg.predy - reg.mean_y) ** 2)
    f_value = (U / (k - 1)) / (Q / (n - k))
    # Upper-tail probability of an F(k-1, n-k) distribution.
    p_value = stats.f.sf(f_value, k - 1, n - k)
    return (f_value, p_value)
+
+
def t_stat(reg, z_stat=False):
    """
    t-statistics (or z-statistics) and two-sided p-values per coefficient.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      k, n, vm and betas
    z_stat          : boolean
                      If True use the standard normal distribution instead
                      of Student's t

    Returns
    -------
    ts_result       : list of tuples
                      one (statistic, p-value) pair per coefficient

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.
    """
    n = reg.n
    k = reg.k
    vm = reg.vm
    # Coefficient estimates divided by their standard errors
    # (square roots of the variance matrix diagonal).
    std_err = np.sqrt(vm.diagonal())
    t_values = reg.betas[range(0, len(vm))].reshape(len(vm),) / std_err
    if z_stat:
        # Two-sided p-values from the standard normal.
        return [(t, stats.norm.sf(abs(t)) * 2) for t in t_values]
    # Two-sided p-values from Student's t with n - k degrees of freedom.
    return [(t, stats.t.sf(abs(t), n - k) * 2) for t in t_values]
+
+
def r2(reg):
    """
    Coefficient of determination (R^2) for a regression.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      y, mean_y and utu

    Returns
    -------
    r2_result       : float
                      value of the coefficient of determination

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.
    """
    # Total sum of squares of the dependent variable around its mean;
    # sum over axis 0 of the (n x 1) vector gives a shape-(1,) array.
    deviations = reg.y - reg.mean_y
    ss_tot = (deviations ** 2).sum(0)
    # R^2 = 1 - RSS/TSS; unwrap the single-element array to a scalar.
    return (1 - reg.utu / ss_tot)[0]
+
+
def ar2(reg):
    """
    Adjusted R^2 for a regression.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      n, k and the attributes required by r2()

    Returns
    -------
    ar2_result      : float
                      R^2 penalized for the number of explanatory
                      variables

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.
    """
    n = reg.n
    k = reg.k
    # Multiply before dividing so the expression stays float-valued even
    # when n and k are plain ints.
    return 1 - (1 - r2(reg)) * (n - 1) / (n - k)
+
+
def se_betas(reg):
    """
    Standard errors of the regression coefficients.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      the coefficient variance matrix vm (k x k)

    Returns
    -------
    se_result       : array
                      standard error of each coefficient (1 x k)

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.
    """
    # Standard errors are the square roots of the variances found on the
    # diagonal of the coefficient variance-covariance matrix.
    return np.sqrt(reg.vm.diagonal())
+
+
def log_likelihood(reg):
    """
    Log-likelihood of a regression assuming normal errors.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      n and utu

    Returns
    -------
    ll_result       : float
                      value of the log-likelihood

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River.
    """
    n = reg.n
    utu = reg.utu
    # ML estimate of the error variance.
    sig2 = utu / n
    # ll = -(n/2) * [log(2*pi) + log(sig2)] - RSS / (2 * sig2)
    return -0.5 * (n * np.log(2 * math.pi) + n * np.log(sig2) + utu / sig2)
+
+
def akaike(reg):
    """
    Akaike Information Criterion for a regression.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      k, and either logll (ML models) or n and utu (OLS)

    Returns
    -------
    aic_result      : scalar
                      value of the Akaike Information Criterion

    References
    ----------
    .. [1] H. Akaike. 1974. A new look at the statistical identification
       model. IEEE Transactions on Automatic Control, 19(6):716-723.
    """
    k = reg.k   # explanatory variables, constant included
    try:
        # ML estimation: the log-likelihood is already available
        # (any spatial coefficient is counted in k).
        return 2.0 * k - 2.0 * reg.logll
    except AttributeError:
        # OLS: derive the concentrated log-likelihood from the RSS.
        n = reg.n
        utu = reg.utu
        return 2 * k + n * (np.log((2 * np.pi * utu) / n) + 1)
+
+
def schwarz(reg):
    """
    Schwarz (Bayesian) Information Criterion for a regression.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      n, k, and either logll (ML models) or utu (OLS)

    Returns
    -------
    bic_result      : scalar
                      value of the Schwarz Information Criterion

    References
    ----------
    .. [1] G. Schwarz. 1978. Estimating the dimension of a model. The
       Annals of Statistics, pages 461-464.
    """
    n = reg.n
    k = reg.k   # explanatory variables, constant included
    try:
        # ML estimation: the log-likelihood is already available
        # (any spatial coefficient is counted in k).
        return k * np.log(n) - 2.0 * reg.logll
    except AttributeError:
        # OLS: derive the concentrated log-likelihood from the RSS.
        utu = reg.utu
        return k * np.log(n) + n * (np.log((2 * np.pi * utu) / n) + 1)
+
+
def condition_index(reg):
    """
    Multicollinearity condition index according to Belsey, Kuh and
    Welsch (1980).

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      either xtx (OLS) or hth (2SLS) as the k x k
                      cross-product matrix (constant included)

    Returns
    -------
    ci_result       : float
                      scalar value for the multicollinearity condition
                      index

    Raises
    ------
    AttributeError
        If reg exposes neither an 'xtx' nor an 'hth' attribute.

    References
    ----------
    .. [1] D. Belsley, E. Kuh, and R. Welsch. 1980. Regression
       Diagnostics. New York: Wiley.
    """
    if hasattr(reg, 'xtx'):
        xtx = reg.xtx   # (array) k x k projection matrix (includes constant)
    elif hasattr(reg, 'hth'):
        xtx = reg.hth   # (array) k x k projection matrix (includes constant)
    else:
        # Previously fell through and raised a confusing NameError on the
        # unbound local; fail explicitly with a clear message instead.
        raise AttributeError(
            "regression object must have an 'xtx' or 'hth' attribute")
    # Scale by the diagonal, then the spread of the eigenvalues of the
    # scaled matrix measures collinearity.
    diag = np.diagonal(xtx)
    scale = xtx / diag
    eigval = np.linalg.eigvals(scale)
    ci_result = sqrt(max(eigval) / min(eigval))
    return ci_result
+
+
def jarque_bera(reg):
    """
    Jarque-Bera test for normality in the residuals.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      n and the residual vector u

    Returns
    -------
    jb_result       : dictionary
                      contains the statistic (jb) for the Jarque-Bera test
                      and the associated p-value (pvalue)
    df              : integer
                      degrees of freedom for the test (always 2)
    jb              : float
                      value of the test statistic
    pvalue          : float
                      p-value associated with the statistic (chi^2
                      distributed with 2 df)

    References
    ----------
    .. [1] C. Jarque and A. Bera. 1980. Efficient tests for normality,
       homoscedasticity and serial independence of regression residuals.
       Economics Letters, 6(3):255-259.
    """
    n = reg.n               # (scalar) number of observations
    u = reg.u               # (array) residuals from regression
    # Second, third and fourth central-style moments of the residuals.
    mu2 = np.mean(u ** 2)
    mu3 = np.mean(u ** 3)
    mu4 = np.mean(u ** 4)
    S = mu3 / (mu2 ** (1.5))    # skewness measure
    K = (mu4 / (mu2 ** 2))      # kurtosis measure
    jb = n * (((S ** 2) / 6) + ((K - 3) ** 2) / 24)
    # stats.chisqprob was deprecated and removed from scipy;
    # stats.chi2.sf is its exact equivalent.
    pvalue = stats.chi2.sf(jb, 2)
    jb_result = {"df": 2, "jb": jb, 'pvalue': pvalue}
    return jb_result
+
+
def breusch_pagan(reg, z=None):
    """
    Breusch-Pagan test statistic to check for heteroscedasticity.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model; must expose
                      u, n, utu and (when z is not given) x
    z               : array
                      optional input for specifying an alternative set of
                      variables (Z) to explain the observed variance. By
                      default this is a matrix of the squared explanatory
                      variables (X**2) with a constant added to the first
                      column if not already present. In the default case,
                      the explanatory variables are squared to eliminate
                      negative values.

    Returns
    -------
    bp_result       : dictionary
                      contains the statistic (bp) for the test and the
                      associated p-value (pvalue)
    bp              : float
                      scalar value for the Breusch-Pagan test statistic
    df              : integer
                      degrees of freedom associated with the test (k)
    pvalue          : float
                      p-value associated with the statistic (chi^2
                      distributed with k df)

    Notes
    -----
    x attribute in the reg object must have a constant term included. This is
    standard for spreg.OLS so no testing done to confirm constant.

    References
    ----------
    .. [1] T. Breusch and A. Pagan. 1979. A simple test for
       heteroscedasticity and random coefficient variation. Econometrica:
       Journal of the Econometric Society, 47(5):1287-1294.
    """
    e2 = reg.u ** 2
    n = reg.n
    ete = reg.utu

    # Scaled squared residuals: g_i = e_i^2 / (e'e / n) - 1
    den = ete / n
    g = e2 / den - 1.0

    # 'is None' rather than '== None': comparing an ndarray to None with
    # '==' yields an element-wise boolean array, which raises an
    # ambiguous-truth-value error whenever an array z is supplied.
    if z is None:
        # Default Z: squared explanatory variables (constant assumed
        # already present in reg.x — see Notes).
        z = spmultiply(reg.x, reg.x)

    n, p = z.shape

    # Identify duplicate columns in Z, keeping the first occurrence.
    omitcolumn = set()
    for i in range(p):
        for j in range(i + 1, p):
            if abs(z[:, i] - z[:, j]).sum() == 0:
                omitcolumn.add(j)

    # Remove the duplicates in reverse order so earlier deletions do not
    # renumber the remaining column indices.
    for c in sorted(omitcolumn, reverse=True):
        z = np.delete(z, c, 1)
    n, p = z.shape

    df = p - 1

    # LM statistic: 0.5 * g'Z (Z'Z)^-1 Z'g
    zt = np.transpose(z)
    ztzi = la.inv(np.dot(zt, z))
    gtz = np.dot(np.transpose(g), z)
    ztg = np.dot(zt, g)
    bp_array = 0.5 * np.dot(np.dot(gtz, ztzi), ztg)
    bp = bp_array[0, 0]

    # stats.chisqprob was deprecated and removed from scipy;
    # stats.chi2.sf is its exact equivalent.
    pvalue = stats.chi2.sf(bp, df)
    bp_result = {'df': df, 'bp': bp, 'pvalue': pvalue}
    return bp_result
+
+
def white(reg):
    """
    Calculates the White test to check for heteroscedasticity.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model

    Returns
    -------
    white_result    : dictionary
                      contains the statistic (white), degrees of freedom
                      (df) and the associated p-value (pvalue) for the
                      White test. If the X matrix is severely
                      multicollinear a string explaining that the test
                      was not computed is returned instead.
    white           : float
                      scalar value for the White test statistic.
    df              : integer
                      degrees of freedom associated with the test
    pvalue          : float
                      p-value associated with the statistic (chi^2
                      distributed with k df)

    Notes
    -----
    x attribute in the reg object must have a constant term included. This is
    standard for spreg.OLS so no testing done to confirm constant.

    References
    ----------
    .. [1] H. White. 1980. A heteroscedasticity-consistent covariance
       matrix estimator and a direct test for heteroskedasticity.
       Econometrica. 48(4) 817-838. 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> import diagnostics
    >>> from ols import OLS

    Read the DBF associated with the Columbus data.

    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),"r")

    Create the dependent variable vector. 

    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))

    Create the matrix of independent variables. 

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("HOVAL"))
    >>> X = np.array(X).T

    Run an OLS regression.

    >>> reg = OLS(y,X)

    Calculate the White test for heteroscedasticity.

    >>> testresult = diagnostics.white(reg)

    Print the degrees of freedom for the test.

    >>> print testresult['df']
    5

    Print the test statistic.

    >>> print("%1.3f"%testresult['wh'])
    19.946

    Print the associated p-value. 

    >>> print("%1.4f"%testresult['pvalue'])
    0.0013

    """
    e = reg.u ** 2
    k = reg.k
    n = reg.n
    y = reg.y
    X = reg.x
    #constant = constant_check(X)

    # Check for constant, if none add one, see Greene 2003, pg. 222
    # if constant == False:
    #    X = np.hstack((np.ones((n,1)),X))

    # Check for multicollinearity in the X matrix: the auxiliary
    # regression on cross-products is unreliable in that case.
    ci = condition_index(reg)
    if ci > 30:
        white_result = "Not computed due to multicollinearity."
        return white_result

    # Compute cross-products and squares of the regression variables.
    # There are k*(k+1)/2 unique pairwise products X_i * X_j (i <= j);
    # use integer arithmetic so the shape is valid for numpy.
    n_prods = (k * (k + 1)) // 2
    if type(X).__name__ == 'ndarray':
        A = np.zeros((n, n_prods))
    elif type(X).__name__ == 'csc_matrix' or type(X).__name__ == 'csr_matrix':
        # this is probably inefficient
        A = SP.lil_matrix((n, n_prods))
    else:
        raise Exception("unknown X type, %s" % type(X).__name__)
    counter = 0
    for i in range(k):
        for j in range(i, k):
            v = spmultiply(X[:, i], X[:, j], False)
            A[:, counter] = v
            counter += 1

    # Append the original variables
    A = sphstack(X, A)   # note: this also converts a LIL to CSR
    n, k = A.shape

    # Check to identify any duplicate or constant columns in A
    omitcolumn = []
    for i in range(k):
        current = A[:, i]
        # remove all constant terms (will add a constant back later)
        if spmax(current) == spmin(current):
            omitcolumn.append(i)
        # do not allow duplicates
        for j in range(k):
            check = A[:, j]
            if i < j:
                test = abs(current - check).sum()
                if test == 0:
                    omitcolumn.append(j)
    omitcolumn = list(set(omitcolumn))

    # Now the identified columns must be removed
    if type(A).__name__ == 'ndarray':
        A = np.delete(A, omitcolumn, 1)
    elif type(A).__name__ == 'csc_matrix' or type(A).__name__ == 'csr_matrix':
        # this is probably inefficient
        keepcolumn = list(range(k))
        for i in omitcolumn:
            keepcolumn.remove(i)
        A = A[:, keepcolumn]
    else:
        # report the offending type of A (the original formatted X's type here)
        raise Exception("unknown A type, %s" % type(A).__name__)
    A = sphstack(np.ones((A.shape[0], 1)), A)   # add a constant back in
    n, k = A.shape

    # Conduct the auxiliary regression of e^2 on A; the statistic is
    # n * R^2 of that regression, chi^2(k-1) distributed under H0.
    import ols as OLS
    aux_reg = OLS.BaseOLS(e, A)
    aux_r2 = r2(aux_reg)
    wh = aux_r2 * n
    df = k - 1
    pvalue = stats.chisqprob(wh, df)
    white_result = {'df': df, 'wh': wh, 'pvalue': pvalue}
    return white_result
+
+
def koenker_bassett(reg, z=None):
    """
    Calculates the Koenker-Bassett test statistic to check for
    heteroscedasticity. 

    Parameters
    ----------
    reg             : regression output
                      output from an instance of a regression class
    z               : array
                      optional input for specifying an alternative set of
                      variables (Z) to explain the observed variance. By
                      default this is a matrix of the squared explanatory
                      variables (X**2) with a constant added to the first
                      column if not already present. In the default case,
                      the explanatory variables are squared to eliminate
                      negative values. 

    Returns
    -------
    kb_result       : dictionary
                      contains the statistic (kb), degrees of freedom (df)
                      and the associated p-value (pvalue) for the test. 
    kb              : float
                      scalar value for the Koenker-Bassett test statistic.
    df              : integer
                      degrees of freedom associated with the test
    pvalue          : float
                      p-value associated with the statistic (chi^2
                      distributed)

    Notes
    -----
    x attribute in the reg object must have a constant term included. This is
    standard for spreg.OLS so no testing done to confirm constant.

    References
    ----------
    .. [1] R. Koenker and G. Bassett. 1982. Robust tests for
       heteroscedasticity based on regression quantiles. Econometrica,
       50(1):43-61. 
    .. [2] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River. 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> import diagnostics
    >>> from ols import OLS

    Read the DBF associated with the Columbus data.

    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),"r")

    Create the dependent variable vector. 

    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))

    Create the matrix of independent variables. 

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("HOVAL"))
    >>> X = np.array(X).T

    Run an OLS regression.

    >>> reg = OLS(y,X)

    Calculate the Koenker-Bassett test for heteroscedasticity.

    >>> testresult = diagnostics.koenker_bassett(reg)

    Print the degrees of freedom for the test.

    >>> testresult['df']
    2

    Print the test statistic.

    >>> print("%1.3f"%testresult['kb'])
    5.694

    Print the associated p-value. 

    >>> print("%1.4f"%testresult['pvalue'])
    0.0580

    """
    # The notation here matches that of Greene (2003).
    u = reg.u ** 2
    e = reg.u
    n = reg.n
    k = reg.k
    x = reg.x
    ete = reg.utu
    #constant = constant_check(x)

    # Center the squared residuals and compute their variance estimate v.
    ubar = ete / n
    ubari = ubar * np.ones((n, 1))
    g = u - ubari
    v = (1.0 / n) * np.sum((u - ubar) ** 2)

    # Use an identity check here: `z == None` on an ndarray triggers an
    # elementwise comparison whose truth value is ambiguous.
    if z is None:
        x = reg.x
        #constant = constant_check(x)
        # if constant == False:
        #    z = np.hstack((np.ones((n,1)),x))**2
        # else:
        #    z = x**2
        z = spmultiply(x, x)
    else:
        #constant = constant_check(z)
        # if constant == False:
        #    z = np.hstack((np.ones((n,1)),z))
        pass

    n, p = z.shape

    # Check to identify any duplicate columns in Z
    omitcolumn = []
    for i in range(p):
        current = z[:, i]
        for j in range(p):
            check = z[:, j]
            if i < j:
                test = abs(current - check).sum()
                if test == 0:
                    omitcolumn.append(j)

    uniqueomit = set(omitcolumn)
    omitcolumn = list(uniqueomit)

    # Now the identified columns must be removed (done in reverse to
    # prevent renumbering)
    omitcolumn.sort()
    omitcolumn.reverse()
    for c in omitcolumn:
        z = np.delete(z, c, 1)
    n, p = z.shape

    df = p - 1

    # Conduct the auxiliary regression: KB = g'Z (Z'Z)^-1 Z'g / v,
    # chi^2(df) distributed under the null of homoscedasticity.
    zt = np.transpose(z)
    gt = np.transpose(g)
    gtz = np.dot(gt, z)
    ztg = np.dot(zt, g)
    ztz = np.dot(zt, z)
    ztzi = la.inv(ztz)

    part1 = np.dot(gtz, ztzi)
    part2 = np.dot(part1, ztg)
    kb_array = (1.0 / v) * part2
    kb = kb_array[0, 0]

    pvalue = stats.chisqprob(kb, df)
    kb_result = {'kb': kb, 'df': df, 'pvalue': pvalue}
    return kb_result
+
+
def vif(reg):
    """
    Calculates the variance inflation factor for each independent variable.
    For the ease of indexing the results, the constant is currently
    included. This should be omitted when reporting the results to the
    output text.

    Parameters
    ----------
    reg             : regression object
                      output instance from a regression model

    Returns
    -------    
    vif_result      : list of tuples
                      each tuple includes the vif and the tolerance, the
                      order of the variables corresponds to their order in
                      the reg.x matrix

    References
    ----------
    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
       Saddle River. 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> import diagnostics
    >>> from ols import OLS

    Read the DBF associated with the Columbus data.

    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),"r")

    Create the dependent variable vector. 

    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))

    Create the matrix of independent variables. 

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("HOVAL"))
    >>> X = np.array(X).T

    Run an OLS regression.

    >>> reg = OLS(y,X)

    Calculate the variance inflation factor (VIF). 
    >>> testresult = diagnostics.vif(reg)

    Select the tuple for the income variable. 

    >>> incvif = testresult[1]

    Print the VIF for income. 

    >>> print("%12.12f"%incvif[0])
    1.333117497189

    Print the tolerance for income. 

    >>> print("%12.12f"%incvif[1])
    0.750121427487

    Repeat for the home value variable. 

    >>> hovalvif = testresult[2]
    >>> print("%12.12f"%hovalvif[0])
    1.333117497189
    >>> print("%12.12f"%hovalvif[1])
    0.750121427487

    """
    X = reg.x
    n, k = X.shape
    vif_result = []

    # Import once, outside the loop (the original re-imported per column).
    import ols as OLS
    for j in range(k):
        # Regress column j on the remaining columns; VIF_j = 1/(1 - R^2_j)
        # and tolerance_j = 1 - R^2_j.
        Z = X.copy()
        Z = np.delete(Z, j, 1)
        y = X[:, j]
        aux = OLS.BaseOLS(y, Z)
        mean_y = aux.mean_y
        utu = aux.utu
        ss_tot = sum((y - mean_y) ** 2)
        if ss_tot == 0:
            # Column j has zero variance (e.g. the constant term); the
            # VIF is undefined, so flag it as missing.
            resj = pysal.MISSINGVALUE
        else:
            r2aux = 1 - utu / ss_tot
            tolj = 1 - r2aux
            vifj = 1 / tolj
            resj = (vifj, tolj)
        vif_result.append(resj)
    return vif_result
+
+
def constant_check(array):
    """
    Checks to see numpy array includes a constant.

    A column is treated as a constant when every one of its entries is
    identical (its minimum equals its maximum).

    Parameters
    ----------
    array           : array
                      an (n, k) array of variables to be inspected 

    Returns
    -------
    constant        : boolean
                      true signifies the presence of a constant

    Example
    -------

    >>> import numpy as np
    >>> import pysal
    >>> import diagnostics
    >>> from ols import OLS
    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("HOVAL"))
    >>> X = np.array(X).T
    >>> reg = OLS(y,X)
    >>> diagnostics.constant_check(reg.x)
    True

    """

    n, k = array.shape
    # Scan column by column and bail out as soon as a constant is found.
    for col in range(k):
        values = array[:, col]
        if values.min() == values.max():
            return True
    return False
+
+
def likratiotest(reg0, reg1):
    """
    Likelihood ratio test statistic

    Parameters
    ----------

    reg0         : regression object for constrained model (H0)
    reg1         : regression object for unconstrained model (H1)

    Returns
    -------

    likratio     : dictionary
                   contains the statistic (likr), the degrees of
                   freedom (df) and the p-value (pvalue)
    likr         : float
                   likelihood ratio statistic
    df           : integer
                   degrees of freedom
    p-value      : float
                   p-value

    References
    ----------
    .. [1] W. Greene. 2012. Econometric Analysis. Prentice Hall, Upper
       Saddle River.

    Examples
    --------

    >>> import numpy as np
    >>> import pysal as ps
    >>> import scipy.stats as stats
    >>> import pysal.spreg.ml_lag as lag

    Use the baltim sample data set

    >>> db =  ps.open(ps.examples.get_path("baltim.dbf"),'r')
    >>> y_name = "PRICE"
    >>> y = np.array(db.by_col(y_name)).T
    >>> y.shape = (len(y),1)
    >>> x_names = ["NROOM","NBATH","PATIO","FIREPL","AC","GAR","AGE","LOTSZ","SQFT"]
    >>> x = np.array([db.by_col(var) for var in x_names]).T
    >>> ww = ps.open(ps.examples.get_path("baltim_q.gal"))
    >>> w = ww.read()
    >>> ww.close()
    >>> w.transform = 'r'

    OLS regression

    >>> ols1 = ps.spreg.OLS(y,x)

    ML Lag regression

    >>> mllag1 = lag.ML_Lag(y,x,w)

    >>> lr = likratiotest(ols1,mllag1)

    >>> print "Likelihood Ratio Test: {0:.4f}       df: {1}        p-value: {2:.4f}".format(lr["likr"],lr["df"],lr["p-value"])
    Likelihood Ratio Test: 44.5721       df: 1        p-value: 0.0000

    """

    # LR = 2 * (logL_unconstrained - logL_constrained); both regression
    # objects must expose a logll attribute.
    try:
        likr = 2.0 * (reg1.logll - reg0.logll)
    except AttributeError:
        raise Exception(
            "Missing or improper log-likelihoods in regression objects")
    # Always enforce a positive likelihood ratio (guards against the
    # models being passed in reverse order).
    likr = abs(likr)
    pvalue = stats.chisqprob(likr, 1)
    likratio = {"likr": likr, "df": 1, "p-value": pvalue}
    return likratio
+
+
def _test():
    # Run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    _test()
diff --git a/pysal/spreg/diagnostics_sp.py b/pysal/spreg/diagnostics_sp.py
new file mode 100644
index 0000000..8d40bc1
--- /dev/null
+++ b/pysal/spreg/diagnostics_sp.py
@@ -0,0 +1,826 @@
+"""
+Spatial diagnostics module
+"""
+__author__ = "Luc Anselin luc.anselin at asu.edu, Daniel Arribas-Bel darribas at asu.edu"
+
+from utils import spdot
+from scipy.stats.stats import chisqprob
+from scipy.stats import norm
+import numpy as np
+import numpy.linalg as la
+
+__all__ = ['LMtests', 'MoranRes', 'AKtest']
+
+
class LMtests:

    """
    Lagrange Multiplier tests. Implemented as presented in Anselin et al.
    (1996) [1]_
    ...

    Parameters
    ----------

    ols         : OLS
                  OLS regression object
    w           : W
                  Spatial weights instance
    tests       : list
                  Lists of strings with the tests desired to be performed.
                  Values may be:

                   * 'all': runs all the options (default)
                   * 'lme': LM error test
                   * 'rlme': Robust LM error test
                   * 'lml' : LM lag test
                   * 'rlml': Robust LM lag test

    Attributes
    ----------

    lme         : tuple
                  (Only if 'lme' or 'all' was in tests). Pair of statistic and
                  p-value for the LM error test.
    lml         : tuple
                  (Only if 'lml' or 'all' was in tests). Pair of statistic and
                  p-value for the LM lag test.
    rlme        : tuple
                  (Only if 'rlme' or 'all' was in tests). Pair of statistic
                  and p-value for the Robust LM error test.
    rlml        : tuple
                  (Only if 'rlml' or 'all' was in tests). Pair of statistic
                  and p-value for the Robust LM lag test.
    sarma       : tuple
                  (Only if 'rlml' or 'all' was in tests). Pair of statistic
                  and p-value for the SARMA test.

    References
    ----------
    .. [1] Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
       diagnostic tests for spatial dependence". Regional Science and Urban
       Economics, 26, 77-104.

    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> from ols import OLS

    Open the csv file to access the data for analysis

    >>> csv = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Pull out from the csv the files we need ('HOVAL' as dependent as well as
    'INC' and 'CRIME' as independent) and directly transform them into nx1 and
    nx2 arrays, respectively

    >>> y = np.array([csv.by_col('HOVAL')]).T
    >>> x = np.array([csv.by_col('INC'), csv.by_col('CRIME')]).T

    Create the weights object from existing .gal file

    >>> w = pysal.open(pysal.examples.get_path('columbus.gal'), 'r').read()

    Row-standardize the weight object (not required although desirable in some
    cases)

    >>> w.transform='r'

    Run an OLS regression

    >>> ols = OLS(y, x)

    Run all the LM tests in the residuals. These diagnostics test for the
    presence of remaining spatial autocorrelation in the residuals of an OLS
    model and give indication about the type of spatial model. There are five
    types: presence of a spatial lag model (simple and robust version),
    presence of a spatial error model (simple and robust version) and joint presence
    of both a spatial lag as well as a spatial error model.

    >>> lms = pysal.spreg.diagnostics_sp.LMtests(ols, w)

    LM error test:

    >>> print round(lms.lme[0],4), round(lms.lme[1],4)
    3.0971 0.0784

    LM lag test:

    >>> print round(lms.lml[0],4), round(lms.lml[1],4)
    0.9816 0.3218

    Robust LM error test:

    >>> print round(lms.rlme[0],4), round(lms.rlme[1],4)
    3.2092 0.0732

    Robust LM lag test:

    >>> print round(lms.rlml[0],4), round(lms.rlml[1],4)
    1.0936 0.2957

    LM SARMA test:

    >>> print round(lms.sarma[0],4), round(lms.sarma[1],4)
    4.1907 0.123
    """

    def __init__(self, ols, w, tests=['all']):
        # Shared intermediate quantities (Wu, traces, etc.) are computed
        # once and reused by every requested test.
        cache = spDcache(ols, w)
        if tests == ['all']:
            tests = ['lme', 'lml', 'rlme', 'rlml', 'sarma']
        # Each requested test stores a (statistic, p-value) tuple as an
        # attribute of the instance.
        if 'lme' in tests:
            self.lme = lmErr(ols, w, cache)
        if 'lml' in tests:
            self.lml = lmLag(ols, w, cache)
        if 'rlme' in tests:
            self.rlme = rlmErr(ols, w, cache)
        if 'rlml' in tests:
            self.rlml = rlmLag(ols, w, cache)
        if 'sarma' in tests:
            self.sarma = lmSarma(ols, w, cache)
+
+
class MoranRes:

    """
    Moran's I for spatial autocorrelation in residuals from OLS regression
    ...

    Parameters
    ----------

    ols         : OLS
                  OLS regression object
    w           : W
                  Spatial weights instance
    z           : boolean
                  If set to True computes attributes eI, vI and zI. Due to computational burden of vI, defaults to False.

    Attributes
    ----------
    I           : float
                  Moran's I statistic
    eI          : float
                  Moran's I expectation
    vI          : float
                  Moran's I variance
    zI          : float
                  Moran's I standardized value

    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> from ols import OLS

    Open the csv file to access the data for analysis

    >>> csv = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Pull out from the csv the files we need ('HOVAL' as dependent as well as
    'INC' and 'CRIME' as independent) and directly transform them into nx1 and
    nx2 arrays, respectively

    >>> y = np.array([csv.by_col('HOVAL')]).T
    >>> x = np.array([csv.by_col('INC'), csv.by_col('CRIME')]).T

    Create the weights object from existing .gal file

    >>> w = pysal.open(pysal.examples.get_path('columbus.gal'), 'r').read()

    Row-standardize the weight object (not required although desirable in some
    cases)

    >>> w.transform='r'

    Run an OLS regression

    >>> ols = OLS(y, x)

    Run Moran's I test for residual spatial autocorrelation in an OLS model.
    This computes the traditional statistic applying a correction in the
    expectation and variance to account for the fact it comes from residuals
    instead of an independent variable

    >>> m = pysal.spreg.diagnostics_sp.MoranRes(ols, w, z=True)

    Value of the Moran's I statistic:

    >>> print round(m.I,4)
    0.1713

    Value of the Moran's I expectation:

    >>> print round(m.eI,4)
    -0.0345

    Value of the Moran's I variance:

    >>> print round(m.vI,4)
    0.0081

    Value of the Moran's I standardized value. This is
    distributed as a standard Normal(0, 1)

    >>> print round(m.zI,4)
    2.2827

    P-value of the standardized Moran's I value (z):

    >>> print round(m.p_norm,4)
    0.0224
    """

    def __init__(self, ols, w, z=False):
        # Reusable intermediate quantities for the spatial diagnostics.
        cache = spDcache(ols, w)
        # The raw statistic is always computed.
        self.I = get_mI(ols, w, cache)
        if z:
            # Moments and standardized value are optional because the
            # variance (vI) is computationally expensive.
            self.eI = get_eI(ols, w, cache)
            self.vI = get_vI(ols, w, self.eI, cache)
            self.zI, self.p_norm = get_zI(self.I, self.eI, self.vI)
+
+
class AKtest:

    """
    Moran's I test of spatial autocorrelation for IV estimation.
    Implemented following the original reference Anselin and Kelejian
    (1997) [AK97]_
    ...

    Parameters
    ----------

    iv          : TSLS
                  Regression object from TSLS class
    w           : W
                  Spatial weights instance
    case        : string
                  Flag for special cases (default to 'nosp'):

                   * 'nosp': Only NO spatial end. reg.
                   * 'gen': General case (spatial lag + end. reg.)

    Attributes
    ----------

    mi          : float
                  Moran's I statistic for IV residuals
    ak          : float
                  Square of corrected Moran's I for residuals::

                  .. math::

                        ak = \dfrac{N \times I^*}{\phi^2}

                  Note: if case='nosp' then it simplifies to the LMerror
    p           : float
                  P-value of the test

    References
    ----------

    .. [AK97] Anselin, L., Kelejian, H. (1997) "Testing for spatial error
        autocorrelation in the presence of endogenous regressors".
        Interregional Regional Science Review, 20, 1.
    .. [2] Kelejian, H.H., Prucha, I.R. and Yuzefovich, Y. (2004)
        "Instrumental variable estimation of a spatial autorgressive model with
        autoregressive disturbances: large and small sample results".
        Advances in Econometrics, 18, 163-198.

    Examples
    --------

    We first need to import the needed modules. Numpy is needed to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis. The TSLS is required to run the model on
    which we will perform the tests.

    >>> import numpy as np
    >>> import pysal
    >>> from twosls import TSLS
    >>> from twosls_sp import GM_Lag

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')

    Before being able to apply the diagnostics, we have to run a model and,
    for that, we need the input variables. Extract the CRIME column (crime
    rates) from the DBF file and make it the dependent variable for the
    regression. Note that PySAL requires this to be an numpy array of shape
    (n, 1) as opposed to the also common shape of (n, ) that other packages
    accept.

    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))

    Extract INC (income) vector from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). By default this model adds a vector of ones to the
    independent variables passed in, but this can be overridden by passing
    constant=False.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T

    In this case, we consider HOVAL (home value) as an endogenous regressor,
    so we acknowledge that by reading it in a different category.

    >>> yd = []
    >>> yd.append(db.by_col("HOVAL"))
    >>> yd = np.array(yd).T

    In order to properly account for the endogeneity, we have to pass in the
    instruments. Let us consider DISCBD (distance to the CBD) is a good one:

    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T

    Now we are good to run the model. It is an easy one line task.

    >>> reg = TSLS(y, X, yd, q=q)

    Now we are concerned with whether our non-spatial model presents spatial
    autocorrelation in the residuals. To assess this possibility, we can run
    the Anselin-Kelejian test, which is a version of the classical LM error
    test adapted for the case of residuals from an instrumental variables (IV)
    regression. First we need an extra object, the weights matrix, which
    includes the spatial configuration of the observations
    into the error component of the model. To do that, we can open an already
    existing gal file or create a new one. In this case, we will create one
    from ``columbus.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We are good to run the test. It is a very simple task:

    >>> ak = AKtest(reg, w)

    And explore the information obtained:

    >>> print('AK test: %f\tP-value: %f'%(ak.ak, ak.p))
    AK test: 4.642895      P-value: 0.031182

    The test also accomodates the case when the residuals come from an IV
    regression that includes a spatial lag of the dependent variable. The only
    requirement needed is to modify the ``case`` parameter when we call
    ``AKtest``. First, let us run a spatial lag model:

    >>> reg_lag = GM_Lag(y, X, yd, q=q, w=w)

    And now we can run the AK test and obtain similar information as in the
    non-spatial model.

    >>> ak_sp = AKtest(reg, w, case='gen')
    >>> print('AK test: %f\tP-value: %f'%(ak_sp.ak, ak_sp.p))
    AK test: 1.157593      P-value: 0.281965

    """

    def __init__(self, iv, w, case='nosp'):
        if case == 'gen':
            # General case: residuals come from an IV model that includes
            # a spatial lag of the dependent variable.
            cache = spDcache(iv, w)
            self.mi, self.ak, self.p = akTest(iv, w, cache)
        elif case == 'nosp':
            # No spatial endogenous regressor: the statistic collapses to
            # the classical LM error test on the IV residuals.
            cache = spDcache(iv, w)
            self.mi = get_mI(iv, w, cache)
            self.ak, self.p = lmErr(iv, w, cache)
        else:
            # NOTE(review): an invalid `case` only prints a message and
            # leaves the instance without mi/ak/p attributes; callers will
            # later fail with AttributeError -- consider raising instead.
            print """\n
            Fix the optional argument 'case' to match the requirements:
                * 'gen': General case (spatial lag + end. reg.)
                * 'nosp': No spatial end. reg.
            \n"""
+
+
class spDcache:

    """
    Helper class to compute reusable pieces in the spatial diagnostics module
    ...

    Parameters
    ----------

    reg         : OLS_dev, TSLS_dev, STSLS_dev
                  Instance from a regression class
    w           : W
                  Spatial weights instance

    Attributes
    ----------

    j           : array
                  1x1 array with the result from:

                  .. math::

                        J = \dfrac{1}{[(WX\beta)' M (WX\beta) + T \sigma^2]}

    wu          : array
                  nx1 array with spatial lag of the residuals

    utwuDs      : array
                  1x1 array with the result from:

                  .. math::

                        utwuDs = \dfrac{u' W u}{\tilde{\sigma^2}}

    utwyDs      : array
                  1x1 array with the result from:

                  .. math::

                        utwyDs = \dfrac{u' W y}{\tilde{\sigma^2}}


    t           : array
                  1x1 array with the result from :

                  .. math::

                        T = tr[(W' + W) W]

    trA         : float
                  Trace of A as in Cliff & Ord (1981)

    """

    def __init__(self, reg, w):
        self.reg = reg
        self.w = w
        # Lazily-populated cache: each property computes its value on
        # first access and stores it here for reuse.
        self._cache = {}

    @property
    def j(self):
        # J statistic; see the class docstring for the formula.
        if 'j' not in self._cache:
            wxb = self.w.sparse * self.reg.predy
            wxb2 = np.dot(wxb.T, wxb)
            xwxb = spdot(self.reg.x.T, wxb)
            num1 = wxb2 - np.dot(xwxb.T, np.dot(self.reg.xtxi, xwxb))
            num = num1 + (self.t * self.reg.sig2n)
            den = self.reg.n * self.reg.sig2n
            self._cache['j'] = num / den
        return self._cache['j']

    @property
    def wu(self):
        # Spatial lag of the residuals, W u.
        if 'wu' not in self._cache:
            self._cache['wu'] = self.w.sparse * self.reg.u
        return self._cache['wu']

    @property
    def utwuDs(self):
        # u'Wu scaled by the residual variance estimate sig2n.
        if 'utwuDs' not in self._cache:
            res = np.dot(self.reg.u.T, self.wu) / self.reg.sig2n
            self._cache['utwuDs'] = res
        return self._cache['utwuDs']

    @property
    def utwyDs(self):
        # u'Wy scaled by the residual variance estimate sig2n.
        if 'utwyDs' not in self._cache:
            res = np.dot(self.reg.u.T, self.w.sparse * self.reg.y)
            self._cache['utwyDs'] = res / self.reg.sig2n
        return self._cache['utwyDs']

    @property
    def t(self):
        # T = tr[(W' + W) W], computed from the sparse weights matrix.
        if 't' not in self._cache:
            prod = (self.w.sparse.T + self.w.sparse) * self.w.sparse
            self._cache['t'] = np.sum(prod.diagonal())
        return self._cache['t']

    @property
    def trA(self):
        # Trace of A as in Cliff & Ord (1981).
        if 'trA' not in self._cache:
            xtwx = spdot(self.reg.x.T, spdot(self.w.sparse, self.reg.x))
            mw = np.dot(self.reg.xtxi, xtwx)
            self._cache['trA'] = np.sum(mw.diagonal())
        return self._cache['trA']

    @property
    def AB(self):
        """
        Computes A and B matrices as in Cliff-Ord 1981, p. 203
        """
        if 'AB' not in self._cache:
            U = (self.w.sparse + self.w.sparse.T) / 2.
            z = spdot(U, self.reg.x, array_out=False)
            c1 = spdot(self.reg.x.T, z, array_out=False)
            c2 = spdot(z.T, z, array_out=False)
            G = self.reg.xtxi
            A = spdot(G, c1)
            B = spdot(G, c2)
            self._cache['AB'] = [A, B]
        return self._cache['AB']
+
+
def lmErr(reg, w, spDcache):
    """
    LM error test. Implemented as presented in eq. (9) of Anselin et al.
    (1996) [1]_
    ...

    Attributes
    ----------

    reg         : OLS_dev, TSLS_dev, STSLS_dev
                  Instance from a regression class
    w           : W
                  Spatial weights instance
    spDcache    : spDcache
                  Instance of spDcache class

    Returns
    -------

    lme         : tuple
                  Pair of statistic and p-value for the LM error test.

    References
    ----------
    .. _ Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
       diagnostic tests for spatial dependence". Regional Science and Urban
       Economics, 26, 77-104.
    """
    # Statistic: (u'Wu / sigma^2)^2 / T, chi^2(1) distributed under H0.
    numerator = spDcache.utwuDs ** 2
    stat = numerator / spDcache.t
    pval = chisqprob(stat, 1)
    # Both quantities are 1x1 arrays; unpack them to scalars.
    return stat[0][0], pval[0][0]
+
+
+def lmLag(ols, w, spDcache):
+    """
+    LM lag test. Implemented as presented in eq. (13) of Anselin et al.
+    (1996) [1]_
+    ...
+
+    Attributes
+    ----------
+
+    ols         : OLS_dev
+                  Instance from an OLS_dev regression
+    w           : W
+                  Spatial weights instance
+    spDcache     : spDcache
+                  Instance of spDcache class
+
+    Returns
+    -------
+
+    lml         : tuple
+                  Pair of statistic and p-value for the LM lag test.
+
+    References
+    ----------
+    .. [1] Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
+       diagnostic tests for spatial dependence". Regional Science and Urban
+       Economics, 26, 77-104.
+    """
+    lm = spDcache.utwyDs ** 2 / (ols.n * spDcache.j)
+    pval = chisqprob(lm, 1)
+    return (lm[0][0], pval[0][0])
+
+
+def rlmErr(ols, w, spDcache):
+    """
+    Robust LM error test. Implemented as presented in eq. (8) of Anselin et al. (1996) [1]_
+
+    NOTE: eq. (8) contains an erratum, the power -1 in the denominator should be inside the square bracket.
+    ...
+
+    Attributes
+    ----------
+
+    ols         : OLS_dev
+                  Instance from an OLS_dev regression
+    w           : W
+                  Spatial weights instance
+    spDcache     : spDcache
+                  Instance of spDcache class
+
+    Returns
+    -------
+
+    rlme        : tuple
+                  Pair of statistic and p-value for the Robust LM error test.
+
+    References
+    ----------
+    .. [1] Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
+       diagnostic tests for spatial dependence". Regional Science and Urban
+       Economics, 26, 77-104.
+    """
+    nj = ols.n * spDcache.j
+    num = (spDcache.utwuDs - (spDcache.t * spDcache.utwyDs) / nj) ** 2
+    den = spDcache.t * (1. - (spDcache.t / nj))
+    lm = num / den
+    pval = chisqprob(lm, 1)
+    return (lm[0][0], pval[0][0])
+
+
+def rlmLag(ols, w, spDcache):
+    """
+    Robust LM lag test. Implemented as presented in eq. (12) of Anselin et al.
+    (1996) [1]_
+    ...
+
+    Attributes
+    ----------
+
+    ols             : OLS_dev
+                      Instance from an OLS_dev regression
+    w               : W
+                      Spatial weights instance
+    spDcache        : spDcache
+                      Instance of spDcache class
+
+    Returns
+    -------
+
+    rlml            : tuple
+                      Pair of statistic and p-value for the Robust LM lag test.
+
+    References
+    ----------
+    .. [1] Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
+       diagnostic tests for spatial dependence". Regional Science and Urban
+       Economics, 26, 77-104.
+    """
+    lm = (spDcache.utwyDs - spDcache.utwuDs) ** 2 / \
+        ((ols.n * spDcache.j) - spDcache.t)
+    pval = chisqprob(lm, 1)
+    return (lm[0][0], pval[0][0])
+
+
+def lmSarma(ols, w, spDcache):
+    """
+    LM SARMA (joint lag and error) test. Implemented as presented in eq. (15) of Anselin et al.
+    (1996) [1]_
+    ...
+
+    Attributes
+    ----------
+
+    ols         : OLS_dev
+                  Instance from an OLS_dev regression
+    w           : W
+                  Spatial weights instance
+    spDcache     : spDcache
+                  Instance of spDcache class
+
+    Returns
+    -------
+
+    sarma       : tuple
+                  Pair of statistic and p-value for the LM sarma test.
+
+    References
+    ----------
+    .. [1] Anselin, L., Bera, A. K., Florax, R., Yoon, M. J. (1996) "Simple
+       diagnostic tests for spatial dependence". Regional Science and Urban
+       Economics, 26, 77-104.
+    """
+
+    first = (spDcache.utwyDs - spDcache.utwuDs) ** 2 / \
+        (w.n * spDcache.j - spDcache.t)
+    secnd = spDcache.utwuDs ** 2 / spDcache.t
+    lm = first + secnd
+    pval = chisqprob(lm, 2)
+    return (lm[0][0], pval[0][0])
+
+
+def get_mI(reg, w, spDcache):
+    """
+    Moran's I statistic of spatial autocorrelation as shown in Cliff & Ord
+    (1981) [CO81]_, p. 201-203
+    ...
+
+    Attributes
+    ----------
+
+    reg             : OLS_dev, TSLS_dev, STSLS_dev
+                      Instance from a regression class
+    w               : W
+                      Spatial weights instance
+    spDcache        : spDcache
+                      Instance of spDcache class
+
+    Returns
+    -------
+
+    moran           : float
+                      Statistic Moran's I test.
+
+    References
+    ----------
+    .. [CO81] Cliff, AD., Ord, JK. (1981) "Spatial processes: models & applications".
+       Pion London
+    """
+    mi = (w.n * np.dot(reg.u.T, spDcache.wu)) / (w.s0 * reg.utu)
+    return mi[0][0]
+
+
+def get_vI(ols, w, ei, spDcache):
+    """
+    Moran's I variance coded as in Cliff & Ord 1981 (p. 201-203) and R's spdep
+    """
+    A = spDcache.AB[0]
+    trA2 = np.dot(A, A)
+    trA2 = np.sum(trA2.diagonal())
+
+    B = spDcache.AB[1]
+    trB = np.sum(B.diagonal()) * 4.
+    vi = (w.n ** 2 / (w.s0 ** 2 * (w.n - ols.k) * (w.n - ols.k + 2.))) * \
+         (w.s1 + 2. * trA2 - trB -
+          ((2. * (spDcache.trA ** 2)) / (w.n - ols.k)))
+    return vi
+
+
+def get_eI(ols, w, spDcache):
+    """
+    Moran's I expectation using matrix M
+    """
+    return - (w.n * spDcache.trA) / (w.s0 * (w.n - ols.k))
+
+
+def get_zI(I, ei, vi):
+    """
+    Standardized I
+
+    Returns two-sided p-values as provided in the GeoDa family
+    """
+    z = abs((I - ei) / np.sqrt(vi))
+    pval = norm.sf(z) * 2.
+    return (z, pval)
+
+
+def akTest(iv, w, spDcache):
+    """
+    Computes AK-test for the general case (end. reg. + sp. lag)
+    ...
+
+    Parameters
+    ----------
+
+    iv          : STSLS_dev
+                  Instance from spatial 2SLS regression
+    w           : W
+                  Spatial weights instance
+    spDcache    : spDcache
+                  Instance of spDcache class
+
+    Attributes
+    ----------
+    mi          : float
+                  Moran's I statistic for IV residuals
+    ak          : float
+                  Square of corrected Moran's I for residuals::
+
+                  .. math::
+                        ak = \dfrac{N \times I^*}{\phi^2}
+
+    p           : float
+                  P-value of the test
+
+    ToDo:
+        * Code in as Nancy
+        * Compare both
+    """
+    mi = get_mI(iv, w, spDcache)
+    # Phi2
+    etwz = spdot(iv.u.T, spdot(w.sparse, iv.z))
+    a = np.dot(etwz, np.dot(iv.varb, etwz.T))
+    s12 = (w.s0 / w.n) ** 2
+    phi2 = (spDcache.t + (4.0 / iv.sig2n) * a) / (s12 * w.n)
+    ak = w.n * mi ** 2 / phi2
+    pval = chisqprob(ak, 1)
+    return (mi, ak[0][0], pval[0][0])
+
+
+def _test():
+    import doctest
+    doctest.testmod()
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/diagnostics_tsls.py b/pysal/spreg/diagnostics_tsls.py
new file mode 100644
index 0000000..257887b
--- /dev/null
+++ b/pysal/spreg/diagnostics_tsls.py
@@ -0,0 +1,337 @@
+"""
+Diagnostics for two stage least squares regression estimations. 
+        
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Nicholas Malizia nicholas.malizia at asu.edu "
+
+from pysal.common import *
+from scipy.stats import pearsonr
+
+__all__ = ["t_stat", "pr2_aspatial", "pr2_spatial"]
+
+
+def t_stat(reg, z_stat=False):
+    """
+    Calculates the t-statistics (or z-statistics) and associated p-values.
+
+    Parameters
+    ----------
+    reg             : regression object
+                      output instance from a regression model
+    z_stat          : boolean
+                      If True run z-stat instead of t-stat
+
+    Returns
+    -------    
+    ts_result       : list of tuples
+                      each tuple includes value of t statistic (or z
+                      statistic) and associated p-value
+
+    References
+    ----------
+    .. [1] W. Greene. 2003. Econometric Analysis. Prentice Hall, Upper
+       Saddle River.
+
+    Examples
+    --------
+
+    We first need to import the needed modules. Numpy is needed to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis. The ``diagnostics`` module is used for the tests
+    we will show here and the OLS and TSLS are required to run the models on
+    which we will perform the tests.
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spreg.diagnostics as diagnostics
+    >>> from pysal.spreg.ols import OLS
+    >>> from twosls import TSLS
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Before being able to apply the diagnostics, we have to run a model and,
+    for that, we need the input variables. Extract the CRIME column (crime
+    rates) from the DBF file and make it the dependent variable for the
+    regression. Note that PySAL requires this to be an numpy array of shape
+    (n, 1) as opposed to the also common shape of (n, ) that other packages
+    accept.
+
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) and HOVAL (home value) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in, but this can be overridden by passing
+    constant=False.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+
+    Run an OLS regression. Since it is a non-spatial model, all we need is the
+    dependent and the independent variable.
+
+    >>> reg = OLS(y,X)
+
+    Now we can perform a t-statistic on the model:
+
+    >>> testresult = diagnostics.t_stat(reg)
+    >>> print("%12.12f"%testresult[0][0], "%12.12f"%testresult[0][1], "%12.12f"%testresult[1][0], "%12.12f"%testresult[1][1], "%12.12f"%testresult[2][0], "%12.12f"%testresult[2][1])
+    ('14.490373143689', '0.000000000000', '-4.780496191297', '0.000018289595', '-2.654408642718', '0.010874504910')
+
+    We can also use the z-stat. For that, we re-build the model so we consider
+    HOVAL as endogenous, instrument for it using DISCBD and carry out two
+    stage least squares (TSLS) estimation.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T    
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    Once the variables are read as different objects, we are good to run the
+    model.
+
+    >>> reg = TSLS(y, X, yd, q)
+
+    With the output of the TSLS regression, we can perform a z-statistic:
+
+    >>> testresult = diagnostics.t_stat(reg, z_stat=True)
+    >>> print("%12.10f"%testresult[0][0], "%12.10f"%testresult[0][1], "%12.10f"%testresult[1][0], "%12.10f"%testresult[1][1], "%12.10f"%testresult[2][0], "%12.10f"%testresult[2][1])
+    ('5.8452644705', '0.0000000051', '0.3676015668', '0.7131703463', '-1.9946891308', '0.0460767956')
+    """
+
+    k = reg.k           # (scalar) number of ind. vas (includes constant)
+    n = reg.n           # (scalar) number of observations
+    vm = reg.vm         # (array) coefficients of variance matrix (k x k)
+    betas = reg.betas   # (array) coefficients of the regressors (1 x k)
+    variance = vm.diagonal()
+    tStat = betas.reshape(len(betas),) / np.sqrt(variance)
+    ts_result = []
+    for t in tStat:
+        if z_stat:
+            ts_result.append((t, stats.norm.sf(abs(t)) * 2))
+        else:
+            ts_result.append((t, stats.t.sf(abs(t), n - k) * 2))
+    return ts_result
+
+
+def pr2_aspatial(tslsreg):
+    """
+    Calculates the pseudo r^2 for the two stage least squares regression.
+
+    Parameters
+    ----------
+    tslsreg             : two stage least squares regression object
+                          output instance from a two stage least squares
+                          regression model
+
+    Returns
+    -------
+    pr2_result          : float
+                          value of the squared pearson correlation between
+                          the y and tsls-predicted y vectors
+
+    Examples
+    --------
+
+    We first need to import the needed modules. Numpy is needed to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis. The TSLS is required to run the model on
+    which we will perform the tests.
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> from twosls import TSLS
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Before being able to apply the diagnostics, we have to run a model and,
+    for that, we need the input variables. Extract the CRIME column (crime
+    rates) from the DBF file and make it the dependent variable for the
+    regression. Note that PySAL requires this to be an numpy array of shape
+    (n, 1) as opposed to the also common shape of (n, ) that other packages
+    accept.
+
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in, but this can be overridden by passing
+    constant=False.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+
+    In this case, we consider HOVAL (home value) as an endogenous regressor,
+    so we acknowledge that by reading it in a different category.
+
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+
+    In order to properly account for the endogeneity, we have to pass in the
+    instruments. Let us consider DISCBD (distance to the CBD) is a good one:
+
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    Now we are good to run the model. It is an easy one line task.
+
+    >>> reg = TSLS(y, X, yd, q=q)
+
+    In order to perform the pseudo R^2, we pass the regression object to the
+    function and we are done!
+
+    >>> result = pr2_aspatial(reg)
+    >>> print("%1.6f"%result)    
+    0.279361
+
+    """
+
+    y = tslsreg.y
+    predy = tslsreg.predy
+    pr = pearsonr(y, predy)[0]
+    pr2_result = float(pr ** 2)
+    return pr2_result
+
+
+def pr2_spatial(tslsreg):
+    """
+    Calculates the pseudo r^2 for the spatial two stage least squares 
+    regression.
+
+    Parameters
+    ----------
+    stslsreg            : spatial two stage least squares regression object
+                          output instance from a spatial two stage least 
+                          squares regression model
+
+    Returns
+    -------    
+    pr2_result          : float
+                          value of the squared pearson correlation between
+                          the y and stsls-predicted y vectors
+
+    Examples
+    --------
+
+    We first need to import the needed modules. Numpy is needed to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis. The GM_Lag is required to run the model on
+    which we will perform the tests and the ``pysal.spreg.diagnostics`` module
+    contains the function with the test.
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spreg.diagnostics as D
+    >>> from twosls_sp import GM_Lag
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Extract the HOVAL column (home value) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vectors from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in, but this can be overridden by passing
+    constant=False.
+
+    >>> X = np.array(db.by_col("INC"))
+    >>> X = np.reshape(X, (49,1))
+
+    In this case, we consider CRIME (crime rates) as an endogenous regressor,
+    so we acknowledge that by reading it in a different category.
+
+    >>> yd = np.array(db.by_col("CRIME"))
+    >>> yd = np.reshape(yd, (49,1))
+
+    In order to properly account for the endogeneity, we have to pass in the
+    instruments. Let us consider DISCBD (distance to the CBD) is a good one:
+
+    >>> q = np.array(db.by_col("DISCBD"))
+    >>> q = np.reshape(q, (49,1))
+
+    Since this test has a spatial component, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp")) 
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    Now we are good to run the spatial lag model. Make sure you pass all the
+    parameters correctly and, if desired, pass the names of the variables as
+    well so when you print the summary (reg.summary) they are included:
+
+    >>> reg = GM_Lag(y, X, w=w, yend=yd, q=q, w_lags=2, name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')
+
+    Once we have a regression object, we can perform the spatial version of
+    the pseudo R^2. It is as simple as one line!
+
+    >>> result = pr2_spatial(reg)
+    >>> print("%1.6f"%result)
+    0.299649
+
+    """
+
+    y = tslsreg.y
+    predy_e = tslsreg.predy_e
+    pr = pearsonr(y, predy_e)[0]
+    pr2_result = float(pr ** 2)
+    return pr2_result
+
+
+def _test():
+    import doctest
+    doctest.testmod()
+
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/error_sp.py b/pysal/spreg/error_sp.py
new file mode 100644
index 0000000..408d353
--- /dev/null
+++ b/pysal/spreg/error_sp.py
@@ -0,0 +1,1133 @@
+"""
+Spatial Error Models module
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, \
+        Daniel Arribas-Bel darribas at asu.edu, \
+        Pedro V. Amaral pedro.amaral at asu.edu"
+
+import numpy as np
+from numpy import linalg as la
+import ols as OLS
+from pysal import lag_spatial
+from utils import power_expansion, set_endog, iter_msg, sp_att
+from utils import get_A1_hom, get_A2_hom, get_A1_het, optim_moments, get_spFilter, get_lags, _moments2eqs
+from utils import spdot, RegressionPropsY, set_warn
+import twosls as TSLS
+import user_output as USER
+import summary_output as SUMMARY
+
+__all__ = ["GM_Error", "GM_Endog_Error", "GM_Combo"]
+
+
+class BaseGM_Error(RegressionPropsY):
+
+    """
+    GMM method for a spatial error model (note: no consistency checks
+    diagnostics or constant added); based on Kelejian and Prucha 
+    (1998, 1999)[1]_ [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : Sparse matrix
+                   Spatial weights sparse matrix   
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    sig2         : float
+                   Sigma squared used in computations
+
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real
+        Estate Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    >>> import pysal
+    >>> import numpy as np
+    >>> dbf = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array([dbf.by_col('HOVAL')]).T
+    >>> x = np.array([dbf.by_col('INC'), dbf.by_col('CRIME')]).T
+    >>> x = np.hstack((np.ones(y.shape),x))
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read() 
+    >>> w.transform='r'
+    >>> model = BaseGM_Error(y, x, w=w.sparse)
+    >>> np.around(model.betas, decimals=4)
+    array([[ 47.6946],
+           [  0.7105],
+           [ -0.5505],
+           [  0.3257]])
+    """
+
+    def __init__(self, y, x, w):
+
+        # 1a. OLS --> \tilde{betas}
+        ols = OLS.BaseOLS(y=y, x=x)
+        self.n, self.k = ols.x.shape
+        self.x = ols.x
+        self.y = ols.y
+
+        # 1b. GMM --> \tilde{\lambda1}
+        moments = _momentsGM_Error(w, ols.u)
+        lambda1 = optim_moments(moments)
+
+        # 2a. OLS -->\hat{betas}
+        xs = get_spFilter(w, lambda1, self.x)
+        ys = get_spFilter(w, lambda1, self.y)
+        ols2 = OLS.BaseOLS(y=ys, x=xs)
+
+        # Output
+        self.predy = spdot(self.x, ols2.betas)
+        self.u = y - self.predy
+        self.betas = np.vstack((ols2.betas, np.array([[lambda1]])))
+        self.sig2 = ols2.sig2n
+        self.e_filtered = self.u - lambda1 * w * self.u
+
+        self.vm = self.sig2 * ols2.xtxi
+        se_betas = np.sqrt(self.vm.diagonal())
+        self._cache = {}
+
+
+class GM_Error(BaseGM_Error):
+
+    """
+    GMM method for a spatial error model, with results and diagnostics; based
+    on Kelejian and Prucha (1998, 1999)[1]_ [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    vm           : array
+                   Variance covariance matrix (kxk)
+    sig2         : float
+                   Sigma squared used in computations
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real
+        Estate Finance and Economics, 17, 1.
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import pysal
+    >>> import numpy as np
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> dbf = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+
+    Extract the HOVAL column (home values) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array([dbf.by_col('HOVAL')]).T
+
+    Extract CRIME (crime) and INC (income) vectors from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this class adds a vector of ones to the
+    independent variables passed in.
+
+    >>> names_to_extract = ['INC', 'CRIME']
+    >>> x = np.array([dbf.by_col(name) for name in names_to_extract]).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will use
+    ``columbus.gal``, which contains contiguity relationships between the
+    observations in the Columbus dataset we are using throughout this example.
+    Note that, in order to read the file, not only to open it, we need to
+    append '.read()' at the end of the command.
+
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read() 
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows one to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform='r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> model = GM_Error(y, x, w=w, name_y='hoval', name_x=['income', 'crime'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. Note that because we are running the classical GMM error
+    model from 1998/99, the spatial parameter is obtained as a point estimate, so
+    although you get a value for it (there are four coefficients under
+    model.betas), you cannot perform inference on it (there are only three
+    values in model.se_betas).
+
+    >>> print model.name_x
+    ['CONSTANT', 'income', 'crime', 'lambda']
+    >>> np.around(model.betas, decimals=4)
+    array([[ 47.6946],
+           [  0.7105],
+           [ -0.5505],
+           [  0.3257]])
+    >>> np.around(model.std_err, decimals=4)
+    array([ 12.412 ,   0.5044,   0.1785])
+    >>> np.around(model.z_stat, decimals=6) #doctest: +SKIP
+    array([[  3.84261100e+00,   1.22000000e-04],
+           [  1.40839200e+00,   1.59015000e-01],
+           [ -3.08424700e+00,   2.04100000e-03]])
+    >>> round(model.sig2,4)
+    198.5596
+
+    """
+
+    def __init__(self, y, x, w,
+                 vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        x_constant = USER.check_constant(x)
+        BaseGM_Error.__init__(self, y=y, x=x_constant, w=w.sparse)
+        self.title = "SPATIALLY WEIGHTED LEAST SQUARES"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_x.append('lambda')
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Error(reg=self, w=w, vm=vm)
+
+
+class BaseGM_Endog_Error(RegressionPropsY):
+
+    '''
+    GMM method for a spatial error model with endogenous variables (note: no
+    consistency checks, diagnostics or constant added); based on Kelejian and
+    Prucha (1998, 1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : Sparse matrix
+                   Spatial weights sparse matrix 
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    sig2         : float
+                   Sigma squared used in computations
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real Estate
+        Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    >>> import pysal
+    >>> import numpy as np
+    >>> dbf = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array([dbf.by_col('CRIME')]).T
+    >>> x = np.array([dbf.by_col('INC')]).T
+    >>> x = np.hstack((np.ones(y.shape),x))
+    >>> yend = np.array([dbf.by_col('HOVAL')]).T
+    >>> q = np.array([dbf.by_col('DISCBD')]).T
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read() 
+    >>> w.transform='r'
+    >>> model = BaseGM_Endog_Error(y, x, yend, q, w=w.sparse)
+    >>> np.around(model.betas, decimals=4)
+    array([[ 82.573 ],
+           [  0.581 ],
+           [ -1.4481],
+           [  0.3499]])
+
+    '''
+
+    def __init__(self, y, x, yend, q, w):
+
+        # 1a. TSLS --> \tilde{betas}
+        tsls = TSLS.BaseTSLS(y=y, x=x, yend=yend, q=q)
+        self.n, self.k = tsls.z.shape
+        self.x = tsls.x
+        self.y = tsls.y
+        self.yend, self.z = tsls.yend, tsls.z
+
+        # 1b. GMM --> \tilde{\lambda1}
+        moments = _momentsGM_Error(w, tsls.u)
+        lambda1 = optim_moments(moments)
+
+        # 2a. 2SLS -->\hat{betas}
+        xs = get_spFilter(w, lambda1, self.x)
+        ys = get_spFilter(w, lambda1, self.y)
+        yend_s = get_spFilter(w, lambda1, self.yend)
+        tsls2 = TSLS.BaseTSLS(ys, xs, yend_s, h=tsls.h)
+
+        # Output
+        self.betas = np.vstack((tsls2.betas, np.array([[lambda1]])))
+        self.predy = spdot(tsls.z, tsls2.betas)
+        self.u = y - self.predy
+        self.sig2 = float(np.dot(tsls2.u.T, tsls2.u)) / self.n
+        self.e_filtered = self.u - lambda1 * w * self.u
+        self.vm = self.sig2 * tsls2.varb
+        self._cache = {}
+
+
+class GM_Endog_Error(BaseGM_Endog_Error):
+
+    '''
+    GMM method for a spatial error model with endogenous variables, with
+    results and diagnostics; based on Kelejian and Prucha (1998, 1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    sig2         : float
+                   Sigma squared used in computations
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    title         : string
+                    Name of the regression method used
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real Estate
+        Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import pysal
+    >>> import numpy as np
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> dbf = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Extract the CRIME column (crime rates) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array([dbf.by_col('CRIME')]).T
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in.
+
+    >>> x = np.array([dbf.by_col('INC')]).T
+
+    In this case we consider HOVAL (home value) is an endogenous regressor.
+    We tell the model that this is so by passing it in a different parameter
+    from the exogenous variables (x).
+
+    >>> yend = np.array([dbf.by_col('HOVAL')]).T
+
+    Because we have endogenous variables, to obtain a correct estimate of the
+    model, we need to instrument for HOVAL. We use DISCBD (distance to the
+    CBD) for this and hence put it in the instruments parameter, 'q'.
+
+    >>> q = np.array([dbf.by_col('DISCBD')]).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will use
+    ``columbus.gal``, which contains contiguity relationships between the
+    observations in the Columbus dataset we are using throughout this example.
+    Note that, in order to read the file, not only to open it, we need to
+    append '.read()' at the end of the command.
+
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read() 
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows us to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform='r'
+
+    We are all set with the preliminaries, so we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous), the
+    instruments and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> model = GM_Endog_Error(y, x, yend, q, w=w, name_x=['inc'], name_y='crime', name_yend=['hoval'], name_q=['discbd'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. Note that because we are running the classical GMM error
+    model from 1998/99, the spatial parameter is obtained as a point estimate, so
+    although you get a value for it (there are four coefficients under
+    model.betas), you cannot perform inference on it (there are only three
+    values in model.se_betas). Also, this regression uses a two stage least
+    squares estimation method that accounts for the endogeneity created by the
+    endogenous variables included.
+
+    >>> print model.name_z
+    ['CONSTANT', 'inc', 'hoval', 'lambda']
+    >>> np.around(model.betas, decimals=4)
+    array([[ 82.573 ],
+           [  0.581 ],
+           [ -1.4481],
+           [  0.3499]])
+    >>> np.around(model.std_err, decimals=4)
+    array([ 16.1381,   1.3545,   0.7862])
+
+    '''
+
+    def __init__(self, y, x, yend, q, w,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_ds=None):
+
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        x_constant = USER.check_constant(x)
+        BaseGM_Endog_Error.__init__(
+            self, y=y, x=x_constant, w=w.sparse, yend=yend, q=q)
+        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_z = self.name_x + self.name_yend
+        self.name_z.append('lambda')
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Endog_Error(reg=self, w=w, vm=vm)
+
+
+class BaseGM_Combo(BaseGM_Endog_Error):
+
+    """
+    GMM method for a spatial lag and error model, with endogenous variables
+    (note: no consistency checks, diagnostics or constant added); based on 
+    Kelejian and Prucha (1998, 1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : Sparse matrix
+                   Spatial weights sparse matrix  
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    sig2         : float
+                   Sigma squared used in computations
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real Estate
+        Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    >>> w.transform = 'r'
+    >>> w_lags = 1
+    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, None, None, w_lags, True)
+    >>> X = np.hstack((np.ones(y.shape),X))
+
+    Example only with spatial lag
+
+    >>> reg = BaseGM_Combo(y, X, yend=yd2, q=q2, w=w.sparse)
+
+    Print the betas
+
+    >>> print np.around(np.hstack((reg.betas[:-1],np.sqrt(reg.vm.diagonal()).reshape(3,1))),3)
+    [[ 39.059  11.86 ]
+     [ -1.404   0.391]
+     [  0.467   0.2  ]]
+
+    And lambda
+
+    >>> print 'Lamda: ', np.around(reg.betas[-1], 3)
+    Lamda:  [-0.048]
+
+    Example with both spatial lag and other endogenous variables
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, yd, q, w_lags, True)
+    >>> X = np.hstack((np.ones(y.shape),X))
+    >>> reg = BaseGM_Combo(y, X, yd2, q2, w=w.sparse)
+    >>> betas = np.array([['CONSTANT'],['INC'],['HOVAL'],['W_CRIME']])
+    >>> print np.hstack((betas, np.around(np.hstack((reg.betas[:-1], np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)))
+    [['CONSTANT' '50.0944' '14.3593']
+     ['INC' '-0.2552' '0.5667']
+     ['HOVAL' '-0.6885' '0.3029']
+     ['W_CRIME' '0.4375' '0.2314']]
+
+        """
+
+    def __init__(self, y, x, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True):
+
+        BaseGM_Endog_Error.__init__(self, y=y, x=x, w=w, yend=yend, q=q)
+
+
+class GM_Combo(BaseGM_Combo):
+
+    """
+    GMM method for a spatial lag and error model with endogenous variables,
+    with results and diagnostics; based on Kelejian and Prucha (1998,
+    1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+    sig2         : float
+                   Sigma squared used in computations (based on filtered
+                   residuals)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    title         : string
+                    Name of the regression method used
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+        two-stage least squares procedure for estimating a spatial autoregressive
+        model with autoregressive disturbances". The Journal of Real Estate
+        Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+        Estimator for the Autoregressive Parameter in a Spatial Model".
+        International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Extract the CRIME column (crime rates) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows us to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    The Combo class runs an SARAR model, that is a spatial lag+error model.
+    In this case we will run a simple version of that, where we have the
+    spatial effects as well as exogenous variables. Since it is a spatial
+    model, we have to pass in the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Combo(y, X, w=w, name_y='crime', name_x=['income'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. Note that because we are running the classical GMM error
+    model from 1998/99, the spatial parameter is obtained as a point estimate, so
+    although you get a value for it (there are four coefficients under
+    model.betas), you cannot perform inference on it (there are only three
+    values in model.se_betas). Also, this regression uses a two stage least
+    squares estimation method that accounts for the endogeneity created by the
+    spatial lag of the dependent variable. We can check the betas:
+
+    >>> print reg.name_z
+    ['CONSTANT', 'income', 'W_crime', 'lambda']
+    >>> print np.around(np.hstack((reg.betas[:-1],np.sqrt(reg.vm.diagonal()).reshape(3,1))),3)
+    [[ 39.059  11.86 ]
+     [ -1.404   0.391]
+     [  0.467   0.2  ]]
+
+    And lambda:
+
+    >>> print 'lambda: ', np.around(reg.betas[-1], 3)
+    lambda:  [-0.048]
+
+    This class also allows the user to run a spatial lag+error model with the
+    extra feature of including non-spatial endogenous regressors. This means
+    that, in addition to the spatial lag and error, we consider some of the
+    variables on the right-hand side of the equation as endogenous and we
+    instrument for this. As an example, we will include HOVAL (home value) as
+    endogenous and will instrument with DISCBD (distance to the CBD). We first
+    need to read in the variables:
+
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    And then we can run and explore the model analogously to the previous combo:
+
+    >>> reg = GM_Combo(y, X, yd, q, w=w, name_x=['inc'], name_y='crime', name_yend=['hoval'], name_q=['discbd'], name_ds='columbus')
+    >>> print reg.name_z
+    ['CONSTANT', 'inc', 'hoval', 'W_crime', 'lambda']
+    >>> names = np.array(reg.name_z).reshape(5,1)
+    >>> print np.hstack((names[0:4,:], np.around(np.hstack((reg.betas[:-1], np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)))
+    [['CONSTANT' '50.0944' '14.3593']
+     ['inc' '-0.2552' '0.5667']
+     ['hoval' '-0.6885' '0.3029']
+     ['W_crime' '0.4375' '0.2314']]
+
+    >>> print 'lambda: ', np.around(reg.betas[-1], 3)
+    lambda:  [ 0.254]
+
+    """
+
+    def __init__(self, y, x, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_ds=None):
+
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        yend2, q2 = set_endog(y, x, w, yend, q, w_lags, lag_q)
+        x_constant = USER.check_constant(x)
+        BaseGM_Combo.__init__(
+            self, y=y, x=x_constant, w=w.sparse, yend=yend2, q=q2,
+            w_lags=w_lags, lag_q=lag_q)
+        self.rho = self.betas[-2]
+        self.predy_e, self.e_pred, warn = sp_att(w, self.y,
+                                                 self.predy, yend2[:, -1].reshape(self.n, 1), self.rho)
+        set_warn(self, warn)
+        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_yend.append(USER.set_name_yend_sp(self.name_y))
+        self.name_z = self.name_x + self.name_yend
+        self.name_z.append('lambda')
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_q.extend(
+            USER.set_name_q_sp(self.name_x, w_lags, self.name_q, lag_q))
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Combo(reg=self, w=w, vm=vm)
+
+
+def _momentsGM_Error(w, u):
+    try:
+        wsparse = w.sparse
+    except:
+        wsparse = w
+    n = wsparse.shape[0]
+    u2 = np.dot(u.T, u)
+    wu = wsparse * u
+    uwu = np.dot(u.T, wu)
+    wu2 = np.dot(wu.T, wu)
+    wwu = wsparse * wu
+    uwwu = np.dot(u.T, wwu)
+    wwu2 = np.dot(wwu.T, wwu)
+    wuwwu = np.dot(wu.T, wwu)
+    wtw = wsparse.T * wsparse
+    trWtW = np.sum(wtw.diagonal())
+    g = np.array([[u2[0][0], wu2[0][0], uwu[0][0]]]).T / n
+    G = np.array(
+        [[2 * uwu[0][0], -wu2[0][0], n], [2 * wuwwu[0][0], -wwu2[0][0], trWtW],
+         [uwwu[0][0] + wu2[0][0], -wuwwu[0][0], 0.]]) / n
+    return [G, g]
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+
+    _test()
+    import pysal
+    import numpy as np
+    dbf = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
+    y = np.array([dbf.by_col('HOVAL')]).T
+    names_to_extract = ['INC', 'CRIME']
+    x = np.array([dbf.by_col(name) for name in names_to_extract]).T
+    w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
+    w.transform = 'r'
+    model = GM_Error(y, x, w, name_y='hoval',
+                     name_x=['income', 'crime'], name_ds='columbus')
+    print model.summary
diff --git a/pysal/spreg/error_sp_het.py b/pysal/spreg/error_sp_het.py
new file mode 100644
index 0000000..40a7bcb
--- /dev/null
+++ b/pysal/spreg/error_sp_het.py
@@ -0,0 +1,1509 @@
+'''
+Spatial Error with Heteroskedasticity family of models
+'''
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, \
+        Pedro V. Amaral pedro.amaral at asu.edu, \
+        Daniel Arribas-Bel darribas at asu.edu, \
+        David C. Folch david.folch at asu.edu \
+        Ran Wei rwei5 at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+import ols as OLS
+import user_output as USER
+import summary_output as SUMMARY
+import twosls as TSLS
+import utils as UTILS
+from utils import RegressionPropsY, spdot, set_endog, sphstack
+from scipy import sparse as SP
+from pysal import lag_spatial
+
+__all__ = ["GM_Error_Het", "GM_Endog_Error_Het", "GM_Combo_Het"]
+
+
+class BaseGM_Error_Het(RegressionPropsY):
+
+    """
+    GMM method for a spatial error model with heteroskedasticity (note: no
+    consistency checks, diagnostics or constant added); based on Arraiz
+    et al [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : Sparse matrix
+                   Spatial weights sparse matrix
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al.
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    xtx          : float
+                   X'X
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("CRIME"))
+    >>> X = np.array(X).T
+    >>> X = np.hstack((np.ones(y.shape),X))
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    >>> w.transform = 'r'
+    >>> reg = BaseGM_Error_Het(y, X, w.sparse, step1c=True)
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[ 47.9963  11.479 ]
+     [  0.7105   0.3681]
+     [ -0.5588   0.1616]
+     [  0.4118   0.168 ]]
+    """
+
+    def __init__(self, y, x, w,
+                 max_iter=1, epsilon=0.00001, step1c=False):
+
+        self.step1c = step1c
+        # 1a. OLS --> \tilde{betas}
+        ols = OLS.BaseOLS(y=y, x=x)
+        self.x, self.y, self.n, self.k, self.xtx = ols.x, ols.y, ols.n, ols.k, ols.xtx
+        # A1 matrix used by the heteroskedasticity-robust moment equations.
+        wA1 = UTILS.get_A1_het(w)
+
+        # 1b. GMM --> \tilde{\lambda1}
+        moments = UTILS._moments2eqs(wA1, w, ols.u)
+        lambda1 = UTILS.optim_moments(moments)
+
+        if step1c:
+            # 1c. GMM --> \tilde{\lambda2}
+            sigma = get_psi_sigma(w, ols.u, lambda1)
+            vc1 = get_vc_het(w, wA1, sigma)
+            lambda2 = UTILS.optim_moments(moments, vc1)
+        else:
+            lambda2 = lambda1
+        lambda_old = lambda2
+
+        # Alternate step 2a (OLS on spatially filtered data) and step 2b
+        # (GMM update of lambda) until the change in lambda falls below
+        # epsilon or max_iter iterations have been performed.
+        self.iteration, eps = 0, 1
+        while self.iteration < max_iter and eps > epsilon:
+            # 2a. reg -->\hat{betas}
+            xs = UTILS.get_spFilter(w, lambda_old, self.x)
+            ys = UTILS.get_spFilter(w, lambda_old, self.y)
+            ols_s = OLS.BaseOLS(y=ys, x=xs)
+            self.predy = spdot(self.x, ols_s.betas)
+            self.u = self.y - self.predy
+
+            # 2b. GMM --> \hat{\lambda}
+            sigma_i = get_psi_sigma(w, self.u, lambda_old)
+            vc_i = get_vc_het(w, wA1, sigma_i)
+            moments_i = UTILS._moments2eqs(wA1, w, self.u)
+            lambda3 = UTILS.optim_moments(moments_i, vc_i)
+            eps = abs(lambda3 - lambda_old)
+            lambda_old = lambda3
+            self.iteration += 1
+
+        self.iter_stop = UTILS.iter_msg(self.iteration, max_iter)
+
+        # Final variance-covariance matrix and spatially filtered residuals
+        # evaluated at the converged lambda.
+        sigma = get_psi_sigma(w, self.u, lambda3)
+        vc3 = get_vc_het(w, wA1, sigma)
+        self.vm = get_vm_het(moments_i[0], lambda3, self, w, vc3)
+        self.betas = np.vstack((ols_s.betas, lambda3))
+        self.e_filtered = self.u - lambda3 * w * self.u
+        self._cache = {}
+
+
+class GM_Error_Het(BaseGM_Error_Het):
+
+    """
+    GMM method for a spatial error model with heteroskedasticity, with results
+    and diagnostics; based on Arraiz et al [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : pysal W object
+                   Spatial weights object
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    vm           : array
+                   Variance covariance matrix (kxk)
+    std_err      : array
+                   1xk array of standard errors of the betas
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    xtx          : float
+                   X'X
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+        Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+        Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+        592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.
+
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+
+    Extract the HOVAL column (home values) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) and CRIME (crime) vectors from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this class adds a vector of ones to the
+    independent variables passed in.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("CRIME"))
+    >>> X = np.array(X).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows interpreting the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Error_Het(y, X, w=w, step1c=True, name_y='home value', name_x=['income', 'crime'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers an error model that explicitly accounts
+    for heteroskedasticity and that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter.
+
+    >>> print reg.name_x
+    ['CONSTANT', 'income', 'crime', 'lambda']
+
+    Hence, we find the same number of betas as of standard errors,
+    which we calculate taking the square root of the diagonal of the
+    variance-covariance matrix:
+
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[ 47.9963  11.479 ]
+     [  0.7105   0.3681]
+     [ -0.5588   0.1616]
+     [  0.4118   0.168 ]]
+
+    """
+
+    def __init__(self, y, x, w,
+                 max_iter=1, epsilon=0.00001, step1c=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None):
+
+        # Validate user input, then delegate estimation to the base class.
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        # Prepend a constant column before handing off to the base class.
+        x_constant = USER.check_constant(x)
+        BaseGM_Error_Het.__init__(
+            self, y, x_constant, w.sparse, max_iter=max_iter,
+            step1c=step1c, epsilon=epsilon)
+        self.title = "SPATIALLY WEIGHTED LEAST SQUARES (HET)"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        # lambda is reported alongside the betas in the summary output.
+        self.name_x.append('lambda')
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Error_Het(reg=self, w=w, vm=vm)
+
+
+class BaseGM_Endog_Error_Het(RegressionPropsY):
+
+    """
+    GMM method for a spatial error model with heteroskedasticity and
+    endogenous variables (note: no consistency checks, diagnostics or constant
+    added); based on Arraiz et al [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note:
+                   this should not contain any variables from x)
+    w            : Sparse matrix
+                   Spatial weights sparse matrix
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al.
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    hth          : float
+                   H'H
+
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> X = np.hstack((np.ones(y.shape),X))
+    >>> yd = []
+    >>> yd.append(db.by_col("CRIME"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    >>> w.transform = 'r'
+    >>> reg = BaseGM_Endog_Error_Het(y, X, yd, q, w=w.sparse, step1c=True)
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[ 55.3971  28.8901]
+     [  0.4656   0.7731]
+     [ -0.6704   0.468 ]
+     [  0.4114   0.1777]]
+    """
+
+    def __init__(self, y, x, yend, q, w,
+                 max_iter=1, epsilon=0.00001,
+                 step1c=False, inv_method='power_exp'):
+
+        self.step1c = step1c
+        # 1a. reg --> \tilde{betas}
+        tsls = TSLS.BaseTSLS(y=y, x=x, yend=yend, q=q)
+        self.x, self.z, self.h, self.y = tsls.x, tsls.z, tsls.h, tsls.y
+        self.yend, self.q, self.n, self.k, self.hth = tsls.yend, tsls.q, tsls.n, tsls.k, tsls.hth
+        # A1 matrix used by the heteroskedasticity-robust moment equations.
+        wA1 = UTILS.get_A1_het(w)
+
+        # 1b. GMM --> \tilde{\lambda1}
+        moments = UTILS._moments2eqs(wA1, w, tsls.u)
+        lambda1 = UTILS.optim_moments(moments)
+
+        if step1c:
+            # 1c. GMM --> \tilde{\lambda2}
+            self.u = tsls.u
+            zs = UTILS.get_spFilter(w, lambda1, self.z)
+            vc1 = get_vc_het_tsls(w, wA1, self, lambda1,
+                                  tsls.pfora1a2, zs, inv_method, filt=False)
+            lambda2 = UTILS.optim_moments(moments, vc1)
+        else:
+            lambda2 = lambda1
+        lambda_old = lambda2
+
+        # Alternate step 2a (TSLS on spatially filtered data) and step 2b
+        # (GMM update of lambda) until the change in lambda falls below
+        # epsilon or max_iter iterations have been performed.
+        self.iteration, eps = 0, 1
+        while self.iteration < max_iter and eps > epsilon:
+            # 2a. reg -->\hat{betas}
+            xs = UTILS.get_spFilter(w, lambda_old, self.x)
+            ys = UTILS.get_spFilter(w, lambda_old, self.y)
+            yend_s = UTILS.get_spFilter(w, lambda_old, self.yend)
+            tsls_s = TSLS.BaseTSLS(ys, xs, yend_s, h=self.h)
+            self.predy = spdot(self.z, tsls_s.betas)
+            self.u = self.y - self.predy
+
+            # 2b. GMM --> \hat{\lambda}
+            vc2 = get_vc_het_tsls(w, wA1, self, lambda_old,
+                                  tsls_s.pfora1a2, sphstack(xs, yend_s), inv_method)
+            moments_i = UTILS._moments2eqs(wA1, w, self.u)
+            lambda3 = UTILS.optim_moments(moments_i, vc2)
+            eps = abs(lambda3 - lambda_old)
+            lambda_old = lambda3
+            self.iteration += 1
+
+        self.iter_stop = UTILS.iter_msg(self.iteration, max_iter)
+
+        # Final variance-covariance matrix and spatially filtered residuals
+        # evaluated at the converged lambda.
+        zs = UTILS.get_spFilter(w, lambda3, self.z)
+        P = get_P_hat(self, tsls.hthi, zs)
+        vc3 = get_vc_het_tsls(w, wA1, self, lambda3, P,
+                              zs, inv_method, save_a1a2=True)
+        self.vm = get_Omega_GS2SLS(w, lambda3, self, moments_i[0], vc3, P)
+        self.betas = np.vstack((tsls_s.betas, lambda3))
+        self.e_filtered = self.u - lambda3 * w * self.u
+        self._cache = {}
+
+
+class GM_Endog_Error_Het(BaseGM_Endog_Error_Het):
+
+    """
+    GMM method for a spatial error model with heteroskedasticity and
+    endogenous variables, with results and diagnostics; based on Arraiz et al
+    [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object   
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al. 
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in ouput
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    title         : string
+                    Name of the regression method used
+    hth          : float
+                   H'H
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+        Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+        Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+        592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+
+    Extract the HOVAL column (home values) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this class adds a vector of ones to the
+    independent variables passed in.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+
+    In this case we consider CRIME (crime rates) is an endogenous regressor.
+    We tell the model that this is so by passing it in a different parameter
+    from the exogenous variables (x).
+
+    >>> yd = []
+    >>> yd.append(db.by_col("CRIME"))
+    >>> yd = np.array(yd).T
+
+    Because we have endogenous variables, to obtain a correct estimate of the
+    model, we need to instrument for CRIME. We use DISCBD (distance to the
+    CBD) for this and hence put it in the instruments parameter, 'q'.
+
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows interpreting the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous), the
+    instruments and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Endog_Error_Het(y, X, yd, q, w=w, step1c=True, name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers an error model that explicitly accounts
+    for heteroskedasticity and that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter. Hence, we find the same number of betas as of standard errors,
+    which we calculate taking the square root of the diagonal of the
+    variance-covariance matrix:
+
+    >>> print reg.name_z
+    ['CONSTANT', 'inc', 'crime', 'lambda']
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[ 55.3971  28.8901]
+     [  0.4656   0.7731]
+     [ -0.6704   0.468 ]
+     [  0.4114   0.1777]]
+
+    """
+
+    def __init__(self, y, x, yend, q, w,
+                 max_iter=1, epsilon=0.00001,
+                 step1c=False, inv_method='power_exp',
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_ds=None):
+
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        x_constant = USER.check_constant(x)
+        BaseGM_Endog_Error_Het.__init__(self, y=y, x=x_constant, yend=yend,
+                                        q=q, w=w.sparse, max_iter=max_iter,
+                                        step1c=step1c, epsilon=epsilon, inv_method=inv_method)
+        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HET)"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_z = self.name_x + self.name_yend
+        self.name_z.append('lambda')  # listing lambda last
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Endog_Error_Het(reg=self, w=w, vm=vm)
+
+
+class BaseGM_Combo_Het(BaseGM_Endog_Error_Het):
+
+    """
+    GMM method for a spatial lag and error model with heteroskedasticity and
+    endogenous variables (note: no consistency checks, diagnostics or constant
+    added); based on Arraiz et al [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : Sparse matrix
+                   Spatial weights sparse matrix 
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, if w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al. 
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    hth          : float
+                   H'H
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    >>> w.transform = 'r'
+    >>> w_lags = 1
+    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, None, None, w_lags, True)
+    >>> X = np.hstack((np.ones(y.shape),X))
+
+    Example only with spatial lag
+
+    >>> reg = BaseGM_Combo_Het(y, X, yend=yd2, q=q2, w=w.sparse, step1c=True)
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[  9.9753  14.1435]
+     [  1.5742   0.374 ]
+     [  0.1535   0.3978]
+     [  0.2103   0.3924]]
+
+    Example with both spatial lag and other endogenous variables
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> yd = []
+    >>> yd.append(db.by_col("CRIME"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, yd, q, w_lags, True)
+    >>> X = np.hstack((np.ones(y.shape),X))
+    >>> reg = BaseGM_Combo_Het(y, X, yd2, q2, w=w.sparse, step1c=True)
+    >>> betas = np.array([['CONSTANT'],['inc'],['crime'],['lag_hoval'],['lambda']])
+    >>> print np.hstack((betas, np.around(np.hstack((reg.betas, np.sqrt(reg.vm.diagonal()).reshape(5,1))),5)))
+    [['CONSTANT' '113.91292' '64.38815']
+     ['inc' '-0.34822' '1.18219']
+     ['crime' '-1.35656' '0.72482']
+     ['lag_hoval' '-0.57657' '0.75856']
+     ['lambda' '0.65608' '0.15719']]
+    """
+
+    def __init__(self, y, x, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True,
+                 max_iter=1, epsilon=0.00001,
+                 step1c=False, inv_method='power_exp'):
+
+        BaseGM_Endog_Error_Het.__init__(
+            self, y=y, x=x, w=w, yend=yend, q=q, max_iter=max_iter,
+            step1c=step1c, epsilon=epsilon, inv_method=inv_method)
+
+
+class GM_Combo_Het(BaseGM_Combo_Het):
+
+    """
+    GMM method for a spatial lag and error model with heteroskedasticity and
+    endogenous variables, with results and diagnostics; based on Arraiz et al
+    [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, if w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al. 
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    title         : string
+                    Name of the regression method used
+    hth          : float
+                   H'H
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+        Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+        Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+        592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+
+    Extract the HOVAL column (home values) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this class adds a vector of ones to the
+    independent variables passed in.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows us to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    The Combo class runs an SARAR model, that is a spatial lag+error model.
+    In this case we will run a simple version of that, where we have the
+    spatial effects as well as exogenous variables. Since it is a spatial
+    model, we have to pass in the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Combo_Het(y, X, w=w, step1c=True, name_y='hoval', name_x=['income'], name_ds='columbus')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers a spatial lag and error model that explicitly accounts
+    for heteroskedasticity and that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter. Hence, we find the same number of betas as of standard errors,
+    which we calculate taking the square root of the diagonal of the
+    variance-covariance matrix:
+
+    >>> print reg.name_z
+    ['CONSTANT', 'income', 'W_hoval', 'lambda']
+    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
+    [[  9.9753  14.1435]
+     [  1.5742   0.374 ]
+     [  0.1535   0.3978]
+     [  0.2103   0.3924]]
+
+    This class also allows the user to run a spatial lag+error model with the
+    extra feature of including non-spatial endogenous regressors. This means
+    that, in addition to the spatial lag and error, we consider some of the
+    variables on the right-hand side of the equation as endogenous and we
+    instrument for this. As an example, we will include CRIME (crime rates) as
+    endogenous and will instrument with DISCBD (distance to the CBD). We first
+    need to read in the variables:
+
+    >>> yd = []
+    >>> yd.append(db.by_col("CRIME"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    And then we can run and explore the model analogously to the previous combo:
+
+    >>> reg = GM_Combo_Het(y, X, yd, q, w=w, step1c=True, name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')
+    >>> print reg.name_z
+    ['CONSTANT', 'inc', 'crime', 'W_hoval', 'lambda']
+    >>> print np.round(reg.betas,4)
+    [[ 113.9129]
+     [  -0.3482]
+     [  -1.3566]
+     [  -0.5766]
+     [   0.6561]]
+
+    """
+
+    def __init__(self, y, x, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True,
+                 max_iter=1, epsilon=0.00001,
+                 step1c=False, inv_method='power_exp',
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_ds=None):
+
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        yend2, q2 = set_endog(y, x, w, yend, q, w_lags, lag_q)
+        x_constant = USER.check_constant(x)
+        BaseGM_Combo_Het.__init__(self, y=y, x=x_constant, yend=yend2, q=q2,
+                                  w=w.sparse, w_lags=w_lags,
+                                  max_iter=max_iter, step1c=step1c, lag_q=lag_q,
+                                  epsilon=epsilon, inv_method=inv_method)
+        self.rho = self.betas[-2]
+        self.predy_e, self.e_pred, warn = UTILS.sp_att(w, self.y, self.predy,
+                                                       yend2[:, -1].reshape(self.n, 1), self.rho)
+        UTILS.set_warn(self, warn)
+        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HET)"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_yend.append(USER.set_name_yend_sp(self.name_y))
+        self.name_z = self.name_x + self.name_yend
+        self.name_z.append('lambda')  # listing lambda last
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_q.extend(
+            USER.set_name_q_sp(self.name_x, w_lags, self.name_q, lag_q))
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.GM_Combo_Het(reg=self, w=w, vm=vm)
+
+
+# Functions
+
+def get_psi_sigma(w, u, lamb):
+    """
+    Computes the Sigma matrix needed to compute Psi
+
+    Parameters
+    ----------
+    w           : Sparse matrix
+                  Spatial weights sparse matrix
+    u           : array
+                  nx1 vector of residuals
+    lamb        : float
+                  Lambda
+
+    """
+
+    e = (u - lamb * (w * u)) ** 2
+    E = SP.dia_matrix((e.flat, 0), shape=(w.shape[0], w.shape[0]))
+    return E.tocsr()
+
+
+def get_vc_het(w, wA1, E):
+    """
+    Computes the VC matrix Psi based on lambda as in Arraiz et al [1]_:
+
+    .. math::
+
+        \tilde{\Psi} = \left(\begin{array}{c c}
+                            \psi_{11} & \psi_{12} \\
+                            \psi_{21} & \psi_{22} \\
+                      \end{array} \right)
+
+    NOTE: psi12=psi21
+
+    ...
+
+    Parameters
+    ----------
+
+    w           : Sparse matrix
+                  Spatial weights sparse matrix
+
+    E           : sparse matrix
+                  Sigma
+
+    Returns
+    -------
+
+    Psi         : array
+                  2x2 array with estimator of the variance-covariance matrix
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    """
+    aPatE = 2 * wA1 * E
+    wPwtE = (w + w.T) * E
+
+    psi11 = aPatE * aPatE
+    psi12 = aPatE * wPwtE
+    psi22 = wPwtE * wPwtE
+    psi = map(np.sum, [psi11.diagonal(), psi12.diagonal(), psi22.diagonal()])
+    return np.array([[psi[0], psi[1]], [psi[1], psi[2]]]) / (2. * w.shape[0])
+
+
+def get_vm_het(G, lamb, reg, w, psi):
+    """
+    Computes the variance-covariance matrix Omega as in Arraiz et al [1]_:
+    ...
+
+    Parameters
+    ----------
+
+    G           : array
+                  G from moments equations
+
+    lamb        : float
+                  Final lambda from spHetErr estimation
+
+    reg         : regression object
+                  output instance from a regression model
+
+    u           : array
+                  nx1 vector of residuals
+
+    w           : Sparse matrix
+                  Spatial weights sparse matrix
+
+    psi         : array
+                  2x2 array with the variance-covariance matrix of the moment equations
+
+    Returns
+    -------
+
+    vm          : array
+                  (k+1)x(k+1) array with the variance-covariance matrix of the parameters
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    """
+
+    J = np.dot(G, np.array([[1], [2 * lamb]]))
+    Zs = UTILS.get_spFilter(w, lamb, reg.x)
+    ZstEZs = spdot((Zs.T * get_psi_sigma(w, reg.u, lamb)), Zs)
+    ZsZsi = la.inv(spdot(Zs.T, Zs))
+    omega11 = w.shape[0] * np.dot(np.dot(ZsZsi, ZstEZs), ZsZsi)
+    omega22 = la.inv(np.dot(np.dot(J.T, la.inv(psi)), J))
+    zero = np.zeros((reg.k, 1), float)
+    vm = np.vstack((np.hstack((omega11, zero)), np.hstack((zero.T, omega22)))) / \
+        w.shape[0]
+    return vm
+
+
+def get_P_hat(reg, hthi, zf):
+    """
+    P_hat from Appendix B, used for a1 a2, using filtered Z
+    """
+    htzf = spdot(reg.h.T, zf)
+    P1 = spdot(hthi, htzf)
+    P2 = spdot(htzf.T, P1)
+    P2i = la.inv(P2)
+    return reg.n * np.dot(P1, P2i)
+
+
+def get_a1a2(w, wA1, reg, lambdapar, P, zs, inv_method, filt):
+    """
+    Computes the a1 in psi assuming residuals come from original regression
+    ...
+
+    Parameters
+    ----------
+
+    w           : Sparse matrix
+                  Spatial weights sparse matrix 
+
+    reg         : TSLS
+                  Two stage least squares regression instance
+
+    lambdapar   : float
+                  Spatial autoregressive parameter
+
+    Returns
+    -------
+
+    [a1, a2]    : list
+                  a1 and a2 are two nx1 array in psi equation
+
+    References
+    ----------
+
+    .. [1] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    """
+    us = UTILS.get_spFilter(w, lambdapar, reg.u)
+    alpha1 = (-2.0 / w.shape[0]) * (np.dot(spdot(zs.T, wA1), us))
+    alpha2 = (-1.0 / w.shape[0]) * (np.dot(spdot(zs.T, (w + w.T)), us))
+    a1 = np.dot(spdot(reg.h, P), alpha1)
+    a2 = np.dot(spdot(reg.h, P), alpha2)
+    if not filt:
+        a1 = UTILS.inverse_prod(
+            w, a1, lambdapar, post_multiply=True, inv_method=inv_method).T
+        a2 = UTILS.inverse_prod(
+            w, a2, lambdapar, post_multiply=True, inv_method=inv_method).T
+    return [a1, a2]
+
+
+def get_vc_het_tsls(w, wA1, reg, lambdapar, P, zs, inv_method, filt=True, save_a1a2=False):
+
+    sigma = get_psi_sigma(w, reg.u, lambdapar)
+    vc1 = get_vc_het(w, wA1, sigma)
+    a1, a2 = get_a1a2(w, wA1, reg, lambdapar, P, zs, inv_method, filt)
+    a1s = a1.T * sigma
+    a2s = a2.T * sigma
+    psi11 = float(np.dot(a1s, a1))
+    psi12 = float(np.dot(a1s, a2))
+    psi21 = float(np.dot(a2s, a1))
+    psi22 = float(np.dot(a2s, a2))
+    psi0 = np.array([[psi11, psi12], [psi21, psi22]]) / w.shape[0]
+    if save_a1a2:
+        psi = (vc1 + psi0, a1, a2)
+    else:
+        psi = vc1 + psi0
+    return psi
+
+
+def get_Omega_GS2SLS(w, lamb, reg, G, psi, P):
+    """
+    Computes the variance-covariance matrix for GS2SLS:
+    ...
+
+    Parameters
+    ----------
+
+    w           : Sparse matrix
+                  Spatial weights sparse matrix 
+
+    lamb        : float
+                  Spatial autoregressive parameter
+
+    reg         : GSTSLS
+                  Generalized Spatial two stage least squares regression instance
+    G           : array
+                  Moments
+    psi         : array
+                  Weighting matrix
+
+    Returns
+    -------
+
+    omega       : array
+                  (k+1)x(k+1)                 
+    """
+    psi, a1, a2 = psi
+    sigma = get_psi_sigma(w, reg.u, lamb)
+    psi_dd_1 = (1.0 / w.shape[0]) * reg.h.T * sigma
+    psi_dd = spdot(psi_dd_1, reg.h)
+    psi_dl = spdot(psi_dd_1, np.hstack((a1, a2)))
+    psi_o = np.hstack(
+        (np.vstack((psi_dd, psi_dl.T)), np.vstack((psi_dl, psi))))
+    psii = la.inv(psi)
+
+    j = np.dot(G, np.array([[1.], [2 * lamb]]))
+    jtpsii = np.dot(j.T, psii)
+    jtpsiij = np.dot(jtpsii, j)
+    jtpsiiji = la.inv(jtpsiij)
+    omega_1 = np.dot(jtpsiiji, jtpsii)
+    omega_2 = np.dot(np.dot(psii, j), jtpsiiji)
+    om_1_s = omega_1.shape
+    om_2_s = omega_2.shape
+    p_s = P.shape
+    omega_left = np.hstack((np.vstack((P.T, np.zeros((om_1_s[0], p_s[0])))),
+                            np.vstack((np.zeros((p_s[1], om_1_s[1])), omega_1))))
+    omega_right = np.hstack((np.vstack((P, np.zeros((om_2_s[0], p_s[1])))),
+                             np.vstack((np.zeros((p_s[0], om_2_s[1])), omega_2))))
+    omega = np.dot(np.dot(omega_left, psi_o), omega_right)
+    return omega / w.shape[0]
+
+
+def _test():
+    import doctest
+    doctest.testmod()
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/error_sp_het_regimes.py b/pysal/spreg/error_sp_het_regimes.py
new file mode 100644
index 0000000..1295adf
--- /dev/null
+++ b/pysal/spreg/error_sp_het_regimes.py
@@ -0,0 +1,1481 @@
+'''
+Spatial Error with Heteroskedasticity and Regimes family of models
+'''
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu"
+
+import numpy as np
+import multiprocessing as mp
+import user_output as USER
+import summary_output as SUMMARY
+import utils as UTILS
+import regimes as REGI
+from ols import BaseOLS
+from twosls import BaseTSLS
+from error_sp_het import BaseGM_Error_Het, BaseGM_Endog_Error_Het, get_psi_sigma, get_vc_het, get_vm_het, get_P_hat, get_a1a2, get_vc_het_tsls, get_Omega_GS2SLS
+from utils import RegressionPropsY, spdot, set_endog, sphstack, set_warn, sp_att
+from scipy import sparse as SP
+from pysal import lag_spatial
+from platform import system
+
+
class GM_Error_Het_Regimes(RegressionPropsY, REGI.Regimes_Frame):

    """
    GMM method for a spatial error model with heteroskedasticity and regimes;
    based on Arraiz et al [1]_, following Anselin [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    w            : pysal W object
                   Spatial weights object
    constant_regi: ['one', 'many']
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime (default)
    cols2regi    : list, 'all'
                   Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all' (default), all the variables vary by regime.
    regime_err_sep: boolean
                   If True, a separate regression is run for each regime.
    regime_lag_sep : boolean
                   Always False, kept for consistency, ignored.
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    step1c       : boolean
                   If True, then include Step 1c from Arraiz et al.
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    cores        : boolean
                   Specifies if multiprocessing is to be used
                   Default: no multiprocessing, cores = False
                   Note: Multiprocessing may not work on all platforms.
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regime variable for use in the output

    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    std_err      : array
                   1xk array of standard errors of the betas
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regime variable for use in the output
    title        : string
                   Name of the regression method used
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: ['one', 'many']
                   Ignored if regimes=False. Constant option for regimes.
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime
    cols2regi    : list, 'all'
                   Ignored if regimes=False. Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all', all the variables vary by regime.
    regime_err_sep : boolean
                   If True, a separate regression is run for each regime.
    kr           : int
                   Number of variables/columns to be "regimized" or subject
                   to change by regime. These will result in one parameter
                   estimate by regime for each variable (i.e. nr parameters per
                   variable)
    kf           : int
                   Number of variables/columns to be considered fixed or
                   global across regimes and hence only obtain one parameter
                   estimate
    nr           : int
                   Number of different regimes in the 'regimes' list
    multi         : dictionary
                    Only available when multiple regressions are estimated,
                    i.e. when regime_err_sep=True and no variable is fixed
                    across regimes.
                    Contains all attributes of each individual regression

    References
    ----------

    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
    592-614.

    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
    This is the DBF associated with the NAT shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')

    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y_var = 'HR90'
    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)

    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
    the DBF to be used as independent variables in the regression. Other variables
    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2','...]
    Note that PySAL requires this to be an nxj numpy array, where j is the
    number of independent variables (not including a constant). By default
    this model adds a vector of ones to the independent variables passed in.

    >>> x_var = ['PS90','UE90']
    >>> x = np.array([db.by_col(name) for name in x_var]).T

    The different regimes in this data are given according to the North and
    South dummy (SOUTH).

    >>> r_var = 'SOUTH'
    >>> regimes = db.by_col(r_var)

    Since we want to run a spatial error model, we need to specify
    the spatial weights matrix that includes the spatial configuration of the
    observations. To do that, we can open an already existing gal file or
    create a new one. In this case, we will create one from ``NAT.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We are all set with the preliminaries, we are good to run the model. In this
    case, we will need the variables and the weights matrix. If we want to
    have the names of the variables printed in the output summary, we will
    have to pass them in as well, although this is optional.

    >>> reg = GM_Error_Het_Regimes(y, x, regimes, w=w, step1c=True, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT.dbf')

    Once we have run the model, we can explore a little bit the output. The
    regression object we have created has many attributes so take your time to
    discover them. This class offers an error model that explicitly accounts
    for heteroskedasticity and that unlike the models from
    ``pysal.spreg.error_sp``, it allows for inference on the spatial
    parameter. Alternatively, we can have a summary of the
    output by typing: model.summary

    >>> print reg.name_x
    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', 'lambda']
    >>> np.around(reg.betas, decimals=6)
    array([[ 0.009121],
           [ 0.812973],
           [ 0.549355],
           [ 5.00279 ],
           [ 1.200929],
           [ 0.614681],
           [ 0.429277]])
    >>> np.around(reg.std_err, decimals=6)
    array([ 0.355844,  0.221743,  0.059276,  0.686764,  0.35843 ,  0.092788,
            0.02524 ])

    """

    def __init__(self, y, x, regimes, w, max_iter=1, epsilon=0.00001, step1c=False,
                 constant_regi='many', cols2regi='all', regime_err_sep=False,
                 regime_lag_sep=False,
                 cores=False, vm=False, name_y=None, name_x=None, name_w=None,
                 name_ds=None, name_regimes=None):

        # Input validation; check_arrays returns the number of observations
        # shared by y and x.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        self.constant_regi = constant_regi
        self.cols2regi = cols2regi
        self.regime_err_sep = regime_err_sep
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_w = USER.set_name_w(name_w, w)
        # NOTE(review): set_name_ds is reused here for the regimes name --
        # presumably intentional; confirm there is no set_name_regimes helper.
        self.name_regimes = USER.set_name_ds(name_regimes)
        self.n, self.step1c = n, step1c
        self.y = y

        # Append the constant column and keep the pre-regime-expansion
        # variable names for later reporting.
        x_constant = USER.check_constant(x)
        name_x = USER.set_name_x(name_x, x)
        self.name_x_r = name_x

        cols2regi = REGI.check_cols2regi(constant_regi, cols2regi, x)
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
        self.regime_err_sep = regime_err_sep

        if regime_err_sep == True:
            # Fully separate regression per regime; only valid when every
            # column varies by regime.
            if set(cols2regi) == set([True]):
                self._error_regimes_multi(y, x, regimes, w, cores,
                                          max_iter, epsilon, step1c,
                                          cols2regi, vm, name_x)
            else:
                # NOTE(review): "accross" is a typo in this error message
                # (left unchanged here to preserve behavior).
                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
        else:
            # Expand x into the regime-wise (block) design matrix.
            self.x, self.name_x = REGI.Regimes_Frame.__init__(self, x_constant,
                                                              regimes, constant_regi=None, cols2regi=cols2regi, names=name_x)
            # 1a. OLS --> \tilde{\beta}
            ols = BaseOLS(y=y, x=self.x)
            self.k = ols.x.shape[1]
            wA1 = UTILS.get_A1_het(w.sparse)

            # 1b. GMM --> \tilde{\lambda1}
            moments = UTILS._moments2eqs(wA1, w.sparse, ols.u)
            lambda1 = UTILS.optim_moments(moments)

            if step1c:
                # 1c. GMM --> \tilde{\lambda2}
                sigma = get_psi_sigma(w.sparse, ols.u, lambda1)
                vc1 = get_vc_het(w.sparse, wA1, sigma)
                lambda2 = UTILS.optim_moments(moments, vc1)
            else:
                lambda2 = lambda1
            lambda_old = lambda2

            # Iterate steps 2a/2b until the change in lambda drops to epsilon
            # or max_iter is reached.  NOTE(review): a caller passing
            # max_iter=0 would skip the loop and leave lambda3/ols_s
            # undefined below.
            self.iteration, eps = 0, 1
            while self.iteration < max_iter and eps > epsilon:
                # 2a. reg -->\hat{betas}
                # Spatially filter y and x at the current lambda, re-expand x
                # by regime, and re-estimate betas on the filtered data.
                xs = UTILS.get_spFilter(w, lambda_old, x_constant)
                ys = UTILS.get_spFilter(w, lambda_old, y)
                xs = REGI.Regimes_Frame.__init__(self, xs,
                                                 regimes, constant_regi=None, cols2regi=cols2regi)[0]
                ols_s = BaseOLS(y=ys, x=xs)
                # Predictions and residuals are taken on the unfiltered data.
                self.predy = spdot(self.x, ols_s.betas)
                self.u = self.y - self.predy

                # 2b. GMM --> \hat{\lambda}
                sigma_i = get_psi_sigma(w.sparse, self.u, lambda_old)
                vc_i = get_vc_het(w.sparse, wA1, sigma_i)
                moments_i = UTILS._moments2eqs(wA1, w.sparse, self.u)
                lambda3 = UTILS.optim_moments(moments_i, vc_i)
                eps = abs(lambda3 - lambda_old)
                lambda_old = lambda3
                self.iteration += 1

            self.iter_stop = UTILS.iter_msg(self.iteration, max_iter)

            # Final variance-covariance matrix, stacked coefficients
            # (betas plus lambda) and spatially filtered residuals at the
            # converged lambda.
            sigma = get_psi_sigma(w.sparse, self.u, lambda3)
            vc3 = get_vc_het(w.sparse, wA1, sigma)
            self.vm = get_vm_het(moments_i[0], lambda3, self, w.sparse, vc3)
            self.betas = np.vstack((ols_s.betas, lambda3))
            self.e_filtered = self.u - lambda3 * lag_spatial(w, self.u)
            self.title = "SPATIALLY WEIGHTED LEAST SQUARES (HET) - REGIMES"
            self.name_x.append('lambda')
            # lambda is reported as a fixed (non-regime) parameter.
            self.kf += 1
            self.chow = REGI.Chow(self)
            self._cache = {}

            SUMMARY.GM_Error_Het(reg=self, w=w, vm=vm, regimes=True)

    def _error_regimes_multi(self, y, x, regimes, w, cores,
                             max_iter, epsilon, step1c, cols2regi, vm, name_x):
        """Estimate one separate error-het regression per regime
        (regime_err_sep=True) and splice the results into stacked arrays."""

        # Map each regime value to the row indices of its observations.
        regi_ids = dict(
            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
        results_p = {}
        """
        for r in self.regimes_set:
            if system() == 'Windows':
                is_win = True
                results_p[r] = _work_error(*(y,x,regi_ids,r,w,max_iter,epsilon,step1c,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes))
            else:
                pool = mp.Pool(cores)
                results_p[r] = pool.apply_async(_work_error,args=(y,x,regi_ids,r,w,max_iter,epsilon,step1c,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes, ))
                is_win = False
        """
        # Run each regime's regression; the string literal above is disabled
        # legacy code that dispatched on the OS, the active version keys on
        # the `cores` flag instead.
        for r in self.regimes_set:
            if cores:
                pool = mp.Pool(None)
                results_p[r] = pool.apply_async(_work_error, args=(
                    y, x, regi_ids, r, w, max_iter, epsilon, step1c, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes, ))
            else:
                results_p[r] = _work_error(*(y, x, regi_ids, r, w, max_iter, epsilon, step1c,
                                             self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes))

        # Allocate the stacked, all-regimes result containers.
        self.kryd = 0
        self.kr = len(cols2regi) + 1  # per-regime parameters incl. lambda
        self.kf = 0
        self.nr = len(self.regimes_set)
        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
        self.betas = np.zeros((self.nr * self.kr, 1), float)
        self.u = np.zeros((self.n, 1), float)
        self.predy = np.zeros((self.n, 1), float)
        self.e_filtered = np.zeros((self.n, 1), float)
        """
        if not is_win:
            pool.close()
            pool.join()
        """
        if cores:
            pool.close()
            pool.join()

        # Collect per-regime results: vm/betas go into regime-sized diagonal
        # blocks, residuals/predictions back to each regime's row positions.
        results = {}
        self.name_y, self.name_x = [], []
        counter = 0
        for r in self.regimes_set:
            """
            if is_win:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()
            """
            if not cores:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()

            self.vm[(counter * self.kr):((counter + 1) * self.kr),
                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
            self.betas[
                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
            self.u[regi_ids[r], ] = results[r].u
            self.predy[regi_ids[r], ] = results[r].predy
            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
            self.name_y += results[r].name_y
            self.name_x += results[r].name_x
            counter += 1
        self.chow = REGI.Chow(self)
        self.multi = results
        SUMMARY.GM_Error_Het_multi(
            reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Endog_Error_Het_Regimes(RegressionPropsY, REGI.Regimes_Frame):
+
+    """
+    GMM method for a spatial error model with heteroskedasticity, regimes and
+    endogenous variables, with results and diagnostics; based on Arraiz et al
+    [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep : boolean
+                     Always False, kept for consistency, ignored.
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al. 
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in ouput
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi         : dictionary
+                    Only available when multiple regressions are estimated,
+                    i.e. when regime_err_sep=True and no variable is fixed
+                    across regimes.
+                    Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    For the endogenous models, we add the endogenous variable RD90 (resource deprivation)
+    and we decide to instrument for it with FP89 (families below poverty):
+
+    >>> yd_var = ['RD90']
+    >>> yend = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already 
+    existing gal file or create a new one. In this case, we will create one 
+    from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous), the
+    instruments and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Endog_Error_Het_Regimes(y, x, yend, q, regimes, w=w, step1c=True, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT.dbf')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers an error model that explicitly accounts
+    for heteroskedasticity and that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter. Hence, we find the same number of betas as of standard errors,
+    which we calculate taking the square root of the diagonal of the
+    variance-covariance matrix. Alternatively, we can have a summary of the
+    output by typing: model.summary
+
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', 'lambda']
+
+    >>> print np.around(reg.betas,4)
+    [[ 3.5944]
+     [ 1.065 ]
+     [ 0.1587]
+     [ 9.184 ]
+     [ 1.8784]
+     [-0.2466]
+     [ 2.4617]
+     [ 3.5756]
+     [ 0.2908]]
+
+    >>> print np.around(np.sqrt(reg.vm.diagonal()),4)
+    [ 0.5043  0.2132  0.0581  0.6681  0.3504  0.0999  0.3686  0.3402  0.028 ]
+
+    """
+
+    def __init__(self, y, x, yend, q, regimes, w,
+                 max_iter=1, epsilon=0.00001, step1c=False,
+                 constant_regi='many', cols2regi='all', regime_err_sep=False,
+                 regime_lag_sep=False,
+                 inv_method='power_exp', cores=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None, name_w=None, name_ds=None,
+                 name_regimes=None, summ=True, add_lag=False):
+        """Estimate the heteroskedastic endogenous spatial-error model with regimes.
+
+        Either dispatches one separate regression per regime (when
+        regime_err_sep=True and every column varies by regime) or runs a
+        single pooled estimation following steps 1a-2b of Arraiz et al.,
+        iterating between coefficient estimates and lambda until max_iter
+        or the epsilon convergence tolerance is reached.
+        """
+        # --- input validation: aligned arrays, shape of y, and required w ---
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.name_ds = USER.set_name_ds(name_ds)
+        # NOTE(review): name_regimes is set through set_name_ds rather than a
+        # regimes-specific setter — confirm this matches upstream intent.
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.n, self.step1c = n, step1c
+        self.y = y
+
+        # Output names; yend/q names are only prepared when this call will
+        # print its own summary (summ=True).
+        name_x = USER.set_name_x(name_x, x)
+        if summ:
+            name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_y = USER.set_name_y(name_y)
+            name_q = USER.set_name_q(name_q, q)
+        self.name_x_r = name_x + name_yend
+
+        # Expand cols2regi to one boolean per column and collect the set of
+        # distinct regime labels.
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, yend=yend)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+
+        if regime_err_sep == True:
+            # One independent regression per regime; only valid when every
+            # coefficient is allowed to vary across regimes.
+            if set(cols2regi) == set([True]):
+                self._endog_error_regimes_multi(y, x, regimes, w, yend, q, cores,
+                                                max_iter, epsilon, step1c, inv_method, cols2regi, vm,
+                                                name_x, name_yend, name_q, add_lag)
+            else:
+                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
+        else:
+            # Pooled estimation: build regime-expanded design matrices, then
+            # follow Arraiz et al. steps 1a-2b.
+            x_constant = USER.check_constant(x)
+            q, name_q = REGI.Regimes_Frame.__init__(self, q,
+                                                    regimes, constant_regi=None, cols2regi='all', names=name_q)
+            x, name_x = REGI.Regimes_Frame.__init__(self, x_constant,
+                                                    regimes, constant_regi=None, cols2regi=cols2regi,
+                                                    names=name_x)
+            yend2, name_yend = REGI.Regimes_Frame.__init__(self, yend,
+                                                           regimes, constant_regi=None,
+                                                           cols2regi=cols2regi, yend=True, names=name_yend)
+
+            # 1a. S2SLS --> \tilde{\delta}
+            tsls = BaseTSLS(y=y, x=x, yend=yend2, q=q)
+            self.k = tsls.z.shape[1]
+            self.x = tsls.x
+            self.yend, self.z, self.h = tsls.yend, tsls.z, tsls.h
+            wA1 = UTILS.get_A1_het(w.sparse)
+
+            # 1b. GMM --> \tilde{\lambda1}
+            moments = UTILS._moments2eqs(wA1, w.sparse, tsls.u)
+            lambda1 = UTILS.optim_moments(moments)
+
+            if step1c:
+                # 1c. GMM --> \tilde{\lambda2}
+                self.u = tsls.u
+                zs = UTILS.get_spFilter(w, lambda1, self.z)
+                vc1 = get_vc_het_tsls(
+                    w.sparse, wA1, self, lambda1, tsls.pfora1a2, zs, inv_method, filt=False)
+                lambda2 = UTILS.optim_moments(moments, vc1)
+            else:
+                lambda2 = lambda1
+            lambda_old = lambda2
+
+            self.iteration, eps = 0, 1
+            while self.iteration < max_iter and eps > epsilon:
+                # 2a. reg -->\hat{betas}
+                # NOTE(review): the spatial filters below use lambda1 (the
+                # step-1b estimate) rather than lambda_old, so the filtered
+                # data never changes across iterations. With the defaults
+                # (max_iter=1, step1c=False) the two coincide on the first
+                # pass; otherwise this looks suspicious — confirm against
+                # upstream, which filters with the updated lambda.
+                xs = UTILS.get_spFilter(w, lambda1, x_constant)
+                xs = REGI.Regimes_Frame.__init__(self, xs,
+                                                 regimes, constant_regi=None, cols2regi=cols2regi)[0]
+                ys = UTILS.get_spFilter(w, lambda1, y)
+                yend_s = UTILS.get_spFilter(w, lambda1, yend)
+                yend_s = REGI.Regimes_Frame.__init__(self, yend_s,
+                                                     regimes, constant_regi=None, cols2regi=cols2regi,
+                                                     yend=True)[0]
+                tsls_s = BaseTSLS(ys, xs, yend_s, h=tsls.h)
+                self.predy = spdot(self.z, tsls_s.betas)
+                self.u = self.y - self.predy
+
+                # 2b. GMM --> \hat{\lambda}
+                vc2 = get_vc_het_tsls(
+                    w.sparse, wA1, self, lambda_old, tsls_s.pfora1a2, sphstack(xs, yend_s), inv_method)
+                moments_i = UTILS._moments2eqs(wA1, w.sparse, self.u)
+                lambda3 = UTILS.optim_moments(moments_i, vc2)
+                eps = abs(lambda3 - lambda_old)
+                lambda_old = lambda3
+                self.iteration += 1
+
+            self.iter_stop = UTILS.iter_msg(self.iteration, max_iter)
+
+            # Final variance-covariance matrix and stacked results; lambda is
+            # appended as the last coefficient.
+            zs = UTILS.get_spFilter(w, lambda3, self.z)
+            P = get_P_hat(self, tsls.hthi, zs)
+            vc3 = get_vc_het_tsls(
+                w.sparse, wA1, self, lambda3, P, zs, inv_method, save_a1a2=True)
+            self.vm = get_Omega_GS2SLS(
+                w.sparse, lambda3, self, moments_i[0], vc3, P)
+            self.betas = np.vstack((tsls_s.betas, lambda3))
+            self.e_filtered = self.u - lambda3 * lag_spatial(w, self.u)
+            self.name_x = USER.set_name_x(name_x, x, constant=True)
+            self.name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_z = self.name_x + self.name_yend
+            self.name_z.append('lambda')  # listing lambda last
+            self.name_q = USER.set_name_q(name_q, q)
+            self.name_h = USER.set_name_h(self.name_x, self.name_q)
+            # lambda counts as one extra fixed (non-regime) parameter; kf is
+            # presumably initialized inside REGI.Regimes_Frame.__init__ —
+            # confirm.
+            self.kf += 1
+            self.chow = REGI.Chow(self)
+            self._cache = {}
+            if summ:
+                self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HET) - REGIMES"
+                SUMMARY.GM_Endog_Error_Het(reg=self, w=w, vm=vm, regimes=True)
+
+    def _endog_error_regimes_multi(self, y, x, regimes, w, yend, q, cores,
+                                   max_iter, epsilon, step1c, inv_method, cols2regi, vm,
+                                   name_x, name_yend, name_q, add_lag):
+        """Estimate one separate endogenous-error regression per regime.
+
+        Runs _work_endog_error on each regime's subset of observations,
+        optionally via multiprocessing, then stitches the per-regime results
+        back into this object's pooled attributes (betas, vm, u, predy,
+        e_filtered, output names, chow test, and the 'multi' dictionary).
+        """
+        # Map each regime label to the row indices of its observations.
+        regi_ids = dict(
+            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+        if add_lag != False:
+            # Lag+error (combo) variant: the spatially lagged dependent
+            # variable adds one more regime-varying column, and reduced-form
+            # predictions/residuals are filled per regime below.
+            self.cols2regi += [True]
+            cols2regi += [True]
+            self.predy_e = np.zeros((self.n, 1), float)
+            self.e_pred = np.zeros((self.n, 1), float)
+        results_p = {}
+        # Dead code retained from upstream: old Windows-vs-Unix dispatch.
+        """
+        for r in self.regimes_set:
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work_endog_error(*(y,x,yend,q,regi_ids,r,w,max_iter,epsilon,step1c,inv_method,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag))
+            else:
+                pool = mp.Pool(cores)        
+                results_p[r] = pool.apply_async(_work_endog_error,args=(y,x,yend,q,regi_ids,r,w,max_iter,epsilon,step1c,inv_method,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag, ))
+                is_win = False
+        """
+        # Dispatch each regime's regression: asynchronously on a worker pool
+        # when cores is truthy, otherwise serially in-process.
+        for r in self.regimes_set:
+            if cores:
+                # NOTE(review): a new Pool is created on every iteration but
+                # only the last-bound pool is close()d/join()ed below —
+                # earlier pools appear to leak; confirm against upstream.
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work_endog_error, args=(y, x, yend, q, regi_ids, r, w, max_iter, epsilon, step1c,
+                                                                         inv_method, self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag, ))
+            else:
+                results_p[r] = _work_endog_error(*(y, x, yend, q, regi_ids, r, w, max_iter, epsilon, step1c, inv_method,
+                                                   self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag))
+
+        # Pooled dimensions: kr regime-varying coefficients per regime
+        # (+1 presumably for lambda — confirm), nr regimes, no fixed ones.
+        self.kryd, self.kf = 0, 0
+        self.kr = len(cols2regi) + 1
+        self.nr = len(self.regimes_set)
+        # Pre-allocate pooled containers filled block-by-block below.
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.e_filtered = np.zeros((self.n, 1), float)
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        # Collect per-regime results (AsyncResult.get() when parallel) and
+        # write each into its slice of the pooled attributes.
+        results = {}
+        self.name_y, self.name_x, self.name_yend, self.name_q, self.name_z, self.name_h = [
+        ], [], [], [], [], []
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            # Block-diagonal placement of this regime's vm and betas.
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            self.name_yend += results[r].name_yend
+            self.name_q += results[r].name_q
+            self.name_z += results[r].name_z
+            self.name_h += results[r].name_h
+            if add_lag != False:
+                self.predy_e[regi_ids[r], ] = results[r].predy_e
+                self.e_pred[regi_ids[r], ] = results[r].e_pred
+            counter += 1
+        self.chow = REGI.Chow(self)
+        self.multi = results
+        # Print the summary matching the model variant that was estimated.
+        if add_lag != False:
+            SUMMARY.GM_Combo_Het_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+        else:
+            SUMMARY.GM_Endog_Error_Het_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Combo_Het_Regimes(GM_Endog_Error_Het_Regimes):
+
+    """
+    GMM method for a spatial lag and error model with heteroskedasticity,
+    regimes and endogenous variables, with results and diagnostics;
+    based on Arraiz et al [1]_, following Anselin [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep   : boolean
+                   If True, the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False (default), 
+                   the spatial parameter is fixed across regimes.
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    step1c       : boolean
+                   If True, then include Step 1c from Arraiz et al. 
+    inv_method   : string
+                   If "power_exp", then compute inverse using the power
+                   expansion. If "true_inv", then compute the true inverse.
+                   Note that true_inv will fail for large n.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in ouput
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep: boolean
+                    If True, a separate regression is run for each regime.
+    regime_lag_sep: boolean
+                    If True, the spatial parameter for spatial lag is also
+                    computed according to different regimes. If False (default), 
+                    the spatial parameter is fixed across regimes.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi         : dictionary
+                    Only available when multiple regressions are estimated,
+                    i.e. when regime_err_sep=True and no variable is fixed
+                    across regimes.
+                    Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
+    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
+    Large Sample Results". Journal of Regional Science, Vol. 50, No. 2, pp.
+    592-614.
+
+    .. [2] Anselin, L. GMM Estimation of Spatial Error Autocorrelation with Heteroskedasticity
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial combo model, we need to specify
+    the spatial weights matrix that includes the spatial configuration of the
+    observations. To do that, we can open an already existing gal file or 
+    create a new one. In this case, we will create one from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    Example only with spatial lag
+
+    The Combo class runs an SARAR model, that is a spatial lag+error model.
+    In this case we will run a simple version of that, where we have the
+    spatial effects as well as exogenous variables. Since it is a spatial
+    model, we have to pass in the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.  We can have a 
+    summary of the output by typing: model.summary 
+    Alternatively, we can check the betas:
+
+    >>> reg = GM_Combo_Het_Regimes(y, x, regimes, w=w, step1c=True, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT')
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '_Global_W_HR90', 'lambda']
+    >>> print np.around(reg.betas,4)
+    [[ 1.4613]
+     [ 0.9587]
+     [ 0.5658]
+     [ 9.1157]
+     [ 1.1324]
+     [ 0.6518]
+     [-0.4587]
+     [ 0.7174]]
+
+    This class also allows the user to run a spatial lag+error model with the
+    extra feature of including non-spatial endogenous regressors. This means
+    that, in addition to the spatial lag and error, we consider some of the
+    variables on the right-hand side of the equation as endogenous and we
+    instrument for this. In this case we consider RD90 (resource deprivation)
+    as an endogenous regressor.  We use FP89 (families below poverty)
+    for this and hence put it in the instruments parameter, 'q'.
+
+    >>> yd_var = ['RD90']
+    >>> yd = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    And then we can run and explore the model analogously to the previous combo:
+
+    >>> reg = GM_Combo_Het_Regimes(y, x, regimes, yd, q, w=w, step1c=True, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT')
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', '_Global_W_HR90', 'lambda']
+    >>> print reg.betas
+    [[ 3.41936197]
+     [ 1.04071048]
+     [ 0.16747219]
+     [ 8.85820215]
+     [ 1.847382  ]
+     [-0.24545394]
+     [ 2.43189808]
+     [ 3.61328423]
+     [ 0.03132164]
+     [ 0.29544224]]
+    >>> print np.sqrt(reg.vm.diagonal())
+    [ 0.53103804  0.20835827  0.05755679  1.00496234  0.34332131  0.10259525
+      0.3454436   0.37932794  0.07611667  0.07067059]
+    >>> print 'lambda: ', np.around(reg.betas[-1], 4)
+    lambda:  [ 0.2954]
+
+    """
+
    def __init__(self, y, x, regimes, yend=None, q=None,
                 w=None, w_lags=1, lag_q=True,
                 max_iter=1, epsilon=0.00001, step1c=False,
                 cores=False, inv_method='power_exp',
                 constant_regi='many', cols2regi='all',
                 regime_err_sep=False, regime_lag_sep=False,
                 vm=False, name_y=None, name_x=None,
                 name_yend=None, name_q=None,
                 name_w=None, name_ds=None, name_regimes=None):

        # Validate user inputs and normalize display names before estimating.
        n = USER.check_arrays(y, x)
        self.step1c = step1c
        USER.check_y(y, n)
        # A weights matrix is mandatory for a combo (lag + error) model.
        USER.check_weights(w, y, w_required=True)
        name_x = USER.set_name_x(name_x, x, constant=True)
        self.name_y = USER.set_name_y(name_y)
        name_yend = USER.set_name_yend(name_yend, yend)
        name_q = USER.set_name_q(name_q, q)
        # Names for the spatial lags of X (and optionally of q) appended as
        # instruments by set_endog below.
        name_q.extend(
            USER.set_name_q_sp(name_x, w_lags, name_q, lag_q, force_all=True))

        # Resolve which columns get regime-specific coefficients; add_cons=False
        # because the constant was already accounted for in name_x above.
        cols2regi = REGI.check_cols2regi(
            constant_regi, cols2regi, x, yend=yend, add_cons=False)
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        USER.check_regimes(self.regimes_set, n, x.shape[1])
        self.regime_err_sep = regime_err_sep
        self.regime_lag_sep = regime_lag_sep

        if regime_lag_sep == True:
            # Regime-specific lag requires regime-specific error as well:
            # rho and lambda must either both be split by regime or both global.
            if regime_err_sep == False:
                raise Exception, "For spatial combo models, if spatial lag is set by regimes (regime_lag_sep=True), spatial error must also be set by regimes (regime_err_sep=True)."
            add_lag = [w_lags, lag_q]
        else:
            # Global spatial lag: its coefficient is not split by regime, so a
            # single False entry is appended for the lag column.
            cols2regi += [False]
            add_lag = False
            if regime_err_sep == True:
                raise Exception, "For spatial combo models, if spatial error is set by regimes (regime_err_sep=True), all coefficients including lambda (regime_lag_sep=True) must be set by regimes."
            # Append the spatial lag of y to the endogenous variables and the
            # spatial lags of X (and q) to the instruments.
            yend, q = set_endog(y, x, w, yend, q, w_lags, lag_q)
        name_yend.append(USER.set_name_yend_sp(self.name_y))

        # Delegate the actual estimation to the endogenous-error parent class;
        # summ=False defers the summary until the combo-specific attributes
        # (rho, predy_e, e_pred) are attached below.
        GM_Endog_Error_Het_Regimes.__init__(self, y=y, x=x, yend=yend,
                                            q=q, regimes=regimes, w=w, constant_regi=constant_regi,
                                            cols2regi=cols2regi, regime_err_sep=regime_err_sep,
                                            max_iter=max_iter, epsilon=epsilon,
                                            step1c=step1c, inv_method=inv_method, cores=cores,
                                            vm=vm, name_y=name_y, name_x=name_x, name_yend=name_yend,
                                            name_q=name_q, name_w=name_w, name_ds=name_ds,
                                            name_regimes=name_regimes, summ=False, add_lag=add_lag)

        if regime_err_sep != True:
            # Global model: recover rho (the coefficient on the spatial lag of
            # y, stored second-to-last in betas, before lambda) and compute
            # predictions that account for spatial feedback.
            self.rho = self.betas[-2]
            self.predy_e, self.e_pred, warn = UTILS.sp_att(w, self.y,
                                                           self.predy, yend[:, -1].reshape(self.n, 1), self.rho)
            UTILS.set_warn(self, warn)
            self.regime_lag_sep = regime_lag_sep
            self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HET) - REGIMES"
            SUMMARY.GM_Combo_Het(reg=self, w=w, vm=vm, regimes=True)
+
+
def _work_error(y, x, regi_ids, r, w, max_iter, epsilon, step1c, name_ds, name_y, name_x, name_w, name_regimes):
    # Fit a heteroskedastic GM error model restricted to the observations of
    # regime r, returning the fitted model with regime-prefixed names attached.
    rows = regi_ids[r]
    regime_w, warning = REGI.w_regime(w, rows, r, transform=True)
    regime_y = y[rows]
    regime_x = x[rows]
    x_with_constant = USER.check_constant(regime_x)
    model = BaseGM_Error_Het(
        regime_y, x_with_constant, regime_w.sparse, max_iter=max_iter, epsilon=epsilon, step1c=step1c)
    set_warn(model, warning)
    model.w = regime_w
    model.title = "SPATIALLY WEIGHTED LEAST SQUARES ESTIMATION (HET) - REGIME %s" % r
    model.name_ds = name_ds
    prefix = str(r)
    model.name_y = '%s_%s' % (prefix, name_y)
    model.name_x = ['%s_%s' % (prefix, label) for label in name_x]
    model.name_w = name_w
    model.name_regimes = name_regimes
    return model
+
+
def _work_endog_error(y, x, yend, q, regi_ids, r, w, max_iter, epsilon, step1c, inv_method, name_ds, name_y, name_x, name_yend, name_q, name_w, name_regimes, add_lag):
    # Fit a heteroskedastic GM endogenous-error (or combo, if add_lag) model
    # restricted to the observations of regime r.
    w_r, warn = REGI.w_regime(w, regi_ids[r], r, transform=True)
    y_r = y[regi_ids[r]]
    x_r = x[regi_ids[r]]
    # Use identity checks rather than '!=': comparing a numpy array against
    # None/False with '!=' broadcasts elementwise, and the truth value of the
    # resulting array is ambiguous (raises ValueError on modern numpy).
    if yend is not None:
        yend_r = yend[regi_ids[r]]
        q_r = q[regi_ids[r]]
    else:
        yend_r, q_r = None, None
    if add_lag is not False:
        # Combo model: append the spatial lag of y to the endogenous variables
        # and spatial lags of X/q to the instruments; add_lag is [w_lags, lag_q].
        yend_r, q_r = set_endog(
            y_r, x_r, w_r, yend_r, q_r, add_lag[0], add_lag[1])
    x_constant = USER.check_constant(x_r)
    model = BaseGM_Endog_Error_Het(y_r, x_constant, yend_r, q_r, w_r.sparse,
                                   max_iter=max_iter, epsilon=epsilon, step1c=step1c, inv_method=inv_method)
    set_warn(model, warn)
    if add_lag is not False:
        # Recover rho (coefficient on the spatial lag of y, stored before
        # lambda in betas) and compute spatially-adjusted predictions.
        model.rho = model.betas[-2]
        model.predy_e, model.e_pred, warn = sp_att(w_r, model.y,
                                                   model.predy, model.yend[:, -1].reshape(model.n, 1), model.rho)
        set_warn(model, warn)
    model.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HET) - REGIME %s" % r
    model.name_ds = name_ds
    # Prefix all variable names with the regime identifier for the summary.
    model.name_y = '%s_%s' % (str(r), name_y)
    model.name_x = ['%s_%s' % (str(r), i) for i in name_x]
    model.name_yend = ['%s_%s' % (str(r), i) for i in name_yend]
    model.name_z = model.name_x + model.name_yend + ['lambda']
    model.name_q = ['%s_%s' % (str(r), i) for i in name_q]
    model.name_h = model.name_x + model.name_q
    model.name_w = name_w
    model.name_regimes = name_regimes
    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/error_sp_hom.py b/pysal/spreg/error_sp_hom.py
new file mode 100644
index 0000000..d3463d0
--- /dev/null
+++ b/pysal/spreg/error_sp_hom.py
@@ -0,0 +1,1522 @@
+'''
+Hom family of models based on: 
+
+    Drukker, D. M., Egger, P., Prucha, I. R. (2010)
+    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
+    Disturbances and Endogenous Regressors". Working paper.
+    
+Following:
+
+    Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation with
+    and without Heteroskedasticity".
+
+'''
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Daniel Arribas-Bel darribas at asu.edu"
+
+from scipy import sparse as SP
+import numpy as np
+from numpy import linalg as la
+import ols as OLS
+from pysal import lag_spatial
+from utils import power_expansion, set_endog, iter_msg, sp_att
+from utils import get_A1_hom, get_A2_hom, get_A1_het, optim_moments
+from utils import get_spFilter, get_lags, _moments2eqs
+from utils import spdot, RegressionPropsY, set_warn
+import twosls as TSLS
+import user_output as USER
+import summary_output as SUMMARY
+
+__all__ = ["GM_Error_Hom", "GM_Endog_Error_Hom", "GM_Combo_Hom"]
+
+
class BaseGM_Error_Hom(RegressionPropsY):

    '''
    GMM method for a spatial error model with homoskedasticity (note: no
    consistency checks, diagnostics or constant added); based on 
    Drukker et al. (2010) [1]_, following Anselin (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    w            : Sparse matrix
                   Spatial weights sparse matrix   
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).

    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
    xtx          : float
                   X'X

    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("CRIME"))
    >>> X = np.array(X).T
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> w.transform = 'r'

    Model commands

    >>> reg = BaseGM_Error_Hom(y, X, w=w.sparse, A1='hom_sc')
    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 47.9479  12.3021]
     [  0.7063   0.4967]
     [ -0.556    0.179 ]
     [  0.4129   0.1835]]
    >>> print np.around(reg.vm, 4) #doctest: +SKIP
    [[  1.51340700e+02  -5.29060000e+00  -1.85650000e+00  -2.40000000e-03]
     [ -5.29060000e+00   2.46700000e-01   5.14000000e-02   3.00000000e-04]
     [ -1.85650000e+00   5.14000000e-02   3.21000000e-02  -1.00000000e-04]
     [ -2.40000000e-03   3.00000000e-04  -1.00000000e-04   3.37000000e-02]]
    '''

    def __init__(self, y, x, w,
                 max_iter=1, epsilon=0.00001, A1='hom_sc'):
        # Build the A1/A2 moment matrices for the requested specification.
        if A1 == 'hom':
            wA1 = get_A1_hom(w)
        elif A1 == 'hom_sc':
            wA1 = get_A1_hom(w, scalarKP=True)
        elif A1 == 'het':
            wA1 = get_A1_het(w)
        else:
            # Fail fast with a clear message; previously an unknown A1 fell
            # through and triggered a confusing NameError on wA1 below.
            raise ValueError("A1 must be one of 'hom', 'hom_sc' or 'het'; got %r" % (A1,))

        wA2 = get_A2_hom(w)

        # 1a. OLS --> \tilde{\delta}
        ols = OLS.BaseOLS(y=y, x=x)
        self.x, self.y, self.n, self.k, self.xtx = ols.x, ols.y, ols.n, ols.k, ols.xtx

        # 1b. GM --> \tilde{\rho}
        # Initial lambda from the moment conditions on the OLS residuals.
        moments = moments_hom(w, wA1, wA2, ols.u)
        lambda1 = optim_moments(moments)
        lambda_old = lambda1

        self.iteration, eps = 0, 1
        while self.iteration < max_iter and eps > epsilon:
            # 2a. SWLS --> \hat{\delta}
            # Spatially filter y and X with the current lambda, then re-run OLS.
            x_s = get_spFilter(w, lambda_old, self.x)
            y_s = get_spFilter(w, lambda_old, self.y)
            ols_s = OLS.BaseOLS(y=y_s, x=x_s)
            self.predy = spdot(self.x, ols_s.betas)
            self.u = self.y - self.predy

            # 2b. GM 2nd iteration --> \hat{\rho}
            # Re-estimate lambda weighting the moments by their VC matrix.
            moments = moments_hom(w, wA1, wA2, self.u)
            psi = get_vc_hom(w, wA1, wA2, self, lambda_old)[0]
            lambda2 = optim_moments(moments, psi)
            eps = abs(lambda2 - lambda_old)
            lambda_old = lambda2
            self.iteration += 1

        self.iter_stop = iter_msg(self.iteration, max_iter)

        # Output
        self.betas = np.vstack((ols_s.betas, lambda2))
        self.vm, self.sig2 = get_omega_hom_ols(
            w, wA1, wA2, self, lambda2, moments[0])
        self.e_filtered = self.u - lambda2 * w * self.u
        self._cache = {}
+
+
class GM_Error_Hom(BaseGM_Error_Hom):

    '''
    GMM method for a spatial error model with homoskedasticity, with results
    and diagnostics; based on Drukker et al. (2010) [1]_, following Anselin
    (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    w            : pysal W object
                   Spatial weights object   
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output


    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
    std_err      : array
                   1xk array of standard errors of the betas    
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
    xtx          : float
                   X'X
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    title        : string
                   Name of the regression method used

    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.  

    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Extract the HOVAL column (home values) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))

    Extract INC (income) and CRIME (crime) vectors from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). By default this class adds a vector of ones to the
    independent variables passed in.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("CRIME"))
    >>> X = np.array(X).T

    Since we want to run a spatial error model, we need to specify the spatial
    weights matrix that includes the spatial configuration of the observations
    into the error component of the model. To do that, we can open an already
    existing gal file or create a new one. In this case, we will create one
    from ``columbus.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We are all set with the preliminaries, we are good to run the model. In this
    case, we will need the variables and the weights matrix. If we want to
    have the names of the variables printed in the output summary, we will
    have to pass them in as well, although this is optional.

    >>> reg = GM_Error_Hom(y, X, w=w, A1='hom_sc', name_y='home value', name_x=['income', 'crime'], name_ds='columbus')

    Once we have run the model, we can explore a little bit the output. The
    regression object we have created has many attributes so take your time to
    discover them. This class offers an error model that assumes
    homoskedasticity but that unlike the models from
    ``pysal.spreg.error_sp``, it allows for inference on the spatial
    parameter. This is why you obtain as many coefficient estimates as
    standard errors, which you calculate taking the square root of the
    diagonal of the variance-covariance matrix of the parameters:

    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 47.9479  12.3021]
     [  0.7063   0.4967]
     [ -0.556    0.179 ]
     [  0.4129   0.1835]]

    '''

    def __init__(self, y, x, w,
                 max_iter=1, epsilon=0.00001, A1='hom_sc',
                 vm=False, name_y=None, name_x=None,
                 name_w=None, name_ds=None):

        # Validate inputs, add the constant column, then delegate the actual
        # estimation to the base class (which expects the sparse weights).
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        x_constant = USER.check_constant(x)
        BaseGM_Error_Hom.__init__(self, y=y, x=x_constant, w=w.sparse, A1=A1,
                                  max_iter=max_iter, epsilon=epsilon)
        # Attach user-facing names and build the printable summary.
        self.title = "SPATIALLY WEIGHTED LEAST SQUARES (HOM)"
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_x = USER.set_name_x(name_x, x)
        # Lambda is reported alongside the regression coefficients.
        self.name_x.append('lambda')
        self.name_w = USER.set_name_w(name_w, w)
        SUMMARY.GM_Error_Hom(reg=self, w=w, vm=vm)
+
+
class BaseGM_Endog_Error_Hom(RegressionPropsY):

    '''
    GMM method for a spatial error model with homoskedasticity and
    endogenous variables (note: no consistency checks, diagnostics or constant
    added); based on Drukker et al. (2010) [1]_, following Anselin (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note: 
                   this should not contain any variables from x)
    w            : Sparse matrix
                   Spatial weights sparse matrix   
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).

    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments 
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
    hth          : float
                   H'H

    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> yd = []
    >>> yd.append(db.by_col("CRIME"))
    >>> yd = np.array(yd).T
    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T
    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> w.transform = 'r'
    >>> reg = BaseGM_Endog_Error_Hom(y, X, yd, q, w=w.sparse, A1='hom_sc')
    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 55.3658  23.496 ]
     [  0.4643   0.7382]
     [ -0.669    0.3943]
     [  0.4321   0.1927]]


    '''

    def __init__(self, y, x, yend, q, w,
                 max_iter=1, epsilon=0.00001, A1='hom_sc'):

        # Build the A1/A2 moment matrices for the requested specification.
        if A1 == 'hom':
            wA1 = get_A1_hom(w)
        elif A1 == 'hom_sc':
            wA1 = get_A1_hom(w, scalarKP=True)
        elif A1 == 'het':
            wA1 = get_A1_het(w)
        else:
            # Fail fast with a clear message; previously an unknown A1 fell
            # through and triggered a confusing NameError on wA1 below.
            raise ValueError("A1 must be one of 'hom', 'hom_sc' or 'het'; got %r" % (A1,))

        wA2 = get_A2_hom(w)

        # 1a. S2SLS --> \tilde{\delta}
        tsls = TSLS.BaseTSLS(y=y, x=x, yend=yend, q=q)
        self.x, self.z, self.h, self.y, self.hth = tsls.x, tsls.z, tsls.h, tsls.y, tsls.hth
        self.yend, self.q, self.n, self.k = tsls.yend, tsls.q, tsls.n, tsls.k

        # 1b. GM --> \tilde{\rho}
        # Initial lambda from the moment conditions on the 2SLS residuals.
        moments = moments_hom(w, wA1, wA2, tsls.u)
        lambda1 = optim_moments(moments)
        lambda_old = lambda1

        self.iteration, eps = 0, 1
        while self.iteration < max_iter and eps > epsilon:
            # 2a. GS2SLS --> \hat{\delta}
            # Spatially filter y, X and yend with the current lambda, then
            # re-run 2SLS with the original (unfiltered) instruments.
            x_s = get_spFilter(w, lambda_old, self.x)
            y_s = get_spFilter(w, lambda_old, self.y)
            yend_s = get_spFilter(w, lambda_old, self.yend)
            tsls_s = TSLS.BaseTSLS(y=y_s, x=x_s, yend=yend_s, h=self.h)
            self.predy = spdot(self.z, tsls_s.betas)
            self.u = self.y - self.predy

            # 2b. GM 2nd iteration --> \hat{\rho}
            # Re-estimate lambda weighting the moments by their VC matrix.
            moments = moments_hom(w, wA1, wA2, self.u)
            psi = get_vc_hom(w, wA1, wA2, self, lambda_old, tsls_s.z)[0]
            lambda2 = optim_moments(moments, psi)
            eps = abs(lambda2 - lambda_old)
            lambda_old = lambda2
            self.iteration += 1

        self.iter_stop = iter_msg(self.iteration, max_iter)

        # Output
        self.betas = np.vstack((tsls_s.betas, lambda2))
        self.vm, self.sig2 = get_omega_hom(
            w, wA1, wA2, self, lambda2, moments[0])
        self.e_filtered = self.u - lambda2 * w * self.u
        self._cache = {}
+
+
class GM_Endog_Error_Hom(BaseGM_Endog_Error_Hom):

    '''
    GMM method for a spatial error model with homoskedasticity and endogenous
    variables, with results and diagnostics; based on Drukker et al. (2010) [1]_,
    following Anselin (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note: 
                   this should not contain any variables from x)
    w            : pysal W object
                   Spatial weights object   
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_q       : list of strings
                   Names of instruments for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output

    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments 
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
    sig2         : float
                   Sigma squared used in computations
    std_err      : array
                   1xk array of standard errors of the betas    
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
    name_y        : string
                    Name of dependent variable for use in output
    name_x        : list of strings
                    Names of independent variables for use in output
    name_yend     : list of strings
                    Names of endogenous variables for use in output
    name_z        : list of strings
                    Names of exogenous and endogenous variables for use in 
                    output
    name_q        : list of strings
                    Names of external instruments
    name_h        : list of strings
                    Names of all instruments used in output
    name_w        : string
                    Name of weights matrix for use in output
    name_ds       : string
                    Name of dataset for use in output
    title         : string
                    Name of the regression method used
    hth          : float
                   H'H


    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.  

    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Extract the HOVAL column (home values) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))

    Extract INC (income) vector from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). By default this class adds a vector of ones to the
    independent variables passed in.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T

    In this case we consider CRIME (crime rates) is an endogenous regressor.
    We tell the model that this is so by passing it in a different parameter
    from the exogenous variables (x).

    >>> yd = []
    >>> yd.append(db.by_col("CRIME"))
    >>> yd = np.array(yd).T

    Because we have endogenous variables, to obtain a correct estimate of the
    model, we need to instrument for CRIME. We use DISCBD (distance to the
    CBD) for this and hence put it in the instruments parameter, 'q'.

    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T

    Since we want to run a spatial error model, we need to specify the spatial
    weights matrix that includes the spatial configuration of the observations
    into the error component of the model. To do that, we can open an already
    existing gal file or create a new one. In this case, we will create one
    from ``columbus.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We are all set with the preliminaries, we are good to run the model. In this
    case, we will need the variables (exogenous and endogenous), the
    instruments and the weights matrix. If we want to
    have the names of the variables printed in the output summary, we will
    have to pass them in as well, although this is optional.

    >>> reg = GM_Endog_Error_Hom(y, X, yd, q, w=w, A1='hom_sc', name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')

    Once we have run the model, we can explore a little bit the output. The
    regression object we have created has many attributes so take your time to
    discover them. This class offers an error model that assumes
    homoskedasticity but that unlike the models from
    ``pysal.spreg.error_sp``, it allows for inference on the spatial
    parameter. Hence, we find the same number of betas as of standard errors,
    which we calculate taking the square root of the diagonal of the
    variance-covariance matrix:

    >>> print reg.name_z
    ['CONSTANT', 'inc', 'crime', 'lambda']
    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 55.3658  23.496 ]
     [  0.4643   0.7382]
     [ -0.669    0.3943]
     [  0.4321   0.1927]]

    '''

    def __init__(self, y, x, yend, q, w,
                 max_iter=1, epsilon=0.00001, A1='hom_sc',
                 vm=False, name_y=None, name_x=None,
                 name_yend=None, name_q=None,
                 name_w=None, name_ds=None):

        # Sanity checks on user inputs: consistent dimensions and a required W
        n = USER.check_arrays(y, x, yend, q)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        # Add the constant term to x; the base class expects x to include it
        x_constant = USER.check_constant(x)
        # All estimation happens in the base class; this wrapper only adds
        # user checks, variable names and the summary/diagnostics output
        BaseGM_Endog_Error_Hom.__init__(
            self, y=y, x=x_constant, w=w.sparse, yend=yend, q=q,
            A1=A1, max_iter=max_iter, epsilon=epsilon)
        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HOM)"
        # Variable names for the output summary (USER generates defaults when
        # the caller does not supply them)
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_x = USER.set_name_x(name_x, x)
        self.name_yend = USER.set_name_yend(name_yend, yend)
        self.name_z = self.name_x + self.name_yend
        self.name_z.append('lambda')  # listing lambda last
        self.name_q = USER.set_name_q(name_q, q)
        self.name_h = USER.set_name_h(self.name_x, self.name_q)
        self.name_w = USER.set_name_w(name_w, w)
        # Build the printable summary of results and diagnostics
        SUMMARY.GM_Endog_Error_Hom(reg=self, w=w, vm=vm)
+
+
class BaseGM_Combo_Hom(BaseGM_Endog_Error_Hom):

    '''
    GMM method for a spatial lag and error model with homoskedasticity and
    endogenous variables (note: no consistency checks, diagnostics or constant
    added); based on Drukker et al. (2010) [1]_, following Anselin (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note: 
                   this should not contain any variables from x)
    w            : Sparse matrix
                   Spatial weights sparse matrix   
    w_lags       : integer
                   Orders of W to include as instruments for the spatially
                   lagged dependent variable. For example, w_lags=1, then
                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
    lag_q        : boolean
                   If True, then include spatial lags of the additional 
                   instruments (q).
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).


    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments 
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
    hth          : float
                   H'H


    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------
    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T
    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> w.transform = 'r'
    >>> w_lags = 1
    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, None, None, w_lags, True)
    >>> X = np.hstack((np.ones(y.shape),X))

    Example only with spatial lag

    >>> reg = BaseGM_Combo_Hom(y, X, yend=yd2, q=q2, w=w.sparse, A1='hom_sc')
    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 10.1254  15.2871]
     [  1.5683   0.4407]
     [  0.1513   0.4048]
     [  0.2103   0.4226]]


    Example with both spatial lag and other endogenous variables

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T
    >>> yd = []
    >>> yd.append(db.by_col("CRIME"))
    >>> yd = np.array(yd).T
    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T
    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, yd, q, w_lags, True)
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> reg = BaseGM_Combo_Hom(y, X, yd2, q2, w=w.sparse, A1='hom_sc')
    >>> betas = np.array([['CONSTANT'],['inc'],['crime'],['W_hoval'],['lambda']])
    >>> print np.hstack((betas, np.around(np.hstack((reg.betas, np.sqrt(reg.vm.diagonal()).reshape(5,1))),5)))
    [['CONSTANT' '111.7705' '67.75191']
     ['inc' '-0.30974' '1.16656']
     ['crime' '-1.36043' '0.6841']
     ['W_hoval' '-0.52908' '0.84428']
     ['lambda' '0.60116' '0.18605']]

    '''

    def __init__(self, y, x, yend=None, q=None,
                 w=None, w_lags=1, lag_q=True,
                 max_iter=1, epsilon=0.00001, A1='hom_sc'):

        # Note: w_lags and lag_q are accepted for interface compatibility but
        # are not used here; for this base class the spatial-lag instruments
        # are expected to be built beforehand and passed in through yend/q
        # (see the calls to set_endog in the Examples above).
        BaseGM_Endog_Error_Hom.__init__(
            self, y=y, x=x, w=w, yend=yend, q=q, A1=A1,
            max_iter=max_iter, epsilon=epsilon)
+
+
class GM_Combo_Hom(BaseGM_Combo_Hom):

    '''
    GMM method for a spatial lag and error model with homoskedasticity and
    endogenous variables, with results and diagnostics; based on Drukker et
    al. (2010) [1]_, following Anselin (2011) [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note: 
                   this should not contain any variables from x)
    w            : pysal W object
                   Spatial weights object (always necessary)   
    w_lags       : integer
                   Orders of W to include as instruments for the spatially
                   lagged dependent variable. For example, w_lags=1, then
                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
    lag_q        : boolean
                   If True, then include spatial lags of the additional 
                   instruments (q).
    max_iter     : int
                   Maximum number of iterations of steps 2a and 2b from Arraiz
                   et al. Note: epsilon provides an additional stop condition.
    epsilon      : float
                   Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
                   an additional stop condition.
    A1           : string
                   If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011).  If
                   A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
                   and Drukker, Prucha and Raciborski (2010).
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_q       : list of strings
                   Names of instruments for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output

    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    e_pred       : array
                   nx1 array of residuals (using reduced form)
    predy        : array
                   nx1 array of predicted y values
    predy_e      : array
                   nx1 array of predicted y values (using reduced form)
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments 
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    iter_stop    : string
                   Stop criterion reached during iteration of steps 2a and 2b
                   from Arraiz et al.
    iteration    : integer
                   Number of iterations of steps 2a and 2b from Arraiz et al.
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
    pr2_e        : float
                   Pseudo R squared (squared correlation between y and ypred_e
                   (using reduced form))
    sig2         : float
                   Sigma squared used in computations (based on filtered
                   residuals)
    std_err      : array
                   1xk array of standard errors of the betas    
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
    name_y        : string
                    Name of dependent variable for use in output
    name_x        : list of strings
                    Names of independent variables for use in output
    name_yend     : list of strings
                    Names of endogenous variables for use in output
    name_z        : list of strings
                    Names of exogenous and endogenous variables for use in 
                    output
    name_q        : list of strings
                    Names of external instruments
    name_h        : list of strings
                    Names of all instruments used in output
    name_w        : string
                    Name of weights matrix for use in output
    name_ds       : string
                    Name of dataset for use in output
    title         : string
                    Name of the regression method used
    hth          : float
                   H'H


    References
    ----------

    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
    Disturbances and Endogenous Regressors". Working paper.

    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.  

    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Extract the HOVAL column (home values) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))

    Extract INC (income) vector from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). By default this class adds a vector of ones to the
    independent variables passed in.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X = np.array(X).T

    Since we want to run a spatial error model, we need to specify the spatial
    weights matrix that includes the spatial configuration of the observations
    into the error component of the model. To do that, we can open an already
    existing gal file or create a new one. In this case, we will create one
    from ``columbus.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    Example only with spatial lag

    The Combo class runs an SARAR model, that is a spatial lag+error model.
    In this case we will run a simple version of that, where we have the
    spatial effects as well as exogenous variables. Since it is a spatial
    model, we have to pass in the weights matrix. If we want to
    have the names of the variables printed in the output summary, we will
    have to pass them in as well, although this is optional.

    >>> reg = GM_Combo_Hom(y, X, w=w, A1='hom_sc', name_x=['inc'],\
            name_y='hoval', name_yend=['crime'], name_q=['discbd'],\
            name_ds='columbus')
    >>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
    [[ 10.1254  15.2871]
     [  1.5683   0.4407]
     [  0.1513   0.4048]
     [  0.2103   0.4226]]

    This class also allows the user to run a spatial lag+error model with the
    extra feature of including non-spatial endogenous regressors. This means
    that, in addition to the spatial lag and error, we consider some of the
    variables on the right-hand side of the equation as endogenous and we
    instrument for this. As an example, we will include CRIME (crime rates) as
    endogenous and will instrument with DISCBD (distance to the CBD). We first
    need to read in the variables:


    >>> yd = []
    >>> yd.append(db.by_col("CRIME"))
    >>> yd = np.array(yd).T
    >>> q = []
    >>> q.append(db.by_col("DISCBD"))
    >>> q = np.array(q).T

    And then we can run and explore the model analogously to the previous combo:

    >>> reg = GM_Combo_Hom(y, X, yd, q, w=w, A1='hom_sc', \
            name_ds='columbus')
    >>> betas = np.array([['CONSTANT'],['inc'],['crime'],['W_hoval'],['lambda']])
    >>> print np.hstack((betas, np.around(np.hstack((reg.betas, np.sqrt(reg.vm.diagonal()).reshape(5,1))),5)))
    [['CONSTANT' '111.7705' '67.75191']
     ['inc' '-0.30974' '1.16656']
     ['crime' '-1.36043' '0.6841']
     ['W_hoval' '-0.52908' '0.84428']
     ['lambda' '0.60116' '0.18605']]

    '''

    def __init__(self, y, x, yend=None, q=None,
                 w=None, w_lags=1, lag_q=True,
                 max_iter=1, epsilon=0.00001, A1='hom_sc',
                 vm=False, name_y=None, name_x=None,
                 name_yend=None, name_q=None,
                 name_w=None, name_ds=None):

        # Sanity checks on user inputs: consistent dimensions and a required W
        n = USER.check_arrays(y, x, yend, q)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        # Augment yend with the spatial lag of y and q with the WX (and,
        # if lag_q, Wq) instruments before handing off to the base class
        yend2, q2 = set_endog(y, x, w, yend, q, w_lags, lag_q)
        # Add the constant term to x; the base class expects it included
        x_constant = USER.check_constant(x)
        BaseGM_Combo_Hom.__init__(
            self, y=y, x=x_constant, w=w.sparse, yend=yend2, q=q2,
            w_lags=w_lags, A1=A1, lag_q=lag_q,
            max_iter=max_iter, epsilon=epsilon)
        # Coefficient on the spatial lag of y; betas end with [..., rho,
        # lambda] since lambda is appended last by the base estimation
        self.rho = self.betas[-2]
        # Predicted values and residuals from the reduced form; the last
        # column of yend2 is the spatial lag of y added by set_endog
        self.predy_e, self.e_pred, warn = sp_att(w, self.y, self.predy,
                                                 yend2[:, -1].reshape(self.n, 1), self.rho)
        set_warn(self, warn)
        self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HOM)"
        # Variable names for the output summary (USER generates defaults when
        # the caller does not supply them)
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_x = USER.set_name_x(name_x, x)
        self.name_yend = USER.set_name_yend(name_yend, yend)
        self.name_yend.append(USER.set_name_yend_sp(self.name_y))
        self.name_z = self.name_x + self.name_yend
        self.name_z.append('lambda')  # listing lambda last
        self.name_q = USER.set_name_q(name_q, q)
        self.name_q.extend(
            USER.set_name_q_sp(self.name_x, w_lags, self.name_q, lag_q))
        self.name_h = USER.set_name_h(self.name_x, self.name_q)
        self.name_w = USER.set_name_w(name_w, w)
        # Build the printable summary of results and diagnostics
        SUMMARY.GM_Combo_Hom(reg=self, w=w, vm=vm)
+
+
+# Functions
+
def moments_hom(w, wA1, wA2, u):
    '''
    Compute G and g matrices for the spatial error model with homoscedasticity
    as in Anselin [1]_ (2011).
    ...

    Parameters
    ----------

    w           : Sparse matrix
                  Spatial weights sparse matrix   

    wA1         : Sparse matrix
                  A1 matrix as per the A1 option of the model (see e.g.
                  get_A1_hom), aligned with w

    wA2         : Sparse matrix
                  A2 matrix (as in Anselin (2011)), aligned with w

    u           : array
                  Residuals. nx1 array assumed to be aligned with w

    Returns
    -------

    moments     : list
                  List of two arrays corresponding to the matrices 'G' (2x2)
                  and 'g' (2x1), respectively.


    References
    ----------

    .. [1] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
    with and without Heteroskedasticity". 
    '''
    n = w.shape[0]
    # Products of the residuals with each A matrix and with W
    A1u = wA1 * u
    A2u = wA2 * u
    wu = w * u

    # g vector: sample moments [u'A1u, u'A2u]' / n as a 2x1 column
    g1 = np.dot(u.T, A1u)
    g2 = np.dot(u.T, A2u)
    g = np.vstack((g1, g2)) / n

    # G matrix: derivatives of the moment conditions, scaled by n
    G11 = 2 * np.dot(wu.T * wA1, u)
    G12 = -np.dot(wu.T * wA1, wu)
    G21 = 2 * np.dot(wu.T * wA2, u)
    G22 = -np.dot(wu.T * wA2, wu)
    # Each Gij is a 1x1 array; assemble them into the 2x2 G
    G = np.vstack((np.hstack((G11, G12)),
                   np.hstack((G21, G22)))) / n
    return [G, g]
+
+
+def get_vc_hom(w, wA1, wA2, reg, lambdapar, z_s=None, for_omegaOLS=False):
+    '''
+    VC matrix \psi of Spatial error with homoscedasticity. As in 
+    Anselin (2011) [1]_ (p. 20)
+    ...
+
+    Parameters
+    ----------
+    w               :   Sparse matrix
+                        Spatial weights sparse matrix
+    reg             :   reg
+                        Regression object
+    lambdapar       :   float
+                        Spatial parameter estimated in previous step of the
+                        procedure
+    z_s             :   array
+                        optional argument for spatially filtered Z (to be
+                        passed only if endogenous variables are present)
+    for_omegaOLS    :   boolean
+                        If True (default=False), it also returns P, needed
+                        only in the computation of Omega
+
+    Returns
+    -------
+
+    psi         : array
+                  2x2 VC matrix
+    a1          : array
+                  nx1 vector a1. If z_s=None, a1 = 0.
+    a2          : array
+                  nx1 vector a2. If z_s=None, a2 = 0.
+    p           : array
+                  P matrix. Equal to 0 unless for_omegaOLS=True or z_s is
+                  provided.
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+    '''
+    u_s = get_spFilter(w, lambdapar, reg.u)
+    n = float(w.shape[0])
+    sig2 = np.dot(u_s.T, u_s) / n
+    mu3 = np.sum(u_s ** 3) / n
+    mu4 = np.sum(u_s ** 4) / n
+
+    tr11 = wA1 * wA1
+    tr11 = np.sum(tr11.diagonal())
+    tr12 = wA1 * (wA2 * 2)
+    tr12 = np.sum(tr12.diagonal())
+    tr22 = wA2 * wA2 * 2
+    tr22 = np.sum(tr22.diagonal())
+    vecd1 = np.array([wA1.diagonal()]).T
+
+    psi11 = 2 * sig2 ** 2 * tr11 + \
+        (mu4 - 3 * sig2 ** 2) * np.dot(vecd1.T, vecd1)
+    psi12 = sig2 ** 2 * tr12
+    psi22 = sig2 ** 2 * tr22
+
+    a1, a2, p = 0., 0., 0.
+
+    if for_omegaOLS:
+        x_s = get_spFilter(w, lambdapar, reg.x)
+        p = la.inv(spdot(x_s.T, x_s) / n)
+
+    if issubclass(type(z_s), np.ndarray) or \
+            issubclass(type(z_s), SP.csr.csr_matrix) or \
+            issubclass(type(z_s), SP.csc.csc_matrix):
+        alpha1 = (-2 / n) * spdot(z_s.T, wA1 * u_s)
+        alpha2 = (-2 / n) * spdot(z_s.T, wA2 * u_s)
+
+        hth = spdot(reg.h.T, reg.h)
+        hthni = la.inv(hth / n)
+        htzsn = spdot(reg.h.T, z_s) / n
+        p = spdot(hthni, htzsn)
+        p = spdot(p, la.inv(spdot(htzsn.T, p)))
+        hp = spdot(reg.h, p)
+        a1 = spdot(hp, alpha1)
+        a2 = spdot(hp, alpha2)
+
+        psi11 = psi11 + \
+            sig2 * spdot(a1.T, a1) + \
+            2 * mu3 * spdot(a1.T, vecd1)
+        psi12 = psi12 + \
+            sig2 * spdot(a1.T, a2) + \
+            mu3 * spdot(a2.T, vecd1)  # 3rd term=0
+        psi22 = psi22 + \
+            sig2 * spdot(a2.T, a2)  # 3rd&4th terms=0 bc vecd2=0
+
+    psi = np.array(
+        [[psi11[0][0], psi12[0][0]], [psi12[0][0], psi22[0][0]]]) / n
+    return psi, a1, a2, p
+
+
+def get_omega_hom(w, wA1, wA2, reg, lamb, G):
+    '''
+    Omega VC matrix for Hom models with endogenous variables computed as in
+    Anselin (2011) [1]_ (p. 21).
+    ...
+
+    Parameters
+    ----------
+    w       :   Sparse matrix
+                Spatial weights sparse matrix
+    wA1     :   Sparse matrix
+                A1 matrix as in Anselin (2011), in sparse form
+    wA2     :   Sparse matrix
+                A2 matrix as in Anselin (2011), in sparse form
+    reg     :   reg
+                Regression object
+    lamb    :   float
+                Spatial parameter estimated in previous step of the
+                procedure
+    G       :   array
+                Matrix 'G' of the moment equation
+
+    Returns
+    -------
+    omega   :   array
+                Omega matrix of VC of the model
+    sig2    :   float
+                Sigma squared of the spatially filtered residuals
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+    '''
+    n = float(w.shape[0])
+    z_s = get_spFilter(w, lamb, reg.z)
+    u_s = get_spFilter(w, lamb, reg.u)
+    sig2 = np.dot(u_s.T, u_s) / n
+    mu3 = np.sum(u_s ** 3) / n
+    vecdA1 = np.array([wA1.diagonal()]).T
+    psi, a1, a2, p = get_vc_hom(w, wA1, wA2, reg, lamb, z_s)
+    j = np.dot(G, np.array([[1.], [2 * lamb]]))
+    psii = la.inv(psi)
+    t2 = spdot(reg.h.T, np.hstack((a1, a2)))
+    psiDL = (mu3 * spdot(reg.h.T, np.hstack((vecdA1, np.zeros((n, 1))))) +
+             sig2 * spdot(reg.h.T, np.hstack((a1, a2)))) / n
+
+    oDD = spdot(la.inv(spdot(reg.h.T, reg.h)), spdot(reg.h.T, z_s))
+    oDD = sig2 * la.inv(spdot(z_s.T, spdot(reg.h, oDD)))
+    oLL = la.inv(spdot(j.T, spdot(psii, j))) / n
+    oDL = spdot(spdot(spdot(p.T, psiDL), spdot(psii, j)), oLL)
+
+    o_upper = np.hstack((oDD, oDL))
+    o_lower = np.hstack((oDL.T, oLL))
+    return np.vstack((o_upper, o_lower)), float(sig2)
+
+
+def get_omega_hom_ols(w, wA1, wA2, reg, lamb, G):
+    '''
+    Omega VC matrix for Hom models without endogenous variables (OLS) computed
+    as in Anselin (2011) [1]_.
+    ...
+
+    Parameters
+    ----------
+    w       :   Sparse matrix
+                Spatial weights sparse matrix
+    wA1     :   Sparse matrix
+                A1 matrix as in Anselin (2011), in sparse form
+    wA2     :   Sparse matrix
+                A2 matrix as in Anselin (2011), in sparse form
+    reg     :   reg
+                Regression object
+    lamb    :   float
+                Spatial parameter estimated in previous step of the
+                procedure
+    G       :   array
+                Matrix 'G' of the moment equation
+
+    Returns
+    -------
+    omega   :   array
+                Omega matrix of VC of the model
+    sig2    :   float
+                Sigma squared of the spatially filtered residuals
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+    '''
+    n = float(w.shape[0])
+    x_s = get_spFilter(w, lamb, reg.x)
+    u_s = get_spFilter(w, lamb, reg.u)
+    sig2 = np.dot(u_s.T, u_s) / n
+    vecdA1 = np.array([wA1.diagonal()]).T
+    psi, a1, a2, p = get_vc_hom(w, wA1, wA2, reg, lamb, for_omegaOLS=True)
+    j = np.dot(G, np.array([[1.], [2 * lamb]]))
+    psii = la.inv(psi)
+
+    oDD = sig2 * la.inv(spdot(x_s.T, x_s))
+    oLL = la.inv(spdot(j.T, spdot(psii, j))) / n
+    #oDL = np.zeros((oDD.shape[0], oLL.shape[1]))
+    mu3 = np.sum(u_s ** 3) / n
+    psiDL = (mu3 * spdot(reg.x.T, np.hstack((vecdA1, np.zeros((n, 1)))))) / n
+    oDL = spdot(spdot(spdot(p.T, psiDL), spdot(psii, j)), oLL)
+
+    o_upper = np.hstack((oDD, oDL))
+    o_lower = np.hstack((oDL.T, oLL))
+    return np.vstack((o_upper, o_lower)), float(sig2)
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+
+    _test()
diff --git a/pysal/spreg/error_sp_hom_regimes.py b/pysal/spreg/error_sp_hom_regimes.py
new file mode 100644
index 0000000..557a69b
--- /dev/null
+++ b/pysal/spreg/error_sp_hom_regimes.py
@@ -0,0 +1,1496 @@
+'''
+Hom family of models with regimes. 
+'''
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu, Daniel Arribas-Bel darribas at asu.edu"
+
+from scipy import sparse as SP
+import numpy as np
+import multiprocessing as mp
+from numpy import linalg as la
+from pysal import lag_spatial
+from utils import power_expansion, set_endog, iter_msg, sp_att
+from utils import get_A1_hom, get_A2_hom, get_A1_het, optim_moments
+from utils import get_spFilter, get_lags, _moments2eqs
+from utils import spdot, RegressionPropsY, set_warn
+from ols import BaseOLS
+from twosls import BaseTSLS
+from error_sp_hom import BaseGM_Error_Hom, BaseGM_Endog_Error_Hom, moments_hom, get_vc_hom, get_omega_hom, get_omega_hom_ols
+import regimes as REGI
+import user_output as USER
+import summary_output as SUMMARY
+from platform import system
+
+
+class GM_Error_Hom_Regimes(RegressionPropsY, REGI.Regimes_Frame):
+
+    '''
+    GMM method for a spatial error model with homoskedasticity, with regimes, 
+    results and diagnostics; based on Drukker et al. (2010) [1]_, following
+    Anselin (2011) [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:                    
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep : boolean
+                   Always False, kept for consistency, ignored.
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    A1           : string
+                   If A1='het', then the matrix A1 is defined as in Arraiz et
+                   al. If A1='hom', then as in Anselin (2011).  If
+                   A1='hom_sc', then as in Drukker, Egger and Prucha (2010)
+                   and Drukker, Prucha and Raciborski (2010).
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    vm           : array
+                   Variance covariance matrix (kxk)
+    sig2         : float
+                   Sigma squared used in computations
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    xtx          : float
+                   X'X
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+    title        : string
+                   Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi: ['one', 'many']
+                   Ignored if regimes=False. Constant option for regimes.
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime
+    cols2regi    : list, 'all'
+                   Ignored if regimes=False. Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all', all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    kr           : int
+                   Number of variables/columns to be "regimized" or subject
+                   to change by regime. These will result in one parameter
+                   estimate by regime for each variable (i.e. nr parameters per
+                   variable)
+    kf           : int
+                   Number of variables/columns to be considered fixed or
+                   global across regimes and hence only obtain one parameter
+                   estimate
+    nr           : int
+                   Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
+    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
+    Disturbances and Endogenous Regressors". Working paper.
+
+    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2','...']
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial error model, we need to specify
+    the spatial weights matrix that includes the spatial configuration of the
+    observations. To do that, we can open an already existing gal file or 
+    create a new one. In this case, we will create one from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Error_Hom_Regimes(y, x, regimes, w=w, name_y=y_var, name_x=x_var, name_ds='NAT')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers an error model that assumes
+    homoskedasticity but that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter. This is why you obtain as many coefficient estimates as
+    standard errors, which you calculate taking the square root of the
+    diagonal of the variance-covariance matrix of the parameters. Alternatively,
+    we can have a summary of the output by typing: model.summary
+
+    >>> print reg.name_x
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', 'lambda']
+
+    >>> print np.around(reg.betas,4)
+    [[ 0.069 ]
+     [ 0.7885]
+     [ 0.5398]
+     [ 5.0948]
+     [ 1.1965]
+     [ 0.6018]
+     [ 0.4104]]
+
+    >>> print np.sqrt(reg.vm.diagonal())
+    [ 0.39105854  0.15664624  0.05254328  0.48379958  0.20018799  0.05834139
+      0.01882401]
+
+    '''
+
+    def __init__(self, y, x, regimes, w,
+                 max_iter=1, epsilon=0.00001, A1='het', cores=False,
+                 constant_regi='many', cols2regi='all', regime_err_sep=False,
+                 regime_lag_sep=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None, name_regimes=None):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.regime_err_sep = regime_err_sep
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.n = n
+        self.y = y
+
+        x_constant = USER.check_constant(x)
+        name_x = USER.set_name_x(name_x, x)
+        self.name_x_r = name_x
+
+        cols2regi = REGI.check_cols2regi(constant_regi, cols2regi, x)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+
+        if regime_err_sep == True:
+            if set(cols2regi) == set([True]):
+                self._error_regimes_multi(y, x, regimes, w, cores,
+                                          max_iter, epsilon, A1, cols2regi, vm, name_x)
+            else:
+                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
+        else:
+            if A1 == 'hom':
+                wA1 = get_A1_hom(w.sparse)
+            elif A1 == 'hom_sc':
+                wA1 = get_A1_hom(w.sparse, scalarKP=True)
+            elif A1 == 'het':
+                wA1 = get_A1_het(w.sparse)
+
+            wA2 = get_A2_hom(w.sparse)
+
+            # 1a. OLS --> \tilde{\delta}
+            self.x, self.name_x = REGI.Regimes_Frame.__init__(self, x_constant,
+                                                              regimes, constant_regi=None, cols2regi=cols2regi, names=name_x)
+            ols = BaseOLS(y=y, x=self.x)
+            self.k = ols.x.shape[1]
+
+            # 1b. GM --> \tilde{\rho}
+            moments = moments_hom(w.sparse, wA1, wA2, ols.u)
+            lambda1 = optim_moments(moments)
+            lambda_old = lambda1
+
+            self.iteration, eps = 0, 1
+            while self.iteration < max_iter and eps > epsilon:
+                # 2a. SWLS --> \hat{\delta}
+                xs = get_spFilter(w, lambda1, x_constant)
+                ys = get_spFilter(w, lambda1, y)
+                xs = REGI.Regimes_Frame.__init__(self, xs,
+                                                 regimes, constant_regi=None, cols2regi=cols2regi)[0]
+                ols_s = BaseOLS(y=ys, x=xs)
+                self.predy = spdot(self.x, ols_s.betas)
+                self.u = self.y - self.predy
+
+                # 2b. GM 2nd iteration --> \hat{\rho}
+                moments = moments_hom(w.sparse, wA1, wA2, self.u)
+                psi = get_vc_hom(w.sparse, wA1, wA2, self, lambda_old)[0]
+                lambda2 = optim_moments(moments, psi)
+                eps = abs(lambda2 - lambda_old)
+                lambda_old = lambda2
+                self.iteration += 1
+
+            self.iter_stop = iter_msg(self.iteration, max_iter)
+
+            # Output
+            self.betas = np.vstack((ols_s.betas, lambda2))
+            self.vm, self.sig2 = get_omega_hom_ols(
+                w.sparse, wA1, wA2, self, lambda2, moments[0])
+            self.e_filtered = self.u - lambda2 * lag_spatial(w, self.u)
+            self.title = "SPATIALLY WEIGHTED LEAST SQUARES (HOM) - REGIMES"
+            self.name_x.append('lambda')
+            self.kf += 1
+            self.chow = REGI.Chow(self)
+            self._cache = {}
+            SUMMARY.GM_Error_Hom(reg=self, w=w, vm=vm, regimes=True)
+
+    def _error_regimes_multi(self, y, x, regimes, w, cores,
+                             max_iter, epsilon, A1, cols2regi, vm, name_x):
+
+        regi_ids = dict(
+            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+        results_p = {}
+        """
+        for r in self.regimes_set:
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work_error(*(y,x,regi_ids,r,w,max_iter,epsilon,A1,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes))
+            else:
+                pool = mp.Pool(cores)
+                results_p[r] = pool.apply_async(_work_error,args=(y,x,regi_ids,r,w,max_iter,epsilon,A1,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes, ))
+                is_win = False
+        """
+        for r in self.regimes_set:
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work_error, args=(
+                    y, x, regi_ids, r, w, max_iter, epsilon, A1, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes, ))
+            else:
+                results_p[r] = _work_error(
+                    *(y, x, regi_ids, r, w, max_iter, epsilon, A1, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes))
+
+        self.kryd = 0
+        self.kr = len(cols2regi) + 1
+        self.kf = 0
+        self.nr = len(self.regimes_set)
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.e_filtered = np.zeros((self.n, 1), float)
+        self.name_y, self.name_x = [], []
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        results = {}
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            counter += 1
+        self.chow = REGI.Chow(self)
+        self.multi = results
+        SUMMARY.GM_Error_Hom_multi(
+            reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Endog_Error_Hom_Regimes(RegressionPropsY, REGI.Regimes_Frame):
+
+    '''
+    GMM method for a spatial error model with homoskedasticity, regimes and
+    endogenous variables.
+    Based on Drukker et al. (2010) [1]_, following Anselin (2011) [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep : boolean
+                   Always False, kept for consistency, ignored.
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    A1           : string
+                   If A1='het', then the matrix A1 is defined as in Arraiz et
+                   al. If A1='hom', then as in Anselin (2011).  If
+                   A1='hom_sc', then as in Drukker, Egger and Prucha (2010)
+                   and Drukker, Prucha and Raciborski (2010).
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2         : float
+                   Sigma squared used in computations
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    hth          : float
+                   H'H
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi         : dictionary
+                    Only available when multiple regressions are estimated,
+                    i.e. when regime_err_sep=True and no variable is fixed
+                    across regimes.
+                    Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
+    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
+    Disturbances and Endogenous Regressors". Working paper.
+
+    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    For the endogenous models, we add the endogenous variable RD90 (resource deprivation)
+    and we decide to instrument for it with FP89 (families below poverty):
+
+    >>> yd_var = ['RD90']
+    >>> yend = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already 
+    existing gal file or create a new one. In this case, we will create one 
+    from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous), the
+    instruments and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> reg = GM_Endog_Error_Hom_Regimes(y, x, yend, q, regimes, w=w, A1='hom_sc', name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT.dbf')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. This class offers an error model that assumes
+    homoskedasticity but that unlike the models from
+    ``pysal.spreg.error_sp``, it allows for inference on the spatial
+    parameter. Hence, we find the same number of betas as of standard errors,
+    which we calculate taking the square root of the diagonal of the
+    variance-covariance matrix. Alternatively, we can have a summary of the
+    output by typing: model.summary
+
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', 'lambda']
+
+    >>> print np.around(reg.betas,4)
+    [[ 3.5973]
+     [ 1.0652]
+     [ 0.1582]
+     [ 9.198 ]
+     [ 1.8809]
+     [-0.2489]
+     [ 2.4616]
+     [ 3.5796]
+     [ 0.2541]]
+
+    >>> print np.around(np.sqrt(reg.vm.diagonal()),4)
+    [ 0.5204  0.1371  0.0629  0.4721  0.1824  0.0725  0.2992  0.2395  0.024 ]
+
+    '''
+
+    def __init__(self, y, x, yend, q, regimes, w,
+                 constant_regi='many', cols2regi='all', regime_err_sep=False,
+                 regime_lag_sep=False,
+                 max_iter=1, epsilon=0.00001, A1='het', cores=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None, name_w=None,
+                 name_ds=None, name_regimes=None, summ=True, add_lag=False):
+        # Estimation entry point; see the class docstring for the public
+        # parameters.  `summ` and `add_lag` are internal switches used when
+        # this constructor is reused by subclasses: summ=False skips name
+        # setup and the summary, add_lag flags the spatial-lag (combo)
+        # variant in the per-regime path.  `regime_lag_sep` is accepted but
+        # ignored here (kept for signature consistency, per the docstring).
+
+        # --- Input validation: array alignment, y shape, weights required ---
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.n = n
+        self.y = y
+
+        name_x = USER.set_name_x(name_x, x)
+        if summ:
+            name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_y = USER.set_name_y(name_y)
+            name_q = USER.set_name_q(name_q, q)
+        # Combined x/yend names before regime expansion.
+        # NOTE(review): if summ=False the caller must supply name_yend —
+        # otherwise the concatenation below fails on None; confirm callers.
+        self.name_x_r = name_x + name_yend
+
+        # Normalize cols2regi into one flag per column of x and yend
+        # (presumably; REGI.check_cols2regi is defined elsewhere).
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, yend=yend)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+
+        if regime_err_sep == True:
+            # Fully separate regression per regime: only valid when every
+            # coefficient varies by regime; delegate to the multi-run path.
+            if set(cols2regi) == set([True]):
+                self._endog_error_regimes_multi(y, x, regimes, w, yend, q, cores,
+                                                max_iter, epsilon, A1, cols2regi, vm,
+                                                name_x, name_yend, name_q, add_lag)
+            else:
+                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
+        else:
+            # Single pooled regression on regime-expanded design matrices.
+            x_constant = USER.check_constant(x)
+            q, name_q = REGI.Regimes_Frame.__init__(self, q,
+                                                    regimes, constant_regi=None, cols2regi='all', names=name_q)
+            x, name_x = REGI.Regimes_Frame.__init__(self, x_constant,
+                                                    regimes, constant_regi=None, cols2regi=cols2regi,
+                                                    names=name_x)
+            yend2, name_yend = REGI.Regimes_Frame.__init__(self, yend,
+                                                           regimes, constant_regi=None,
+                                                           cols2regi=cols2regi, yend=True, names=name_yend)
+
+            # Build the requested A1 moment-matrix variant (per the class
+            # docstring: 'het' Arraiz et al., 'hom' Anselin (2011),
+            # 'hom_sc' Drukker, Egger and Prucha (2010)).
+            if A1 == 'hom':
+                wA1 = get_A1_hom(w.sparse)
+            elif A1 == 'hom_sc':
+                wA1 = get_A1_hom(w.sparse, scalarKP=True)
+            elif A1 == 'het':
+                wA1 = get_A1_het(w.sparse)
+
+            wA2 = get_A2_hom(w.sparse)
+
+            # 1a. S2SLS --> \tilde{\delta}
+            tsls = BaseTSLS(y=y, x=x, yend=yend2, q=q)
+            self.k = tsls.z.shape[1]
+            self.x = tsls.x
+            self.yend, self.z, self.h = tsls.yend, tsls.z, tsls.h
+
+            # 1b. GM --> \tilde{\rho}
+            moments = moments_hom(w.sparse, wA1, wA2, tsls.u)
+            lambda1 = optim_moments(moments)
+            lambda_old = lambda1
+
+            # Iterate steps 2a/2b until the change in lambda drops below
+            # epsilon or max_iter is reached, whichever comes first.
+            self.iteration, eps = 0, 1
+            while self.iteration < max_iter and eps > epsilon:
+                # 2a. GS2SLS --> \hat{\delta}
+                # NOTE(review): the spatial filter always uses lambda1 (the
+                # initial step-1b estimate); iterations > 1 do not refilter
+                # with the updated lambda2 — confirm this is intended.
+                xs = get_spFilter(w, lambda1, x_constant)
+                xs = REGI.Regimes_Frame.__init__(self, xs,
+                                                 regimes, constant_regi=None, cols2regi=cols2regi)[0]
+                ys = get_spFilter(w, lambda1, y)
+                yend_s = get_spFilter(w, lambda1, yend)
+                yend_s = REGI.Regimes_Frame.__init__(self, yend_s,
+                                                     regimes, constant_regi=None, cols2regi=cols2regi,
+                                                     yend=True)[0]
+                tsls_s = BaseTSLS(ys, xs, yend_s, h=tsls.h)
+                self.predy = spdot(self.z, tsls_s.betas)
+                self.u = self.y - self.predy
+
+                # 2b. GM 2nd iteration --> \hat{\rho}
+                moments = moments_hom(w.sparse, wA1, wA2, self.u)
+                psi = get_vc_hom(
+                    w.sparse, wA1, wA2, self, lambda_old, tsls_s.z)[0]
+                lambda2 = optim_moments(moments, psi)
+                eps = abs(lambda2 - lambda_old)
+                lambda_old = lambda2
+                self.iteration += 1
+
+            self.iter_stop = iter_msg(self.iteration, max_iter)
+
+            # Output
+            # Final coefficient vector: regime betas stacked, lambda last.
+            self.betas = np.vstack((tsls_s.betas, lambda2))
+            self.vm, self.sig2 = get_omega_hom(
+                w.sparse, wA1, wA2, self, lambda2, moments[0])
+            # Spatially filtered residuals: u - lambda * W u.
+            self.e_filtered = self.u - lambda2 * lag_spatial(w, self.u)
+            self.name_x = USER.set_name_x(name_x, x, constant=True)
+            self.name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_z = self.name_x + self.name_yend
+            self.name_z.append('lambda')
+            self.name_q = USER.set_name_q(name_q, q)
+            self.name_h = USER.set_name_h(self.name_x, self.name_q)
+            # lambda is estimated globally (not per regime); count it among
+            # the fixed coefficients — presumably for the Chow bookkeeping.
+            self.kf += 1
+            self.chow = REGI.Chow(self)
+            self._cache = {}
+            if summ:
+                self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HOM) - REGIMES"
+                SUMMARY.GM_Endog_Error_Hom(reg=self, w=w, vm=vm, regimes=True)
+
+    def _endog_error_regimes_multi(self, y, x, regimes, w, yend, q, cores,
+                                   max_iter, epsilon, A1, cols2regi, vm,
+                                   name_x, name_yend, name_q, add_lag):
+        # Run one independent endogenous-error regression per regime —
+        # serially, or via multiprocessing when cores is truthy — and stitch
+        # the per-regime results into the pooled attributes (block-diagonal
+        # vm, stacked betas, scattered residuals/predictions).
+
+        # Map each regime label to the row indices of its observations.
+        regi_ids = dict(
+            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+        if add_lag != False:
+            # Spatial-lag (combo) variant: the lag coefficient also varies
+            # by regime, and reduced-form predictions are collected too.
+            self.cols2regi += [True]
+            cols2regi += [True]
+            self.predy_e = np.zeros((self.n, 1), float)
+            self.e_pred = np.zeros((self.n, 1), float)
+        results_p = {}
+        """
+        for r in self.regimes_set:
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work_endog_error(*(y,x,yend,q,regi_ids,r,w,max_iter,epsilon,A1,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag))
+            else:
+                pool = mp.Pool(cores)        
+                results_p[r] = pool.apply_async(_work_endog_error,args=(y,x,yend,q,regi_ids,r,w,max_iter,epsilon,A1,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag, ))
+                is_win = False
+        """
+        # (The string above is dead code from an earlier Windows-specific
+        # dispatch; it was superseded by the `cores` flag used below.)
+        # NOTE(review): with cores=True a new mp.Pool is created on every
+        # loop pass, but only the last pool is closed/joined below — confirm.
+        for r in self.regimes_set:
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work_endog_error, args=(
+                    y, x, yend, q, regi_ids, r, w, max_iter, epsilon, A1, self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag, ))
+            else:
+                results_p[r] = _work_endog_error(*(y, x, yend, q, regi_ids, r, w, max_iter, epsilon, A1,
+                                                   self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag))
+
+        # Global dimensions: kr regime-varying coefficients per regime
+        # (the +1 presumably accounts for lambda); no fixed coefficients.
+        self.kryd, self.kf = 0, 0
+        self.kr = len(cols2regi) + 1
+        self.nr = len(self.regimes_set)
+        # Pre-allocate pooled containers, filled regime by regime below.
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.e_filtered = np.zeros((self.n, 1), float)
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        results = {}
+        self.name_y, self.name_x, self.name_yend, self.name_q, self.name_z, self.name_h = [
+        ], [], [], [], [], []
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            # Collect each regime's result (async handle when cores=True).
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            # Place this regime's vm/betas into its diagonal block / slice,
+            # and scatter residuals and predictions back to the full index.
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            self.name_yend += results[r].name_yend
+            self.name_q += results[r].name_q
+            self.name_z += results[r].name_z
+            self.name_h += results[r].name_h
+            if add_lag != False:
+                self.predy_e[regi_ids[r], ] = results[r].predy_e
+                self.e_pred[regi_ids[r], ] = results[r].e_pred
+            counter += 1
+        self.chow = REGI.Chow(self)
+        self.multi = results
+        # Summary variant depends on whether a spatial lag was included.
+        if add_lag != False:
+            SUMMARY.GM_Combo_Hom_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+        else:
+            SUMMARY.GM_Endog_Error_Hom_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Combo_Hom_Regimes(GM_Endog_Error_Hom_Regimes):
+
+    '''
+    GMM method for a spatial lag and error model with homoskedasticity,
+    regimes and endogenous variables, with results and diagnostics;
+    based on Drukker et al. (2010) [1]_, following Anselin (2011) [2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object (always needed)  
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep   : boolean
+                   If True, the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False (default), 
+                   the spatial parameter is fixed across regimes.
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    max_iter     : int
+                   Maximum number of iterations of steps 2a and 2b from Arraiz
+                   et al. Note: epsilon provides an additional stop condition.
+    epsilon      : float
+                   Minimum change in lambda required to stop iterations of
+                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
+                   an additional stop condition.
+    A1           : string
+                   If A1='het', then the matrix A1 is defined as in Arraiz et
+                   al. If A1='hom', then as in Anselin (2011).  If
+                   A1='hom_sc', then as in Drukker, Egger and Prucha (2010)
+                   and Drukker, Prucha and Raciborski (2010).
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iter_stop    : string
+                   Stop criterion reached during iteration of steps 2a and 2b
+                   from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    iteration    : integer
+                   Number of iterations of steps 2a and 2b from Arraiz et al.
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2         : float
+                   Sigma squared used in computations (based on filtered
+                   residuals)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep    : boolean
+                    If True, the spatial parameter for spatial lag is also
+                    computed according to different regimes. If False (default), 
+                    the spatial parameter is fixed across regimes.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi         : dictionary
+                    Only available when multiple regressions are estimated,
+                    i.e. when regime_err_sep=True and no variable is fixed
+                    across regimes.
+                    Contains all attributes of each individual regression
+
+
+    References
+    ----------
+
+    .. [1] Drukker, D. M., Egger, P., Prucha, I. R. (2010)
+    "On Two-step Estimation of a Spatial Autoregressive Model with Autoregressive
+    Disturbances and Endogenous Regressors". Working paper.
+
+    .. [2] Anselin, L. (2011) "GMM Estimation of Spatial Error Autocorrelation
+    with and without Heteroskedasticity". 
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial combo model, we need to specify
+    the spatial weights matrix that includes the spatial configuration of the
+    observations. To do that, we can open an already existing gal file or 
+    create a new one. In this case, we will create one from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    Example only with spatial lag
+
+    The Combo class runs an SARAR model, that is a spatial lag+error model.
+    In this case we will run a simple version of that, where we have the
+    spatial effects as well as exogenous variables. Since it is a spatial
+    model, we have to pass in the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.  We can have a 
+    summary of the output by typing: model.summary 
+    Alternatively, we can check the betas:
+
+    >>> reg = GM_Combo_Hom_Regimes(y, x, regimes, w=w, A1='hom_sc', name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT')
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '_Global_W_HR90', 'lambda']
+    >>> print np.around(reg.betas,4)
+    [[ 1.4607]
+     [ 0.9579]
+     [ 0.5658]
+     [ 9.1129]
+     [ 1.1339]
+     [ 0.6517]
+     [-0.4583]
+     [ 0.6634]]
+
+    This class also allows the user to run a spatial lag+error model with the
+    extra feature of including non-spatial endogenous regressors. This means
+    that, in addition to the spatial lag and error, we consider some of the
+    variables on the right-hand side of the equation as endogenous and we
+    instrument for this. In this case we consider RD90 (resource deprivation)
+    as an endogenous regressor.  We use FP89 (families below poverty)
+    for this and hence put it in the instruments parameter, 'q'.
+
+    >>> yd_var = ['RD90']
+    >>> yd = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    And then we can run and explore the model analogously to the previous combo:
+
+    >>> reg = GM_Combo_Hom_Regimes(y, x, regimes, yd, q, w=w, A1='hom_sc', name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT')
+    >>> print reg.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', '_Global_W_HR90', 'lambda']
+    >>> print reg.betas
+    [[ 3.4196478 ]
+     [ 1.04065595]
+     [ 0.16630304]
+     [ 8.86570777]
+     [ 1.85134286]
+     [-0.24921597]
+     [ 2.43007651]
+     [ 3.61656899]
+     [ 0.03315061]
+     [ 0.22636055]]
+    >>> print np.sqrt(reg.vm.diagonal())
+    [ 0.53989913  0.13506086  0.06143434  0.77049956  0.18089997  0.07246848
+      0.29218837  0.25378655  0.06184801  0.06323236]
+    >>> print 'lambda: ', np.around(reg.betas[-1], 4)
+    lambda:  [ 0.2264]
+
+    '''
+
    def __init__(self, y, x, regimes, yend=None, q=None,
                 w=None, w_lags=1, lag_q=True, cores=False,
                 max_iter=1, epsilon=0.00001, A1='het',
                 constant_regi='many', cols2regi='all',
                 regime_err_sep=False, regime_lag_sep=False,
                 vm=False, name_y=None, name_x=None,
                 name_yend=None, name_q=None,
                 name_w=None, name_ds=None, name_regimes=None):
        """Set up and estimate the GM combo (lag + error, hom) model with
        regimes; see the class docstring for parameter descriptions."""

        # Basic input checks; a weights matrix is mandatory for this model.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        name_x = USER.set_name_x(name_x, x, constant=True)
        self.name_y = USER.set_name_y(name_y)
        name_yend = USER.set_name_yend(name_yend, yend)
        name_q = USER.set_name_q(name_q, q)
        # Append names for the spatially lagged instruments to name_q.
        name_q.extend(
            USER.set_name_q_sp(name_x, w_lags, name_q, lag_q, force_all=True))

        # One boolean per column (constant excluded via add_cons=False):
        # True = coefficient varies by regime, False = held global.
        cols2regi = REGI.check_cols2regi(
            constant_regi, cols2regi, x, yend=yend, add_cons=False)
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        USER.check_regimes(self.regimes_set, n, x.shape[1])
        self.regime_err_sep = regime_err_sep
        self.regime_lag_sep = regime_lag_sep

        if regime_lag_sep == True:
            # A regime-specific spatial lag is only valid when the error
            # process is also regime-specific (separate regressions).
            if regime_err_sep == False:
                raise Exception, "For spatial combo models, if spatial lag is set by regimes (regime_lag_sep=True), spatial error must also be set by regimes (regime_err_sep=True)."
            # Parent class builds the lag term within each regime subsample.
            add_lag = [w_lags, lag_q]
        else:
            # Single global spatial lag: its coefficient is held constant
            # across regimes, hence the extra False appended to cols2regi.
            cols2regi += [False]
            add_lag = False
            if regime_err_sep == True:
                raise Exception, "For spatial combo models, if spatial error is set by regimes (regime_err_sep=True), all coefficients including lambda (regime_lag_sep=True) must be set by regimes."
            # Create W*y as an extra endogenous regressor plus instruments.
            yend, q = set_endog(y, x, w, yend, q, w_lags, lag_q)
        name_yend.append(USER.set_name_yend_sp(self.name_y))

        # Delegate estimation to the endogenous-error parent; summ=False
        # defers the summary until the lag attributes are attached below.
        GM_Endog_Error_Hom_Regimes.__init__(self, y=y, x=x, yend=yend,
                                            q=q, regimes=regimes, w=w, vm=vm, constant_regi=constant_regi,
                                            cols2regi=cols2regi, regime_err_sep=regime_err_sep,
                                            max_iter=max_iter, epsilon=epsilon, A1=A1, cores=cores,
                                            name_y=self.name_y, name_x=name_x, name_yend=name_yend,
                                            name_q=name_q, name_w=name_w, name_ds=name_ds,
                                            name_regimes=name_regimes, summ=False, add_lag=add_lag)

        if regime_err_sep != True:
            # betas is ordered [..., rho, lambda] (see name_z in the class
            # docstring), so the lag coefficient rho is second to last.
            self.rho = self.betas[-2]
            # Reduced-form predicted values and prediction errors.
            self.predy_e, self.e_pred, warn = sp_att(w, self.y,
                                                     self.predy, yend[:, -1].reshape(self.n, 1), self.rho)
            set_warn(self, warn)
            self.regime_lag_sep = regime_lag_sep
            self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HOM) - REGIMES"
            SUMMARY.GM_Combo_Hom(reg=self, w=w, vm=vm, regimes=True)
+
+
def _work_error(y, x, regi_ids, r, w, max_iter, epsilon, A1, name_ds, name_y, name_x, name_w, name_regimes):
    """Estimate a homoskedastic GM error model on the observations of a
    single regime r and label the result's names with the regime prefix.
    Returns the fitted BaseGM_Error_Hom instance."""
    ids = regi_ids[r]
    # Restrict the weights object to the regime's observations
    # (row-standardized again via transform=True).
    sub_w, warn_msg = REGI.w_regime(w, ids, r, transform=True)
    sub_x = USER.check_constant(x[ids])
    model = BaseGM_Error_Hom(
        y[ids], sub_x, sub_w.sparse, max_iter=max_iter, epsilon=epsilon, A1=A1)
    set_warn(model, warn_msg)
    model.w = sub_w
    model.title = "SPATIALLY WEIGHTED LEAST SQUARES ESTIMATION (HOM) - REGIME %s" % r
    # Tag variable names with the regime identifier for the output.
    prefix = str(r) + '_'
    model.name_ds = name_ds
    model.name_y = prefix + name_y
    model.name_x = [prefix + i for i in name_x]
    model.name_w = name_w
    model.name_regimes = name_regimes
    return model
+
+
def _work_endog_error(y, x, yend, q, regi_ids, r, w, max_iter, epsilon, A1, name_ds, name_y, name_x, name_yend, name_q, name_w, name_regimes, add_lag):
    """Estimate a homoskedastic GM endogenous-error model on the
    observations of a single regime r, optionally adding a within-regime
    spatial lag term (add_lag = [w_lags, lag_q]); returns the fitted
    BaseGM_Endog_Error_Hom instance with regime-prefixed names."""
    ids = regi_ids[r]
    sub_w, warn_msg = REGI.w_regime(w, ids, r, transform=True)
    sub_y = y[ids]
    sub_x = x[ids]
    if yend is not None:
        sub_yend, sub_q = yend[ids], q[ids]
    else:
        sub_yend, sub_q = None, None
    if add_lag is not False:
        # Build W*y and the lagged instruments inside this regime only.
        sub_yend, sub_q = set_endog(
            sub_y, sub_x, sub_w, sub_yend, sub_q, add_lag[0], add_lag[1])
    model = BaseGM_Endog_Error_Hom(
        sub_y, USER.check_constant(sub_x), sub_yend, sub_q, sub_w.sparse,
        max_iter=max_iter, epsilon=epsilon, A1=A1)
    set_warn(model, warn_msg)
    if add_lag is not False:
        # With a lag term, betas is [..., rho, lambda]: rho is second to last.
        model.rho = model.betas[-2]
        model.predy_e, model.e_pred, warn_msg = sp_att(
            sub_w, model.y, model.predy,
            model.yend[:, -1].reshape(model.n, 1), model.rho)
        set_warn(model, warn_msg)
    model.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES (HOM) - REGIME %s" % r
    model.name_ds = name_ds
    # Tag all variable names with the regime identifier for the output.
    prefix = str(r) + '_'
    model.name_y = prefix + name_y
    model.name_x = [prefix + i for i in name_x]
    model.name_yend = [prefix + i for i in name_yend]
    model.name_z = model.name_x + model.name_yend + ['lambda']
    model.name_q = [prefix + i for i in name_q]
    model.name_h = model.name_x + model.name_q
    model.name_w = name_w
    model.name_regimes = name_regimes
    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/error_sp_regimes.py b/pysal/spreg/error_sp_regimes.py
new file mode 100644
index 0000000..d5f78f2
--- /dev/null
+++ b/pysal/spreg/error_sp_regimes.py
@@ -0,0 +1,1374 @@
+"""
+Spatial Error Models with regimes module
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu"
+
+import numpy as np
+import multiprocessing as mp
+import regimes as REGI
+import user_output as USER
+import summary_output as SUMMARY
+from pysal import lag_spatial
+from ols import BaseOLS
+from twosls import BaseTSLS
+from error_sp import BaseGM_Error, BaseGM_Endog_Error, _momentsGM_Error
+from utils import set_endog, iter_msg, sp_att, set_warn
+from utils import optim_moments, get_spFilter, get_lags
+from utils import spdot, RegressionPropsY
+from platform import system
+
+
class GM_Error_Regimes(RegressionPropsY, REGI.Regimes_Frame):

    """
    GMM method for a spatial error model with regimes, with results and diagnostics;
    based on Kelejian and Prucha (1998, 1999)[1]_ [2]_.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    w            : pysal W object
                   Spatial weights object
    constant_regi: ['one', 'many']
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime (default)
    cols2regi    : list, 'all'
                   Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all' (default), all the variables vary by regime.
    regime_err_sep : boolean
                   If True, a separate regression is run for each regime.
    regime_lag_sep : boolean
                   Always False, kept for consistency, ignored.
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    cores        : boolean
                   Specifies if multiprocessing is to be used
                   Default: no multiprocessing, cores = False
                   Note: Multiprocessing may not work on all platforms.
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regime variable for use in the output


    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    e_filtered   : array
                   nx1 array of spatially filtered residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    vm           : array
                   Variance covariance matrix (kxk)
    sig2         : float
                   Sigma squared used in computations
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    std_err      : array
                   1xk array of standard errors of the betas
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regime variable for use in the output
    title        : string
                   Name of the regression method used
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: ['one', 'many']
                   Ignored if regimes=False. Constant option for regimes.
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime
    cols2regi    : list, 'all'
                   Ignored if regimes=False. Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all', all the variables vary by regime.
    regime_err_sep : boolean
                   If True, a separate regression is run for each regime.
    kr           : int
                   Number of variables/columns to be "regimized" or subject
                   to change by regime. These will result in one parameter
                   estimate by regime for each variable (i.e. nr parameters per
                   variable)
    kf           : int
                   Number of variables/columns to be considered fixed or
                   global across regimes and hence only obtain one parameter
                   estimate
    nr           : int
                   Number of different regimes in the 'regimes' list
    multi        : dictionary
                   Only available when multiple regressions are estimated,
                   i.e. when regime_err_sep=True and no variable is fixed
                   across regimes.
                   Contains all attributes of each individual regression

    References
    ----------

    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
    two-stage least squares procedure for estimating a spatial autoregressive
    model with autoregressive disturbances". The Journal of Real Estate
    Finance and Economics, 17, 1.

    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
    Estimator for the Autoregressive Parameter in a Spatial Model".
    International Economic Review, 40, 2.

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import pysal
    >>> import numpy as np

    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
    This is the DBF associated with the NAT shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')

    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y_var = 'HR90'
    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)

    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
    the DBF to be used as independent variables in the regression. Other variables
    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
    Note that PySAL requires this to be an nxj numpy array, where j is the
    number of independent variables (not including a constant). By default
    this model adds a vector of ones to the independent variables passed in.

    >>> x_var = ['PS90','UE90']
    >>> x = np.array([db.by_col(name) for name in x_var]).T

    The different regimes in this data are given according to the North and
    South dummy (SOUTH).

    >>> r_var = 'SOUTH'
    >>> regimes = db.by_col(r_var)

    Since we want to run a spatial error model, we need to specify
    the spatial weights matrix that includes the spatial configuration of the
    observations. To do that, we can open an already existing gal file or
    create a new one. In this case, we will create one from ``NAT.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We are all set with the preliminaries, we are good to run the model. In this
    case, we will need the variables and the weights matrix. If we want to
    have the names of the variables printed in the output summary, we will
    have to pass them in as well, although this is optional.

    >>> model = GM_Error_Regimes(y, x, regimes, w=w, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT.dbf')

    Once we have run the model, we can explore a little bit the output. The
    regression object we have created has many attributes so take your time to
    discover them. Note that because we are running the classical GMM error
    model from 1998/99, the spatial parameter is obtained as a point estimate, so
    although you get a value for it (there are seven coefficients under
    model.betas), you cannot perform inference on it (there are only six
    values in model.std_err). Alternatively, we can have a summary of the
    output by typing: model.summary

    >>> print model.name_x
    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', 'lambda']
    >>> np.around(model.betas, decimals=6)
    array([[ 0.074807],
           [ 0.786107],
           [ 0.538849],
           [ 5.103756],
           [ 1.196009],
           [ 0.600533],
           [ 0.364103]])
    >>> np.around(model.std_err, decimals=6)
    array([ 0.379864,  0.152316,  0.051942,  0.471285,  0.19867 ,  0.057252])
    >>> np.around(model.z_stat, decimals=6)
    array([[  0.196932,   0.843881],
           [  5.161042,   0.      ],
           [ 10.37397 ,   0.      ],
           [ 10.829455,   0.      ],
           [  6.02007 ,   0.      ],
           [ 10.489215,   0.      ]])
    >>> np.around(model.sig2, decimals=6)
    28.172732

    """

    def __init__(self, y, x, regimes, w,
                 vm=False, name_y=None, name_x=None, name_w=None,
                 constant_regi='many', cols2regi='all', regime_err_sep=False,
                 regime_lag_sep=False,
                 cores=False, name_ds=None, name_regimes=None):

        # Basic input checks; a weights matrix is mandatory for this model.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        self.constant_regi = constant_regi
        self.cols2regi = cols2regi
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_w = USER.set_name_w(name_w, w)
        self.name_regimes = USER.set_name_ds(name_regimes)
        self.n = n
        self.y = y

        x_constant = USER.check_constant(x)
        name_x = USER.set_name_x(name_x, x)
        self.name_x_r = name_x

        cols2regi = REGI.check_cols2regi(constant_regi, cols2regi, x)
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
        self.regime_err_sep = regime_err_sep
        if regime_err_sep == True:
            # Separate regressions are only valid when every coefficient
            # varies by regime.
            if set(cols2regi) == set([True]):
                self._error_regimes_multi(y, x, regimes, w, cores,
                                          cols2regi, vm, name_x)
            else:
                raise Exception("All coefficients must vary accross regimes if regime_err_sep = True.")
        else:
            # Single regression: expand x into regime-specific columns.
            self.x, self.name_x = REGI.Regimes_Frame.__init__(self, x_constant,
                                                              regimes, constant_regi=None, cols2regi=cols2regi, names=name_x)
            # Step 1: OLS residuals feed the GM moments for lambda.
            ols = BaseOLS(y=y, x=self.x)
            self.k = ols.x.shape[1]
            moments = _momentsGM_Error(w, ols.u)
            lambda1 = optim_moments(moments)
            # Step 2: spatially filter x and y with the estimated lambda
            # and re-run OLS on the filtered (FGLS) data.
            xs = get_spFilter(w, lambda1, x_constant)
            ys = get_spFilter(w, lambda1, y)
            xs = REGI.Regimes_Frame.__init__(self, xs,
                                             regimes, constant_regi=None, cols2regi=cols2regi)[0]
            ols2 = BaseOLS(y=ys, x=xs)

            # Output: predictions use the unfiltered x with filtered betas;
            # lambda is stacked as the last element of betas.
            self.predy = spdot(self.x, ols2.betas)
            self.u = y - self.predy
            self.betas = np.vstack((ols2.betas, np.array([[lambda1]])))
            self.sig2 = ols2.sig2n
            self.e_filtered = self.u - lambda1 * lag_spatial(w, self.u)
            self.vm = self.sig2 * ols2.xtxi
            self.title = "SPATIALLY WEIGHTED LEAST SQUARES - REGIMES"
            self.name_x.append('lambda')
            self.kf += 1  # lambda counts as one extra global parameter
            self.chow = REGI.Chow(self)
            self._cache = {}
            SUMMARY.GM_Error(reg=self, w=w, vm=vm, regimes=True)

    def _error_regimes_multi(self, y, x, regimes, w, cores,
                             cols2regi, vm, name_x):
        """Run one separate GM error regression per regime (optionally in a
        multiprocessing pool) and assemble the joint output arrays."""
        regi_ids = dict(
            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
        results_p = {}
        # Bug fix: create ONE worker pool up front instead of a new
        # mp.Pool per regime inside the loop -- the per-regime pools were
        # never closed (only the last one was), leaking worker processes.
        pool = mp.Pool(None) if cores else None
        for r in self.regimes_set:
            if pool is not None:
                results_p[r] = pool.apply_async(_work_error, args=(
                    y, x, regi_ids, r, w, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes, ))
            else:
                results_p[r] = _work_error(
                    *(y, x, regi_ids, r, w, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes))

        self.kryd = 0
        self.kr = len(cols2regi)
        self.kf = 0
        self.nr = len(self.regimes_set)
        # Joint (block-diagonal) outputs filled per regime below.
        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
        self.betas = np.zeros((self.nr * (self.kr + 1), 1), float)
        self.u = np.zeros((self.n, 1), float)
        self.predy = np.zeros((self.n, 1), float)
        self.e_filtered = np.zeros((self.n, 1), float)
        if pool is not None:
            pool.close()
            pool.join()

        results = {}
        self.name_y, self.name_x = [], []
        counter = 0
        for r in self.regimes_set:
            if pool is None:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()

            self.vm[(counter * self.kr):((counter + 1) * self.kr),
                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
            self.betas[
                (counter * (self.kr + 1)):((counter + 1) * (self.kr + 1)), ] = results[r].betas
            self.u[regi_ids[r], ] = results[r].u
            self.predy[regi_ids[r], ] = results[r].predy
            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
            # NOTE(review): results[r].name_y is a string, so `list += str`
            # extends character-by-character -- looks suspicious, but kept
            # as-is to preserve behavior; confirm against SUMMARY usage.
            self.name_y += results[r].name_y
            self.name_x += results[r].name_x
            counter += 1
        self.chow = REGI.Chow(self)
        self.multi = results
        SUMMARY.GM_Error_multi(
            reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Endog_Error_Regimes(RegressionPropsY, REGI.Regimes_Frame):
+
+    '''
+    GMM method for a spatial error model with regimes and endogenous variables, with
+    results and diagnostics; based on Kelejian and Prucha (1998, 1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep : boolean
+                   Always False, kept for consistency, ignored.
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2         : float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+                   Sigma squared used in computations
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in ouput
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+    two-stage least squares procedure for estimating a spatial autoregressive
+    model with autoregressive disturbances". The Journal of Real State
+    Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+    Estimator for the Autoregressive Parameter in a Spatial Model".
+    International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import pysal
+    >>> import numpy as np
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2','...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    For the endogenous models, we add the endogenous variable RD90 (resource deprivation)
+    and we decide to instrument for it with FP89 (families below poverty):
+
+    >>> yd_var = ['RD90']
+    >>> yend = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already 
+    existing gal file or create a new one. In this case, we will create one 
+    from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous), the
+    instruments and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> model = GM_Endog_Error_Regimes(y, x, yend, q, regimes, w=w, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT.dbf')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. Note that because we are running the classical GMM error
+    model from 1998/99, the spatial parameter is obtained as a point estimate, so
+    although you get a value for it (there are for coefficients under
+    model.betas), you cannot perform inference on it (there are only three
+    values in model.se_betas). Also, this regression uses a two stage least
+    squares estimation method that accounts for the endogeneity created by the
+    endogenous variables included. Alternatively, we can have a summary of the
+    output by typing: model.summary
+
+    >>> print model.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', 'lambda']
+    >>> np.around(model.betas, decimals=5)
+    array([[ 3.59718],
+           [ 1.0652 ],
+           [ 0.15822],
+           [ 9.19754],
+           [ 1.88082],
+           [-0.24878],
+           [ 2.46161],
+           [ 3.57943],
+           [ 0.25564]])
+    >>> np.around(model.std_err, decimals=6)
+    array([ 0.522633,  0.137555,  0.063054,  0.473654,  0.18335 ,  0.072786,
+            0.300711,  0.240413])
+
+    '''
+
+    def __init__(self, y, x, yend, q, regimes, w, cores=False,
+                 vm=False, constant_regi='many', cols2regi='all',
+                 regime_err_sep=False, regime_lag_sep=False, name_y=None,
+                 name_x=None, name_yend=None, name_q=None, name_w=None,
+                 name_ds=None, name_regimes=None, summ=True, add_lag=False):
+
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.n = n
+        self.y = y
+
+        name_x = USER.set_name_x(name_x, x)
+        if summ:
+            name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_y = USER.set_name_y(name_y)
+            name_q = USER.set_name_q(name_q, q)
+        self.name_x_r = name_x + name_yend
+
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, yend=yend)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+
+        if regime_err_sep == True:
+            if set(cols2regi) == set([True]):
+                self._endog_error_regimes_multi(y, x, regimes, w, yend, q, cores,
+                                                cols2regi, vm, name_x, name_yend, name_q, add_lag)
+            else:
+                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
+        else:
+            x_constant = USER.check_constant(x)
+            q, name_q = REGI.Regimes_Frame.__init__(self, q,
+                                                    regimes, constant_regi=None, cols2regi='all', names=name_q)
+            x, name_x = REGI.Regimes_Frame.__init__(self, x_constant,
+                                                    regimes, constant_regi=None, cols2regi=cols2regi,
+                                                    names=name_x)
+            yend2, name_yend = REGI.Regimes_Frame.__init__(self, yend,
+                                                           regimes, constant_regi=None,
+                                                           cols2regi=cols2regi, yend=True, names=name_yend)
+
+            tsls = BaseTSLS(y=y, x=x, yend=yend2, q=q)
+            self.k = tsls.z.shape[1]
+            self.x = tsls.x
+            self.yend, self.z = tsls.yend, tsls.z
+            moments = _momentsGM_Error(w, tsls.u)
+            lambda1 = optim_moments(moments)
+            xs = get_spFilter(w, lambda1, x_constant)
+            xs = REGI.Regimes_Frame.__init__(self, xs,
+                                             regimes, constant_regi=None, cols2regi=cols2regi)[0]
+            ys = get_spFilter(w, lambda1, y)
+            yend_s = get_spFilter(w, lambda1, yend)
+            yend_s = REGI.Regimes_Frame.__init__(self, yend_s,
+                                                 regimes, constant_regi=None, cols2regi=cols2regi,
+                                                 yend=True)[0]
+            tsls2 = BaseTSLS(ys, xs, yend_s, h=tsls.h)
+
+            # Output
+            self.betas = np.vstack((tsls2.betas, np.array([[lambda1]])))
+            self.predy = spdot(tsls.z, tsls2.betas)
+            self.u = y - self.predy
+            self.sig2 = float(np.dot(tsls2.u.T, tsls2.u)) / self.n
+            self.e_filtered = self.u - lambda1 * lag_spatial(w, self.u)
+            self.vm = self.sig2 * tsls2.varb
+            self.name_x = USER.set_name_x(name_x, x, constant=True)
+            self.name_yend = USER.set_name_yend(name_yend, yend)
+            self.name_z = self.name_x + self.name_yend
+            self.name_z.append('lambda')
+            self.name_q = USER.set_name_q(name_q, q)
+            self.name_h = USER.set_name_h(self.name_x, self.name_q)
+            self.kf += 1
+            self.chow = REGI.Chow(self)
+            self._cache = {}
+            if summ:
+                self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES - REGIMES"
+                SUMMARY.GM_Endog_Error(reg=self, w=w, vm=vm, regimes=True)
+
+    def _endog_error_regimes_multi(self, y, x, regimes, w, yend, q, cores,
+                                   cols2regi, vm, name_x, name_yend, name_q, add_lag):
+
+        regi_ids = dict(
+            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+        if add_lag != False:
+            self.cols2regi += [True]
+            cols2regi += [True]
+            self.predy_e = np.zeros((self.n, 1), float)
+            self.e_pred = np.zeros((self.n, 1), float)
+        results_p = {}
+        for r in self.regimes_set:
+            """
+            if system() == 'Windows':
+                results_p[r] = _work_endog_error(*(y,x,yend,q,regi_ids,r,w,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag))
+                is_win = True
+            else:
+                pool = mp.Pool(cores)        
+                results_p[r] = pool.apply_async(_work_endog_error,args=(y,x,yend,q,regi_ids,r,w,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes,add_lag, ))
+                is_win = False
+            """
+        for r in self.regimes_set:
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work_endog_error, args=(
+                    y, x, yend, q, regi_ids, r, w, self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag, ))
+            else:
+                results_p[r] = _work_endog_error(
+                    *(y, x, yend, q, regi_ids, r, w, self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes, add_lag))
+
+        self.kryd, self.kf = 0, 0
+        self.kr = len(cols2regi)
+        self.nr = len(self.regimes_set)
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * (self.kr + 1), 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.e_filtered = np.zeros((self.n, 1), float)
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        results = {}
+        self.name_y, self.name_x, self.name_yend, self.name_q, self.name_z, self.name_h = [
+        ], [], [], [], [], []
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * (self.kr + 1)):((counter + 1) * (self.kr + 1)), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            self.name_yend += results[r].name_yend
+            self.name_q += results[r].name_q
+            self.name_z += results[r].name_z
+            self.name_h += results[r].name_h
+            if add_lag != False:
+                self.predy_e[regi_ids[r], ] = results[r].predy_e
+                self.e_pred[regi_ids[r], ] = results[r].e_pred
+            counter += 1
+        self.chow = REGI.Chow(self)
+        self.multi = results
+        if add_lag != False:
+            SUMMARY.GM_Combo_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+        else:
+            SUMMARY.GM_Endog_Error_multi(
+                reg=self, multireg=self.multi, vm=vm, regimes=True)
+
+
+class GM_Combo_Regimes(GM_Endog_Error_Regimes, REGI.Regimes_Frame):
+
+    """
+    GMM method for a spatial lag and error model with regimes and endogenous
+    variables, with results and diagnostics; based on Kelejian and Prucha (1998,
+    1999)[1]_[2]_.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object (always needed)   
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep   : boolean
+                   If True, the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False (default), 
+                   the spatial parameter is fixed accross regimes.
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2         : float
+                   Sigma squared used in computations (based on filtered
+                   residuals)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_yend     : list of strings
+                    Names of endogenous variables for use in output
+    name_z        : list of strings
+                    Names of exogenous and endogenous variables for use in 
+                    output
+    name_q        : list of strings
+                    Names of external instruments
+    name_h        : list of strings
+                    Names of all instruments used in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes  : string
+                    Name of regimes variable for use in output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes       : list
+                    List of n values with the mapping of each
+                    observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi : ['one', 'many']
+                    Ignored if regimes=False. Constant option for regimes.
+                    Switcher controlling the constant term setup. It may take
+                    the following values:
+                      *  'one': a vector of ones is appended to x and held
+                                constant across regimes
+                      * 'many': a vector of ones is appended to x and considered
+                                different per regime
+    cols2regi     : list, 'all'
+                    Ignored if regimes=False. Argument indicating whether each
+                    column of x should be considered as different per regime
+                    or held constant across regimes (False).
+                    If a list, k booleans indicating for each variable the
+                    option (True if one per regime, False to be held constant).
+                    If 'all', all the variables vary by regime.
+    regime_err_sep  : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep    : boolean
+                    If True, the spatial parameter for spatial lag is also
+                    computed according to different regimes. If False (default), 
+                    the spatial parameter is fixed across regimes.
+    kr            : int
+                    Number of variables/columns to be "regimized" or subject
+                    to change by regime. These will result in one parameter
+                    estimate by regime for each variable (i.e. nr parameters per
+                    variable)
+    kf            : int
+                    Number of variables/columns to be considered fixed or
+                    global across regimes and hence only obtain one parameter
+                    estimate
+    nr            : int
+                    Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Kelejian, H.R., Prucha, I.R. (1998) "A generalized spatial
+    two-stage least squares procedure for estimating a spatial autoregressive
+    model with autoregressive disturbances". The Journal of Real Estate
+    Finance and Economics, 17, 1.
+
+    .. [2] Kelejian, H.R., Prucha, I.R. (1999) "A Generalized Moments
+    Estimator for the Autoregressive Parameter in a Spatial Model".
+    International Economic Review, 40, 2.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial lag model, we need to specify
+    the spatial weights matrix that includes the spatial configuration of the
+    observations. To do that, we can open an already existing gal file or 
+    create a new one. In this case, we will create one from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    The Combo class runs an SARAR model, that is a spatial lag+error model.
+    In this case we will run a simple version of that, where we have the
+    spatial effects as well as exogenous variables. Since it is a spatial
+    model, we have to pass in the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> model = GM_Combo_Regimes(y, x, regimes, w=w, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them. Note that because we are running the classical GMM error
+    model from 1998/99, the spatial parameter is obtained as a point estimate, so
+    although you get a value for it (there are four coefficients under
+    model.betas), you cannot perform inference on it (there are only three
+    values in model.se_betas). Also, this regression uses a two stage least
+    squares estimation method that accounts for the endogeneity created by the
+    spatial lag of the dependent variable. We can have a summary of the
+    output by typing: model.summary 
+    Alternatively, we can check the betas:
+
+    >>> print model.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '_Global_W_HR90', 'lambda']
+    >>> print np.around(model.betas,4)
+    [[ 1.4607]
+     [ 0.958 ]
+     [ 0.5658]
+     [ 9.113 ]
+     [ 1.1338]
+     [ 0.6517]
+     [-0.4583]
+     [ 0.6136]]
+
+    And lambda:
+
+    >>> print 'lambda: ', np.around(model.betas[-1], 4)
+    lambda:  [ 0.6136]
+
+    This class also allows the user to run a spatial lag+error model with the
+    extra feature of including non-spatial endogenous regressors. This means
+    that, in addition to the spatial lag and error, we consider some of the
+    variables on the right-hand side of the equation as endogenous and we
+    instrument for this. In this case we consider RD90 (resource deprivation)
+    as an endogenous regressor.  We use FP89 (families below poverty)
+    for this and hence put it in the instruments parameter, 'q'.
+
+    >>> yd_var = ['RD90']
+    >>> yd = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    And then we can run and explore the model analogously to the previous combo:
+
+    >>> model = GM_Combo_Regimes(y, x, regimes, yd, q, w=w, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT')
+    >>> print model.name_z
+    ['0_CONSTANT', '0_PS90', '0_UE90', '1_CONSTANT', '1_PS90', '1_UE90', '0_RD90', '1_RD90', '_Global_W_HR90', 'lambda']
+    >>> print model.betas
+    [[ 3.41963782]
+     [ 1.04065841]
+     [ 0.16634393]
+     [ 8.86544628]
+     [ 1.85120528]
+     [-0.24908469]
+     [ 2.43014046]
+     [ 3.61645481]
+     [ 0.03308671]
+     [ 0.18684992]]
+    >>> print np.sqrt(model.vm.diagonal())
+    [ 0.53067577  0.13271426  0.06058025  0.76406411  0.17969783  0.07167421
+      0.28943121  0.25308326  0.06126529]
+    >>> print 'lambda: ', np.around(model.betas[-1], 4)
+    lambda:  [ 0.1868]
+    """
+
+    def __init__(self, y, x, regimes, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True, cores=False,
+                 constant_regi='many', cols2regi='all',
+                 regime_err_sep=False, regime_lag_sep=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_ds=None, name_regimes=None):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        name_x = USER.set_name_x(name_x, x, constant=True)
+        self.name_y = USER.set_name_y(name_y)
+        name_yend = USER.set_name_yend(name_yend, yend)
+        name_q = USER.set_name_q(name_q, q)
+        name_q.extend(
+            USER.set_name_q_sp(name_x, w_lags, name_q, lag_q, force_all=True))
+
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, yend=yend, add_cons=False)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+        self.regime_lag_sep = regime_lag_sep
+
+        if regime_lag_sep == True:
+            if regime_err_sep == False:
+                raise Exception, "For spatial combo models, if spatial lag is set by regimes (regime_lag_sep=True), spatial error must also be set by regimes (regime_err_sep=True)."
+            add_lag = [w_lags, lag_q]
+        else:
+            if regime_err_sep == True:
+                raise Exception, "For spatial combo models, if spatial error is set by regimes (regime_err_sep=True), all coefficients including lambda (regime_lag_sep=True) must be set by regimes."
+            cols2regi += [False]
+            add_lag = False
+            yend, q = set_endog(y, x, w, yend, q, w_lags, lag_q)
+        name_yend.append(USER.set_name_yend_sp(self.name_y))
+
+        GM_Endog_Error_Regimes.__init__(self, y=y, x=x, yend=yend,
+                                        q=q, regimes=regimes, w=w, vm=vm, constant_regi=constant_regi,
+                                        cols2regi=cols2regi, regime_err_sep=regime_err_sep, cores=cores,
+                                        name_y=self.name_y, name_x=name_x,
+                                        name_yend=name_yend, name_q=name_q, name_w=name_w,
+                                        name_ds=name_ds, name_regimes=name_regimes, summ=False, add_lag=add_lag)
+
+        if regime_err_sep != True:
+            self.rho = self.betas[-2]
+            self.predy_e, self.e_pred, warn = sp_att(w, self.y,
+                                                     self.predy, yend[:, -1].reshape(self.n, 1), self.rho)
+            set_warn(self, warn)
+            self.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES - REGIMES"
+            SUMMARY.GM_Combo(reg=self, w=w, vm=vm, regimes=True)
+
+
+def _work_error(y, x, regi_ids, r, w, name_ds, name_y, name_x, name_w, name_regimes):
+    w_r, warn = REGI.w_regime(w, regi_ids[r], r, transform=True)
+    y_r = y[regi_ids[r]]
+    x_r = x[regi_ids[r]]
+    x_constant = USER.check_constant(x_r)
+    model = BaseGM_Error(y_r, x_constant, w_r.sparse)
+    set_warn(model, warn)
+    model.w = w_r
+    model.title = "SPATIALLY WEIGHTED LEAST SQUARES ESTIMATION - REGIME %s" % r
+    model.name_ds = name_ds
+    model.name_y = '%s_%s' % (str(r), name_y)
+    model.name_x = ['%s_%s' % (str(r), i) for i in name_x]
+    model.name_w = name_w
+    model.name_regimes = name_regimes
+    return model
+
+
+def _work_endog_error(y, x, yend, q, regi_ids, r, w, name_ds, name_y, name_x, name_yend, name_q, name_w, name_regimes, add_lag):
+    w_r, warn = REGI.w_regime(w, regi_ids[r], r, transform=True)
+    y_r = y[regi_ids[r]]
+    x_r = x[regi_ids[r]]
+    if yend != None:
+        yend_r = yend[regi_ids[r]]
+        q_r = q[regi_ids[r]]
+    else:
+        yend_r, q_r = None, None
+    if add_lag != False:
+        yend_r, q_r = set_endog(
+            y_r, x_r, w_r, yend_r, q_r, add_lag[0], add_lag[1])
+    x_constant = USER.check_constant(x_r)
+    model = BaseGM_Endog_Error(y_r, x_constant, yend_r, q_r, w_r.sparse)
+    set_warn(model, warn)
+    if add_lag != False:
+        model.rho = model.betas[-2]
+        model.predy_e, model.e_pred, warn = sp_att(w_r, model.y,
+                                                   model.predy, model.yend[:, -1].reshape(model.n, 1), model.rho)
+        set_warn(model, warn)
+    model.w = w_r
+    model.title = "SPATIALLY WEIGHTED TWO STAGE LEAST SQUARES - REGIME %s" % r
+    model.name_ds = name_ds
+    model.name_y = '%s_%s' % (str(r), name_y)
+    model.name_x = ['%s_%s' % (str(r), i) for i in name_x]
+    model.name_yend = ['%s_%s' % (str(r), i) for i in name_yend]
+    model.name_z = model.name_x + model.name_yend + ['lambda']
+    model.name_q = ['%s_%s' % (str(r), i) for i in name_q]
+    model.name_h = model.name_x + model.name_q
+    model.name_w = name_w
+    model.name_regimes = name_regimes
+    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+
+    _test()
+    import pysal
+    import numpy as np
+    dbf = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
+    y = np.array([dbf.by_col('CRIME')]).T
+    names_to_extract = ['INC']
+    x = np.array([dbf.by_col(name) for name in names_to_extract]).T
+    yd_var = ['HOVAL']
+    yend = np.array([dbf.by_col(name) for name in yd_var]).T
+    q_var = ['DISCBD']
+    q = np.array([dbf.by_col(name) for name in q_var]).T
+    regimes = regimes = dbf.by_col('NSA')
+    w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
+    w.transform = 'r'
+    model = GM_Error_Regimes(y, x, regimes=regimes, w=w, name_y='crime', name_x=[
+                             'income'], name_regimes='nsa', name_ds='columbus', regime_err_sep=True)
+    print model.summary
diff --git a/pysal/spreg/ml_error.py b/pysal/spreg/ml_error.py
new file mode 100644
index 0000000..b111cdf
--- /dev/null
+++ b/pysal/spreg/ml_error.py
@@ -0,0 +1,511 @@
+"""
+ML Estimation of Spatial Error Model
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Serge Rey srey at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+import pysal as ps
+from pysal.spreg.utils import RegressionPropsY, RegressionPropsVM
+import diagnostics as DIAG
+import user_output as USER
+import summary_output as SUMMARY
+import regimes as REGI
+from w_utils import symmetrize
+try:
+    from scipy.optimize import minimize_scalar
+    minimize_scalar_available = True
+except ImportError:
+    minimize_scalar_available = False
+
+__all__ = ["ML_Error"]
+
+
+class BaseML_Error(RegressionPropsY, RegressionPropsVM, REGI.Regimes_Frame):
+
+    """
+    ML estimation of the spatial error model (note no consistency
+    checks, diagnostics or constants added); Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : Sparse matrix
+                   Spatial weights sparse matrix
+    method       : string
+                   if 'full', brute force calculation (full matrix expressions)
+    epsilon      : float
+                   tolerance criterion in minimize_scalar function and inverse_product
+    regimes_att  : dictionary
+                   Dictionary containing elements to be used in case of a regimes model,
+                   i.e. 'x' before regimes, 'regimes' list and 'cols2regi'
+
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    lam          : float
+                   estimate of spatial autoregressive coefficient
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant, excluding lambda)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    method       : string
+                   log Jacobian method
+                   if 'full': brute force (full matrix computations)
+                   if 'ord' : Ord eigenvalue method
+    epsilon      : float
+                   tolerance criterion used in minimize_scalar function and inverse_product
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (k+1 x k+1) - includes lambda
+    vm1          : array
+                   2x2 array of variance covariance for lambda, sigma
+    sig2         : float
+                   Sigma squared used in computations
+    logll        : float
+                   maximized log-likelihood (including constant terms)
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal as ps
+    >>> np.set_printoptions(suppress=True) #prevent scientific format
+    >>> db = ps.open(ps.examples.get_path("south.dbf"),'r')
+    >>> y_name = "HR90"
+    >>> y = np.array(db.by_col(y_name))
+    >>> y.shape = (len(y),1)
+    >>> x_names = ["RD90","PS90","UE90","DV90"]
+    >>> x = np.array([db.by_col(var) for var in x_names]).T
+    >>> x = np.hstack((np.ones((len(y),1)),x))
+    >>> ww = ps.open(ps.examples.get_path("south_q.gal"))
+    >>> w = ww.read()
+    >>> ww.close()
+    >>> w.transform = 'r'
+    >>> mlerr = BaseML_Error(y,x,w) #doctest: +SKIP
+    >>> "{0:.6f}".format(mlerr.lam) #doctest: +SKIP
+    '0.299078'
+    >>> np.around(mlerr.betas, decimals=4) #doctest: +SKIP
+    array([[ 6.1492],
+           [ 4.4024],
+           [ 1.7784],
+           [-0.3781],
+           [ 0.4858],
+           [ 0.2991]])
+    >>> "{0:.6f}".format(mlerr.mean_y) #doctest: +SKIP
+    '9.549293'
+    >>> "{0:.6f}".format(mlerr.std_y) #doctest: +SKIP
+    '7.038851'
+    >>> np.diag(mlerr.vm) #doctest: +SKIP
+    array([ 1.06476526,  0.05548248,  0.04544514,  0.00614425,  0.01481356,
+            0.00143001])
+    >>> "{0:.6f}".format(mlerr.sig2[0][0]) #doctest: +SKIP
+    '32.406854'
+    >>> "{0:.6f}".format(mlerr.logll) #doctest: +SKIP
+    '-4471.407067'
+    >>> mlerr1 = BaseML_Error(y,x,w,method='ord') #doctest: +SKIP
+    >>> "{0:.6f}".format(mlerr1.lam) #doctest: +SKIP
+    '0.299078'
+    >>> np.around(mlerr1.betas, decimals=4) #doctest: +SKIP
+    array([[ 6.1492],
+           [ 4.4024],
+           [ 1.7784],
+           [-0.3781],
+           [ 0.4858],
+           [ 0.2991]])
+    >>> "{0:.6f}".format(mlerr1.mean_y) #doctest: +SKIP
+    '9.549293'
+    >>> "{0:.6f}".format(mlerr1.std_y) #doctest: +SKIP
+    '7.038851'
+    >>> np.around(np.diag(mlerr1.vm), decimals=4) #doctest: +SKIP
+    array([ 1.0648,  0.0555,  0.0454,  0.0061,  0.0148,  0.0014])
+    >>> "{0:.4f}".format(mlerr1.sig2[0][0]) #doctest: +SKIP
+    '32.4069'
+    >>> "{0:.4f}".format(mlerr1.logll) #doctest: +SKIP
+    '-4471.4071'
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    """
+
+    def __init__(self, y, x, w, method='full', epsilon=0.0000001, regimes_att=None):
+        # set up main regression variables and spatial filters
+        self.y = y
+        if regimes_att:
+            self.x = x.toarray()
+        else:
+            self.x = x
+        self.n, self.k = self.x.shape
+        self.method = method
+        self.epsilon = epsilon
+        W = w.full()[0]
+
+        ylag = ps.lag_spatial(w, self.y)
+        xlag = self.get_x_lag(w, regimes_att)
+
+        # call minimizer using concentrated log-likelihood to get lambda
+        methodML = method.upper()
+        if methodML in ['FULL', 'ORD']:
+            if methodML == 'FULL':
+                res = minimize_scalar(err_c_loglik, 0.0, bounds=(-1.0, 1.0),
+                                      args=(self.n, self.y, ylag, self.x,
+                                            xlag, W), method='bounded',
+                                      tol=epsilon)
+            elif methodML == 'ORD':
+                # check on symmetry structure
+                if w.asymmetry(intrinsic=False) == []:
+                    ww = symmetrize(w)
+                    WW = ww.todense()
+                    evals = la.eigvalsh(WW)
+                else:
+                    evals = la.eigvals(W)
+                res = minimize_scalar(
+                    err_c_loglik_ord, 0.0, bounds=(-1.0, 1.0),
+                    args=(self.n, self.y, ylag, self.x,
+                          xlag, evals), method='bounded',
+                    tol=epsilon)
+        else:
+            raise Exception, "{0} is an unsupported method".format(method)
+
+        self.lam = res.x
+
+        # compute full log-likelihood, including constants
+        ln2pi = np.log(2.0 * np.pi)
+        llik = -res.fun - self.n / 2.0 * ln2pi - self.n / 2.0
+
+        self.logll = llik
+
+        # b, residuals and predicted values
+
+        ys = self.y - self.lam * ylag
+        xs = self.x - self.lam * xlag
+        xsxs = np.dot(xs.T, xs)
+        xsxsi = np.linalg.inv(xsxs)
+        xsys = np.dot(xs.T, ys)
+        b = np.dot(xsxsi, xsys)
+
+        self.betas = np.vstack((b, self.lam))
+
+        self.u = y - np.dot(self.x, b)
+        self.predy = self.y - self.u
+
+        # residual variance
+
+        self.e_filtered = self.u - self.lam * ps.lag_spatial(w, self.u)
+        self.sig2 = np.dot(self.e_filtered.T, self.e_filtered) / self.n
+
+        # variance-covariance matrix betas
+
+        varb = self.sig2 * xsxsi
+
+        # variance-covariance matrix lambda, sigma
+
+        a = -self.lam * W
+        np.fill_diagonal(a, 1.0)
+        ai = la.inv(a)
+        wai = np.dot(W, ai)
+        tr1 = np.trace(wai)
+
+        wai2 = np.dot(wai, wai)
+        tr2 = np.trace(wai2)
+
+        waiTwai = np.dot(wai.T, wai)
+        tr3 = np.trace(waiTwai)
+
+        v1 = np.vstack((tr2 + tr3,
+                        tr1 / self.sig2))
+        v2 = np.vstack((tr1 / self.sig2,
+                        self.n / (2.0 * self.sig2 ** 2)))
+
+        v = np.hstack((v1, v2))
+
+        self.vm1 = np.linalg.inv(v)
+
+        # create variance matrix for beta, lambda
+        vv = np.hstack((varb, np.zeros((self.k, 1))))
+        vv1 = np.hstack(
+            (np.zeros((1, self.k)), self.vm1[0, 0] * np.ones((1, 1))))
+
+        self.vm = np.vstack((vv, vv1))
+
+        self._cache = {}
+
+    def get_x_lag(self, w, regimes_att):
+        if regimes_att:
+            xlag = ps.lag_spatial(w, regimes_att['x'])
+            xlag = REGI.Regimes_Frame.__init__(self, xlag,
+                                               regimes_att['regimes'], constant_regi=None, cols2regi=regimes_att['cols2regi'])[0]
+            xlag = xlag.toarray()
+        else:
+            xlag = ps.lag_spatial(w, self.x)
+        return xlag
+
+
+class ML_Error(BaseML_Error):
+
+    """
+    ML estimation of the spatial error model with all results and diagnostics;
+    Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : Sparse matrix
+                   Spatial weights sparse matrix
+    method       : string
+                   if 'full', brute force calculation (full matrix expressions)
+                   if 'ord', Ord eigenvalue method
+    epsilon      : float
+                   tolerance criterion in minimize_scalar function and inverse_product
+    spat_diag    : boolean
+                   if True, include spatial diagnostics
+    vm           : boolean
+                   if True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    betas        : array
+                   (k+1)x1 array of estimated coefficients (lambda last)
+    lam          : float
+                   estimate of spatial autoregressive coefficient
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant, excluding lambda)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    method       : string
+                   log Jacobian method
+                   if 'full': brute force (full matrix computations)
+    epsilon      : float
+                   tolerance criterion used in minimize_scalar function and inverse_product
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    varb         : array
+                   Variance covariance matrix (k+1 x k+1) - includes var(lambda)
+    vm1          : array
+                   variance covariance matrix for lambda, sigma (2 x 2)
+    sig2         : float
+                   Sigma squared used in computations
+    logll        : float
+                   maximized log-likelihood (including constant terms)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    utu          : float
+                   Sum of squared residuals
+    std_err      : array
+                   1xk array of standard errors of the betas
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal as ps
+    >>> np.set_printoptions(suppress=True)  #prevent scientific format
+    >>> db = ps.open(ps.examples.get_path("south.dbf"),'r')
+    >>> ds_name = "south.dbf"
+    >>> y_name = "HR90"
+    >>> y = np.array(db.by_col(y_name))
+    >>> y.shape = (len(y),1)
+    >>> x_names = ["RD90","PS90","UE90","DV90"]
+    >>> x = np.array([db.by_col(var) for var in x_names]).T
+    >>> ww = ps.open(ps.examples.get_path("south_q.gal"))
+    >>> w = ww.read()
+    >>> ww.close()
+    >>> w_name = "south_q.gal"
+    >>> w.transform = 'r'
+    >>> mlerr = ML_Error(y,x,w,name_y=y_name,name_x=x_names,\
+               name_w=w_name,name_ds=ds_name) #doctest: +SKIP
+    >>> np.around(mlerr.betas, decimals=4) #doctest: +SKIP
+    array([[ 6.1492],
+           [ 4.4024],
+           [ 1.7784],
+           [-0.3781],
+           [ 0.4858],
+           [ 0.2991]])
+    >>> "{0:.4f}".format(mlerr.lam) #doctest: +SKIP
+    '0.2991'
+    >>> "{0:.4f}".format(mlerr.mean_y) #doctest: +SKIP
+    '9.5493'
+    >>> "{0:.4f}".format(mlerr.std_y) #doctest: +SKIP
+    '7.0389'
+    >>> np.around(np.diag(mlerr.vm), decimals=4) #doctest: +SKIP
+    array([ 1.0648,  0.0555,  0.0454,  0.0061,  0.0148,  0.0014])
+    >>> np.around(mlerr.sig2, decimals=4) #doctest: +SKIP
+    array([[ 32.4069]])
+    >>> "{0:.4f}".format(mlerr.logll) #doctest: +SKIP
+    '-4471.4071'
+    >>> "{0:.4f}".format(mlerr.aic) #doctest: +SKIP
+    '8952.8141'
+    >>> "{0:.4f}".format(mlerr.schwarz) #doctest: +SKIP
+    '8979.0779'
+    >>> "{0:.4f}".format(mlerr.pr2) #doctest: +SKIP
+    '0.3058'
+    >>> "{0:.4f}".format(mlerr.utu) #doctest: +SKIP
+    '48534.9148'
+    >>> np.around(mlerr.std_err, decimals=4) #doctest: +SKIP
+    array([ 1.0319,  0.2355,  0.2132,  0.0784,  0.1217,  0.0378])
+    >>> np.around(mlerr.z_stat, decimals=4) #doctest: +SKIP
+    array([[  5.9593,   0.    ],
+           [ 18.6902,   0.    ],
+           [  8.3422,   0.    ],
+           [ -4.8233,   0.    ],
+           [  3.9913,   0.0001],
+           [  7.9089,   0.    ]])
+    >>> mlerr.name_y #doctest: +SKIP
+    'HR90'
+    >>> mlerr.name_x #doctest: +SKIP
+    ['CONSTANT', 'RD90', 'PS90', 'UE90', 'DV90', 'lambda']
+    >>> mlerr.name_w #doctest: +SKIP
+    'south_q.gal'
+    >>> mlerr.name_ds #doctest: +SKIP
+    'south.dbf'
+    >>> mlerr.title #doctest: +SKIP
+    'MAXIMUM LIKELIHOOD SPATIAL ERROR (METHOD = FULL)'
+
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    """
+
+    def __init__(self, y, x, w, method='full', epsilon=0.0000001,
+                 spat_diag=False, vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None):
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        x_constant = USER.check_constant(x)
+        method = method.upper()
+        if method in ['FULL', 'ORD']:
+            BaseML_Error.__init__(self, y=y, x=x_constant,
+                                  w=w, method=method, epsilon=epsilon)
+            self.title = "MAXIMUM LIKELIHOOD SPATIAL ERROR" + \
+                " (METHOD = " + method + ")"
+            self.name_ds = USER.set_name_ds(name_ds)
+            self.name_y = USER.set_name_y(name_y)
+            self.name_x = USER.set_name_x(name_x, x)
+            self.name_x.append('lambda')
+            self.name_w = USER.set_name_w(name_w, w)
+            self.aic = DIAG.akaike(reg=self)
+            self.schwarz = DIAG.schwarz(reg=self)
+            SUMMARY.ML_Error(reg=self, w=w, vm=vm, spat_diag=spat_diag)
+        else:
+            raise Exception, "{0} is an unsupported method".format(method)
+
+
+def err_c_loglik(lam, n, y, ylag, x, xlag, W):
+    # concentrated log-lik for error model, no constants, brute force
+    ys = y - lam * ylag
+    xs = x - lam * xlag
+    ysys = np.dot(ys.T, ys)
+    xsxs = np.dot(xs.T, xs)
+    xsxsi = np.linalg.inv(xsxs)
+    xsys = np.dot(xs.T, ys)
+    x1 = np.dot(xsxsi, xsys)
+    x2 = np.dot(xsys.T, x1)
+    ee = ysys - x2
+    sig2 = ee[0][0] / n
+    nlsig2 = (n / 2.0) * np.log(sig2)
+    a = -lam * W
+    np.fill_diagonal(a, 1.0)
+    jacob = np.log(np.linalg.det(a))
+    # this is the negative of the concentrated log lik for minimization
+    clik = nlsig2 - jacob
+    return clik
+
+
+def err_c_loglik_ord(lam, n, y, ylag, x, xlag, evals):
+    # concentrated log-lik for error model, no constants, brute force
+    ys = y - lam * ylag
+    xs = x - lam * xlag
+    ysys = np.dot(ys.T, ys)
+    xsxs = np.dot(xs.T, xs)
+    xsxsi = np.linalg.inv(xsxs)
+    xsys = np.dot(xs.T, ys)
+    x1 = np.dot(xsxsi, xsys)
+    x2 = np.dot(xsys.T, x1)
+    ee = ysys - x2
+    sig2 = ee[0][0] / n
+    nlsig2 = (n / 2.0) * np.log(sig2)
+    revals = lam * evals
+    jacob = np.log(1 - revals).sum()
+    if isinstance(jacob, complex):
+        jacob = jacob.real
+    # this is the negative of the concentrated log lik for minimization
+    clik = nlsig2 - jacob
+    return clik
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
diff --git a/pysal/spreg/ml_error_regimes.py b/pysal/spreg/ml_error_regimes.py
new file mode 100644
index 0000000..fb7f4da
--- /dev/null
+++ b/pysal/spreg/ml_error_regimes.py
@@ -0,0 +1,462 @@
+"""
+ML Estimation of Spatial Error Model
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu"
+
+import pysal
+import numpy as np
+import multiprocessing as mp
+import regimes as REGI
+import user_output as USER
+import summary_output as SUMMARY
+import diagnostics as DIAG
+from utils import set_warn
+from ml_error import BaseML_Error
+from platform import system
+
+__all__ = ["ML_Error_Regimes"]
+
+
+class ML_Error_Regimes(BaseML_Error, REGI.Regimes_Frame):
+
+    """
+    ML estimation of the spatial error model with regimes (note no consistency 
+    checks, diagnostics or constants added); Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    w            : Sparse matrix
+                   Spatial weights sparse matrix 
+    method       : string
+                   if 'full', brute force calculation (full matrix expressions)
+    epsilon      : float
+                   tolerance criterion in minimize_scalar function and inverse_product
+    regime_err_sep : boolean
+                   If True, a separate regression is run for each regime.
+    regime_lag_sep : boolean
+                   Always False, kept for consistency in function call, ignored.
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    spat_diag    : boolean
+                   if True, include spatial diagnostics
+    vm           : boolean
+                   if True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regimes variable for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   (k+1)x1 array of estimated coefficients (lambda last)
+    lam          : float
+                   estimate of spatial autoregressive coefficient
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    u            : array
+                   nx1 array of residuals
+    e_filtered   : array
+                   nx1 array of spatially filtered residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant, excluding the rho)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    method       : string
+                   log Jacobian method
+                   if 'full': brute force (full matrix computations)
+    epsilon      : float
+                   tolerance criterion used in minimize_scalar function and inverse_product
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (k+1 x k+1), all coefficients
+    vm1          : array
+                   variance covariance matrix for lambda, sigma (2 x 2)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2         : float
+                   Sigma squared used in computations
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    logll        : float
+                   maximized log-likelihood (including constant terms)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regimes variable for use in output
+    title        : string
+                   Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi: ['one', 'many']
+                   Ignored if regimes=False. Constant option for regimes.
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime
+    cols2regi    : list, 'all'
+                   Ignored if regimes=False. Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all', all the variables vary by regime.
+    regime_lag_sep   : boolean
+                   If True, the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False (default), 
+                   the spatial parameter is fixed across regimes.
+    kr           : int
+                   Number of variables/columns to be "regimized" or subject
+                   to change by regime. These will result in one parameter
+                   estimate by regime for each variable (i.e. nr parameters per
+                   variable)
+    kf           : int
+                   Number of variables/columns to be considered fixed or
+                   global across regimes and hence only obtain one parameter
+                   estimate
+    nr           : int
+                   Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    Examples
+    --------
+
+    Open data baltim.dbf using pysal and create the variables matrices and weights matrix.
+
+    >>> import numpy as np
+    >>> import pysal as ps
+    >>> db =  ps.open(ps.examples.get_path("baltim.dbf"),'r')
+    >>> ds_name = "baltim.dbf"
+    >>> y_name = "PRICE"
+    >>> y = np.array(db.by_col(y_name)).T
+    >>> y.shape = (len(y),1)
+    >>> x_names = ["NROOM","AGE","SQFT"]
+    >>> x = np.array([db.by_col(var) for var in x_names]).T
+    >>> ww = ps.open(ps.examples.get_path("baltim_q.gal"))
+    >>> w = ww.read()
+    >>> ww.close()
+    >>> w_name = "baltim_q.gal"
+    >>> w.transform = 'r'    
+
+    Since in this example we are interested in checking whether the results vary
+    by regimes, we use CITCOU to define whether the location is in the city or 
+    outside the city (in the county):
+
+    >>> regimes = db.by_col("CITCOU")
+
+    Now we can run the regression with all parameters:
+
+    >>> mlerr = ML_Error_Regimes(y,x,regimes,w=w,name_y=y_name,name_x=x_names,\
+               name_w=w_name,name_ds=ds_name,name_regimes="CITCOU")
+    >>> np.around(mlerr.betas, decimals=4)
+    array([[ -2.3949],
+           [  4.8738],
+           [ -0.0291],
+           [  0.3328],
+           [ 31.7962],
+           [  2.981 ],
+           [ -0.2371],
+           [  0.8058],
+           [  0.6177]])
+    >>> "{0:.6f}".format(mlerr.lam)
+    '0.617707'
+    >>> "{0:.6f}".format(mlerr.mean_y)
+    '44.307180'
+    >>> "{0:.6f}".format(mlerr.std_y)
+    '23.606077'
+    >>> np.around(mlerr.vm1, decimals=4)
+    array([[   0.005 ,   -0.3535],
+           [  -0.3535,  441.3039]])
+    >>> np.around(np.diag(mlerr.vm), decimals=4)
+    array([ 58.5055,   2.4295,   0.0072,   0.0639,  80.5925,   3.161 ,
+             0.012 ,   0.0499,   0.005 ])
+    >>> np.around(mlerr.sig2, decimals=4)
+    array([[ 209.6064]])
+    >>> "{0:.6f}".format(mlerr.logll)
+    '-870.333106'
+    >>> "{0:.6f}".format(mlerr.aic)
+    '1756.666212'
+    >>> "{0:.6f}".format(mlerr.schwarz)
+    '1783.481077'
+    >>> mlerr.title
+    'MAXIMUM LIKELIHOOD SPATIAL ERROR - REGIMES (METHOD = full)'
+    """
+
+    def __init__(self, y, x, regimes, w=None, constant_regi='many',
+                 cols2regi='all', method='full', epsilon=0.0000001,
+                 regime_err_sep=False, regime_lag_sep=False, cores=False, spat_diag=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None, name_regimes=None):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.regime_err_sep = regime_err_sep
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.n = n
+        self.y = y
+
+        x_constant = USER.check_constant(x)
+        name_x = USER.set_name_x(name_x, x)
+        self.name_x_r = name_x
+
+        cols2regi = REGI.check_cols2regi(constant_regi, cols2regi, x)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        self.regime_err_sep = regime_err_sep
+
+        if regime_err_sep == True:
+            if set(cols2regi) == set([True]):
+                self._error_regimes_multi(y, x, regimes, w, cores,
+                                          method, epsilon, cols2regi, vm, name_x, spat_diag)
+            else:
+                raise Exception, "All coefficients must vary accross regimes if regime_err_sep = True."
+        else:
+            regimes_att = {}
+            regimes_att['x'] = x_constant
+            regimes_att['regimes'] = regimes
+            regimes_att['cols2regi'] = cols2regi
+            x, name_x = REGI.Regimes_Frame.__init__(self, x_constant,
+                                                    regimes, constant_regi=None, cols2regi=cols2regi,
+                                                    names=name_x)
+
+            BaseML_Error.__init__(
+                self, y=y, x=x, w=w, method=method, epsilon=epsilon, regimes_att=regimes_att)
+
+            self.title = "MAXIMUM LIKELIHOOD SPATIAL ERROR - REGIMES" + \
+                " (METHOD = " + method + ")"
+            self.name_x = USER.set_name_x(name_x, x, constant=True)
+            self.name_x.append('lambda')
+            self.kf += 1  # Adding a fixed k to account for lambda.
+            self.chow = REGI.Chow(self)
+            self.aic = DIAG.akaike(reg=self)
+            self.schwarz = DIAG.schwarz(reg=self)
+            self._cache = {}
+            SUMMARY.ML_Error(
+                reg=self, w=w, vm=vm, spat_diag=spat_diag, regimes=True)
+
+    def _error_regimes_multi(self, y, x, regimes, w, cores,
+                             method, epsilon, cols2regi, vm, name_x, spat_diag):
+
+        regi_ids = dict(
+            (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+        results_p = {}
+        """
+        for r in self.regimes_set:
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work_error(*(y,x,regi_ids,r,w,method,epsilon,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes))
+            else:
+                pool = mp.Pool(cores)
+                results_p[r] = pool.apply_async(_work_error,args=(y,x,regi_ids,r,w,method,epsilon,self.name_ds,self.name_y,name_x+['lambda'],self.name_w,self.name_regimes, ))
+                is_win = False
+        """
+        for r in self.regimes_set:
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work_error, args=(
+                    y, x, regi_ids, r, w, method, epsilon, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes, ))
+            else:
+                results_p[r] = _work_error(
+                    *(y, x, regi_ids, r, w, method, epsilon, self.name_ds, self.name_y, name_x + ['lambda'], self.name_w, self.name_regimes))
+
+        self.kryd = 0
+        self.kr = len(cols2regi) + 1
+        self.kf = 0
+        self.nr = len(self.regimes_set)
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.e_filtered = np.zeros((self.n, 1), float)
+        self.name_y, self.name_x = [], []
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        results = {}
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.e_filtered[regi_ids[r], ] = results[r].e_filtered
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            counter += 1
+        self.chow = REGI.Chow(self)
+        self.multi = results
+        SUMMARY.ML_Error_multi(
+            reg=self, multireg=self.multi, vm=vm, spat_diag=spat_diag, regimes=True, w=w)
+
+
def _work_error(y, x, regi_ids, r, w, method, epsilon, name_ds, name_y, name_x, name_w, name_regimes):
    """Fit a BaseML_Error regression on the observations of regime r,
    attach regime-prefixed names and information criteria, and return it."""
    obs = regi_ids[r]
    w_regi, warn = REGI.w_regime(w, obs, r, transform=True)
    xc = USER.check_constant(x[obs])
    reg = BaseML_Error(
        y=y[obs], x=xc, w=w_regi, method=method, epsilon=epsilon)
    set_warn(reg, warn)
    reg.w = w_regi
    tag = str(r)
    reg.title = ("MAXIMUM LIKELIHOOD SPATIAL ERROR - REGIME "
                 + tag + " (METHOD = " + method + ")")
    reg.name_ds = name_ds
    reg.name_y = tag + '_' + name_y
    reg.name_x = [tag + '_' + i for i in name_x]
    reg.name_w = name_w
    reg.name_regimes = name_regimes
    reg.aic = DIAG.akaike(reg=reg)
    reg.schwarz = DIAG.schwarz(reg=reg)
    return reg
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
if __name__ == "__main__":
    _test()
    import numpy as np
    import pysal as ps

    # Demo run: Baltimore house-price data (PRICE on structural
    # characteristics) with queen-contiguity weights.
    db = ps.open(ps.examples.get_path("baltim.dbf"), 'r')
    ds_name = "baltim.dbf"
    y_name = "PRICE"
    y = np.array(db.by_col(y_name)).T
    y.shape = (len(y), 1)
    x_names = ["NROOM", "NBATH", "PATIO", "FIREPL",
               "AC", "GAR", "AGE", "LOTSZ", "SQFT"]
    x = np.array([db.by_col(var) for var in x_names]).T
    ww = ps.open(ps.examples.get_path("baltim_q.gal"))
    w = ww.read()
    ww.close()
    w_name = "baltim_q.gal"
    w.transform = 'r'

    # split the observations into two regimes on the Y coordinate
    # (544.5 is the chosen cut line for this demo)
    regimes = []
    y_coord = np.array(db.by_col("Y"))
    for i in y_coord:
        if i > 544.5:
            regimes.append("North")
        else:
            regimes.append("South")

    # pooled regimes estimation (regime_err_sep=False) and summary print-out
    mlerror = ML_Error_Regimes(y, x, regimes, w=w, method='full', name_y=y_name,
                               name_x=x_names, name_w=w_name, name_ds=ds_name, regime_err_sep=False,
                               name_regimes="North")
    print mlerror.summary
diff --git a/pysal/spreg/ml_lag.py b/pysal/spreg/ml_lag.py
new file mode 100644
index 0000000..7aac794
--- /dev/null
+++ b/pysal/spreg/ml_lag.py
@@ -0,0 +1,593 @@
+"""
+ML Estimation of Spatial Lag Model
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Serge Rey srey at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+import pysal as ps
+from pysal.spreg.utils import RegressionPropsY, RegressionPropsVM, inverse_prod
+from utils import spdot
+import diagnostics as DIAG
+import user_output as USER
+import summary_output as SUMMARY
+from w_utils import symmetrize
+try:
+    from scipy.optimize import minimize_scalar
+    minimize_scalar_available = True
+except ImportError:
+    minimize_scalar_available = False
+
+__all__ = ["ML_Lag"]
+
+
+class BaseML_Lag(RegressionPropsY, RegressionPropsVM):
+
+    """
+    ML estimation of the spatial lag model (note no consistency
+    checks, diagnostics or constants added); Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : pysal W object
+                   Spatial weights object
+    method       : string
+                   if 'full', brute force calculation (full matrix expressions)
+                   if 'ord', Ord eigenvalue method
+    epsilon      : float
+                   tolerance criterion in minimize_scalar function and inverse_product
+
+    Attributes
+    ----------
+    betas        : array
+                   (k+1)x1 array of estimated coefficients (rho first)
+    rho          : float
+                   estimate of spatial autoregressive coefficient
+    u            : array
+                   nx1 array of residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant, excluding the rho)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    method       : string
+                   log Jacobian method
+                   if 'full': brute force (full matrix computations)
+                   if 'ord' : Ord eigenvalue method
+    epsilon      : float
+                   tolerance criterion used in minimize_scalar function and inverse_product
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (k+1 x k+1)
+    vm1          : array
+                   Variance covariance matrix (k+2 x k+2) includes sigma2
+    sig2         : float
+                   Sigma squared used in computations
+    logll        : float
+                   maximized log-likelihood (including constant terms)
+    predy_e      : array
+                   predicted values from reduced form
+    e_pred       : array
+                   prediction errors using reduced form predicted values
+
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal as ps
+    >>> db =  ps.open(ps.examples.get_path("baltim.dbf"),'r')
+    >>> ds_name = "baltim.dbf"
+    >>> y_name = "PRICE"
+    >>> y = np.array(db.by_col(y_name)).T
+    >>> y.shape = (len(y),1)
+    >>> x_names = ["NROOM","NBATH","PATIO","FIREPL","AC","GAR","AGE","LOTSZ","SQFT"]
+    >>> x = np.array([db.by_col(var) for var in x_names]).T
+    >>> x = np.hstack((np.ones((len(y),1)),x))
+    >>> ww = ps.open(ps.examples.get_path("baltim_q.gal"))
+    >>> w = ww.read()
+    >>> ww.close()
+    >>> w.transform = 'r'
+    >>> w_name = "baltim_q.gal"
+    >>> mllag = BaseML_Lag(y,x,w,method='ord') #doctest: +SKIP
+    >>> "{0:.6f}".format(mllag.rho) #doctest: +SKIP
+    '0.425885'
+    >>> np.around(mllag.betas, decimals=4) #doctest: +SKIP
+    array([[ 4.3675],
+           [ 0.7502],
+           [ 5.6116],
+           [ 7.0497],
+           [ 7.7246],
+           [ 6.1231],
+           [ 4.6375],
+           [-0.1107],
+           [ 0.0679],
+           [ 0.0794],
+           [ 0.4259]])
+    >>> "{0:.6f}".format(mllag.mean_y) #doctest: +SKIP
+    '44.307180'
+    >>> "{0:.6f}".format(mllag.std_y) #doctest: +SKIP
+    '23.606077'
+    >>> np.around(np.diag(mllag.vm1), decimals=4) #doctest: +SKIP
+    array([  23.8716,    1.1222,    3.0593,    7.3416,    5.6695,    5.4698,
+              2.8684,    0.0026,    0.0002,    0.0266,    0.0032,  220.1292])
+    >>> np.around(np.diag(mllag.vm), decimals=4) #doctest: +SKIP
+    array([ 23.8716,   1.1222,   3.0593,   7.3416,   5.6695,   5.4698,
+             2.8684,   0.0026,   0.0002,   0.0266,   0.0032])
+    >>> "{0:.6f}".format(mllag.sig2) #doctest: +SKIP
+    '151.458698'
+    >>> "{0:.6f}".format(mllag.logll) #doctest: +SKIP
+    '-832.937174'
+    >>> mllag = BaseML_Lag(y,x,w) #doctest: +SKIP
+    >>> "{0:.6f}".format(mllag.rho) #doctest: +SKIP
+    '0.425885'
+    >>> np.around(mllag.betas, decimals=4) #doctest: +SKIP
+    array([[ 4.3675],
+           [ 0.7502],
+           [ 5.6116],
+           [ 7.0497],
+           [ 7.7246],
+           [ 6.1231],
+           [ 4.6375],
+           [-0.1107],
+           [ 0.0679],
+           [ 0.0794],
+           [ 0.4259]])
+    >>> "{0:.6f}".format(mllag.mean_y) #doctest: +SKIP
+    '44.307180'
+    >>> "{0:.6f}".format(mllag.std_y) #doctest: +SKIP
+    '23.606077'
+    >>> np.around(np.diag(mllag.vm1), decimals=4) #doctest: +SKIP
+    array([  23.8716,    1.1222,    3.0593,    7.3416,    5.6695,    5.4698,
+              2.8684,    0.0026,    0.0002,    0.0266,    0.0032,  220.1292])
+    >>> np.around(np.diag(mllag.vm), decimals=4) #doctest: +SKIP
+    array([ 23.8716,   1.1222,   3.0593,   7.3416,   5.6695,   5.4698,
+             2.8684,   0.0026,   0.0002,   0.0266,   0.0032])
+    >>> "{0:.6f}".format(mllag.sig2) #doctest: +SKIP
+    '151.458698'
+    >>> "{0:.6f}".format(mllag.logll) #doctest: +SKIP
+    '-832.937174'
+
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    """
+
+    def __init__(self, y, x, w, method='full', epsilon=0.0000001):
+        # set up main regression variables and spatial filters
+        self.y = y
+        self.x = x
+        self.n, self.k = self.x.shape
+        self.method = method
+        self.epsilon = epsilon
+        W = w.full()[0]
+        ylag = ps.lag_spatial(w, y)
+        # b0, b1, e0 and e1
+        xtx = spdot(self.x.T, self.x)
+        xtxi = la.inv(xtx)
+        xty = spdot(self.x.T, self.y)
+        xtyl = spdot(self.x.T, ylag)
+        b0 = np.dot(xtxi, xty)
+        b1 = np.dot(xtxi, xtyl)
+        e0 = self.y - spdot(x, b0)
+        e1 = ylag - spdot(x, b1)
+        methodML = method.upper()
+        # call minimizer using concentrated log-likelihood to get rho
+        if methodML in ['FULL', 'ORD']:
+            if methodML == 'FULL':
+                res = minimize_scalar(lag_c_loglik, 0.0, bounds=(-1.0, 1.0),
+                                      args=(
+                                          self.n, e0, e1, W), method='bounded',
+                                      tol=epsilon)
+            elif methodML == 'ORD':
+                # check on symmetry structure
+                if w.asymmetry(intrinsic=False) == []:
+                    ww = symmetrize(w)
+                    WW = ww.todense()
+                    evals = la.eigvalsh(WW)
+                else:
+                    evals = la.eigvals(W)
+                res = minimize_scalar(lag_c_loglik_ord, 0.0, bounds=(-1.0, 1.0),
+                                      args=(
+                                          self.n, e0, e1, evals), method='bounded',
+                                      tol=epsilon)
+        else:
+            # program will crash, need to catch
+            print "{0} is an unsupported method".format(methodML)
+            self = None
+            return
+
+        self.rho = res.x[0][0]
+
+        # compute full log-likelihood, including constants
+        ln2pi = np.log(2.0 * np.pi)
+        llik = -res.fun - self.n / 2.0 * ln2pi - self.n / 2.0
+        self.logll = llik[0][0]
+
+        # b, residuals and predicted values
+
+        b = b0 - self.rho * b1
+        self.betas = np.vstack((b, self.rho))   # rho added as last coefficient
+        self.u = e0 - self.rho * e1
+        self.predy = self.y - self.u
+
+        xb = spdot(x, b)
+
+        self.predy_e = inverse_prod(
+            w.sparse, xb, self.rho, inv_method="power_exp", threshold=epsilon)
+        self.e_pred = self.y - self.predy_e
+
+        # residual variance
+        self._cache = {}
+        self.sig2 = self.sig2n  # no allowance for division by n-k
+
+        # information matrix
+        a = -self.rho * W
+        np.fill_diagonal(a, 1.0)
+        ai = la.inv(a)
+        wai = np.dot(W, ai)
+        tr1 = np.trace(wai)
+
+        wai2 = np.dot(wai, wai)
+        tr2 = np.trace(wai2)
+
+        waiTwai = np.dot(wai.T, wai)
+        tr3 = np.trace(waiTwai)
+
+        wpredy = ps.lag_spatial(w, self.predy_e)
+        wpyTwpy = np.dot(wpredy.T, wpredy)
+        xTwpy = spdot(x.T, wpredy)
+
+        # order of variables is beta, rho, sigma2
+
+        v1 = np.vstack(
+            (xtx / self.sig2, xTwpy.T / self.sig2, np.zeros((1, self.k))))
+        v2 = np.vstack(
+            (xTwpy / self.sig2, tr2 + tr3 + wpyTwpy / self.sig2, tr1 / self.sig2))
+        v3 = np.vstack(
+            (np.zeros((self.k, 1)), tr1 / self.sig2, self.n / (2.0 * self.sig2 ** 2)))
+
+        v = np.hstack((v1, v2, v3))
+
+        self.vm1 = la.inv(v)  # vm1 includes variance for sigma2
+        self.vm = self.vm1[:-1, :-1]  # vm is for coefficients only
+
+
+class ML_Lag(BaseML_Lag):
+
+    """
+    ML estimation of the spatial lag model with all results and diagnostics;
+    Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    w            : pysal W object
+                   Spatial weights object
+    method       : string
+                   if 'full', brute force calculation (full matrix expressions)
+                   if 'ord', Ord eigenvalue method
+    epsilon      : float
+                   tolerance criterion in minimize_scalar function and inverse_product
+    spat_diag    : boolean
+                   if True, include spatial diagnostics
+    vm           : boolean
+                   if True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    betas        : array
+                   (k+1)x1 array of estimated coefficients (rho first)
+    rho          : float
+                   estimate of spatial autoregressive coefficient
+    u            : array
+                   nx1 array of residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant, excluding the rho)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    method       : string
+                   log Jacobian method
+                   if 'full': brute force (full matrix computations)
+    epsilon      : float
+                   tolerance criterion used in minimize_scalar function and inverse_product
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (k+1 x k+1), all coefficients
+    vm1          : array
+                   Variance covariance matrix (k+2 x k+2), includes sig2
+    sig2         : float
+                   Sigma squared used in computations
+    logll        : float
+                   maximized log-likelihood (including constant terms)
+    aic          : float
+                   Akaike information criterion
+    schwarz      : float
+                   Schwarz criterion
+    predy_e      : array
+                   predicted values from reduced form
+    e_pred       : array
+                   prediction errors using reduced form predicted values
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+    utu          : float
+                   Sum of squared residuals
+    std_err      : array
+                   1xk array of standard errors of the betas
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+
+    Examples
+    ________
+
+    >>> import numpy as np
+    >>> import pysal as ps
+    >>> db =  ps.open(ps.examples.get_path("baltim.dbf"),'r')
+    >>> ds_name = "baltim.dbf"
+    >>> y_name = "PRICE"
+    >>> y = np.array(db.by_col(y_name)).T
+    >>> y.shape = (len(y),1)
+    >>> x_names = ["NROOM","NBATH","PATIO","FIREPL","AC","GAR","AGE","LOTSZ","SQFT"]
+    >>> x = np.array([db.by_col(var) for var in x_names]).T
+    >>> ww = ps.open(ps.examples.get_path("baltim_q.gal"))
+    >>> w = ww.read()
+    >>> ww.close()
+    >>> w_name = "baltim_q.gal"
+    >>> w.transform = 'r'
+    >>> mllag = ML_Lag(y,x,w,name_y=y_name,name_x=x_names,\
+               name_w=w_name,name_ds=ds_name) #doctest: +SKIP
+    >>> np.around(mllag.betas, decimals=4) #doctest: +SKIP
+    array([[ 4.3675],
+           [ 0.7502],
+           [ 5.6116],
+           [ 7.0497],
+           [ 7.7246],
+           [ 6.1231],
+           [ 4.6375],
+           [-0.1107],
+           [ 0.0679],
+           [ 0.0794],
+           [ 0.4259]])
+    >>> "{0:.6f}".format(mllag.rho) #doctest: +SKIP
+    '0.425885'
+    >>> "{0:.6f}".format(mllag.mean_y) #doctest: +SKIP
+    '44.307180'
+    >>> "{0:.6f}".format(mllag.std_y) #doctest: +SKIP
+    '23.606077'
+    >>> np.around(np.diag(mllag.vm1), decimals=4) #doctest: +SKIP
+    array([  23.8716,    1.1222,    3.0593,    7.3416,    5.6695,    5.4698,
+              2.8684,    0.0026,    0.0002,    0.0266,    0.0032,  220.1292])
+    >>> np.around(np.diag(mllag.vm), decimals=4) #doctest: +SKIP
+    array([ 23.8716,   1.1222,   3.0593,   7.3416,   5.6695,   5.4698,
+             2.8684,   0.0026,   0.0002,   0.0266,   0.0032])
+    >>> "{0:.6f}".format(mllag.sig2) #doctest: +SKIP
+    '151.458698'
+    >>> "{0:.6f}".format(mllag.logll) #doctest: +SKIP
+    '-832.937174'
+    >>> "{0:.6f}".format(mllag.aic) #doctest: +SKIP
+    '1687.874348'
+    >>> "{0:.6f}".format(mllag.schwarz) #doctest: +SKIP
+    '1724.744787'
+    >>> "{0:.6f}".format(mllag.pr2) #doctest: +SKIP
+    '0.727081'
+    >>> "{0:.4f}".format(mllag.pr2_e) #doctest: +SKIP
+    '0.7062'
+    >>> "{0:.4f}".format(mllag.utu) #doctest: +SKIP
+    '31957.7853'
+    >>> np.around(mllag.std_err, decimals=4) #doctest: +SKIP
+    array([ 4.8859,  1.0593,  1.7491,  2.7095,  2.3811,  2.3388,  1.6936,
+            0.0508,  0.0146,  0.1631,  0.057 ])
+    >>> np.around(mllag.z_stat, decimals=4) #doctest: +SKIP
+    array([[ 0.8939,  0.3714],
+           [ 0.7082,  0.4788],
+           [ 3.2083,  0.0013],
+           [ 2.6018,  0.0093],
+           [ 3.2442,  0.0012],
+           [ 2.6181,  0.0088],
+           [ 2.7382,  0.0062],
+           [-2.178 ,  0.0294],
+           [ 4.6487,  0.    ],
+           [ 0.4866,  0.6266],
+           [ 7.4775,  0.    ]])
+    >>> mllag.name_y #doctest: +SKIP
+    'PRICE'
+    >>> mllag.name_x #doctest: +SKIP
+    ['CONSTANT', 'NROOM', 'NBATH', 'PATIO', 'FIREPL', 'AC', 'GAR', 'AGE', 'LOTSZ', 'SQFT', 'W_PRICE']
+    >>> mllag.name_w #doctest: +SKIP
+    'baltim_q.gal'
+    >>> mllag.name_ds #doctest: +SKIP
+    'baltim.dbf'
+    >>> mllag.title #doctest: +SKIP
+    'MAXIMUM LIKELIHOOD SPATIAL LAG (METHOD = FULL)'
+    >>> mllag = ML_Lag(y,x,w,method='ord',name_y=y_name,name_x=x_names,\
+               name_w=w_name,name_ds=ds_name) #doctest: +SKIP
+    >>> np.around(mllag.betas, decimals=4) #doctest: +SKIP
+    array([[ 4.3675],
+           [ 0.7502],
+           [ 5.6116],
+           [ 7.0497],
+           [ 7.7246],
+           [ 6.1231],
+           [ 4.6375],
+           [-0.1107],
+           [ 0.0679],
+           [ 0.0794],
+           [ 0.4259]])
+    >>> "{0:.6f}".format(mllag.rho) #doctest: +SKIP
+    '0.425885'
+    >>> "{0:.6f}".format(mllag.mean_y) #doctest: +SKIP
+    '44.307180'
+    >>> "{0:.6f}".format(mllag.std_y) #doctest: +SKIP
+    '23.606077'
+    >>> np.around(np.diag(mllag.vm1), decimals=4) #doctest: +SKIP
+    array([  23.8716,    1.1222,    3.0593,    7.3416,    5.6695,    5.4698,
+              2.8684,    0.0026,    0.0002,    0.0266,    0.0032,  220.1292])
+    >>> np.around(np.diag(mllag.vm), decimals=4) #doctest: +SKIP
+    array([ 23.8716,   1.1222,   3.0593,   7.3416,   5.6695,   5.4698,
+             2.8684,   0.0026,   0.0002,   0.0266,   0.0032])
+    >>> "{0:.6f}".format(mllag.sig2) #doctest: +SKIP
+    '151.458698'
+    >>> "{0:.6f}".format(mllag.logll) #doctest: +SKIP
+    '-832.937174'
+    >>> "{0:.6f}".format(mllag.aic) #doctest: +SKIP
+    '1687.874348'
+    >>> "{0:.6f}".format(mllag.schwarz) #doctest: +SKIP
+    '1724.744787'
+    >>> "{0:.6f}".format(mllag.pr2) #doctest: +SKIP
+    '0.727081'
+    >>> "{0:.6f}".format(mllag.pr2_e) #doctest: +SKIP
+    '0.706198'
+    >>> "{0:.4f}".format(mllag.utu) #doctest: +SKIP
+    '31957.7853'
+    >>> np.around(mllag.std_err, decimals=4) #doctest: +SKIP
+    array([ 4.8859,  1.0593,  1.7491,  2.7095,  2.3811,  2.3388,  1.6936,
+            0.0508,  0.0146,  0.1631,  0.057 ])
+    >>> np.around(mllag.z_stat, decimals=4) #doctest: +SKIP
+    array([[ 0.8939,  0.3714],
+           [ 0.7082,  0.4788],
+           [ 3.2083,  0.0013],
+           [ 2.6018,  0.0093],
+           [ 3.2442,  0.0012],
+           [ 2.6181,  0.0088],
+           [ 2.7382,  0.0062],
+           [-2.178 ,  0.0294],
+           [ 4.6487,  0.    ],
+           [ 0.4866,  0.6266],
+           [ 7.4775,  0.    ]])
+    >>> mllag.name_y #doctest: +SKIP
+    'PRICE'
+    >>> mllag.name_x #doctest: +SKIP
+    ['CONSTANT', 'NROOM', 'NBATH', 'PATIO', 'FIREPL', 'AC', 'GAR', 'AGE', 'LOTSZ', 'SQFT', 'W_PRICE']
+    >>> mllag.name_w #doctest: +SKIP
+    'baltim_q.gal'
+    >>> mllag.name_ds #doctest: +SKIP
+    'baltim.dbf'
+    >>> mllag.title #doctest: +SKIP
+    'MAXIMUM LIKELIHOOD SPATIAL LAG (METHOD = ORD)'
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    """
+
+    def __init__(self, y, x, w, method='full', epsilon=0.0000001,
+                 spat_diag=False, vm=False, name_y=None, name_x=None,
+                 name_w=None, name_ds=None):
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        x_constant = USER.check_constant(x)
+        method = method.upper()
+        if method in ['FULL', 'ORD']:
+            BaseML_Lag.__init__(
+                self, y=y, x=x_constant, w=w, method=method, epsilon=epsilon)
+            # increase by 1 to have correct aic and sc, include rho in count
+            self.k += 1
+            self.title = "MAXIMUM LIKELIHOOD SPATIAL LAG" + \
+                " (METHOD = " + method + ")"
+            self.name_ds = USER.set_name_ds(name_ds)
+            self.name_y = USER.set_name_y(name_y)
+            self.name_x = USER.set_name_x(name_x, x)
+            name_ylag = USER.set_name_yend_sp(self.name_y)
+            self.name_x.append(name_ylag)  # rho changed to last position
+            self.name_w = USER.set_name_w(name_w, w)
+            self.aic = DIAG.akaike(reg=self)
+            self.schwarz = DIAG.schwarz(reg=self)
+            SUMMARY.ML_Lag(reg=self, w=w, vm=vm, spat_diag=spat_diag)
+        else:
+            raise Exception, "{0} is an unsupported method".format(method)
+
def lag_c_loglik(rho, n, e0, e1, W):
    # Concentrated log-likelihood for the lag model (no constant terms),
    # brute-force log-Jacobian via a full determinant.  Returns the NEGATIVE
    # of the concentrated log-lik so it can be fed to a minimizer.
    resid = e0 - rho * e1
    s2 = np.dot(resid.T, resid) / n
    half_n_log_s2 = (n / 2.0) * np.log(s2)
    # Build (I - rho*W) by scaling W and overwriting the diagonal with ones.
    filtered = -rho * W
    np.fill_diagonal(filtered, 1.0)
    log_jacobian = np.log(np.linalg.det(filtered))
    return half_n_log_s2 - log_jacobian
+
+
def lag_c_loglik_ord(rho, n, e0, e1, evals):
    # Concentrated log-likelihood for the lag model (no constant terms),
    # log-Jacobian computed with Ord's eigenvalue decomposition:
    # log|I - rho*W| = sum(log(1 - rho*lambda_i)).
    # Returns the NEGATIVE concentrated log-lik for use with a minimizer.
    resid = e0 - rho * e1
    s2 = np.dot(resid.T, resid) / n
    half_n_log_s2 = (n / 2.0) * np.log(s2)
    log_jacobian = np.log(1 - rho * evals).sum()
    if isinstance(log_jacobian, complex):
        # Complex eigenvalues: the imaginary parts cancel in the sum, so only
        # the real part carries the Jacobian value.
        log_jacobian = log_jacobian.real
    return half_n_log_s2 - log_jacobian
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
diff --git a/pysal/spreg/ml_lag_regimes.py b/pysal/spreg/ml_lag_regimes.py
new file mode 100644
index 0000000..b0b63c8
--- /dev/null
+++ b/pysal/spreg/ml_lag_regimes.py
@@ -0,0 +1,484 @@
+"""
+ML Estimation of Spatial Lag Model with Regimes
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu"
+
+import pysal
+import numpy as np
+import regimes as REGI
+import user_output as USER
+import summary_output as SUMMARY
+import diagnostics as DIAG
+import multiprocessing as mp
+from ml_lag import BaseML_Lag
+from utils import set_warn
+from platform import system
+
+__all__ = ["ML_Lag_Regimes"]
+
+
class ML_Lag_Regimes(BaseML_Lag, REGI.Regimes_Frame):

    """
    ML estimation of the spatial lag model with regimes (note no consistency
    checks, diagnostics or constants added); Anselin (1988) [1]_

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: ['one', 'many']
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime (default)
    cols2regi    : list, 'all'
                   Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all' (default), all the variables vary by regime.
    w            : Sparse matrix
                   Spatial weights sparse matrix
    method       : string
                   if 'full', brute force calculation (full matrix expressions)
                   if 'ord', Ord eigenvalue method
    epsilon      : float
                   tolerance criterion in minimize_scalar function and inverse_product
    regime_lag_sep: boolean
                   If True, the spatial parameter for spatial lag is also
                   computed according to different regimes. If False (default),
                   the spatial parameter is fixed across regimes.
    cores        : boolean
                   Specifies if multiprocessing is to be used
                   Default: no multiprocessing, cores = False
                   Note: Multiprocessing may not work on all platforms.
    spat_diag    : boolean
                   if True, include spatial diagnostics
    vm           : boolean
                   if True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regimes variable for use in output

    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   (k+1)x1 array of estimated coefficients (rho first)
    rho          : float
                   estimate of spatial autoregressive coefficient
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant, excluding the rho)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    method       : string
                   log Jacobian method
                   if 'full': brute force (full matrix computations)
    epsilon      : float
                   tolerance criterion used in minimize_scalar function and inverse_product
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (k+1 x k+1), all coefficients
    vm1          : array
                   Variance covariance matrix (k+2 x k+2), includes sig2
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    sig2         : float
                   Sigma squared used in computations
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    logll        : float
                   maximized log-likelihood (including constant terms)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    aic          : float
                   Akaike information criterion
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    schwarz      : float
                   Schwarz criterion
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    predy_e      : array
                   predicted values from reduced form
    e_pred       : array
                   prediction errors using reduced form predicted values
    pr2          : float
                   Pseudo R squared (squared correlation between y and ypred)
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    pr2_e        : float
                   Pseudo R squared (squared correlation between y and ypred_e
                   (using reduced form))
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    std_err      : array
                   1xk array of standard errors of the betas
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    z_stat       : list of tuples
                   z statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    name_regimes : string
                   Name of regimes variable for use in output
    title        : string
                   Name of the regression method used
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: ['one', 'many']
                   Ignored if regimes=False. Constant option for regimes.
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime
    cols2regi    : list, 'all'
                   Ignored if regimes=False. Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all', all the variables vary by regime.
    regime_lag_sep   : boolean
                   If True, the spatial parameter for spatial lag is also
                   computed according to different regimes. If False (default),
                   the spatial parameter is fixed across regimes.
    regime_err_sep  : boolean
                   always set to False - kept for compatibility with other
                   regime models
    kr           : int
                   Number of variables/columns to be "regimized" or subject
                   to change by regime. These will result in one parameter
                   estimate by regime for each variable (i.e. nr parameters per
                   variable)
    kf           : int
                   Number of variables/columns to be considered fixed or
                   global across regimes and hence only obtain one parameter
                   estimate
    nr           : int
                   Number of different regimes in the 'regimes' list
    multi        : dictionary
                   Only available when multiple regressions are estimated,
                   i.e. when regime_err_sep=True and no variable is fixed
                   across regimes.
                   Contains all attributes of each individual regression

    References
    ----------

    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
    Kluwer Academic Publishers. Dordrecht.

    Examples
    --------

    Open data baltim.dbf using pysal and create the variables matrices and weights matrix.

    >>> import numpy as np
    >>> import pysal as ps
    >>> db =  ps.open(ps.examples.get_path("baltim.dbf"),'r')
    >>> ds_name = "baltim.dbf"
    >>> y_name = "PRICE"
    >>> y = np.array(db.by_col(y_name)).T
    >>> y.shape = (len(y),1)
    >>> x_names = ["NROOM","AGE","SQFT"]
    >>> x = np.array([db.by_col(var) for var in x_names]).T
    >>> ww = ps.open(ps.examples.get_path("baltim_q.gal"))
    >>> w = ww.read()
    >>> ww.close()
    >>> w_name = "baltim_q.gal"
    >>> w.transform = 'r'

    Since in this example we are interested in checking whether the results vary
    by regimes, we use CITCOU to define whether the location is in the city or
    outside the city (in the county):

    >>> regimes = db.by_col("CITCOU")

    Now we can run the regression with all parameters:

    >>> mllag = ML_Lag_Regimes(y,x,regimes,w=w,name_y=y_name,name_x=x_names,\
               name_w=w_name,name_ds=ds_name,name_regimes="CITCOU")
    >>> np.around(mllag.betas, decimals=4)
    array([[-15.0059],
           [  4.496 ],
           [ -0.0318],
           [  0.35  ],
           [ -4.5404],
           [  3.9219],
           [ -0.1702],
           [  0.8194],
           [  0.5385]])
    >>> "{0:.6f}".format(mllag.rho)
    '0.538503'
    >>> "{0:.6f}".format(mllag.mean_y)
    '44.307180'
    >>> "{0:.6f}".format(mllag.std_y)
    '23.606077'
    >>> np.around(np.diag(mllag.vm1), decimals=4)
    array([  47.42  ,    2.3953,    0.0051,    0.0648,   69.6765,    3.2066,
              0.0116,    0.0486,    0.004 ,  390.7274])
    >>> np.around(np.diag(mllag.vm), decimals=4)
    array([ 47.42  ,   2.3953,   0.0051,   0.0648,  69.6765,   3.2066,
             0.0116,   0.0486,   0.004 ])
    >>> "{0:.6f}".format(mllag.sig2)
    '200.044334'
    >>> "{0:.6f}".format(mllag.logll)
    '-864.985056'
    >>> "{0:.6f}".format(mllag.aic)
    '1747.970112'
    >>> "{0:.6f}".format(mllag.schwarz)
    '1778.136835'
    >>> mllag.title
    'MAXIMUM LIKELIHOOD SPATIAL LAG - REGIMES (METHOD = full)'
    """

    def __init__(self, y, x, regimes, w=None, constant_regi='many',
                 cols2regi='all', method='full', epsilon=0.0000001,
                 regime_lag_sep=False, regime_err_sep=False, cores=False, spat_diag=False,
                 vm=False, name_y=None, name_x=None,
                 name_w=None, name_ds=None, name_regimes=None):

        # Input validation and bookkeeping of names/settings.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y, w_required=True)
        USER.check_spat_diag(spat_diag, w)
        name_y = USER.set_name_y(name_y)
        self.name_y = name_y
        self.name_x_r = USER.set_name_x(
            name_x, x) + [USER.set_name_yend_sp(name_y)]
        self.method = method
        self.epsilon = epsilon
        self.name_regimes = USER.set_name_ds(name_regimes)
        self.constant_regi = constant_regi
        self.n = n
        cols2regi = REGI.check_cols2regi(
            constant_regi, cols2regi, x, add_cons=False)
        self.cols2regi = cols2regi
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        self.regime_lag_sep = regime_lag_sep
        self._cache = {}
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_w = USER.set_name_w(name_w, w)
        USER.check_regimes(self.regimes_set, self.n, x.shape[1])

        # regime_err_sep is ignored, always False

        if regime_lag_sep == True:
            if not (set(cols2regi) == set([True]) and constant_regi == 'many'):
                # Call syntax (not `raise E, msg`) keeps this Python-3
                # compatible; ValueError subclasses Exception, so existing
                # callers catching Exception still work.
                raise ValueError(
                    "All variables must vary by regimes if regime_lag_sep = True.")
            cols2regi += [True]
            w_i, regi_ids, warn = REGI.w_regimes(
                w, regimes, self.regimes_set, transform=True, get_ids=True, min_n=len(cols2regi) + 1)
            set_warn(self, warn)
        else:
            cols2regi += [False]

        if set(cols2regi) == set([True]) and constant_regi == 'many':
            # Fully separate regimes: one regression per regime.  Only
            # reachable when regime_lag_sep is True, so w_i/regi_ids exist.
            self.y = y
            self.ML_Lag_Regimes_Multi(y, x, w_i, w, regi_ids,
                                      cores=cores, cols2regi=cols2regi, method=method, epsilon=epsilon,
                                      spat_diag=spat_diag, vm=vm, name_y=name_y, name_x=name_x,
                                      name_regimes=self.name_regimes,
                                      name_w=name_w, name_ds=name_ds)
        else:
            # Single regression with regime-expanded design matrix and a
            # global spatial lag parameter.
            name_x = USER.set_name_x(name_x, x, constant=True)
            x, self.name_x = REGI.Regimes_Frame.__init__(self, x,
                                                         regimes, constant_regi, cols2regi=cols2regi[:-1], names=name_x)
            self.name_x.append("_Global_" + USER.set_name_yend_sp(name_y))
            BaseML_Lag.__init__(
                self, y=y, x=x, w=w, method=method, epsilon=epsilon)
            self.kf += 1  # Adding a fixed k to account for spatial lag in Chow
            # adding a fixed k to account for spatial lag in aic, sc
            self.k += 1
            self.chow = REGI.Chow(self)
            self.aic = DIAG.akaike(reg=self)
            self.schwarz = DIAG.schwarz(reg=self)
            self.regime_lag_sep = regime_lag_sep
            self.title = "MAXIMUM LIKELIHOOD SPATIAL LAG - REGIMES" + \
                " (METHOD = " + method + ")"
            SUMMARY.ML_Lag(
                reg=self, w=w, vm=vm, spat_diag=spat_diag, regimes=True)

    def ML_Lag_Regimes_Multi(self, y, x, w_i, w, regi_ids,
                             cores, cols2regi, method, epsilon,
                             spat_diag, vm, name_y, name_x,
                             name_regimes, name_w, name_ds):
        """Estimate one BaseML_Lag per regime (optionally in parallel) and
        assemble the per-regime results into the aggregate attributes."""
        name_x = USER.set_name_x(name_x, x) + [USER.set_name_yend_sp(name_y)]
        # Create a single pool up front and reuse it for every regime.  The
        # previous code built a new mp.Pool on each loop iteration but only
        # closed/joined the last one, leaking worker processes.
        pool = mp.Pool(None) if cores else None
        results_p = {}
        for r in self.regimes_set:
            if cores:
                results_p[r] = pool.apply_async(_work, args=(y, x, regi_ids, r, w_i[
                                                r], method, epsilon, name_ds, name_y, name_x, name_w, name_regimes, ))
            else:
                results_p[r] = _work(
                    *(y, x, regi_ids, r, w_i[r], method, epsilon, name_ds, name_y, name_x, name_w, name_regimes))

        self.kryd = 0
        self.kr = len(cols2regi) + 1
        self.kf = 0
        self.nr = len(self.regimes_set)
        self.name_x_r = name_x
        self.name_regimes = name_regimes
        # Preallocate the aggregate containers; each regime fills its slice.
        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
        self.betas = np.zeros((self.nr * self.kr, 1), float)
        self.u = np.zeros((self.n, 1), float)
        self.predy = np.zeros((self.n, 1), float)
        self.predy_e = np.zeros((self.n, 1), float)
        self.e_pred = np.zeros((self.n, 1), float)
        if cores:
            pool.close()
            pool.join()

        results = {}
        self.name_y, self.name_x = [], []
        counter = 0
        for r in self.regimes_set:
            if not cores:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()
            # Per-regime vm/betas occupy a block-diagonal slice; residuals and
            # predictions are scattered back to the original observation rows.
            self.vm[(counter * self.kr):((counter + 1) * self.kr),
                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
            self.betas[
                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
            self.u[regi_ids[r], ] = results[r].u
            self.predy[regi_ids[r], ] = results[r].predy
            self.predy_e[regi_ids[r], ] = results[r].predy_e
            self.e_pred[regi_ids[r], ] = results[r].e_pred
            self.name_y += results[r].name_y
            self.name_x += results[r].name_x
            counter += 1
        self.multi = results
        self.chow = REGI.Chow(self)
        SUMMARY.ML_Lag_multi(
            reg=self, multireg=self.multi, vm=vm, spat_diag=spat_diag, regimes=True, w=w)
+
+
def _work(y, x, regi_ids, r, w_r, method, epsilon, name_ds, name_y, name_x, name_w, name_regimes):
    """Fit a BaseML_Lag model for the observations of regime ``r`` and
    attach the regime-prefixed names, title and information criteria."""
    ids = regi_ids[r]
    x_with_constant = USER.check_constant(x[ids])
    model = BaseML_Lag(y[ids], x_with_constant, w_r,
                       method=method, epsilon=epsilon)
    prefix = str(r)
    model.title = "MAXIMUM LIKELIHOOD SPATIAL LAG - REGIME " + \
        prefix + " (METHOD = " + method + ")"
    model.name_ds = name_ds
    model.name_y = '%s_%s' % (prefix, name_y)
    model.name_x = ['%s_%s' % (prefix, i) for i in name_x]
    model.name_w = name_w
    model.name_regimes = name_regimes
    model.k += 1  # add 1 for proper df and aic, sc
    model.aic = DIAG.akaike(reg=model)
    model.schwarz = DIAG.schwarz(reg=model)
    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
if __name__ == "__main__":
    # Run doctests first, then a demonstration fit on the Baltimore data.
    _test()
    import numpy as np
    import pysal as ps
    db = ps.open(ps.examples.get_path("baltim.dbf"), 'r')
    ds_name = "baltim.dbf"
    y_name = "PRICE"
    y = np.array(db.by_col(y_name)).T
    y.shape = (len(y), 1)
    x_names = ["NROOM", "NBATH", "PATIO", "FIREPL",
               "AC", "GAR", "AGE", "LOTSZ", "SQFT"]
    x = np.array([db.by_col(var) for var in x_names]).T
    ww = ps.open(ps.examples.get_path("baltim_q.gal"))
    w = ww.read()
    ww.close()
    w_name = "baltim_q.gal"
    w.transform = 'r'
    regimes = db.by_col("CITCOU")

    mllag = ML_Lag_Regimes(y, x, regimes, w=w, method='full', name_y=y_name, name_x=x_names,
                           name_w=w_name, name_ds=ds_name, regime_lag_sep=True, constant_regi='many',
                           name_regimes="CITCOU")
    # print() call syntax works in both Python 2 (parenthesized expression)
    # and Python 3, unlike the bare print statement used previously.
    print(mllag.summary)
diff --git a/pysal/spreg/ols.py b/pysal/spreg/ols.py
new file mode 100644
index 0000000..10710d3
--- /dev/null
+++ b/pysal/spreg/ols.py
@@ -0,0 +1,471 @@
+"""Ordinary Least Squares regression classes."""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, David C. Folch david.folch at asu.edu"
+import numpy as np
+import copy as COPY
+import numpy.linalg as la
+import user_output as USER
+import summary_output as SUMMARY
+import robust as ROBUST
+from utils import spdot, sphstack, RegressionPropsY, RegressionPropsVM
+
+__all__ = ["OLS"]
+
+
class BaseOLS(RegressionPropsY, RegressionPropsVM):

    """
    Ordinary least squares (OLS) (note: no consistency checks, diagnostics or
    constant added)

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given.  If 'hac', then a
                   HAC consistent estimator of the variance-covariance
                   matrix is given. Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.

    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    utu          : float
                   Sum of squared residuals
    sig2         : float
                   Sigma squared used in computations
    sig2n        : float
                   Sigma squared (computed with n in the denominator)
    sig2n_k      : float
                   Sigma squared (computed with n-k in the denominator)
    xtx          : float
                   X'X
    xtxi         : float
                   (X'X)^-1


    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("CRIME"))
    >>> X = np.array(X).T
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> ols=BaseOLS(y,X)
    >>> ols.betas
    array([[ 46.42818268],
           [  0.62898397],
           [ -0.48488854]])
    >>> ols.vm
    array([[ 174.02245348,   -6.52060364,   -2.15109867],
           [  -6.52060364,    0.28720001,    0.06809568],
           [  -2.15109867,    0.06809568,    0.03336939]])
    """

    def __init__(self, y, x, robust=None, gwk=None, sig2n_k=True):
        # Solve the normal equations: betas = (X'X)^-1 X'y.
        self.x = x
        self.y = y
        xt = self.x.T
        self.xtx = spdot(xt, self.x)
        self.xtxi = la.inv(self.xtx)
        self.betas = np.dot(self.xtxi, spdot(xt, y))
        self.predy = spdot(self.x, self.betas)
        self.u = y - self.predy
        self.n, self.k = self.x.shape

        # White/HAC variance-covariance estimate, if requested.
        if robust:
            self.vm = ROBUST.robust_vm(reg=self, gwk=gwk, sig2n_k=sig2n_k)

        # The cache must exist before the lazy sig2n / sig2n_k
        # properties (from RegressionPropsVM) can be read.
        self._cache = {}
        self.sig2 = self.sig2n_k if sig2n_k else self.sig2n
+
+
class OLS(BaseOLS):

    """
    Ordinary least squares with results and diagnostics.

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    w            : pysal W object
                   Spatial weights object (required if running spatial
                   diagnostics)
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given.  If 'hac', then a
                   HAC consistent estimator of the variance-covariance
                   matrix is given. Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.
    nonspat_diag : boolean
                   If True, then compute non-spatial diagnostics on
                   the regression.
    spat_diag    : boolean
                   If True, then compute Lagrange multiplier tests (requires
                   w). Note: see moran for further tests.
    moran        : boolean
                   If True, compute Moran's I on the residuals. Note:
                   requires spat_diag=True.
    white_test   : boolean
                   If True, compute White's specification robust test.
                   (requires nonspat_diag=True)
    vm           : boolean
                   If True, include variance-covariance matrix in summary
                   results
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_gwk     : string
                   Name of kernel weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output


    Attributes
    ----------
    summary      : string
                   Summary of regression results and diagnostics (note: use in
                   conjunction with the print command)
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    robust       : string
                   Adjustment for robust standard errors
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    r2           : float
                   R squared
    ar2          : float
                   Adjusted R squared
    utu          : float
                   Sum of squared residuals
    sig2         : float
                   Sigma squared used in computations
    sig2ML       : float
                   Sigma squared (maximum likelihood)
    f_stat       : tuple
                   Statistic (float), p-value (float)
    logll        : float
                   Log likelihood
    aic          : float
                   Akaike information criterion
    schwarz      : float
                   Schwarz information criterion
    std_err      : array
                   1xk array of standard errors of the betas
    t_stat       : list of tuples
                   t statistic; each tuple contains the pair (statistic,
                   p-value), where each is a float
    mulColli     : float
                   Multicollinearity condition number
    jarque_bera  : dictionary
                   'jb': Jarque-Bera statistic (float); 'pvalue': p-value
                   (float); 'df': degrees of freedom (int)
    breusch_pagan : dictionary
                    'bp': Breusch-Pagan statistic (float); 'pvalue': p-value
                    (float); 'df': degrees of freedom (int)
    koenker_bassett : dictionary
                      'kb': Koenker-Bassett statistic (float); 'pvalue':
                      p-value (float); 'df': degrees of freedom (int)
    white         : dictionary
                    'wh': White statistic (float); 'pvalue': p-value (float);
                    'df': degrees of freedom (int)
    lm_error      : tuple
                    Lagrange multiplier test for spatial error model; tuple
                    contains the pair (statistic, p-value), where each is a
                    float
    lm_lag        : tuple
                    Lagrange multiplier test for spatial lag model; tuple
                    contains the pair (statistic, p-value), where each is a
                    float
    rlm_error     : tuple
                    Robust lagrange multiplier test for spatial error model;
                    tuple contains the pair (statistic, p-value), where each
                    is a float
    rlm_lag       : tuple
                    Robust lagrange multiplier test for spatial lag model;
                    tuple contains the pair (statistic, p-value), where each
                    is a float
    lm_sarma      : tuple
                    Lagrange multiplier test for spatial SARMA model; tuple
                    contains the pair (statistic, p-value), where each is a
                    float
    moran_res     : tuple
                    Moran's I for the residuals; tuple containing the triple
                    (Moran's I, standardized Moran's I, p-value)
    name_y        : string
                    Name of dependent variable for use in output
    name_x        : list of strings
                    Names of independent variables for use in output
    name_w        : string
                    Name of weights matrix for use in output
    name_gwk      : string
                    Name of kernel weights matrix for use in output
    name_ds       : string
                    Name of dataset for use in output
    title         : string
                    Name of the regression method used
    sig2n        : float
                   Sigma squared (computed with n in the denominator)
    sig2n_k      : float
                   Sigma squared (computed with n-k in the denominator)
    xtx          : float
                   X'X
    xtxi         : float
                   (X'X)^-1


    Examples
    --------
    >>> import numpy as np
    >>> import pysal

    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
    This is the DBF associated with the Columbus shapefile.  Note that
    pysal.open() also reads data in CSV format; also, the actual OLS class
    requires data to be passed in as numpy arrays so the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')

    Extract the HOVAL column (home values) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    an nx1 numpy array.

    >>> hoval = db.by_col("HOVAL")
    >>> y = np.array(hoval)
    >>> y.shape = (len(hoval), 1)

    Extract CRIME (crime) and INC (income) vectors from the DBF to be used as
    independent variables in the regression.  Note that PySAL requires this to
    be an nxj numpy array, where j is the number of independent variables (not
    including a constant). pysal.spreg.OLS adds a vector of ones to the
    independent variables passed in.

    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("CRIME"))
    >>> X = np.array(X).T

    The minimum parameters needed to run an ordinary least squares regression
    are the two numpy arrays containing the independent variable and dependent
    variables respectively.  To make the printed results more meaningful, the
    user can pass in explicit names for the variables used; this is optional.

    >>> ols = OLS(y, X, name_y='home value', name_x=['income','crime'], name_ds='columbus', white_test=True)

    pysal.spreg.OLS computes the regression coefficients and their standard
    errors, t-stats and p-values. It also computes a large battery of
    diagnostics on the regression. In this example we compute the white test
    which by default isn't ('white_test=True'). All of these results can be independently
    accessed as attributes of the regression object created by running
    pysal.spreg.OLS.  They can also be accessed at one time by printing the
    summary attribute of the regression object. In the example below, the
    parameter on crime is -0.4849, with a t-statistic of -2.6544 and p-value
    of 0.01087.

    >>> ols.betas
    array([[ 46.42818268],
           [  0.62898397],
           [ -0.48488854]])
    >>> print round(ols.t_stat[2][0],3)
    -2.654
    >>> print round(ols.t_stat[2][1],3)
    0.011
    >>> print round(ols.r2,3)
    0.35

    Or we can easily obtain a full summary of all the results nicely formatted and
    ready to be printed:

    >>> print ols.summary
    REGRESSION
    ----------
    SUMMARY OF OUTPUT: ORDINARY LEAST SQUARES
    -----------------------------------------
    Data set            :    columbus
    Dependent Variable  :  home value                Number of Observations:          49
    Mean dependent var  :     38.4362                Number of Variables   :           3
    S.D. dependent var  :     18.4661                Degrees of Freedom    :          46
    R-squared           :      0.3495
    Adjusted R-squared  :      0.3212
    Sum squared residual:   10647.015                F-statistic           :     12.3582
    Sigma-square        :     231.457                Prob(F-statistic)     :   5.064e-05
    S.E. of regression  :      15.214                Log likelihood        :    -201.368
    Sigma-square ML     :     217.286                Akaike info criterion :     408.735
    S.E of regression ML:     14.7406                Schwarz criterion     :     414.411
    <BLANKLINE>
    ------------------------------------------------------------------------------------
                Variable     Coefficient       Std.Error     t-Statistic     Probability
    ------------------------------------------------------------------------------------
                CONSTANT      46.4281827      13.1917570       3.5194844       0.0009867
                   crime      -0.4848885       0.1826729      -2.6544086       0.0108745
                  income       0.6289840       0.5359104       1.1736736       0.2465669
    ------------------------------------------------------------------------------------
    <BLANKLINE>
    REGRESSION DIAGNOSTICS
    MULTICOLLINEARITY CONDITION NUMBER           12.538
    <BLANKLINE>
    TEST ON NORMALITY OF ERRORS
    TEST                             DF        VALUE           PROB
    Jarque-Bera                       2          39.706           0.0000
    <BLANKLINE>
    DIAGNOSTICS FOR HETEROSKEDASTICITY
    RANDOM COEFFICIENTS
    TEST                             DF        VALUE           PROB
    Breusch-Pagan test                2           5.767           0.0559
    Koenker-Bassett test              2           2.270           0.3214
    <BLANKLINE>
    SPECIFICATION ROBUST TEST
    TEST                             DF        VALUE           PROB
    White                             5           2.906           0.7145
    ================================ END OF REPORT =====================================

    If the optional parameters w and spat_diag are passed to pysal.spreg.OLS,
    spatial diagnostics will also be computed for the regression.  These
    include Lagrange multiplier tests and Moran's I of the residuals.  The w
    parameter is a PySAL spatial weights matrix. In this example, w is built
    directly from the shapefile columbus.shp, but w can also be read in from a
    GAL or GWT file.  In this case a rook contiguity weights matrix is built,
    but PySAL also offers queen contiguity, distance weights and k nearest
    neighbor weights among others. In the example, the Moran's I of the
    residuals is 0.204 with a standardized value of 2.592 and a p-value of
    0.0095.

    >>> w = pysal.weights.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> ols = OLS(y, X, w, spat_diag=True, moran=True, name_y='home value', name_x=['income','crime'], name_ds='columbus')
    >>> ols.betas
    array([[ 46.42818268],
           [  0.62898397],
           [ -0.48488854]])
    >>> print round(ols.moran_res[0],3)
    0.204
    >>> print round(ols.moran_res[1],3)
    2.592
    >>> print round(ols.moran_res[2],4)
    0.0095

    """

    def __init__(self, y, x,
                 w=None,
                 robust=None, gwk=None, sig2n_k=True,
                 nonspat_diag=True, spat_diag=False, moran=False,
                 white_test=False, vm=False, name_y=None, name_x=None,
                 name_w=None, name_gwk=None, name_ds=None):

        # Input validation is delegated to the user_output helpers;
        # check_arrays returns the number of observations, which check_y uses.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y)
        USER.check_robust(robust, gwk)
        USER.check_spat_diag(spat_diag, w)
        # check_constant appends a constant column; the docstring states
        # callers pass x WITHOUT a constant.
        x_constant = USER.check_constant(x)
        # Estimation itself happens in BaseOLS; this class only adds
        # validation, naming metadata and the printable summary.
        BaseOLS.__init__(self, y=y, x=x_constant, robust=robust,
                         gwk=gwk, sig2n_k=sig2n_k)
        self.title = "ORDINARY LEAST SQUARES"
        self.name_ds = USER.set_name_ds(name_ds)
        self.name_y = USER.set_name_y(name_y)
        self.name_x = USER.set_name_x(name_x, x)
        self.robust = USER.set_robust(robust)
        self.name_w = USER.set_name_w(name_w, w)
        self.name_gwk = USER.set_name_w(name_gwk, gwk)
        # Builds self.summary (and the requested diagnostics) in place.
        SUMMARY.OLS(reg=self, vm=vm, w=w, nonspat_diag=nonspat_diag,
                    spat_diag=spat_diag, moran=moran, white_test=white_test)
+
+
+def _test():
+    import doctest
+    # the following line could be used to define an alternative to the '<BLANKLINE>' flag
+    #doctest.BLANKLINE_MARKER = 'something better than <BLANKLINE>'
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
if __name__ == '__main__':
    _test()

    import numpy as np
    import pysal

    # Demo: OLS with White standard errors and full (non-)spatial
    # diagnostics on the Columbus crime data set.
    db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
    y_var = 'CRIME'
    y = np.array([db.by_col(y_var)]).reshape(49, 1)
    x_var = ['INC', 'HOVAL']
    x = np.array([db.by_col(name) for name in x_var]).T
    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    w.transform = 'r'  # row-standardize the weights
    ols = OLS(
        y, x, w=w, nonspat_diag=True, spat_diag=True, name_y=y_var, name_x=x_var,
        name_ds='columbus', name_w='columbus.gal', robust='white', sig2n_k=True, moran=True)
    # print(...) is a parenthesized print statement in Python 2 and a
    # function call in Python 3, so this line works identically under both.
    print(ols.summary)
diff --git a/pysal/spreg/ols_regimes.py b/pysal/spreg/ols_regimes.py
new file mode 100644
index 0000000..e4e3393
--- /dev/null
+++ b/pysal/spreg/ols_regimes.py
@@ -0,0 +1,541 @@
+"""
+Ordinary Least Squares regression with regimes.
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu, Daniel Arribas-Bel darribas at asu.edu"
+
+import regimes as REGI
+import user_output as USER
+import multiprocessing as mp
+from ols import BaseOLS
+from utils import set_warn, spbroadcast, RegressionProps_basic, RegressionPropsY, spdot
+from robust import hac_multi
+import summary_output as SUMMARY
+import numpy as np
+from platform import system
+import scipy.sparse as SP
+
+
+class OLS_Regimes(BaseOLS, REGI.Regimes_Frame, RegressionPropsY):
+
+    """
+    Ordinary least squares with results and diagnostics.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    w            : pysal W object
+                   Spatial weights object (required if running spatial
+                   diagnostics)
+    robust       : string
+                   If 'white', then a White consistent estimator of the
+                   variance-covariance matrix is given.  If 'hac', then a
+                   HAC consistent estimator of the variance-covariance
+                   matrix is given. Default set to None. 
+    gwk          : pysal W object
+                   Kernel spatial weights needed for HAC estimation. Note:
+                   matrix must have ones along the main diagonal.
+    sig2n_k      : boolean
+                   If True, then use n-k to estimate sigma^2. If False, use n.
+    nonspat_diag : boolean
+                   If True, then compute non-spatial diagnostics on
+                   the regression.
+    spat_diag    : boolean
+                   If True, then compute Lagrange multiplier tests (requires
+                   w). Note: see moran for further tests.
+    moran        : boolean
+                   If True, compute Moran's I on the residuals. Note:
+                   requires spat_diag=True.
+    white_test   : boolean
+                   If True, compute White's specification robust test.
+                   (requires nonspat_diag=True)
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    regime_err_sep  : boolean
+                   If True, a separate regression is run for each regime.
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    robust       : string
+                   Adjustment for robust standard errors
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)                  
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    r2           : float
+                   R squared
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    ar2          : float
+                   Adjusted R squared
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    utu          : float
+                   Sum of squared residuals
+    sig2         : float
+                   Sigma squared used in computations
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2ML       : float
+                   Sigma squared (maximum likelihood)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    f_stat       : tuple
+                   Statistic (float), p-value (float)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    logll        : float
+                   Log likelihood
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    aic          : float
+                   Akaike information criterion 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    schwarz      : float
+                   Schwarz information criterion     
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    t_stat       : list of tuples
+                   t statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mulColli     : float
+                   Multicollinearity condition number
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    jarque_bera  : dictionary
+                   'jb': Jarque-Bera statistic (float); 'pvalue': p-value
+                   (float); 'df': degrees of freedom (int)  
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    breusch_pagan : dictionary
+                    'bp': Breusch-Pagan statistic (float); 'pvalue': p-value
+                    (float); 'df': degrees of freedom (int)  
+                    Only available in dictionary 'multi' when multiple regressions
+                    (see 'multi' below for details)
+    koenker_bassett : dictionary
+                      'kb': Koenker-Bassett statistic (float); 'pvalue':
+                      p-value (float); 'df': degrees of freedom (int)  
+                      Only available in dictionary 'multi' when multiple regressions
+                      (see 'multi' below for details)
+    white         : dictionary
+                    'wh': White statistic (float); 'pvalue': p-value (float);
+                    'df': degrees of freedom (int)  
+                    Only available in dictionary 'multi' when multiple regressions
+                    (see 'multi' below for details)
+    lm_error      : tuple
+                    Lagrange multiplier test for spatial error model; tuple
+                    contains the pair (statistic, p-value), where each is a
+                    float 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    lm_lag        : tuple
+                    Lagrange multiplier test for spatial lag model; tuple
+                    contains the pair (statistic, p-value), where each is a
+                    float 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    rlm_error     : tuple
+                    Robust lagrange multiplier test for spatial error model;
+                    tuple contains the pair (statistic, p-value), where each
+                    is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    rlm_lag       : tuple
+                    Robust lagrange multiplier test for spatial lag model;
+                    tuple contains the pair (statistic, p-value), where each
+                    is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    lm_sarma      : tuple
+                    Lagrange multiplier test for spatial SARMA model; tuple
+                    contains the pair (statistic, p-value), where each is a
+                    float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    moran_res     : tuple
+                    Moran's I for the residuals; tuple containing the triple
+                    (Moran's I, standardized Moran's I, p-value)
+    name_y        : string
+                    Name of dependent variable for use in output
+    name_x        : list of strings
+                    Names of independent variables for use in output
+    name_w        : string
+                    Name of weights matrix for use in output
+    name_gwk      : string
+                    Name of kernel weights matrix for use in output
+    name_ds       : string
+                    Name of dataset for use in output
+    name_regimes : string
+                   Name of regime variable for use in the output
+    title         : string
+                    Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2n        : float
+                   Sigma squared (computed with n in the denominator)
+    sig2n_k      : float
+                   Sigma squared (computed with n-k in the denominator)
+    xtx          : float
+                   X'X
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    xtxi         : float
+                   (X'X)^-1
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi: ['one', 'many']
+                   Ignored if regimes=False. Constant option for regimes.
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime
+    cols2regi    : list, 'all'
+                   Ignored if regimes=False. Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all', all the variables vary by regime.
+    regime_err_sep  : boolean
+                   If True, a separate regression is run for each regime.
+    kr           : int
+                   Number of variables/columns to be "regimized" or subject
+                   to change by regime. These will result in one parameter
+                   estimate by regime for each variable (i.e. nr parameters per
+                   variable)
+    kf           : int
+                   Number of variables/columns to be considered fixed or
+                   global across regimes and hence only obtain one parameter
+                   estimate
+    nr           : int
+                   Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it
+    the dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = db.by_col(y_var)
+    >>> y = np.array(y).reshape(len(y), 1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2','...']
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    We can now run the regression and then have a summary of the output
+    by typing: olsr.summary
+    Alternatively, we can just check the betas and standard errors of the
+    parameters:
+
+    >>> olsr = OLS_Regimes(y, x, regimes, nonspat_diag=False, name_y=y_var, name_x=['PS90','UE90'], name_regimes=r_var, name_ds='NAT')
+    >>> olsr.betas
+    array([[ 0.39642899],
+           [ 0.65583299],
+           [ 0.48703937],
+           [ 5.59835   ],
+           [ 1.16210453],
+           [ 0.53163886]])
+    >>> np.sqrt(olsr.vm.diagonal())
+    array([ 0.24816345,  0.09662678,  0.03628629,  0.46894564,  0.21667395,
+            0.05945651])
+    >>> olsr.cols2regi
+    'all'
+    """
+
+    def __init__(self, y, x, regimes,
+                 w=None, robust=None, gwk=None, sig2n_k=True,
+                 nonspat_diag=True, spat_diag=False, moran=False, white_test=False,
+                 vm=False, constant_regi='many', cols2regi='all',
+                 regime_err_sep=True, cores=False,
+                 name_y=None, name_x=None, name_regimes=None,
+                 name_w=None, name_gwk=None, name_ds=None):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y)
+        USER.check_robust(robust, gwk)
+        USER.check_spat_diag(spat_diag, w)
+        self.name_x_r = USER.set_name_x(name_x, x)
+        self.constant_regi = constant_regi
+        self.cols2regi = cols2regi
+        self.name_w = USER.set_name_w(name_w, w)
+        self.name_gwk = USER.set_name_w(name_gwk, gwk)
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.n = n
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, add_cons=False)
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        if regime_err_sep == True and robust == 'hac':
+            set_warn(
+                self, "Error by regimes is incompatible with HAC estimation. Hence, error by regimes has been disabled for this model.")
+            regime_err_sep = False
+        self.regime_err_sep = regime_err_sep
+        if regime_err_sep == True and set(cols2regi) == set([True]) and constant_regi == 'many':
+            self.y = y
+            name_x = USER.set_name_x(name_x, x)
+            regi_ids = dict(
+                (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
+            self._ols_regimes_multi(x, w, regi_ids, cores,
+                                    gwk, sig2n_k, robust, nonspat_diag, spat_diag, vm, name_x, moran, white_test)
+        else:
+            name_x = USER.set_name_x(name_x, x, constant=True)
+            x, self.name_x = REGI.Regimes_Frame.__init__(self, x,
+                                                         regimes, constant_regi, cols2regi, name_x)
+            BaseOLS.__init__(
+                self, y=y, x=x, robust=robust, gwk=gwk, sig2n_k=sig2n_k)
+            if regime_err_sep == True and robust == None:
+                y2, x2 = REGI._get_weighted_var(
+                    regimes, self.regimes_set, sig2n_k, self.u, y, x)
+                ols2 = BaseOLS(y=y2, x=x2, sig2n_k=sig2n_k)
+                RegressionProps_basic(self, betas=ols2.betas, vm=ols2.vm)
+                self.title = "ORDINARY LEAST SQUARES - REGIMES (Group-wise heteroskedasticity)"
+                nonspat_diag = None
+                set_warn(
+                    self, "Residuals treated as homoskedastic for the purpose of diagnostics.")
+            else:
+                self.title = "ORDINARY LEAST SQUARES - REGIMES"
+            self.robust = USER.set_robust(robust)
+            self.chow = REGI.Chow(self)
+            SUMMARY.OLS(reg=self, vm=vm, w=w, nonspat_diag=nonspat_diag,
+                        spat_diag=spat_diag, moran=moran, white_test=white_test, regimes=True)
+
+    def _ols_regimes_multi(self, x, w, regi_ids, cores,
+                           gwk, sig2n_k, robust, nonspat_diag, spat_diag, vm, name_x, moran, white_test):
+        results_p = {}
+        """
+        for r in self.regimes_set:
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work(*(self.y,x,w,regi_ids,r,robust,sig2n_k,self.name_ds,self.name_y,name_x,self.name_w,self.name_regimes))
+            else:
+                pool = mp.Pool(cores)
+                results_p[r] = pool.apply_async(_work,args=(self.y,x,w,regi_ids,r,robust,sig2n_k,self.name_ds,self.name_y,name_x,self.name_w,self.name_regimes))
+                is_win = False
+        """
+        for r in self.regimes_set:
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work, args=(
+                    self.y, x, w, regi_ids, r, robust, sig2n_k, self.name_ds, self.name_y, name_x, self.name_w, self.name_regimes))
+            else:
+                results_p[r] = _work(*(self.y, x, w, regi_ids, r, robust, sig2n_k,
+                                       self.name_ds, self.name_y, name_x, self.name_w, self.name_regimes))
+
+        self.kryd = 0
+        self.kr = x.shape[1] + 1
+        self.kf = 0
+        self.nr = len(self.regimes_set)
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+
+        results = {}
+        self.name_y, self.name_x = [], []
+        counter = 0
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            counter += 1
+        self.multi = results
+        self.hac_var = x
+        if robust == 'hac':
+            hac_multi(self, gwk)
+        self.chow = REGI.Chow(self)
+        if spat_diag:
+            self._get_spat_diag_props(x, sig2n_k)
+        SUMMARY.OLS_multi(reg=self, multireg=self.multi, vm=vm, nonspat_diag=nonspat_diag,
+                          spat_diag=spat_diag, moran=moran, white_test=white_test, regimes=True, w=w)
+
+    def _get_spat_diag_props(self, x, sig2n_k):
+        self.k = self.kr
+        self._cache = {}
+        x = np.hstack((np.ones((x.shape[0], 1)), x))
+        self.x = REGI.regimeX_setup(
+            x, self.regimes, [True] * x.shape[1], self.regimes_set)
+        self.xtx = spdot(self.x.T, self.x)
+        self.xtxi = np.linalg.inv(self.xtx)
+
+
+def _work(y, x, w, regi_ids, r, robust, sig2n_k, name_ds, name_y, name_x, name_w, name_regimes):
+    y_r = y[regi_ids[r]]
+    x_r = x[regi_ids[r]]
+    x_constant = USER.check_constant(x_r)
+    if robust == 'hac':
+        robust = None
+    model = BaseOLS(y_r, x_constant, robust=robust, sig2n_k=sig2n_k)
+    model.title = "ORDINARY LEAST SQUARES ESTIMATION - REGIME %s" % r
+    model.robust = USER.set_robust(robust)
+    model.name_ds = name_ds
+    model.name_y = '%s_%s' % (str(r), name_y)
+    model.name_x = ['%s_%s' % (str(r), i) for i in name_x]
+    model.name_w = name_w
+    model.name_regimes = name_regimes
+    if w:
+        w_r, warn = REGI.w_regime(w, regi_ids[r], r, transform=True)
+        set_warn(model, warn)
+        model.w = w_r
+    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+    _test()
+    import numpy as np
+    import pysal
+    db = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
+    y_var = 'CRIME'
+    y = np.array([db.by_col(y_var)]).reshape(49, 1)
+    x_var = ['INC', 'HOVAL']
+    x = np.array([db.by_col(name) for name in x_var]).T
+    r_var = 'NSA'
+    regimes = db.by_col(r_var)
+    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    w.transform = 'r'
+    olsr = OLS_Regimes(y, x, regimes, w=w, constant_regi='many', nonspat_diag=False, spat_diag=False, name_y=y_var, name_x=['INC', 'HOVAL'],
+                       name_ds='columbus', name_regimes=r_var, name_w='columbus.gal', regime_err_sep=True, cols2regi=[True, True], sig2n_k=True, robust='white')
+    print olsr.summary
diff --git a/pysal/spreg/probit.py b/pysal/spreg/probit.py
new file mode 100644
index 0000000..4bbc0ac
--- /dev/null
+++ b/pysal/spreg/probit.py
@@ -0,0 +1,684 @@
+"""Probit regression class and diagnostics."""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+import scipy.optimize as op
+from scipy.stats import norm, chisqprob
+import scipy.sparse as SP
+import user_output as USER
+import summary_output as SUMMARY
+
+__all__ = ["Probit"]
+
+
+class BaseProbit:
+
+    """
+    Probit class to do all the computations
+
+    Parameters
+    ----------
+
+    x           : array
+                  nxk array of independent variables (assumed to be aligned with y)
+    y           : array
+                  nx1 array of dependent binary variable
+    w           : W
+                  PySAL weights instance aligned with y
+    optim       : string
+                  Optimization method.
+                  Default: 'newton' (Newton-Raphson).
+                  Alternatives: 'ncg' (Newton-CG), 'bfgs' (BFGS algorithm)
+    scalem      : string
+                  Method to calculate the scale of the marginal effects.
+                  Default: 'phimean' (Mean of individual marginal effects)
+                  Alternative: 'xmean' (Marginal effects at variables mean)
+    maxiter     : int
+                  Maximum number of iterations until optimizer stops                  
+
+    Attributes
+    ----------
+
+    x           : array
+                  Two dimensional array with n rows and one column for each
+                  independent (exogenous) variable, including the constant
+    y           : array
+                  nx1 array of dependent variable
+    betas       : array
+                  kx1 array with estimated coefficients
+    predy       : array
+                  nx1 array of predicted y values
+    n           : int
+                  Number of observations
+    k           : int
+                  Number of variables
+    vm          : array
+                  Variance-covariance matrix (kxk)
+    z_stat      : list of tuples
+                  z statistic; each tuple contains the pair (statistic,
+                  p-value), where each is a float                  
+    xmean       : array
+                  Mean of the independent variables (kx1)
+    predpc      : float
+                  Percent of y correctly predicted
+    logl        : float
+                  Log-likelihood of the estimation
+    scalem      : string
+                  Method to calculate the scale of the marginal effects.
+    scale       : float
+                  Scale of the marginal effects.
+    slopes      : array
+                  Marginal effects of the independent variables (k-1x1)
+                  Note: Disregards the presence of dummies.
+    slopes_vm   : array
+                  Variance-covariance matrix of the slopes (k-1xk-1)
+    LR          : tuple
+                  Likelihood Ratio test of all coefficients = 0
+                  (test statistics, p-value)
+    Pinkse_error: float
+                  Lagrange Multiplier test against spatial error correlation.
+                  Implemented as presented in Pinkse (2004)              
+    KP_error    : float
+                  Moran's I type test against spatial error correlation.
+                  Implemented as presented in Kelejian and Prucha (2001)
+    PS_error    : float
+                  Lagrange Multiplier test against spatial error correlation.
+                  Implemented as presented in Pinkse and Slade (1998)
+    warning     : boolean
+                  if True Maximum number of iterations exceeded or gradient 
+                  and/or function calls not changing.
+
+    References
+    ----------
+    .. [1] Pinkse, J. (2004). Moran-flavored tests with nuisance parameter. In: Anselin,
+    L., Florax, R. J., Rey, S. J. (editors) Advances in Spatial Econometrics,
+    pages 67-77. Springer-Verlag, Heidelberg.
+    .. [2] Kelejian, H., Prucha, I. (2001) "On the asymptotic distribution of the
+    Moran I test statistic with applications". Journal of Econometrics, 104(2):219-57.
+    .. [3] Pinkse, J., Slade, M. E. (1998) "Contracting in space: an application of
+    spatial statistics to discrete-choice models". Journal of Econometrics, 85(1):125-54.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> import pysal
+    >>> dbf = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> y = np.array([dbf.by_col('CRIME')]).T
+    >>> x = np.array([dbf.by_col('INC'), dbf.by_col('HOVAL')]).T
+    >>> x = np.hstack((np.ones(y.shape),x))
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
+    >>> w.transform='r'
+    >>> model = BaseProbit((y>40).astype(float), x, w=w)    
+    >>> np.around(model.betas, decimals=6)
+    array([[ 3.353811],
+           [-0.199653],
+           [-0.029514]])
+
+    >>> np.around(model.vm, decimals=6)
+    array([[ 0.852814, -0.043627, -0.008052],
+           [-0.043627,  0.004114, -0.000193],
+           [-0.008052, -0.000193,  0.00031 ]])
+
+    >>> tests = np.array([['Pinkse_error','KP_error','PS_error']])
+    >>> stats = np.array([[model.Pinkse_error[0],model.KP_error[0],model.PS_error[0]]])
+    >>> pvalue = np.array([[model.Pinkse_error[1],model.KP_error[1],model.PS_error[1]]])
+    >>> print np.hstack((tests.T,np.around(np.hstack((stats.T,pvalue.T)),6)))
+    [['Pinkse_error' '3.131719' '0.076783']
+     ['KP_error' '1.721312' '0.085194']
+     ['PS_error' '2.558166' '0.109726']]
+    """
+
+    def __init__(self, y, x, w=None, optim='newton', scalem='phimean', maxiter=100):
+        self.y = y
+        self.x = x
+        self.n, self.k = x.shape
+        self.optim = optim
+        self.scalem = scalem
+        self.w = w
+        self.maxiter = maxiter
+        par_est, self.warning = self.par_est()
+        self.betas = np.reshape(par_est[0], (self.k, 1))
+        self.logl = -float(par_est[1])
+        self._cache = {}
+
+    @property
+    def vm(self):
+        if 'vm' not in self._cache:
+            H = self.hessian(self.betas)
+            self._cache['vm'] = -la.inv(H)
+        return self._cache['vm']
+
+    @property
+    def z_stat(self):
+        if 'z_stat' not in self._cache:
+            variance = self.vm.diagonal()
+            zStat = self.betas.reshape(len(self.betas),) / np.sqrt(variance)
+            rs = {}
+            for i in range(len(self.betas)):
+                rs[i] = (zStat[i], norm.sf(abs(zStat[i])) * 2)
+            self._cache['z_stat'] = rs.values()
+        return self._cache['z_stat']
+
+    @property
+    def slopes_std_err(self):
+        if 'slopes_std_err' not in self._cache:
+            variance = self.slopes_vm.diagonal()
+            self._cache['slopes_std_err'] = np.sqrt(variance)
+        return self._cache['slopes_std_err']
+
+    @property
+    def slopes_z_stat(self):
+        if 'slopes_z_stat' not in self._cache:
+            zStat = self.slopes.reshape(
+                len(self.slopes),) / self.slopes_std_err
+            rs = {}
+            for i in range(len(self.slopes)):
+                rs[i] = (zStat[i], norm.sf(abs(zStat[i])) * 2)
+            self._cache['slopes_z_stat'] = rs.values()
+        return self._cache['slopes_z_stat']
+
+    @property
+    def xmean(self):
+        if 'xmean' not in self._cache:
+            self._cache['xmean'] = np.reshape(
+                sum(self.x) / self.n, (self.k, 1))
+        return self._cache['xmean']
+
+    @property
+    def xb(self):
+        if 'xb' not in self._cache:
+            self._cache['xb'] = np.dot(self.x, self.betas)
+        return self._cache['xb']
+
+    @property
+    def predy(self):
+        if 'predy' not in self._cache:
+            self._cache['predy'] = norm.cdf(self.xb)
+        return self._cache['predy']
+
+    @property
+    def predpc(self):
+        if 'predpc' not in self._cache:
+            predpc = abs(self.y - self.predy)
+            for i in range(len(predpc)):
+                if predpc[i] > 0.5:
+                    predpc[i] = 0
+                else:
+                    predpc[i] = 1
+            self._cache['predpc'] = float(100 * np.sum(predpc) / self.n)
+        return self._cache['predpc']
+
+    @property
+    def phiy(self):
+        if 'phiy' not in self._cache:
+            self._cache['phiy'] = norm.pdf(self.xb)
+        return self._cache['phiy']
+
+    @property
+    def scale(self):
+        if 'scale' not in self._cache:
+            if self.scalem == 'phimean':
+                self._cache['scale'] = float(1.0 * np.sum(self.phiy) / self.n)
+            if self.scalem == 'xmean':
+                self._cache['scale'] = float(
+                    norm.pdf(np.dot(self.xmean.T, self.betas)))
+        return self._cache['scale']
+
+    @property
+    def slopes(self):
+        if 'slopes' not in self._cache:
+            # Disregard the presence of dummies.
+            self._cache['slopes'] = self.betas[1:] * self.scale
+        return self._cache['slopes']
+
+    @property
+    def slopes_vm(self):
+        if 'slopes_vm' not in self._cache:
+            x = self.xmean
+            b = self.betas
+            dfdb = np.eye(self.k) - np.dot(b.T, x) * np.dot(b, x.T)
+            slopes_vm = (self.scale ** 2) * \
+                np.dot(np.dot(dfdb, self.vm), dfdb.T)
+            self._cache['slopes_vm'] = slopes_vm[1:, 1:]
+        return self._cache['slopes_vm']
+
+    @property
+    def LR(self):
+        if 'LR' not in self._cache:
+            P = 1.0 * np.sum(self.y) / self.n
+            LR = float(
+                -2 * (self.n * (P * np.log(P) + (1 - P) * np.log(1 - P)) - self.logl))
+            self._cache['LR'] = (LR, chisqprob(LR, self.k))
+        return self._cache['LR']
+
+    @property
+    def u_naive(self):
+        if 'u_naive' not in self._cache:
+            u_naive = self.y - self.predy
+            self._cache['u_naive'] = u_naive
+        return self._cache['u_naive']
+
+    @property
+    def u_gen(self):
+        if 'u_gen' not in self._cache:
+            Phi_prod = self.predy * (1 - self.predy)
+            u_gen = self.phiy * (self.u_naive / Phi_prod)
+            self._cache['u_gen'] = u_gen
+        return self._cache['u_gen']
+
+    @property
+    def Pinkse_error(self):
+        if 'Pinkse_error' not in self._cache:
+            self._cache['Pinkse_error'], self._cache[
+                'KP_error'], self._cache['PS_error'] = sp_tests(self)
+        return self._cache['Pinkse_error']
+
+    @property
+    def KP_error(self):
+        if 'KP_error' not in self._cache:
+            self._cache['Pinkse_error'], self._cache[
+                'KP_error'], self._cache['PS_error'] = sp_tests(self)
+        return self._cache['KP_error']
+
+    @property
+    def PS_error(self):
+        if 'PS_error' not in self._cache:
+            self._cache['Pinkse_error'], self._cache[
+                'KP_error'], self._cache['PS_error'] = sp_tests(self)
+        return self._cache['PS_error']
+
+    def par_est(self):
+        start = np.dot(la.inv(np.dot(self.x.T, self.x)),
+                       np.dot(self.x.T, self.y))
+        flogl = lambda par: -self.ll(par)
+        if self.optim == 'newton':
+            fgrad = lambda par: self.gradient(par)
+            fhess = lambda par: self.hessian(par)
+            par_hat = newton(flogl, start, fgrad, fhess, self.maxiter)
+            warn = par_hat[2]
+        else:
+            fgrad = lambda par: -self.gradient(par)
+            if self.optim == 'bfgs':
+                par_hat = op.fmin_bfgs(
+                    flogl, start, fgrad, full_output=1, disp=0)
+                warn = par_hat[6]
+            if self.optim == 'ncg':
+                fhess = lambda par: -self.hessian(par)
+                par_hat = op.fmin_ncg(
+                    flogl, start, fgrad, fhess=fhess, full_output=1, disp=0)
+                warn = par_hat[5]
+        if warn > 0:
+            warn = True
+        else:
+            warn = False
+        return par_hat, warn
+
+    def ll(self, par):
+        beta = np.reshape(np.array(par), (self.k, 1))
+        q = 2 * self.y - 1
+        qxb = q * np.dot(self.x, beta)
+        ll = sum(np.log(norm.cdf(qxb)))
+        return ll
+
+    def gradient(self, par):
+        beta = np.reshape(np.array(par), (self.k, 1))
+        q = 2 * self.y - 1
+        qxb = q * np.dot(self.x, beta)
+        lamb = q * norm.pdf(qxb) / norm.cdf(qxb)
+        gradient = np.dot(lamb.T, self.x)[0]
+        return gradient
+
+    def hessian(self, par):
+        beta = np.reshape(np.array(par), (self.k, 1))
+        q = 2 * self.y - 1
+        xb = np.dot(self.x, beta)
+        qxb = q * xb
+        lamb = q * norm.pdf(qxb) / norm.cdf(qxb)
+        hessian = np.dot((self.x.T), (-lamb * (lamb + xb) * self.x))
+        return hessian
+
+
+class Probit(BaseProbit):
+
+    """
+    Classic non-spatial Probit and spatial diagnostics. The class includes a
+    printout that formats all the results and tests in a nice format.
+
+    The diagnostics for spatial dependence currently implemented are:
+
+        * Pinkse Error [1]_
+        * Kelejian and Prucha Moran's I [2]_
+        * Pinkse & Slade Error [3]_
+
+    Parameters
+    ----------
+
+    x           : array
+                  nxk array of independent variables (assumed to be aligned with y)
+    y           : array
+                  nx1 array of dependent binary variable
+    w           : W
+                  PySAL weights instance aligned with y
+    optim       : string
+                  Optimization method.
+                  Default: 'newton' (Newton-Raphson).
+                  Alternatives: 'ncg' (Newton-CG), 'bfgs' (BFGS algorithm)
+    scalem      : string
+                  Method to calculate the scale of the marginal effects.
+                  Default: 'phimean' (Mean of individual marginal effects)
+                  Alternative: 'xmean' (Marginal effects at variables mean)
+    maxiter     : int
+                  Maximum number of iterations until optimizer stops                  
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+
+    x           : array
+                  Two dimensional array with n rows and one column for each
+                  independent (exogenous) variable, including the constant
+    y           : array
+                  nx1 array of dependent variable
+    betas       : array
+                  kx1 array with estimated coefficients
+    predy       : array
+                  nx1 array of predicted y values
+    n           : int
+                  Number of observations
+    k           : int
+                  Number of variables
+    vm          : array
+                  Variance-covariance matrix (kxk)
+    z_stat      : list of tuples
+                  z statistic; each tuple contains the pair (statistic,
+                  p-value), where each is a float                  
+    xmean       : array
+                  Mean of the independent variables (kx1)
+    predpc      : float
+                  Percent of y correctly predicted
+    logl        : float
+                  Log-likelihood of the estimation
+    scalem      : string
+                  Method to calculate the scale of the marginal effects.
+    scale       : float
+                  Scale of the marginal effects.
+    slopes      : array
+                  Marginal effects of the independent variables (k-1x1)
+    slopes_vm   : array
+                  Variance-covariance matrix of the slopes (k-1xk-1)
+    LR          : tuple
+                  Likelihood Ratio test of all coefficients = 0
+                  (test statistics, p-value)
+    Pinkse_error: float
+                  Lagrange Multiplier test against spatial error correlation.
+                  Implemented as presented in Pinkse (2004)              
+    KP_error    : float
+                  Moran's I type test against spatial error correlation.
+                  Implemented as presented in Kelejian and Prucha (2001)
+    PS_error    : float
+                  Lagrange Multiplier test against spatial error correlation.
+                  Implemented as presented in Pinkse and Slade (1998)
+    warning     : boolean
+                  if True Maximum number of iterations exceeded or gradient 
+                  and/or function calls not changing.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+
+    References
+    ----------
+    .. [1] Pinkse, J. (2004). Moran-flavored tests with nuisance parameter. In: Anselin, L., Florax, R. J., Rey, S. J. (editors) Advances in Spatial Econometrics, pages 67-77. Springer-Verlag, Heidelberg.
+    .. [2] Kelejian, H., Prucha, I. (2001) "On the asymptotic distribution of the Moran I test statistic with applications". Journal of Econometrics, 104(2):219-57.
+    .. [3] Pinkse, J., Slade, M. E. (1998) "Contracting in space: an application of spatial statistics to discrete-choice models". Journal of Econometrics, 85(1):125-54.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> dbf = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+
+    Extract the CRIME column (crime) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept. Since we want to run a probit model and for this
+    example we use the Columbus data, we also need to transform the continuous
+    CRIME variable into a binary variable. As in McMillen, D. (1992) "Probit with
+    spatial autocorrelation". Journal of Regional Science 32(3):335-48, we define
+    y = 1 if CRIME > 40.
+
+    >>> y = np.array([dbf.by_col('CRIME')]).T
+    >>> y = (y>40).astype(float)
+
+    Extract HOVAL (home values) and INC (income) vectors from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this class adds a vector of ones to the
+    independent variables passed in.
+
+    >>> names_to_extract = ['INC', 'HOVAL']
+    >>> x = np.array([dbf.by_col(name) for name in names_to_extract]).T
+
+    Since we want to test the probit model for spatial dependence, we need to
+    specify the spatial weights matrix that includes the spatial configuration of
+    the observations into the error component of the model. To do that, we can open
+    an already existing gal file or create a new one. In this case, we will use
+    ``columbus.gal``, which contains contiguity relationships between the
+    observations in the Columbus dataset we are using throughout this example.
+    Note that, in order to read the file, not only to open it, we need to
+    append '.read()' at the end of the command.
+
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read() 
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. In PySAL, this
+    can be easily performed in the following way:
+
+    >>> w.transform='r'
+
+    We are all set with the preliminaries, we are good to run the model. In this
+    case, we will need the variables and the weights matrix. If we want to
+    have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional. 
+
+    >>> model = Probit(y, x, w=w, name_y='crime', name_x=['income','home value'], name_ds='columbus', name_w='columbus.gal')
+
+    Once we have run the model, we can explore a little bit the output. The
+    regression object we have created has many attributes so take your time to
+    discover them.
+
+    >>> np.around(model.betas, decimals=6)
+    array([[ 3.353811],
+           [-0.199653],
+           [-0.029514]])
+
+    >>> np.around(model.vm, decimals=6)
+    array([[ 0.852814, -0.043627, -0.008052],
+           [-0.043627,  0.004114, -0.000193],
+           [-0.008052, -0.000193,  0.00031 ]])
+
+    Since we have provided a spatial weights matrix, the diagnostics for
+    spatial dependence have also been computed. We can access them and their
+    p-values individually:
+
+    >>> tests = np.array([['Pinkse_error','KP_error','PS_error']])
+    >>> stats = np.array([[model.Pinkse_error[0],model.KP_error[0],model.PS_error[0]]])
+    >>> pvalue = np.array([[model.Pinkse_error[1],model.KP_error[1],model.PS_error[1]]])
+    >>> print np.hstack((tests.T,np.around(np.hstack((stats.T,pvalue.T)),6)))
+    [['Pinkse_error' '3.131719' '0.076783']
+     ['KP_error' '1.721312' '0.085194']
+     ['PS_error' '2.558166' '0.109726']]
+
+    Or we can easily obtain a full summary of all the results nicely formatted and
+    ready to be printed simply by typing 'print model.summary'
+
+    """
+
+    def __init__(
+        self, y, x, w=None, optim='newton', scalem='phimean', maxiter=100,
+        vm=False, name_y=None, name_x=None, name_w=None, name_ds=None,
+            spat_diag=False):
+
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        if w:
+            USER.check_weights(w, y)
+            spat_diag = True
+        x_constant = USER.check_constant(x)
+        BaseProbit.__init__(self, y=y, x=x_constant, w=w,
+                            optim=optim, scalem=scalem, maxiter=maxiter)
+        self.title = "CLASSIC PROBIT ESTIMATOR"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_w = USER.set_name_w(name_w, w)
+        SUMMARY.Probit(reg=self, w=w, vm=vm, spat_diag=spat_diag)
+
+
def newton(flogl, start, fgrad, fhess, maxiter):
    """
    Newton-Raphson maximization of a log-likelihood.

    Parameters
    ----------

    flogl       : callable
                  Returns the log-likelihood at a parameter vector
    start       : array
                  kx1 array of starting values
    fgrad       : callable
                  Returns the gradient at a parameter vector
    fhess       : callable
                  Returns the hessian at a parameter vector
    maxiter     : int
                  Maximum number of iterations before the optimizer stops

    Returns
    -------
    tuple of (parameter estimates, log-likelihood at the estimates,
    warning flag). The flag is 1 when the iteration budget was exhausted,
    0 otherwise.
    """
    params = start
    n_iter = 0
    # Convergence measure: quadratic form g'H^{-1}g of the last step.
    crit = 1
    while n_iter < maxiter and crit >= 1e-04:
        neg_hess_inv = -la.inv(fhess(params))
        grad = fgrad(params).reshape(start.shape)
        step = np.dot(neg_hess_inv, grad)
        params = params + step
        crit = np.dot(grad.T, step)
        n_iter += 1
    flag = 1 if n_iter == maxiter else 0
    return (params, flogl(params), flag)
+
+
def sp_tests(reg):
    """
    Calculates tests for spatial dependence in Probit models

    Parameters
    ----------

    reg         : regression object
                  output instance from a probit model

    Returns
    -------
    LM_err      : array
                  Pinkse LM error statistic and its chi-square(1) p-value
    moran       : array
                  Kelejian-Prucha Moran-type statistic and its p-value
    ps          : array
                  Pinkse-Slade statistic and its chi-square(1) p-value

    Raises
    ------
    Exception
                  If the regression object carries no weights matrix.
    """
    # chisqprob was removed from recent SciPy releases; chi2.sf is its
    # exact replacement (survival function of the chi-square distribution).
    from scipy.stats import chi2
    if not reg.w:
        # Guard clause (was a trailing else with Python-2-only raise syntax).
        raise Exception("W matrix not provided to calculate spatial test.")
    w = reg.w.sparse
    Phi = reg.predy
    phi = reg.phiy
    # Pinkse_error:
    Phi_prod = Phi * (1 - Phi)
    u_naive = reg.u_naive
    u_gen = reg.u_gen
    sig2 = np.sum((phi * phi) / Phi_prod) / reg.n
    LM_err_num = np.dot(u_gen.T, (w * u_gen)) ** 2
    trWW = np.sum((w * w).diagonal())
    trWWWWp = trWW + np.sum((w * w.T).diagonal())
    LM_err = float(1.0 * LM_err_num / (sig2 ** 2 * trWWWWp))
    LM_err = np.array([LM_err, chi2.sf(LM_err, 1)])
    # KP_error:
    moran = moran_KP(reg.w, u_naive, Phi_prod)
    # Pinkse-Slade_error:
    u_std = u_naive / np.sqrt(Phi_prod)
    ps_num = np.dot(u_std.T, (w * u_std)) ** 2
    trWpW = np.sum((w.T * w).diagonal())
    ps = float(ps_num / (trWW + trWpW))
    # chi-square instead of bootstrap.
    ps = np.array([ps, chi2.sf(ps, 1)])
    return LM_err, moran, ps
+
+
+def moran_KP(w, u, sig2i):
+    """
+    Calculates Moran-flavoured tests 
+
+    Parameters
+    ----------
+
+    w           : W
+                  PySAL weights instance aligned with y
+    u           : array
+                  nx1 array of naive residuals
+    sig2i       : array
+                  nx1 array of individual variance               
+    """
+    w = w.sparse
+    moran_num = np.dot(u.T, (w * u))
+    E = SP.lil_matrix(w.get_shape())
+    E.setdiag(sig2i.flat)
+    E = E.asformat('csr')
+    WE = w * E
+    moran_den = np.sqrt(np.sum((WE * WE + (w.T * E) * WE).diagonal()))
+    moran = float(1.0 * moran_num / moran_den)
+    moran = np.array([moran, norm.sf(abs(moran)) * 2.])
+    return moran
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
if __name__ == '__main__':
    _test()
    import numpy as np
    import pysal

    # Smoke run on the Columbus sample data: binarize CRIME at 40 and fit
    # a classic probit with spatial diagnostics.
    dbf = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
    crime = np.array([dbf.by_col('CRIME')]).T
    var_x = ['INC', 'HOVAL']
    x = np.array([dbf.by_col(name) for name in var_x]).T
    w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
    w.transform = 'r'
    probit1 = Probit((crime > 40).astype(float), x, w=w, name_x=var_x,
                     name_y="CRIME", name_ds="Columbus",
                     name_w="columbus.dbf")
    # print probit1.summary
diff --git a/pysal/spreg/regimes.py b/pysal/spreg/regimes.py
new file mode 100644
index 0000000..e633576
--- /dev/null
+++ b/pysal/spreg/regimes.py
@@ -0,0 +1,689 @@
+import numpy as np
+import pysal
+import scipy.sparse as SP
+import itertools as iter
+from scipy.stats import f, chisqprob
+import numpy.linalg as la
+from utils import spbroadcast
+
+"""
+Tools for different regimes procedure estimations
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, \
+        Daniel Arribas-Bel darribas at asu.edu, \
+        Pedro V. Amaral pedro.amaral at asu.edu"
+
+
class Chow:

    '''
    Chow test of coefficient stability across regimes. The test is a
    particular case of the Wald statistic in which the constraints are set
    up according to the spatial or other type of regime structure
    ...

    Parameters
    ==========
    reg     : regression object
              Regression object from PySAL.spreg which is assumed to have the
              following attributes:

                    * betas : coefficient estimates
                    * vm    : variance covariance matrix of betas
                    * kr    : Number of variables varying across regimes
                    * kryd  : Number of endogenous variables varying across regimes
                    * kf    : Number of variables fixed (global) across regimes
                    * nr    : Number of regimes

    Attributes
    ==========
    joint   : tuple
              Pair of Wald statistic and p-value for the setup of global
              regime stability, that is all betas are the same across
              regimes.
    regi    : array
              kr x 2 array with Wald statistic (col 0) and its p-value (col 1)
              for each beta that varies across regimes. The restrictions
              are setup to test for the global stability (all regimes have the
              same parameter) of the beta.

    Examples
    ========
    >>> import numpy as np
    >>> import pysal
    >>> from ols_regimes import OLS_Regimes
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y_var = 'CRIME'
    >>> y = np.array([db.by_col(y_var)]).reshape(49,1)
    >>> x_var = ['INC','HOVAL']
    >>> x = np.array([db.by_col(name) for name in x_var]).T
    >>> r_var = 'NSA'
    >>> regimes = db.by_col(r_var)
    >>> olsr = OLS_Regimes(y, x, regimes, constant_regi='many', nonspat_diag=False, spat_diag=False, name_y=y_var, name_x=x_var, name_ds='columbus', name_regimes=r_var, regime_err_sep=False)
    >>> print olsr.name_x_r #x_var
    ['CONSTANT', 'INC', 'HOVAL']
    >>> print olsr.chow.regi
    [[ 0.01020844  0.91952121]
     [ 0.46024939  0.49750745]
     [ 0.55477371  0.45637369]]
    >>> print 'Joint test:'
    Joint test:
    >>> print olsr.chow.joint
    (0.6339319928978806, 0.8886223520178802)
    '''

    def __init__(self, reg):
        kr, kf, kryd, nr = reg.kr, reg.kf, reg.kryd, reg.nr
        betas, vm = reg.betas, reg.vm
        if betas.shape[0] != vm.shape[0]:
            # Align betas with the variance-covariance matrix: either drop
            # the trailing fixed coefficients, or keep only the
            # regime-varying rows.
            if kf > 0:
                betas = betas[0:vm.shape[0], :]
                kf -= 1
            else:
                keep = []
                for regime in range(nr):
                    offset = regime * (kr + 1)
                    keep.extend(range(offset, offset + kr))
                betas = betas[keep, :]
        restrictions = []
        per_var = np.zeros((reg.kr, 2))
        # One Wald test per regime-varying coefficient...
        for position in np.arange(kr):
            r_i = buildR1var(position, kr, kf, kryd, nr)
            restrictions.append(r_i)
            zeros_q = np.zeros((r_i.shape[0], 1))
            per_var[position, :] = wald_test(betas, r_i, zeros_q, vm)
        # ...and one joint test stacking every restriction.
        stacked = np.vstack(tuple(restrictions))
        zeros_q = np.zeros((stacked.shape[0], 1))
        self.joint = wald_test(betas, stacked, zeros_q, vm)
        self.regi = per_var
+
+
class Wald:

    '''
    Chi sq. Wald statistic to test for restriction of coefficients.
    Implementation following Greene [1]_ eq. (17-24), p. 488
    ...

    Parameters
    ==========
    reg     : regression object
              Regression object from PySAL.spreg
    r       : array
              Array of dimension Rxk (R being number of restrictions) with constraint setup.
    q       : array
              Rx1 array with constants in the constraint setup. See Greene
              [1]_ for reference. Defaults to a vector of zeros.

    Attributes
    ==========
    w       : float
              Wald statistic
    pvalue  : float
              P value for Wald statistic calculated as a Chi sq. distribution
              with R degrees of freedom

    References
    ==========
    .. [1] W. Greene. 2003. Econometric Analysis (Fifth Edition). Prentice Hall, Upper
       Saddle River.
    '''

    def __init__(self, reg, r, q=None):
        # BUG FIX: the original used 'if not q:', which raises ValueError
        # for multi-element numpy arrays (ambiguous truth value) and would
        # wrongly rebuild q when an all-zeros array was passed on purpose.
        if q is None:
            q = np.zeros((r.shape[0], 1))
        self.w, self.pvalue = wald_test(reg.betas, r, q, reg.vm)
+
+
class Regimes_Frame:

    '''
    Setup framework to work with regimes. Basically it involves:
        * Dealing with the constant in a regimes world
        * Creating a sparse representation of X
        * Generating a list of names of X taking into account regimes
    ...

    Parameters
    ==========
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: [False, 'one', 'many']
                   Switcher controlling the constant term setup. It may take
                   the following values:

                     *  False: no constant term is appended in any way
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime (default)
    cols2regi    : list, 'all'
                   Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all' (default), all the variables vary by regime.
    names         : None, list of strings
                   Names of independent variables for use in output
    yend         : boolean
                   If True, 'x' is treated as the endogenous-variable block
                   and the regime counts are accumulated on top of the ones
                   set by a previous (exogenous) call on the same instance.

    Returns
    =======
    x            : csr sparse matrix
                   Sparse matrix containing X variables properly aligned for
                   regimes regression. 'xsp' is of dimension (n, k*r) where 'r'
                   is the number of different regimes
                   The structure of the alignment is X1r1 X2r1 ... X1r2 X2r2 ...
    names        : None, list of strings
                   Names of independent variables for use in output
                   conveniently arranged by regimes. The structure of the name
                   is "regimeName_-_varName"
    kr           : int
                   Number of variables/columns to be "regimized" or subject
                   to change by regime. These will result in one parameter
                   estimate by regime for each variable (i.e. nr parameters per
                   variable)
    kf           : int
                   Number of variables/columns to be considered fixed or
                   global across regimes and hence only obtain one parameter
                   estimate
    nr           : int
                   Number of different regimes in the 'regimes' list

    '''

    def __init__(self, x, regimes, constant_regi, cols2regi, names=None, yend=False):
        if cols2regi == 'all':
            cols2regi = [True] * x.shape[1]
        else:
            # Endogenous variables map to the tail of cols2regi, exogenous
            # ones to the head.
            if yend:
                cols2regi = cols2regi[-x.shape[1]:]
            else:
                cols2regi = cols2regi[0:x.shape[1]]
        if constant_regi:
            x = np.hstack((np.ones((x.shape[0], 1)), x))
            if constant_regi == 'one':
                cols2regi.insert(0, False)
            elif constant_regi == 'many':
                cols2regi.insert(0, True)
            else:
                # BUG FIX: this branch previously referenced an undefined
                # name ('constant'), raising NameError instead of the
                # intended message, and used Python-2-only raise syntax.
                raise Exception("Invalid argument (%s) passed for 'constant_regi'. Please specify a valid term." % str(constant_regi))
        try:
            x = regimeX_setup(x, regimes, cols2regi,
                              self.regimes_set, constant=constant_regi)
        except AttributeError:
            # First call on this instance: derive the ordered regime tags.
            self.regimes_set = _get_regimes_set(regimes)
            x = regimeX_setup(x, regimes, cols2regi,
                              self.regimes_set, constant=constant_regi)

        kr = len(np.where(np.array(cols2regi) == True)[0])
        if yend:
            # Accumulate on top of the exogenous counts set by a prior call.
            self.kr += kr
            self.kf += len(cols2regi) - kr
            self.kryd = kr
        else:
            self.kr = kr
            self.kf = len(cols2regi) - self.kr
            self.kryd = 0
        self.nr = len(set(regimes))

        if names:
            names = set_name_x_regimes(
                names, regimes, constant_regi, cols2regi, self.regimes_set)

        # NOTE: returning from __init__ is intentional -- subclasses call
        # this method explicitly and consume the returned pair; the class
        # is not meant to be instantiated directly.
        return (x, names)
+
+
def wald_test(betas, r, q, vm):
    '''
    Chi sq. Wald statistic to test for restriction of coefficients.
    Implementation following Greene [1]_ eq. (17-24), p. 488
    ...

    Parameters
    ==========
    betas   : array
              kx1 array with coefficient estimates
    r       : array
              Array of dimension Rxk (R being number of restrictions) with constraint setup.
    q       : array
              Rx1 array with constants in the constraint setup. See Greene
              [1]_ for reference.
    vm      : array
              kxk variance-covariance matrix of coefficient estimates

    Returns
    =======
    w       : float
              Wald statistic
    pvalue  : float
              P value for Wald statistic calculated as a Chi sq. distribution
              with R degrees of freedom

    References
    ==========
    .. [1] W. Greene. 2003. Econometric Analysis (Fifth Edition). Prentice Hall, Upper
       Saddle River.
    '''
    # chisqprob was removed from SciPy (>= 1.0); chi2.sf is its exact
    # replacement (survival function of the chi-square distribution).
    from scipy.stats import chi2
    rbq = np.dot(r, betas) - q
    rvri = la.inv(np.dot(r, np.dot(vm, r.T)))
    w = np.dot(rbq.T, np.dot(rvri, rbq))[0][0]
    # Degrees of freedom equal the number of restrictions.
    df = r.shape[0]
    pvalue = chi2.sf(w, df)
    return w, pvalue
+
+
def buildR(kr, kf, nr):
    '''
    Build R matrix to globally test for spatial heterogeneity across regimes.
    The constraint setup reflects the null that every beta is the same
    across regimes
    ...

    Parameters
    ==========
    kr      : int
              Number of variables that vary across regimes ("regimized")
    kf      : int
              Number of variables that do not vary across regimes ("fixed" or
              global)
    nr      : int
              Number of regimes

    Returns
    =======
    R       : array
              Array with the constraint setup to jointly test stability
              across regimes of every regime-varying variable
    '''
    # BUG FIX: the original passed only four positional arguments through
    # map() to buildR1var, which takes five (kryd was missing), so every
    # call raised TypeError. This helper models no endogenous
    # regime-varying variables, hence kryd = 0.
    return np.vstack(tuple(buildR1var(vari, kr, kf, 0, nr)
                           for vari in np.arange(kr)))
+
+
def buildR1var(vari, kr, kf, kryd, nr):
    '''
    Build R matrix to test for spatial heterogeneity across regimes in one
    variable. The constraint setup reflects the null that the betas for
    variable 'vari' are the same across regimes
    ...

    Parameters
    ==========
    vari    : int
              Position of the variable to be tested (order in the sequence of
              variables per regime)
    kr      : int
              Number of variables that vary across regimes ("regimized")
    kf      : int
              Number of variables that do not vary across regimes ("fixed" or
              global)
    kryd    : int
              Number of endogenous variables that vary across regimes
    nr      : int
              Number of regimes

    Returns
    =======
    R       : array
              Array with constraint setup to test stability across regimes of
              one variable
    '''
    n_cols = kr * nr
    n_rows = nr - 1
    constraints = np.zeros((n_rows, n_cols), dtype=int)
    krexog = kr - kryd
    if vari < krexog:
        # Exogenous block: variables repeat every krexog columns.
        block_width, first_col = krexog, vari
    else:
        # Endogenous block: placed after all exogenous regime columns.
        block_width, first_col = kryd, krexog * (nr - 1) + vari
    constraints[:, first_col] = 1
    for row in np.arange(n_rows):
        constraints[row, first_col + (row + 1) * block_width] = -1
    # Append zero columns for the fixed (global) coefficients.
    return np.hstack((constraints, np.zeros((n_rows, kf), dtype=int)))
+
+
def regimeX_setup(x, regimes, cols2regi, regimes_set, constant=False):
    '''
    Flexible full setup of a regime structure

    NOTE: constant term, if desired in the model, should be included in the x
    already
    ...

    Parameters
    ==========
    x           : np.array
                  Dense array of dimension (n, k) with values for all observations
                  IMPORTANT: constant term (if desired in the model) should be
                  included
    regimes     : list
                  list of n values with the mapping of each observation to a
                  regime. Assumed to be aligned with 'x'.
    cols2regi   : list
                  List of k booleans indicating whether each column should be
                  considered as different per regime (True) or held constant
                  across regimes (False)
    regimes_set : list
                  List of ordered regimes tags
    constant    : [False, 'one', 'many']
                  Switcher controlling the constant term setup. It may take
                  the following values:

                    *  False: no constant term is appended in any way
                    *  'one': a vector of ones is appended to x and held
                              constant across regimes
                    * 'many': a vector of ones is appended to x and considered
                              different per regime

    Returns
    =======
    xsp         : csr sparse matrix
                  Sparse matrix containing the full setup for a regimes model
                  as specified in the arguments passed
                  NOTE: columns are reordered so first are all the regime
                  columns then all the global columns (this makes it much more
                  efficient)
                  Structure of the output matrix (assuming X1, X2 to vary
                  across regimes and constant term, X3 and X4 to be global):
                    X1r1, X2r1, ... , X1r2, X2r2, ... , constant, X3, X4
    '''
    flags = np.array(cols2regi)
    if set(flags) == set([True]):
        # Every column varies by regime.
        result = x2xsp(x, regimes, regimes_set)
    elif set(flags) == set([False]):
        # No column varies: plain sparse copy of x.
        result = SP.csr_matrix(x)
    else:
        # Mixed case: regime-varying columns first, then the global ones.
        global_cols = x[:, np.where(flags == False)[0]]
        regime_cols = x2xsp(x[:, np.where(flags)[0]], regimes, regimes_set)
        result = SP.hstack((regime_cols, SP.csr_matrix(global_cols)),
                           format='csr')
    return result
+
+
def set_name_x_regimes(name_x, regimes, constant_regi, cols2regi, regimes_set):
    '''
    Generate the set of variable names in a regimes setup, according to the
    order of the betas

    NOTE: constant term, if desired in the model, should be included in the x
    already
    ...

    Parameters
    ==========
    name_x          : list/None
                      If passed, list of strings with the names of the
                      variables aligned with the original dense array x
                      IMPORTANT: constant term (if desired in the model) should be
                      included
    regimes         : list
                      list of n values with the mapping of each observation to a
                      regime. Assumed to be aligned with 'x'.
                      NOTE: kept for interface compatibility; only
                      'regimes_set' is actually used to build the names.
    constant_regi   : [False, 'one', 'many']
                      Switcher controlling the constant term setup. It may take
                      the following values:

                         *  False: no constant term is appended in any way
                         *  'one': a vector of ones is appended to x and held
                                   constant across regimes
                         * 'many': a vector of ones is appended to x and considered
                                   different per regime
    cols2regi       : list
                      List of k booleans indicating whether each column should be
                      considered as different per regime (True) or held constant
                      across regimes (False)
    regimes_set     : list
                      List of ordered regimes tags
    Returns
    =======
    name_x_regi     : list of strings
                      Regime-varying names first (grouped by regime), then
                      the global ones prefixed with '_Global_'
    '''
    k = len(cols2regi)
    if constant_regi:
        k -= 1
    if not name_x:
        name_x = ['var_' + str(i + 1) for i in range(k)]
    else:
        # BUG FIX: copy before inserting so the caller's list is not
        # mutated as a side effect of building the regime names.
        name_x = list(name_x)
    if constant_regi:
        name_x.insert(0, 'CONSTANT')
    nxa = np.array(name_x)
    c2ra = np.array(cols2regi)
    vars_regi = nxa[np.where(c2ra == True)]
    vars_glob = nxa[np.where(c2ra == False)]
    name_x_regi = []
    for r in regimes_set:
        name_x_regi.extend(['%s_%s' % (str(r), i) for i in vars_regi])
    name_x_regi.extend(['_Global_%s' % i for i in vars_glob])
    return name_x_regi
+
+
def w_regime(w, regi_ids, regi_i, transform=True, min_n=None):
    '''
    Returns the subset of W matrix according to a given regime ID
    ...

    Attributes
    ==========
    w           : pysal W object
                  Spatial weights object
    regi_ids    : list
                  Contains the location of observations in y that are assigned to regime regi_i
    regi_i      : string or float
                  The regime for which W will be subset
    transform   : boolean
                  If True (default), apply the parent W's transform to the subset
    min_n       : int or None
                  If given, raise when the subset has fewer observations

    Returns
    =======
    w_regi_i    : pysal W object
                  Subset of W for regime regi_i
    warn        : string or None
                  Warning message when the subset contains islands
    '''
    # list(map(...)) keeps the Python-2 semantics (map returned a list)
    # while staying valid under Python 3.
    w_ids = list(map(w.id_order.__getitem__, regi_ids))
    warn = None
    w_regi_i = pysal.weights.w_subset(w, w_ids, silent_island_warning=True)
    if min_n:
        if w_regi_i.n < min_n:
            # Modern raise syntax (the original Python-2-only form is a
            # SyntaxError under Python 3).
            raise Exception("There are less observations than variables in regime %s." % regi_i)
    if transform:
        w_regi_i.transform = w.get_transform()
    if w_regi_i.islands:
        warn = "The regimes operation resulted in islands for regime %s." % regi_i
    return w_regi_i, warn
+
+
def w_regimes(w, regimes, regimes_set, transform=True, get_ids=None, min_n=None):
    '''
    ######### DEPRECATED ##########
    Subsets W matrix according to regimes
    ...

    Attributes
    ==========
    w           : pysal W object
                  Spatial weights object
    regimes     : list
                  list of n values with the mapping of each observation to a
                  regime. Assumed to be aligned with 'x'.
    regimes_set : list
                  List of ordered regimes tags

    Returns
    =======
    w_regi      : dictionary
                  Dictionary containing the subsets of W according to regimes: [r1:w1, r2:w2, ..., rR:wR]
    '''
    regi_ids = dict((r, list(np.where(np.array(regimes) == r)[0]))
                    for r in regimes_set)
    # list(map(...)) keeps the Python-2 semantics while remaining valid
    # under Python 3.
    w_ids = dict((r, list(map(w.id_order.__getitem__, regi_ids[r])))
                 for r in regimes_set)
    w_regi_i = {}
    warn = None
    for r in regimes_set:
        w_regi_i[r] = pysal.weights.w_subset(w, w_ids[r],
                                             silent_island_warning=True)
        if min_n:
            if w_regi_i[r].n < min_n:
                # Modern raise syntax (the original Python-2-only form is a
                # SyntaxError under Python 3).
                raise Exception("There are less observations than variables in regime %s." % r)
        if transform:
            w_regi_i[r].transform = w.get_transform()
        if w_regi_i[r].islands:
            warn = "The regimes operation resulted in islands for regime %s." % r
    if get_ids:
        get_ids = regi_ids
    return w_regi_i, get_ids, warn
+
+
def w_regimes_union(w, w_regi_i, regimes_set):
    '''
    Combines the subsets of the W matrix according to regimes
    ...

    Attributes
    ==========
    w           : pysal W object
                  Spatial weights object
    w_regi_i    : dictionary
                  Dictionary containing the subsets of W according to regimes: [r1:w1, r2:w2, ..., rR:wR]
    regimes_set : list
                  List of ordered regimes tags

    Returns
    =======
    w_regi      : pysal W object
                  Spatial weights object containing the union of the subsets of W
    '''
    # Start from the union of the first two regime subsets...
    merged = pysal.weights.w_union(w_regi_i[regimes_set[0]],
                                   w_regi_i[regimes_set[1]],
                                   silent_island_warning=True)
    # ...and fold in any remaining regime subsets one at a time.
    for regime in regimes_set[2:]:
        merged = pysal.weights.w_union(merged, w_regi_i[regime],
                                       silent_island_warning=True)
    # Restore the id ordering of the parent weights object.
    w_regi = pysal.weights.remap_ids(
        merged, dict((i, i) for i in merged.id_order), w.id_order)
    w_regi.transform = w.get_transform()
    return w_regi
+
+
def x2xsp(x, regimes, regimes_set):
    '''
    Convert X matrix with regimes into a sparse X matrix that accounts for the
    regimes
    ...

    Attributes
    ==========
    x           : np.array
                  Dense array of dimension (n, k) with values for all observations
    regimes     : list
                  list of n values with the mapping of each observation to a
                  regime. Assumed to be aligned with 'x'.
    regimes_set : list
                  List of ordered regimes tags
    Returns
    =======
    xsp         : csr sparse matrix
                  Sparse matrix containing X variables properly aligned for
                  regimes regression. 'xsp' is of dimension (n, k*r) where 'r'
                  is the number of different regimes
                  The structure of the alignment is X1r1 X2r1 ... X1r2 X2r2 ...
    '''
    n, k = x.shape
    data = x.flatten()
    R = len(regimes_set)
    # Column layout: X1r1 X2r1 ... X1r2 X2r2 ...
    regime_by_row = np.array([[r] * k for r in list(regimes_set)]).flatten()
    row_map = dict((r, np.where(regime_by_row == r)[0]) for r in regimes_set)
    indices = np.array([row_map[row] for row in regimes]).flatten()
    indptr = np.zeros((n + 1, ), dtype=int)
    indptr[:-1] = list(np.arange(n) * k)
    indptr[-1] = n * k
    # BUG FIX: pass the shape explicitly. Without it, csr_matrix infers the
    # width from the largest column index actually used, producing a matrix
    # that is too narrow whenever the trailing regime(s) in regimes_set have
    # no observations in 'regimes'.
    return SP.csr_matrix((data, indices, indptr), shape=(n, k * R))
+
+
def check_cols2regi(constant_regi, cols2regi, x, yend=None, add_cons=True):
    ''' Checks if dimensions of list cols2regi match number of variables.

    Parameters
    ----------
    constant_regi : [False, 'one', 'many']
                    Constant term setup; determines the entry prepended for
                    the constant when add_cons is True
    cols2regi     : list or 'all'
                    Per-variable regime flags, or 'all' for every variable
    x             : array
                    (n, k) array of exogenous variables
    yend          : array or None
                    Optional (n, j) array of endogenous variables
    add_cons      : boolean
                    If True, prepend the constant's flag to the result

    Returns
    -------
    cols2regi     : list
                    Validated list of booleans (constant entry first when
                    applicable)
    '''
    if add_cons:
        is_cons = 1
        if constant_regi == 'many':
            regi_cons = [True]
        elif constant_regi == 'one':
            regi_cons = [False]
        else:
            # Previously any other value hit a NameError below; treat it as
            # "no constant entry to prepend" and let the length check fire.
            regi_cons = []
    else:
        is_cons = 0
        regi_cons = []
    # Explicit None check instead of a bare except that hid real errors.
    if yend is not None:
        tot_k = x.shape[1] + yend.shape[1]
    else:
        tot_k = x.shape[1]
    if cols2regi == 'all':
        cols2regi = regi_cons + [True] * tot_k
    else:
        cols2regi = regi_cons + cols2regi
    if len(cols2regi) - is_cons != tot_k:
        # Modern raise syntax (the original Python-2-only form is a
        # SyntaxError under Python 3); also fixes the 'lenght' typo.
        raise Exception("The length of list 'cols2regi' must be equal to the amount of variables (exogenous + endogenous) when not using cols2regi=='all'.")
    return cols2regi
+
+
+def _get_regimes_set(regimes):
+    ''' Creates a list with regimes in alphabetical order. '''
+    regimes_set = list(set(regimes))
+    if isinstance(regimes_set[0], float):
+        regimes_set1 = list(set(map(int, regimes_set)))
+        if len(regimes_set1) == len(regimes_set):
+            regimes_set = regimes_set1
+    regimes_set.sort()
+    return regimes_set
+
+
def _get_weighted_var(regimes, regimes_set, sig2n_k, u, y, x, yend=None, q=None):
    """
    Rescale y and x (and optionally yend, q) by the inverse standard
    deviation of the residuals within each regime (FGLS-style reweighting).

    Parameters
    ----------
    regimes     : list
                  Mapping of each observation to a regime, aligned with y
    regimes_set : list
                  Ordered regime tags
    sig2n_k     : boolean
                  If True, use the degrees-of-freedom corrected variance
                  (n_r - k); otherwise divide by n_r
    u           : array
                  nx1 residuals
    y, x        : arrays
                  Dependent and independent variables to be rescaled
    yend, q     : arrays or None
                  Optional endogenous variables and instruments; rescaled
                  and returned only when yend is provided

    Returns
    -------
    (y2, x2) or (y2, x2, yend2, q2) with the rescaled arrays
    """
    regi_ids = dict((r, list(np.where(np.array(regimes) == r)[0]))
                    for r in regimes_set)
    if sig2n_k:
        # Degrees-of-freedom corrected variance per regime.
        sig = dict((r, np.dot(u[regi_ids[r]].T, u[regi_ids[r]]) / (len(regi_ids[r]) - x.shape[1]))
                   for r in regimes_set)
    else:
        sig = dict((r, np.dot(u[regi_ids[r]].T, u[regi_ids[r]]) / len(regi_ids[r]))
                   for r in regimes_set)
    sig_vec = np.zeros(y.shape, float)
    y2 = np.zeros(y.shape, float)
    for r in regimes_set:
        sig_vec[regi_ids[r]] = 1 / float(np.sqrt(sig[r]))
        y2[regi_ids[r]] = y[regi_ids[r]] / float(np.sqrt(sig[r]))
    x2 = spbroadcast(x, sig_vec)
    # BUG FIX: 'yend != None' performs an elementwise comparison on numpy
    # arrays and raises ValueError in a boolean context; test identity.
    if yend is not None:
        yend2 = spbroadcast(yend, sig_vec)
        q2 = spbroadcast(q, sig_vec)
        return y2, x2, yend2, q2
    else:
        return y2, x2
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
if __name__ == '__main__':
    _test()
    import numpy as np
    import pysal
    from ols_regimes import OLS_Regimes

    # Smoke run of the regimes OLS on the Columbus sample data.
    db = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
    y_var = 'CRIME'
    y = np.array([db.by_col(y_var)]).reshape(49, 1)
    x_var = ['INC', 'HOVAL']
    x = np.array([db.by_col(name) for name in x_var]).T
    r_var = 'NSA'
    regimes = db.by_col(r_var)
    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    w.transform = 'r'
    olsr = OLS_Regimes(
        y, x, regimes, w=w, constant_regi='many', nonspat_diag=False,
        spat_diag=False, name_y=y_var, name_x=x_var, name_ds='columbus',
        name_regimes=r_var, name_w='columbus.gal')
    # print() with a single argument behaves identically under Python 2
    # and 3; the original bare 'print' statement was Python-2 only.
    print(olsr.summary)
diff --git a/pysal/spreg/robust.py b/pysal/spreg/robust.py
new file mode 100644
index 0000000..0724b8d
--- /dev/null
+++ b/pysal/spreg/robust.py
@@ -0,0 +1,169 @@
+__author__ = "Luc Anselin luc.anselin at asu.edu, \
+        Pedro V. Amaral pedro.amaral at asu.edu, \
+        David C. Folch david.folch at asu.edu"
+
+import numpy as np
+import numpy.linalg as la
+from pysal import lag_spatial
+from utils import spdot, spbroadcast
+from user_output import check_constant
+
+
+def robust_vm(reg, gwk=None, sig2n_k=False):
+    """
+    Robust estimation of the variance-covariance matrix. Estimated by White (default) or HAC (if wk is provided). 
+
+    Parameters
+    ----------
+
+    reg             : Regression object (OLS or TSLS)
+                      output instance from a regression model
+
+    gwk             : PySAL weights object
+                      Optional. Spatial weights based on kernel functions
+                      If provided, returns the HAC variance estimation
+    sig2n_k         : boolean
+                      If True, then use n-k to rescale the vc matrix.
+                      If False, use n. (White only)
+
+    Returns
+    --------
+
+    psi             : kxk array
+                      Robust estimation of the variance-covariance
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> from ols import OLS
+    >>> from twosls import TSLS
+    >>> db=pysal.open(pysal.examples.get_path("NAT.dbf"),"r")
+    >>> y = np.array(db.by_col("HR90"))
+    >>> y = np.reshape(y, (y.shape[0],1))
+    >>> X = []
+    >>> X.append(db.by_col("RD90"))
+    >>> X.append(db.by_col("DV90"))
+    >>> X = np.array(X).T                       
+
+    Example with OLS with unadjusted standard errors
+
+    >>> ols = OLS(y,X)
+    >>> ols.vm
+    array([[ 0.17004545,  0.00226532, -0.02243898],
+           [ 0.00226532,  0.00941319, -0.00031638],
+           [-0.02243898, -0.00031638,  0.00313386]])
+
+    Example with OLS and White
+
+    >>> ols = OLS(y,X, robust='white')
+    >>> ols.vm
+    array([[ 0.24515481,  0.01093322, -0.03441966],
+           [ 0.01093322,  0.01798616, -0.00071414],
+           [-0.03441966, -0.00071414,  0.0050153 ]])
+
+    Example with OLS and HAC
+
+    >>> wk = pysal.kernelW_from_shapefile(pysal.examples.get_path('NAT.shp'),k=15,function='triangular', fixed=False)
+    >>> wk.transform = 'o'
+    >>> ols = OLS(y,X, robust='hac', gwk=wk)
+    >>> ols.vm
+    array([[ 0.29213532,  0.01670361, -0.03948199],
+           [ 0.01655557,  0.02295829, -0.00116874],
+           [-0.03941483, -0.00119077,  0.00568314]])
+
+    Example with 2SLS and White
+
+    >>> yd = []
+    >>> yd.append(db.by_col("UE90"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("UE80"))
+    >>> q = np.array(q).T
+    >>> tsls = TSLS(y, X, yd, q=q, robust='white')
+    >>> tsls.vm
+    array([[ 0.29569954,  0.04119843, -0.02496858, -0.01640185],
+           [ 0.04119843,  0.03647762,  0.004702  , -0.00987345],
+           [-0.02496858,  0.004702  ,  0.00648262, -0.00292891],
+           [-0.01640185, -0.00987345, -0.00292891,  0.0053322 ]])
+
+    Example with 2SLS and HAC
+
+    >>> tsls = TSLS(y, X, yd, q=q, robust='hac', gwk=wk)
+    >>> tsls.vm
+    array([[ 0.41985329,  0.06823119, -0.02883889, -0.02788116],
+           [ 0.06867042,  0.04887508,  0.00497443, -0.01367746],
+           [-0.02856454,  0.00501402,  0.0072195 , -0.00321604],
+           [-0.02810131, -0.01364908, -0.00318197,  0.00713251]])
+
+    """
+    if hasattr(reg, 'h'):  # If reg has H, do 2SLS estimator. OLS otherwise.
+        tsls = True
+        xu = spbroadcast(reg.h, reg.u)  # instruments scaled by residuals
+    else:
+        tsls = False
+        xu = spbroadcast(reg.x, reg.u)  # regressors scaled by residuals
+
+    # NOTE(review): `if gwk:` relies on the weights object's truthiness;
+    # `if gwk is not None:` would be safer — confirm W defines no __len__/__bool__.
+    if gwk:  # If gwk do HAC. White otherwise.
+        gwkxu = lag_spatial(gwk, xu)
+        psi0 = spdot(xu.T, gwkxu)
+    else:
+        psi0 = spdot(xu.T, xu)
+        if sig2n_k:
+            # degrees-of-freedom rescaling (White only)
+            psi0 = psi0 * (1. * reg.n / (reg.n - reg.k))
+    if tsls:
+        # sandwich with the 2SLS projection matrices
+        psi1 = spdot(reg.varb, reg.zthhthi)
+        psi = spdot(psi1, np.dot(psi0, psi1.T))
+    else:
+        # classic White sandwich: (X'X)^-1 psi0 (X'X)^-1
+        psi = spdot(reg.xtxi, np.dot(psi0, reg.xtxi))
+
+    return psi
+
+
+def hac_multi(reg, gwk, constant=False):
+    """
+    HAC robust estimation of the variance-covariance matrix for multi-regression object 
+
+    Parameters
+    ----------
+
+    reg             : Regression object (OLS or TSLS)
+                      output instance from a regression model
+
+    gwk             : PySAL weights object
+                      Spatial weights based on kernel functions
+
+    constant        : boolean
+                      If False, a constant column is checked/added to reg.hac_var
+                      via check_constant before computing the HAC terms.
+
+    Returns
+    --------
+
+    psi             : kxk array
+                      Robust estimation of the variance-covariance
+
+    """
+    if not constant:
+        reg.hac_var = check_constant(reg.hac_var)
+    xu = spbroadcast(reg.hac_var, reg.u)
+    gwkxu = lag_spatial(gwk, xu)
+    psi0 = spdot(xu.T, gwkxu)
+    counter = 0
+    # Fill the block-diagonal of reg.vm, one kr x kr block per sub-regression.
+    for m in reg.multi:
+        reg.multi[m].robust = 'hac'
+        reg.multi[m].name_gwk = reg.name_gwk
+        # NOTE(review): bare `except:` silently falls back to the OLS sandwich
+        # when the 2SLS attributes (varb/zthhthi) are absent; it would also mask
+        # unrelated errors — consider `except AttributeError:`.
+        try:
+            psi1 = spdot(reg.multi[m].varb, reg.multi[m].zthhthi)
+            reg.multi[m].vm = spdot(psi1, np.dot(psi0, psi1.T))
+        except:
+            reg.multi[m].vm = spdot(
+                reg.multi[m].xtxi, np.dot(psi0, reg.multi[m].xtxi))
+        reg.vm[(counter * reg.kr):((counter + 1) * reg.kr),
+               (counter * reg.kr):((counter + 1) * reg.kr)] = reg.multi[m].vm
+        counter += 1
+
+
+def _test():
+    # Run this module's doctests (the examples in robust_vm's docstring).
+    import doctest
+    doctest.testmod()
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/summary_output.py b/pysal/spreg/summary_output.py
new file mode 100644
index 0000000..26f9c0e
--- /dev/null
+++ b/pysal/spreg/summary_output.py
@@ -0,0 +1,1235 @@
+"""Internal helper files for user output."""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, David C. Folch david.folch at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu, Jing Yao jingyao at asu.edu"
+
+import textwrap as TW
+import numpy as np
+import copy as COPY
+import diagnostics as diagnostics
+import diagnostics_tsls as diagnostics_tsls
+import diagnostics_sp as diagnostics_sp
+import pysal
+import scipy
+from scipy.sparse.csr import csr_matrix
+
+__all__ = []
+
+
+###############################################################################
+############### Primary functions for running summary diagnostics #############
+###############################################################################
+
+"""
+This section contains one function for each user level regression class. These
+are called directly from the user class. Each one mixes and matches smaller
+functions located later in this module.
+"""
+
+
+def OLS(reg, vm, w, nonspat_diag, spat_diag, moran, white_test, regimes=False):
+    # Top-level summary builder for the user-facing OLS class: computes the
+    # requested diagnostics, stores formatted sections in reg.__summary, then
+    # assembles the final text via summary().
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_ols(reg, reg.robust)
+    if nonspat_diag:
+        # compute diagnostics
+        reg.sig2ML = reg.sig2n
+        reg.f_stat = diagnostics.f_stat(reg)
+        reg.logll = diagnostics.log_likelihood(reg)
+        reg.aic = diagnostics.akaike(reg)
+        reg.schwarz = diagnostics.schwarz(reg)
+        reg.mulColli = diagnostics.condition_index(reg)
+        reg.jarque_bera = diagnostics.jarque_bera(reg)
+        reg.breusch_pagan = diagnostics.breusch_pagan(reg)
+        reg.koenker_bassett = diagnostics.koenker_bassett(reg)
+        if white_test:
+            reg.white = diagnostics.white(reg)
+        # organize summary output
+        reg.__summary['summary_nonspat_diag_1'] = summary_nonspat_diag_1(reg)
+        reg.__summary['summary_nonspat_diag_2'] = summary_nonspat_diag_2(reg)
+    if spat_diag:
+        # compute diagnostics and organize summary output
+        spat_diag_ols(reg, w, moran)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=nonspat_diag, spat_diag=spat_diag)
+
+
+def OLS_multi(reg, multireg, vm, nonspat_diag, spat_diag, moran, white_test, regimes=False, sur=False, w=False):
+    # Summary builder for multi-regression OLS (one sub-regression per regime
+    # or SUR equation): per-equation diagnostics first, then global sections.
+    # NOTE(review): each sub-regression uses its own mreg.w for spatial
+    # diagnostics, the global pass uses the `w` argument — confirm callers
+    # always pass w when spat_diag is True (default is False).
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_ols(mreg, mreg.robust)
+        if nonspat_diag:
+            # compute diagnostics
+            mreg.sig2ML = mreg.sig2n
+            mreg.f_stat = diagnostics.f_stat(mreg)
+            mreg.logll = diagnostics.log_likelihood(mreg)
+            mreg.aic = diagnostics.akaike(mreg)
+            mreg.schwarz = diagnostics.schwarz(mreg)
+            mreg.mulColli = diagnostics.condition_index(mreg)
+            mreg.jarque_bera = diagnostics.jarque_bera(mreg)
+            mreg.breusch_pagan = diagnostics.breusch_pagan(mreg)
+            mreg.koenker_bassett = diagnostics.koenker_bassett(mreg)
+            if white_test:
+                mreg.white = diagnostics.white(mreg)
+            # organize summary output
+            mreg.__summary[
+                'summary_nonspat_diag_1'] = summary_nonspat_diag_1(mreg)
+            mreg.__summary[
+                'summary_nonspat_diag_2'] = summary_nonspat_diag_2(mreg)
+        if spat_diag:
+            # compute diagnostics and organize summary output
+            spat_diag_ols(mreg, mreg.w, moran)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        if sur:
+            summary_sur(mreg)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg)
+    if sur:
+        summary_sur(reg, u_cov=True)
+    if spat_diag:
+        # compute global diagnostics and organize summary output
+        spat_diag_ols(reg, w, moran)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm, instruments=False,
+                  nonspat_diag=nonspat_diag, spat_diag=spat_diag)
+
+
+def TSLS(reg, vm, w, spat_diag, regimes=False):
+    # Summary builder for the user-facing TSLS class (z-statistics, optional
+    # Anselin-Kelejian spatial diagnostic, instruments table).
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, reg.robust)
+    if spat_diag:
+        # compute diagnostics and organize summary output
+        spat_diag_instruments(reg, w)
+    # build coefficients table body
+    build_coefs_body_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=spat_diag)
+
+
+def TSLS_multi(reg, multireg, vm, spat_diag, regimes=False, sur=False, w=False):
+    # Multi-regression variant of TSLS: per-equation diagnostics, then the
+    # global chow/SUR/spatial sections on the parent object.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, mreg.robust)
+        if spat_diag:
+            # compute diagnostics and organize summary output
+            spat_diag_instruments(mreg, mreg.w)
+        # build coefficients table body
+        build_coefs_body_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        if sur:
+            summary_sur(mreg)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg)
+    if sur:
+        summary_sur(reg, u_cov=True)
+    if spat_diag:
+        # compute global diagnostics and organize summary output
+        spat_diag_instruments(reg, w)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=spat_diag)
+
+
+def GM_Lag(reg, vm, w, spat_diag, regimes=False):
+    # Summary builder for the spatial lag (GM_Lag) model; uses the lag-specific
+    # pseudo R-squared via beta_diag_lag.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_lag(reg, reg.robust, error=False)
+    if spat_diag:
+        # compute diagnostics and organize summary output
+        spat_diag_instruments(reg, w)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=spat_diag)
+
+
+def GM_Lag_multi(reg, multireg, vm, spat_diag, regimes=False, sur=False, w=False):
+    # Multi-regression variant of GM_Lag. The global spatial diagnostic is
+    # intentionally disabled (commented out below).
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_lag(mreg, mreg.robust, error=False)
+        if spat_diag:
+            # compute diagnostics and organize summary output
+            spat_diag_instruments(mreg, mreg.w)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        if sur:
+            summary_sur(mreg)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg)
+    if spat_diag:
+        pass
+        # compute global diagnostics and organize summary output
+        #spat_diag_instruments(reg, w)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=spat_diag)
+
+
+def ML_Lag(reg, w, vm, spat_diag, regimes=False):  # extra space d
+    # Summary builder for the maximum-likelihood spatial lag model; appends the
+    # ML fit statistics (sigma^2, log likelihood, AIC, Schwarz) to summary_r2.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_lag(reg, robust=None, error=False)
+    reg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+        'Sigma-square ML', reg.sig2, 'Log likelihood', reg.logll)
+    reg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+        'S.E of regression', np.sqrt(reg.sig2), 'Akaike info criterion', reg.aic)
+    reg.__summary['summary_r2'] += "                                                 %-22s:%12.3f\n" % (
+        'Schwarz criterion', reg.schwarz)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=False, spat_diag=spat_diag)
+
+
+# extra space d
+def ML_Lag_multi(reg, multireg, vm, spat_diag, regimes=False, sur=False, w=False):
+    # Multi-regression variant of ML_Lag.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_lag(mreg, robust=None, error=False)
+        mreg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+            'Sigma-square ML', mreg.sig2, 'Log likelihood', mreg.logll)
+        mreg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+            'S.E of regression', np.sqrt(mreg.sig2), 'Akaike info criterion', mreg.aic)
+        mreg.__summary['summary_r2'] += "                                                 %-22s:%12.3f\n" % (
+            'Schwarz criterion', mreg.schwarz)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=False, nonspat_diag=False, spat_diag=spat_diag)
+
+
+def ML_Error(reg, w, vm, spat_diag, regimes=False):   # extra space d
+    # Summary builder for the maximum-likelihood spatial error model; same ML
+    # fit-statistics section as ML_Lag but uses beta_diag (no rho term).
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, robust=None)
+    reg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+        'Sigma-square ML', reg.sig2, 'Log likelihood', reg.logll)
+    reg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+        'S.E of regression', np.sqrt(reg.sig2), 'Akaike info criterion', reg.aic)
+    reg.__summary['summary_r2'] += "                                                 %-22s:%12.3f\n" % (
+        'Schwarz criterion', reg.schwarz)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=False, spat_diag=spat_diag)
+
+
+# extra space d
+def ML_Error_multi(reg, multireg, vm, spat_diag, regimes=False, sur=False, w=False):
+    # Multi-regression variant of ML_Error; the chow test includes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, robust=None)
+        mreg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+            'Sigma-square ML', mreg.sig2, 'Log likelihood', mreg.logll)
+        mreg.__summary['summary_r2'] += "%-20s:%12.3f                %-22s:%12.3f\n" % (
+            'S.E of regression', np.sqrt(mreg.sig2), 'Akaike info criterion', mreg.aic)
+        mreg.__summary['summary_r2'] += "                                                 %-22s:%12.3f\n" % (
+            'Schwarz criterion', mreg.schwarz)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=False, nonspat_diag=False, spat_diag=spat_diag)
+
+
+def GM_Error(reg, vm, w, regimes=False):
+    # Summary builder for the GM spatial error model (KP 1998/99); lambda is
+    # reported in its own table section via summary_coefs_lambda.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, None)
+    # build coefficients table body
+    beta_position = summary_coefs_somex(reg, reg.z_stat)
+    summary_coefs_lambda(reg, reg.z_stat)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Error_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Error; chow test excludes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, None)
+        # build coefficients table body
+        beta_position = summary_coefs_somex(mreg, mreg.z_stat)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=False)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=False, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error(reg, vm, w, regimes=False):
+    # Summary builder for GM error model with endogenous regressors; adds the
+    # instruments section to the output.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, None)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Endog_Error.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, None)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=False)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Error_Hom(reg, vm, w, regimes=False):
+    # Summary builder for the homoskedastic GM error model (Drukker et al.);
+    # adds the iteration-count line via summary_iteration.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, None)
+    summary_iteration(reg)
+    # build coefficients table body
+    beta_position = summary_coefs_somex(reg, reg.z_stat)
+    summary_coefs_lambda(reg, reg.z_stat)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Error_Hom_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Error_Hom; chow test includes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        summary_iteration(mreg)
+        beta_diag(mreg, None)
+        # build coefficients table body
+        beta_position = summary_coefs_somex(mreg, mreg.z_stat)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=False, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error_Hom(reg, vm, w, regimes=False):
+    # Summary builder for the homoskedastic GM error model with endogenous
+    # regressors (instruments section included).
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, None)
+    summary_iteration(reg)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error_Hom_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Endog_Error_Hom.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, None)
+        summary_iteration(mreg)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Error_Het(reg, vm, w, regimes=False):
+    # Summary builder for the heteroskedasticity-robust GM error model; the
+    # 'het' tag selects the robust std.-error label in the output header.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, 'het')
+    summary_iteration(reg)
+    # build coefficients table body
+    beta_position = summary_coefs_somex(reg, reg.z_stat)
+    summary_coefs_lambda(reg, reg.z_stat)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Error_Het_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Error_Het; chow test includes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, 'het')
+        summary_iteration(mreg)
+        # build coefficients table body
+        beta_position = summary_coefs_somex(mreg, mreg.z_stat)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=False, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error_Het(reg, vm, w, regimes=False):
+    # Summary builder for the heteroskedasticity-robust GM error model with
+    # endogenous regressors.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, 'het')
+    summary_iteration(reg)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Endog_Error_Het_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Endog_Error_Het.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag(mreg, 'het')
+        summary_iteration(mreg)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo(reg, vm, w, regimes=False):
+    # Summary builder for the GM combo (lag + error) model.
+    # NOTE(review): summary_warning(reg) is called twice below (before and after
+    # the regimes branch) — looks like an accidental duplicate; the sibling
+    # builders call it once.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_lag(reg, None)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    summary_warning(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Combo; chow test excludes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_lag(mreg, None)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg, lambd=False)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo_Hom(reg, vm, w, regimes=False):
+    # Summary builder for the homoskedastic GM combo (lag + error) model.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_lag(reg, None)
+    summary_iteration(reg)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo_Hom_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Combo_Hom; chow test includes lambda.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_lag(mreg, None)
+        summary_iteration(mreg)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo_Het(reg, vm, w, regimes=False):
+    # Summary builder for the heteroskedasticity-robust GM combo model.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag_lag(reg, 'het')
+    summary_iteration(reg)
+    # build coefficients table body
+    summary_coefs_allx(reg, reg.z_stat, lambd=True)
+    summary_coefs_lambda(reg, reg.z_stat)
+    summary_coefs_instruments(reg)
+    if regimes:
+        summary_regimes(reg)
+    summary_warning(reg)
+    summary(reg=reg, vm=vm, instruments=True,
+            nonspat_diag=False, spat_diag=False)
+
+
+def GM_Combo_Het_multi(reg, multireg, vm, regimes=False):
+    # Multi-regression variant of GM_Combo_Het.
+    for m in multireg:
+        mreg = multireg[m]
+        mreg.__summary = {}
+        # compute diagnostics and organize summary output
+        beta_diag_lag(mreg, 'het')
+        summary_iteration(mreg)
+        # build coefficients table body
+        summary_coefs_allx(mreg, mreg.z_stat, lambd=True)
+        summary_coefs_lambda(mreg, mreg.z_stat)
+        summary_coefs_instruments(mreg)
+        if regimes:
+            summary_regimes(mreg, chow=False)
+        summary_warning(mreg)
+        multireg[m].__summary = mreg.__summary
+    reg.__summary = {}
+    if regimes:
+        summary_chow(reg, lambd=True)
+    summary_warning(reg)
+    summary_multi(reg=reg, multireg=multireg, vm=vm,
+                  instruments=True, nonspat_diag=False, spat_diag=False)
+
+
+def Probit(reg, vm, w, spat_diag):
+    # Summary builder for the Probit model: prediction rate, log-likelihood and
+    # LR test in the fit section, plus a marginal-effects (slopes) table.
+    reg.__summary = {}
+    # compute diagnostics and organize summary output
+    beta_diag(reg, None)
+    # organize summary output
+    if spat_diag:
+        reg.__summary['summary_spat_diag'] = summary_spat_diag_probit(reg)
+    reg.__summary[
+        'summary_r2'] = "%-21s: %3.2f\n" % ('% correctly predicted', reg.predpc)
+    reg.__summary[
+        'summary_r2'] += "%-21s: %3.4f\n" % ('Log-Likelihood', reg.logl)
+    reg.__summary['summary_r2'] += "%-21s: %3.4f\n" % ('LR test', reg.LR[0])
+    reg.__summary[
+        'summary_r2'] += "%-21s: %3.4f\n" % ('LR test (p-value)', reg.LR[1])
+    # reg.warning is set by the optimizer when convergence was not reached
+    if reg.warning:
+        reg.__summary[
+            'summary_r2'] += "\nMaximum number of iterations exceeded or gradient and/or function calls not changing\n"
+    # build coefficients table body
+    beta_position = summary_coefs_allx(reg, reg.z_stat)
+    reg.__summary['summary_other_mid'] = summary_coefs_slopes(reg)
+    summary(reg=reg, vm=vm, instruments=False,
+            short_intro=True, spat_diag=spat_diag)
+
+##############################################################################
+
+
+##############################################################################
+############### Helper functions for running summary diagnostics #############
+##############################################################################
+
+def beta_diag_ols(reg, robust):
+    # Coefficient diagnostics shared by all OLS builders: std. errors,
+    # t-statistics, R2/adjusted-R2, and the common summary_r2 header. Also
+    # records which robust std.-error variant (if any) was used.
+    # compute diagnostics
+    reg.std_err = diagnostics.se_betas(reg)
+    reg.t_stat = diagnostics.t_stat(reg)
+    reg.r2 = diagnostics.r2(reg)
+    reg.ar2 = diagnostics.ar2(reg)
+    # organize summary output
+    reg.__summary['summary_std_err'] = robust
+    reg.__summary['summary_zt'] = 't'
+    reg.__summary['summary_r2'] = "%-20s:%12.4f\n%-20s:%12.4f\n" % (
+        'R-squared', reg.r2, 'Adjusted R-squared', reg.ar2)
+    # build coefficients table body
+    position = summary_coefs_allx(reg, reg.t_stat)
+
+
+def beta_diag(reg, robust):
+    # Coefficient diagnostics for non-OLS models: std. errors, z-statistics
+    # and the aspatial pseudo R-squared line.
+    # compute diagnostics
+    reg.std_err = diagnostics.se_betas(reg)
+    reg.z_stat = diagnostics.t_stat(reg, z_stat=True)
+    reg.pr2 = diagnostics_tsls.pr2_aspatial(reg)
+    # organize summary output
+    reg.__summary['summary_std_err'] = robust
+    reg.__summary['summary_zt'] = 'z'
+    reg.__summary[
+        'summary_r2'] = "%-20s:%12.4f\n" % ('Pseudo R-squared', reg.pr2)
+
+
+def beta_diag_lag(reg, robust, error=True):
+    # Coefficient diagnostics for spatial lag models: like beta_diag, plus the
+    # spatial pseudo R-squared, which is only defined when |rho| < 1.
+    # NOTE(review): the `error` flag is accepted but unused in this body —
+    # confirm whether it should gate pr2_spatial.
+    # compute diagnostics
+    reg.std_err = diagnostics.se_betas(reg)
+    reg.z_stat = diagnostics.t_stat(reg, z_stat=True)
+    reg.pr2 = diagnostics_tsls.pr2_aspatial(reg)
+    # organize summary output
+    reg.__summary['summary_std_err'] = robust
+    reg.__summary['summary_zt'] = 'z'
+    reg.__summary[
+        'summary_r2'] = "%-20s:      %5.4f\n" % ('Pseudo R-squared', reg.pr2)
+    if np.abs(reg.rho) < 1:
+        reg.pr2_e = diagnostics_tsls.pr2_spatial(reg)
+        reg.__summary[
+            'summary_r2'] += "%-20s:  %5.4f\n" % ('Spatial Pseudo R-squared', reg.pr2_e)
+    else:
+        reg.__summary[
+            'summary_r2'] += "Spatial Pseudo R-squared: omitted due to rho outside the boundary (-1, 1)."
+
+
def build_coefs_body_instruments(reg):
    """Build the coefficients-table body and the instruments listing for a
    model estimated with instrumental variables.

    Fills reg.__summary['summary_coefs'] (via summary_coefs_allx) and
    reg.__summary['summary_coefs_instruments'] (via
    summary_coefs_instruments).
    """
    # The original code called summary_coefs_allx twice in a row (the first
    # result bound to an unused local); the call is idempotent, so the
    # duplicate only wasted work and has been dropped.
    summary_coefs_allx(reg, reg.z_stat)
    summary_coefs_instruments(reg)
+
+
def spat_diag_ols(reg, w, moran):
    """Run the LM diagnostics for spatial dependence on an OLS fit (and
    optionally Moran's I on the residuals) and store the formatted section.

    Parameters
    ----------
    reg   : OLS regression object; gains lm_error, lm_lag, rlm_error,
            rlm_lag, lm_sarma (and moran_res when requested) attributes
    w     : spatial weights object aligned with reg
    moran : if True, also compute Moran's I on the residuals
    """
    # compute diagnostics
    lm_tests = diagnostics_sp.LMtests(reg, w)
    reg.lm_error = lm_tests.lme
    reg.lm_lag = lm_tests.lml
    reg.rlm_error = lm_tests.rlme
    reg.rlm_lag = lm_tests.rlml
    reg.lm_sarma = lm_tests.sarma
    if moran:
        moran_res = diagnostics_sp.MoranRes(reg, w, z=True)
        reg.moran_res = moran_res.I, moran_res.zI, moran_res.p_norm
    # organize summary output
    reg.__summary['summary_spat_diag'] = summary_spat_diag_ols(reg, moran)
+
+
def spat_diag_instruments(reg, w):
    """Run the Anselin-Kelejian test for models with instruments and store
    the single formatted diagnostics row on reg.__summary.

    NOTE(review): the first value unpacked from akTest (`mi`, Moran's I) is
    never used or stored here.
    """
    # compute diagnostics
    cache = diagnostics_sp.spDcache(reg, w)
    mi, ak, ak_p = diagnostics_sp.akTest(reg, w, cache)
    reg.ak_test = ak, ak_p
    # organize summary output
    reg.__summary['summary_spat_diag'] = "%-27s      %2d    %12.3f       %9.4f\n" % (
        "Anselin-Kelejian Test", 1, reg.ak_test[0], reg.ak_test[1])
+
+
def summary(reg, vm, instruments, short_intro=False, nonspat_diag=False, spat_diag=False, other_end=False):
    """Assemble the full text report from the pieces previously stored in
    reg.__summary and attach it as reg.summary.

    The bare try/except blocks are deliberate: each optional section is
    simply skipped when its key was never filled in.  The local name
    `summary` shadows this function inside the body; harmless, since the
    function never calls itself.
    """
    summary = summary_open()
    summary += summary_intro(reg, short_intro)
    summary += reg.__summary['summary_r2']
    if nonspat_diag:
        summary += reg.__summary['summary_nonspat_diag_1']
    try:
        summary += reg.__summary['summary_other_top']
    except:
        pass
    summary += summary_coefs_intro(reg)
    summary += reg.__summary['summary_coefs']
    summary += "------------------------------------------------------------------------------------\n"
    if instruments:
        summary += reg.__summary['summary_coefs_instruments']
    try:
        summary += reg.__summary['summary_other_mid']
    except:
        pass
    if nonspat_diag:
        summary += reg.__summary['summary_nonspat_diag_2']
    if spat_diag:
        summary += summary_spat_diag_intro()
        summary += reg.__summary['summary_spat_diag']
    if vm:
        summary += summary_vm(reg, instruments)
    try:
        summary += reg.__summary['summary_chow']
    except:
        pass
    if other_end:
        summary += reg.__summary['summary_other_end']
    summary += summary_close()
    reg.summary = summary
+
+
def summary_multi(reg, multireg, vm, instruments, short_intro=False, nonspat_diag=False, spat_diag=False, other_end=False):
    """Assemble the report for multi-equation/multi-regime results: one
    section per sub-regression in `multireg`, with the global pieces held
    by `reg` appended after the last equation.

    NOTE(review): `multireg.keys()[-1]` assumes Python 2, where dict.keys()
    returns a list; under Python 3 this raises TypeError.
    """
    summary = summary_open(multi=True)
    for m in multireg:
        mreg = multireg[m]
        summary += "----------\n\n"
        summary += summary_intro(mreg, short_intro)
        summary += mreg.__summary['summary_r2']
        if nonspat_diag:
            summary += mreg.__summary['summary_nonspat_diag_1']
        try:
            # NOTE(review): this reads from `reg`, not `mreg`, unlike the
            # surrounding per-equation lines -- confirm this is intentional
            summary += reg.__summary['summary_other_top']
        except:
            pass
        summary += summary_coefs_intro(mreg)
        summary += mreg.__summary['summary_coefs']
        summary += "------------------------------------------------------------------------------------\n"
        if instruments:
            summary += mreg.__summary['summary_coefs_instruments']
        try:
            summary += mreg.__summary['summary_other_mid']
        except:
            pass
        # global middle section appended only after the last equation
        if m == multireg.keys()[-1]:
            try:
                summary += reg.__summary['summary_other_mid']
            except:
                pass
        if nonspat_diag:
            summary += mreg.__summary['summary_nonspat_diag_2']
        if spat_diag:
            summary += summary_spat_diag_intro()
            summary += mreg.__summary['summary_spat_diag']
        if vm:
            summary += summary_vm(mreg, instruments)
        if other_end:
            summary += mreg.__summary['summary_other_end']
        # global chow / spatial-diagnostics / tail sections after the last
        if m == multireg.keys()[-1]:
            try:
                summary += reg.__summary['summary_chow']
            except:
                pass
            if spat_diag:
                try:
                    spat_diag_str = reg.__summary['summary_spat_diag']
                    summary += summary_spat_diag_intro_global()
                    summary += spat_diag_str
                except:
                    pass
            try:
                summary += reg.__summary['summary_other_end']
            except:
                pass
    summary += summary_close()
    reg.summary = summary
+
+
def _get_var_indices(reg, lambd=False):
    """Return (var_names, indices): the variable labels and the order in
    which coefficient rows should be printed.

    For regimes models (detected by the presence of reg.kf and related
    attributes) variables are grouped per regime, each group sorted
    alphabetically with its constant first.  Any attribute error in the
    regimes arithmetic falls through to the plain ordering: constant in
    position 0, remaining names alphabetical.

    lambd=True leaves the trailing spatial parameter out of the ordering.
    """
    try:
        var_names = reg.name_z
    except:
        var_names = reg.name_x
    last_v = len(var_names)
    if lambd:
        last_v += -1
    indices = []
    try:
        kf = reg.kf
        if lambd:
            kf += -1
        # columns per regime, endogenous columns excluded
        krex = reg.kr - reg.kryd
        try:
            kfyd = reg.yend.shape[1] - reg.nr * reg.kryd
        except:
            kfyd = 0
        j_con = 0
        if reg.constant_regi == 'many':
            j_con = 1
        for i in range(reg.nr):
            j = i * krex
            jyd = krex * reg.nr + i * reg.kryd + kf - kfyd
            name_reg = var_names[j + j_con:j + krex] + \
                var_names[jyd:jyd + reg.kryd]
            name_reg.sort()
            if reg.constant_regi == 'many':
                # per-regime constant printed first in each group
                indices += [j] + [var_names.index(ind) for ind in name_reg]
            else:
                indices += [var_names.index(ind) for ind in name_reg]
        if reg.constant_regi == 'one':
            indices += [krex * reg.nr]
        if len(indices) < last_v:
            # variables not varying by regime, sorted alphabetically
            name_reg = var_names[krex * reg.nr + 1 - j_con:krex * reg.nr + kf -
                                 kfyd] + var_names[reg.kr * reg.nr + kf - kfyd:reg.kr * reg.nr + kf]
            name_reg.sort()
            indices += [var_names.index(ind) for ind in name_reg]
    except:
        # non-regimes models: constant first, remaining names alphabetical
        indices = [0] + (np.argsort(var_names[1:last_v]) + 1).tolist()
    return var_names, indices
+
+##############################################################################
+
+
+##############################################################################
+############### Guts of the summary printout #################################
+##############################################################################
+
+"""
+This section contains the pieces needed to put together the summary printout.
+"""
+
+
def summary_open(multi=False):
    """Return the header opening every regression report.

    The dashed underline is added only for single-regression output;
    multi-regression reports draw their own separator per equation.
    """
    header = "REGRESSION\n"
    if multi:
        return header
    return header + "----------\n"
+
+
def summary_intro(reg, short):
    """Build the title block of the report: data set, weights matrix (when
    present), dependent variable and -- unless `short` is True -- the
    descriptive statistics of the dependent variable."""
    title = "SUMMARY OF OUTPUT: " + reg.title + "\n"
    parts = [title, "-" * (len(title) - 1) + "\n"]
    parts.append("%-20s:%12s\n" % ('Data set', reg.name_ds))
    if reg.name_w:
        parts.append("%-20s:%12s\n" % ('Weights matrix', reg.name_w))
    parts.append("%-20s:%12s                %-22s:%12d\n" % (
        'Dependent Variable', reg.name_y, 'Number of Observations', reg.n))
    if not short:
        parts.append("%-20s:%12.4f                %-22s:%12d\n" % (
            'Mean dependent var', reg.mean_y, 'Number of Variables', reg.k))
        parts.append("%-20s:%12.4f                %-22s:%12d\n" % (
            'S.D. dependent var', reg.std_y, 'Degrees of Freedom', reg.n - reg.k))
    return "".join(parts)
+
+
def summary_coefs_intro(reg):
    """Return the header of the coefficients table, including a note about
    any robust standard-error correction that was applied ('white' or
    'hac'; the previously commented-out 'het' branch is omitted)."""
    rule = "------------------------------------------------------------------------------------\n"
    out = "\n"
    robust = reg.__summary['summary_std_err']
    if robust:
        if robust.lower() == 'white':
            out += "White Standard Errors\n"
        elif robust.lower() == 'hac':
            out += "HAC Standard Errors; Kernel Weights: " + reg.name_gwk + "\n"
    out += rule
    out += "            Variable     Coefficient       Std.Error     %1s-Statistic     Probability\n" % (
        reg.__summary['summary_zt'])
    out += rule
    return out
+
+
def summary_coefs_allx(reg, zt_stat, lambd=False):
    """Fill reg.__summary['summary_coefs'] with one formatted row per
    coefficient (name, estimate, std. error, z/t statistic, p-value).

    Parameters
    ----------
    reg     : regression object carrying betas, std_err and name metadata
    zt_stat : sequence of (statistic, p-value) pairs aligned with reg.betas
    lambd   : if True, leave the trailing spatial parameter out of the
              ordering (it is appended separately by summary_coefs_lambda)

    Returns
    -------
    The index of the last variable written, or None when there is nothing
    to write.  (The original implementation returned the bare loop variable
    `i`, which raised NameError when the index list was empty.)
    """
    rows = ""
    var_names, indices = _get_var_indices(reg, lambd)
    for i in indices:
        rows += "%20s    %12.7f    %12.7f    %12.7f    %12.7f\n"   \
            % (var_names[i], reg.betas[i][0], reg.std_err[i], zt_stat[i][0], zt_stat[i][1])
    reg.__summary['summary_coefs'] = rows
    return indices[-1] if indices else None
+
+
def summary_coefs_somex(reg, zt_stat):
    """This is a special case needed for models that do not have inference on
    the lambda term
    """
    # NOTE(review): rows are labelled with reg.name_x[i] while the sibling
    # summary_coefs_allx uses var_names[i] (which may be name_z) -- confirm
    # the indices always line up with name_x for the models that call this
    strSummary = ""
    var_names, indices = _get_var_indices(reg, lambd=True)
    for i in indices:
        strSummary += "%20s    %12.7f    %12.7f    %12.7f    %12.7f\n"   \
            % (reg.name_x[i], reg.betas[i][0], reg.std_err[i], zt_stat[i][0], zt_stat[i][1])
    reg.__summary['summary_coefs'] = strSummary
    # NOTE(review): `i` is unbound (NameError) when `indices` is empty
    return i
+"""
+def summary_coefs_yend(reg, zt_stat, lambd=False):
+    strSummary = ""
+    indices = _get_var_indices(reg, lambd) 
+    for i in indices:
+        strSummary += "%20s    %12.7f    %12.7f    %12.7f    %12.7f\n"   \
+                     % (reg.name_z[i],reg.betas[i][0],reg.std_err[i],zt_stat[i][0],zt_stat[i][1])              
+    reg.__summary['summary_coefs'] = strSummary
+"""
+
+
def summary_coefs_lambda(reg, zt_stat):
    """Append the trailing spatial-parameter row to the coefficients table;
    the inference columns are included only when a statistic is available
    for it (i.e. len(betas) == len(zt_stat))."""
    try:
        labels = reg.name_z
    except:
        labels = reg.name_x
    last_name = labels[-1]
    if len(reg.betas) == len(zt_stat):
        row = "%20s    %12.7f    %12.7f    %12.7f    %12.7f\n" % (
            last_name, reg.betas[-1][0], reg.std_err[-1],
            zt_stat[-1][0], zt_stat[-1][1])
    else:
        # no inference available for the spatial parameter
        row = "%20s    %12.7f    \n" % (last_name, reg.betas[-1][0])
    reg.__summary['summary_coefs'] += row
+
+
def summary_coefs_instruments(reg):
    """Store a wrapped listing of the instrumented variables and of the
    instruments used, under reg.__summary['summary_coefs_instruments']."""
    wrapper = TW.TextWrapper(width=76, subsequent_indent="             ")
    inst_line = wrapper.fill("Instruments: " + ", ".join(sorted(reg.name_q))) + "\n"
    yend_line = wrapper.fill("Instrumented: " + ", ".join(sorted(reg.name_yend))) + "\n"
    reg.__summary['summary_coefs_instruments'] = yend_line + inst_line
+
+
def summary_iteration(reg):
    """Report the number of iterations computed and, when the regression
    carries a step1c attribute, whether step 1c was computed; the text is
    appended to the top section of the summary."""
    try:
        flag = 'Yes' if reg.step1c else 'No'
        txt = "%-20s:%12s                %-22s:%12s\n" % (
            'N. of iterations', reg.iteration, 'Step1c computed', flag)
    except:
        # regressions without step1c fall back to a single-column line
        txt = "%-20s:%12s\n" % ('N. of iterations', reg.iteration)
    try:
        reg.__summary['summary_other_top'] = \
            reg.__summary['summary_other_top'] + txt
    except:
        reg.__summary['summary_other_top'] = txt
+
+
def summary_regimes(reg, chow=True):
    """Note which variable defines the regimes in the middle section of the
    summary and, when `chow` is True, add the Chow test section."""
    line = "Regimes variable: %s\n" % reg.name_regimes
    try:
        reg.__summary['summary_other_mid'] += line
    except:
        reg.__summary['summary_other_mid'] = line
    if chow:
        summary_chow(reg)
+
+
def summary_sur(reg, u_cov=False):
    """Add SUR-specific pieces to the summary: either the error covariance
    matrix (u_cov=True, appended at the end of the report) or the equation
    ID line plus, when available, the log-likelihood.
    """
    if u_cov:
        str_ucv = "\nERROR COVARIANCE MATRIX\n"
        for i in range(reg.u_cov.shape[0]):
            for j in range(reg.u_cov.shape[1]):
                str_ucv += "%12.6f" % (reg.u_cov[i][j])
            str_ucv += "\n"
        try:
            reg.__summary['summary_other_end'] += str_ucv
        except:
            reg.__summary['summary_other_end'] = str_ucv
    else:
        try:
            reg.__summary[
                'summary_other_mid'] += "Equation ID: %s\n" % reg.name_multiID
        except:
            reg.__summary[
                'summary_other_mid'] = "Equation ID: %s\n" % reg.name_multiID
        try:
            reg.__summary[
                'summary_r2'] += "%-20s: %3.4f\n" % ('Log-Likelihood', reg.logl)
        except:
            # models without a log-likelihood simply skip the line
            pass
+
+
def summary_chow(reg, lambd=False):
    """Build the Chow-test section for regimes models: one row per variable
    tested for coefficient equality across regimes, plus a global test.

    When `lambd` is True a final row for the spatial parameter is appended
    (index -1 into reg.chow.regi).
    """
    reg.__summary['summary_chow'] = "\nREGIMES DIAGNOSTICS - CHOW TEST\n"
    reg.__summary[
        'summary_chow'] += "                 VARIABLE        DF        VALUE           PROB\n"
    if reg.cols2regi == 'all':
        names_chow = reg.name_x_r[1:]
    else:
        names_chow = [reg.name_x_r[1:][i] for i in np.where(reg.cols2regi)[0]]
    if reg.constant_regi == 'many':
        # constant printed first, remaining names alphabetically
        indices = [0] + (np.argsort(names_chow) + 1).tolist()
        names_chow = ['CONSTANT'] + names_chow
    else:
        indices = (np.argsort(names_chow)).tolist()
    if lambd:
        indices += [-1]
        names_chow += ['lambda']
    for i in indices:
        reg.__summary['summary_chow'] += "%25s        %2d    %12.3f        %9.4f\n" % (
            names_chow[i], reg.nr - 1, reg.chow.regi[i, 0], reg.chow.regi[i, 1])
    reg.__summary['summary_chow'] += "%25s        %2d    %12.3f        %9.4f\n" % (
        'Global test', reg.kr * (reg.nr - 1), reg.chow.joint[0], reg.chow.joint[1])
+
+
def summary_warning(reg):
    """Best effort: append any warning text attached to the regression to
    the middle section of the summary.  Silently does nothing when there is
    no warning attribute or no summary dict to write to."""
    try:
        if reg.warning:
            try:
                reg.__summary['summary_other_mid'] = \
                    reg.__summary['summary_other_mid'] + reg.warning
            except:
                reg.__summary['summary_other_mid'] = reg.warning
    except:
        pass
+
+
def summary_coefs_slopes(reg):
    """Build the marginal-effects ("slopes") table for discrete-choice
    models, ordered alphabetically by explanatory-variable name; the
    constant (reg.name_x[0]) carries no slope and is excluded.
    """
    strSummary = "\nMARGINAL EFFECTS\n"
    if reg.scalem == 'phimean':
        strSummary += "Method: Mean of individual marginal effects\n"
    elif reg.scalem == 'xmean':
        strSummary += "Method: Marginal effects at variables mean\n"
    strSummary += "------------------------------------------------------------------------------------\n"
    strSummary += "            Variable           Slope       Std.Error     %1s-Statistic     Probability\n" % (
        reg.__summary['summary_zt'])
    strSummary += "------------------------------------------------------------------------------------\n"
    # argsort over the names (constant excluded) yields alphabetical order;
    # the slopes arrays are aligned with name_x[1:], hence the i + 1 offset
    indices = np.argsort(reg.name_x[1:]).tolist()
    for i in indices:
        strSummary += "%20s    %12.7f    %12.7f    %12.7f    %12.7f\n"   \
            % (reg.name_x[i + 1], reg.slopes[i][0], reg.slopes_std_err[i], reg.slopes_z_stat[i][0], reg.slopes_z_stat[i][1])
    return strSummary + "\n\n"
+"""
+def summary_r2(reg, ols, spatial_lag):
+    if ols:
+        strSummary = "%-20s:%12.4f\n%-20s:%12.4f\n" % ('R-squared',reg.r2,'Adjusted R-squared',reg.ar2)
+    else:
+        strSummary = "%-20s:%12.4f\n" % ('Pseudo R-squared',reg.pr2)
+        if spatial_lag:
+            if reg.pr2_e != None: 
+                strSummary += "%-20s:%12.4f\n" % ('Spatial Pseudo R-squared',reg.pr2_e)
+    return strSummary
+"""
+
+
def summary_nonspat_diag_1(reg):
    """Return the first block of non-spatial diagnostics (fit statistics)
    shown in the report header: two columns of label/value pairs."""
    lines = [
        "%-20s:%12.3f                %-22s:%12.4f\n" % (
            'Sum squared residual', reg.utu, 'F-statistic', reg.f_stat[0]),
        "%-20s:%12.3f                %-22s:%12.4g\n" % (
            'Sigma-square', reg.sig2, 'Prob(F-statistic)', reg.f_stat[1]),
        "%-20s:%12.3f                %-22s:%12.3f\n" % (
            'S.E. of regression', np.sqrt(reg.sig2), 'Log likelihood', reg.logll),
        "%-20s:%12.3f                %-22s:%12.3f\n" % (
            'Sigma-square ML', reg.sig2ML, 'Akaike info criterion', reg.aic),
        "%-20s:%12.4f                %-22s:%12.3f\n" % (
            'S.E of regression ML', np.sqrt(reg.sig2ML), 'Schwarz criterion', reg.schwarz),
    ]
    return "".join(lines)
+
+
def summary_nonspat_diag_2(reg):
    """Build the regression-diagnostics section: multicollinearity
    condition number, normality of errors and heteroskedasticity tests.
    """
    strSummary = ""
    strSummary += "\nREGRESSION DIAGNOSTICS\n"
    if reg.mulColli:
        strSummary += "MULTICOLLINEARITY CONDITION NUMBER %16.3f\n\n" % (
            reg.mulColli)
    strSummary += "TEST ON NORMALITY OF ERRORS\n"
    strSummary += "TEST                             DF        VALUE           PROB\n"
    strSummary += "%-27s      %2d  %14.3f        %9.4f\n\n" % (
        'Jarque-Bera', reg.jarque_bera['df'], reg.jarque_bera['jb'], reg.jarque_bera['pvalue'])
    strSummary += "DIAGNOSTICS FOR HETEROSKEDASTICITY\n"
    strSummary += "RANDOM COEFFICIENTS\n"
    strSummary += "TEST                             DF        VALUE           PROB\n"
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        'Breusch-Pagan test', reg.breusch_pagan['df'], reg.breusch_pagan['bp'], reg.breusch_pagan['pvalue'])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        'Koenker-Bassett test', reg.koenker_bassett['df'], reg.koenker_bassett['kb'], reg.koenker_bassett['pvalue'])
    try:
        if reg.white:
            strSummary += "\nSPECIFICATION ROBUST TEST\n"
            # NOTE(review): reg.white is either a 3-key dict or a plain
            # message string; len(...) > 3 is used to tell them apart.
            # Fragile -- confirm upstream that the dict always has 3 keys.
            if len(reg.white) > 3:
                strSummary += reg.white + '\n'
            else:
                strSummary += "TEST                             DF        VALUE           PROB\n"
                strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
                    'White', reg.white['df'], reg.white['wh'], reg.white['pvalue'])
    except:
        pass
    return strSummary
+
+
def summary_spat_diag_intro():
    """Header for the spatial-dependence diagnostics section."""
    return ("\nDIAGNOSTICS FOR SPATIAL DEPENDENCE\n"
            "TEST                           MI/DF       VALUE           PROB\n")
+
+
def summary_spat_diag_intro_global():
    """Header for the global spatial-dependence diagnostics section used in
    multi-equation reports."""
    return ("\nDIAGNOSTICS FOR GLOBAL SPATIAL DEPENDENCE\n"
            "Residuals are treated as homoskedastic for the purpose of these tests\n"
            "TEST                           MI/DF       VALUE           PROB\n")
+
+
def summary_spat_diag_ols(reg, moran):
    """Return the formatted LM diagnostics rows (and, optionally, the
    Moran's I row) computed earlier by spat_diag_ols."""
    strSummary = ""
    if moran:
        strSummary += "%-27s  %8.4f     %9.3f        %9.4f\n" % (
            "Moran's I (error)", reg.moran_res[0], reg.moran_res[1], reg.moran_res[2])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        "Lagrange Multiplier (lag)", 1, reg.lm_lag[0], reg.lm_lag[1])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        "Robust LM (lag)", 1, reg.rlm_lag[0], reg.rlm_lag[1])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        "Lagrange Multiplier (error)", 1, reg.lm_error[0], reg.lm_error[1])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n" % (
        "Robust LM (error)", 1, reg.rlm_error[0], reg.rlm_error[1])
    strSummary += "%-27s      %2d    %12.3f        %9.4f\n\n" % (
        "Lagrange Multiplier (SARMA)", 2, reg.lm_sarma[0], reg.lm_sarma[1])
    return strSummary
+
+
def summary_spat_diag_probit(reg):
    """Return the spatial diagnostics rows reported for probit models:
    Kelejian-Prucha, Pinkse and Pinkse-Slade error tests."""
    rows = (("Kelejian-Prucha (error)", reg.KP_error),
            ("Pinkse (error)", reg.Pinkse_error),
            ("Pinkse-Slade (error)", reg.PS_error))
    out = ""
    for label, stat in rows:
        out += "%-27s      %2d    %12.3f       %9.4f\n" % (
            label, 1, stat[0], stat[1])
    # blank line closes the section
    return out + "\n"
+
+
def summary_vm(reg, instruments):
    """Render the coefficients variance-covariance matrix, headed by the
    variable names (name_z when instruments were used, name_x otherwise)."""
    names = reg.name_z if instruments else reg.name_x
    out = "\nCOEFFICIENTS VARIANCE MATRIX\n"
    out += "----------------------------\n"
    out += "".join("%12s" % name for name in names) + "\n"
    for row in range(reg.vm.shape[0]):
        for col in range(reg.vm.shape[1]):
            out += "%12.6f" % (reg.vm[row][col])
        out += "\n"
    return out
+
+
def summary_pred(reg):
    """Table of observed, predicted and residual values, one row per
    observation (1-based OBS index)."""
    header = "%16s%16s%16s%16s\n" % ('OBS', reg.name_y, 'PREDICTED', 'RESIDUAL')
    body = ""
    for idx in range(reg.n):
        body += "%16d%16.5f%16.5f%16.5f\n" % (
            idx + 1, reg.y[idx][0], reg.predy[idx][0], reg.u[idx][0])
    return "\n\n" + header + body
+
+
def summary_close():
    """Return the footer line that terminates every report."""
    return "================================ END OF REPORT ====================================="
+
+##############################################################################
+
+
def _test():
    """Run this module's doctests (used when executed as a script)."""
    import doctest
    doctest.testmod()
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/tests/test_diagnostics.py b/pysal/spreg/tests/test_diagnostics.py
new file mode 100644
index 0000000..1904b58
--- /dev/null
+++ b/pysal/spreg/tests/test_diagnostics.py
@@ -0,0 +1,128 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg import diagnostics
+from pysal.spreg.ols import OLS 
+
+
# create regression object used by all the tests below
# (module-level so the columbus OLS fit is computed once for the suite)
db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
y = np.array(db.by_col("CRIME"))
y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("HOVAL"))
X = np.array(X).T
reg = OLS(y,X)
+
+
# unittest's assertEquals / assertAlmostEquals are deprecated aliases of
# assertEqual / assertAlmostEqual; the non-deprecated names are used below.

class TestFStat(unittest.TestCase):
    def test_f_stat(self):
        obs = diagnostics.f_stat(reg)
        exp = (28.385629224695, 0.000000009341)
        for i in range(2):
            self.assertAlmostEqual(obs[i], exp[i])

class TestTStat(unittest.TestCase):
    def test_t_stat(self):
        obs = diagnostics.t_stat(reg)
        exp = [(14.490373143689094, 9.2108899889173982e-19),
               (-4.7804961912965762, 1.8289595070843232e-05),
               (-2.6544086427176916, 0.010874504909754612)]
        for i in range(3):
            for j in range(2):
                self.assertAlmostEqual(obs[i][j], exp[i][j])

class TestR2(unittest.TestCase):
    def test_r2(self):
        obs = diagnostics.r2(reg)
        self.assertAlmostEqual(obs, 0.55240404083742334)

class TestAr2(unittest.TestCase):
    def test_ar2(self):
        obs = diagnostics.ar2(reg)
        self.assertAlmostEqual(obs, 0.5329433469607896)

class TestSeBetas(unittest.TestCase):
    def test_se_betas(self):
        obs = diagnostics.se_betas(reg)
        exp = np.array([4.73548613, 0.33413076, 0.10319868])
        np.testing.assert_array_almost_equal(obs, exp)

class TestLogLikelihood(unittest.TestCase):
    def test_log_likelihood(self):
        obs = diagnostics.log_likelihood(reg)
        self.assertAlmostEqual(obs, -187.3772388121491)

class TestAkaike(unittest.TestCase):
    def test_akaike(self):
        obs = diagnostics.akaike(reg)
        self.assertAlmostEqual(obs, 380.7544776242982)

class TestSchwarz(unittest.TestCase):
    def test_schwarz(self):
        obs = diagnostics.schwarz(reg)
        self.assertAlmostEqual(obs, 386.42993851863008)

class TestConditionIndex(unittest.TestCase):
    def test_condition_index(self):
        obs = diagnostics.condition_index(reg)
        self.assertAlmostEqual(obs, 6.541827751444)

class TestJarqueBera(unittest.TestCase):
    def test_jarque_bera(self):
        obs = diagnostics.jarque_bera(reg)
        exp = {'df': 2, 'jb': 1.835752520076, 'pvalue': 0.399366291249}
        self.assertEqual(obs['df'], exp['df'])
        self.assertAlmostEqual(obs['jb'], exp['jb'])
        self.assertAlmostEqual(obs['pvalue'], exp['pvalue'])

class TestBreuschPagan(unittest.TestCase):
    def test_breusch_pagan(self):
        obs = diagnostics.breusch_pagan(reg)
        exp = {'df': 2, 'bp': 7.900441675960, 'pvalue': 0.019250450075}
        self.assertEqual(obs['df'], exp['df'])
        self.assertAlmostEqual(obs['bp'], exp['bp'])
        self.assertAlmostEqual(obs['pvalue'], exp['pvalue'])

class TestWhite(unittest.TestCase):
    def test_white(self):
        obs = diagnostics.white(reg)
        exp = {'df': 5, 'wh': 19.946008239903, 'pvalue': 0.001279222817}
        self.assertEqual(obs['df'], exp['df'])
        self.assertAlmostEqual(obs['wh'], exp['wh'])
        self.assertAlmostEqual(obs['pvalue'], exp['pvalue'])

class TestKoenkerBassett(unittest.TestCase):
    def test_koenker_bassett(self):
        obs = diagnostics.koenker_bassett(reg)
        exp = {'df': 2, 'kb': 5.694087931707, 'pvalue': 0.058015563638}
        self.assertEqual(obs['df'], exp['df'])
        self.assertAlmostEqual(obs['kb'], exp['kb'])
        self.assertAlmostEqual(obs['pvalue'], exp['pvalue'])

class TestVif(unittest.TestCase):
    def test_vif(self):
        obs = diagnostics.vif(reg)
        exp = [(0.0, 0.0),  # note [0][1] should actually be infinity...
               (1.3331174971891975, 0.75012142748740696),
               (1.3331174971891973, 0.75012142748740707)]
        for i in range(1, 3):
            for j in range(2):
                self.assertAlmostEqual(obs[i][j], exp[i][j])

class TestConstantCheck(unittest.TestCase):
    def test_constant_check(self):
        obs = diagnostics.constant_check(reg.x)
        self.assertEqual(obs, True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_diagnostics_sp.py b/pysal/spreg/tests/test_diagnostics_sp.py
new file mode 100644
index 0000000..3d9b8ff
--- /dev/null
+++ b/pysal/spreg/tests/test_diagnostics_sp.py
@@ -0,0 +1,179 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg import diagnostics
+from pysal.spreg.ols import OLS as OLS
+from pysal.spreg.twosls import TSLS as TSLS
+from pysal.spreg.twosls_sp import GM_Lag
+from pysal.spreg.diagnostics_sp import LMtests, MoranRes, spDcache, AKtest
+
+
class TestLMtests(unittest.TestCase):
    """LM diagnostics for spatial dependence on an OLS fit of the columbus
    data.  Previously every test method re-ran LMtests from scratch; the
    battery is now computed once in setUp and shared."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        y = np.array(db.by_col("HOVAL"))
        y = np.reshape(y, (49, 1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        X = np.array(X).T
        self.y = y
        self.X = X
        self.ols = OLS(self.y, self.X)
        w = pysal.open(pysal.examples.get_path('columbus.gal'), 'r').read()
        w.transform = 'r'
        self.w = w
        # compute the full battery once; each test reads one statistic
        self.lms = LMtests(self.ols, self.w)

    def test_lm_err(self):
        exp = np.array([3.097094, 0.078432])
        np.testing.assert_array_almost_equal(self.lms.lme, exp, decimal=6)

    def test_lm_lag(self):
        exp = np.array([0.981552, 0.321816])
        np.testing.assert_array_almost_equal(self.lms.lml, exp, decimal=6)

    def test_rlm_err(self):
        exp = np.array([3.209187, 0.073226])
        np.testing.assert_array_almost_equal(self.lms.rlme, exp, decimal=6)

    def test_rlm_lag(self):
        exp = np.array([1.093645, 0.295665])
        np.testing.assert_array_almost_equal(self.lms.rlml, exp, decimal=6)

    def test_lm_sarma(self):
        exp = np.array([4.190739, 0.123025])
        np.testing.assert_array_almost_equal(self.lms.sarma, exp, decimal=6)
+
+
class TestMoranRes(unittest.TestCase):
    """Moran's I on OLS residuals; the statistic is computed once in setUp
    instead of once per test method."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        y = np.array(db.by_col("HOVAL"))
        y = np.reshape(y, (49, 1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        X = np.array(X).T
        self.y = y
        self.X = X
        self.ols = OLS(self.y, self.X)
        w = pysal.open(pysal.examples.get_path('columbus.gal'), 'r').read()
        w.transform = 'r'
        self.w = w
        self.m = MoranRes(self.ols, self.w, z=True)

    def test_get_m_i(self):
        np.testing.assert_array_almost_equal(self.m.I, 0.17130999999999999, decimal=6)

    def test_get_v_i(self):
        np.testing.assert_array_almost_equal(self.m.vI, 0.0081300000000000001, decimal=6)

    def test_get_e_i(self):
        np.testing.assert_array_almost_equal(self.m.eI, -0.034522999999999998, decimal=6)

    def test_get_z_i(self):
        np.testing.assert_array_almost_equal(self.m.zI, 2.2827389999999999, decimal=6)
+
+
class TestAKTest(unittest.TestCase):
    """Anselin-Kelejian test on a TSLS fit.  Both the default and the
    'gen' case are computed once in setUp instead of once per test."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
        y = np.array(db.by_col("CRIME"))
        y = np.reshape(y, (49, 1))
        self.y = y
        X = []
        X.append(db.by_col("INC"))
        X = np.array(X).T
        self.X = X
        yd = []
        yd.append(db.by_col("HOVAL"))
        yd = np.array(yd).T
        self.yd = yd
        q = []
        q.append(db.by_col("DISCBD"))
        q = np.array(q).T
        self.q = q
        self.reg = TSLS(y, X, yd, q=q)
        w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        w.transform = 'r'
        self.w = w
        self.ak_def = AKtest(self.reg, self.w)
        self.ak_gen = AKtest(self.reg, self.w, case='gen')

    def test_gen_mi(self):
        np.testing.assert_array_almost_equal(self.ak_def.mi, 0.2232672865437263, decimal=6)

    def test_gen_ak(self):
        np.testing.assert_array_almost_equal(self.ak_def.ak, 4.6428948758930852, decimal=6)

    def test_gen_p(self):
        np.testing.assert_array_almost_equal(self.ak_def.p, 0.031182360054340875, decimal=6)

    def test_sp_mi(self):
        np.testing.assert_array_almost_equal(self.ak_gen.mi, 0.2232672865437263, decimal=6)

    def test_sp_ak(self):
        np.testing.assert_array_almost_equal(self.ak_gen.ak, 1.1575928784397795, decimal=6)

    def test_sp_p(self):
        np.testing.assert_array_almost_equal(self.ak_gen.p, 0.28196531619791054, decimal=6)
+
class TestSpDcache(unittest.TestCase):
    """Spatial-diagnostics cache quantities; the cache is built once in
    setUp instead of once per test method."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        y = np.array(db.by_col("HOVAL"))
        y = np.reshape(y, (49, 1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        X = np.array(X).T
        self.y = y
        self.X = X
        self.ols = OLS(self.y, self.X)
        w = pysal.open(pysal.examples.get_path('columbus.gal'), 'r').read()
        w.transform = 'r'
        self.w = w
        self.cache = spDcache(self.ols, self.w)

    def test_j(self):
        np.testing.assert_array_almost_equal(self.cache.j[0][0], 0.62330311259039439, decimal=6)

    def test_t(self):
        np.testing.assert_array_almost_equal(self.cache.t, 22.751186696900984, decimal=6)

    def test_trA(self):
        np.testing.assert_array_almost_equal(self.cache.trA, 1.5880426389276328, decimal=6)

    def test_utwuDs(self):
        np.testing.assert_array_almost_equal(self.cache.utwuDs[0][0], 8.3941977502916068, decimal=6)

    def test_utwyDs(self):
        np.testing.assert_array_almost_equal(self.cache.utwyDs[0][0], 5.475255215067957, decimal=6)

    def test_wu(self):
        np.testing.assert_array_almost_equal(self.cache.wu[0][0], -10.681344941514411, decimal=6)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_diagnostics_tsls.py b/pysal/spreg/tests/test_diagnostics_tsls.py
new file mode 100644
index 0000000..4423b33
--- /dev/null
+++ b/pysal/spreg/tests/test_diagnostics_tsls.py
@@ -0,0 +1,66 @@
+import unittest
+import numpy as np
+import pysal
+import pysal.spreg.diagnostics_tsls as diagnostics_tsls
+import pysal.spreg.diagnostics as diagnostics
+from pysal.spreg.ols import OLS as OLS
+from pysal.spreg.twosls import TSLS as TSLS
+from pysal.spreg.twosls_sp import GM_Lag
+from scipy.stats import pearsonr
+
+
+# create regression object used by the aspatial tests
+db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+y = np.array(db.by_col("CRIME"))
+y = np.reshape(y, (49,1))
+X = []
+X.append(db.by_col("INC"))
+X = np.array(X).T    
+yd = []
+yd.append(db.by_col("HOVAL"))  # endogenous regressor
+yd = np.array(yd).T
+q = []
+q.append(db.by_col("DISCBD"))  # instrument for HOVAL
+q = np.array(q).T
+reg = TSLS(y, X, yd, q)
+
+# create regression object for spatial test
+db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+y = np.array(db.by_col("HOVAL"))
+y = np.reshape(y, (49,1))
+X = np.array(db.by_col("INC"))
+X = np.reshape(X, (49,1))
+yd = np.array(db.by_col("CRIME"))
+yd = np.reshape(yd, (49,1))
+q = np.array(db.by_col("DISCBD"))
+q = np.reshape(q, (49,1))
+w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp")) 
+w.transform = 'r'  # row-standardized weights
+regsp = GM_Lag(y, X, w=w, yend=yd, q=q, w_lags=2)
+
+
+class TestTStat(unittest.TestCase):
+    def test_t_stat(self):
+        obs = diagnostics_tsls.t_stat(reg)
+        exp = [(5.8452644704588588, 4.9369075950019865e-07),
+               (0.36760156683572748, 0.71485634049075841),
+               (-1.9946891307832111, 0.052021795864651159)]
+        for i in range(3):
+            for j in range(2):
+                self.assertAlmostEquals(obs[i][j],exp[i][j])
+
+class TestPr2Aspatial(unittest.TestCase):
+    def test_pr2_aspatial(self):
+        obs = diagnostics_tsls.pr2_aspatial(reg)
+        exp = 0.2793613712817381
+        self.assertAlmostEquals(obs,exp)
+
+class TestPr2Spatial(unittest.TestCase):
+    def test_pr2_spatial(self):
+        obs = diagnostics_tsls.pr2_spatial(regsp)
+        exp = 0.29964855438065163
+        self.assertAlmostEquals(obs,exp)
+
+
+if __name__ == '__main__':
+    unittest.main()  # allow running this test module directly
diff --git a/pysal/spreg/tests/test_error_sp.py b/pysal/spreg/tests/test_error_sp.py
new file mode 100644
index 0000000..1aba6db
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp.py
@@ -0,0 +1,317 @@
+import unittest
+import scipy
+import pysal
+import numpy as np
+from pysal.spreg import error_sp as SP
+
+class TestBaseGMError(unittest.TestCase):  # GM error model, low-level (Base) API taking a sparse W
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))  # Base API needs an explicit constant column
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.BaseGM_Error(self.y, self.X, self.w.sparse)
+        betas = np.array([[ 47.94371455], [  0.70598088], [ -0.55571746], [  0.37230161]])  # last entry is lambda
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 27.4739775])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        e = np.array([ 31.89620319])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[  1.51884943e+02,  -5.37622793e+00,  -1.86970286e+00], [ -5.37622793e+00,   2.48972661e-01,   5.26564244e-02], [ -1.86970286e+00,   5.26564244e-02, 3.18930650e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 191.73716465732355
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+
+class TestGMError(unittest.TestCase):  # GM error model, user-facing API (constant and W handled internally)
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.GM_Error(self.y, self.X, self.w)
+        betas = np.array([[ 47.94371455], [  0.70598088], [ -0.55571746], [  0.37230161]])  # same point estimates as Base variant
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 27.4739775])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        e = np.array([ 31.89620319])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[  1.51884943e+02,  -5.37622793e+00,  -1.86970286e+00], [ -5.37622793e+00,   2.48972661e-01,   5.26564244e-02], [ -1.86970286e+00,   5.26564244e-02, 3.18930650e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 191.73716465732355
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.3495097406012179
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 12.32416094,   0.4989716 ,   0.1785863 ])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[  3.89022140e+00,   1.00152805e-04], [  1.41487186e+00,   1.57106070e-01], [ -3.11175868e+00,   1.85976455e-03]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestBaseGMEndogError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.BaseGM_Endog_Error(self.y, self.X, self.yd, self.q, self.w.sparse)
+        betas = np.array([[ 55.36095292], [  0.46411479], [ -0.66883535], [  0.38989939]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 26.55951566])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e = np.array([ 31.23925425])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 53.9074875])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        yend = np.array([  15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        #std_y
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        #vm
+        vm = np.array([[  5.29158422e+02,  -1.57833675e+01,  -8.38021080e+00],
+       [ -1.57833675e+01,   5.40235041e-01,   2.31120327e-01],
+       [ -8.38021080e+00,   2.31120327e-01,   1.44977385e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 192.50022721929574
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestGMEndogError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.GM_Endog_Error(self.y, self.X, self.yd, self.q, self.w)
+        betas = np.array([[ 55.36095292], [  0.46411479], [ -0.66883535], [  0.38989939]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 26.55951566])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e = np.array([ 31.23925425])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 53.9074875])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        yend = np.array([  15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[  5.29158422e+02,  -1.57833675e+01,  -8.38021080e+00],
+       [ -1.57833675e+01,   5.40235041e-01,   2.31120327e-01],
+       [ -8.38021080e+00,   2.31120327e-01,   1.44977385e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        pr2 = 0.346472557570858
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 192.50022721929574
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        std_err = np.array([ 23.003401  ,   0.73500657,   0.38075777])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[ 2.40664208,  0.01609994], [ 0.63144305,  0.52775088], [-1.75659016,  0.07898769]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestBaseGMCombo(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        # Only spatial lag
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = SP.BaseGM_Combo(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse)
+        betas = np.array([[ 57.61123461],[  0.73441314], [ -0.59459416], [ -0.21762921], [  0.54732051]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 25.57932637])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e_filtered = np.array([ 31.65374945])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,4)
+        predy = np.array([ 54.88767663])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        yend = np.array([  35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([  5.22438365e+02,   2.38012873e-01,   3.20924172e-02,
+         2.15753599e-01])
+        np.testing.assert_array_almost_equal(np.diag(reg.vm),vm,4)
+        sig2 = 181.78650186468832
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestGMCombo(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        # Only spatial lag
+        reg = SP.GM_Combo(self.y, self.X, w=self.w)
+        e_reduced = np.array([ 28.18617481])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],e_reduced,4)
+        predy_e = np.array([ 52.28082782])
+        np.testing.assert_array_almost_equal(reg.predy_e[0],predy_e,4)
+        betas = np.array([[ 57.61123515],[  0.73441313], [ -0.59459416], [ -0.21762921], [  0.54732051]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 25.57932637])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e_filtered = np.array([ 31.65374945])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,4)
+        predy = np.array([ 54.88767685])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        yend = np.array([  35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([  5.22438333e+02,   2.38012875e-01,   3.20924173e-02,
+         2.15753579e-01])
+        np.testing.assert_array_almost_equal(np.diag(reg.vm),vm,4)
+        sig2 = 181.78650186468832
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.3018280166937799
+        self.assertAlmostEqual(reg.pr2,pr2,4)
+        pr2_e = 0.3561355586759414
+        self.assertAlmostEqual(reg.pr2_e,pr2_e,4)
+        std_err = np.array([ 22.85692222,  0.48786559,  0.17914356,  0.46449318])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[  2.52051597e+00,   1.17182922e-02], [  1.50535954e+00,   1.32231664e-01], [ -3.31909311e+00,   9.03103123e-04], [ -4.68530506e-01,   6.39405261e-01]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+if __name__ == '__main__':
+    unittest.main()  # allow running this test module directly
diff --git a/pysal/spreg/tests/test_error_sp_het.py b/pysal/spreg/tests/test_error_sp_het.py
new file mode 100644
index 0000000..1c2ce6e
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_het.py
@@ -0,0 +1,407 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.spreg import error_sp_het as HET
+
+class TestBaseGMErrorHet(unittest.TestCase):  # heteroskedastic GM error model, low-level (Base) API
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))  # Base API needs an explicit constant column
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = HET.BaseGM_Error_Het(self.y, self.X, self.w.sparse, step1c=True)
+        betas = np.array([[ 47.99626638], [  0.71048989], [ -0.55876126], [  0.41178776]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 27.38122697])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 32.29765975])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        predy = np.array([ 53.08577603])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy)
+        vm = np.array([[  1.31767529e+02,  -3.58368748e+00,  -1.65090647e+00,
+              0.00000000e+00],
+           [ -3.58368748e+00,   1.35513711e-01,   3.77539055e-02,
+              0.00000000e+00],
+           [ -1.65090647e+00,   3.77539055e-02,   2.61042702e-02,
+              0.00000000e+00],
+           [  0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
+              2.82398517e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03],
+           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04],
+           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+             
+class TestGMErrorHet(unittest.TestCase):  # heteroskedastic GM error model, user-facing API
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = HET.GM_Error_Het(self.y, self.X, self.w, step1c=True)
+        betas = np.array([[ 47.99626638], [  0.71048989], [ -0.55876126], [  0.41178776]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 27.38122697])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 32.29765975])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        predy = np.array([ 53.08577603])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy)
+        vm = np.array([[  1.31767529e+02,  -3.58368748e+00,  -1.65090647e+00,
+              0.00000000e+00],
+           [ -3.58368748e+00,   1.35513711e-01,   3.77539055e-02,
+              0.00000000e+00],
+           [ -1.65090647e+00,   3.77539055e-02,   2.61042702e-02,
+              0.00000000e+00],
+           [  0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
+              2.82398517e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        pr2 = 0.34951013222581306
+        self.assertAlmostEqual(reg.pr2,pr2)
+        stde = np.array([ 11.47900385,   0.36812187,   0.16156816,   0.16804717])
+        np.testing.assert_array_almost_equal(reg.std_err,stde,4)
+        z_stat = np.array([[  4.18122226e+00,   2.89946274e-05],
+           [  1.93003988e+00,   5.36018970e-02],
+           [ -3.45836247e+00,   5.43469673e-04],
+           [  2.45042960e+00,   1.42685863e-02]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03],
+           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04],
+           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
+class TestBaseGMEndogErrorHet(unittest.TestCase):  # heteroskedastic GM error with endogenous regressor, Base API
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))  # Base API needs an explicit constant column
+        yd = []
+        yd.append(db.by_col("CRIME"))  # endogenous regressor
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))  # instrument
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = HET.BaseGM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w.sparse, step1c=True)
+        betas = np.array([[ 55.39707924], [  0.46563046], [ -0.67038326], [  0.41135023]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 26.51812895])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 31.46604707])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        predy = np.array([ 53.94887405])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy)
+        vm = np.array([[  8.34637805e+02,  -2.16932259e+01,  -1.33327894e+01,
+                  1.65840848e+00],
+               [ -2.16932259e+01,   5.97683070e-01,   3.39503523e-01,
+                 -3.90111107e-02],
+               [ -1.33327894e+01,   3.39503523e-01,   2.19008080e-01,
+                 -2.81929695e-02],
+               [  1.65840848e+00,  -3.90111107e-02,  -2.81929695e-02,
+                  3.15686105e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+               [   704.371999  ,  11686.67338121,   2246.12800625],
+               [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,6)
+        
+class TestGMEndogErrorHet(unittest.TestCase):  # heteroskedastic GM error with endogenous regressor, user-facing API
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        yd = []
+        yd.append(db.by_col("CRIME"))  # endogenous regressor
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))  # instrument
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = HET.GM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w, step1c=True)
+        betas = np.array([[ 55.39707924], [  0.46563046], [ -0.67038326], [  0.41135023]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 26.51812895])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        predy = np.array([ 53.94887405])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy)
+        vm = np.array([[  8.34637805e+02,  -2.16932259e+01,  -1.33327894e+01,
+                  1.65840848e+00],
+               [ -2.16932259e+01,   5.97683070e-01,   3.39503523e-01,
+                 -3.90111107e-02],
+               [ -1.33327894e+01,   3.39503523e-01,   2.19008080e-01,
+                 -2.81929695e-02],
+               [  1.65840848e+00,  -3.90111107e-02,  -2.81929695e-02,
+                  3.15686105e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        pr2 = 0.34648011338954804
+        self.assertAlmostEqual(reg.pr2,pr2,7)
+        std_err = np.array([ 28.89009873,  0.77309965,  0.46798299,
+            0.17767558])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([(1.9175109006819244, 0.055173057472126787), (0.60229035155742305, 0.54698088217644414), (-1.4324949211864271, 0.15200223057569454), (2.3151759776869496, 0.020603303355572443)])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+               [   704.371999  ,  11686.67338121,   2246.12800625],
+               [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,6)
+ 
+class TestBaseGMComboHet(unittest.TestCase):  # heteroskedastic GM combo (error + lag) model, Base API
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        # Only spatial lag
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)  # build lag endog + instruments
+        self.X = np.hstack((np.ones(self.y.shape),self.X))  # Base API needs an explicit constant column
+        reg = HET.BaseGM_Combo_Het(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, step1c=True)
+        betas = np.array([[ 57.7778574 ], [  0.73034922], [ -0.59257362], [ -0.2230231 ], [  0.56636724]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 25.65156033])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 31.87664403])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        predy = np.array([ 54.81544267])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594    ,  24.7142675])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy,7)
+        vm = np.array([[  4.86218274e+02,  -2.77268729e+00,  -1.59987770e+00,
+             -1.01969471e+01,   2.74302006e+00],
+           [ -2.77268729e+00,   1.04680972e-01,   2.51172238e-02,
+              1.95136385e-03,   3.70052723e-03],
+           [ -1.59987770e+00,   2.51172238e-02,   2.15655720e-02,
+              7.65868344e-03,  -7.30173070e-03],
+           [ -1.01969471e+01,   1.95136385e-03,   7.65868344e-03,
+              2.78273684e-01,  -6.89402590e-02],
+           [  2.74302006e+00,   3.70052723e-03,  -7.30173070e-03,
+             -6.89402590e-02,   7.12034037e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        hth = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03,
+              7.24743592e+02,   1.70735413e+03],
+           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04,
+              1.10925200e+04,   2.23848036e+04],
+           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04,
+              2.34796298e+04,   6.70145378e+04],
+           [  7.24743592e+02,   1.10925200e+04,   2.34796298e+04,
+              1.16146226e+04,   2.30304624e+04],
+           [  1.70735413e+03,   2.23848036e+04,   6.70145378e+04,
+              2.30304624e+04,   6.69879858e+04]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
class TestGMComboHet(unittest.TestCase):
    """Regression tests for the user-facing GM_Combo_Het wrapper.

    Unlike the Base* variant, the wrapper builds the spatial lag and the
    constant term itself; the expected values match the Base* test above.
    """

    def setUp(self):
        # Columbus fixture: HOVAL regressed on INC and CRIME, rook
        # contiguity weights, row-standardized.
        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        self.X = np.array(X).T
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        """Compare GM_Combo_Het estimates/diagnostics with reference values."""
        # Only spatial lag
        reg = HET.GM_Combo_Het(self.y, self.X, w=self.w, step1c=True)
        # Parameter estimates (stored reference values).
        betas = np.array([[ 57.7778574 ], [  0.73034922], [ -0.59257362], [ -0.2230231 ], [  0.56636724]])
        np.testing.assert_array_almost_equal(reg.betas,betas,7)
        # First-observation residuals and predictions (plain, filtered,
        # and the e_pred/predy_e variants exposed by the wrapper).
        u = np.array([ 25.65156033])
        np.testing.assert_array_almost_equal(reg.u[0],u,7)
        ef = np.array([ 31.87664403])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
        ep = np.array([ 28.30648145])
        np.testing.assert_array_almost_equal(reg.e_pred[0],ep,7)
        pe = np.array([ 52.16052155])
        np.testing.assert_array_almost_equal(reg.predy_e[0],pe,7)
        predy = np.array([ 54.81544267])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
        # Model dimensions.
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 4
        self.assertAlmostEqual(reg.k,k)
        # First rows of the data matrices stored on the results object.
        y = np.array([ 80.467003])
        np.testing.assert_array_almost_equal(reg.y[0],y,7)
        x = np.array([  1.     ,  19.531  ,  15.72598])
        np.testing.assert_array_almost_equal(reg.x[0],x,7)
        yend = np.array([ 35.4585005])
        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
        q = np.array([ 18.594    ,  24.7142675])
        np.testing.assert_array_almost_equal(reg.q[0],q,7)
        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
        np.testing.assert_array_almost_equal(reg.z[0],z,7)
        # Iteration bookkeeping of the GMM step.
        i_s = 'Maximum number of iterations reached.'
        np.testing.assert_string_equal(reg.iter_stop,i_s)
        its = 1
        self.assertAlmostEqual(reg.iteration,its,7)
        # Summary statistics of the dependent variable.
        my = 38.436224469387746
        self.assertAlmostEqual(reg.mean_y,my)
        stdy = 18.466069465206047
        self.assertAlmostEqual(reg.std_y,stdy)
        # Variance-covariance matrix of the estimates.
        vm = np.array([[  4.86218274e+02,  -2.77268729e+00,  -1.59987770e+00,
             -1.01969471e+01,   2.74302006e+00],
           [ -2.77268729e+00,   1.04680972e-01,   2.51172238e-02,
              1.95136385e-03,   3.70052723e-03],
           [ -1.59987770e+00,   2.51172238e-02,   2.15655720e-02,
              7.65868344e-03,  -7.30173070e-03],
           [ -1.01969471e+01,   1.95136385e-03,   7.65868344e-03,
              2.78273684e-01,  -6.89402590e-02],
           [  2.74302006e+00,   3.70052723e-03,  -7.30173070e-03,
             -6.89402590e-02,   7.12034037e-02]])
        np.testing.assert_array_almost_equal(reg.vm,vm,6)
        # Pseudo R-squared statistics and inference results.
        pr2 = 0.3001582877472412
        self.assertAlmostEqual(reg.pr2,pr2,7)
        pr2_e = 0.35613102283621967
        self.assertAlmostEqual(reg.pr2_e,pr2_e,7)
        std_err = np.array([ 22.05035768,  0.32354439,  0.14685221,  0.52751653,  0.26683966])
        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
        z_stat = np.array([(2.6202684885795335, 0.00878605635338265), (2.2573385444145524, 0.023986928627746887), (-4.0351698589183433, 5.456281036278686e-05), (-0.42277935292121521, 0.67245625315942159), (2.1225002455741895, 0.033795752094112265)])
        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
        # hth attribute (cross-product of the instrument matrix).
        hth = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03,
              7.24743592e+02,   1.70735413e+03],
           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04,
              1.10925200e+04,   2.23848036e+04],
           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04,
              2.34796298e+04,   6.70145378e+04],
           [  7.24743592e+02,   1.10925200e+04,   2.34796298e+04,
              1.16146226e+04,   2.30304624e+04],
           [  1.70735413e+03,   2.23848036e+04,   6.70145378e+04,
              2.30304624e+04,   6.69879858e+04]])
        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/spreg/tests/test_error_sp_het_regimes.py b/pysal/spreg/tests/test_error_sp_het_regimes.py
new file mode 100644
index 0000000..4eab0f5
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_het_regimes.py
@@ -0,0 +1,307 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.spreg import error_sp_het_regimes as SP
+from pysal.spreg.error_sp_het import GM_Error_Het, GM_Endog_Error_Het, GM_Combo_Het
+
class TestGM_Error_Het_Regimes(unittest.TestCase):
    """Regression tests for the regime-wise heteroskedastic spatial error
    estimators (GM_Error_Het_Regimes, GM_Endog_Error_Het_Regimes and
    GM_Combo_Het_Regimes).

    Columbus data pins estimates/diagnostics to stored reference values;
    an artificial two-regime lattice fixture checks that the
    ``regime_err_sep``/``regime_lag_sep`` options reproduce the
    corresponding non-regime models fitted on each regime separately.
    """

    def setUp(self):
        # Columbus: CRIME on HOVAL/INC, with HOVAL optionally endogenous
        # (instrumented by DISCBD); queen contiguity weights,
        # row-standardized; regimes taken from the NSA column.
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        y = np.array(db.by_col("CRIME"))
        self.y = np.reshape(y, (49, 1))
        X = []
        X.append(db.by_col("HOVAL"))
        X.append(db.by_col("INC"))
        self.X = np.array(X).T
        X2 = []
        X2.append(db.by_col("INC"))
        self.X2 = np.array(X2).T
        yd = []
        yd.append(db.by_col("HOVAL"))
        self.yd = np.array(yd).T
        q = []
        q.append(db.by_col("DISCBD"))
        self.q = np.array(q).T
        self.w = pysal.queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'
        self.r_var = 'NSA'
        self.regimes = db.by_col(self.r_var)
        # Artificial: two equally sized regimes on a regular lattice.
        # Floor division keeps the sizes integers under Python 3 as well
        # (plain '/' would yield floats and break reshape/lat2W there).
        n = 256
        self.n2 = n // 2
        self.x_a1 = np.random.uniform(-10, 10, (n, 1))
        self.x_a2 = np.random.uniform(1, 5, (n, 1))
        self.q_a = self.x_a2 + np.random.normal(0, 1, (n, 1))
        self.x_a = np.hstack((self.x_a1, self.x_a2))
        self.y_a = np.dot(np.hstack((np.ones((n, 1)), self.x_a)), np.array([[1], [0.5], [2]])) + np.random.normal(0, 1, (n, 1))
        latt = int(np.sqrt(n))
        self.w_a = pysal.lat2W(latt, latt)
        self.w_a.transform = 'r'
        self.regi_a = [0] * self.n2 + [1] * self.n2
        self.w_a1 = pysal.lat2W(latt // 2, latt)
        self.w_a1.transform = 'r'

    def test_model(self):
        """GM_Error_Het_Regimes estimates against stored reference values."""
        reg = SP.GM_Error_Het_Regimes(self.y, self.X, self.regimes, self.w)
        # Parameter estimates: one set per regime plus the error parameter.
        betas = np.array([[ 62.95986466],
       [ -0.15660795],
       [ -1.49054832],
       [ 60.98577615],
       [ -0.3358993 ],
       [ -0.82129289],
       [  0.54662719]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        u = np.array([-2.19031456])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        predy = np.array([ 17.91629456])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        n = 49
        self.assertAlmostEqual(reg.n,n,6)
        k = 6
        self.assertAlmostEqual(reg.k,k,6)
        y = np.array([ 15.72598])
        np.testing.assert_array_almost_equal(reg.y[0],y,6)
        # x is stored sparse (regime-blocked design), hence toarray().
        x = np.array([[  0.      ,   0.      ,   0.      ,   1.      ,  80.467003,  19.531   ]])
        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
        e = np.array([ 2.77847355])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
        my = 35.128823897959187
        self.assertAlmostEqual(reg.mean_y,my)
        sy = 16.732092091229699
        self.assertAlmostEqual(reg.std_y,sy)
        vm = np.array([  3.86154100e+01,  -2.51553730e-01,  -8.20138673e-01,
         1.71714184e+00,  -1.94929113e-02,   1.23118051e-01,
         0.00000000e+00])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
        pr2 = 0.5515791216043385
        self.assertAlmostEqual(reg.pr2,pr2)
        std_err = np.array([ 6.21412987,  0.15340022,  0.44060473,  7.6032169 ,  0.19353719,
        0.73621596,  0.13968272])
        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
        # Chow tests of coefficient equality across regimes.
        chow_r = np.array([[ 0.04190799,  0.83779526],
       [ 0.5736724 ,  0.44880328],
       [ 0.62498575,  0.42920056]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 0.72341901308525713
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)

    def test_model_regi_error(self):
        """regime_err_sep=True: per-regime error process."""
        # Columbus:
        reg = SP.GM_Error_Het_Regimes(self.y, self.X, self.regimes, self.w, regime_err_sep=True)
        betas = np.array([[ 60.74090229],
       [ -0.17492294],
       [ -1.33383387],
       [  0.68303064],
       [ 66.30374279],
       [ -0.31841139],
       [ -1.27502813],
       [  0.11515312]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        vm = np.array([ 44.9411672 ,  -0.34343354,  -0.39946055,   0.        ,
         0.        ,   0.        ,   0.        ,   0.        ])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
        u = np.array([-0.05357818])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        predy = np.array([ 15.77955818])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        e = np.array([ 0.70542044])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
        chow_r = np.array([[  3.11061225e-01,   5.77029704e-01],
       [  3.39747489e-01,   5.59975012e-01],
       [  3.86371771e-03,   9.50436364e-01],
       [  4.02884201e+00,   4.47286322e-02]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 4.7467070503995412
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
        # Artificial: the regime model must match two independent
        # single-regime fits on the respective halves of the data.
        model = SP.GM_Error_Het_Regimes(self.y_a, self.x_a, self.regi_a, w=self.w_a, regime_err_sep=True)
        model1 = GM_Error_Het(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a[0:(self.n2)], w=self.w_a1)
        model2 = GM_Error_Het(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a[(self.n2):], w=self.w_a1)
        tbetas = np.vstack((model1.betas, model2.betas))
        np.testing.assert_array_almost_equal(model.betas,tbetas)
        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)

    def test_model_endog(self):
        """GM_Endog_Error_Het_Regimes with HOVAL endogenous."""
        reg = SP.GM_Endog_Error_Het_Regimes(self.y, self.X2, self.yd, self.q, self.regimes, self.w)
        betas = np.array([[ 77.26679984],
       [  4.45992905],
       [ 78.59534391],
       [  0.41432319],
       [ -3.20196286],
       [ -1.13672283],
       [  0.2174965 ]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        u = np.array([ 20.50716917])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        e = np.array([ 25.13517175])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
        predy = np.array([-4.78118917])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 6
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 15.72598])
        np.testing.assert_array_almost_equal(reg.y[0],y,6)
        # Regime-blocked matrices are sparse, hence toarray().
        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
        yend = np.array([[  0.      ,  80.467003]])
        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,6)
        z = np.array([[  0.      ,   0.      ,   1.      ,  19.531   ,   0.      ,
         80.467003]])
        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,6)
        my = 35.128823897959187
        self.assertAlmostEqual(reg.mean_y,my)
        sy = 16.732092091229699
        self.assertAlmostEqual(reg.std_y,sy)
        vm = np.array([ 509.66122149,  150.5845341 ,    9.64413821,    5.54782831,
        -80.95846045,   -2.25308524,   -3.2045214 ])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,5)
        pr2 = 0.19776512679331681
        self.assertAlmostEqual(reg.pr2,pr2)
        std_err = np.array([ 22.57567765,  11.34616946,  17.43881791,   1.30953812,
         5.4830829 ,   0.74634612,   0.29973079])
        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
        chow_r = np.array([[ 0.0022216 ,  0.96240654],
       [ 0.13127347,  0.7171153 ],
       [ 0.14367307,  0.70465645]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 1.2329971019087163
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)

    def test_model_endog_regi_error(self):
        """Endogenous variant with regime_err_sep=True."""
        # Columbus:
        reg = SP.GM_Endog_Error_Het_Regimes(self.y, self.X2, self.yd, self.q, self.regimes, self.w, regime_err_sep=True)
        betas = np.array([[  7.92747424e+01],
       [  5.78086230e+00],
       [ -3.83173581e+00],
       [  2.23210962e-01],
       [  8.26255251e+01],
       [  5.48294187e-01],
       [ -1.28432891e+00],
       [  3.57661629e-02]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        vm = np.array([  7.55988579e+02,   2.53659722e+02,  -1.34288316e+02,
        -2.66141766e-01,   0.00000000e+00,   0.00000000e+00,
         0.00000000e+00,   0.00000000e+00])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
        u = np.array([ 25.73781918])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        predy = np.array([-10.01183918])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        e = np.array([ 26.5449135])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
        chow_r = np.array([[ 0.00998573,  0.92040097],
       [ 0.12660165,  0.72198192],
       [ 0.12737281,  0.72117171],
       [ 0.43507956,  0.50950696]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 1.3756768204399892
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
        # Artificial: must reproduce two independent per-regime fits.
        model = SP.GM_Endog_Error_Het_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True)
        model1 = GM_Endog_Error_Het(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1)
        model2 = GM_Endog_Error_Het(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1)
        tbetas = np.vstack((model1.betas, model2.betas))
        np.testing.assert_array_almost_equal(model.betas,tbetas)
        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)

    def test_model_combo(self):
        """GM_Combo_Het_Regimes: spatial lag plus endogenous HOVAL."""
        reg = SP.GM_Combo_Het_Regimes(self.y, self.X2, self.regimes, self.yd, self.q, w=self.w)
        betas = np.array([[  3.69372678e+01],
       [ -8.29474998e-01],
       [  3.08667517e+01],
       [ -7.23753444e-01],
       [ -3.01900940e-01],
       [ -2.21328949e-01],
       [  6.41902155e-01],
       [ -2.45714919e-02]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        u = np.array([ 0.94039246])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        e_filtered = np.array([ 0.8737864])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,5)
        predy_e = np.array([ 18.68732105])
        np.testing.assert_array_almost_equal(reg.predy_e[0],predy_e,6)
        predy = np.array([ 14.78558754])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 7
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 15.72598])
        np.testing.assert_array_almost_equal(reg.y[0],y,6)
        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
        yend = np.array([[  0.       ,  80.467003 ,  24.7142675]])
        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,6)
        z = np.array([[  0.       ,   0.       ,   1.       ,  19.531    ,   0.       ,
         80.467003 ,  24.7142675]])
        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,6)
        my = 35.128823897959187
        self.assertAlmostEqual(reg.mean_y,my)
        sy = 16.732092091229699
        self.assertAlmostEqual(reg.std_y,sy)
        vm = np.array([ 71.26851365,  -0.58278032,  50.53169815,  -0.74561147,
        -0.79510274,  -0.10823496,  -0.98141395,   1.16575965])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
        pr2 = 0.6504148883602958
        self.assertAlmostEqual(reg.pr2,pr2)
        pr2_e = 0.527136896994038
        self.assertAlmostEqual(reg.pr2_e,pr2_e)
        std_err = np.array([ 8.44206809,  0.72363219,  9.85790968,  0.77218082,  0.34084146,
        0.21752916,  0.14371614,  0.39226478])
        np.testing.assert_array_almost_equal(reg.std_err,std_err,5)
        chow_r = np.array([[ 0.54688708,  0.45959243],
       [ 0.01035136,  0.91896175],
       [ 0.03981108,  0.84185042]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 0.78070369988354349
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)

    def test_model_combo_regi_error(self):
        """Combo variant with separate lag and error per regime."""
        # Columbus:
        reg = SP.GM_Combo_Het_Regimes(self.y, self.X2, self.regimes, self.yd, self.q, w=self.w, regime_lag_sep=True, regime_err_sep=True)
        betas = np.array([[ 42.01151458],
       [ -0.13917151],
       [ -0.65300184],
       [  0.54737064],
       [  0.2629229 ],
       [ 34.21569751],
       [ -0.15236089],
       [ -0.49175217],
       [  0.65733173],
       [ -0.07713581]])
        np.testing.assert_array_almost_equal(reg.betas,betas,6)
        vm = np.array([ 77.49519689,   0.57226879,  -1.18856422,  -1.28088712,
         0.866752  ,   0.        ,   0.        ,   0.        ,
         0.        ,   0.        ])
        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
        u = np.array([ 7.81039418])
        np.testing.assert_array_almost_equal(reg.u[0],u,6)
        predy = np.array([ 7.91558582])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
        e = np.array([ 7.22996911])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
        chow_r = np.array([[  1.90869079e-01,   6.62194273e-01],
       [  4.56118982e-05,   9.94611401e-01],
       [  3.12104263e-02,   8.59771748e-01],
       [  1.56368204e-01,   6.92522476e-01],
       [  7.52928732e-01,   3.85550558e-01]])
        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
        chow_j = 1.1316136604755913
        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
        # Artificial: must reproduce two independent per-regime fits.
        model = SP.GM_Combo_Het_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True, regime_lag_sep=True)
        model1 = GM_Combo_Het(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1)
        model2 = GM_Combo_Het(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1)
        tbetas = np.vstack((model1.betas, model2.betas))
        np.testing.assert_array_almost_equal(model.betas,tbetas)
        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
        # Bug fix: this assertion was missing, leaving the vm comparison
        # dead code (the sibling tests above do assert it).
        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/spreg/tests/test_error_sp_het_sparse.py b/pysal/spreg/tests/test_error_sp_het_sparse.py
new file mode 100644
index 0000000..ff8a80d
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_het_sparse.py
@@ -0,0 +1,414 @@
+import unittest
+import pysal
+import numpy as np
+from scipy import sparse
+from pysal.spreg import error_sp_het as HET
+
class TestBaseGMErrorHet(unittest.TestCase):
    """Tests BaseGM_Error_Het when X is a scipy.sparse CSR matrix.

    Expected values are identical to the dense-matrix test module; only
    the storage format of the design matrix differs.
    """

    def setUp(self):
        # HOVAL on INC and CRIME; the constant column is prepended here
        # because the Base* classes take X as given; X converted to CSR.
        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        self.X = np.array(X).T
        self.X = np.hstack((np.ones(self.y.shape),self.X))
        self.X = sparse.csr_matrix(self.X)
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        """Compare estimates and diagnostics with stored reference values."""
        reg = HET.BaseGM_Error_Het(self.y, self.X, self.w.sparse, step1c=True)
        betas = np.array([[ 47.99626638], [  0.71048989], [ -0.55876126], [  0.41178776]])
        np.testing.assert_array_almost_equal(reg.betas,betas,7)
        u = np.array([ 27.38122697])
        np.testing.assert_array_almost_equal(reg.u[0],u,7)
        ef = np.array([ 32.29765975])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
        predy = np.array([ 53.08577603])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 3
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 80.467003])
        np.testing.assert_array_almost_equal(reg.y[0],y,7)
        # x stays sparse on the results object, hence toarray().
        x = np.array([  1.     ,  19.531  ,  15.72598])
        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
        i_s = 'Maximum number of iterations reached.'
        np.testing.assert_string_equal(reg.iter_stop,i_s)
        its = 1
        self.assertAlmostEqual(reg.iteration,its,7)
        my = 38.436224469387746
        self.assertAlmostEqual(reg.mean_y,my)
        stdy = 18.466069465206047
        self.assertAlmostEqual(reg.std_y,stdy)
        # Variance-covariance matrix of the estimates.
        vm = np.array([[  1.31767529e+02,  -3.58368748e+00,  -1.65090647e+00,
              0.00000000e+00],
           [ -3.58368748e+00,   1.35513711e-01,   3.77539055e-02,
              0.00000000e+00],
           [ -1.65090647e+00,   3.77539055e-02,   2.61042702e-02,
              0.00000000e+00],
           [  0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
              2.82398517e-02]])
        np.testing.assert_array_almost_equal(reg.vm,vm,6)
        # xtx attribute (cross-product of the design matrix).
        xtx = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03],
           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04],
           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04]])
        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+             
class TestGMErrorHet(unittest.TestCase):
    """Tests the user-facing GM_Error_Het wrapper with sparse X.

    The wrapper adds the constant itself, so X here is just INC/CRIME in
    CSR form; expected values match the Base* test above.
    """

    def setUp(self):
        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        self.X = np.array(X).T
        self.X = sparse.csr_matrix(self.X)
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        """Compare estimates, inference and diagnostics with references."""
        reg = HET.GM_Error_Het(self.y, self.X, self.w, step1c=True)
        betas = np.array([[ 47.99626638], [  0.71048989], [ -0.55876126], [  0.41178776]])
        np.testing.assert_array_almost_equal(reg.betas,betas,7)
        u = np.array([ 27.38122697])
        np.testing.assert_array_almost_equal(reg.u[0],u,7)
        ef = np.array([ 32.29765975])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
        predy = np.array([ 53.08577603])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 3
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 80.467003])
        np.testing.assert_array_almost_equal(reg.y[0],y,7)
        # x stays sparse on the results object, hence toarray().
        x = np.array([  1.     ,  19.531  ,  15.72598])
        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
        i_s = 'Maximum number of iterations reached.'
        np.testing.assert_string_equal(reg.iter_stop,i_s)
        its = 1
        self.assertAlmostEqual(reg.iteration,its,7)
        my = 38.436224469387746
        self.assertAlmostEqual(reg.mean_y,my)
        stdy = 18.466069465206047
        self.assertAlmostEqual(reg.std_y,stdy)
        # Variance-covariance matrix of the estimates.
        vm = np.array([[  1.31767529e+02,  -3.58368748e+00,  -1.65090647e+00,
              0.00000000e+00],
           [ -3.58368748e+00,   1.35513711e-01,   3.77539055e-02,
              0.00000000e+00],
           [ -1.65090647e+00,   3.77539055e-02,   2.61042702e-02,
              0.00000000e+00],
           [  0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
              2.82398517e-02]])
        np.testing.assert_array_almost_equal(reg.vm,vm,6)
        # Fit statistic and inference results.
        pr2 = 0.34951013222581306
        self.assertAlmostEqual(reg.pr2,pr2)
        stde = np.array([ 11.47900385,   0.36812187,   0.16156816,   0.16804717])
        np.testing.assert_array_almost_equal(reg.std_err,stde,4)
        z_stat = np.array([[  4.18122226e+00,   2.89946274e-05],
           [  1.93003988e+00,   5.36018970e-02],
           [ -3.45836247e+00,   5.43469673e-04],
           [  2.45042960e+00,   1.42685863e-02]])
        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
        # xtx attribute (cross-product of the design matrix).
        xtx = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03],
           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04],
           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04]])
        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
class TestBaseGMEndogErrorHet(unittest.TestCase):
    """Tests BaseGM_Endog_Error_Het with sparse X and endogenous CRIME.

    CRIME is treated as endogenous and instrumented by DISCBD; the
    constant is added manually because Base* classes take X as given.
    """

    def setUp(self):
        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        self.X = np.array(X).T
        self.X = np.hstack((np.ones(self.y.shape),self.X))
        self.X = sparse.csr_matrix(self.X)
        # Endogenous regressor and its external instrument.
        yd = []
        yd.append(db.by_col("CRIME"))
        self.yd = np.array(yd).T
        q = []
        q.append(db.by_col("DISCBD"))
        self.q = np.array(q).T
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        """Compare estimates and diagnostics with stored reference values."""
        reg = HET.BaseGM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w.sparse, step1c=True)
        betas = np.array([[ 55.39707924], [  0.46563046], [ -0.67038326], [  0.41135023]])
        np.testing.assert_array_almost_equal(reg.betas,betas,7)
        u = np.array([ 26.51812895])
        np.testing.assert_array_almost_equal(reg.u[0],u,7)
        ef = np.array([ 31.46604707])
        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
        predy = np.array([ 53.94887405])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 3
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 80.467003])
        np.testing.assert_array_almost_equal(reg.y[0],y,7)
        # First rows of x, yend, q and the stacked z/h matrices; sparse
        # matrices are densified with toarray() for comparison.
        x = np.array([  1.   ,  19.531])
        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
        yend = np.array([ 15.72598])
        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
        q = np.array([ 5.03])
        np.testing.assert_array_almost_equal(reg.q[0],q,7)
        z = np.array([  1.     ,  19.531  ,  15.72598])
        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
        h = np.array([  1.   ,  19.531,   5.03 ])
        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
        i_s = 'Maximum number of iterations reached.'
        np.testing.assert_string_equal(reg.iter_stop,i_s)
        its = 1
        self.assertAlmostEqual(reg.iteration,its,7)
        my = 38.436224469387746
        self.assertAlmostEqual(reg.mean_y,my)
        stdy = 18.466069465206047
        self.assertAlmostEqual(reg.std_y,stdy)
        # Variance-covariance matrix of the estimates.
        vm = np.array([[  8.34637805e+02,  -2.16932259e+01,  -1.33327894e+01,
                  1.65840848e+00],
               [ -2.16932259e+01,   5.97683070e-01,   3.39503523e-01,
                 -3.90111107e-02],
               [ -1.33327894e+01,   3.39503523e-01,   2.19008080e-01,
                 -2.81929695e-02],
               [  1.65840848e+00,  -3.90111107e-02,  -2.81929695e-02,
                  3.15686105e-02]])
        np.testing.assert_array_almost_equal(reg.vm,vm,6)
        # hth attribute (cross-product of the instrument matrix).
        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
               [   704.371999  ,  11686.67338121,   2246.12800625],
               [   139.75      ,   2246.12800625,    498.5851    ]])
        np.testing.assert_array_almost_equal(reg.hth,hth,6)
+        
class TestGMEndogErrorHet(unittest.TestCase):
    """Tests the user-facing GM_Endog_Error_Het wrapper with sparse X.

    Same specification as the Base* endogenous test (CRIME endogenous,
    DISCBD as instrument), but the wrapper adds the constant itself and
    also exposes pr2/std_err/z_stat inference results.
    """

    def setUp(self):
        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        self.X = np.array(X).T
        self.X = sparse.csr_matrix(self.X)
        # Endogenous regressor and its external instrument.
        yd = []
        yd.append(db.by_col("CRIME"))
        self.yd = np.array(yd).T
        q = []
        q.append(db.by_col("DISCBD"))
        self.q = np.array(q).T
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        """Compare estimates, inference and diagnostics with references."""
        reg = HET.GM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w, step1c=True)
        betas = np.array([[ 55.39707924], [  0.46563046], [ -0.67038326], [  0.41135023]])
        np.testing.assert_array_almost_equal(reg.betas,betas,7)
        u = np.array([ 26.51812895])
        np.testing.assert_array_almost_equal(reg.u[0],u,7)
        predy = np.array([ 53.94887405])
        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
        n = 49
        self.assertAlmostEqual(reg.n,n)
        k = 3
        self.assertAlmostEqual(reg.k,k)
        y = np.array([ 80.467003])
        np.testing.assert_array_almost_equal(reg.y[0],y,7)
        # First rows of x, yend, q and the stacked z/h matrices; sparse
        # matrices are densified with toarray() for comparison.
        x = np.array([  1.   ,  19.531])
        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
        yend = np.array([ 15.72598])
        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
        q = np.array([ 5.03])
        np.testing.assert_array_almost_equal(reg.q[0],q,7)
        z = np.array([  1.     ,  19.531  ,  15.72598])
        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
        h = np.array([  1.   ,  19.531,   5.03 ])
        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
        i_s = 'Maximum number of iterations reached.'
        np.testing.assert_string_equal(reg.iter_stop,i_s)
        its = 1
        self.assertAlmostEqual(reg.iteration,its,7)
        my = 38.436224469387746
        self.assertAlmostEqual(reg.mean_y,my)
        stdy = 18.466069465206047
        self.assertAlmostEqual(reg.std_y,stdy)
        # Variance-covariance matrix of the estimates.
        vm = np.array([[  8.34637805e+02,  -2.16932259e+01,  -1.33327894e+01,
                  1.65840848e+00],
               [ -2.16932259e+01,   5.97683070e-01,   3.39503523e-01,
                 -3.90111107e-02],
               [ -1.33327894e+01,   3.39503523e-01,   2.19008080e-01,
                 -2.81929695e-02],
               [  1.65840848e+00,  -3.90111107e-02,  -2.81929695e-02,
                  3.15686105e-02]])
        np.testing.assert_array_almost_equal(reg.vm,vm,6)
        # Fit statistic and inference results.
        pr2 = 0.34648011338954804
        self.assertAlmostEqual(reg.pr2,pr2,7)
        std_err = np.array([ 28.89009873,  0.77309965,  0.46798299,
            0.17767558])
        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
        z_stat = np.array([(1.9175109006819244, 0.055173057472126787), (0.60229035155742305, 0.54698088217644414), (-1.4324949211864271, 0.15200223057569454), (2.3151759776869496, 0.020603303355572443)])
        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
        # hth attribute (cross-product of the instrument matrix).
        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
               [   704.371999  ,  11686.67338121,   2246.12800625],
               [   139.75      ,   2246.12800625,    498.5851    ]])
        np.testing.assert_array_almost_equal(reg.hth,hth,6)
+ 
+class TestBaseGMComboHet(unittest.TestCase):
+    # Regression tests for HET.BaseGM_Combo_Het (base class; caller supplies
+    # the spatially lagged endogenous variable and instruments explicitly).
+    def setUp(self):
+        # y = HOVAL; X = INC + CRIME; row-standardized rook weights on the
+        # columbus sample shapefile.
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        # Only spatial lag
+        # set_endog builds the spatial lag (yd2) and its instruments (q2);
+        # the constant is then prepended and X converted to sparse CSR before
+        # calling the base class directly.
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        reg = HET.BaseGM_Combo_Het(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, step1c=True)
+        betas = np.array([[ 57.7778574 ], [  0.73034922], [ -0.59257362], [ -0.2230231 ], [  0.56636724]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 25.65156033])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 31.87664403])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        predy = np.array([ 54.81544267])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594    ,  24.7142675])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy,7)
+        vm = np.array([[  4.86218274e+02,  -2.77268729e+00,  -1.59987770e+00,
+             -1.01969471e+01,   2.74302006e+00],
+           [ -2.77268729e+00,   1.04680972e-01,   2.51172238e-02,
+              1.95136385e-03,   3.70052723e-03],
+           [ -1.59987770e+00,   2.51172238e-02,   2.15655720e-02,
+              7.65868344e-03,  -7.30173070e-03],
+           [ -1.01969471e+01,   1.95136385e-03,   7.65868344e-03,
+              2.78273684e-01,  -6.89402590e-02],
+           [  2.74302006e+00,   3.70052723e-03,  -7.30173070e-03,
+             -6.89402590e-02,   7.12034037e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        hth = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03,
+              7.24743592e+02,   1.70735413e+03],
+           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04,
+              1.10925200e+04,   2.23848036e+04],
+           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04,
+              2.34796298e+04,   6.70145378e+04],
+           [  7.24743592e+02,   1.10925200e+04,   2.34796298e+04,
+              1.16146226e+04,   2.30304624e+04],
+           [  1.70735413e+03,   2.23848036e+04,   6.70145378e+04,
+              2.30304624e+04,   6.69879858e+04]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+class TestGMComboHet(unittest.TestCase):
+    # Regression tests for HET.GM_Combo_Het (user-facing class; builds the
+    # spatial lag and instruments internally, unlike TestBaseGMComboHet).
+    def setUp(self):
+        # y = HOVAL; X = INC + CRIME as sparse CSR; row-standardized rook
+        # weights on the columbus sample shapefile.
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        # Only spatial lag
+        # Expected values match TestBaseGMComboHet.test_model, plus the
+        # prediction-error attributes (e_pred, predy_e, pr2_e) that only the
+        # user-facing class exposes.
+        reg = HET.GM_Combo_Het(self.y, self.X, w=self.w, step1c=True)
+        betas = np.array([[ 57.7778574 ], [  0.73034922], [ -0.59257362], [ -0.2230231 ], [  0.56636724]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        u = np.array([ 25.65156033])
+        np.testing.assert_array_almost_equal(reg.u[0],u,7)
+        ef = np.array([ 31.87664403])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
+        ep = np.array([ 28.30648145])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],ep,7)
+        pe = np.array([ 52.16052155])
+        np.testing.assert_array_almost_equal(reg.predy_e[0],pe,7)
+        predy = np.array([ 54.81544267])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594    ,  24.7142675])
+        np.testing.assert_array_almost_equal(reg.q[0].toarray()[0],q,7)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        i_s = 'Maximum number of iterations reached.'
+        np.testing.assert_string_equal(reg.iter_stop,i_s)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        stdy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,stdy)
+        vm = np.array([[  4.86218274e+02,  -2.77268729e+00,  -1.59987770e+00,
+             -1.01969471e+01,   2.74302006e+00],
+           [ -2.77268729e+00,   1.04680972e-01,   2.51172238e-02,
+              1.95136385e-03,   3.70052723e-03],
+           [ -1.59987770e+00,   2.51172238e-02,   2.15655720e-02,
+              7.65868344e-03,  -7.30173070e-03],
+           [ -1.01969471e+01,   1.95136385e-03,   7.65868344e-03,
+              2.78273684e-01,  -6.89402590e-02],
+           [  2.74302006e+00,   3.70052723e-03,  -7.30173070e-03,
+             -6.89402590e-02,   7.12034037e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        pr2 = 0.3001582877472412
+        self.assertAlmostEqual(reg.pr2,pr2,7)
+        pr2_e = 0.35613102283621967
+        self.assertAlmostEqual(reg.pr2_e,pr2_e,7)
+        std_err = np.array([ 22.05035768,  0.32354439,  0.14685221,  0.52751653,  0.26683966])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([(2.6202684885795335, 0.00878605635338265), (2.2573385444145524, 0.023986928627746887), (-4.0351698589183433, 5.456281036278686e-05), (-0.42277935292121521, 0.67245625315942159), (2.1225002455741895, 0.033795752094112265)])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        hth = np.array([[  4.90000000e+01,   7.04371999e+02,   1.72131237e+03,
+              7.24743592e+02,   1.70735413e+03],
+           [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04,
+              1.10925200e+04,   2.23848036e+04],
+           [  1.72131237e+03,   2.15575320e+04,   7.39058986e+04,
+              2.34796298e+04,   6.70145378e+04],
+           [  7.24743592e+02,   1.10925200e+04,   2.34796298e+04,
+              1.16146226e+04,   2.30304624e+04],
+           [  1.70735413e+03,   2.23848036e+04,   6.70145378e+04,
+              2.30304624e+04,   6.69879858e+04]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+# Allow running this test module directly from the command line.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_error_sp_hom.py b/pysal/spreg/tests/test_error_sp_hom.py
new file mode 100644
index 0000000..afc730d
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_hom.py
@@ -0,0 +1,315 @@
+'''
+Unittests for spreg.error_sp_hom module
+
+'''
+import unittest
+import pysal
+from pysal.spreg import error_sp_hom as HOM
+import numpy as np
+
+class BaseGM_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.BaseGM_Error_Hom(self.y, self.X, self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        betas = np.array([[ 47.9478524 ], [  0.70633223], [ -0.55595633], [  0.41288558]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([27.466734]),6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 32.37298547]),7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 53.000269]),6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        sig2 = 189.94459439729718
+        self.assertAlmostEqual(reg.sig2,sig2)
+        vm = np.array([[  1.51340717e+02,  -5.29057506e+00,  -1.85654540e+00, -2.39139054e-03], [ -5.29057506e+00,   2.46669610e-01, 5.14259101e-02, 3.19241302e-04], [ -1.85654540e+00,   5.14259101e-02, 3.20510550e-02,  -5.95640240e-05], [ -2.39139054e-03,   3.19241302e-04, -5.95640240e-05,  3.36690159e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02, 1.72131237e+03], [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04], [  1.72131237e+03,   2.15575320e+04, 7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
+class GM_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Error_Hom(self.y, self.X, self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        betas = np.array([[ 47.9478524 ], [  0.70633223], [ -0.55595633], [  0.41288558]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([27.46673388]),6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 32.37298547]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 53.00026912]),6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  1.51340717e+02,  -5.29057506e+00,  -1.85654540e+00, -2.39139054e-03], [ -5.29057506e+00,   2.46669610e-01, 5.14259101e-02, 3.19241302e-04], [ -1.85654540e+00,   5.14259101e-02, 3.20510550e-02,  -5.95640240e-05], [ -2.39139054e-03,   3.19241302e-04, -5.95640240e-05,  3.36690159e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        self.assertAlmostEqual(reg.iteration,1,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.34950977055969729
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 189.94459439729718
+        self.assertAlmostEqual(reg.sig2,sig2)
+        std_err = np.array([ 12.30206149,   0.49665844,   0.17902808, 0.18349119])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[  3.89754616e+00,   9.71723059e-05], [  1.42216900e+00,   1.54977196e-01], [ -3.10541409e+00,   1.90012806e-03], [  2.25016500e+00,   2.44384731e-02]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02, 1.72131237e+03], [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04], [  1.72131237e+03,   2.15575320e+04, 7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
+
+class BaseGM_Endog_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.BaseGM_Endog_Error_Hom(self.y, self.X, self.yd, self.q, self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([ 80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        betas = np.array([[ 55.36575166], [  0.46432416], [ -0.66904404], [  0.43205526]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 26.55390939])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 31.74114306]),7)
+        predy = np.array([ 53.91309361])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        sig2 = 190.59435238060928
+        self.assertAlmostEqual(reg.sig2,sig2)
+        vm = np.array([[  5.52064057e+02,  -1.61264555e+01,  -8.86360735e+00, 1.04251912e+00], [ -1.61264555e+01,   5.44898242e-01, 2.39518645e-01, -1.88092950e-02], [ -8.86360735e+00,   2.39518645e-01, 1.55501840e-01, -2.18638648e-02], [  1.04251912e+00, -1.88092950e-02, -2.18638648e-02, 3.71222222e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        sig2 = 0
+        #self.assertAlmostEqual(reg.sig2,sig2)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ], [   704.371999  ,  11686.67338121,   2246.12800625], [   139.75      ,   2246.12800625,    498.5851]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+class GM_Endog_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Endog_Error_Hom(self.y, self.X, self.yd, self.q, self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([ 80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        betas = np.array([[ 55.36575166], [  0.46432416], [ -0.66904404], [  0.43205526]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 26.55390939])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 31.74114306]),7)
+        predy = np.array([ 53.91309361])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  5.52064057e+02,  -1.61264555e+01,  -8.86360735e+00, 1.04251912e+00], [ -1.61264555e+01,   5.44898242e-01, 2.39518645e-01, -1.88092950e-02], [ -8.86360735e+00,   2.39518645e-01, 1.55501840e-01, -2.18638648e-02], [  1.04251912e+00, -1.88092950e-02, -2.18638648e-02, 3.71222222e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.34647366525657419
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 190.59435238060928
+        self.assertAlmostEqual(reg.sig2,sig2)
+        #std_err
+        std_err = np.array([ 23.49604343,   0.73817223,   0.39433722, 0.19267128])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[ 2.35638617,  0.01845372], [ 0.62901874,  0.52933679], [-1.69662923,  0.08976678], [ 2.24244556,  0.02493259]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+
+
+class BaseGM_Combo_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = HOM.BaseGM_Combo_Hom(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        betas = np.array([[ 10.12541428], [  1.56832263], [  0.15132076], [  0.21033397]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([34.3450723]),7)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 36.6149682]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 46.1219307]),7)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  2.33694742e+02,  -6.66856869e-01,  -5.58304254e+00, 4.85488380e+00], [ -6.66856869e-01,   1.94241504e-01, -5.42327138e-02, 5.37225570e-02], [ -5.58304254e+00,  -5.42327138e-02, 1.63860721e-01, -1.44425498e-01], [  4.85488380e+00, 5.37225570e-02, -1.44425498e-01, 1.78622255e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        z = np.array([  1.       ,  19.531    ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,  18.594])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        sig2 = 232.22680651270042
+        #self.assertAlmostEqual(reg.sig2,sig2)
+        np.testing.assert_allclose(reg.sig2,sig2)
+        hth = np.array([[    49.        ,    704.371999  ,    724.7435916 ], [   704.371999  ,  11686.67338121,  11092.519988  ], [   724.7435916 ,  11092.519988  , 11614.62257048]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+
+class GM_Combo_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Combo_Hom(self.y, self.X, w=self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0],x,7)
+        betas = np.array([[ 10.12541428], [  1.56832263], [  0.15132076], [  0.21033397]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([34.3450723]),7)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 36.6149682]),7)
+        np.testing.assert_array_almost_equal(reg.e_pred[0],np.array([ 32.90372983]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 46.1219307]),7)
+        np.testing.assert_array_almost_equal(reg.predy_e[0],np.array([47.56327317]),7)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        z = np.array([  1.       ,  19.531    ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0],z,7)
+        h = np.array([  1.   ,  19.531,  18.594])
+        np.testing.assert_array_almost_equal(reg.h[0],h,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        self.assertAlmostEqual(reg.iteration,1,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.28379825632694394
+        self.assertAlmostEqual(reg.pr2,pr2)
+        pr2_e = 0.25082892555141506
+        self.assertAlmostEqual(reg.pr2_e,pr2_e)
+        sig2 = 232.22680651270042
+        #self.assertAlmostEqual(reg.sig2, sig2)
+        np.testing.assert_allclose(reg.sig2, sig2)
+        std_err = np.array([ 15.28707761,   0.44072838,   0.40479714, 0.42263726])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[  6.62351206e-01,   5.07746167e-01], [  3.55847888e+00,   3.73008780e-04], [  3.73818749e-01,   7.08539170e-01], [  4.97670189e-01,   6.18716523e-01]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        vm = np.array([[  2.33694742e+02,  -6.66856869e-01,  -5.58304254e+00, 4.85488380e+00], [ -6.66856869e-01,   1.94241504e-01, -5.42327138e-02, 5.37225570e-02], [ -5.58304254e+00,  -5.42327138e-02, 1.63860721e-01, -1.44425498e-01], [  4.85488380e+00, 5.37225570e-02, -1.44425498e-01, 1.78622255e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+
+suite = unittest.TestSuite()
+test_classes = [BaseGM_Error_Hom_Tester, GM_Error_Hom_Tester,\
+        BaseGM_Endog_Error_Hom_Tester, GM_Endog_Error_Hom_Tester, \
+        BaseGM_Combo_Hom_Tester, GM_Combo_Hom_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
diff --git a/pysal/spreg/tests/test_error_sp_hom_regimes.py b/pysal/spreg/tests/test_error_sp_hom_regimes.py
new file mode 100644
index 0000000..8c8e650
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_hom_regimes.py
@@ -0,0 +1,307 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.spreg import error_sp_hom_regimes as SP
+from pysal.spreg.error_sp_hom import GM_Error_Hom, GM_Endog_Error_Hom, GM_Combo_Hom
+
+class TestGM_Error_Hom_Regimes(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("HOVAL"))
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        X2 = []
+        X2.append(db.by_col("INC"))
+        self.X2 = np.array(X2).T
+        yd = []
+        yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.r_var = 'NSA'
+        self.regimes = db.by_col(self.r_var)
+        #Artificial:
+        n = 256
+        self.n2 = n/2
+        self.x_a1 = np.random.uniform(-10,10,(n,1))
+        self.x_a2 = np.random.uniform(1,5,(n,1))
+        self.q_a = self.x_a2 + np.random.normal(0,1,(n,1))
+        self.x_a = np.hstack((self.x_a1,self.x_a2))
+        self.y_a = np.dot(np.hstack((np.ones((n,1)),self.x_a)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        self.w_a = pysal.lat2W(latt,latt)
+        self.w_a.transform='r'
+        self.regi_a = [0]*(n/2) + [1]*(n/2)
+        self.w_a1 = pysal.lat2W(latt/2,latt)
+        self.w_a1.transform='r'
+        
+    def test_model(self):
+        reg = SP.GM_Error_Hom_Regimes(self.y, self.X, self.regimes, self.w, A1='het')
+        betas = np.array([[ 62.95986466],
+       [ -0.15660795],
+       [ -1.49054832],
+       [ 60.98577615],
+       [ -0.3358993 ],
+       [ -0.82129289],
+       [  0.54033921]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([-2.19031456])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        predy = np.array([ 17.91629456])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,6)
+        k = 6
+        self.assertAlmostEqual(reg.k,k,6)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,6)
+        x = np.array([[  0.      ,   0.      ,   0.      ,   1.      ,  80.467003,  19.531   ]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
+        e = np.array([ 2.72131648])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 49.16245801,  -0.12493165,  -1.89294614,   5.71968257,
+        -0.0571525 ,   0.05745855,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
+        sig2 = 96.96108341267626
+        self.assertAlmostEqual(reg.sig2,sig2,5)
+        pr2 = 0.5515791216023577
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 7.01159454,  0.20701411,  0.56905515,  7.90537942,  0.10268949,
+        0.56660879,  0.15659504])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        chow_r = np.array([[ 0.03888544,  0.84367579],
+       [ 0.61613446,  0.43248738],
+       [ 0.72632441,  0.39407719]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 0.92133276766189676
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_regi_error(self):
+        #Artificial:
+        model = SP.GM_Error_Hom_Regimes(self.y_a, self.x_a, self.regi_a, w=self.w_a, regime_err_sep=True, A1='het')
+        model1 = GM_Error_Hom(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a[0:(self.n2)], w=self.w_a1, A1='het')
+        model2 = GM_Error_Hom(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a[(self.n2):], w=self.w_a1, A1='het')
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+        #Columbus:
+        reg = SP.GM_Error_Hom_Regimes(self.y, self.X2, self.regimes, self.w, regime_err_sep=True, A1='het')
+        betas = np.array([[ 60.66668194],
+       [ -1.72708492],
+       [  0.62170311],
+       [ 61.4526885 ],
+       [ -1.90700858],
+       [  0.1102755 ]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        vm = np.array([ 45.57956967,  -1.65365774,   0.        ,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
+        u = np.array([-8.48092392])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        predy = np.array([ 24.20690392])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        e = np.array([-8.33982604])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
+        chow_r = np.array([[ 0.0050892 ,  0.94312823],
+       [ 0.05746619,  0.81054651],
+       [ 1.65677138,  0.19803981]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 1.7914221673031792
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_endog(self):
+        reg = SP.GM_Endog_Error_Hom_Regimes(self.y, self.X2, self.yd, self.q, self.regimes, self.w, A1='het')
+        betas = np.array([[ 77.26679984],
+       [  4.45992905],
+       [ 78.59534391],
+       [  0.41432319],
+       [ -3.20196286],
+       [ -1.13672283],
+       [  0.22178164]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 20.50716917])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        e = np.array([ 25.22635318])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
+        predy = np.array([-4.78118917])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 6
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,6)
+        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
+        yend = np.array([[  0.      ,  80.467003]])
+        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,6)
+        z = np.array([[  0.      ,   0.      ,   1.      ,  19.531   ,   0.      ,
+         80.467003]])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,6)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 403.76852704,   69.06920553,   19.8388512 ,    3.62501395,
+        -40.30472224,   -1.6601927 ,   -1.64319352])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,5)
+        pr2 = 0.19776512679498906
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 644.23810259214
+        self.assertAlmostEqual(reg.sig2,sig2,5)
+        std_err = np.array([ 20.09399231,   7.03617703,  23.64968032,   2.176846  ,
+         3.40352278,   0.92377997,   0.24462006])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        chow_r = np.array([[ 0.00191145,  0.96512749],
+       [ 0.31031517,  0.57748685],
+       [ 0.34994619,  0.55414359]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 1.248410480025556
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_endog_regi_error(self):
+        #Columbus:
+        reg = SP.GM_Endog_Error_Hom_Regimes(self.y, self.X2, self.yd, self.q, self.regimes, self.w, regime_err_sep=True, A1='het')
+        betas = np.array([[  7.92747424e+01],
+       [  5.78086230e+00],
+       [ -3.83173581e+00],
+       [  2.14725610e-01],
+       [  8.26255251e+01],
+       [  5.48294187e-01],
+       [ -1.28432891e+00],
+       [  2.98658172e-02]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        vm = np.array([ 867.50930457,  161.04430783,  -92.35637083,   -1.13838767,
+          0.        ,    0.        ,    0.        ,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
+        u = np.array([ 25.73781918])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        predy = np.array([-10.01183918])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        e = np.array([26.41176711])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
+        chow_r = np.array([[ 0.00909777,  0.92401124],
+       [ 0.24034941,  0.62395386],
+       [ 0.24322564,  0.62188603],
+       [ 0.32572159,  0.5681893 ]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 1.4485058522307526
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+        #Artificial:
+        model = SP.GM_Endog_Error_Hom_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True, A1='het')
+        model1 = GM_Endog_Error_Hom(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1, A1='het')
+        model2 = GM_Endog_Error_Hom(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1, A1='het')
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+
+    def test_model_combo(self):
+        reg = SP.GM_Combo_Hom_Regimes(self.y, self.X2, self.regimes, self.yd, self.q, w=self.w, A1='het')
+        betas = np.array([[ 36.93726782],
+       [ -0.829475  ],
+       [ 30.86675168],
+       [ -0.72375344],
+       [ -0.30190094],
+       [ -0.22132895],
+       [  0.64190215],
+       [ -0.07314671]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 0.94039246])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        e_filtered = np.array([ 0.74211331])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,5)
+        predy_e = np.array([ 18.68732105])
+        np.testing.assert_array_almost_equal(reg.predy_e[0],predy_e,6)
+        predy = np.array([ 14.78558754])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 7
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,6)
+        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,6)
+        yend = np.array([[  0.       ,  80.467003 ,  24.7142675]])
+        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,6)
+        z = np.array([[  0.       ,   0.       ,   1.       ,  19.531    ,   0.       ,
+         80.467003 ,  24.7142675]])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,6)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 111.54419614,   -0.23476709,   83.37295278,   -1.74452409,
+         -1.60256796,   -0.13151396,   -1.43857915,    2.19420848])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        sig2 = 95.57694234438294
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.6504148883591536
+        self.assertAlmostEqual(reg.pr2,pr2)
+        pr2_e = 0.5271368969923579
+        self.assertAlmostEqual(reg.pr2_e,pr2_e)
+        std_err = np.array([ 10.56144858,   0.93986958,  11.52977369,   0.61000358,
+         0.44419535,   0.16191882,   0.1630835 ,   0.41107528])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,5)
+        chow_r = np.array([[ 0.47406771,  0.49112176],
+       [ 0.00879838,  0.92526827],
+       [ 0.02943577,  0.86377672]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 0.59098559257602923
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_combo_regi_error(self):
+        #Columbus:
+        reg = SP.GM_Combo_Hom_Regimes(self.y, self.X2, self.regimes, self.yd, self.q, w=self.w, regime_lag_sep=True, regime_err_sep=True, A1='het')
+        betas = np.array([[  4.20115146e+01],
+       [ -1.39171512e-01],
+       [ -6.53001838e-01],
+       [  5.47370644e-01],
+       [  2.61860326e-01],
+       [  3.42156975e+01],
+       [ -1.52360889e-01],
+       [ -4.91752171e-01],
+       [  6.57331733e-01],
+       [ -2.68716241e-02]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        vm = np.array([ 154.23356187,    2.99104716,   -3.29036767,   -2.473113  ,
+          1.65247551,    0.        ,    0.        ,    0.        ,
+          0.        ,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,6)
+        u = np.array([ 7.81039418])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        predy = np.array([ 7.91558582])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        e = np.array([ 7.60819283])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,6)
+        chow_r = np.array([[  9.59590706e-02,   7.56733881e-01],
+       [  6.53130455e-05,   9.93551847e-01],
+       [  4.65270134e-02,   8.29220655e-01],
+       [  7.68939379e-02,   7.81551631e-01],
+       [  5.04560098e-01,   4.77503278e-01]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,6)
+        chow_j = 0.74134991257940286
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+        #Artificial:
+        model = SP.GM_Combo_Hom_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True, regime_lag_sep=True, A1='het')
+        model1 = GM_Combo_Hom(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1, A1='het')
+        model2 = GM_Combo_Hom(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1, A1='het')
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_error_sp_hom_sparse.py b/pysal/spreg/tests/test_error_sp_hom_sparse.py
new file mode 100644
index 0000000..23fafce
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_hom_sparse.py
@@ -0,0 +1,320 @@
+'''
+Unittests for spreg.error_sp_hom module
+
+'''
+import unittest
+import pysal
+from pysal.spreg import error_sp_hom as HOM
+from scipy import sparse
+import numpy as np
+
+class BaseGM_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.BaseGM_Error_Hom(self.y, self.X, self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        betas = np.array([[ 47.9478524 ], [  0.70633223], [ -0.55595633], [  0.41288558]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([27.466734]),6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 32.37298547]),7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 53.000269]),6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        sig2 = 189.94459439729718
+        self.assertAlmostEqual(reg.sig2,sig2)
+        vm = np.array([[  1.51340717e+02,  -5.29057506e+00,  -1.85654540e+00, -2.39139054e-03], [ -5.29057506e+00,   2.46669610e-01, 5.14259101e-02, 3.19241302e-04], [ -1.85654540e+00,   5.14259101e-02, 3.20510550e-02,  -5.95640240e-05], [ -2.39139054e-03,   3.19241302e-04, -5.95640240e-05,  3.36690159e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02, 1.72131237e+03], [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04], [  1.72131237e+03,   2.15575320e+04, 7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
+class GM_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Error_Hom(self.y, self.X, self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        betas = np.array([[ 47.9478524 ], [  0.70633223], [ -0.55595633], [  0.41288558]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([27.46673388]),6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 32.37298547]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 53.00026912]),6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  1.51340717e+02,  -5.29057506e+00,  -1.85654540e+00, -2.39139054e-03], [ -5.29057506e+00,   2.46669610e-01, 5.14259101e-02, 3.19241302e-04], [ -1.85654540e+00,   5.14259101e-02, 3.20510550e-02,  -5.95640240e-05], [ -2.39139054e-03,   3.19241302e-04, -5.95640240e-05,  3.36690159e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        self.assertAlmostEqual(reg.iteration,1,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.34950977055969729
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 189.94459439729718
+        self.assertAlmostEqual(reg.sig2,sig2)
+        std_err = np.array([ 12.30206149,   0.49665844,   0.17902808, 0.18349119])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[  3.89754616e+00,   9.71723059e-05], [  1.42216900e+00,   1.54977196e-01], [ -3.10541409e+00,   1.90012806e-03], [  2.25016500e+00,   2.44384731e-02]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        xtx = np.array([[  4.90000000e+01,   7.04371999e+02, 1.72131237e+03], [  7.04371999e+02,   1.16866734e+04,   2.15575320e+04], [  1.72131237e+03,   2.15575320e+04, 7.39058986e+04]])
+        np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
+
+
+class BaseGM_Endog_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.BaseGM_Endog_Error_Hom(self.y, self.X, self.yd, self.q, self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([ 80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        betas = np.array([[ 55.36575166], [  0.46432416], [ -0.66904404], [  0.43205526]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 26.55390939])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 31.74114306]),7)
+        predy = np.array([ 53.91309361])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        sig2 = 190.59435238060928
+        self.assertAlmostEqual(reg.sig2,sig2)
+        vm = np.array([[  5.52064057e+02,  -1.61264555e+01,  -8.86360735e+00, 1.04251912e+00], [ -1.61264555e+01,   5.44898242e-01, 2.39518645e-01, -1.88092950e-02], [ -8.86360735e+00,   2.39518645e-01, 1.55501840e-01, -2.18638648e-02], [  1.04251912e+00, -1.88092950e-02, -2.18638648e-02, 3.71222222e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        sig2 = 0
+        #self.assertAlmostEqual(reg.sig2,sig2)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ], [   704.371999  ,  11686.67338121,   2246.12800625], [   139.75      ,   2246.12800625,    498.5851]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+class GM_Endog_Error_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Endog_Error_Hom(self.y, self.X, self.yd, self.q, self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([ 80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        h = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
+        yend = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 5.03])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        betas = np.array([[ 55.36575166], [  0.46432416], [ -0.66904404], [  0.43205526]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        u = np.array([ 26.55390939])
+        np.testing.assert_array_almost_equal(reg.u[0],u,6)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 31.74114306]),7)
+        predy = np.array([ 53.91309361])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  5.52064057e+02,  -1.61264555e+01,  -8.86360735e+00, 1.04251912e+00], [ -1.61264555e+01,   5.44898242e-01, 2.39518645e-01, -1.88092950e-02], [ -8.86360735e+00,   2.39518645e-01, 1.55501840e-01, -2.18638648e-02], [  1.04251912e+00, -1.88092950e-02, -2.18638648e-02, 3.71222222e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.34647366525657419
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 190.59435238060928
+        self.assertAlmostEqual(reg.sig2,sig2)
+        #std_err
+        std_err = np.array([ 23.49604343,   0.73817223,   0.39433722, 0.19267128])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[ 2.35638617,  0.01845372], [ 0.62901874,  0.52933679], [-1.69662923,  0.08976678], [ 2.24244556,  0.02493259]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+
+
+class BaseGM_Combo_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        reg = HOM.BaseGM_Combo_Hom(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        betas = np.array([[ 10.12541428], [  1.56832263], [  0.15132076], [  0.21033397]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([34.3450723]),7)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 36.6149682]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 46.1219307]),7)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        vm = np.array([[  2.33694742e+02,  -6.66856869e-01,  -5.58304254e+00, 4.85488380e+00], [ -6.66856869e-01,   1.94241504e-01, -5.42327138e-02, 5.37225570e-02], [ -5.58304254e+00,  -5.42327138e-02, 1.63860721e-01, -1.44425498e-01], [  4.85488380e+00, 5.37225570e-02, -1.44425498e-01, 1.78622255e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        z = np.array([  1.       ,  19.531    ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        h = np.array([  1.   ,  19.531,  18.594])
+        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594])
+        np.testing.assert_array_almost_equal(reg.q[0],q,7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        its = 1
+        self.assertAlmostEqual(reg.iteration,its,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        sig2 = 232.22680644168395
+        self.assertAlmostEqual(reg.sig2,sig2, places=6)
+        hth = np.array([[    49.        ,    704.371999  ,    724.7435916 ], [   704.371999  ,  11686.67338121,  11092.519988  ], [   724.7435916 ,  11092.519988  , 11614.62257048]])
+        np.testing.assert_array_almost_equal(reg.hth,hth,4)
+
+
+class GM_Combo_Hom_Tester(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        reg = HOM.GM_Combo_Hom(self.y, self.X, w=self.w, A1='hom_sc')
+        np.testing.assert_array_almost_equal(reg.y[0],np.array([80.467003]),7)
+        x = np.array([  1.     ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
+        betas = np.array([[ 10.12541428], [  1.56832263], [  0.15132076], [  0.21033397]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,7)
+        np.testing.assert_array_almost_equal(reg.u[0],np.array([34.3450723]),7)
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],np.array([ 36.6149682]),7)
+        np.testing.assert_array_almost_equal(reg.e_pred[0],np.array([ 32.90372983]),7)
+        np.testing.assert_array_almost_equal(reg.predy[0],np.array([ 46.1219307]),7)
+        np.testing.assert_array_almost_equal(reg.predy_e[0],np.array([47.56327317]),7)
+        self.assertAlmostEquals(reg.n,49,7)
+        self.assertAlmostEquals(reg.k,3,7)
+        z = np.array([  1.       ,  19.531    ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
+        h = np.array([  1.   ,  19.531,  18.594])
+        np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
+        yend = np.array([ 35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
+        q = np.array([ 18.594])
+        np.testing.assert_array_almost_equal(reg.q[0].toarray()[0],q,7)
+        i_s = 'Maximum number of iterations reached.'
+        self.assertAlmostEqual(reg.iter_stop,i_s,7)
+        self.assertAlmostEqual(reg.iteration,1,7)
+        my = 38.436224469387746
+        self.assertAlmostEqual(reg.mean_y,my)
+        std_y = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,std_y)
+        pr2 = 0.28379825632694394
+        self.assertAlmostEqual(reg.pr2,pr2)
+        pr2_e = 0.25082892555141506
+        self.assertAlmostEqual(reg.pr2_e,pr2_e)
+        sig2 = 232.22680644168395
+        self.assertAlmostEqual(reg.sig2,sig2, places=6)
+        std_err = np.array([ 15.28707761,   0.44072838,   0.40479714, 0.42263726])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
+        z_stat = np.array([[  6.62351206e-01,   5.07746167e-01], [  3.55847888e+00,   3.73008780e-04], [  3.73818749e-01,   7.08539170e-01], [  4.97670189e-01,   6.18716523e-01]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
+        vm = np.array([[  2.33694742e+02,  -6.66856869e-01,  -5.58304254e+00, 4.85488380e+00], [ -6.66856869e-01,   1.94241504e-01, -5.42327138e-02, 5.37225570e-02], [ -5.58304254e+00,  -5.42327138e-02, 1.63860721e-01, -1.44425498e-01], [  4.85488380e+00, 5.37225570e-02, -1.44425498e-01, 1.78622255e-01]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+
+suite = unittest.TestSuite()
+test_classes = [BaseGM_Error_Hom_Tester, GM_Error_Hom_Tester,\
+        BaseGM_Endog_Error_Hom_Tester, GM_Endog_Error_Hom_Tester, \
+        BaseGM_Combo_Hom_Tester, GM_Combo_Hom_Tester]
+for i in test_classes:
+    a = unittest.TestLoader().loadTestsFromTestCase(i)
+    suite.addTest(a)
+
+if __name__ == '__main__':
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
diff --git a/pysal/spreg/tests/test_error_sp_regimes.py b/pysal/spreg/tests/test_error_sp_regimes.py
new file mode 100644
index 0000000..6f3c15a
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_regimes.py
@@ -0,0 +1,305 @@
+import unittest
+import scipy
+import pysal
+import numpy as np
+from pysal.spreg import error_sp_regimes as SP
+from pysal.spreg.error_sp import GM_Error, GM_Endog_Error, GM_Combo
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestGM_Error_Regimes(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("HOVAL"))
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.w = pysal.queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.r_var = 'NSA'
+        self.regimes = db.by_col(self.r_var)
+        X1 = []
+        X1.append(db.by_col("INC"))
+        self.X1 = np.array(X1).T
+        yd = []
+        yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        #Artficial:
+        n = 256
+        self.n2 = n/2
+        self.x_a1 = np.random.uniform(-10,10,(n,1))
+        self.x_a2 = np.random.uniform(1,5,(n,1))
+        self.q_a = self.x_a2 + np.random.normal(0,1,(n,1))
+        self.x_a = np.hstack((self.x_a1,self.x_a2))
+        self.y_a = np.dot(np.hstack((np.ones((n,1)),self.x_a)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        self.w_a = pysal.lat2W(latt,latt)
+        self.w_a.transform='r'
+        self.regi_a = [0]*(n/2) + [1]*(n/2)
+        self.w_a1 = pysal.lat2W(latt/2,latt)
+        self.w_a1.transform='r'
+        
+    def test_model(self):
+        reg = SP.GM_Error_Regimes(self.y, self.X, self.regimes, self.w)
+        betas = np.array([[ 63.3443073 ],
+       [ -0.15468   ],
+       [ -1.52186509],
+       [ 61.40071412],
+       [ -0.33550084],
+       [ -0.85076108],
+       [  0.38671608]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([-2.06177251])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 17.78775251])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 6
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([[  0.      ,   0.      ,   0.      ,   1.      ,  80.467003,  19.531   ]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,4)
+        e = np.array([ 1.40747232])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 50.55875289,  -0.14444487,  -2.05735489,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        sig2 = 102.13050615267227
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.5525102200608539
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 7.11046784,  0.21879293,  0.58477864,  7.50596504,  0.10800686,
+        0.57365981])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        chow_r = np.array([[ 0.03533785,  0.85088948],
+       [ 0.54918491,  0.45865093],
+       [ 0.67115641,  0.41264872]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 0.81985446000130979
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+    """
+    def test_model_regi_error(self):
+        #Columbus:
+        reg = SP.GM_Error_Regimes(self.y, self.X, self.regimes, self.w, regime_err_sep=True)
+        betas = np.array([[ 60.45730439],
+       [ -0.17732134],
+       [ -1.30936328],
+       [  0.51314713],
+       [ 66.5487126 ],
+       [ -0.31845995],
+       [ -1.29047149],
+       [  0.08092997]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        vm = np.array([ 39.33656288,  -0.08420799,  -1.50350999,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        u = np.array([ 0.00698341])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 15.71899659])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        e = np.array([ 0.53685671])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        chow_r = np.array([[  3.63674458e-01,   5.46472584e-01],
+       [  4.29607250e-01,   5.12181727e-01],
+       [  5.44739543e-04,   9.81379339e-01]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 0.70119418251625387
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j,4)
+        #Artficial:
+        model = SP.GM_Error_Regimes(self.y_a, self.x_a, self.regi_a, w=self.w_a, regime_err_sep=True)
+        model1 = GM_Error(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a[0:(self.n2)], w=self.w_a1)
+        model2 = GM_Error(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a[(self.n2):], w=self.w_a1)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 4)
+    """
+    def test_model_endog(self):
+        reg = SP.GM_Endog_Error_Regimes(self.y, self.X1, self.yd, self.q, self.regimes, self.w)
+        betas = np.array([[ 77.48385551,   4.52986622,  78.93209405,   0.42186261,
+         -3.23823854,  -1.1475775 ,   0.20222108]])
+        np.testing.assert_array_almost_equal(reg.betas.T,betas,4)
+        u = np.array([ 20.89660904])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e = np.array([ 25.21818724])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([-5.17062904])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 6
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,4)
+        yend = np.array([[  0.      ,  80.467003]])
+        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,4)
+        z = np.array([[  0.      ,   0.      ,   1.      ,  19.531   ,   0.      ,
+         80.467003]])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,4)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 390.88250241,   52.25924084,    0.        ,    0.        ,
+        -32.64274729,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        pr2 = 0.19623994206233333
+        self.assertAlmostEqual(reg.pr2,pr2,4)
+        sig2 = 649.4011
+        self.assertAlmostEqual(round(reg.sig2,4),round(sig2,4),4)
+        std_err = np.array([ 19.77074866,   6.07667394,  24.32254786,   2.17776972,
+         2.97078606,   0.94392418])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        chow_r = np.array([[ 0.0021348 ,  0.96314775],
+       [ 0.40499741,  0.5245196 ],
+       [ 0.4498365 ,  0.50241261]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 1.2885590185243503
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_endog_regi_error(self):
+        #Columbus:
+        reg = SP.GM_Endog_Error_Regimes(self.y, self.X1, self.yd, self.q, self.regimes, self.w, regime_err_sep=True)
+        betas = np.array([[  7.91729500e+01],
+       [  5.80693176e+00],
+       [ -3.84036576e+00],
+       [  1.46462983e-01],
+       [  8.24723791e+01],
+       [  5.68908920e-01],
+       [ -1.28824699e+00],
+       [  6.70387351e-02]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        vm = np.array([ 791.86679123,  140.12967794,  -81.37581255,    0.        ,
+          0.        ,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        u = np.array([ 25.80361497])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([-10.07763497])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        e = np.array([ 27.32251813])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        chow_r = np.array([[ 0.00926459,  0.92331985],
+       [ 0.26102777,  0.60941494],
+       [ 0.26664581,  0.60559072]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 1.1184631131987004
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+        #Artficial:
+        model = SP.GM_Endog_Error_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True)
+        model1 = GM_Endog_Error(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1)
+        model2 = GM_Endog_Error(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 4)
+
+    def test_model_combo(self):
+        reg = SP.GM_Combo_Regimes(self.y, self.X1, self.regimes, self.yd, self.q, w=self.w)
+        predy_e = np.array([ 18.82774339])
+        np.testing.assert_array_almost_equal(reg.predy_e[0],predy_e,4)
+        betas = np.array([[ 36.44798052],
+       [ -0.7974482 ],
+       [ 30.53782661],
+       [ -0.72602806],
+       [ -0.30953121],
+       [ -0.21736652],
+       [  0.64801059],
+       [ -0.16601265]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 0.84393304])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e_filtered = np.array([ 0.4040027])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,4)
+        predy = np.array([ 14.88204696])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 7
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 15.72598])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,4)
+        yend = np.array([[  0.       ,  80.467003 ,  24.7142675]])
+        np.testing.assert_array_almost_equal(reg.yend[0].toarray(),yend,4)
+        z = np.array([[  0.       ,   0.       ,   1.       ,  19.531    ,   0.       ,
+         80.467003 ,  24.7142675]])
+        np.testing.assert_array_almost_equal(reg.z[0].toarray(),z,4)
+        my = 35.128823897959187
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 16.732092091229699
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 109.23549239,   -0.19754121,   84.29574673,   -1.99317178,
+         -1.60123994,   -0.1252719 ,   -1.3930344 ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        sig2 = 94.98610921110007
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.6493586702255537
+        self.assertAlmostEqual(reg.pr2,pr2)
+        pr2_e = 0.5255332447240576
+        self.assertAlmostEqual(reg.pr2_e,pr2_e)
+        std_err = np.array([ 10.45157846,   0.93942923,  11.38484969,   0.60774708,
+         0.44461334,   0.15871227,   0.15738141])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        chow_r = np.array([[ 0.49716076,  0.48075032],
+       [ 0.00405377,  0.94923363],
+       [ 0.03866684,  0.84411016]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 0.64531386285872072
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+
+    def test_model_combo_regi_error(self):
+        #Columbus:
+        reg = SP.GM_Combo_Regimes(self.y, self.X1, self.regimes, self.yd, self.q, w=self.w, regime_lag_sep=True, regime_err_sep=True)
+        betas = np.array([[ 42.01035248],
+       [ -0.13938772],
+       [ -0.6528306 ],
+       [  0.54737621],
+       [  0.2684419 ],
+       [ 34.02473255],
+       [ -0.14920259],
+       [ -0.48972903],
+       [  0.65883658],
+       [ -0.17174845]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        vm = np.array([ 153.58614432,    2.96302131,   -3.26211855,   -2.46914703,
+          0.        ,    0.        ,    0.        ,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        u = np.array([ 7.73968703])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 7.98629297])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        e = np.array([ 6.45052714])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        chow_r = np.array([[  1.00886404e-01,   7.50768497e-01],
+       [  3.61843271e-05,   9.95200481e-01],
+       [  4.69585772e-02,   8.28442711e-01],
+       [  8.13275259e-02,   7.75506385e-01]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 0.28479988992843119
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j)
+        #Artficial:
+        model = SP.GM_Combo_Regimes(self.y_a, self.x_a1, yend=self.x_a2, q=self.q_a, regimes=self.regi_a, w=self.w_a, regime_err_sep=True, regime_lag_sep=True)
+        model1 = GM_Combo(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a1[0:(self.n2)], yend=self.x_a2[0:(self.n2)], q=self.q_a[0:(self.n2)], w=self.w_a1)
+        model2 = GM_Combo(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a1[(self.n2):], yend=self.x_a2[(self.n2):], q=self.q_a[(self.n2):], w=self.w_a1)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+
+if __name__ == '__main__':
+    unittest.main()
+
+
diff --git a/pysal/spreg/tests/test_error_sp_sparse.py b/pysal/spreg/tests/test_error_sp_sparse.py
new file mode 100644
index 0000000..1d38814
--- /dev/null
+++ b/pysal/spreg/tests/test_error_sp_sparse.py
@@ -0,0 +1,332 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.spreg import error_sp as SP
+import scipy
+from scipy import sparse
+
+class TestBaseGMError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.BaseGM_Error(self.y, self.X, self.w.sparse)
+        betas = np.array([[ 47.94371455], [  0.70598088], [ -0.55571746], [  0.37230161]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 27.4739775])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        e = np.array([ 31.89620319])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[  1.51884943e+02,  -5.37622793e+00,  -1.86970286e+00], [ -5.37622793e+00,   2.48972661e-01,   5.26564244e-02], [ -1.86970286e+00,   5.26564244e-02, 3.18930650e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 191.73716465732355
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+
+class TestGMError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.GM_Error(self.y, self.X, self.w)
+        betas = np.array([[ 47.94371455], [  0.70598088], [ -0.55571746], [  0.37230161]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 27.4739775])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        e = np.array([ 31.89620319])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 52.9930255])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[  1.51884943e+02,  -5.37622793e+00,  -1.86970286e+00], [ -5.37622793e+00,   2.48972661e-01,   5.26564244e-02], [ -1.86970286e+00,   5.26564244e-02, 3.18930650e-02]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 191.73716465732355
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.3495097406012179
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 12.32416094,   0.4989716 ,   0.1785863 ])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[  3.89022140e+00,   1.00152805e-04], [  1.41487186e+00,   1.57106070e-01], [ -3.11175868e+00,   1.85976455e-03]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestBaseGMEndogError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.BaseGM_Endog_Error(self.y, self.X, self.yd, self.q, self.w.sparse)
+        betas = np.array([[ 55.36095292], [  0.46411479], [ -0.66883535], [  0.38989939]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 26.55951566])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e = np.array([ 31.23925425])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 53.9074875])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        yend = np.array([  15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        #std_y
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        #vm
+        vm = np.array([[ 529.15840986,  -15.78336736,   -8.38021053],
+       [ -15.78336736,    0.54023504,    0.23112032],
+       [  -8.38021053,    0.23112032,    0.14497738]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 192.5002
+        self.assertAlmostEqual(round(reg.sig2,4),round(sig2,4),4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestGMEndogError(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        yd = []
+        yd.append(db.by_col("CRIME"))
+        self.yd = np.array(yd).T
+        q = []
+        q.append(db.by_col("DISCBD"))
+        self.q = np.array(q).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = SP.GM_Endog_Error(self.y, self.X, self.yd, self.q, self.w)
+        betas = np.array([[ 55.36095292], [  0.46411479], [ -0.66883535], [  0.38989939]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 26.55951566])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e = np.array([ 31.23925425])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        predy = np.array([ 53.9074875])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 3
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        yend = np.array([  15.72598])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[ 529.15840986,  -15.78336736,   -8.38021053],
+       [ -15.78336736,    0.54023504,    0.23112032],
+       [  -8.38021053,    0.23112032,    0.14497738]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        pr2 = 0.346472557570858
+        self.assertAlmostEqual(reg.pr2,pr2)
+        sig2 = 192.5002
+        self.assertAlmostEqual(round(reg.sig2,4),round(sig2,4),4)
+        std_err = np.array([ 23.003401  ,   0.73500657,   0.38075777])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[ 2.40664208,  0.01609994], [ 0.63144305,  0.52775088], [-1.75659016,  0.07898769]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestBaseGMCombo(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        # Only spatial lag
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        reg = SP.BaseGM_Combo(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse)
+        betas = np.array([[ 57.61123461],[  0.73441314], [ -0.59459416], [ -0.21762921], [  0.54732051]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 25.57932637])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e_filtered = np.array([ 31.65374945])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,4)
+        predy = np.array([ 54.88767663])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        yend = np.array([  35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[ 522.43841148,   -6.07256915,   -1.91429117,   -8.97133162],
+       [  -6.07256915,    0.23801287,    0.0470161 ,    0.02809628],
+       [  -1.91429117,    0.0470161 ,    0.03209242,    0.00314973],
+       [  -8.97133162,    0.02809628,    0.00314973,    0.21575363]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 181.78650186468832
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+
+ at unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+"Maximum Likelihood requires SciPy version 11 or newer.")
+class TestGMCombo(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = sparse.csr_matrix(self.X)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+    def test_model(self):
+        # Only spatial lag
+        reg = SP.GM_Combo(self.y, self.X, w=self.w)
+        e_reduced = np.array([ 28.18617481])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],e_reduced,4)
+        predy_e = np.array([ 52.28082782])
+        np.testing.assert_array_almost_equal(reg.predy_e[0],predy_e,4)
+        betas = np.array([[ 57.61123515],[  0.73441313], [ -0.59459416], [ -0.21762921], [  0.54732051]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 25.57932637])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        e_filtered = np.array([ 31.65374945])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e_filtered,4)
+        predy = np.array([ 54.88767685])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 49
+        self.assertAlmostEqual(reg.n,n)
+        k = 4
+        self.assertAlmostEqual(reg.k,k)
+        y = np.array([ 80.467003])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0],x,4)
+        yend = np.array([  35.4585005])
+        np.testing.assert_array_almost_equal(reg.yend[0],yend,4)
+        z = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005])
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0],z,4)
+        my = 38.43622446938776
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 18.466069465206047
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([[ 522.43841148,   -6.07256915,   -1.91429117,   -8.97133162],
+       [  -6.07256915,    0.23801287,    0.0470161 ,    0.02809628],
+       [  -1.91429117,    0.0470161 ,    0.03209242,    0.00314973],
+       [  -8.97133162,    0.02809628,    0.00314973,    0.21575363]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,4)
+        sig2 = 181.78650186468832
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.3018280166937799
+        self.assertAlmostEqual(reg.pr2,pr2,4)
+        pr2_e = 0.3561355587000738
+        self.assertAlmostEqual(reg.pr2_e,pr2_e,4)
+        std_err = np.array([ 22.85692222,  0.48786559,  0.17914356,  0.46449318])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = np.array([[  2.52051597e+00,   1.17182922e-02], [  1.50535954e+00,   1.32231664e-01], [ -3.31909311e+00,   9.03103123e-04], [ -4.68530506e-01,   6.39405261e-01]])
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+
+if __name__ == '__main__':
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True) 
+    unittest.main()
+    np.set_printoptions(suppress=start_suppress)
+
diff --git a/pysal/spreg/tests/test_ml_error.py b/pysal/spreg/tests/test_ml_error.py
new file mode 100644
index 0000000..385ee8e
--- /dev/null
+++ b/pysal/spreg/tests/test_ml_error.py
@@ -0,0 +1,71 @@
+import unittest
+import pysal
+import scipy
+import numpy as np
+from pysal.spreg.ml_error import ML_Error
+from pysal.spreg import utils
+
+@unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+        "Max Likelihood requires SciPy version 11 or newer.")
+class TestMLError(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("south.dbf"),'r')
+        self.y_name = "HR90"
+        self.y = np.array(db.by_col(self.y_name))
+        self.y.shape = (len(self.y),1)
+        self.x_names = ["RD90","PS90","UE90","DV90"]
+        self.x = np.array([db.by_col(var) for var in self.x_names]).T
+        ww = pysal.open(pysal.examples.get_path("south_q.gal"))
+        self.w = ww.read()
+        ww.close()
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = ML_Error(self.y,self.x,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w="south_q.gal")
+        betas = np.array([[ 6.1492], [ 4.4024], [ 1.7784], [-0.3781], [ 0.4858], [ 0.2991]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([-5.97649777])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 6.92258051])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 1412
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 5
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 0.94608274])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([ 1.        , -0.39902838,  0.89645344,  6.85780705,  7.2636377 ])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        e = np.array([-4.92843327])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        my = 9.5492931620846928
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 7.0388508798387219
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 1.06476526,  0.05548248,  0.04544514,  0.00614425,  0.01481356,
+        0.00143001])
+        np.testing.assert_array_almost_equal(reg.vm.diagonal(),vm,4)
+        sig2 = [ 32.40685441]
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.3057664820364818
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 1.03187463,  0.23554719,  0.21317867,  0.07838525,  0.12171098,
+        0.03781546])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        z_stat = [(5.9592751097983534, 2.5335926307459251e-09),
+ (18.690182928021841, 5.9508619446611137e-78),
+ (8.3421632936950338, 7.2943630281051907e-17),
+ (-4.8232686291115678, 1.4122456582517099e-06),
+ (3.9913060809142995, 6.5710406838016854e-05),
+ (7.9088780724028922, 2.5971882547279339e-15)]
+        np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
+        logll = -4471.407066887894
+        self.assertAlmostEqual(reg.logll,logll,4)
+        aic = 8952.8141337757879
+        self.assertAlmostEqual(reg.aic,aic,4)
+        schwarz = 8979.0779458660545
+        self.assertAlmostEqual(reg.schwarz,schwarz,4)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ml_error_regimes.py b/pysal/spreg/tests/test_ml_error_regimes.py
new file mode 100644
index 0000000..dd205e1
--- /dev/null
+++ b/pysal/spreg/tests/test_ml_error_regimes.py
@@ -0,0 +1,138 @@
+import unittest
+import scipy
+import pysal
+import numpy as np
+from pysal.spreg.ml_error_regimes import ML_Error_Regimes
+from pysal.spreg.ml_error import ML_Error
+from pysal.spreg import utils
+
+@unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+         "Max Likelihood requires SciPy version 11 or newer.")
+class TestMLError(unittest.TestCase):
+    def setUp(self):
+        db =  pysal.open(pysal.examples.get_path("baltim.dbf"),'r')
+        self.ds_name = "baltim.dbf"
+        self.y_name = "PRICE"
+        self.y = np.array(db.by_col(self.y_name)).T
+        self.y.shape = (len(self.y),1)
+        self.x_names = ["NROOM","AGE","SQFT"]
+        self.x = np.array([db.by_col(var) for var in self.x_names]).T
+        ww = pysal.open(pysal.examples.get_path("baltim_q.gal"))
+        self.w = ww.read()
+        ww.close()
+        self.w_name = "baltim_q.gal"
+        self.w.transform = 'r'
+        self.regimes = db.by_col("CITCOU")
+        #Artficial:
+        n = 256
+        self.n2 = n/2
+        self.x_a1 = np.random.uniform(-10,10,(n,1))
+        self.x_a2 = np.random.uniform(1,5,(n,1))
+        self.q_a = self.x_a2 + np.random.normal(0,1,(n,1))
+        self.x_a = np.hstack((self.x_a1,self.x_a2))
+        self.y_a = np.dot(np.hstack((np.ones((n,1)),self.x_a)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        self.w_a = pysal.lat2W(latt,latt)
+        self.w_a.transform='r'
+        self.regi_a = [0]*(n/2) + [1]*(n/2)
+        self.w_a1 = pysal.lat2W(latt/2,latt)
+        self.w_a1.transform='r'
+
+    def test_model1(self):
+        reg = ML_Error_Regimes(self.y,self.x,self.regimes,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w=self.w_name,name_ds=self.ds_name,name_regimes="CITCOU", regime_err_sep=False)
+        betas = np.array([[ -2.39491278],
+       [  4.873757  ],
+       [ -0.02911854],
+       [  0.33275008],
+       [ 31.79618475],
+       [  2.98102401],
+       [ -0.23710892],
+       [  0.80581127],
+       [  0.61770744]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 30.46599009])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 16.53400991])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 211
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 8
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 47.])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([   1.  ,    4.  ,  148.  ,   11.25,    0.  ,    0.  ,    0.  ,    0.  ])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        e = np.array([ 34.69181334])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        my = 44.307180094786695
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 23.606076835380495
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 58.50551173,   2.42952002,   0.00721525,   0.06391736,
+        80.59249161,   3.1610047 ,   0.0119782 ,   0.0499432 ,   0.00502785])
+        np.testing.assert_array_almost_equal(reg.vm.diagonal(),vm,4)
+        sig2 = np.array([[ 209.60639741]])
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.43600837301477025
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 7.64888957,  1.55869177,  0.08494262,  0.25281882,  8.9773321 ,
+        1.77792146,  0.10944497,  0.22347975,  0.07090735])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        logll = -870.3331059537576
+        self.assertAlmostEqual(reg.logll,logll,4)
+        aic = 1756.6662119075154
+        self.assertAlmostEqual(reg.aic,aic,4)
+        schwarz = 1783.481076975324
+        self.assertAlmostEqual(reg.schwarz,schwarz,4)
+        chow_r = np.array([[ 8.40437046,  0.0037432 ],
+       [ 0.64080535,  0.42341932],
+       [ 2.25389396,  0.13327865],
+       [ 1.96544702,  0.16093197]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 25.367913028011799
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j,4)
+
+    def test_model2(self):
+        reg = ML_Error_Regimes(self.y,self.x,self.regimes,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w=self.w_name,name_ds=self.ds_name,name_regimes="CITCOU", regime_err_sep=True)
+        betas = np.array([[  3.66158216],
+       [  4.55700255],
+       [ -0.08045502],
+       [  0.44800318],
+       [  0.17774677],
+       [ 33.3086368 ],
+       [  2.44709405],
+       [ -0.18803509],
+       [  0.68956598],
+       [  0.75599089]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        vm = np.array([ 40.60994599,  -7.25413138,  -0.16605501,   0.48961884,
+         0.        ,   0.        ,   0.        ,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        u = np.array([ 31.97771505])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 15.02228495])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        e = np.array([ 33.83065421])
+        np.testing.assert_array_almost_equal(reg.e_filtered[0],e,4)
+        chow_r = np.array([[  6.88023639,   0.0087154 ],
+       [  0.90512612,   0.34141092],
+       [  0.75996258,   0.38334023],
+       [  0.56882946,   0.45072443],
+       [ 12.18358581,   0.00048212]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 26.673798071789673
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j,4)
+        #Artficial:
+        model = ML_Error_Regimes(self.y_a, self.x_a, self.regi_a, w=self.w_a, regime_err_sep=True)
+        model1 = ML_Error(self.y_a[0:(self.n2)].reshape((self.n2),1), self.x_a[0:(self.n2)], w=self.w_a1)
+        model2 = ML_Error(self.y_a[(self.n2):].reshape((self.n2),1), self.x_a[(self.n2):], w=self.w_a1)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 4)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ml_lag.py b/pysal/spreg/tests/test_ml_lag.py
new file mode 100644
index 0000000..462db18
--- /dev/null
+++ b/pysal/spreg/tests/test_ml_lag.py
@@ -0,0 +1,68 @@
+import unittest
+import pysal
+import scipy
+import numpy as np
+from pysal.spreg.ml_lag import ML_Lag
+from pysal.spreg import utils
+
+@unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+        "Max Likelihood requires SciPy version 11 or newer.")
+class TestMLError(unittest.TestCase):
+    def setUp(self):
+        db =  pysal.open(pysal.examples.get_path("baltim.dbf"),'r')
+        self.ds_name = "baltim.dbf"
+        self.y_name = "PRICE"
+        self.y = np.array(db.by_col(self.y_name)).T
+        self.y.shape = (len(self.y),1)
+        self.x_names = ["NROOM","AGE","SQFT"]
+        self.x = np.array([db.by_col(var) for var in self.x_names]).T
+        ww = pysal.open(pysal.examples.get_path("baltim_q.gal"))
+        self.w = ww.read()
+        ww.close()
+        self.w_name = "baltim_q.gal"
+        self.w.transform = 'r'
+
+    def test_model1(self):
+        reg = ML_Lag(self.y,self.x,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w=self.w_name,name_ds=self.ds_name)
+        betas = np.array([[-6.04040164],
+       [ 3.48995114],
+       [-0.20103955],
+       [ 0.65462382],
+       [ 0.62351143]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 47.51218398])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([-0.51218398])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 211
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 5
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 47.])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([   1.  ,    4.  ,  148.  ,   11.25])
+        np.testing.assert_array_almost_equal(reg.x[0],x,4)
+        e = np.array([ 41.99251608])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],e,4)
+        my = 44.307180094786695
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 23.606076835380495
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 28.57288755,   1.42341656,   0.00288068,   0.02956392,   0.00332139])
+        np.testing.assert_array_almost_equal(reg.vm.diagonal(),vm,4)
+        sig2 = 216.27525647243797
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.6133020721559487
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 5.34536131,  1.19307022,  0.05367198,  0.17194162,  0.05763147])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        logll = -875.92771143484833
+        self.assertAlmostEqual(reg.logll,logll,4)
+        aic = 1761.8554228696967
+        self.assertAlmostEqual(reg.aic,aic,4)
+        schwarz = 1778.614713537077
+        self.assertAlmostEqual(reg.schwarz,schwarz,4)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ml_lag_regimes.py b/pysal/spreg/tests/test_ml_lag_regimes.py
new file mode 100644
index 0000000..dd77ce1
--- /dev/null
+++ b/pysal/spreg/tests/test_ml_lag_regimes.py
@@ -0,0 +1,117 @@
+import unittest
+import scipy
+import pysal
+import numpy as np
+from pysal.spreg.ml_lag_regimes import ML_Lag_Regimes
+from pysal.spreg.ml_lag import ML_Lag
+from pysal.spreg import utils
+
+@unittest.skipIf(int(scipy.__version__.split(".")[1]) < 11,
+         "Max Likelihood requires SciPy version 11 or newer.")
+class TestMLError(unittest.TestCase):
+    def setUp(self):
+        db =  pysal.open(pysal.examples.get_path("baltim.dbf"),'r')
+        self.ds_name = "baltim.dbf"
+        self.y_name = "PRICE"
+        self.y = np.array(db.by_col(self.y_name)).T
+        self.y.shape = (len(self.y),1)
+        self.x_names = ["NROOM","AGE","SQFT"]
+        self.x = np.array([db.by_col(var) for var in self.x_names]).T
+        ww = pysal.open(pysal.examples.get_path("baltim_q.gal"))
+        self.w = ww.read()
+        ww.close()
+        self.w_name = "baltim_q.gal"
+        self.w.transform = 'r'
+        self.regimes = db.by_col("CITCOU")
+
+    def test_model1(self):
+        reg = ML_Lag_Regimes(self.y,self.x,self.regimes,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w=self.w_name,name_ds=self.ds_name,name_regimes="CITCOU", regime_lag_sep=False)
+        betas = np.array([[-15.00586577],
+       [  4.49600801],
+       [ -0.03180518],
+       [  0.34995882],
+       [ -4.54040395],
+       [  3.92187578],
+       [ -0.17021393],
+       [  0.81941371],
+       [  0.53850323]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        u = np.array([ 32.73718478])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 14.26281522])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        n = 211
+        self.assertAlmostEqual(reg.n,n,4)
+        k = 9
+        self.assertAlmostEqual(reg.k,k,4)
+        y = np.array([ 47.])
+        np.testing.assert_array_almost_equal(reg.y[0],y,4)
+        x = np.array([[   1.  ,    4.  ,  148.  ,   11.25,    0.  ,    0.  ,    0.  ,
+           0.  ]])
+        np.testing.assert_array_almost_equal(reg.x[0].toarray(),x,4)
+        e = np.array([ 29.45407124])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],e,4)
+        my = 44.307180094786695
+        self.assertAlmostEqual(reg.mean_y,my)
+        sy = 23.606076835380495
+        self.assertAlmostEqual(reg.std_y,sy)
+        vm = np.array([ 47.42000914,   2.39526578,   0.00506895,   0.06480022,
+        69.67653371,   3.20661492,   0.01156766,   0.04862014,   0.00400775])
+        np.testing.assert_array_almost_equal(reg.vm.diagonal(),vm,4)
+        sig2 = 200.04433357145007
+        self.assertAlmostEqual(reg.sig2,sig2,4)
+        pr2 = 0.6404460298085746
+        self.assertAlmostEqual(reg.pr2,pr2)
+        std_err = np.array([ 6.88621878,  1.54766462,  0.07119654,  0.25455888,  8.34724707,
+        1.79070235,  0.10755305,  0.22049975,  0.0633068 ])
+        np.testing.assert_array_almost_equal(reg.std_err,std_err,4)
+        logll = -864.98505596489736
+        self.assertAlmostEqual(reg.logll,logll,4)
+        aic = 1747.9701119297947
+        self.assertAlmostEqual(reg.aic,aic,4)
+        schwarz = 1778.1368351310794
+        self.assertAlmostEqual(reg.schwarz,schwarz,4)
+        chow_r = np.array([[ 1.00180776,  0.31687348],
+       [ 0.05904944,  0.8080047 ],
+       [ 1.16987812,  0.27942629],
+       [ 1.95931177,  0.16158694]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 21.648337464039283
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j,4)
+
+    def test_model2(self):
+        reg = ML_Lag_Regimes(self.y,self.x,self.regimes,w=self.w,name_y=self.y_name,name_x=self.x_names,\
+               name_w=self.w_name,name_ds=self.ds_name,name_regimes="CITCOU", regime_lag_sep=True)
+        betas = np.array([[-0.71589799],
+       [ 4.40910538],
+       [-0.08652467],
+       [ 0.46266265],
+       [ 0.1627765 ],
+       [-5.00594358],
+       [ 2.91060349],
+       [-0.18207394],
+       [ 0.71129227],
+       [ 0.66753263]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,4)
+        vm = np.array([ 55.3593679 ,  -7.22927797,  -0.19487326,   0.6030953 ,
+        -0.52249569,   0.        ,   0.        ,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0],vm,4)
+        u = np.array([ 34.03630518])
+        np.testing.assert_array_almost_equal(reg.u[0],u,4)
+        predy = np.array([ 12.96369482])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,4)
+        e = np.array([ 32.46466912])
+        np.testing.assert_array_almost_equal(reg.e_pred[0],e,4)
+        chow_r = np.array([[  0.15654726,   0.69235548],
+       [  0.43533847,   0.509381  ],
+       [  0.60552514,   0.43647766],
+       [  0.59214981,   0.441589  ],
+       [ 11.69437282,   0.00062689]])
+        np.testing.assert_array_almost_equal(reg.chow.regi,chow_r,4)
+        chow_j = 21.978012275873063
+        self.assertAlmostEqual(reg.chow.joint[0],chow_j,4)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ols.py b/pysal/spreg/tests/test_ols.py
new file mode 100644
index 0000000..f2495d9
--- /dev/null
+++ b/pysal/spreg/tests/test_ols.py
@@ -0,0 +1,125 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg import utils
+import pysal.spreg as EC
+
+PEGP = pysal.examples.get_path
+
+class TestBaseOLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(PEGP('columbus.dbf'),'r')
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.weights.rook_from_shapefile(PEGP("columbus.shp"))
+
+    def test_ols(self):
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        ols = EC.ols.BaseOLS(self.y,self.X)
+        np.testing.assert_array_almost_equal(ols.betas, np.array([[
+            46.42818268], [  0.62898397], [ -0.48488854]]))
+        vm = np.array([[  1.74022453e+02,  -6.52060364e+00,  -2.15109867e+00],
+           [ -6.52060364e+00,   2.87200008e-01,   6.80956787e-02],
+           [ -2.15109867e+00,   6.80956787e-02,   3.33693910e-02]])
+        np.testing.assert_array_almost_equal(ols.vm, vm,6)
+
+    def test_ols_white1(self):
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        ols = EC.ols.BaseOLS(self.y,self.X,robust='white', sig2n_k=True)
+        np.testing.assert_array_almost_equal(ols.betas, np.array([[
+            46.42818268], [  0.62898397], [ -0.48488854]]))
+        vm = np.array([[  2.05819450e+02,  -6.83139266e+00,  -2.64825846e+00],
+       [ -6.83139266e+00,   2.58480813e-01,   8.07733167e-02],
+       [ -2.64825846e+00,   8.07733167e-02,   3.75817181e-02]])
+        np.testing.assert_array_almost_equal(ols.vm, vm,6)
+
+    def test_ols_white2(self):
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        ols = EC.ols.BaseOLS(self.y,self.X,robust='white', sig2n_k=False)
+        np.testing.assert_array_almost_equal(ols.betas, np.array([[
+            46.42818268], [  0.62898397], [ -0.48488854]]))
+        vm = np.array([[  1.93218259e+02,  -6.41314413e+00,  -2.48612018e+00],
+       [ -6.41314413e+00,   2.42655457e-01,   7.58280116e-02],
+       [ -2.48612018e+00,   7.58280116e-02,   3.52807966e-02]])
+        np.testing.assert_array_almost_equal(ols.vm, vm,6)
+
+    def test_OLS(self):
+        ols = EC.OLS(self.y, self.X, self.w, spat_diag=True, moran=True, \
+                white_test=True, name_y='home value', name_x=['income','crime'], \
+                name_ds='columbus')
+        
+        np.testing.assert_array_almost_equal(ols.aic, \
+                408.73548964604873 ,7)
+        np.testing.assert_array_almost_equal(ols.ar2, \
+                0.32123239427957662 ,7)
+        np.testing.assert_array_almost_equal(ols.betas, \
+                np.array([[ 46.42818268], [  0.62898397], \
+                    [ -0.48488854]]), 7) 
+        bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
+        ols_bp = np.array([ols.breusch_pagan['df'], ols.breusch_pagan['bp'], ols.breusch_pagan['pvalue']])
+        np.testing.assert_array_almost_equal(bp, ols_bp, 7)
+        np.testing.assert_array_almost_equal(ols.f_stat, \
+            (12.358198885356581, 5.0636903313953024e-05), 7)
+        jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
+        ols_jb = np.array([ols.jarque_bera['df'], ols.jarque_bera['jb'], ols.jarque_bera['pvalue']])
+        np.testing.assert_array_almost_equal(ols_jb,jb, 7)
+        white = np.array([5, 2.90606708, 0.71446484])
+        ols_white = np.array([ols.white['df'], ols.white['wh'], ols.white['pvalue']])
+        np.testing.assert_array_almost_equal(ols_white,white, 7)
+        np.testing.assert_equal(ols.k,  3)
+        kb = {'df': 2, 'kb': 2.2700383871478675, 'pvalue': 0.32141595215434604}
+        for key in kb:
+            self.assertAlmostEqual(ols.koenker_bassett[key],  kb[key], 7)
+        np.testing.assert_array_almost_equal(ols.lm_error, \
+            (4.1508117035117893, 0.041614570655392716),7)
+        np.testing.assert_array_almost_equal(ols.lm_lag, \
+            (0.98279980617162233, 0.32150855529063727), 7)
+        np.testing.assert_array_almost_equal(ols.lm_sarma, \
+                (4.3222725729143736, 0.11519415308749938), 7)
+        np.testing.assert_array_almost_equal(ols.logll, \
+                -201.3677448230244 ,7)
+        np.testing.assert_array_almost_equal(ols.mean_y, \
+            38.436224469387746,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[0], \
+            0.20373540938,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[1], \
+            2.59180452208,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[2], \
+            0.00954740031251,7)
+        np.testing.assert_array_almost_equal(ols.mulColli, \
+            12.537554873824675 ,7)
+        np.testing.assert_equal(ols.n,  49)
+        np.testing.assert_equal(ols.name_ds,  'columbus')
+        np.testing.assert_equal(ols.name_gwk,  None)
+        np.testing.assert_equal(ols.name_w,  'unknown')
+        np.testing.assert_equal(ols.name_x,  ['CONSTANT', 'income', 'crime'])
+        np.testing.assert_equal(ols.name_y,  'home value')
+        np.testing.assert_array_almost_equal(ols.predy[3], np.array([
+            33.53969014]),7)
+        np.testing.assert_array_almost_equal(ols.r2, \
+                0.34951437785126105 ,7)
+        np.testing.assert_array_almost_equal(ols.rlm_error, \
+                (3.3394727667427513, 0.067636278225568919),7)
+        np.testing.assert_array_almost_equal(ols.rlm_lag, \
+            (0.17146086940258459, 0.67881673703455414), 7)
+        np.testing.assert_equal(ols.robust,  'unadjusted')
+        np.testing.assert_array_almost_equal(ols.schwarz, \
+            414.41095054038061,7 )
+        np.testing.assert_array_almost_equal(ols.sig2, \
+            231.4568494392652,7 )
+        np.testing.assert_array_almost_equal(ols.sig2ML, \
+            217.28602192257551,7 )
+        np.testing.assert_array_almost_equal(ols.sig2n, \
+                217.28602192257551, 7)
+ 
+        np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
+                -2.65440864272,7)
+        np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
+                0.0108745049098,7)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ols_regimes.py b/pysal/spreg/tests/test_ols_regimes.py
new file mode 100644
index 0000000..20a9977
--- /dev/null
+++ b/pysal/spreg/tests/test_ols_regimes.py
@@ -0,0 +1,144 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.ols import OLS
+from pysal.spreg.ols_regimes import OLS_Regimes
+
+PEGP = pysal.examples.get_path
+
+class TestOLS_regimes(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+        self.y_var = 'CRIME'
+        self.y = np.array([db.by_col(self.y_var)]).reshape(49,1)
+        self.x_var = ['INC','HOVAL']
+        self.x = np.array([db.by_col(name) for name in self.x_var]).T
+        self.r_var = 'NSA'
+        self.regimes = db.by_col(self.r_var)
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_OLS(self):
+        start_suppress = np.get_printoptions()['suppress']
+        np.set_printoptions(suppress=True)    
+        ols = OLS_Regimes(self.y, self.x, self.regimes, w=self.w, regime_err_sep=False, constant_regi='many', nonspat_diag=False, spat_diag=True, name_y=self.y_var, name_x=self.x_var, name_ds='columbus', name_regimes=self.r_var, name_w='columbus.gal')        
+        #np.testing.assert_array_almost_equal(ols.aic, 408.73548964604873 ,7)
+        np.testing.assert_array_almost_equal(ols.ar2,0.50761700679873101 ,7)
+        np.testing.assert_array_almost_equal(ols.betas,np.array([[ 68.78670869],\
+                [ -1.9864167 ],[ -0.10887962],[ 67.73579559],[ -1.36937552],[ -0.31792362]])) 
+        vm = np.array([ 48.81339213,  -2.14959579,  -0.19968157,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(ols.vm[0], vm, 6)
+        np.testing.assert_array_almost_equal(ols.lm_error, \
+            (5.92970357,  0.01488775),7)
+        np.testing.assert_array_almost_equal(ols.lm_lag, \
+            (8.78315751,  0.00304024), 7)
+        np.testing.assert_array_almost_equal(ols.lm_sarma, \
+                (8.89955982,  0.01168114), 7)
+        np.testing.assert_array_almost_equal(ols.mean_y, \
+            35.1288238979591,7)
+        np.testing.assert_equal(ols.k, 6)
+        np.testing.assert_equal(ols.kf, 0)
+        np.testing.assert_equal(ols.kr, 3)
+        np.testing.assert_equal(ols.n, 49)
+        np.testing.assert_equal(ols.nr, 2)
+        np.testing.assert_equal(ols.name_ds,  'columbus')
+        np.testing.assert_equal(ols.name_gwk,  None)
+        np.testing.assert_equal(ols.name_w,  'columbus.gal')
+        np.testing.assert_equal(ols.name_x,  ['0_CONSTANT', '0_INC', '0_HOVAL', '1_CONSTANT', '1_INC', '1_HOVAL'])
+        np.testing.assert_equal(ols.name_y,  'CRIME')
+        np.testing.assert_array_almost_equal(ols.predy[3], np.array([
+            51.05003696]),7)
+        np.testing.assert_array_almost_equal(ols.r2, \
+                0.55890690192386316 ,7)
+        np.testing.assert_array_almost_equal(ols.rlm_error, \
+                (0.11640231,  0.73296972),7)
+        np.testing.assert_array_almost_equal(ols.rlm_lag, \
+            (2.96985625,  0.08482939), 7)
+        np.testing.assert_equal(ols.robust,  'unadjusted')
+        np.testing.assert_array_almost_equal(ols.sig2, \
+            137.84897351821013,7 )
+        np.testing.assert_array_almost_equal(ols.sig2n, \
+                120.96950737312316, 7)
+        np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
+                -0.43342216706091791,7)
+        np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
+                0.66687472578594531,7)
+        np.set_printoptions(suppress=start_suppress)        
+    """
+    def test_OLS_regi(self):
+        #Artficial:
+        n = 256
+        x1 = np.random.uniform(-10,10,(n,1))
+        y = np.dot(np.hstack((np.ones((n,1)),x1)),np.array([[1],[0.5]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        regi = [0]*(n/2) + [1]*(n/2)
+        model = OLS_Regimes(y, x1, regimes=regi, regime_err_sep=True, sig2n_k=False)
+        model1 = OLS(y[0:(n/2)].reshape((n/2),1), x1[0:(n/2)], sig2n_k=False)
+        model2 = OLS(y[(n/2):n].reshape((n/2),1), x1[(n/2):n], sig2n_k=False)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+        #Columbus:  
+        reg = OLS_Regimes(self.y, self.x, self.regimes, w=self.w, constant_regi='many', nonspat_diag=True, spat_diag=True, name_y=self.y_var, name_x=self.x_var, name_ds='columbus', name_regimes=self.r_var, name_w='columbus.gal', regime_err_sep=True)        
+        np.testing.assert_array_almost_equal(reg.multi[0].aic, 192.96044303402897 ,7)
+        tbetas = np.array([[ 68.78670869],
+       [ -1.9864167 ],
+       [ -0.10887962],
+       [ 67.73579559],
+       [ -1.36937552],
+       [ -0.31792362]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 41.68828023,  -1.83582717,  -0.17053478,   0.        ,
+         0.        ,   0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        u_3 = np.array([[ 0.31781838],
+       [-5.6905584 ],
+       [-6.8819715 ]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[ 15.40816162],
+       [ 24.4923124 ],
+       [ 37.5087525 ]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        chow_regi = np.array([[ 0.01002733,  0.92023592],
+       [ 0.46017009,  0.49754449],
+       [ 0.60732697,  0.43579603]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.67787986791767096, 7)
+    """
+    def test_OLS_fixed(self):
+        start_suppress = np.get_printoptions()['suppress']
+        np.set_printoptions(suppress=True)    
+        ols = OLS_Regimes(self.y, self.x, self.regimes, w=self.w, cols2regi=[False,True], regime_err_sep=True, constant_regi='one', nonspat_diag=False, spat_diag=True, name_y=self.y_var, name_x=self.x_var, name_ds='columbus', name_regimes=self.r_var, name_w='columbus.gal')        
+        np.testing.assert_array_almost_equal(ols.betas,np.array([[ -0.24385565], [ -0.26335026], [ 68.89701137], [ -1.67389685]])) 
+        vm = np.array([ 0.02354271,  0.01246677,  0.00424658, -0.04921356])
+        np.testing.assert_array_almost_equal(ols.vm[0], vm, 6)
+        np.testing.assert_array_almost_equal(ols.lm_error, \
+            (5.62668744,  0.01768903),7)
+        np.testing.assert_array_almost_equal(ols.lm_lag, \
+            (9.43264957,  0.00213156), 7)
+        np.testing.assert_array_almost_equal(ols.mean_y, \
+            35.12882389795919,7)
+        np.testing.assert_equal(ols.kf, 2)
+        np.testing.assert_equal(ols.kr, 1)
+        np.testing.assert_equal(ols.n, 49)
+        np.testing.assert_equal(ols.nr, 2)
+        np.testing.assert_equal(ols.name_ds,  'columbus')
+        np.testing.assert_equal(ols.name_gwk,  None)
+        np.testing.assert_equal(ols.name_w,  'columbus.gal')
+        np.testing.assert_equal(ols.name_x,  ['0_HOVAL', '1_HOVAL', '_Global_CONSTANT', '_Global_INC'])
+        np.testing.assert_equal(ols.name_y,  'CRIME')
+        np.testing.assert_array_almost_equal(ols.predy[3], np.array([
+            52.65974636]),7)
+        np.testing.assert_array_almost_equal(ols.r2, \
+                0.5525561183786056 ,7)
+        np.testing.assert_equal(ols.robust,  'unadjusted')
+        np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
+                13.848705206463748,7)
+        np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
+                7.776650625274256e-18,7)
+        np.set_printoptions(suppress=start_suppress)
+        
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_ols_sparse.py b/pysal/spreg/tests/test_ols_sparse.py
new file mode 100644
index 0000000..1341c96
--- /dev/null
+++ b/pysal/spreg/tests/test_ols_sparse.py
@@ -0,0 +1,107 @@
+import unittest
+import numpy as np
+import pysal
+import pysal.spreg as EC
+from scipy import sparse
+
+PEGP = pysal.examples.get_path
+
+class TestBaseOLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(PEGP('columbus.dbf'),'r')
+        y = np.array(db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.w = pysal.weights.rook_from_shapefile(PEGP("columbus.shp"))
+
+    def test_ols(self):
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = sparse.csr_matrix(self.X)
+        ols = EC.ols.BaseOLS(self.y,self.X)
+        np.testing.assert_array_almost_equal(ols.betas, np.array([[
+            46.42818268], [  0.62898397], [ -0.48488854]]))
+        vm = np.array([[  1.74022453e+02,  -6.52060364e+00,  -2.15109867e+00],
+           [ -6.52060364e+00,   2.87200008e-01,   6.80956787e-02],
+           [ -2.15109867e+00,   6.80956787e-02,   3.33693910e-02]])
+        np.testing.assert_array_almost_equal(ols.vm, vm,6)
+
+    def test_OLS(self):
+        self.X = sparse.csr_matrix(self.X)
+        ols = EC.OLS(self.y, self.X, self.w, spat_diag=True, moran=True, \
+                name_y='home value', name_x=['income','crime'], \
+                name_ds='columbus', nonspat_diag=True, white_test=True)
+        
+        np.testing.assert_array_almost_equal(ols.aic, \
+                408.73548964604873 ,7)
+        np.testing.assert_array_almost_equal(ols.ar2, \
+                0.32123239427957662 ,7)
+        np.testing.assert_array_almost_equal(ols.betas, \
+                np.array([[ 46.42818268], [  0.62898397], \
+                    [ -0.48488854]]), 7) 
+        bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
+        ols_bp = np.array([ols.breusch_pagan['df'], ols.breusch_pagan['bp'], ols.breusch_pagan['pvalue']])
+        np.testing.assert_array_almost_equal(bp, ols_bp, 7)
+        np.testing.assert_array_almost_equal(ols.f_stat, \
+            (12.358198885356581, 5.0636903313953024e-05), 7)
+        jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
+        ols_jb = np.array([ols.jarque_bera['df'], ols.jarque_bera['jb'], ols.jarque_bera['pvalue']])
+        np.testing.assert_array_almost_equal(ols_jb,jb, 7)
+        white = np.array([5, 2.90606708, 0.71446484])
+        ols_white = np.array([ols.white['df'], ols.white['wh'], ols.white['pvalue']])
+        np.testing.assert_array_almost_equal(ols_white,white, 7)
+        np.testing.assert_equal(ols.k,  3)
+        kb = {'df': 2, 'kb': 2.2700383871478675, 'pvalue': 0.32141595215434604}
+        for key in kb:
+            self.assertAlmostEqual(ols.koenker_bassett[key],  kb[key], 7)
+        np.testing.assert_array_almost_equal(ols.lm_error, \
+            (4.1508117035117893, 0.041614570655392716),7)
+        np.testing.assert_array_almost_equal(ols.lm_lag, \
+            (0.98279980617162233, 0.32150855529063727), 7)
+        np.testing.assert_array_almost_equal(ols.lm_sarma, \
+                (4.3222725729143736, 0.11519415308749938), 7)
+        np.testing.assert_array_almost_equal(ols.logll, \
+                -201.3677448230244 ,7)
+        np.testing.assert_array_almost_equal(ols.mean_y, \
+            38.436224469387746,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[0], \
+            0.20373540938,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[1], \
+            2.59180452208,7)
+        np.testing.assert_array_almost_equal(ols.moran_res[2], \
+            0.00954740031251,7)
+        np.testing.assert_array_almost_equal(ols.mulColli, \
+            12.537554873824675 ,7)
+        np.testing.assert_equal(ols.n,  49)
+        np.testing.assert_equal(ols.name_ds,  'columbus')
+        np.testing.assert_equal(ols.name_gwk,  None)
+        np.testing.assert_equal(ols.name_w,  'unknown')
+        np.testing.assert_equal(ols.name_x,  ['CONSTANT', 'income', 'crime'])
+        np.testing.assert_equal(ols.name_y,  'home value')
+        np.testing.assert_array_almost_equal(ols.predy[3], np.array([
+            33.53969014]),7)
+        np.testing.assert_array_almost_equal(ols.r2, \
+                0.34951437785126105 ,7)
+        np.testing.assert_array_almost_equal(ols.rlm_error, \
+                (3.3394727667427513, 0.067636278225568919),7)
+        np.testing.assert_array_almost_equal(ols.rlm_lag, \
+            (0.17146086940258459, 0.67881673703455414), 7)
+        np.testing.assert_equal(ols.robust,  'unadjusted')
+        np.testing.assert_array_almost_equal(ols.schwarz, \
+            414.41095054038061,7 )
+        np.testing.assert_array_almost_equal(ols.sig2, \
+            231.4568494392652,7 )
+        np.testing.assert_array_almost_equal(ols.sig2ML, \
+            217.28602192257551,7 )
+        np.testing.assert_array_almost_equal(ols.sig2n, \
+                217.28602192257551, 7)
+ 
+        np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
+                -2.65440864272,7)
+        np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
+                0.0108745049098,7)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_probit.py b/pysal/spreg/tests/test_probit.py
new file mode 100644
index 0000000..5c4e6dd
--- /dev/null
+++ b/pysal/spreg/tests/test_probit.py
@@ -0,0 +1,108 @@
+import unittest
+import pysal
+import numpy as np
+from pysal.spreg import probit as PB
+
+class TestBaseProbit(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("CRIME"))
+        y = np.reshape(y, (49,1))
+        self.y = (y>40).astype(float)
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("HOVAL"))
+        self.X = np.array(X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = PB.BaseProbit(self.y, self.X, w=self.w)
+        betas = np.array([[ 3.35381078], [-0.1996531 ], [-0.02951371]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        predy = np.array([ 0.00174739])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,6)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,6)
+        y = np.array([ 0.])
+        np.testing.assert_array_almost_equal(reg.y[0],y,6)
+        x = np.array([  1.      ,  19.531   ,  80.467003])
+        np.testing.assert_array_almost_equal(reg.x[0],x,6)
+        vm = np.array([[  8.52813879e-01,  -4.36272459e-02,  -8.05171472e-03], [ -4.36272459e-02,   4.11381444e-03,  -1.92834842e-04], [ -8.05171472e-03,  -1.92834842e-04,   3.09660240e-04]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        xmean = np.array([[  1.        ], [ 14.37493876], [ 38.43622447 ]])
+        np.testing.assert_array_almost_equal(reg.xmean,xmean,6)        
+        predpc = 85.714285714285708
+        self.assertAlmostEqual(reg.predpc,predpc,5)
+        logl = -20.06009093055782
+        self.assertAlmostEqual(reg.logl,logl,5)
+        scale = 0.23309310130643665
+        self.assertAlmostEqual(reg.scale,scale,5)
+        slopes = np.array([[-0.04653776], [-0.00687944]])
+        np.testing.assert_array_almost_equal(reg.slopes,slopes,6)
+        slopes_vm = np.array([[  1.77101993e-04,  -1.65021168e-05], [ -1.65021168e-05,   1.60575016e-05]])
+        np.testing.assert_array_almost_equal(reg.slopes_vm,slopes_vm,6)
+        LR = 25.317683245671716
+        self.assertAlmostEqual(reg.LR[0],LR,5)
+        Pinkse_error = 2.9632385352516728
+        self.assertAlmostEqual(reg.Pinkse_error[0],Pinkse_error,5)
+        KP_error = 1.6509224700582124
+        self.assertAlmostEqual(reg.KP_error[0],KP_error,5)
+        PS_error = 2.3732463777623511
+        self.assertAlmostEqual(reg.PS_error[0],PS_error,5)
+
+class TestProbit(unittest.TestCase):
+    def setUp(self):
+        db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
+        y = np.array(db.by_col("CRIME"))
+        y = np.reshape(y, (49,1))
+        self.y = (y>40).astype(float)
+        X = []
+        X.append(db.by_col("INC"))
+        X.append(db.by_col("HOVAL"))
+        self.X = np.array(X).T
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+
+    def test_model(self):
+        reg = PB.Probit(self.y, self.X, w=self.w)
+        betas = np.array([[ 3.35381078], [-0.1996531 ], [-0.02951371]])
+        np.testing.assert_array_almost_equal(reg.betas,betas,6)
+        predy = np.array([ 0.00174739])
+        np.testing.assert_array_almost_equal(reg.predy[0],predy,6)
+        n = 49
+        self.assertAlmostEqual(reg.n,n,6)
+        k = 3
+        self.assertAlmostEqual(reg.k,k,6)
+        y = np.array([ 0.])
+        np.testing.assert_array_almost_equal(reg.y[0],y,6)
+        x = np.array([  1.      ,  19.531   ,  80.467003])
+        np.testing.assert_array_almost_equal(reg.x[0],x,6)
+        vm = np.array([[  8.52813879e-01,  -4.36272459e-02,  -8.05171472e-03], [ -4.36272459e-02,   4.11381444e-03,  -1.92834842e-04], [ -8.05171472e-03,  -1.92834842e-04,   3.09660240e-04]])
+        np.testing.assert_array_almost_equal(reg.vm,vm,6)
+        xmean = np.array([[  1.        ], [ 14.37493876], [ 38.43622447 ]])
+        np.testing.assert_array_almost_equal(reg.xmean,xmean,6)        
+        predpc = 85.714285714285708
+        self.assertAlmostEqual(reg.predpc,predpc,5)
+        logl = -20.06009093055782
+        self.assertAlmostEqual(reg.logl,logl,5)
+        scale = 0.23309310130643665
+        self.assertAlmostEqual(reg.scale,scale,5)
+        slopes = np.array([[-0.04653776], [-0.00687944]])
+        np.testing.assert_array_almost_equal(reg.slopes,slopes,6)
+        slopes_vm = np.array([[  1.77101993e-04,  -1.65021168e-05], [ -1.65021168e-05,   1.60575016e-05]])
+        np.testing.assert_array_almost_equal(reg.slopes_vm,slopes_vm,6)
+        LR = 25.317683245671716
+        self.assertAlmostEqual(reg.LR[0],LR,5)
+        Pinkse_error = 2.9632385352516728
+        self.assertAlmostEqual(reg.Pinkse_error[0],Pinkse_error,5)
+        KP_error = 1.6509224700582124
+        self.assertAlmostEqual(reg.KP_error[0],KP_error,5)
+        PS_error = 2.3732463777623511
+        self.assertAlmostEqual(reg.PS_error[0],PS_error,5)
+        
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_twosls.py b/pysal/spreg/tests/test_twosls.py
new file mode 100644
index 0000000..71021e3
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls.py
@@ -0,0 +1,254 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.twosls import BaseTSLS, TSLS
+
+class TestBaseTSLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+        self.y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(self.y, (49,1))
+        self.X = []
+        self.X.append(db.by_col("INC"))
+        self.X = np.array(self.X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.yd = []
+        self.yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(self.yd).T
+        self.q = []
+        self.q.append(db.by_col("DISCBD"))
+        self.q = np.array(self.q).T
+
+    def test_basic(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0], h_0)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+                        [   704.371999  ,  11686.67338121,   2246.12800625],
+                        [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth, hth, 7)
+        hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
+                        [-0.00762011,  0.00100135, -0.0023752 ],
+                        [-0.01044191, -0.0023752 ,  0.01563276]]) 
+        np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
+        self.assertEqual(reg.k, 3)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
+                             [ 0.02580142,  0.08226331, -0.03143731],
+                             [-3.13896453, -0.33487872,  0.20690965]]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
+        predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.26609504], [ 57.57145851]]) 
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
+        np.testing.assert_array_equal(reg.q[0:5], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
+        self.assertAlmostEqual(reg.sig2n, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.sig2, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[ 44.41547467], [-10.19309584], [-24.44666724], [ -5.87833504], [ -6.83994851]]) 
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 27028.127012241919, 7)
+        varb = np.array([[ 0.41526237,  0.01879906, -0.01730372],
+                         [ 0.01879906,  0.00362823, -0.00184604],
+                         [-0.01730372, -0.00184604,  0.0011406 ]]) 
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        x_0 = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0], x_0, 7)
+        y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]]) 
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array([[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]]) 
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.      ,  19.531   ,  80.467003]) 
+        np.testing.assert_array_almost_equal(reg.z[0], z_0, 7)
+        zthhthi = np.array([[  1.00000000e+00,  -1.66533454e-16,   4.44089210e-16],
+                            [  0.00000000e+00,   1.00000000e+00,   0.00000000e+00],
+                            [  1.26978671e+01,   1.05598709e+00,   3.70212359e+00]]) 
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+        
+    def test_n_k(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, sig2n_k=True)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 243.99486949,   11.04572682,  -10.16711028],
+                       [  11.04572682,    2.13183469,   -1.08467261],
+                       [ -10.16711028,   -1.08467261,    0.67018062]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_white(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='white')
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 208.27139316,   15.6687805 ,  -11.53686154],
+                       [  15.6687805 ,    2.26882747,   -1.30312033],
+                       [ -11.53686154,   -1.30312033,    0.81940656]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_hac(self):
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='hac', gwk=gwk)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 231.07254978,   15.42050291,  -11.3941033 ],
+                       [  15.01376346,    1.92422887,   -1.11865505],
+                       [ -11.34381641,   -1.1279227 ,    0.72053806]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+class TestTSLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+        self.y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(self.y, (49,1))
+        self.X = []
+        self.X.append(db.by_col("INC"))
+        self.X = np.array(self.X).T
+        self.yd = []
+        self.yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(self.yd).T
+        self.q = []
+        self.q.append(db.by_col("DISCBD"))
+        self.q = np.array(self.q).T
+
+    def test_basic(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h[0], h_0)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+                        [   704.371999  ,  11686.67338121,   2246.12800625],
+                        [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth, hth, 7)
+        hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
+                        [-0.00762011,  0.00100135, -0.0023752 ],
+                        [-0.01044191, -0.0023752 ,  0.01563276]]) 
+        np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
+        self.assertEqual(reg.k, 3)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
+                             [ 0.02580142,  0.08226331, -0.03143731],
+                             [-3.13896453, -0.33487872,  0.20690965]]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
+        predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.26609504], [ 57.57145851]]) 
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
+        np.testing.assert_array_equal(reg.q[0:5], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
+        self.assertAlmostEqual(reg.sig2n, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.sig2, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[ 44.41547467], [-10.19309584], [-24.44666724], [ -5.87833504], [ -6.83994851]]) 
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 27028.127012241919, 7)
+        varb = np.array([[ 0.41526237,  0.01879906, -0.01730372],
+                         [ 0.01879906,  0.00362823, -0.00184604],
+                         [-0.01730372, -0.00184604,  0.0011406 ]]) 
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        x_0 = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x[0], x_0, 7)
+        y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]]) 
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array([[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]]) 
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.      ,  19.531   ,  80.467003]) 
+        np.testing.assert_array_almost_equal(reg.z[0], z_0, 7)
+        zthhthi = np.array([[  1.00000000e+00,  -1.66533454e-16,   4.44089210e-16],
+                            [  0.00000000e+00,   1.00000000e+00,   0.00000000e+00],
+                            [  1.26978671e+01,   1.05598709e+00,   3.70212359e+00]]) 
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+        self.assertAlmostEqual(reg.pr2, 0.27936137128173893, 7)
+        z_stat = np.array([[  5.84526447e+00,   5.05764078e-09],
+                           [  3.67601567e-01,   7.13170346e-01],
+                           [ -1.99468913e+00,   4.60767956e-02]])
+        np.testing.assert_array_almost_equal(reg.z_stat, z_stat, 7)
+        title = 'TWO STAGE LEAST SQUARES'
+        self.assertEqual(reg.title, title)
+        
+    def test_n_k(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q, sig2n_k=True)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 243.99486949,   11.04572682,  -10.16711028],
+                       [  11.04572682,    2.13183469,   -1.08467261],
+                       [ -10.16711028,   -1.08467261,    0.67018062]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_white(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q, robust='white')
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 208.27139316,   15.6687805 ,  -11.53686154],
+                       [  15.6687805 ,    2.26882747,   -1.30312033],
+                       [ -11.53686154,   -1.30312033,    0.81940656]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertEqual(reg.robust, 'white')
+
+    def test_hac(self):
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        reg = TSLS(self.y, self.X, self.yd, self.q, robust='hac', gwk=gwk)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 225.0795089 ,   17.11660041,  -12.22448566],
+                       [  17.67097154,    2.47483461,   -1.4183641 ],
+                       [ -12.45093722,   -1.40495464,    0.8700441 ]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertEqual(reg.robust, 'hac')
+
+    def test_spatial(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        reg = TSLS(self.y, self.X, self.yd, self.q, spat_diag=True, w=w)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        ak_test = np.array([ 1.16816972,  0.27977763])
+        np.testing.assert_array_almost_equal(reg.ak_test, ak_test, 7)
+
+    def test_names(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        name_x = ['inc']
+        name_y = 'crime'
+        name_yend = ['hoval']
+        name_q = ['discbd']
+        name_w = 'queen'
+        name_gwk = 'k=5'
+        name_ds = 'columbus'
+        reg = TSLS(self.y, self.X, self.yd, self.q,
+                spat_diag=True, w=w, robust='hac', gwk=gwk,
+                name_x=name_x, name_y=name_y, name_q=name_q, name_w=name_w,
+                name_yend=name_yend, name_gwk=name_gwk, name_ds=name_ds)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 225.0795089 ,   17.11660041,  -12.22448566],
+                       [  17.67097154,    2.47483461,   -1.4183641 ],
+                       [ -12.45093722,   -1.40495464,    0.8700441 ]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertListEqual(reg.name_x, ['CONSTANT']+name_x)
+        self.assertListEqual(reg.name_yend, name_yend)
+        self.assertListEqual(reg.name_q, name_q)
+        self.assertEqual(reg.name_y, name_y)
+        self.assertEqual(reg.name_w, name_w)
+        self.assertEqual(reg.name_gwk, name_gwk)
+        self.assertEqual(reg.name_ds, name_ds)
+
+    
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_twosls_regimes.py b/pysal/spreg/tests/test_twosls_regimes.py
new file mode 100644
index 0000000..155bf03
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls_regimes.py
@@ -0,0 +1,276 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.twosls_regimes import TSLS_Regimes
+from pysal.spreg.twosls import TSLS
+
+class TestTSLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+        self.y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(self.y, (49,1))
+        self.x = []
+        self.x.append(db.by_col("INC"))
+        self.x = np.array(self.x).T
+        self.yd = []
+        self.yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(self.yd).T
+        self.q = []
+        self.q.append(db.by_col("DISCBD"))
+        self.q = np.array(self.q).T
+        self.r_var = 'NSA'
+        self.regimes = db.by_col(self.r_var)
+
+    def test_basic(self):
+        reg = TSLS_Regimes(self.y, self.x, self.yd, self.q, self.regimes, regime_err_sep=False)        
+        betas = np.array([[ 80.23408166],[  5.48218125],[ 82.98396737],[  0.49775429],[ -3.72663211],[ -1.27451485]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([[  0.   ,   0.   ,   1.   ,  19.531,   0.   ,   5.03 ]])
+        np.testing.assert_array_almost_equal(reg.h[0]*np.eye(6), h_0)
+        hth = np.array([[   25.        ,   416.378999  ,     0.        ,     0.        ,\
+           76.03      ,     0.        ],\
+       [  416.378999  ,  7831.05477839,     0.        ,     0.        ,\
+         1418.65422625,     0.        ],\
+       [    0.        ,     0.        ,    24.        ,   287.993     ,\
+            0.        ,    63.72      ],\
+       [    0.        ,     0.        ,   287.993     ,  3855.61860282,\
+            0.        ,   827.47378   ],\
+       [   76.03      ,  1418.65422625,     0.        ,     0.        ,\
+          291.9749    ,     0.        ],\
+       [    0.        ,     0.        ,    63.72      ,   827.47378   ,\
+            0.        ,   206.6102    ]])
+        np.testing.assert_array_almost_equal(reg.hth, hth, 7)
+        hthi = np.array([[ 0.3507855 , -0.0175615 ,  0.        ,  0.        , -0.00601601,\
+         0.        ],\
+       [-0.0175615 ,  0.00194521, -0.        , -0.        , -0.00487844,\
+        -0.        ],\
+       [ 0.        ,  0.        ,  0.42327489, -0.02563036,  0.        ,\
+        -0.02789128],\
+       [-0.        , -0.        , -0.02563036,  0.00339841, -0.        ,\
+        -0.00570605],\
+       [-0.00601601, -0.00487844,  0.        ,  0.        ,  0.02869498,\
+         0.        ],\
+       [ 0.        ,  0.        , -0.02789128, -0.00570605,  0.        ,\
+         0.03629464]])
+        np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
+        self.assertEqual(reg.k, 6)
+        self.assertEqual(reg.kstar, 2)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        np.testing.assert_equal(reg.kf, 0)
+        np.testing.assert_equal(reg.kr, 3)
+        np.testing.assert_equal(reg.n, 49)
+        np.testing.assert_equal(reg.nr, 2)
+        pfora1a2 = np.array([[ 17.80208995,  -0.46997739,   0.        ,   0.        ,\
+         -0.21344994,   0.        ],\
+       [ -0.36293902,   0.41200496,   0.        ,   0.        ,\
+         -0.17308863,   0.        ],\
+       [  0.        ,   0.        ,  23.8584271 ,  -0.96035493,\
+          0.        ,  -0.26149141],\
+       [  0.        ,   0.        ,  -0.61800983,   0.2269828 ,\
+          0.        ,  -0.05349643],\
+       [ -3.22151864,  -2.10181214,   0.        ,   0.        ,\
+          1.01810757,   0.        ],\
+       [  0.        ,   0.        ,  -5.42403871,  -0.6641704 ,\
+          0.        ,   0.34027606]]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
+        predy_5 = np.array([[ -9.85078372],[ 36.75098196],[ 57.34266859],[ 42.89851907],[ 58.9840913 ]]) 
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([ 5.03,  4.27,  3.89,  3.7 ,  2.83])
+        np.testing.assert_array_equal((reg.q[0:5].T*np.eye(5))[1,:], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 990.00750983736714, 7)
+        self.assertAlmostEqual(reg.sig2n, 868.78210046952631, 7)
+        self.assertAlmostEqual(reg.sig2, 990.00750983736714, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[ 25.57676372],[-17.94922796],[-26.71588759],[-10.51075907],[ -8.2525813 ]]) 
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 42570.322923006788, 7)
+        varb = np.array([[ 0.50015831,  0.07969376,  0.        ,  0.        , -0.04760541,\
+         0.        ],\
+       [ 0.07969376,  0.06523527,  0.        ,  0.        , -0.03105915,\
+         0.        ],\
+       [ 0.        ,  0.        ,  0.73944792,  0.01132445,  0.        ,\
+        -0.02117969],\
+       [ 0.        ,  0.        ,  0.01132445,  0.00756336,  0.        ,\
+        -0.00259344],\
+       [-0.04760541, -0.03105915, -0.        , -0.        ,  0.0150449 ,\
+        -0.        ],\
+       [-0.        , -0.        , -0.02117969, -0.00259344, -0.        ,\
+         0.0013287 ]]) 
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[ 495.16048523,   78.89742341,    0.        ,    0.        ,\
+         -47.12971066,    0.        ],\
+       [  78.89742341,   64.58341083,    0.        ,    0.        ,\
+         -30.74878934,    0.        ],\
+       [   0.        ,    0.        ,  732.05899155,   11.21128921,\
+           0.        ,  -20.96804956],\
+       [   0.        ,    0.        ,   11.21128921,    7.48778398,\
+           0.        ,   -2.56752553],\
+       [ -47.12971066,  -30.74878934,    0.        ,    0.        ,\
+          14.89456384,    0.        ],\
+       [   0.        ,    0.        ,  -20.96804956,   -2.56752553,\
+           0.        ,    1.3154267 ]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        x_0 = np.array([[  0.   ,   0.   ,   1.   ,  19.531]])
+        np.testing.assert_array_almost_equal(reg.x[0]*np.eye(4), x_0, 7)
+        y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]]) 
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_3 = np.array([[  0.      ,  80.467003],[  0.      ,  44.567001],[  0.      ,  26.35    ]]) 
+        np.testing.assert_array_almost_equal(reg.yend[0:3]*np.eye(2), yend_3, 7)
+        z_0 = np.array([[  0.      ,   0.      ,   1.      ,  19.531   ,   0.      , 80.467003]]) 
+        np.testing.assert_array_almost_equal(reg.z[0]*np.eye(6), z_0, 7)
+        zthhthi = np.array([[  1.00000000e+00,   0.00000000e+00,   0.00000000e+00,\
+          0.00000000e+00,  -4.44089210e-16,   0.00000000e+00],\
+       [ -1.24344979e-14,   1.00000000e+00,   0.00000000e+00,\
+          0.00000000e+00,   0.00000000e+00,   0.00000000e+00],\
+       [  0.00000000e+00,   0.00000000e+00,   1.00000000e+00,\
+          0.00000000e+00,   0.00000000e+00,  -1.11022302e-16],\
+       [  0.00000000e+00,   0.00000000e+00,  -3.55271368e-15,\
+          1.00000000e+00,   0.00000000e+00,   0.00000000e+00],\
+       [  2.87468088e+00,   1.82963841e+00,   0.00000000e+00,\
+          0.00000000e+00,   1.38104644e+00,   0.00000000e+00],\
+       [  0.00000000e+00,   0.00000000e+00,   1.19237474e+01,\
+          1.13018165e+00,   0.00000000e+00,   5.22645427e+00]]) 
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+        self.assertAlmostEqual(reg.pr2, 0.17729324026706564, 7)
+        z_stat = np.array([[  3.60566933e+00,   3.11349387e-04],\
+       [  6.82170447e-01,   4.95131179e-01],\
+       [  3.06705211e+00,   2.16181168e-03],\
+       [  1.81902371e-01,   8.55659343e-01],\
+       [ -9.65611937e-01,   3.34238400e-01],\
+       [ -1.11124949e+00,   2.66460976e-01]])
+        np.testing.assert_array_almost_equal(np.array(reg.z_stat), z_stat, 7)
+        chow_regi = np.array([[ 0.00616179,  0.93743265],
+       [ 0.3447218 ,  0.55711631],
+       [ 0.37093662,  0.54249417]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 1.1353790779820598, 7)
+        title = 'TWO STAGE LEAST SQUARES - REGIMES'
+        self.assertEqual(reg.title, title)
+        
+    def test_n_k(self):
+        reg = TSLS_Regimes(self.y, self.x, self.yd, self.q, self.regimes, sig2n_k=True, regime_err_sep=False)
+        betas = np.array([[ 80.23408166],[  5.48218125],[ 82.98396737],[  0.49775429],[ -3.72663211],[ -1.27451485]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 495.16048523,   78.89742341,    0.        ,    0.        ,\
+         -47.12971066,    0.        ],\
+       [  78.89742341,   64.58341083,    0.        ,    0.        ,\
+         -30.74878934,    0.        ],\
+       [   0.        ,    0.        ,  732.05899155,   11.21128921,\
+           0.        ,  -20.96804956],\
+       [   0.        ,    0.        ,   11.21128921,    7.48778398,\
+           0.        ,   -2.56752553],\
+       [ -47.12971066,  -30.74878934,    0.        ,    0.        ,\
+          14.89456384,    0.        ],\
+       [   0.        ,    0.        ,  -20.96804956,   -2.56752553,\
+           0.        ,    1.3154267 ]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_spatial(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        reg = TSLS_Regimes(self.y, self.x, self.yd, self.q, self.regimes, spat_diag=True, w=w, regime_err_sep=False)
+        betas = np.array([[ 80.23408166],[  5.48218125],[ 82.98396737],[  0.49775429],[ -3.72663211],[ -1.27451485]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 495.16048523,   78.89742341,    0.        ,    0.        ,\
+         -47.12971066,    0.        ],\
+       [  78.89742341,   64.58341083,    0.        ,    0.        ,\
+         -30.74878934,    0.        ],\
+       [   0.        ,    0.        ,  732.05899155,   11.21128921,\
+           0.        ,  -20.96804956],\
+       [   0.        ,    0.        ,   11.21128921,    7.48778398,\
+           0.        ,   -2.56752553],\
+       [ -47.12971066,  -30.74878934,    0.        ,    0.        ,\
+          14.89456384,    0.        ],\
+       [   0.        ,    0.        ,  -20.96804956,   -2.56752553,\
+           0.        ,    1.3154267 ]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        ak_test = np.array([ 0.69774552,  0.40354227])
+        np.testing.assert_array_almost_equal(reg.ak_test, ak_test, 7)   
+
+    def test_names(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        name_x = ['inc']
+        name_y = 'crime'
+        name_yend = ['hoval']
+        name_q = ['discbd']
+        name_w = 'queen'
+        name_gwk = 'k=5'
+        name_ds = 'columbus'
+        name_regimes= 'nsa'
+        reg = TSLS_Regimes(self.y, self.x, self.yd, self.q, self.regimes, regime_err_sep=False,
+                spat_diag=True, w=w, robust='hac', gwk=gwk, name_regimes=name_regimes,
+                name_x=name_x, name_y=name_y, name_q=name_q, name_w=name_w,
+                name_yend=name_yend, name_gwk=name_gwk, name_ds=name_ds)
+        betas = np.array([[ 80.23408166],[  5.48218125],[ 82.98396737],[  0.49775429],[ -3.72663211],[ -1.27451485]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 522.75813101,  120.64940697,  -15.60303241,   -0.976389  ,\
+         -67.15556574,    0.64553579],\
+       [ 122.83491674,  122.62303068,   -5.52270916,    0.05023488,\
+         -57.89404902,    0.15750428],\
+       [   0.1983661 ,   -0.03539147,  335.24731378,   17.40764168,\
+          -0.26447114,  -14.3375455 ],\
+       [  -0.13612426,   -0.43622084,   18.46644989,    2.70320508,\
+           0.20398876,   -1.31821991],\
+       [ -68.0704928 ,  -58.03685405,    2.66225388,    0.00323082,\
+          27.68512974,   -0.08124602],\
+       [  -0.08001296,    0.13575504,  -14.6998294 ,   -1.28225201,\
+          -0.05193056,    0.79845124]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertEqual(reg.name_x, ['0_CONSTANT', '0_inc', '1_CONSTANT', '1_inc'])
+        self.assertEqual(reg.name_yend, ['0_hoval', '1_hoval'])
+        self.assertEqual(reg.name_q, ['0_discbd', '1_discbd'])
+        self.assertEqual(reg.name_y, name_y)
+        self.assertEqual(reg.name_w, name_w)
+        self.assertEqual(reg.name_gwk, name_gwk)
+        self.assertEqual(reg.name_ds, name_ds)
+        self.assertEqual(reg.name_regimes, name_regimes)
+    
+    def test_regi_err(self):
+        #Artficial:
+        n = 256
+        x1 = np.random.uniform(-10,10,(n,1))
+        x2 = np.random.uniform(1,5,(n,1))
+        q = x2 + np.random.normal(0,1,(n,1))
+        x = np.hstack((x1,x2))
+        y = np.dot(np.hstack((np.ones((n,1)),x)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        regi = [0]*(n/2) + [1]*(n/2)
+        model = TSLS_Regimes(y, x1, regimes=regi, q=q, yend=x2, regime_err_sep=True, sig2n_k=False)
+        model1 = TSLS(y[0:(n/2)].reshape((n/2),1), x1[0:(n/2)], yend=x2[0:(n/2)], q=q[0:(n/2)], sig2n_k=False)
+        model2 = TSLS(y[(n/2):n].reshape((n/2),1), x1[(n/2):n], yend=x2[(n/2):n], q=q[(n/2):n], sig2n_k=False)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+        #Columbus:
+        reg = TSLS_Regimes(self.y, self.x, regimes=self.regimes, yend=self.yd, q=self.q, regime_err_sep=False)
+        tbetas = np.array([[ 80.23408166],
+       [  5.48218125],
+       [ 82.98396737],
+       [  0.49775429],
+       [ -3.72663211],
+       [ -1.27451485],])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 495.16048523,   78.89742341,    0.        ,    0.        ,
+        -47.12971066,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        u_3 = np.array([[ 25.57676372],
+       [-17.94922796],
+       [-26.71588759]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[ -9.85078372],
+       [ 36.75098196],
+       [ 57.34266859]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        chow_regi = np.array([[ 0.00616179,  0.93743265],
+       [ 0.3447218 ,  0.55711631],
+       [ 0.37093662,  0.54249417]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 1.1353790779821029, 7)
+
+if __name__ == '__main__':
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)  
+    unittest.main()
+    np.set_printoptions(suppress=start_suppress)        
+    
diff --git a/pysal/spreg/tests/test_twosls_sp.py b/pysal/spreg/tests/test_twosls_sp.py
new file mode 100644
index 0000000..914aef7
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls_sp.py
@@ -0,0 +1,369 @@
+import unittest
+import numpy as np
+import pysal
+import pysal.spreg.diagnostics as D
+from pysal.spreg.twosls_sp import BaseGM_Lag, GM_Lag
+
+class TestBaseGMLag(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+        y = np.array(self.db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        
+    def test___init__(self):
+        w_lags = 2
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, w_lags, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, w_lags=w_lags)
+        betas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.        ,  19.531     ,  15.72598   ,  18.594     ,
+                            24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.h[0], h_0)
+        hth = np.  array([   49.        ,   704.371999  ,  1721.312371  ,   724.7435916 ,
+                             1707.35412945,   711.31248483,  1729.63201243])
+        np.testing.assert_array_almost_equal(reg.hth[0], hth, 7)
+        hthi = np.array([  7.33701328e+00,   2.27764882e-02,   2.18153588e-02,
+                           -5.11035447e-02,   1.22515181e-03,  -2.38079378e-01,
+                           -1.20149133e-01])
+        np.testing.assert_array_almost_equal(reg.hthi[0], hthi, 7)
+        self.assertEqual(reg.k, 4)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 38.436224469387746, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([ 80.5588479 ,  -1.06625281,  -0.61703759,  -1.10071931]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2[0], pfora1a2, 7)
+        predy_5 = np.array([[ 50.87411532],[ 50.76969931],[ 41.77223722],[ 33.44262382],[ 28.77418036]])
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([ 18.594     ,  24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.q[0], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 234.54258763039289, 7)
+        self.assertAlmostEqual(reg.sig2n, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.sig2, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.std_y, 18.466069465206047, 7)
+        u_5 = np.array( [[ 29.59288768], [ -6.20269831], [-15.42223722], [ -0.24262282], [ -5.54918036]])
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 10554.41644336768, 7)
+        varb = np.array( [[  1.48966377e+00, -2.28698061e-02, -1.20217386e-02, -1.85763498e-02],
+                          [ -2.28698061e-02,  1.27893998e-03,  2.74600023e-04, -1.33497705e-04],
+                          [ -1.20217386e-02,  2.74600023e-04,  1.54257766e-04,  6.86851184e-05],
+                          [ -1.85763498e-02, -1.33497705e-04,  6.86851184e-05,  4.67711582e-04]])
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[  3.20867996e+02, -4.92607057e+00, -2.58943746e+00, -4.00127615e+00],
+                       [ -4.92607057e+00,  2.75478880e-01,  5.91478163e-02, -2.87549056e-02],
+                       [ -2.58943746e+00,  5.91478163e-02,  3.32265449e-02,  1.47945172e-02],
+                       [ -4.00127615e+00, -2.87549056e-02,  1.47945172e-02,  1.00743323e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        x_0 = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0], x_0, 7)
+        y_5 = np.array( [[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]])
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array( [[ 35.4585005 ], [ 46.67233467], [ 45.36475125], [ 32.81675025], [ 30.81785714]])
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005]) 
+        np.testing.assert_array_almost_equal(reg.z[0], z_0, 7)
+        zthhthi = np.array( [[  1.00000000e+00, -2.22044605e-16, -2.22044605e-16 , 2.22044605e-16,
+                                4.44089210e-16,  0.00000000e+00, -8.88178420e-16],
+                             [  0.00000000e+00,  1.00000000e+00, -3.55271368e-15 , 3.55271368e-15,
+                               -7.10542736e-15,  7.10542736e-14,  0.00000000e+00],
+                             [  1.81898940e-12,  2.84217094e-14,  1.00000000e+00 , 0.00000000e+00,
+                               -2.84217094e-14,  5.68434189e-14,  5.68434189e-14],
+                             [ -8.31133940e+00, -3.76104678e-01, -2.07028208e-01 , 1.32618931e+00,
+                               -8.04284562e-01,  1.30527047e+00,  1.39136816e+00]])
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+
+    def test_init_white_(self):
+        w_lags = 2
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, w_lags, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        base_gm_lag = BaseGM_Lag(self.y, self.X,  yend=yd2, q=q2, w=self.w.sparse, w_lags=w_lags, robust='white')
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 20.47077481, 0.50613931, 0.20138425, 0.38028295 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_hac_(self):
+        w_lags = 2
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, w_lags, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)        
+        base_gm_lag = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, w_lags=w_lags, robust='hac', gwk=gwk)
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 19.08513569,   0.51769543,   0.18244862,   0.35460553])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_discbd(self):
+        w_lags = 2
+        X = np.array(self.db.by_col("INC"))
+        self.X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, yd, q, w_lags, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = BaseGM_Lag(self.y, self.X, w=self.w.sparse, yend=yd2, q=q2, w_lags=w_lags)
+        tbetas = np.array([[ 100.79359082], [  -0.50215501], [  -1.14881711], [  -0.38235022]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_n_k(self):
+        w_lags = 2
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, w_lags, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, w_lags=w_lags, sig2n_k=True)
+        betas = np.  array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  3.49389596e+02, -5.36394351e+00, -2.81960968e+00, -4.35694515e+00],
+                         [ -5.36394351e+00,  2.99965892e-01,  6.44054000e-02, -3.13108972e-02],
+                         [ -2.81960968e+00,  6.44054000e-02,  3.61800155e-02,  1.61095854e-02],
+                         [ -4.35694515e+00, -3.13108972e-02,  1.61095854e-02,  1.09698285e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_lag_q(self):
+        w_lags = 2
+        X = np.array(self.db.by_col("INC"))
+        self.X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, yd, q, w_lags, False)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        reg = BaseGM_Lag(self.y, self.X, w=self.w.sparse, yend=yd2, q=q2, w_lags=w_lags, lag_q=False)
+        tbetas = np.array( [[ 108.83261383], [  -0.48041099], [  -1.18950006], [  -0.56140186]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 58.33203837,   1.09100446,   0.62315167,   0.68088777])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+
+
+class TestGMLag(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+        y = np.array(self.db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        
+    def test___init__(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        reg = GM_Lag(self.y, self.X, w=self.w, w_lags=2)
+        betas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        e_5 = np.array( [[ 29.28976367], [ -6.07439501], [-15.30080685], [ -0.41773375], [ -5.67197968]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:5], e_5, 7)
+        h_0 = np.array([  1.        ,  19.531     ,  15.72598   ,  18.594     ,
+                            24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.h[0], h_0)
+        hth = np.  array([   49.        ,   704.371999  ,  1721.312371  ,   724.7435916 ,
+                             1707.35412945,   711.31248483,  1729.63201243])
+        np.testing.assert_array_almost_equal(reg.hth[0], hth, 7)
+        hthi = np.array([  7.33701328e+00,   2.27764882e-02,   2.18153588e-02,
+                           -5.11035447e-02,   1.22515181e-03,  -2.38079378e-01,
+                           -1.20149133e-01])
+        np.testing.assert_array_almost_equal(reg.hthi[0], hthi, 7)
+        self.assertEqual(reg.k, 4)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 38.436224469387746, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([ 80.5588479 ,  -1.06625281,  -0.61703759,  -1.10071931]) 
+        self.assertAlmostEqual(reg.pr2, 0.3551928222612527, 7)
+        self.assertAlmostEqual(reg.pr2_e, 0.34763857386174174, 7)
+        np.testing.assert_array_almost_equal(reg.pfora1a2[0], pfora1a2, 7)
+        predy_5 = np.array([[ 50.87411532],[ 50.76969931],[ 41.77223722],[ 33.44262382],[ 28.77418036]])
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        predy_e_5 = np.array( [[ 51.17723933], [ 50.64139601], [ 41.65080685], [ 33.61773475], [ 28.89697968]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:5], predy_e_5, 7)
+        q_5 = np.array([ 18.594     ,  24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.q[0], q_5)
+        self.assertEqual(reg.robust, 'unadjusted')
+        self.assertAlmostEqual(reg.sig2n_k, 234.54258763039289, 7)
+        self.assertAlmostEqual(reg.sig2n, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.sig2, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.std_y, 18.466069465206047, 7)
+        u_5 = np.array( [[ 29.59288768], [ -6.20269831], [-15.42223722], [ -0.24262282], [ -5.54918036]])
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 10554.41644336768, 7)
+        varb = np.array( [[  1.48966377e+00, -2.28698061e-02, -1.20217386e-02, -1.85763498e-02],
+                          [ -2.28698061e-02,  1.27893998e-03,  2.74600023e-04, -1.33497705e-04],
+                          [ -1.20217386e-02,  2.74600023e-04,  1.54257766e-04,  6.86851184e-05],
+                          [ -1.85763498e-02, -1.33497705e-04,  6.86851184e-05,  4.67711582e-04]])
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[  3.20867996e+02, -4.92607057e+00, -2.58943746e+00, -4.00127615e+00],
+                       [ -4.92607057e+00,  2.75478880e-01,  5.91478163e-02, -2.87549056e-02],
+                       [ -2.58943746e+00,  5.91478163e-02,  3.32265449e-02,  1.47945172e-02],
+                       [ -4.00127615e+00, -2.87549056e-02,  1.47945172e-02,  1.00743323e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        x_0 = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x[0], x_0, 7)
+        y_5 = np.array( [[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]])
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array( [[ 35.4585005 ], [ 46.67233467], [ 45.36475125], [ 32.81675025], [ 30.81785714]])
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005]) 
+        np.testing.assert_array_almost_equal(reg.z[0], z_0, 7)
+        zthhthi = np.array( [[  1.00000000e+00, -2.22044605e-16, -2.22044605e-16 , 2.22044605e-16,
+                                4.44089210e-16,  0.00000000e+00, -8.88178420e-16],
+                             [  0.00000000e+00,  1.00000000e+00, -3.55271368e-15 , 3.55271368e-15,
+                               -7.10542736e-15,  7.10542736e-14,  0.00000000e+00],
+                             [  1.81898940e-12,  2.84217094e-14,  1.00000000e+00 , 0.00000000e+00,
+                               -2.84217094e-14,  5.68434189e-14,  5.68434189e-14],
+                             [ -8.31133940e+00, -3.76104678e-01, -2.07028208e-01 , 1.32618931e+00,
+                               -8.04284562e-01,  1.30527047e+00,  1.39136816e+00]])
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+
+    def test_init_white_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        base_gm_lag = GM_Lag(self.y, self.X, w=self.w, w_lags=2, robust='white')
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 20.47077481, 0.50613931, 0.20138425, 0.38028295 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_hac_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)        
+        base_gm_lag = GM_Lag(self.y, self.X, w=self.w, w_lags=2, robust='hac', gwk=gwk)
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 19.08513569,   0.51769543,   0.18244862,   0.35460553])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_discbd(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag(self.y, X, w=self.w, yend=yd, q=q, w_lags=2)
+        tbetas = np.array([[ 100.79359082], [  -0.50215501], [  -1.14881711], [  -0.38235022]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_n_k(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        reg = GM_Lag(self.y, self.X, w=self.w, w_lags=2, sig2n_k=True)
+        betas = np.  array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  3.49389596e+02, -5.36394351e+00, -2.81960968e+00, -4.35694515e+00],
+                         [ -5.36394351e+00,  2.99965892e-01,  6.44054000e-02, -3.13108972e-02],
+                         [ -2.81960968e+00,  6.44054000e-02,  3.61800155e-02,  1.61095854e-02],
+                         [ -4.35694515e+00, -3.13108972e-02,  1.61095854e-02,  1.09698285e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_lag_q(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag(self.y, X, w=self.w, yend=yd, q=q, w_lags=2, lag_q=False)
+        tbetas = np.array( [[ 108.83261383], [  -0.48041099], [  -1.18950006], [  -0.56140186]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 58.33203837,   1.09100446,   0.62315167,   0.68088777])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_spatial(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        reg = GM_Lag(self.y, X, yd, q, spat_diag=True, w=w)
+        betas = np.array([[  5.46344924e+01], [  4.13301682e-01], [ -5.92637442e-01], [ -7.40490883e-03]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  4.45202654e+02, -1.50290275e+01, -6.36557072e+00, -5.71403440e-03],
+                        [ -1.50290275e+01,  5.93124683e-01,  2.19169508e-01, -6.70675916e-03],
+                        [ -6.36557072e+00,  2.19169508e-01,  1.06577542e-01, -2.96533875e-03],
+                        [ -5.71403440e-03, -6.70675916e-03, -2.96533875e-03,  1.15655425e-03]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        ak_test = np.array([ 2.52597326,  0.11198567])
+        np.testing.assert_array_almost_equal(reg.ak_test, ak_test, 7)
+
+    def test_names(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        name_x = ['inc']
+        name_y = 'crime'
+        name_yend = ['crime']
+        name_q = ['discbd']
+        name_w = 'queen'
+        name_gwk = 'k=5'
+        name_ds = 'columbus'
+        reg = GM_Lag(self.y, X, yd, q,
+                spat_diag=True, w=w, robust='hac', gwk=gwk,
+                name_x=name_x, name_y=name_y, name_q=name_q, name_w=name_w,
+                name_yend=name_yend, name_gwk=name_gwk, name_ds=name_ds)
+        betas = np.array([[  5.46344924e+01], [  4.13301682e-01], [ -5.92637442e-01], [ -7.40490883e-03]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  5.70817052e+02, -1.83655385e+01, -8.36602575e+00,  2.37538877e-02],
+                        [ -1.85224661e+01,  6.53311383e-01,  2.84209566e-01, -6.47694160e-03],
+                        [ -8.31105622e+00,  2.78772694e-01,  1.38144928e-01, -3.98175246e-03],
+                        [  2.66662466e-02, -6.23783104e-03, -4.11092891e-03,  1.10936528e-03]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        self.assertListEqual(reg.name_x, ['CONSTANT']+name_x)
+        name_yend.append('W_crime')
+        self.assertListEqual(reg.name_yend, name_yend)
+        name_q.extend(['W_inc', 'W_discbd'])
+        self.assertListEqual(reg.name_q, name_q)
+        self.assertEqual(reg.name_y, name_y)
+        self.assertEqual(reg.name_w, name_w)
+        self.assertEqual(reg.name_gwk, name_gwk)
+        self.assertEqual(reg.name_ds, name_ds)
+
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_twosls_sp_regimes.py b/pysal/spreg/tests/test_twosls_sp_regimes.py
new file mode 100644
index 0000000..b311b17
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls_sp_regimes.py
@@ -0,0 +1,352 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.twosls_sp_regimes import GM_Lag_Regimes
+from pysal.spreg import utils
+from pysal.spreg.twosls_sp import GM_Lag
+
+class TestGMLag_Regimes(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+        y = np.array(self.db.by_col("CRIME"))
+        self.y = np.reshape(y, (49,1))
+        self.r_var = 'NSA'
+        self.regimes = self.db.by_col(self.r_var)
+
+    def test___init__(self):
+        #Matches SpaceStat
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("HOVAL"))
+        self.X = np.array(X).T
+        reg = GM_Lag_Regimes(self.y, self.X, self.regimes, w=self.w, sig2n_k=True, regime_lag_sep=False, regime_err_sep=False) 
+        betas = np.array([[ 45.14892906],
+       [ -1.42593383],
+       [ -0.11501037],
+       [ 40.99023016],
+       [ -0.81498302],
+       [ -0.28391409],
+       [  0.4736163 ]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        e_5 = np.array([[ -1.47960519],
+       [ -7.93748769],
+       [ -5.88561835],
+       [-13.37941105],
+       [  5.2524303 ]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:5], e_5, 7)
+        h_0 = np.array([[  0.       ,   0.       ,   0.       ,   1.       ,  19.531    ,
+         80.467003 ,   0.       ,   0.       ,  18.594    ,  35.4585005]])
+        np.testing.assert_array_almost_equal(reg.h[0]*np.eye(10), h_0)
+        self.assertEqual(reg.k, 7)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        self.assertEqual(reg.n, 49)
+        self.assertAlmostEqual(reg.pr2, 0.6572182131915739, 7)
+        self.assertAlmostEqual(reg.pr2_e, 0.5779687278635434, 7)
+        pfora1a2 = np.array([ -2.15017629,  -0.30169328,  -0.07603704, -22.06541809,
+         0.45738058,   0.02805828,   0.39073923]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2[0], pfora1a2, 7)
+        predy_5 = np.array([[ 13.93216104],
+       [ 23.46424269],
+       [ 34.43510955],
+       [ 44.32473878],
+       [ 44.39117516]])
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        predy_e_5 = np.array([[ 17.20558519],
+       [ 26.73924169],
+       [ 36.51239935],
+       [ 45.76717105],
+       [ 45.4790797 ]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:5], predy_e_5, 7)
+        q_5 = np.array([[  0.       ,   0.       ,  18.594    ,  35.4585005]])
+        np.testing.assert_array_almost_equal(reg.q[0]*np.eye(4), q_5)
+        self.assertEqual(reg.robust, 'unadjusted')
+        self.assertAlmostEqual(reg.sig2n_k, 109.76462904625834, 7)
+        self.assertAlmostEqual(reg.sig2n, 94.08396775393571, 7)
+        self.assertAlmostEqual(reg.sig2, 109.76462904625834, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[  1.79381896],
+       [ -4.66248869],
+       [ -3.80832855],
+       [-11.93697878],
+       [  6.34033484]])
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 4610.11441994285, 7)
+        varb = np.array([  1.23841820e+00,  -3.65620114e-02,  -1.21919663e-03,
+         1.00057547e+00,  -2.07403182e-02,  -1.27232693e-03,
+        -1.77184084e-02])
+        np.testing.assert_array_almost_equal(reg.varb[0], varb, 7)
+        vm = np.array([  1.35934514e+02,  -4.01321561e+00,  -1.33824666e-01,
+         1.09827796e+02,  -2.27655334e+00,  -1.39656494e-01,
+        -1.94485452e+00])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        x_0 = np.array([[  0.      ,   0.      ,   0.      ,   1.      ,  19.531   ,
+         80.467003]])
+        np.testing.assert_array_almost_equal(reg.x[0]*np.eye(6), x_0, 7)
+        y_5 = np.array([[ 15.72598 ],
+       [ 18.801754],
+       [ 30.626781],
+       [ 32.38776 ],
+       [ 50.73151 ]])
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array([[ 24.7142675 ],
+       [ 26.24684033],
+       [ 29.411751  ],
+       [ 34.64647575],
+       [ 40.4653275 ]])
+        np.testing.assert_array_almost_equal(reg.yend[0:5]*np.array([[1]]), yend_5, 7)
+        z_0 = np.array([[  0.       ,   0.       ,   0.       ,   1.       ,  19.531    ,
+         80.467003 ,  24.7142675]]) 
+        np.testing.assert_array_almost_equal(reg.z[0]*np.eye(7), z_0, 7)
+        zthhthi = np.array([  1.00000000e+00,  -2.35922393e-16,   5.55111512e-17,
+         0.00000000e+00,   0.00000000e+00,   0.00000000e+00,
+        -4.44089210e-16,   2.22044605e-16,   0.00000000e+00,
+         0.00000000e+00])
+        np.testing.assert_array_almost_equal(reg.zthhthi[0], zthhthi, 7)
+        chow_regi = np.array([[ 0.19692667,  0.65721307],
+       [ 0.5666492 ,  0.45159351],
+       [ 0.45282066,  0.5009985 ]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.82409867601863462, 7)
+    
+    def test_init_discbd(self):
+        #Matches SpaceStat.
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("HOVAL"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag_Regimes(self.y, X, self.regimes, yend=yd, q=q, lag_q=False, w=self.w, sig2n_k=True, regime_lag_sep=False, regime_err_sep=False) 
+        tbetas = np.array([[ 42.7266306 ],
+       [ -0.15552345],
+       [ 37.70545276],
+       [ -0.5341577 ],
+       [ -0.68305796],
+       [ -0.37106077],
+       [  0.55809516]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 270.62979422,    3.62539081,  327.89638627,    6.24949355,
+         -5.25333106,   -6.01743515,   -4.19290074])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        e_3 = np.array([[-0.33142796],
+       [-9.51719607],
+       [-7.86272153]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:3], e_3, 7)
+        u_3 = np.array([[ 4.51839601],
+       [-5.67363147],
+       [-5.1927562 ]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[ 11.20758399],
+       [ 24.47538547],
+       [ 35.8195372 ]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        predy_e_3 = np.array([[ 16.05740796],
+       [ 28.31895007],
+       [ 38.48950253]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:3], predy_e_3, 7)
+        chow_regi = np.array([[ 0.13130991,  0.71707772],
+       [ 0.04740966,  0.82763357],
+       [ 0.15474413,  0.6940423 ]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.31248100032096549, 7)
+    
+    def test_lag_q(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("HOVAL"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag_Regimes(self.y, X, self.regimes, yend=yd, q=q, w=self.w, sig2n_k=True, regime_lag_sep=False, regime_err_sep=False) 
+        tbetas = np.array([[ 37.87698329],
+       [ -0.89426982],
+       [ 31.4714777 ],
+       [ -0.71640525],
+       [ -0.28494432],
+       [ -0.2294271 ],
+       [  0.62996544]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 128.25714554,   -0.38975354,   95.7271044 ,   -1.8429218 ,
+         -1.75331978,   -0.18240338,   -1.67767464])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        chow_regi = np.array([[ 0.43494049,  0.50957463],
+       [ 0.02089281,  0.88507135],
+       [ 0.01180501,  0.91347943]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.54288190938307757, 7)
+    
+    def test_all_regi(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("HOVAL"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag_Regimes(self.y, X, self.regimes, yend=yd, q=q, w=self.w, regime_lag_sep=False, regime_err_sep=True) 
+        tbetas = np.array([[ 37.87698329,  -0.89426982,  31.4714777 ,  -0.71640525,
+         -0.28494432,  -0.2294271 ,   0.62996544]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas.T)
+        vm = np.array([ 70.38291551,  -0.64868787,  49.25453215,  -0.62851534,
+        -0.75413453,  -0.12674433,  -0.97179236])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        e_3 = np.array([[-2.66997799],
+       [-7.69786264],
+       [-4.39412782]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:3], e_3, 7)
+        u_3 = np.array([[ 1.13879007],
+       [-3.76873198],
+       [-1.89671717]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[ 14.58718993],
+       [ 22.57048598],
+       [ 32.52349817]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        predy_e_3 = np.array([[ 18.39595799],
+       [ 26.49961664],
+       [ 35.02090882]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:3], predy_e_3, 7)
+        chow_regi = np.array([[ 0.60091096,  0.43823066],
+       [ 0.03006744,  0.8623373 ],
+       [ 0.01943727,  0.88912016]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.88634854058300516, 7)
+    
+    def test_all_regi_sig2(self):
+        #Artficial:
+        n = 256
+        x1 = np.random.uniform(-10,10,(n,1))
+        x2 = np.random.uniform(1,5,(n,1))
+        q = x2 + np.random.normal(0,1,(n,1))
+        x = np.hstack((x1,x2))
+        y = np.dot(np.hstack((np.ones((n,1)),x)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
+        latt = int(np.sqrt(n))
+        w = pysal.lat2W(latt,latt)
+        w.transform='r'
+        regi = [0]*(n/2) + [1]*(n/2)
+        model = GM_Lag_Regimes(y, x1, regi, q=q, yend=x2, w=w, regime_lag_sep=True, regime_err_sep=True)
+        w1 = pysal.lat2W(latt/2,latt)
+        w1.transform='r'
+        model1 = GM_Lag(y[0:(n/2)].reshape((n/2),1), x1[0:(n/2)], yend=x2[0:(n/2)], q=q[0:(n/2)], w=w1)
+        model2 = GM_Lag(y[(n/2):n].reshape((n/2),1), x1[(n/2):n], yend=x2[(n/2):n], q=q[(n/2):n], w=w1)
+        tbetas = np.vstack((model1.betas, model2.betas))
+        np.testing.assert_array_almost_equal(model.betas,tbetas)
+        vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
+        np.testing.assert_array_almost_equal(model.vm.diagonal(), vm, 6)
+        #Columbus:
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("HOVAL"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag_Regimes(self.y, X, self.regimes, yend=yd, q=q, w=self.w,regime_lag_sep=True, regime_err_sep = True) 
+        tbetas = np.array([[ 42.35827477],
+       [ -0.09472413],
+       [ -0.68794223],
+       [  0.54482537],
+       [ 32.24228762],
+       [ -0.12304063],
+       [ -0.46840307],
+       [  0.67108156]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 200.92894859,    4.56244927,   -4.85603079,   -2.9755413 ,
+          0.        ,    0.        ,    0.        ,    0.        ])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        e_3 = np.array([[ -1.32209547],
+       [-13.15611199],
+       [-11.62357696]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:3], e_3, 7)
+        u_3 = np.array([[ 6.99250069],
+       [-7.5665856 ],
+       [-7.04753328]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[  8.73347931],
+       [ 26.3683396 ],
+       [ 37.67431428]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        predy_e_3 = np.array([[ 17.04807547],
+       [ 31.95786599],
+       [ 42.25035796]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:3], predy_e_3, 7)
+        chow_regi = np.array([[  1.51825373e-01,   6.96797034e-01],
+       [  3.20105698e-04,   9.85725412e-01],
+       [  8.58836996e-02,   7.69476896e-01],
+       [  1.01357290e-01,   7.50206873e-01]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.38417230022512161, 7)
+
+    def test_fixed_const(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("HOVAL"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag_Regimes(self.y, X, self.regimes, yend=yd, q=q, w=self.w, constant_regi='one', regime_lag_sep=False, regime_err_sep=False) 
+        tbetas = np.array([[ -0.37658823],
+       [ -0.9666079 ],
+       [ 35.5445944 ],
+       [ -0.45793559],
+       [ -0.24216904],
+       [  0.62500602]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        vm = np.array([ 1.4183697 , -0.05975784, -0.27161863, -0.62517245,  0.02266177,
+        0.00312976])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        e_3 = np.array([[ 0.17317815],
+       [-5.53766328],
+       [-3.82889307]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:3], e_3, 7)
+        u_3 = np.array([[ 3.10025518],
+       [-1.83150689],
+       [-1.49598494]])
+        np.testing.assert_array_almost_equal(reg.u[0:3], u_3, 7)
+        predy_3 = np.array([[ 12.62572482],
+       [ 20.63326089],
+       [ 32.12276594]])
+        np.testing.assert_array_almost_equal(reg.predy[0:3], predy_3, 7)
+        predy_e_3 = np.array([[ 15.55280185],
+       [ 24.33941728],
+       [ 34.45567407]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:3], predy_e_3, 7)
+        chow_regi = np.array([[  1.85767047e-01,   6.66463269e-01],
+       [  1.19445012e+01,   5.48089036e-04]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 12.017256217621382, 7)
+
+    def test_names(self):
+        y_var = 'CRIME'
+        x_var = ['INC']
+        x = np.array([self.db.by_col(name) for name in x_var]).T
+        yd_var = ['HOVAL']
+        yd = np.array([self.db.by_col(name) for name in yd_var]).T
+        q_var = ['DISCBD']
+        q = np.array([self.db.by_col(name) for name in q_var]).T
+        r_var = 'NSA'
+        reg = GM_Lag_Regimes(self.y, x, self.regimes, yend=yd, q=q, w=self.w, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='columbus', name_w='columbus.gal', regime_lag_sep=False, regime_err_sep=False)
+        betas = np.array([[ 37.87698329],
+       [ -0.89426982],
+       [ 31.4714777 ],
+       [ -0.71640525],
+       [ -0.28494432],
+       [ -0.2294271 ],
+       [  0.62996544]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([ 109.93469618,   -0.33407447,   82.05180377,   -1.57964725,
+         -1.50284553,   -0.15634575,   -1.43800683])
+        np.testing.assert_array_almost_equal(reg.vm[0], vm, 6)
+        chow_regi = np.array([[ 0.50743058,  0.47625326],
+       [ 0.02437494,  0.87593468],
+       [ 0.01377251,  0.9065777 ]])
+        np.testing.assert_array_almost_equal(reg.chow.regi, chow_regi, 7)
+        self.assertAlmostEqual(reg.chow.joint[0], 0.63336222761359162, 7)
+        self.assertListEqual(reg.name_x, ['0_CONSTANT', '0_INC', '1_CONSTANT', '1_INC'])
+        self.assertListEqual(reg.name_yend, ['0_HOVAL', '1_HOVAL', '_Global_W_CRIME'])
+        self.assertListEqual(reg.name_q, ['0_DISCBD', '0_W_INC', '0_W_DISCBD', '1_DISCBD', '1_W_INC', '1_W_DISCBD'])
+        self.assertEqual(reg.name_y, y_var)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_twosls_sp_sparse.py b/pysal/spreg/tests/test_twosls_sp_sparse.py
new file mode 100644
index 0000000..e6669e0
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls_sp_sparse.py
@@ -0,0 +1,379 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.twosls_sp import BaseGM_Lag, GM_Lag
+import pysal.spreg.diagnostics as D
+from scipy import sparse as SP
+
+
+class TestBaseGMLag(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+        y = np.array(self.db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        
+    def test___init__(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 2, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2)
+        betas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.        ,  19.531     ,  15.72598   ,  18.594     ,
+                            24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.h.toarray()[0], h_0)
+        hth = np.array([   49.        ,   704.371999  ,  1721.312371  ,   724.7435916 ,
+                             1707.35412945,   711.31248483,  1729.63201243])
+        np.testing.assert_array_almost_equal(reg.hth[0], hth, 7)
+        hthi = np.array([  7.33701328e+00,   2.27764882e-02,   2.18153588e-02,
+                           -5.11035447e-02,   1.22515181e-03,  -2.38079378e-01,
+                           -1.20149133e-01])
+        np.testing.assert_array_almost_equal(reg.hthi[0], hthi, 7)
+        self.assertEqual(reg.k, 4)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 38.436224469387746, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([ 80.5588479 ,  -1.06625281,  -0.61703759,  -1.10071931]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2[0], pfora1a2, 7)
+        predy_5 = np.array([[ 50.87411532],[ 50.76969931],[ 41.77223722],[ 33.44262382],[ 28.77418036]])
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([ 18.594     ,  24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.q[0], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 234.54258763039289, 7)
+        self.assertAlmostEqual(reg.sig2n, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.sig2, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.std_y, 18.466069465206047, 7)
+        u_5 = np.array( [[ 29.59288768], [ -6.20269831], [-15.42223722], [ -0.24262282], [ -5.54918036]])
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 10554.41644336768, 7)
+        varb = np.array( [[  1.48966377e+00, -2.28698061e-02, -1.20217386e-02, -1.85763498e-02],
+                          [ -2.28698061e-02,  1.27893998e-03,  2.74600023e-04, -1.33497705e-04],
+                          [ -1.20217386e-02,  2.74600023e-04,  1.54257766e-04,  6.86851184e-05],
+                          [ -1.85763498e-02, -1.33497705e-04,  6.86851184e-05,  4.67711582e-04]])
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[  3.20867996e+02, -4.92607057e+00, -2.58943746e+00, -4.00127615e+00],
+                       [ -4.92607057e+00,  2.75478880e-01,  5.91478163e-02, -2.87549056e-02],
+                       [ -2.58943746e+00,  5.91478163e-02,  3.32265449e-02,  1.47945172e-02],
+                       [ -4.00127615e+00, -2.87549056e-02,  1.47945172e-02,  1.00743323e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        x_0 = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0], x_0, 7)
+        y_5 = np.array( [[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]])
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array( [[ 35.4585005 ], [ 46.67233467], [ 45.36475125], [ 32.81675025], [ 30.81785714]])
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005]) 
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0], z_0, 7)
+        zthhthi = np.array( [[  1.00000000e+00, -2.22044605e-16, -2.22044605e-16 , 2.22044605e-16,
+                                4.44089210e-16,  0.00000000e+00, -8.88178420e-16],
+                             [  0.00000000e+00,  1.00000000e+00, -3.55271368e-15 , 3.55271368e-15,
+                               -7.10542736e-15,  7.10542736e-14,  0.00000000e+00],
+                             [  1.81898940e-12,  2.84217094e-14,  1.00000000e+00 , 0.00000000e+00,
+                               -2.84217094e-14,  5.68434189e-14,  5.68434189e-14],
+                             [ -8.31133940e+00, -3.76104678e-01, -2.07028208e-01 , 1.32618931e+00,
+                               -8.04284562e-01,  1.30527047e+00,  1.39136816e+00]])
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+
+    def test_init_white_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 2, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        base_gm_lag = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2, robust='white')
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 20.47077481, 0.50613931, 0.20138425, 0.38028295 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_hac_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 2, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)        
+        base_gm_lag = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2, robust='hac', gwk=gwk)
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 19.08513569,   0.51769543,   0.18244862,   0.35460553])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_discbd(self):
+        X = np.array(self.db.by_col("INC"))
+        self.X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, yd, q, 2, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2)
+        tbetas = np.array([[ 100.79359082], [  -0.50215501], [  -1.14881711], [  -0.38235022]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_n_k(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 2, True)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2, sig2n_k=True)
+        betas = np.  array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  3.49389596e+02, -5.36394351e+00, -2.81960968e+00, -4.35694515e+00],
+                         [ -5.36394351e+00,  2.99965892e-01,  6.44054000e-02, -3.13108972e-02],
+                         [ -2.81960968e+00,  6.44054000e-02,  3.61800155e-02,  1.61095854e-02],
+                         [ -4.35694515e+00, -3.13108972e-02,  1.61095854e-02,  1.09698285e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_lag_q(self):
+        X = np.array(self.db.by_col("INC"))
+        self.X = np.reshape(X, (49,1))
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, yd, q, 2, False)
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        reg = BaseGM_Lag(self.y, self.X, yend=yd2, q=q2, w=self.w, w_lags=2, lag_q=False)
+        tbetas = np.array( [[ 108.83261383], [  -0.48041099], [  -1.18950006], [  -0.56140186]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 58.33203837,   1.09100446,   0.62315167,   0.68088777])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+
+
+class TestGMLag(unittest.TestCase):
+    def setUp(self):
+        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+        self.w.transform = 'r'
+        self.db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+        y = np.array(self.db.by_col("HOVAL"))
+        self.y = np.reshape(y, (49,1))
+        
+    def test___init__(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = SP.csr_matrix(self.X)
+        reg = GM_Lag(self.y, self.X, w=self.w, w_lags=2)
+        betas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        e_5 = np.array( [[ 29.28976367], [ -6.07439501], [-15.30080685], [ -0.41773375], [ -5.67197968]])
+        np.testing.assert_array_almost_equal(reg.e_pred[0:5], e_5, 7)
+        h_0 = np.array([  1.        ,  19.531     ,  15.72598   ,  18.594     ,
+                            24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.h.toarray()[0], h_0)
+        hth = np.  array([   49.        ,   704.371999  ,  1721.312371  ,   724.7435916 ,
+                             1707.35412945,   711.31248483,  1729.63201243])
+        np.testing.assert_array_almost_equal(reg.hth[0], hth, 7)
+        hthi = np.array([  7.33701328e+00,   2.27764882e-02,   2.18153588e-02,
+                           -5.11035447e-02,   1.22515181e-03,  -2.38079378e-01,
+                           -1.20149133e-01])
+        np.testing.assert_array_almost_equal(reg.hthi[0], hthi, 7)
+        self.assertEqual(reg.k, 4)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 38.436224469387746, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([ 80.5588479 ,  -1.06625281,  -0.61703759,  -1.10071931]) 
+        self.assertAlmostEqual(reg.pr2, 0.3551928222612527, 7)
+        self.assertAlmostEqual(reg.pr2_e, 0.34763857386174174, 7)
+        np.testing.assert_array_almost_equal(reg.pfora1a2[0], pfora1a2, 7)
+        predy_5 = np.array([[ 50.87411532],[ 50.76969931],[ 41.77223722],[ 33.44262382],[ 28.77418036]])
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        predy_e_5 = np.array( [[ 51.17723933], [ 50.64139601], [ 41.65080685], [ 33.61773475], [ 28.89697968]])
+        np.testing.assert_array_almost_equal(reg.predy_e[0:5], predy_e_5, 7)
+        q_5 = np.array([ 18.594     ,  24.7142675 ,  13.72216667,  27.82929567])
+        np.testing.assert_array_almost_equal(reg.q.toarray()[0], q_5)
+        self.assertEqual(reg.robust, 'unadjusted')
+        self.assertAlmostEqual(reg.sig2n_k, 234.54258763039289, 7)
+        self.assertAlmostEqual(reg.sig2n, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.sig2, 215.39625394627919, 7)
+        self.assertAlmostEqual(reg.std_y, 18.466069465206047, 7)
+        u_5 = np.array( [[ 29.59288768], [ -6.20269831], [-15.42223722], [ -0.24262282], [ -5.54918036]])
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 10554.41644336768, 7)
+        varb = np.array( [[  1.48966377e+00, -2.28698061e-02, -1.20217386e-02, -1.85763498e-02],
+                          [ -2.28698061e-02,  1.27893998e-03,  2.74600023e-04, -1.33497705e-04],
+                          [ -1.20217386e-02,  2.74600023e-04,  1.54257766e-04,  6.86851184e-05],
+                          [ -1.85763498e-02, -1.33497705e-04,  6.86851184e-05,  4.67711582e-04]])
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[  3.20867996e+02, -4.92607057e+00, -2.58943746e+00, -4.00127615e+00],
+                       [ -4.92607057e+00,  2.75478880e-01,  5.91478163e-02, -2.87549056e-02],
+                       [ -2.58943746e+00,  5.91478163e-02,  3.32265449e-02,  1.47945172e-02],
+                       [ -4.00127615e+00, -2.87549056e-02,  1.47945172e-02,  1.00743323e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        x_0 = np.array([  1.     ,  19.531  ,  15.72598])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0], x_0, 7)
+        y_5 = np.array( [[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]])
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array( [[ 35.4585005 ], [ 46.67233467], [ 45.36475125], [ 32.81675025], [ 30.81785714]])
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.       ,  19.531    ,  15.72598  ,  35.4585005]) 
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0], z_0, 7)
+        zthhthi = np.array( [[  1.00000000e+00, -2.22044605e-16, -2.22044605e-16 , 2.22044605e-16,
+                                4.44089210e-16,  0.00000000e+00, -8.88178420e-16],
+                             [  0.00000000e+00,  1.00000000e+00, -3.55271368e-15 , 3.55271368e-15,
+                               -7.10542736e-15,  7.10542736e-14,  0.00000000e+00],
+                             [  1.81898940e-12,  2.84217094e-14,  1.00000000e+00 , 0.00000000e+00,
+                               -2.84217094e-14,  5.68434189e-14,  5.68434189e-14],
+                             [ -8.31133940e+00, -3.76104678e-01, -2.07028208e-01 , 1.32618931e+00,
+                               -8.04284562e-01,  1.30527047e+00,  1.39136816e+00]])
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+
+    def test_init_white_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = SP.csr_matrix(self.X)
+        base_gm_lag = GM_Lag(self.y, self.X, w=self.w, w_lags=2, robust='white')
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 20.47077481, 0.50613931, 0.20138425, 0.38028295 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_hac_(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = SP.csr_matrix(self.X)
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)        
+        base_gm_lag = GM_Lag(self.y, self.X, w=self.w, w_lags=2, robust='hac', gwk=gwk)
+        tbetas = np.array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(base_gm_lag.betas, tbetas) 
+        dbetas = D.se_betas(base_gm_lag)
+        se_betas = np.array([ 19.08513569,   0.51769543,   0.18244862,   0.35460553])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_init_discbd(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        X = SP.csr_matrix(X)
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag(self.y, X, w=self.w, yend=yd, q=q, w_lags=2)
+        tbetas = np.array([[ 100.79359082], [  -0.50215501], [  -1.14881711], [  -0.38235022]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744 ])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_n_k(self):
+        X = []
+        X.append(self.db.by_col("INC"))
+        X.append(self.db.by_col("CRIME"))
+        self.X = np.array(X).T
+        self.X = SP.csr_matrix(self.X)
+        reg = GM_Lag(self.y, self.X, w=self.w, w_lags=2, sig2n_k=True)
+        betas = np.  array([[  4.53017056e+01], [  6.20888617e-01], [ -4.80723451e-01], [  2.83622122e-02]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  3.49389596e+02, -5.36394351e+00, -2.81960968e+00, -4.35694515e+00],
+                         [ -5.36394351e+00,  2.99965892e-01,  6.44054000e-02, -3.13108972e-02],
+                         [ -2.81960968e+00,  6.44054000e-02,  3.61800155e-02,  1.61095854e-02],
+                         [ -4.35694515e+00, -3.13108972e-02,  1.61095854e-02,  1.09698285e-01]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_lag_q(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        X = SP.csr_matrix(X)
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        reg = GM_Lag(self.y, X, w=self.w, yend=yd, q=q, w_lags=2, lag_q=False)
+        tbetas = np.array( [[ 108.83261383], [  -0.48041099], [  -1.18950006], [  -0.56140186]])
+        np.testing.assert_array_almost_equal(tbetas, reg.betas)
+        dbetas = D.se_betas(reg)
+        se_betas = np.array([ 58.33203837,   1.09100446,   0.62315167,   0.68088777])
+        np.testing.assert_array_almost_equal(dbetas, se_betas)
+
+    def test_spatial(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        X = SP.csr_matrix(X)
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        reg = GM_Lag(self.y, X, yd, q, spat_diag=True, w=w)
+        betas = np.array([[  5.46344924e+01], [  4.13301682e-01], [ -5.92637442e-01], [ -7.40490883e-03]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  4.45202654e+02, -1.50290275e+01, -6.36557072e+00, -5.71403440e-03],
+                        [ -1.50290275e+01,  5.93124683e-01,  2.19169508e-01, -6.70675916e-03],
+                        [ -6.36557072e+00,  2.19169508e-01,  1.06577542e-01, -2.96533875e-03],
+                        [ -5.71403440e-03, -6.70675916e-03, -2.96533875e-03,  1.15655425e-03]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        ak_test = np.array([ 2.52597326,  0.11198567])
+        np.testing.assert_array_almost_equal(reg.ak_test, ak_test, 7)
+
+    def test_names(self):
+        X = np.array(self.db.by_col("INC"))
+        X = np.reshape(X, (49,1))
+        X = SP.csr_matrix(X)
+        yd = np.array(self.db.by_col("CRIME"))
+        yd = np.reshape(yd, (49,1))
+        q = np.array(self.db.by_col("DISCBD"))
+        q = np.reshape(q, (49,1))
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        name_x = ['inc']
+        name_y = 'crime'
+        name_yend = ['crime']
+        name_q = ['discbd']
+        name_w = 'queen'
+        name_gwk = 'k=5'
+        name_ds = 'columbus'
+        reg = GM_Lag(self.y, X, yd, q,
+                spat_diag=True, w=w, robust='hac', gwk=gwk,
+                name_x=name_x, name_y=name_y, name_q=name_q, name_w=name_w,
+                name_yend=name_yend, name_gwk=name_gwk, name_ds=name_ds)
+        betas = np.array([[  5.46344924e+01], [  4.13301682e-01], [ -5.92637442e-01], [ -7.40490883e-03]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array( [[  5.70817052e+02, -1.83655385e+01, -8.36602575e+00,  2.37538877e-02],
+                        [ -1.85224661e+01,  6.53311383e-01,  2.84209566e-01, -6.47694160e-03],
+                        [ -8.31105622e+00,  2.78772694e-01,  1.38144928e-01, -3.98175246e-03],
+                        [  2.66662466e-02, -6.23783104e-03, -4.11092891e-03,  1.10936528e-03]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 6)
+        self.assertListEqual(reg.name_x, ['CONSTANT']+name_x)
+        name_yend.append('W_crime')
+        self.assertListEqual(reg.name_yend, name_yend)
+        name_q.extend(['W_inc', 'W_discbd'])
+        self.assertListEqual(reg.name_q, name_q)
+        self.assertEqual(reg.name_y, name_y)
+        self.assertEqual(reg.name_w, name_w)
+        self.assertEqual(reg.name_gwk, name_gwk)
+        self.assertEqual(reg.name_ds, name_ds)
+
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/pysal/spreg/tests/test_twosls_sparse.py b/pysal/spreg/tests/test_twosls_sparse.py
new file mode 100644
index 0000000..654dd5a
--- /dev/null
+++ b/pysal/spreg/tests/test_twosls_sparse.py
@@ -0,0 +1,258 @@
+import unittest
+import numpy as np
+import pysal
+from pysal.spreg.twosls import TSLS, BaseTSLS
+from scipy import sparse as SP
+
+
+class TestBaseTSLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+        self.y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(self.y, (49,1))
+        self.X = []
+        self.X.append(db.by_col("INC"))
+        self.X = np.array(self.X).T
+        self.X = np.hstack((np.ones(self.y.shape),self.X))
+        self.X = SP.csr_matrix(self.X)
+        self.yd = []
+        self.yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(self.yd).T
+        self.q = []
+        self.q.append(db.by_col("DISCBD"))
+        self.q = np.array(self.q).T
+
+    def test_basic(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h.toarray()[0], h_0)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+                        [   704.371999  ,  11686.67338121,   2246.12800625],
+                        [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth, hth, 7)
+        hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
+                        [-0.00762011,  0.00100135, -0.0023752 ],
+                        [-0.01044191, -0.0023752 ,  0.01563276]]) 
+        np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
+        self.assertEqual(reg.k, 3)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
+                             [ 0.02580142,  0.08226331, -0.03143731],
+                             [-3.13896453, -0.33487872,  0.20690965]]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
+        predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.26609504], [ 57.57145851]]) 
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
+        np.testing.assert_array_equal(reg.q[0:5], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
+        self.assertAlmostEqual(reg.sig2n, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.sig2, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[ 44.41547467], [-10.19309584], [-24.44666724], [ -5.87833504], [ -6.83994851]]) 
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 27028.127012241919, 7)
+        varb = np.array([[ 0.41526237,  0.01879906, -0.01730372],
+                         [ 0.01879906,  0.00362823, -0.00184604],
+                         [-0.01730372, -0.00184604,  0.0011406 ]]) 
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        x_0 = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0], x_0, 7)
+        y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]]) 
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array([[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]]) 
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.      ,  19.531   ,  80.467003]) 
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0], z_0, 7)
+        zthhthi = np.array([[  1.00000000e+00,  -1.66533454e-16,   4.44089210e-16],
+                            [  0.00000000e+00,   1.00000000e+00,   0.00000000e+00],
+                            [  1.26978671e+01,   1.05598709e+00,   3.70212359e+00]]) 
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+        
+    def test_n_k(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, sig2n_k=True)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 243.99486949,   11.04572682,  -10.16711028],
+                       [  11.04572682,    2.13183469,   -1.08467261],
+                       [ -10.16711028,   -1.08467261,    0.67018062]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_white(self):
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='white')
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 208.27139316,   15.6687805 ,  -11.53686154],
+                       [  15.6687805 ,    2.26882747,   -1.30312033],
+                       [ -11.53686154,   -1.30312033,    0.81940656]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_hac(self):
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=15,function='triangular', fixed=False)
+        reg = BaseTSLS(self.y, self.X, self.yd, self.q, robust='hac', gwk=gwk)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 231.07254978,   15.42050291,  -11.3941033 ],
+                       [  15.01376346,    1.92422887,   -1.11865505],
+                       [ -11.34381641,   -1.1279227 ,    0.72053806]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+class TestTSLS(unittest.TestCase):
+    def setUp(self):
+        db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+        self.y = np.array(db.by_col("CRIME"))
+        self.y = np.reshape(self.y, (49,1))
+        self.X = []
+        self.X.append(db.by_col("INC"))
+        self.X = np.array(self.X).T
+        self.X = SP.csr_matrix(self.X)
+        self.yd = []
+        self.yd.append(db.by_col("HOVAL"))
+        self.yd = np.array(self.yd).T
+        self.q = []
+        self.q.append(db.by_col("DISCBD"))
+        self.q = np.array(self.q).T
+
+    def test_basic(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        h_0 = np.array([  1.   ,  19.531,   5.03 ])
+        np.testing.assert_array_almost_equal(reg.h.toarray()[0], h_0)
+        hth = np.array([[    49.        ,    704.371999  ,    139.75      ],
+                        [   704.371999  ,  11686.67338121,   2246.12800625],
+                        [   139.75      ,   2246.12800625,    498.5851    ]])
+        np.testing.assert_array_almost_equal(reg.hth, hth, 7)
+        hthi = np.array([[ 0.1597275 , -0.00762011, -0.01044191],
+                        [-0.00762011,  0.00100135, -0.0023752 ],
+                        [-0.01044191, -0.0023752 ,  0.01563276]]) 
+        np.testing.assert_array_almost_equal(reg.hthi, hthi, 7)
+        self.assertEqual(reg.k, 3)
+        self.assertEqual(reg.kstar, 1)
+        self.assertAlmostEqual(reg.mean_y, 35.128823897959187, 7)
+        self.assertEqual(reg.n, 49)
+        pfora1a2 = np.array([[ 9.58156106, -0.22744226, -0.13820537],
+                             [ 0.02580142,  0.08226331, -0.03143731],
+                             [-3.13896453, -0.33487872,  0.20690965]]) 
+        np.testing.assert_array_almost_equal(reg.pfora1a2, pfora1a2, 7)
+        predy_5 = np.array([[-28.68949467], [ 28.99484984], [ 55.07344824], [ 38.26609504], [ 57.57145851]]) 
+        np.testing.assert_array_almost_equal(reg.predy[0:5], predy_5, 7)
+        q_5 = np.array([[ 5.03], [ 4.27], [ 3.89], [ 3.7 ], [ 2.83]])
+        np.testing.assert_array_equal(reg.q[0:5], q_5)
+        self.assertAlmostEqual(reg.sig2n_k, 587.56797852699822, 7)
+        self.assertAlmostEqual(reg.sig2n, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.sig2, 551.5944288212637, 7)
+        self.assertAlmostEqual(reg.std_y, 16.732092091229699, 7)
+        u_5 = np.array([[ 44.41547467], [-10.19309584], [-24.44666724], [ -5.87833504], [ -6.83994851]]) 
+        np.testing.assert_array_almost_equal(reg.u[0:5], u_5, 7)
+        self.assertAlmostEqual(reg.utu, 27028.127012241919, 7)
+        varb = np.array([[ 0.41526237,  0.01879906, -0.01730372],
+                         [ 0.01879906,  0.00362823, -0.00184604],
+                         [-0.01730372, -0.00184604,  0.0011406 ]]) 
+        np.testing.assert_array_almost_equal(reg.varb, varb, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        x_0 = np.array([  1.   ,  19.531])
+        np.testing.assert_array_almost_equal(reg.x.toarray()[0], x_0, 7)
+        y_5 = np.array([[ 15.72598 ], [ 18.801754], [ 30.626781], [ 32.38776 ], [ 50.73151 ]]) 
+        np.testing.assert_array_almost_equal(reg.y[0:5], y_5, 7)
+        yend_5 = np.array([[ 80.467003], [ 44.567001], [ 26.35    ], [ 33.200001], [ 23.225   ]]) 
+        np.testing.assert_array_almost_equal(reg.yend[0:5], yend_5, 7)
+        z_0 = np.array([  1.      ,  19.531   ,  80.467003]) 
+        np.testing.assert_array_almost_equal(reg.z.toarray()[0], z_0, 7)
+        zthhthi = np.array([[  1.00000000e+00,  -1.66533454e-16,   4.44089210e-16],
+                            [  0.00000000e+00,   1.00000000e+00,   0.00000000e+00],
+                            [  1.26978671e+01,   1.05598709e+00,   3.70212359e+00]]) 
+        np.testing.assert_array_almost_equal(reg.zthhthi, zthhthi, 7)
+        self.assertAlmostEqual(reg.pr2, 0.27936137128173893, 7)
+        z_stat = np.array([[  5.84526447e+00,   5.05764078e-09],
+                           [  3.67601567e-01,   7.13170346e-01],
+                           [ -1.99468913e+00,   4.60767956e-02]])
+        np.testing.assert_array_almost_equal(reg.z_stat, z_stat, 7)
+        title = 'TWO STAGE LEAST SQUARES'
+        self.assertEqual(reg.title, title)
+        
+    def test_n_k(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q, sig2n_k=True)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 243.99486949,   11.04572682,  -10.16711028],
+                       [  11.04572682,    2.13183469,   -1.08467261],
+                       [ -10.16711028,   -1.08467261,    0.67018062]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+
+    def test_white(self):
+        reg = TSLS(self.y, self.X, self.yd, self.q, robust='white')
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 208.27139316,   15.6687805 ,  -11.53686154],
+                       [  15.6687805 ,    2.26882747,   -1.30312033],
+                       [ -11.53686154,   -1.30312033,    0.81940656]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertEqual(reg.robust, 'white')
+
+    def test_hac(self):
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        reg = TSLS(self.y, self.X, self.yd, self.q, robust='hac', gwk=gwk)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 225.0795089 ,   17.11660041,  -12.22448566],
+                       [  17.67097154,    2.47483461,   -1.4183641 ],
+                       [ -12.45093722,   -1.40495464,    0.8700441 ]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertEqual(reg.robust, 'hac')
+
+    def test_spatial(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        reg = TSLS(self.y, self.X, self.yd, self.q, spat_diag=True, w=w)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 229.05640809,   10.36945783,   -9.54463414],
+                       [  10.36945783,    2.0013142 ,   -1.01826408],
+                       [  -9.54463414,   -1.01826408,    0.62914915]]) 
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        ak_test = np.array([ 1.16816972,  0.27977763])
+        np.testing.assert_array_almost_equal(reg.ak_test, ak_test, 7)
+
+    def test_names(self):
+        w = pysal.queen_from_shapefile(pysal.examples.get_path('columbus.shp'))
+        gwk = pysal.kernelW_from_shapefile(pysal.examples.get_path('columbus.shp'),k=5,function='triangular', fixed=False)
+        name_x = ['inc']
+        name_y = 'crime'
+        name_yend = ['hoval']
+        name_q = ['discbd']
+        name_w = 'queen'
+        name_gwk = 'k=5'
+        name_ds = 'columbus'
+        reg = TSLS(self.y, self.X, self.yd, self.q,
+                spat_diag=True, w=w, robust='hac', gwk=gwk,
+                name_x=name_x, name_y=name_y, name_q=name_q, name_w=name_w,
+                name_yend=name_yend, name_gwk=name_gwk, name_ds=name_ds)
+        betas = np.array([[ 88.46579584], [  0.5200379 ], [ -1.58216593]])
+        np.testing.assert_array_almost_equal(reg.betas, betas, 7)
+        vm = np.array([[ 225.0795089 ,   17.11660041,  -12.22448566],
+                       [  17.67097154,    2.47483461,   -1.4183641 ],
+                       [ -12.45093722,   -1.40495464,    0.8700441 ]])
+        np.testing.assert_array_almost_equal(reg.vm, vm, 7)
+        self.assertListEqual(reg.name_x, ['CONSTANT']+name_x)
+        self.assertListEqual(reg.name_yend, name_yend)
+        self.assertListEqual(reg.name_q, name_q)
+        self.assertEqual(reg.name_y, name_y)
+        self.assertEqual(reg.name_w, name_w)
+        self.assertEqual(reg.name_gwk, name_gwk)
+        self.assertEqual(reg.name_ds, name_ds)
+
+    
+
+
+if __name__ == '__main__':
+    # Allow the test suite to be executed directly from the command line.
+    unittest.main()
diff --git a/pysal/spreg/twosls.py b/pysal/spreg/twosls.py
new file mode 100644
index 0000000..8c85af0
--- /dev/null
+++ b/pysal/spreg/twosls.py
@@ -0,0 +1,475 @@
+import numpy as np
+import copy
+import numpy.linalg as la
+import summary_output as SUMMARY
+import robust as ROBUST
+import user_output as USER
+from utils import spdot, sphstack, RegressionPropsY, RegressionPropsVM
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, David C. Folch david.folch at asu.edu, Jing Yao jingyao at asu.edu"
+__all__ = ["TSLS"]
+
+
+class BaseTSLS(RegressionPropsY, RegressionPropsVM):
+
+    """
+    Two stage least squares (2SLS) (note: no consistency checks,
+    diagnostics or constant added)
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x); cannot be
+                   used in combination with h
+    h            : array
+                   Two dimensional array with n rows and one column for each
+                   exogenous variable to use as instruments (note: this 
+                   can contain variables from x); cannot be used in 
+                   combination with q
+    robust       : string
+                   If 'white', then a White consistent estimator of the
+                   variance-covariance matrix is given.  If 'hac', then a
+                   HAC consistent estimator of the variance-covariance
+                   matrix is given. Default set to None. 
+    gwk          : pysal W object
+                   Kernel spatial weights needed for HAC estimation. Note:
+                   matrix must have ones along the main diagonal.
+    sig2n_k      : boolean
+                   If True, then use n-k to estimate sigma^2. If False, use n.
+
+
+    Attributes
+    ----------
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    kstar        : integer
+                   Number of endogenous variables. 
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    utu          : float
+                   Sum of squared residuals
+    sig2         : float
+                   Sigma squared used in computations
+    sig2n        : float
+                   Sigma squared (computed with n in the denominator)
+    sig2n_k      : float
+                   Sigma squared (computed with n-k in the denominator)
+    hth          : float
+                   H'H
+    hthi         : float
+                   (H'H)^-1
+    varb         : array
+                   (Z'H (H'H)^-1 H'Z)^-1
+    zthhthi      : array
+                   Z'H(H'H)^-1
+    pfora1a2     : array
+                   n(zthhthi)'varb
+
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+    >>> X = np.hstack((np.ones(y.shape),X))
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+    >>> reg = BaseTSLS(y, X, yd, q=q)
+    >>> print reg.betas
+    [[ 88.46579584]
+     [  0.5200379 ]
+     [ -1.58216593]]
+    >>> reg = BaseTSLS(y, X, yd, q=q, robust="white")
+
+    """
+
+    def __init__(self, y, x, yend, q=None, h=None,
+                 robust=None, gwk=None, sig2n_k=False):
+
+        if issubclass(type(q), np.ndarray) and issubclass(type(h), np.ndarray):
+            raise Exception, "Please do not provide 'q' and 'h' together"
+        if q == None and h == None:
+            raise Exception, "Please provide either 'q' or 'h'"
+
+        self.y = y
+        self.n = y.shape[0]
+        self.x = x
+
+        self.kstar = yend.shape[1]
+        # including exogenous and endogenous variables
+        z = sphstack(self.x, yend)
+        if type(h).__name__ not in ['ndarray', 'csr_matrix']:
+            # including exogenous variables and instrument
+            h = sphstack(self.x, q)
+        self.z = z
+        self.h = h
+        self.q = q
+        self.yend = yend
+        # k = number of exogenous variables and endogenous variables
+        self.k = z.shape[1]
+        hth = spdot(h.T, h)
+        hthi = la.inv(hth)
+        zth = spdot(z.T, h)
+        hty = spdot(h.T, y)
+
+        factor_1 = np.dot(zth, hthi)
+        factor_2 = np.dot(factor_1, zth.T)
+        # this one needs to be in cache to be used in AK
+        varb = la.inv(factor_2)
+        factor_3 = np.dot(varb, factor_1)
+        betas = np.dot(factor_3, hty)
+        self.betas = betas
+        self.varb = varb
+        self.zthhthi = factor_1
+
+        # predicted values
+        self.predy = spdot(z, betas)
+
+        # residuals
+        u = y - self.predy
+        self.u = u
+
+        # attributes used in property
+        self.hth = hth     # Required for condition index
+        self.hthi = hthi   # Used in error models
+        self.htz = zth.T
+
+        if robust:
+            self.vm = ROBUST.robust_vm(reg=self, gwk=gwk, sig2n_k=sig2n_k)
+
+        self._cache = {}
+        if sig2n_k:
+            self.sig2 = self.sig2n_k
+        else:
+            self.sig2 = self.sig2n
+
+    @property
+    def pfora1a2(self):
+        if 'pfora1a2' not in self._cache:
+            self._cache['pfora1a2'] = self.n * \
+                np.dot(self.zthhthi.T, self.varb)
+        return self._cache['pfora1a2']
+
+    @property
+    def vm(self):
+        if 'vm' not in self._cache:
+            self._cache['vm'] = np.dot(self.sig2, self.varb)
+        return self._cache['vm']
+
+
+class TSLS(BaseTSLS):
+
+    """
+    Two stage least squares with results and diagnostics.
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x)
+    w            : pysal W object
+                   Spatial weights object (required if running spatial
+                   diagnostics)
+    robust       : string
+                   If 'white', then a White consistent estimator of the
+                   variance-covariance matrix is given.  If 'hac', then a
+                   HAC consistent estimator of the variance-covariance
+                   matrix is given. Default set to None. 
+    gwk          : pysal W object
+                   Kernel spatial weights needed for HAC estimation. Note:
+                   matrix must have ones along the main diagonal.
+    sig2n_k      : boolean
+                   If True, then use n-k to estimate sigma^2. If False, use n.
+    spat_diag    : boolean
+                   If True, then compute Anselin-Kelejian test (requires w)
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    predy        : array
+                   nx1 array of predicted y values
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    kstar        : integer
+                   Number of endogenous variables. 
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    robust       : string
+                   Adjustment for robust standard errors
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    utu          : float
+                   Sum of squared residuals
+    sig2         : float
+                   Sigma squared used in computations
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    ak_test      : tuple
+                   Anselin-Kelejian test; tuple contains the pair (statistic,
+                   p-value)
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_z       : list of strings
+                   Names of exogenous and endogenous variables for use in 
+                   output
+    name_q       : list of strings
+                   Names of external instruments
+    name_h       : list of strings
+                   Names of all instruments used in ouput
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+    sig2n        : float
+                   Sigma squared (computed with n in the denominator)
+    sig2n_k      : float
+                   Sigma squared (computed with n-k in the denominator)
+    hth          : float
+                   H'H
+    hthi         : float
+                   (H'H)^-1
+    varb         : array
+                   (Z'H (H'H)^-1 H'Z)^-1
+    zthhthi      : array
+                   Z'H(H'H)^-1
+    pfora1a2     : array
+                   n(zthhthi)'varb
+
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Extract the CRIME column (crime rates) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) vector from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in, but this can be overridden by passing
+    constant=False.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X = np.array(X).T
+
+    In this case we consider HOVAL (home value) is an endogenous regressor.
+    We tell the model that this is so by passing it in a different parameter
+    from the exogenous variables (x).
+
+    >>> yd = []
+    >>> yd.append(db.by_col("HOVAL"))
+    >>> yd = np.array(yd).T
+
+    Because we have endogenous variables, to obtain a correct estimate of the
+    model, we need to instrument for HOVAL. We use DISCBD (distance to the
+    CBD) for this and hence put it in the instruments parameter, 'q'.
+
+    >>> q = []
+    >>> q.append(db.by_col("DISCBD"))
+    >>> q = np.array(q).T
+
+    We are all set with the preliminars, we are good to run the model. In this
+    case, we will need the variables (exogenous and endogenous) and the
+    instruments. If we want to have the names of the variables printed in the
+    output summary, we will have to pass them in as well, although this is optional.
+
+    >>> reg = TSLS(y, X, yd, q, name_x=['inc'], name_y='crime', name_yend=['hoval'], name_q=['discbd'], name_ds='columbus')
+    >>> print reg.betas
+    [[ 88.46579584]
+     [  0.5200379 ]
+     [ -1.58216593]]
+
+    """
+
+    def __init__(self, y, x, yend, q,
+                 w=None,
+                 robust=None, gwk=None, sig2n_k=False,
+                 spat_diag=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_gwk=None, name_ds=None):
+
+        # Validate all user input before any estimation takes place.
+        n = USER.check_arrays(y, x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y)
+        USER.check_robust(robust, gwk)
+        USER.check_spat_diag(spat_diag, w)
+        # Prepend the constant column here; BaseTSLS never adds one itself.
+        x_constant = USER.check_constant(x)
+        BaseTSLS.__init__(self, y=y, x=x_constant, yend=yend, q=q,
+                          robust=robust, gwk=gwk, sig2n_k=sig2n_k)
+        self.title = "TWO STAGE LEAST SQUARES"
+        # Resolve display names (USER.set_* generate defaults when None).
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_z = self.name_x + self.name_yend
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.robust = USER.set_robust(robust)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.name_gwk = USER.set_name_w(name_gwk, gwk)
+        # Build the printable results/diagnostics string on self.summary.
+        SUMMARY.TSLS(reg=self, vm=vm, w=w, spat_diag=spat_diag)
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+
+if __name__ == '__main__':
+    _test()
+
+    # Demonstration run: 2SLS on the Columbus example data with CRIME as
+    # the dependent variable, INC exogenous, HOVAL endogenous and DISCBD as
+    # the external instrument; prints the full summary including spatial
+    # diagnostics based on a row-standardized rook weights matrix.
+    import numpy as np
+    import pysal
+    db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+    y_var = 'CRIME'
+    y = np.array([db.by_col(y_var)]).reshape(49, 1)
+    x_var = ['INC']
+    x = np.array([db.by_col(name) for name in x_var]).T
+    yd_var = ['HOVAL']
+    yd = np.array([db.by_col(name) for name in yd_var]).T
+    q_var = ['DISCBD']
+    q = np.array([db.by_col(name) for name in q_var]).T
+    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    w.transform = 'r'
+    tsls = TSLS(y, x, yd, q, w=w, spat_diag=True, name_y=y_var, name_x=x_var,
+                name_yend=yd_var, name_q=q_var, name_ds='columbus', name_w='columbus.gal')
+    print tsls.summary
diff --git a/pysal/spreg/twosls_regimes.py b/pysal/spreg/twosls_regimes.py
new file mode 100644
index 0000000..7a621db
--- /dev/null
+++ b/pysal/spreg/twosls_regimes.py
@@ -0,0 +1,515 @@
+import numpy as np
+import regimes as REGI
+import user_output as USER
+import multiprocessing as mp
+import scipy.sparse as SP
+from utils import sphstack, set_warn, RegressionProps_basic, spdot, sphstack
+from twosls import BaseTSLS
+from robust import hac_multi
+import summary_output as SUMMARY
+from platform import system
+
+"""
+Two-stage Least Squares estimation with regimes.
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu, David C. Folch david.folch at asu.edu"
+
+
class TSLS_Regimes(BaseTSLS, REGI.Regimes_Frame):

    """
    Two stage least squares (2SLS) with regimes

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note:
                   this should not contain any variables from x)
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: ['one', 'many']
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime (default)
    cols2regi    : list, 'all'
                   Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all' (default), all the variables vary by regime.
    regime_err_sep : boolean
                   If True, a separate regression is run for each regime.
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given.
                   If 'hac', then a HAC consistent estimator of the
                   variance-covariance matrix is given.
                   If 'ogmm', then Optimal GMM is used to estimate
                   betas and the variance-covariance matrix.
                   Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.
    vm           : boolean
                   If True, include variance-covariance matrix in summary
    cores        : boolean
                   Specifies if multiprocessing is to be used
                   Default: no multiprocessing, cores = False
                   Note: Multiprocessing may not work on all platforms.
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_q       : list of strings
                   Names of instruments for use in output
    name_regimes : string
                   Name of regimes variable for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_gwk     : string
                   Name of kernel weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output

    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments
                   Only available in dictionary 'multi' when multiple regressions
                   (see 'multi' below for details)
    vm           : array
                   Variance covariance matrix (kxk)
    regimes      : list
                   List of n values with the mapping of each
                   observation to a regime. Assumed to be aligned with 'x'.
    constant_regi: [False, 'one', 'many']
                   Ignored if regimes=False. Constant option for regimes.
                   Switcher controlling the constant term setup. It may take
                   the following values:
                     *  'one': a vector of ones is appended to x and held
                               constant across regimes
                     * 'many': a vector of ones is appended to x and considered
                               different per regime
    cols2regi    : list, 'all'
                   Ignored if regimes=False. Argument indicating whether each
                   column of x should be considered as different per regime
                   or held constant across regimes (False).
                   If a list, k booleans indicating for each variable the
                   option (True if one per regime, False to be held constant).
                   If 'all', all the variables vary by regime.
    regime_err_sep : boolean
                   If True, a separate regression is run for each regime.
    kr           : int
                   Number of variables/columns to be "regimized" or subject
                   to change by regime. These will result in one parameter
                   estimate by regime for each variable (i.e. nr parameters per
                   variable)
    kf           : int
                   Number of variables/columns to be considered fixed or
                   global across regimes and hence only obtain one parameter
                   estimate
    nr           : int
                   Number of different regimes in the 'regimes' list
    name_y       : string
                   Name of dependent variable for use in output
    name_x       : list of strings
                   Names of independent variables for use in output
    name_yend    : list of strings
                   Names of endogenous variables for use in output
    name_q       : list of strings
                   Names of instruments for use in output
    name_regimes : string
                   Name of regimes variable for use in output
    name_w       : string
                   Name of weights matrix for use in output
    name_gwk     : string
                   Name of kernel weights matrix for use in output
    name_ds      : string
                   Name of dataset for use in output
    multi        : dictionary
                   Only available when multiple regressions are estimated,
                   i.e. when regime_err_sep=True and no variable is fixed
                   across regimes.
                   Contains all attributes of each individual regression

    Examples
    --------

    We first need to import the needed modules, namely numpy to convert the
    data we read into arrays that ``spreg`` understands and ``pysal`` to
    perform all the analysis.

    >>> import numpy as np
    >>> import pysal

    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
    This is the DBF associated with the NAT shapefile.  Note that
    pysal.open() also reads data in CSV format; since the actual class
    requires data to be passed in as numpy arrays, the user can read their
    data in using any method.

    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')

    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
    dependent variable for the regression. Note that PySAL requires this to be
    an numpy array of shape (n, 1) as opposed to the also common shape of (n, )
    that other packages accept.

    >>> y_var = 'HR90'
    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)

    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
    the DBF to be used as independent variables in the regression. Other variables
    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2','...]
    Note that PySAL requires this to be an nxj numpy array, where j is the
    number of independent variables (not including a constant). By default
    this model adds a vector of ones to the independent variables passed in.

    >>> x_var = ['PS90','UE90']
    >>> x = np.array([db.by_col(name) for name in x_var]).T

    In this case we consider RD90 (resource deprivation) as an endogenous regressor.
    We tell the model that this is so by passing it in a different parameter
    from the exogenous variables (x).

    >>> yd_var = ['RD90']
    >>> yd = np.array([db.by_col(name) for name in yd_var]).T

    Because we have endogenous variables, to obtain a correct estimate of the
    model, we need to instrument for RD90. We use FP89 (families below poverty)
    for this and hence put it in the instruments parameter, 'q'.

    >>> q_var = ['FP89']
    >>> q = np.array([db.by_col(name) for name in q_var]).T

    The different regimes in this data are given according to the North and
    South dummy (SOUTH).

    >>> r_var = 'SOUTH'
    >>> regimes = db.by_col(r_var)

    Since we want to perform tests for spatial dependence, we need to specify
    the spatial weights matrix that includes the spatial configuration of the
    observations into the error component of the model. To do that, we can open
    an already existing gal file or create a new one. In this case, we will
    create one from ``NAT.shp``.

    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))

    Unless there is a good reason not to do it, the weights have to be
    row-standardized so every row of the matrix sums to one. Among other
    things, this allows to interpret the spatial lag of a variable as the
    average value of the neighboring observations. In PySAL, this can be
    easily performed in the following way:

    >>> w.transform = 'r'

    We can now run the regression and then have a summary of the output
    by typing: model.summary
    Alternatively, we can just check the betas and standard errors of the
    parameters:

    >>> tslsr = TSLS_Regimes(y, x, yd, q, regimes, w=w, constant_regi='many', spat_diag=False, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT', name_w='NAT.shp')

    >>> tslsr.betas
    array([[ 3.66973562],
           [ 1.06950466],
           [ 0.14680946],
           [ 2.45864196],
           [ 9.55873243],
           [ 1.94666348],
           [-0.30810214],
           [ 3.68718119]])

    >>> np.sqrt(tslsr.vm.diagonal())
    array([ 0.38389901,  0.09963973,  0.04672091,  0.22725012,  0.49181223,
            0.19630774,  0.07784587,  0.25529011])

    """

    def __init__(self, y, x, yend, q, regimes,
                 w=None, robust=None, gwk=None, sig2n_k=True,
                 spat_diag=False, vm=False, constant_regi='many',
                 cols2regi='all', regime_err_sep=True, name_y=None, name_x=None,
                 cores=False, name_yend=None, name_q=None, name_regimes=None,
                 name_w=None, name_gwk=None, name_ds=None, summ=True):

        # Validate inputs: array shapes, weights alignment, and the
        # robust/gwk and spat_diag/w combinations.
        n = USER.check_arrays(y, x)
        USER.check_y(y, n)
        USER.check_weights(w, y)
        USER.check_robust(robust, gwk)
        USER.check_spat_diag(spat_diag, w)
        self.constant_regi = constant_regi
        self.cols2regi = cols2regi
        self.name_ds = USER.set_name_ds(name_ds)
        # NOTE(review): reuses set_name_ds for the regimes name (no dedicated
        # setter); mirrors upstream behaviour.
        self.name_regimes = USER.set_name_ds(name_regimes)
        self.name_w = USER.set_name_w(name_w, w)
        self.name_gwk = USER.set_name_w(name_gwk, gwk)
        self.name_y = USER.set_name_y(name_y)
        name_yend = USER.set_name_yend(name_yend, yend)
        name_q = USER.set_name_q(name_q, q)
        self.name_x_r = USER.set_name_x(name_x, x) + name_yend
        self.n = n
        # Normalize cols2regi into a per-column boolean list covering x + yend.
        cols2regi = REGI.check_cols2regi(
            constant_regi, cols2regi, x, yend=yend, add_cons=False)
        self.regimes_set = REGI._get_regimes_set(regimes)
        self.regimes = regimes
        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
        # HAC estimation is incompatible with separate per-regime errors.
        if regime_err_sep == True and robust == 'hac':
            set_warn(
                self, "Error by regimes is incompatible with HAC estimation for 2SLS models. Hence, the error by regimes has been disabled for this model.")
            regime_err_sep = False
        self.regime_err_sep = regime_err_sep
        # When every column varies by regime, each regime has its own constant
        # and errors are separated, the model decomposes into one independent
        # regression per regime (multi path); otherwise expand the design
        # matrices with regime interactions and run a single pooled 2SLS.
        if regime_err_sep == True and set(cols2regi) == set([True]) and constant_regi == 'many':
            name_x = USER.set_name_x(name_x, x)
            self.y = y
            # Map each regime label to the row indices of its observations.
            regi_ids = dict(
                (r, list(np.where(np.array(regimes) == r)[0])) for r in self.regimes_set)
            self._tsls_regimes_multi(x, yend, q, w, regi_ids, cores,
                                     gwk, sig2n_k, robust, spat_diag, vm, name_x, name_yend, name_q)
        else:
            name_x = USER.set_name_x(name_x, x, constant=True)
            # Expand q, x and yend by regime via Regimes_Frame; instruments
            # (q) always vary fully by regime, yend follows cols2regi.
            q, self.name_q = REGI.Regimes_Frame.__init__(self, q,
                                                         regimes, constant_regi=None, cols2regi='all', names=name_q)
            x, self.name_x = REGI.Regimes_Frame.__init__(self, x,
                                                         regimes, constant_regi, cols2regi=cols2regi, names=name_x)
            yend, self.name_yend = REGI.Regimes_Frame.__init__(self, yend,
                                                               regimes, constant_regi=None,
                                                               cols2regi=cols2regi, yend=True, names=name_yend)
            # With separated errors but a pooled regression, default to
            # White standard errors when none were requested.
            if regime_err_sep == True and robust == None:
                robust = 'white'
            BaseTSLS.__init__(self, y=y, x=x, yend=yend, q=q,
                              robust=robust, gwk=gwk, sig2n_k=sig2n_k)
            self.title = "TWO STAGE LEAST SQUARES - REGIMES"
            if robust == 'ogmm':
                # Optimal-weighted GMM re-estimates betas/vm in place.
                _optimal_weight(self, sig2n_k)
            self.name_z = self.name_x + self.name_yend
            self.name_h = USER.set_name_h(self.name_x, self.name_q)
            # Chow test of coefficient stability across regimes.
            self.chow = REGI.Chow(self)
            self.robust = USER.set_robust(robust)
            if summ:
                SUMMARY.TSLS(
                    reg=self, vm=vm, w=w, spat_diag=spat_diag, regimes=True)

    def _tsls_regimes_multi(self, x, yend, q, w, regi_ids, cores,
                            gwk, sig2n_k, robust, spat_diag, vm, name_x, name_yend, name_q):
        # Fit one independent 2SLS per regime (optionally via a
        # multiprocessing pool) and stack the results into pooled
        # attributes: block-diagonal vm, stacked betas/u/predy.
        results_p = {}
        """
        for r in self.regimes_set:
            if system() != 'Windows':
                is_win = True
                results_p[r] = _work(*(self.y,x,w,regi_ids,r,yend,q,robust,sig2n_k,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes))
            else:
                pool = mp.Pool(cores)
                results_p[r] = pool.apply_async(_work,args=(self.y,x,w,regi_ids,r,yend,q,robust,sig2n_k,self.name_ds,self.name_y,name_x,name_yend,name_q,self.name_w,self.name_regimes))
                is_win = False
        """
        # (The string literal above is a superseded platform-dependent
        # implementation kept as reference; it is a no-op at runtime.)
        for r in self.regimes_set:
            if cores:
                pool = mp.Pool(None)
                results_p[r] = pool.apply_async(_work, args=(
                    self.y, x, w, regi_ids, r, yend, q, robust, sig2n_k, self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes))
            else:
                results_p[r] = _work(*(self.y, x, w, regi_ids, r, yend, q, robust, sig2n_k,
                                       self.name_ds, self.name_y, name_x, name_yend, name_q, self.name_w, self.name_regimes))

        self.kryd = 0
        # Per-regime parameter count: x columns + yend columns + constant.
        self.kr = x.shape[1] + yend.shape[1] + 1
        self.kf = 0
        self.nr = len(self.regimes_set)
        # Preallocate pooled containers to be filled regime by regime below.
        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
        self.betas = np.zeros((self.nr * self.kr, 1), float)
        self.u = np.zeros((self.n, 1), float)
        self.predy = np.zeros((self.n, 1), float)
        """
        if not is_win:
            pool.close()
            pool.join()
        """
        if cores:
            pool.close()
            pool.join()

        results = {}
        self.name_y, self.name_x, self.name_yend, self.name_q, self.name_z, self.name_h = [
        ], [], [], [], [], []
        counter = 0
        for r in self.regimes_set:
            """
            if is_win:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()
            """
            # Collect each regime's result (AsyncResult.get() when pooled).
            if not cores:
                results[r] = results_p[r]
            else:
                results[r] = results_p[r].get()

            # Place this regime's vm on the block diagonal and its betas,
            # residuals and predictions into the pooled arrays.
            self.vm[(counter * self.kr):((counter + 1) * self.kr),
                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
            self.betas[
                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
            self.u[regi_ids[r], ] = results[r].u
            self.predy[regi_ids[r], ] = results[r].predy
            self.name_y += results[r].name_y
            self.name_x += results[r].name_x
            self.name_yend += results[r].name_yend
            self.name_q += results[r].name_q
            self.name_z += results[r].name_z
            self.name_h += results[r].name_h
            counter += 1
        self.multi = results
        self.hac_var = sphstack(x, q)
        if robust == 'hac':
            hac_multi(self, gwk)
        if robust == 'ogmm':
            set_warn(
                self, "Residuals treated as homoskedastic for the purpose of diagnostics.")
        self.chow = REGI.Chow(self)
        if spat_diag:
            self._get_spat_diag_props(results, regi_ids, x, yend, q)
        SUMMARY.TSLS_multi(
            reg=self, multireg=self.multi, vm=vm, spat_diag=spat_diag, regimes=True, w=w)

    def _get_spat_diag_props(self, results, regi_ids, x, yend, q):
        # Rebuild pooled design (z) and instrument (h) matrices with full
        # regime expansion so the spatial diagnostics can be computed on
        # the stacked residuals; varb = (Z'H (H'H)^-1 H'Z)^-1.
        self._cache = {}
        x = USER.check_constant(x)
        x = REGI.regimeX_setup(
            x, self.regimes, [True] * x.shape[1], self.regimes_set)
        self.z = sphstack(x, REGI.regimeX_setup(
            yend, self.regimes, [True] * yend.shape[1], self.regimes_set))
        self.h = sphstack(
            x, REGI.regimeX_setup(q, self.regimes, [True] * q.shape[1], self.regimes_set))
        hthi = np.linalg.inv(spdot(self.h.T, self.h))
        zth = spdot(self.z.T, self.h)
        self.varb = np.linalg.inv(spdot(spdot(zth, hthi), zth.T))
+
+
def _work(y, x, w, regi_ids, r, yend, q, robust, sig2n_k, name_ds, name_y, name_x, name_yend, name_q, name_w, name_regimes):
    """Estimate a single regime's 2SLS model.

    Subsets y, x, yend and q to the observations of regime ``r``, fits a
    BaseTSLS on them, tags all output names with the regime label, and
    (when a weights object is supplied) attaches the regime's sub-weights.
    Returns the fitted model.
    """
    idx = regi_ids[r]
    y_r = y[idx]
    x_r = x[idx]
    yend_r = yend[idx]
    q_r = q[idx]
    x_constant = USER.check_constant(x_r)
    # HAC and Optimal-GMM corrections are applied later on the pooled
    # results, so the per-regime regression itself runs without them.
    if robust in ('hac', 'ogmm'):
        robust2 = None
    else:
        robust2 = robust
    model = BaseTSLS(
        y_r, x_constant, yend_r, q_r, robust=robust2, sig2n_k=sig2n_k)
    model.title = "TWO STAGE LEAST SQUARES ESTIMATION - REGIME %s" % r
    if robust == 'ogmm':
        _optimal_weight(model, sig2n_k, warn=False)
    model.robust = USER.set_robust(robust)

    def tag(names):
        # Prefix each output name with this regime's label.
        return ['%s_%s' % (str(r), i) for i in names]

    model.name_ds = name_ds
    model.name_y = '%s_%s' % (str(r), name_y)
    model.name_x = tag(name_x)
    model.name_yend = tag(name_yend)
    model.name_z = model.name_x + model.name_yend
    model.name_q = tag(name_q)
    model.name_h = model.name_x + model.name_q
    model.name_w = name_w
    model.name_regimes = name_regimes
    if w:
        w_r, warn = REGI.w_regime(w, idx, r, transform=True)
        set_warn(model, warn)
        model.w = w_r
    return model
+
+
def _optimal_weight(reg, sig2n_k, warn=True):
    """Re-estimate ``reg`` in place using Optimal-Weighted GMM.

    Builds the weighting matrix S = H'(u^2 * H) / n (or / (n-k) when
    ``sig2n_k``), recomputes betas and their covariance with it, updates
    the regression object's basic properties and appends a marker to its
    title.

    Parameters
    ----------
    reg     : fitted regression object exposing h, u, z, y, n and k
    sig2n_k : boolean; if True use n-k in the denominators, else use n
    warn    : boolean; if True attach the homoskedasticity note to reg
    """
    try:
        # reg.h may be a scipy sparse matrix; densify before the
        # elementwise product with the squared residuals.
        Hu = reg.h.toarray() * reg.u ** 2
    except AttributeError:
        # Dense array: no toarray() method.  (Was a bare `except:`, which
        # also swallowed unrelated errors such as MemoryError.)
        Hu = reg.h * reg.u ** 2
    if sig2n_k:
        S = spdot(reg.h.T, Hu, array_out=True) / (reg.n - reg.k)
    else:
        S = spdot(reg.h.T, Hu, array_out=True) / reg.n
    Si = np.linalg.inv(S)
    ZtH = spdot(reg.z.T, reg.h)
    ZtHSi = spdot(ZtH, Si)
    # betas = (Z'H S^-1 H'Z)^-1 Z'H S^-1 H'y ; vm scales its inverse factor.
    fac2 = np.linalg.inv(spdot(ZtHSi, ZtH.T, array_out=True))
    fac3 = spdot(ZtHSi, spdot(reg.h.T, reg.y), array_out=True)
    betas = np.dot(fac2, fac3)
    if sig2n_k:
        vm = fac2 * (reg.n - reg.k)
    else:
        vm = fac2 * reg.n
    RegressionProps_basic(reg, betas=betas, vm=vm, sig2=False)
    reg.title += " (Optimal-Weighted GMM)"
    if warn:
        set_warn(
            reg, "Residuals treated as homoskedastic for the purpose of diagnostics.")
    return
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+
if __name__ == '__main__':
    # Run the module doctests first (see _test above).
    _test()
    # Demo: 2SLS with regimes on the NAT sample data shipped with pysal.
    import numpy as np
    import pysal
    db = pysal.open(pysal.examples.get_path('NAT.dbf'), 'r')
    # Dependent variable: homicide rate 1960.
    y_var = 'HR60'
    y = np.array([db.by_col(y_var)]).T
    # Exogenous regressors.
    x_var = ['PS60', 'DV60', 'RD60']
    x = np.array([db.by_col(name) for name in x_var]).T
    # Endogenous regressor (unemployment), instrumented by FP59 and MA60.
    yd_var = ['UE60']
    yd = np.array([db.by_col(name) for name in yd_var]).T
    q_var = ['FP59', 'MA60']
    q = np.array([db.by_col(name) for name in q_var]).T
    # Regimes defined by the North/South dummy.
    r_var = 'SOUTH'
    regimes = db.by_col(r_var)
    # PS60 is held fixed across regimes (cols2regi[0] = False).
    tslsr = TSLS_Regimes(y, x, yd, q, regimes, constant_regi='many', spat_diag=False, name_y=y_var, name_x=x_var,
                         name_yend=yd_var, name_q=q_var, name_regimes=r_var, cols2regi=[
                             False, True, True, True],
                         sig2n_k=False)
    print tslsr.summary
diff --git a/pysal/spreg/twosls_sp.py b/pysal/spreg/twosls_sp.py
new file mode 100644
index 0000000..694b2c0
--- /dev/null
+++ b/pysal/spreg/twosls_sp.py
@@ -0,0 +1,540 @@
+'''
+Spatial Two Stages Least Squares
+'''
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, David C. Folch david.folch at asu.edu"
+
+import copy
+import numpy as np
+import pysal
+import numpy.linalg as la
+import twosls as TSLS
+import robust as ROBUST
+import user_output as USER
+import summary_output as SUMMARY
+from utils import get_lags, set_endog, sp_att, set_warn
+
+__all__ = ["GM_Lag"]
+
+
class BaseGM_Lag(TSLS.BaseTSLS):

    """
    Spatial two stage least squares (S2SLS) (note: no consistency checks,
    diagnostics or constant added); Anselin (1988) [1]_

    Parameters
    ----------
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, excluding the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable to use as instruments (note:
                   this should not contain any variables from x); cannot be
                   used in combination with h
    w            : Sparse matrix
                   Spatial weights sparse matrix
    w_lags       : integer
                   Orders of W to include as instruments for the spatially
                   lagged dependent variable. For example, w_lags=1, then
                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
    lag_q        : boolean
                   If True, then include spatial lags of the additional
                   instruments (q).
    robust       : string
                   If 'white', then a White consistent estimator of the
                   variance-covariance matrix is given.  If 'hac', then a
                   HAC consistent estimator of the variance-covariance
                   matrix is given. Default set to None.
    gwk          : pysal W object
                   Kernel spatial weights needed for HAC estimation. Note:
                   matrix must have ones along the main diagonal.
    sig2n_k      : boolean
                   If True, then use n-k to estimate sigma^2. If False, use n.


    Attributes
    ----------
    betas        : array
                   kx1 array of estimated coefficients
    u            : array
                   nx1 array of residuals
    predy        : array
                   nx1 array of predicted y values
    n            : integer
                   Number of observations
    k            : integer
                   Number of variables for which coefficients are estimated
                   (including the constant)
    kstar        : integer
                   Number of endogenous variables.
    y            : array
                   nx1 array for dependent variable
    x            : array
                   Two dimensional array with n rows and one column for each
                   independent (exogenous) variable, including the constant
    yend         : array
                   Two dimensional array with n rows and one column for each
                   endogenous variable
    q            : array
                   Two dimensional array with n rows and one column for each
                   external exogenous variable used as instruments
    z            : array
                   nxk array of variables (combination of x and yend)
    h            : array
                   nxl array of instruments (combination of x and q)
    mean_y       : float
                   Mean of dependent variable
    std_y        : float
                   Standard deviation of dependent variable
    vm           : array
                   Variance covariance matrix (kxk)
    utu          : float
                   Sum of squared residuals
    sig2         : float
                   Sigma squared used in computations
    sig2n        : float
                   Sigma squared (computed with n in the denominator)
    sig2n_k      : float
                   Sigma squared (computed with n-k in the denominator)
    hth          : float
                   H'H
    hthi         : float
                   (H'H)^-1
    varb         : array
                   (Z'H (H'H)^-1 H'Z)^-1
    zthhthi      : array
                   Z'H(H'H)^-1
    pfora1a2     : array
                   n(zthhthi)'varb

    References
    ----------

    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
    Kluwer Academic Publishers. Dordrecht.

    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> import pysal.spreg.diagnostics as D
    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> w.transform = 'r'
    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
    >>> y = np.array(db.by_col("HOVAL"))
    >>> y = np.reshape(y, (49,1))
    >>> # no non-spatial endogenous variables
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("CRIME"))
    >>> X = np.array(X).T
    >>> w_lags = 2
    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, None, None, w_lags, True)
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> reg=BaseGM_Lag(y, X, yend=yd2, q=q2, w=w.sparse, w_lags=w_lags)
    >>> reg.betas
    array([[ 45.30170561],
           [  0.62088862],
           [ -0.48072345],
           [  0.02836221]])
    >>> D.se_betas(reg)
    array([ 17.91278862,   0.52486082,   0.1822815 ,   0.31740089])
    >>> reg=BaseGM_Lag(y, X, yend=yd2, q=q2, w=w.sparse, w_lags=w_lags, robust='white')
    >>> reg.betas
    array([[ 45.30170561],
           [  0.62088862],
           [ -0.48072345],
           [  0.02836221]])
    >>> D.se_betas(reg)
    array([ 20.47077481,   0.50613931,   0.20138425,   0.38028295])
    >>> # instrument for HOVAL with DISCBD
    >>> X = np.array(db.by_col("INC"))
    >>> X = np.reshape(X, (49,1))
    >>> yd = np.array(db.by_col("CRIME"))
    >>> yd = np.reshape(yd, (49,1))
    >>> q = np.array(db.by_col("DISCBD"))
    >>> q = np.reshape(q, (49,1))
    >>> yd2, q2 = pysal.spreg.utils.set_endog(y, X, w, yd, q, w_lags, True)
    >>> X = np.hstack((np.ones(y.shape),X))
    >>> reg=BaseGM_Lag(y, X, w=w.sparse, yend=yd2, q=q2, w_lags=w_lags)
    >>> reg.betas
    array([[ 100.79359082],
           [  -0.50215501],
           [  -1.14881711],
           [  -0.38235022]])
    >>> D.se_betas(reg)
    array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744])

    """

    def __init__(self, y, x, yend=None, q=None,
                 w=None, w_lags=1, lag_q=True,
                 robust=None, gwk=None, sig2n_k=False):

        # NOTE(review): w, w_lags and lag_q are accepted but not used in this
        # base class -- the spatial-lag instruments are expected to be built
        # beforehand (e.g. via pysal.spreg.utils.set_endog, as in the doctest
        # above) and passed in through yend/q.  Estimation is delegated
        # entirely to BaseTSLS.
        TSLS.BaseTSLS.__init__(self, y=y, x=x, yend=yend, q=q,
                               robust=robust, gwk=gwk, sig2n_k=sig2n_k)
+
+
+class GM_Lag(BaseGM_Lag):
+
+    """
+    Spatial two stage least squares (S2SLS) with results and diagnostics; 
+    Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x); cannot be
+                   used in combination with h
+    w            : pysal W object
+                   Spatial weights object 
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    robust       : string
+                   If 'white', then a White consistent estimator of the
+                   variance-covariance matrix is given.  If 'hac', then a
+                   HAC consistent estimator of the variance-covariance
+                   matrix is given. Default set to None. 
+    gwk          : pysal W object
+                   Kernel spatial weights needed for HAC estimation. Note:
+                   matrix must have ones along the main diagonal.
+    sig2n_k      : boolean
+                   If True, then use n-k to estimate sigma^2. If False, use n.
+    spat_diag    : boolean
+                   If True, then compute Anselin-Kelejian test
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+    kstar        : integer
+                   Number of endogenous variables. 
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+    z            : array
+                   nxk array of variables (combination of x and yend)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+    robust       : string
+                   Adjustment for robust standard errors
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+    utu          : float
+                   Sum of squared residuals
+    sig2         : float
+                   Sigma squared used in computations
+    std_err      : array
+                   1xk array of standard errors of the betas    
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+    ak_test      : tuple
+                   Anselin-Kelejian test; tuple contains the pair (statistic,
+                   p-value)
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_z       : list of strings
+                   Names of exogenous and endogenous variables for use in 
+                   output
+    name_q       : list of strings
+                   Names of external instruments
+    name_h       : list of strings
+                   Names of all instruments used in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    title        : string
+                   Name of the regression method used
+    sig2n        : float
+                   Sigma squared (computed with n in the denominator)
+    sig2n_k      : float
+                   Sigma squared (computed with n-k in the denominator)
+    hth          : float
+                   H'H
+    hthi         : float
+                   (H'H)^-1
+    varb         : array
+                   (Z'H (H'H)^-1 H'Z)^-1
+    zthhthi      : array
+                   Z'H(H'H)^-1
+    pfora1a2     : array
+                   n(zthhthi)'varb
+
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis. Since we will need some tests for our
+    model, we also import the diagnostics module.
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> import pysal.spreg.diagnostics as D
+
+    Open data on Columbus neighborhood crime (49 areas) using pysal.open().
+    This is the DBF associated with the Columbus shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
+
+    Extract the HOVAL column (home value) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y = np.array(db.by_col("HOVAL"))
+    >>> y = np.reshape(y, (49,1))
+
+    Extract INC (income) and CRIME (crime rates) vectors from the DBF to be used as
+    independent variables in the regression.  Note that PySAL requires this to
+    be an nxj numpy array, where j is the number of independent variables (not
+    including a constant). By default this model adds a vector of ones to the
+    independent variables passed in, but this can be overridden by passing
+    constant=False.
+
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("CRIME"))
+    >>> X = np.array(X).T
+
+    Since we want to run a spatial error model, we need to specify the spatial
+    weights matrix that includes the spatial configuration of the observations
+    into the error component of the model. To do that, we can open an already
+    existing gal file or create a new one. In this case, we will create one
+    from ``columbus.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    This class runs a lag model, which means that includes the spatial lag of
+    the dependent variable on the right-hand side of the equation. If we want
+    to have the names of the variables printed in the
+    output summary, we will have to pass them in as well, although this is
+    optional. The default most basic model to be run would be: 
+
+    >>> reg=GM_Lag(y, X, w=w, w_lags=2, name_x=['inc', 'crime'], name_y='hoval', name_ds='columbus')
+    >>> reg.betas
+    array([[ 45.30170561],
+           [  0.62088862],
+           [ -0.48072345],
+           [  0.02836221]])
+
+    Once the model is run, we can obtain the standard error of the coefficient
+    estimates by calling the diagnostics module:
+
+    >>> D.se_betas(reg)
+    array([ 17.91278862,   0.52486082,   0.1822815 ,   0.31740089])
+
+    But we can also run models that incorporate corrected standard errors
+    following the White procedure. For that, we will have to include the
+    optional parameter ``robust='white'``:
+
+    >>> reg=GM_Lag(y, X, w=w, w_lags=2, robust='white', name_x=['inc', 'crime'], name_y='hoval', name_ds='columbus')
+    >>> reg.betas
+    array([[ 45.30170561],
+           [  0.62088862],
+           [ -0.48072345],
+           [  0.02836221]])
+
+    And we can access the standard errors from the model object:
+
+    >>> reg.std_err
+    array([ 20.47077481,   0.50613931,   0.20138425,   0.38028295])
+
+    The class is flexible enough to accommodate a spatial lag model that,
+    besides the spatial lag of the dependent variable, includes other
+    non-spatial endogenous regressors. As an example, we will assume that
+    CRIME is actually endogenous and we decide to instrument for it with
+    DISCBD (distance to the CBD). We reload the X including INC only and
+    define CRIME as endogenous and DISCBD as instrument:
+
+    >>> X = np.array(db.by_col("INC"))
+    >>> X = np.reshape(X, (49,1))
+    >>> yd = np.array(db.by_col("CRIME"))
+    >>> yd = np.reshape(yd, (49,1))
+    >>> q = np.array(db.by_col("DISCBD"))
+    >>> q = np.reshape(q, (49,1))
+
+    And we can run the model again:
+
+    >>> reg=GM_Lag(y, X, w=w, yend=yd, q=q, w_lags=2, name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')
+    >>> reg.betas
+    array([[ 100.79359082],
+           [  -0.50215501],
+           [  -1.14881711],
+           [  -0.38235022]])
+
+    Once the model is run, we can obtain the standard error of the coefficient
+    estimates by calling the diagnostics module:
+
+    >>> D.se_betas(reg)
+    array([ 53.0829123 ,   1.02511494,   0.57589064,   0.59891744])
+
+    """
+
+    def __init__(self, y, x, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True,
+                 robust=None, gwk=None, sig2n_k=False,
+                 spat_diag=False,
+                 vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None,
+                 name_w=None, name_gwk=None, name_ds=None):
+
+        n = USER.check_arrays(x, yend, q)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        USER.check_robust(robust, gwk)
+        yend2, q2 = set_endog(y, x, w, yend, q, w_lags, lag_q)
+        x_constant = USER.check_constant(x)
+        BaseGM_Lag.__init__(
+            self, y=y, x=x_constant, w=w.sparse, yend=yend2, q=q2,
+            w_lags=w_lags, robust=robust, gwk=gwk,
+            lag_q=lag_q, sig2n_k=sig2n_k)
+        self.rho = self.betas[-1]
+        self.predy_e, self.e_pred, warn = sp_att(w, self.y, self.predy,
+                                                 yend2[:, -1].reshape(self.n, 1), self.rho)
+        set_warn(self, warn)
+        self.title = "SPATIAL TWO STAGE LEAST SQUARES"
+        self.name_ds = USER.set_name_ds(name_ds)
+        self.name_y = USER.set_name_y(name_y)
+        self.name_x = USER.set_name_x(name_x, x)
+        self.name_yend = USER.set_name_yend(name_yend, yend)
+        self.name_yend.append(USER.set_name_yend_sp(self.name_y))
+        self.name_z = self.name_x + self.name_yend
+        self.name_q = USER.set_name_q(name_q, q)
+        self.name_q.extend(
+            USER.set_name_q_sp(self.name_x, w_lags, self.name_q, lag_q))
+        self.name_h = USER.set_name_h(self.name_x, self.name_q)
+        self.robust = USER.set_robust(robust)
+        self.name_w = USER.set_name_w(name_w, w)
+        self.name_gwk = USER.set_name_w(name_gwk, gwk)
+        SUMMARY.GM_Lag(reg=self, w=w, vm=vm, spat_diag=spat_diag)
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+
+if __name__ == '__main__':
+    _test()
+
+    import numpy as np
+    import pysal
+    db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+    y_var = 'CRIME'
+    y = np.array([db.by_col(y_var)]).reshape(49, 1)
+    x_var = ['INC']
+    x = np.array([db.by_col(name) for name in x_var]).T
+    yd_var = ['HOVAL']
+    yd = np.array([db.by_col(name) for name in yd_var]).T
+    q_var = ['DISCBD']
+    q = np.array([db.by_col(name) for name in q_var]).T
+    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    w.transform = 'r'
+    model = GM_Lag(
+        y, x, yd, q, w=w, spat_diag=True, name_y=y_var, name_x=x_var,
+        name_yend=yd_var, name_q=q_var, name_ds='columbus', name_w='columbus.gal')
+    print model.summary
diff --git a/pysal/spreg/twosls_sp_regimes.py b/pysal/spreg/twosls_sp_regimes.py
new file mode 100644
index 0000000..dd99d90
--- /dev/null
+++ b/pysal/spreg/twosls_sp_regimes.py
@@ -0,0 +1,705 @@
+'''
+Spatial Two Stages Least Squares with Regimes
+'''
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, Pedro V. Amaral pedro.amaral at asu.edu, David C. Folch david.folch at asu.edu"
+
+import numpy as np
+import pysal
+import regimes as REGI
+import user_output as USER
+import summary_output as SUMMARY
+import multiprocessing as mp
+from twosls_regimes import TSLS_Regimes, _optimal_weight
+from twosls import BaseTSLS
+from utils import set_endog, set_endog_sparse, sp_att, set_warn, sphstack, spdot
+from robust import hac_multi
+from platform import system
+
+
+class GM_Lag_Regimes(TSLS_Regimes, REGI.Regimes_Frame):
+
+    """
+    Spatial two stage least squares (S2SLS) with regimes; 
+    Anselin (1988) [1]_
+
+    Parameters
+    ----------
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, excluding the constant
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable to use as instruments (note: 
+                   this should not contain any variables from x); cannot be
+                   used in combination with h
+    constant_regi: ['one', 'many']
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime (default)
+    cols2regi    : list, 'all'
+                   Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all' (default), all the variables vary by regime.
+    w            : pysal W object
+                   Spatial weights object 
+    w_lags       : integer
+                   Orders of W to include as instruments for the spatially
+                   lagged dependent variable. For example, w_lags=1, then
+                   instruments are WX; if w_lags=2, then WX, WWX; and so on.
+    lag_q        : boolean
+                   If True, then include spatial lags of the additional 
+                   instruments (q).
+    regime_lag_sep: boolean
+                   If True (default), the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False, 
+                   the spatial parameter is fixed across regimes.
+                   Option valid only when regime_err_sep=True
+    regime_err_sep: boolean
+                   If True, a separate regression is run for each regime.
+    robust       : string
+                   If 'white', then a White consistent estimator of the
+                   variance-covariance matrix is given.
+                   If 'hac', then a HAC consistent estimator of the 
+                   variance-covariance matrix is given.
+                   If 'ogmm', then Optimal GMM is used to estimate
+                   betas and the variance-covariance matrix.
+                   Default set to None. 
+    gwk          : pysal W object
+                   Kernel spatial weights needed for HAC estimation. Note:
+                   matrix must have ones along the main diagonal.
+    sig2n_k      : boolean
+                   If True, then use n-k to estimate sigma^2. If False, use n.
+    spat_diag    : boolean
+                   If True, then compute Anselin-Kelejian test
+    vm           : boolean
+                   If True, include variance-covariance matrix in summary
+                   results
+    cores        : boolean
+                   Specifies if multiprocessing is to be used
+                   Default: no multiprocessing, cores = False
+                   Note: Multiprocessing may not work on all platforms.
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_q       : list of strings
+                   Names of instruments for use in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regimes variable for use in output
+
+    Attributes
+    ----------
+    summary      : string
+                   Summary of regression results and diagnostics (note: use in
+                   conjunction with the print command)
+    betas        : array
+                   kx1 array of estimated coefficients
+    u            : array
+                   nx1 array of residuals
+    e_pred       : array
+                   nx1 array of residuals (using reduced form)
+    predy        : array
+                   nx1 array of predicted y values
+    predy_e      : array
+                   nx1 array of predicted y values (using reduced form)
+    n            : integer
+                   Number of observations
+    k            : integer
+                   Number of variables for which coefficients are estimated
+                   (including the constant)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    kstar        : integer
+                   Number of endogenous variables. 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    y            : array
+                   nx1 array for dependent variable
+    x            : array
+                   Two dimensional array with n rows and one column for each
+                   independent (exogenous) variable, including the constant
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    yend         : array
+                   Two dimensional array with n rows and one column for each
+                   endogenous variable
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    q            : array
+                   Two dimensional array with n rows and one column for each
+                   external exogenous variable used as instruments 
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z            : array
+                   nxk array of variables (combination of x and yend)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    h            : array
+                   nxl array of instruments (combination of x and q)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    robust       : string
+                   Adjustment for robust standard errors
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    mean_y       : float
+                   Mean of dependent variable
+    std_y        : float
+                   Standard deviation of dependent variable
+    vm           : array
+                   Variance covariance matrix (kxk)
+    pr2          : float
+                   Pseudo R squared (squared correlation between y and ypred)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pr2_e        : float
+                   Pseudo R squared (squared correlation between y and ypred_e
+                   (using reduced form))
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    utu          : float
+                   Sum of squared residuals
+    sig2         : float
+                   Sigma squared used in computations
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    std_err      : array
+                   1xk array of standard errors of the betas    
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    z_stat       : list of tuples
+                   z statistic; each tuple contains the pair (statistic,
+                   p-value), where each is a float
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    ak_test      : tuple
+                   Anselin-Kelejian test; tuple contains the pair (statistic,
+                   p-value)
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    name_y       : string
+                   Name of dependent variable for use in output
+    name_x       : list of strings
+                   Names of independent variables for use in output
+    name_yend    : list of strings
+                   Names of endogenous variables for use in output
+    name_z       : list of strings
+                   Names of exogenous and endogenous variables for use in 
+                   output
+    name_q       : list of strings
+                   Names of external instruments
+    name_h       : list of strings
+                   Names of all instruments used in output
+    name_w       : string
+                   Name of weights matrix for use in output
+    name_gwk     : string
+                   Name of kernel weights matrix for use in output
+    name_ds      : string
+                   Name of dataset for use in output
+    name_regimes : string
+                   Name of regimes variable for use in output
+    title        : string
+                   Name of the regression method used
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    sig2n        : float
+                   Sigma squared (computed with n in the denominator)
+    sig2n_k      : float
+                   Sigma squared (computed with n-k in the denominator)
+    hth          : float
+                   H'H
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    hthi         : float
+                   (H'H)^-1
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    varb         : array
+                   (Z'H (H'H)^-1 H'Z)^-1
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    zthhthi      : array
+                   Z'H(H'H)^-1
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    pfora1a2     : array
+                   n(zthhthi)'varb
+                   Only available in dictionary 'multi' when multiple regressions
+                   (see 'multi' below for details)
+    regimes      : list
+                   List of n values with the mapping of each
+                   observation to a regime. Assumed to be aligned with 'x'.
+    constant_regi: ['one', 'many']
+                   Ignored if regimes=False. Constant option for regimes.
+                   Switcher controlling the constant term setup. It may take
+                   the following values:
+                     *  'one': a vector of ones is appended to x and held
+                               constant across regimes
+                     * 'many': a vector of ones is appended to x and considered
+                               different per regime
+    cols2regi    : list, 'all'
+                   Ignored if regimes=False. Argument indicating whether each
+                   column of x should be considered as different per regime
+                   or held constant across regimes (False).
+                   If a list, k booleans indicating for each variable the
+                   option (True if one per regime, False to be held constant).
+                   If 'all', all the variables vary by regime.
+    regime_lag_sep   : boolean
+                   If True, the spatial parameter for spatial lag is also
+                   computed according to different regimes. If False (default), 
+                   the spatial parameter is fixed across regimes.
+    regime_err_sep  : boolean
+                   If True, a separate regression is run for each regime.
+    kr           : int
+                   Number of variables/columns to be "regimized" or subject
+                   to change by regime. These will result in one parameter
+                   estimate by regime for each variable (i.e. nr parameters per
+                   variable)
+    kf           : int
+                   Number of variables/columns to be considered fixed or
+                   global across regimes and hence only obtain one parameter
+                   estimate
+    nr           : int
+                   Number of different regimes in the 'regimes' list
+    multi        : dictionary
+                   Only available when multiple regressions are estimated,
+                   i.e. when regime_err_sep=True and no variable is fixed
+                   across regimes.
+                   Contains all attributes of each individual regression
+
+    References
+    ----------
+
+    .. [1] Anselin, L. (1988) "Spatial Econometrics: Methods and Models".
+    Kluwer Academic Publishers. Dordrecht.
+
+    Examples
+    --------
+
+    We first need to import the needed modules, namely numpy to convert the
+    data we read into arrays that ``spreg`` understands and ``pysal`` to
+    perform all the analysis.
+
+    >>> import numpy as np
+    >>> import pysal
+
+    Open data on NCOVR US County Homicides (3085 areas) using pysal.open().
+    This is the DBF associated with the NAT shapefile.  Note that
+    pysal.open() also reads data in CSV format; since the actual class
+    requires data to be passed in as numpy arrays, the user can read their
+    data in using any method.  
+
+    >>> db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
+
+    Extract the HR90 column (homicide rates in 1990) from the DBF file and make it the
+    dependent variable for the regression. Note that PySAL requires this to be
+    a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
+    that other packages accept.
+
+    >>> y_var = 'HR90'
+    >>> y = np.array([db.by_col(y_var)]).reshape(3085,1)
+
+    Extract UE90 (unemployment rate) and PS90 (population structure) vectors from
+    the DBF to be used as independent variables in the regression. Other variables
+    can be inserted by adding their names to x_var, such as x_var = ['Var1','Var2',...]
+    Note that PySAL requires this to be an nxj numpy array, where j is the
+    number of independent variables (not including a constant). By default
+    this model adds a vector of ones to the independent variables passed in.
+
+    >>> x_var = ['PS90','UE90']
+    >>> x = np.array([db.by_col(name) for name in x_var]).T
+
+    The different regimes in this data are given according to the North and 
+    South dummy (SOUTH).
+
+    >>> r_var = 'SOUTH'
+    >>> regimes = db.by_col(r_var)
+
+    Since we want to run a spatial lag model, we need to specify
+    the spatial weights matrix that includes the spatial configuration of the
+    observations. To do that, we can open an already existing gal file or 
+    create a new one. In this case, we will create one from ``NAT.shp``.
+
+    >>> w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
+
+    Unless there is a good reason not to do it, the weights have to be
+    row-standardized so every row of the matrix sums to one. Among other
+    things, this allows to interpret the spatial lag of a variable as the
+    average value of the neighboring observations. In PySAL, this can be
+    easily performed in the following way:
+
+    >>> w.transform = 'r'
+
+    This class runs a lag model, which means that includes the spatial lag of
+    the dependent variable on the right-hand side of the equation. If we want
+    to have the names of the variables printed in the output summary, we will
+    have to pass them in as well, although this is optional.
+
+    >>> model=GM_Lag_Regimes(y, x, regimes, w=w, regime_lag_sep=False, regime_err_sep=False, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT', name_w='NAT.shp')
+    >>> model.betas
+    array([[ 1.28897623],
+           [ 0.79777722],
+           [ 0.56366891],
+           [ 8.73327838],
+           [ 1.30433406],
+           [ 0.62418643],
+           [-0.39993716]])
+
+    Once the model is run, we can have a summary of the output by typing:
+    model.summary. Alternatively, we can obtain the standard error of
+    the coefficient estimates by calling:
+
+    >>> model.std_err
+    array([ 0.44682888,  0.14358192,  0.05655124,  1.06044865,  0.20184548,
+            0.06118262,  0.12387232])
+
+    In the example above, all coefficients but the spatial lag vary
+    according to the regime. It is also possible to have the spatial lag
+    varying according to the regime, which effectively results in an
+    independent spatial lag model estimated for each regime. To run these
+    models, the argument regime_lag_sep must be set to True:
+
+    >>> model=GM_Lag_Regimes(y, x, regimes, w=w, regime_lag_sep=True, name_y=y_var, name_x=x_var, name_regimes=r_var, name_ds='NAT', name_w='NAT.shp')
+    >>> print np.hstack((np.array(model.name_z).reshape(8,1),model.betas,np.sqrt(model.vm.diagonal().reshape(8,1))))
+    [['0_CONSTANT' '1.36584769' '0.39854720']
+     ['0_PS90' '0.80875730' '0.11324884']
+     ['0_UE90' '0.56946813' '0.04625087']
+     ['0_W_HR90' '-0.4342438' '0.13350159']
+     ['1_CONSTANT' '7.90731073' '1.63601874']
+     ['1_PS90' '1.27465703' '0.24709870']
+     ['1_UE90' '0.60167693' '0.07993322']
+     ['1_W_HR90' '-0.2960338' '0.19934459']]
+
+    Alternatively, we can type: 'model.summary' to see the organized results output.
+    The class is flexible enough to accommodate a spatial lag model that,
+    besides the spatial lag of the dependent variable, includes other
+    non-spatial endogenous regressors. As an example, we will add the endogenous
+    variable RD90 (resource deprivation) and we decide to instrument for it with
+    FP89 (families below poverty):
+
+    >>> yd_var = ['RD90']
+    >>> yd = np.array([db.by_col(name) for name in yd_var]).T
+    >>> q_var = ['FP89']
+    >>> q = np.array([db.by_col(name) for name in q_var]).T
+
+    And we can run the model again:
+
+    >>> model = GM_Lag_Regimes(y, x, regimes, yend=yd, q=q, w=w, regime_lag_sep=False, regime_err_sep=False, name_y=y_var, name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='NAT', name_w='NAT.shp')
+    >>> model.betas
+    array([[ 3.42195202],
+           [ 1.03311878],
+           [ 0.14308741],
+           [ 8.99740066],
+           [ 1.91877758],
+           [-0.32084816],
+           [ 2.38918212],
+           [ 3.67243761],
+           [ 0.06959139]])
+
+    Once the model is run, we can obtain the standard error of the coefficient
+    estimates. Alternatively, we can have a summary of the output by typing:
+    model.summary
+
+    >>> model.std_err
+    array([ 0.49163311,  0.12237382,  0.05633464,  0.72555909,  0.17250521,
+            0.06749131,  0.27370369,  0.25106224,  0.05804213])
+    """
+
+    def __init__(self, y, x, regimes, yend=None, q=None,
+                 w=None, w_lags=1, lag_q=True,
+                 robust=None, gwk=None, sig2n_k=False,
+                 spat_diag=False, constant_regi='many',
+                 cols2regi='all', regime_lag_sep=False, regime_err_sep=True,
+                 cores=False, vm=False, name_y=None, name_x=None,
+                 name_yend=None, name_q=None, name_regimes=None,
+                 name_w=None, name_gwk=None, name_ds=None):
+
+        # Validate user inputs: array shapes, weights (mandatory for a lag
+        # model), the robust/gwk combination and the spatial-diagnostics
+        # request.
+        n = USER.check_arrays(y, x)
+        USER.check_y(y, n)
+        USER.check_weights(w, y, w_required=True)
+        USER.check_robust(robust, gwk)
+        USER.check_spat_diag(spat_diag, w)
+        # Build display names for all variables; force_all=True adds spatial
+        # lags of every exogenous variable to the instrument names.
+        name_x = USER.set_name_x(name_x, x, constant=True)
+        name_y = USER.set_name_y(name_y)
+        name_yend = USER.set_name_yend(name_yend, yend)
+        name_q = USER.set_name_q(name_q, q)
+        name_q.extend(
+            USER.set_name_q_sp(name_x, w_lags, name_q, lag_q, force_all=True))
+        # NOTE(review): name_regimes is run through set_name_ds (the
+        # dataset-name helper); presumably intentional since both default to
+        # 'unknown', but confirm a dedicated helper is not intended instead.
+        self.name_regimes = USER.set_name_ds(name_regimes)
+        self.constant_regi = constant_regi
+        self.n = n
+        # Determine which columns vary by regime; the constant is handled
+        # separately (add_cons=False).
+        cols2regi = REGI.check_cols2regi(
+            constant_regi, cols2regi, x, yend=yend, add_cons=False)
+        self.cols2regi = cols2regi
+        self.regimes_set = REGI._get_regimes_set(regimes)
+        self.regimes = regimes
+        USER.check_regimes(self.regimes_set, self.n, x.shape[1])
+        # HAC standard errors cannot be combined with separate estimation by
+        # regime, so fall back to pooled estimation with a warning.
+        if regime_err_sep == True and robust == 'hac':
+            set_warn(
+                self, "Error by regimes is incompatible with HAC estimation for Spatial Lag models. Hence, error and lag by regimes have been disabled for this model.")
+            regime_err_sep = False
+            regime_lag_sep = False
+        self.regime_err_sep = regime_err_sep
+        self.regime_lag_sep = regime_lag_sep
+        if regime_lag_sep == True:
+            # A regime-specific spatial lag requires regime-specific errors;
+            # build one weights object per regime (w_i) plus the row indices
+            # of each regime's observations (regi_ids).
+            if not regime_err_sep:
+                raise Exception, "regime_err_sep must be True when regime_lag_sep=True."
+            cols2regi += [True]
+            w_i, regi_ids, warn = REGI.w_regimes(
+                w, regimes, self.regimes_set, transform=True, get_ids=True, min_n=len(cols2regi) + 1)
+            set_warn(self, warn)
+
+        else:
+            cols2regi += [False]
+
+        if regime_err_sep == True and set(cols2regi) == set([True]) and constant_regi == 'many':
+            # Fully regime-specific model: estimate one spatial lag model per
+            # regime and assemble the joint results.
+            self.y = y
+            self.GM_Lag_Regimes_Multi(y, x, w_i, w, regi_ids,
+                                      yend=yend, q=q, w_lags=w_lags, lag_q=lag_q, cores=cores,
+                                      robust=robust, gwk=gwk, sig2n_k=sig2n_k, cols2regi=cols2regi,
+                                      spat_diag=spat_diag, vm=vm, name_y=name_y, name_x=name_x,
+                                      name_yend=name_yend, name_q=name_q, name_regimes=self.name_regimes,
+                                      name_w=name_w, name_gwk=name_gwk, name_ds=name_ds)
+        else:
+            if regime_lag_sep == True:
+                # Combine the per-regime matrices into one block-diagonal
+                # weights object for pooled estimation.
+                w = REGI.w_regimes_union(w, w_i, self.regimes_set)
+            # Append the spatial lag of y (and lagged instruments) to the
+            # endogenous variables and instruments.
+            yend2, q2 = set_endog(y, x, w, yend, q, w_lags, lag_q)
+            name_yend.append(USER.set_name_yend_sp(name_y))
+            TSLS_Regimes.__init__(self, y=y, x=x, yend=yend2, q=q2,
+                                  regimes=regimes, w=w, robust=robust, gwk=gwk,
+                                  sig2n_k=sig2n_k, spat_diag=spat_diag, vm=vm,
+                                  constant_regi=constant_regi, cols2regi=cols2regi, regime_err_sep=regime_err_sep,
+                                  name_y=name_y, name_x=name_x, name_yend=name_yend, name_q=name_q,
+                                  name_regimes=name_regimes, name_w=name_w, name_gwk=name_gwk,
+                                  name_ds=name_ds, summ=False)
+            if regime_lag_sep:
+                # rho varies by regime: compute predictions regime by regime.
+                self.sp_att_reg(w_i, regi_ids, yend2[:, -1].reshape(self.n, 1))
+            else:
+                # Single rho: the last coefficient is the spatial lag
+                # parameter.
+                self.rho = self.betas[-1]
+                self.predy_e, self.e_pred, warn = sp_att(w, self.y, self.predy,
+                                                         yend2[:, -1].reshape(self.n, 1), self.rho)
+                set_warn(self, warn)
+            self.regime_lag_sep = regime_lag_sep
+            self.title = "SPATIAL " + self.title
+            SUMMARY.GM_Lag(
+                reg=self, w=w, vm=vm, spat_diag=spat_diag, regimes=True)
+
+    def GM_Lag_Regimes_Multi(self, y, x, w_i, w, regi_ids, cores=False,
+                             yend=None, q=None, w_lags=1, lag_q=True,
+                             robust=None, gwk=None, sig2n_k=False, cols2regi='all',
+                             spat_diag=False, vm=False, name_y=None, name_x=None,
+                             name_yend=None, name_q=None, name_regimes=None,
+                             name_w=None, name_gwk=None, name_ds=None):
+        # Estimate one spatial lag model per regime (optionally in parallel
+        # via multiprocessing) and assemble joint betas, covariance,
+        # residuals, predictions and variable names on self.
+        #        pool = mp.Pool(cores)
+        self.name_ds = USER.set_name_ds(name_ds)
+        name_x = USER.set_name_x(name_x, x)
+        name_yend.append(USER.set_name_yend_sp(name_y))
+        self.name_w = USER.set_name_w(name_w, w_i)
+        self.name_gwk = USER.set_name_w(name_gwk, gwk)
+        results_p = {}
+        """
+        for r in self.regimes_set:
+            w_r = w_i[r].sparse
+            if system() == 'Windows':
+                is_win = True
+                results_p[r] = _work(*(y,x,regi_ids,r,yend,q,w_r,w_lags,lag_q,robust,sig2n_k,self.name_ds,name_y,name_x,name_yend,name_q,self.name_w,name_regimes))
+            else:                
+                results_p[r] = pool.apply_async(_work,args=(y,x,regi_ids,r,yend,q,w_r,w_lags,lag_q,robust,sig2n_k,self.name_ds,name_y,name_x,name_yend,name_q,self.name_w,name_regimes, ))
+                is_win = False
+        """
+        # Fit each regime's model: asynchronously in a process pool when
+        # cores is set, otherwise serially in this process.
+        for r in self.regimes_set:
+            w_r = w_i[r].sparse
+            if cores:
+                pool = mp.Pool(None)
+                results_p[r] = pool.apply_async(_work, args=(
+                    y, x, regi_ids, r, yend, q, w_r, w_lags, lag_q, robust, sig2n_k, self.name_ds, name_y, name_x, name_yend, name_q, self.name_w, name_regimes, ))
+            else:
+                results_p[r] = _work(*(y, x, regi_ids, r, yend, q, w_r, w_lags, lag_q, robust,
+                                       sig2n_k, self.name_ds, name_y, name_x, name_yend, name_q, self.name_w, name_regimes))
+
+        # Stacked result dimensions: kr coefficients per regime (including
+        # that regime's spatial lag), nr regimes, no global (fixed) part.
+        self.kryd = 0
+        self.kr = len(cols2regi) + 1
+        self.kf = 0
+        self.nr = len(self.regimes_set)
+        self.name_x_r = name_x + name_yend
+        self.name_regimes = name_regimes
+        self.vm = np.zeros((self.nr * self.kr, self.nr * self.kr), float)
+        self.betas = np.zeros((self.nr * self.kr, 1), float)
+        self.u = np.zeros((self.n, 1), float)
+        self.predy = np.zeros((self.n, 1), float)
+        self.predy_e = np.zeros((self.n, 1), float)
+        self.e_pred = np.zeros((self.n, 1), float)
+        """
+        if not is_win:
+            pool.close()
+            pool.join()
+        """
+        if cores:
+            pool.close()
+            pool.join()
+        results = {}
+        self.name_y, self.name_x, self.name_yend, self.name_q, self.name_z, self.name_h = [
+        ], [], [], [], [], []
+        counter = 0
+        # Collect the per-regime fits and scatter them into the joint arrays.
+        for r in self.regimes_set:
+            """
+            if is_win:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            """
+            if not cores:
+                results[r] = results_p[r]
+            else:
+                results[r] = results_p[r].get()
+            # Reduced-form predictions/errors for this regime's subset.
+            results[r].predy_e, results[r].e_pred, warn = sp_att(w_i[r], results[r].y, results[
+                                                                 r].predy, results[r].yend[:, -1].reshape(results[r].n, 1), results[r].rho)
+            set_warn(results[r], warn)
+            results[r].w = w_i[r]
+            # Block-diagonal placement of this regime's covariance and betas.
+            self.vm[(counter * self.kr):((counter + 1) * self.kr),
+                    (counter * self.kr):((counter + 1) * self.kr)] = results[r].vm
+            self.betas[
+                (counter * self.kr):((counter + 1) * self.kr), ] = results[r].betas
+            self.u[regi_ids[r], ] = results[r].u
+            self.predy[regi_ids[r], ] = results[r].predy
+            self.predy_e[regi_ids[r], ] = results[r].predy_e
+            self.e_pred[regi_ids[r], ] = results[r].e_pred
+            self.name_y += results[r].name_y
+            self.name_x += results[r].name_x
+            self.name_yend += results[r].name_yend
+            self.name_q += results[r].name_q
+            self.name_z += results[r].name_z
+            self.name_h += results[r].name_h
+            # Allocate the HAC design matrix once, sized from the first
+            # regime's instrument matrix h.
+            if r == self.regimes_set[0]:
+                self.hac_var = np.zeros((self.n, results[r].h.shape[1]), float)
+            self.hac_var[regi_ids[r], ] = results[r].h
+            counter += 1
+        self.multi = results
+        # Post-estimation corrections applied to the assembled model.
+        if robust == 'hac':
+            hac_multi(self, gwk, constant=True)
+        if robust == 'ogmm':
+            set_warn(
+                self, "Residuals treated as homoskedastic for the purpose of diagnostics.")
+        self.chow = REGI.Chow(self)
+        if spat_diag:
+            pass
+            #self._get_spat_diag_props(y, x, w, yend, q, w_lags, lag_q)
+        SUMMARY.GM_Lag_multi(
+            reg=self, multireg=self.multi, vm=vm, spat_diag=spat_diag, regimes=True, w=w)
+
+    def sp_att_reg(self, w_i, regi_ids, wy):
+        # Compute reduced-form predicted values (predy_e) and prediction
+        # errors (e_pred) regime by regime when the spatial lag coefficient
+        # rho differs across regimes.
+        # NOTE(review): predy_e_r and e_pred_r are never used below.
+        predy_e_r, e_pred_r = {}, {}
+        self.predy_e = np.zeros((self.n, 1), float)
+        self.e_pred = np.zeros((self.n, 1), float)
+        counter = 1
+        for r in self.regimes_set:
+            # Locate regime r's spatial-lag coefficient inside the stacked
+            # betas vector.
+            self.rho = self.betas[(self.kr - self.kryd) * self.nr + self.kf - (
+                self.yend.shape[1] - self.nr * self.kryd) + self.kryd * counter - 1]
+            # NOTE(review): the 'warn' value returned by sp_att is discarded
+            # here; confirm whether set_warn(self, warn) should be called as
+            # it is in __init__.
+            self.predy_e[regi_ids[r], ], self.e_pred[regi_ids[r], ], warn = sp_att(w_i[r],
+                                                                                   self.y[regi_ids[r]], self.predy[
+                                                                                       regi_ids[r]],
+                                                                                   wy[regi_ids[r]], self.rho)
+            counter += 1
+
+    def _get_spat_diag_props(self, y, x, w, yend, q, w_lags, lag_q):
+        # Build the pooled design matrix (z) and instrument matrix (h),
+        # expanded by regime, plus the covariance core needed for spatial
+        # diagnostics.
+        self._cache = {}
+        yend, q = set_endog(y, x, w, yend, q, w_lags, lag_q)
+        x = USER.check_constant(x)
+        x = REGI.regimeX_setup(
+            x, self.regimes, [True] * x.shape[1], self.regimes_set)
+        # z stacks exogenous and endogenous regressors; the spatial lag (last
+        # column of yend) stays global ([False]) rather than regime-specific.
+        self.z = sphstack(x, REGI.regimeX_setup(
+            yend, self.regimes, [True] * (yend.shape[1] - 1) + [False], self.regimes_set))
+        self.h = sphstack(
+            x, REGI.regimeX_setup(q, self.regimes, [True] * q.shape[1], self.regimes_set))
+        # varb = [Z'H (H'H)^-1 H'Z]^-1, the 2SLS coefficient covariance core.
+        hthi = np.linalg.inv(spdot(self.h.T, self.h))
+        zth = spdot(self.z.T, self.h)
+        self.varb = np.linalg.inv(spdot(spdot(zth, hthi), zth.T))
+
+
+def _work(y, x, regi_ids, r, yend, q, w_r, w_lags, lag_q, robust, sig2n_k, name_ds, name_y, name_x, name_yend, name_q, name_w, name_regimes):
+    y_r = y[regi_ids[r]]
+    x_r = x[regi_ids[r]]
+    if yend != None:
+        yend_r = yend[regi_ids[r]]
+    else:
+        yend_r = yend
+    if q != None:
+        q_r = q[regi_ids[r]]
+    else:
+        q_r = q
+    yend_r, q_r = set_endog_sparse(y_r, x_r, w_r, yend_r, q_r, w_lags, lag_q)
+    x_constant = USER.check_constant(x_r)
+    if robust == 'hac' or robust == 'ogmm':
+        robust2 = None
+    else:
+        robust2 = robust
+    model = BaseTSLS(
+        y_r, x_constant, yend_r, q_r, robust=robust2, sig2n_k=sig2n_k)
+    model.title = "SPATIAL TWO STAGE LEAST SQUARES ESTIMATION - REGIME %s" % r
+    if robust == 'ogmm':
+        _optimal_weight(model, sig2n_k, warn=False)
+    model.rho = model.betas[-1]
+    model.robust = USER.set_robust(robust)
+    model.name_ds = name_ds
+    model.name_y = '%s_%s' % (str(r), name_y)
+    model.name_x = ['%s_%s' % (str(r), i) for i in name_x]
+    model.name_yend = ['%s_%s' % (str(r), i) for i in name_yend]
+    model.name_z = model.name_x + model.name_yend
+    model.name_q = ['%s_%s' % (str(r), i) for i in name_q]
+    model.name_h = model.name_x + model.name_q
+    model.name_w = name_w
+    model.name_regimes = name_regimes
+    return model
+
+
+def _test():
+    import doctest
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+
+if __name__ == '__main__':
+    _test()
+    # Demonstration: regime-specific spatial lag model on the Columbus data,
+    # with HOVAL treated as endogenous and DISCBD as its instrument, and
+    # regimes defined by the NSA (north/south) dummy.
+    import numpy as np
+    import pysal
+    db = pysal.open(pysal.examples.get_path("columbus.dbf"), 'r')
+    y_var = 'CRIME'
+    y = np.array([db.by_col(y_var)]).reshape(49, 1)
+    x_var = ['INC']
+    x = np.array([db.by_col(name) for name in x_var]).T
+    yd_var = ['HOVAL']
+    yd = np.array([db.by_col(name) for name in yd_var]).T
+    q_var = ['DISCBD']
+    q = np.array([db.by_col(name) for name in q_var]).T
+    r_var = 'NSA'
+    regimes = db.by_col(r_var)
+    # Row-standardized queen contiguity weights from the shapefile.
+    w = pysal.queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
+    w.transform = 'r'
+    model = GM_Lag_Regimes(y, x, regimes, yend=yd, q=q, w=w, constant_regi='many', spat_diag=True, sig2n_k=False, lag_q=True, name_y=y_var,
+                           name_x=x_var, name_yend=yd_var, name_q=q_var, name_regimes=r_var, name_ds='columbus', name_w='columbus.gal', regime_err_sep=True, robust='white')
+    print model.summary
diff --git a/pysal/spreg/user_output.py b/pysal/spreg/user_output.py
new file mode 100644
index 0000000..a8dd35d
--- /dev/null
+++ b/pysal/spreg/user_output.py
@@ -0,0 +1,642 @@
+"""Internal helper files for user output."""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, David C. Folch david.folch at asu.edu, Jing Yao jingyao at asu.edu"
+import textwrap as TW
+import numpy as np
+import copy as COPY
+import diagnostics as diagnostics
+import diagnostics_tsls as diagnostics_tsls
+import diagnostics_sp as diagnostics_sp
+import pysal
+import scipy
+from scipy.sparse.csr import csr_matrix
+from utils import spdot, sphstack
+
+__all__ = []
+
+
+def set_name_ds(name_ds):
+    """Set the dataset name in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_ds     : string
+                  User provided dataset name.
+
+    Returns
+    -------
+
+    name_ds     : string
+
+    """
+    if not name_ds:
+        name_ds = 'unknown'
+    return name_ds
+
+
+def set_name_y(name_y):
+    """Set the dataset name in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_ds     : string
+                  User provided dataset name.
+
+    Returns
+    -------
+
+    name_ds     : string
+
+    """
+    if not name_y:
+        name_y = 'dep_var'
+    return name_y
+
+
+def set_name_x(name_x, x, constant=False):
+    """Set the independent variable names in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_x      : list of string
+                  User provided exogenous variable names.
+
+    x           : array
+                  User provided exogenous variables.
+    constant    : boolean
+                  If False (default), constant name not included in name_x list yet
+                  Append 'CONSTANT' at the front of the names
+
+    Returns
+    -------
+
+    name_x      : list of strings
+
+    """
+    if not name_x:
+        name_x = ['var_' + str(i + 1) for i in range(x.shape[1])]
+    else:
+        name_x = name_x[:]
+    if not constant:
+        name_x.insert(0, 'CONSTANT')
+    return name_x
+
+
+def set_name_yend(name_yend, yend):
+    """Set the endogenous variable names in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_yend   : list of strings
+                  User provided exogenous variable names.
+
+    Returns
+    -------
+
+    name_yend   : list of strings
+
+    """
+    if yend != None:
+        if not name_yend:
+            return ['endogenous_' + str(i + 1) for i in range(len(yend[0]))]
+        else:
+            return name_yend[:]
+    else:
+        return []
+
+
+def set_name_q(name_q, q):
+    """Set the external instrument names in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_q      : string
+                  User provided instrument names.
+    q           : array
+                  Array of instruments
+
+    Returns
+    -------
+
+    name_q      : list of strings
+
+    """
+    if q != None:
+        if not name_q:
+            return ['instrument_' + str(i + 1) for i in range(len(q[0]))]
+        else:
+            return name_q[:]
+    else:
+        return []
+
+
+def set_name_yend_sp(name_y):
+    """Set the spatial lag name in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_y      : string
+                  User provided dependent variable name.
+
+    Returns
+    -------
+
+    name_yend_sp : string
+
+    """
+    return 'W_' + name_y
+
+
+def set_name_q_sp(name_x, w_lags, name_q, lag_q, force_all=False):
+    """Set the spatial instrument names in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_x      : list of strings
+                  User provided exogenous variable names.
+
+    w_lags      : int
+                  User provided number of spatial instruments lags
+
+    Returns
+    -------
+
+    name_q_sp   : list of strings
+
+    """
+    if force_all:
+        names = name_x
+    else:
+        names = name_x[1:]  # drop the constant
+    if lag_q:
+        names = names + name_q
+    sp_inst_names = []
+    for j in names:
+        sp_inst_names.append('W_' + j)
+    if w_lags > 1:
+        for i in range(2, w_lags + 1):
+            for j in names:
+                sp_inst_names.append('W' + str(i) + '_' + j)
+    return sp_inst_names
+
+
+def set_name_h(name_x, name_q):
+    """Set the full instruments names in regression; return generic name if user
+    provides no explicit name."
+
+    Parameters
+    ----------
+
+    name_x      : list of strings
+                  User provided exogenous variable names.
+    name_q      : list of strings
+                  User provided instrument variable names.
+
+    Returns
+    -------
+
+    name_h      : list of strings
+
+    """
+    return name_x + name_q
+
+
+def set_robust(robust):
+    """Return generic name if user passes None to the robust parameter in a
+    regression. Note: already verified that the name is valid in
+    check_robust() if the user passed anything besides None to robust.
+
+    Parameters
+    ----------
+
+    robust      : string or None
+                  Object passed by the user to a regression class
+
+    Returns
+    -------
+
+    robust      : string
+
+    """
+    if not robust:
+        return 'unadjusted'
+    return robust
+
+
+def set_name_w(name_w, w):
+    """Return generic name if user passes None to the robust parameter in a
+    regression. Note: already verified that the name is valid in
+    check_robust() if the user passed anything besides None to robust.
+
+    Parameters
+    ----------
+
+    name_w      : string
+                  Name passed in by user. Default is None.
+    w           : W object
+                  pysal W object passed in by user
+
+    Returns
+    -------
+
+    name_w      : string
+
+    """
+    if w != None:
+        if name_w != None:
+            return name_w
+        else:
+            return 'unknown'
+    return None
+
+
+def set_name_multi(multireg, multi_set, name_multiID, y, x, name_y, name_x, name_ds, title, name_w, robust, endog=False, sp_lag=False):
+    """Attach generic, regime-prefixed names to each regression object in a
+    multi-regime dictionary and return the dictionary.
+
+    Parameters
+    ----------
+
+    endog       : tuple
+                  If the regression object contains endogenous variables, endog must have the
+                  following parameters in the following order: (yend, q, name_yend, name_q)
+    sp_lag       : tuple
+                  If the regression object contains spatial lag, sp_lag must have the
+                  following parameters in the following order: (w_lags, lag_q)
+
+    """
+    name_ds = set_name_ds(name_ds)
+    name_y = set_name_y(name_y)
+    name_x = set_name_x(name_x, x)
+    name_multiID = set_name_ds(name_multiID)
+    # NOTE(review): this branch indexes endog even when only sp_lag is
+    # given; presumably callers always pass endog together with sp_lag --
+    # confirm before relying on sp_lag alone.
+    if endog or sp_lag:
+        name_yend = set_name_yend(endog[2], endog[0])
+        name_q = set_name_q(endog[3], endog[1])
+    for r in multi_set:
+        # Prefix every variable name with the regime id r.
+        multireg[r].title = title + "%s" % r
+        multireg[r].name_ds = name_ds
+        multireg[r].robust = set_robust(robust)
+        multireg[r].name_w = name_w
+        multireg[r].name_y = '%s_%s' % (str(r), name_y)
+        multireg[r].name_x = ['%s_%s' % (str(r), i) for i in name_x]
+        multireg[r].name_multiID = name_multiID
+        if endog or sp_lag:
+            multireg[r].name_yend = ['%s_%s' % (str(r), i) for i in name_yend]
+            multireg[r].name_q = ['%s_%s' % (str(r), i) for i in name_q]
+            if sp_lag:
+                # Add the spatial lag of y and the lagged instrument names.
+                multireg[r].name_yend.append(
+                    set_name_yend_sp(multireg[r].name_y))
+                multireg[r].name_q.extend(
+                    set_name_q_sp(multireg[r].name_x, sp_lag[0], multireg[r].name_q, sp_lag[1]))
+            multireg[r].name_z = multireg[r].name_x + multireg[r].name_yend
+            multireg[r].name_h = multireg[r].name_x + multireg[r].name_q
+    return multireg
+
+
+def check_arrays(*arrays):
+    """Check if the objects passed by a user to a regression class are
+    correctly structured. If the user's data is correctly formed this function
+    returns nothing, if not then an exception is raised. Note, this does not 
+    check for model setup, simply the shape and types of the objects.
+
+    Parameters
+    ----------
+
+    *arrays : anything
+              Objects passed by the user to a regression class; any type
+              object can be passed and any number of objects can be passed
+
+    Returns
+    -------
+
+    Returns : int
+              number of observations
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> # Extract CRIME column from the dbf file
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+    >>> n = check_arrays(y, X)
+    >>> print n
+    49
+
+    """
+    allowed = ['ndarray', 'csr_matrix']
+    rows = []
+    for i in arrays:
+        if i == None:
+            continue
+        if i.__class__.__name__ not in allowed:
+            raise Exception, "all input data must be either numpy arrays or sparse csr matrices"
+        shape = i.shape
+        if len(shape) > 2:
+            raise Exception, "all input arrays must have exactly two dimensions"
+        if len(shape) == 1:
+            raise Exception, "all input arrays must have exactly two dimensions"
+        if shape[0] < shape[1]:
+            raise Exception, "one or more input arrays have more columns than rows"
+        rows.append(shape[0])
+    if len(set(rows)) > 1:
+        raise Exception, "arrays not all of same length"
+    return rows[0]
+
+
+def check_y(y, n):
+    """Check if the y object passed by a user to a regression class is
+    correctly structured. If the user's data is correctly formed this function
+    returns nothing, if not then an exception is raised. Note, this does not 
+    check for model setup, simply the shape and types of the objects.
+
+    Parameters
+    ----------
+
+    y       : anything
+              Object passed by the user to a regression class; any type
+              object can be passed
+
+    n       : int
+              number of observations
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> # Extract CRIME column from the dbf file
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> check_y(y, 49)
+    >>> # should not raise an exception
+
+    """
+    if y.__class__.__name__ != 'ndarray':
+        print y.__class__.__name__
+        raise Exception, "y must be a numpy array"
+    shape = y.shape
+    if len(shape) > 2:
+        raise Exception, "all input arrays must have exactly two dimensions"
+    if len(shape) == 1:
+        raise Exception, "all input arrays must have exactly two dimensions"
+    if shape != (n, 1):
+        raise Exception, "y must be a single column array matching the length of other arrays"
+
+
+def check_weights(w, y, w_required=False):
+    """Check if the w parameter passed by the user is a pysal.W object and
+    check that its dimensionality matches the y parameter.  Note that this
+    check is not performed if w set to None.
+
+    Parameters
+    ----------
+
+    w       : any python object
+              Object passed by the user to a regression class; any type
+              object can be passed
+    y       : numpy array
+              Any shape numpy array can be passed. Note: if y passed
+              check_arrays, then it will be valid for this function
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> # Extract CRIME column from the dbf file
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
+    >>> check_weights(w, y)
+    >>> # should not raise an exception
+
+    """
+    if w_required == True or w != None:
+        if w == None:
+            raise Exception, "A weights matrix w must be provided to run this method."
+        if not isinstance(w, pysal.W):
+            raise Exception, "w must be a pysal.W object"
+        if w.n != y.shape[0]:
+            raise Exception, "y must be nx1, and w must be an nxn PySAL W object"
+        diag = w.sparse.diagonal()
+        # check to make sure all entries equal 0
+        if diag.min() != 0:
+            raise Exception, "All entries on diagonal must equal 0."
+        if diag.max() != 0:
+            raise Exception, "All entries on diagonal must equal 0."
+
+
+def check_robust(robust, wk):
+    """Check if the combination of robust and wk parameters passed by the user
+    are valid. Note: this does not check if the W object is a valid adaptive 
+    kernel weights matrix needed for the HAC.
+
+    Parameters
+    ----------
+
+    robust  : string or None
+              Object passed by the user to a regression class
+    w       : any python object
+              Object passed by the user to a regression class; any type
+              object can be passed
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> # Extract CRIME column from the dbf file
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+    >>> wk = None
+    >>> check_robust('White', wk)
+    >>> # should not raise an exception
+
+    """
+    if robust:
+        if robust.lower() == 'hac':
+            if type(wk).__name__ != 'W' and type(wk).__name__ != 'Kernel':
+                raise Exception, "HAC requires that wk be a pysal.W object"
+            diag = wk.sparse.diagonal()
+            # check to make sure all entries equal 1
+            if diag.min() < 1.0:
+                print diag.min()
+                raise Exception, "All entries on diagonal of kernel weights matrix must equal 1."
+            if diag.max() > 1.0:
+                print diag.max()
+                raise Exception, "All entries on diagonal of kernel weights matrix must equal 1."
+            # ensure off-diagonal entries are in the set of real numbers [0,1)
+            wegt = wk.weights
+            for i in wk.id_order:
+                vals = wegt[i]
+                vmin = min(vals)
+                vmax = max(vals)
+                if vmin < 0.0:
+                    raise Exception, "Off-diagonal entries must be greater than or equal to 0."
+                if vmax > 1.0:
+                    # NOTE: we are not checking for the case of exactly 1.0 ###
+                    raise Exception, "Off-diagonal entries must be less than 1."
+        elif robust.lower() == 'white' or robust.lower() == 'ogmm':
+            if wk:
+                raise Exception, "White requires that wk be set to None"
+        else:
+            raise Exception, "invalid value passed to robust, see docs for valid options"
+
+
+def check_spat_diag(spat_diag, w):
+    """Check if there is a w parameter passed by the user if the user also
+    requests spatial diagnostics.
+
+    Parameters
+    ----------
+
+    spat_diag   : boolean
+                  Value passed by a used to a regression class
+    w           : any python object
+                  Object passed by the user to a regression class; any type
+                  object can be passed
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> # Extract CRIME column from the dbf file
+    >>> y = np.array(db.by_col("CRIME"))
+    >>> y = np.reshape(y, (49,1))
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+    >>> w = pysal.open(pysal.examples.get_path("columbus.gal"), 'r').read()
+    >>> check_spat_diag(True, w)
+    >>> # should not raise an exception
+
+    """
+    if spat_diag:
+        if type(w).__name__ != 'W':
+            raise Exception, "w must be a pysal.W object to run spatial diagnostics"
+
+
+def check_regimes(reg_set, N=None, K=None):
+    """Check if there are at least two regimes
+
+    Parameters
+    ----------
+
+    reg_set     : list
+                  List of the regimes IDs
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    """
+    if len(reg_set) < 2:
+        raise Exception, "At least 2 regimes are needed to run regimes methods. Please check your regimes variable."
+    if 1.0 * N / len(reg_set) < K + 1:
+        raise Exception, "There aren't enough observations for the given number of regimes and variables. Please check your regimes variable."
+
+
+def check_constant(x):
+    """Check if the X matrix contains a constant, raise exception if it does
+    not
+
+    Parameters
+    ----------
+
+    x           : array
+                  Value passed by a used to a regression class
+
+    Returns
+    -------
+
+    Returns : nothing
+              Nothing is returned
+
+    Examples
+    --------
+
+    >>> import numpy as np
+    >>> import pysal
+    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
+    >>> X = []
+    >>> X.append(db.by_col("INC"))
+    >>> X.append(db.by_col("HOVAL"))
+    >>> X = np.array(X).T
+    >>> x_constant = check_constant(X)
+    >>> x_constant.shape
+    (49, 3)
+
+    """
+    if not diagnostics.constant_check:
+        raise Exception, "x array cannot contain a constant vector; constant will be added automatically"
+    else:
+        x_constant = COPY.copy(x)
+        return sphstack(np.ones((x_constant.shape[0], 1)), x_constant)
+
+
+def _test():
+    # Run the doctests embedded in this module's docstrings.
+    import doctest
+    doctest.testmod()
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/spreg/utils.py b/pysal/spreg/utils.py
new file mode 100644
index 0000000..ec87f20
--- /dev/null
+++ b/pysal/spreg/utils.py
@@ -0,0 +1,839 @@
+"""
+Tools for different procedure estimations
+"""
+
+__author__ = "Luc Anselin luc.anselin at asu.edu, \
+        Pedro V. Amaral pedro.amaral at asu.edu, \
+        David C. Folch david.folch at asu.edu, \
+        Daniel Arribas-Bel darribas at asu.edu"
+
+import numpy as np
+from scipy import sparse as SP
+import scipy.optimize as op
+import numpy.linalg as la
+from pysal import lag_spatial
+import copy
+
+
class RegressionPropsY:

    """
    Mix-in that lazily computes summary statistics of the dependent variable
    for any regression class that defines ``self.y`` and a ``self._cache``
    dict.  It takes no parameters.  See BaseOLS for example usage.

    Attributes
    ----------
    mean_y  : float
              Mean of the dependent variable
    std_y   : float
              Standard deviation of the dependent variable (ddof=1)

    """

    @property
    def mean_y(self):
        # Serve from the cache; compute on first access only.
        try:
            return self._cache['mean_y']
        except KeyError:
            self._cache['mean_y'] = np.mean(self.y)
            return self._cache['mean_y']

    @property
    def std_y(self):
        # Sample standard deviation (n-1 denominator), cached.
        try:
            return self._cache['std_y']
        except KeyError:
            self._cache['std_y'] = np.std(self.y, ddof=1)
            return self._cache['std_y']
+
+
class RegressionPropsVM:

    """
    Mix-in providing cached variance-related regression properties for any
    regression class that defines ``self._cache`` plus the attributes each
    property reads (``u``, ``n``, ``k``, ``sig2``, ``xtxi``).  It takes no
    parameters.  See BaseOLS for example usage.

    Attributes
    ----------
    utu     : float
              Sum of the squared residuals
    sig2n    : float
              Sigma squared with n in the denominator
    sig2n_k : float
              Sigma squared with n-k in the denominator
    vm      : array
              Variance-covariance matrix (kxk)

    """

    @property
    def utu(self):
        try:
            return self._cache['utu']
        except KeyError:
            self._cache['utu'] = np.sum(self.u ** 2)
            return self._cache['utu']

    @property
    def sig2n(self):
        try:
            return self._cache['sig2n']
        except KeyError:
            self._cache['sig2n'] = self.utu / self.n
            return self._cache['sig2n']

    @property
    def sig2n_k(self):
        try:
            return self._cache['sig2n_k']
        except KeyError:
            self._cache['sig2n_k'] = self.utu / (self.n - self.k)
            return self._cache['sig2n_k']

    @property
    def vm(self):
        try:
            return self._cache['vm']
        except KeyError:
            self._cache['vm'] = np.dot(self.sig2, self.xtxi)
            return self._cache['vm']
+
+
def get_A1_het(S):
    """
    Builds A1 as in Arraiz et al [1]_

    .. math::

        A_1 = W' W - diag(w'_{.i} w_{.i})

    Parameters
    ----------

    S               : csr_matrix
                      PySAL W object converted into Scipy sparse matrix

    Returns
    -------

    Implicit        : csr_matrix
                      A1 matrix in scipy sparse format

    References
    ----------

    .. [1] Arraiz, I., Drukker, D. M., Kelejian, H., Prucha, I. R. (2010) "A
    Spatial Cliff-Ord-Type Model with Heteroskedastic Innovations: Small and
    Large Sample Results". Journal of Regional Science, Vol. 60, No. 2, pp.
    592-614.
    """
    cross = S.T * S
    rows, cols = S.get_shape()
    # Diagonal of W'W as a sparse matrix, subtracted to zero the diagonal.
    diag = SP.spdiags([cross.diagonal()], [0], rows, cols).asformat('csr')
    return cross - diag
+
+
def get_A1_hom(s, scalarKP=False):
    """
    Builds A1 for the spatial error GM estimation with homoscedasticity as in Drukker et al. [1]_ (p. 9).

    .. math::

        A_1 = \{1 + [n^{-1} tr(W'W)]^2\}^{-1} \[W'W - n^{-1} tr(W'W) I\]

    Parameters
    ----------

    s               : csr_matrix
                      PySAL W object converted into Scipy sparse matrix
    scalarKP        : boolean
                      Flag to include scalar corresponding to the first moment
                      condition as in Drukker et al. [1]_ (Defaults to False)

    Returns
    -------

    Implicit        : csr_matrix
                      A1 matrix in scipy sparse format

    References
    ----------

    .. [1] Drukker, Prucha, I. R., Raciborski, R. (2010) "A command for
    estimating spatial-autoregressive models with spatial-autoregressive
    disturbances and additional endogenous variables". The Stata Journal, 1,
    N. 1, pp. 1-13.
    """
    # Fixed: the original cast n to float and then passed it to SP.eye and
    # np.ones, which require integer dimensions on modern numpy/scipy.
    n = s.shape[0]
    wpw = s.T * s
    # n^{-1} tr(W'W): float division kept explicit for Python 2 semantics.
    tr_n = wpw.diagonal().sum() / float(n)
    e = SP.eye(n, n, format='csr')
    e.data = np.ones(n) * tr_n  # scaled identity: n^{-1} tr(W'W) I
    num = wpw - e
    if not scalarKP:
        return num
    # Scalar correction from the first moment condition.
    return num / (1. + tr_n ** 2.)
+
+
def get_A2_hom(s):
    """
    Builds A2 for the spatial error GM estimation with homoscedasticity as in
    Anselin (2011) [1]_

    .. math::

        A_2 = \dfrac{(W + W')}{2}

    Parameters
    ----------
    s               : csr_matrix
                      PySAL W object converted into Scipy sparse matrix
    Returns
    -------
    Implicit        : csr_matrix
                      A2 matrix in scipy sparse format
    References
    ----------

    .. [1] Anselin (2011) "GMM Estimation of Spatial Error Autocorrelation with and without Heteroskedasticity".
    """
    # Symmetric average of W and its transpose.
    symmetric_sum = s + s.T
    return symmetric_sum * 0.5
+
+
+def _moments2eqs(A1, s, u):
+    '''
+    Helper to compute G and g in a system of two equations as in
+    the heteroskedastic error models from Drukker et al. [1]_
+    ...
+
+    Parameters
+    ----------
+
+    A1          : scipy.sparse.csr
+                  A1 matrix as in the paper, different deppending on whether
+                  it's homocedastic or heteroskedastic model
+
+    s           : W.sparse
+                  Sparse representation of spatial weights instance
+
+    u           : array
+                  Residuals. nx1 array assumed to be aligned with w
+
+    Attributes
+    ----------
+
+    moments     : list
+                  List of two arrays corresponding to the matrices 'G' and
+                  'g', respectively.
+
+
+    References
+    ----------
+
+    .. [1] Drukker, Prucha, I. R., Raciborski, R. (2010) "A command for
+    estimating spatial-autoregressive models with spatial-autoregressive
+    disturbances and additional endogenous variables". The Stata Journal, 1,
+    N. 1, pp. 1-13.
+    '''
+    n = float(s.shape[0])
+    A1u = A1 * u
+    wu = s * u
+    g1 = np.dot(u.T, A1u)
+    g2 = np.dot(u.T, wu)
+    g = np.array([[g1][0][0], [g2][0][0]]) / n
+
+    G11 = np.dot(u.T, ((A1 + A1.T) * wu))
+    G12 = -np.dot((wu.T * A1), wu)
+    G21 = np.dot(u.T, ((s + s.T) * wu))
+    G22 = -np.dot(wu.T, (s * wu))
+    G = np.array([[G11[0][0], G12[0][0]], [G21[0][0], G22[0][0]]]) / n
+    return [G, g]
+
+
def optim_moments(moments_in, vcX=np.array([0])):
    """
    Optimization of moments

    Parameters
    ----------

    moments_in  : list
                  [G, g] arrays as produced by the moment helpers
    vcX         : array
                  Optional. 2x2 array with the Variance-Covariance matrix to
                  be used as weights in the optimization (applies Cholesky
                  decomposition). Set empty by default.

    Returns
    -------
    Implicit    : float
                  First component of the minimizer found by L-BFGS-B
                  (the estimated spatial parameter).
    """
    system = copy.deepcopy(moments_in)
    if vcX.any():
        # Weight the moment equations by the Cholesky factor of inv(vcX).
        Ec = np.transpose(la.cholesky(la.inv(vcX)))
        system[0] = np.dot(Ec, moments_in[0])
        system[1] = np.dot(Ec, moments_in[1])
    # Rescale both sides to improve conditioning of the search.
    scale = np.min([[np.min(system[0]), np.min(system[1])]])
    system[0], system[1] = system[0] / scale, system[1] / scale
    n_eqs = system[0].shape[0]
    if n_eqs == 2:
        # One parameter: the moment vector is [lambda, lambda^2].
        objective = lambda par: foptim_par(
            np.array([[float(par[0]), float(par[0]) ** 2.]]).T, system)
        start = [0.0]
        bounds = [(-1.0, 1.0)]
    if n_eqs == 3:
        # Two parameters: [lambda, lambda^2, sigma^2].
        objective = lambda par: foptim_par(
            np.array([[float(par[0]), float(par[0]) ** 2., float(par[1])]]).T, system)
        start = [0.0, 0.0]
        bounds = [(-1.0, 1.0), (0.0, None)]
    estimate = op.fmin_l_bfgs_b(
        objective, start, approx_grad=True, bounds=bounds)
    return estimate[0][0]
+
+
def foptim_par(par, moments):
    """
    Preparation of the function of moments for minimization

    Parameters
    ----------

    par             : array
                      Column vector of candidate parameter values
    moments         : list
                      List of Moments with G (moments[0]) and g (moments[1])

    Returns
    -------

    minimum         : float
                      sum of square residuals (e) of the equation system
                      moments.g - moments.G * lambdapar = e
    """
    predicted = np.dot(moments[0], par)
    residuals = moments[1] - predicted
    # Builtin sum preserves the original (1,)-array return type.
    return sum(residuals ** 2)
+
+
def get_spFilter(w, lamb, sf):
    '''
    Compute the spatially filtered variables

    Parameters
    ----------
    w       : weight
              PySAL weights instance (or a sparse/array matrix)
    lamb    : double
              spatial autoregressive parameter
    sf      : array
              the variable needed to compute the filter
    Returns
    --------
    rs      : array
              spatially filtered variable

    Examples
    --------

    >>> import numpy as np
    >>> import pysal
    >>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))
    >>> w=pysal.open(pysal.examples.get_path("columbus.gal")).read()        
    >>> solu = get_spFilter(w,0.5,y)
    >>> print solu[0:5]
    [[  -8.9882875]
     [ -20.5685065]
     [ -28.196721 ]
     [ -36.9051915]
     [-111.1298   ]]

    '''
    # Fixed: a bare ``except`` here could mask real errors (e.g. shape
    # mismatches) by silently switching code paths; only the missing
    # ``.sparse`` attribute should trigger the fallback.
    try:
        ws = w.sparse
    except AttributeError:
        ws = w
    return sf - lamb * (ws * sf)
+
+
def get_lags(w, x, w_lags):
    '''
    Calculates a given order of spatial lags and all the smaller orders

    Parameters
    ----------
    w       : weight
              PySAL weights instance
    x       : array
              nxk arrays with the variables to be lagged
    w_lags  : integer
              Maximum order of spatial lag

    Returns
    --------
    rs      : array
              nxk*(w_lags+1) array with original and spatially lagged variables

    '''
    current = lag_spatial(w, x)
    stacked = current
    # Append one additional lag order per iteration.
    for _ in range(1, w_lags):
        current = lag_spatial(w, current)
        stacked = sphstack(stacked, current)
    return stacked
+
+
def inverse_prod(w, data, scalar, post_multiply=False, inv_method="power_exp", threshold=0.0000000001, max_iterations=None):
    """ 
    Compute the product of the inverse spatial filter (I - scalar*W)^-1 and a
    data vector, either exactly or via power expansion.

    Parameters
    ----------

    w               : Pysal W object
                      nxn Pysal spatial weights object 

    data            : Numpy array
                      nx1 vector of data

    scalar          : float
                      Scalar value (typically rho or lambda)

    post_multiply   : boolean
                      If True then post-multiplies the data vector by the
                      inverse of the spatial filter, if false then
                      pre-multiplies.
    inv_method      : string
                      If "true_inv" uses the true inverse of W (slow);
                      If "power_exp" uses the power expansion method (default)

    threshold       : float
                      Test value to stop the iterations. Test is against
                      sqrt(increment' * increment), where increment is a
                      vector representing the contribution from each
                      iteration.

    max_iterations  : integer
                      Maximum number of iterations for the expansion.   

    Examples
    --------

    >>> import numpy, pysal
    >>> import numpy.linalg as la
    >>> np.random.seed(10)
    >>> w = pysal.lat2W(5, 5)
    >>> w.transform = 'r'
    >>> data = np.random.randn(w.n)
    >>> data.shape = (w.n, 1)
    >>> rho = 0.4
    >>> inv_pow = inverse_prod(w, data, rho, inv_method="power_exp")
    >>> # true matrix inverse
    >>> inv_reg = inverse_prod(w, data, rho, inv_method="true_inv")
    >>> np.allclose(inv_pow, inv_reg, atol=0.0001)
    True
    >>> # test the transpose version
    >>> inv_pow = inverse_prod(w, data, rho, inv_method="power_exp", post_multiply=True)
    >>> inv_reg = inverse_prod(w, data, rho, inv_method="true_inv", post_multiply=True)
    >>> np.allclose(inv_pow, inv_reg, atol=0.0001)
    True

    """
    if inv_method == "power_exp":
        inv_prod = power_expansion(
            w, data, scalar, post_multiply=post_multiply,
            threshold=threshold, max_iterations=max_iterations)
    elif inv_method == "true_inv":
        # w may be a pysal W object (dense form via w.full()) or already a
        # plain matrix; fall back only when the W-specific API is missing
        # (the original bare ``except`` could hide unrelated errors).
        try:
            matrix = la.inv(np.eye(w.n) - (scalar * w.full()[0]))
        except AttributeError:
            matrix = la.inv(np.eye(w.shape[0]) - (scalar * w))
        if post_multiply:
            inv_prod = spdot(data.T, matrix)
        else:
            inv_prod = spdot(matrix, data)
    else:
        # Fixed: Python-2-only ``raise Exception, msg`` statement syntax.
        raise Exception("Invalid method selected for inversion.")
    return inv_prod
+
+
def power_expansion(w, data, scalar, post_multiply=False, threshold=0.0000000001, max_iterations=None):
    """
    Compute the inverse of a matrix using the power expansion (Leontief
    expansion).  General form is:

        .. math:: 
            x &= (I - \rho W)^{-1}v = [I + \rho W + \rho^2 WW + \dots]v \\
              &= v + \rho Wv + \rho^2 WWv + \dots

    Parameters
    ----------
    w               : W object, sparse or dense matrix
                      Spatial weights; ``w.sparse`` is used when available.
    data            : array
                      Vector to which the expansion is applied.
    scalar          : float
                      Spatial parameter (series converges for |scalar*W| < 1).
    post_multiply   : boolean
                      If True, multiply from the right (data.T * W).
    threshold       : float
                      Convergence tolerance on the norm of the increment.
    max_iterations  : integer
                      Maximum number of terms in the expansion.

    Examples
    --------
    Tests for this function are in inverse_prod()

    """
    # Fixed: bare ``except`` narrowed to the attribute lookup; ``== None``
    # replaced with the identity test; Python-2-only raise syntax updated.
    try:
        ws = w.sparse
    except AttributeError:
        ws = w
    if post_multiply:
        data = data.T
    running_total = copy.copy(data)
    increment = copy.copy(data)
    count = 1
    test = 10000000
    if max_iterations is None:
        max_iterations = 10000000
    while test > threshold and count <= max_iterations:
        if post_multiply:
            increment = increment * ws * scalar
        else:
            increment = ws * increment * scalar
        running_total += increment
        test_old = test
        # Norm of the new term; a growing term means the series diverges.
        test = la.norm(increment)
        if test > test_old:
            raise Exception(
                "power expansion will not converge, check model specification and that weight are less than 1")
        count += 1
    return running_total
+
+
def set_endog(y, x, w, yend, q, w_lags, lag_q):
    """Build the endogenous variable and instrument matrices for a spatial
    lag model.

    Parameters
    ----------
    y       : array
              nx1 dependent variable
    x       : array
              nxk exogenous variables
    w       : W
              PySAL weights instance
    yend    : array or None
              Additional endogenous variables (None for spatial lag only)
    q       : array
              Instruments for yend (ignored when yend is None)
    w_lags  : integer
              Maximum order of spatial lag used as instruments
    lag_q   : boolean
              If True, also lag the instruments q

    Returns
    -------
    yend, q : tuple of arrays
              Endogenous variables including the spatial lag of y, and the
              full instrument matrix.
    """
    # Create spatial lag of y
    yl = lag_spatial(w, y)
    # spatial and non-spatial instruments
    if issubclass(type(yend), np.ndarray):
        if lag_q:
            lag_vars = sphstack(x, q)
        else:
            lag_vars = x
        spatial_inst = get_lags(w, lag_vars, w_lags)
        q = sphstack(q, spatial_inst)
        yend = sphstack(yend, yl)
    elif yend is None:  # spatial instruments only
        q = get_lags(w, x, w_lags)
        yend = yl
    else:
        # Fixed: Python-2-only raise syntax; also removed six unreachable
        # lines (a stray copy of get_lags' body) that followed the return.
        raise Exception("invalid value passed to yend")
    return yend, q
+
+
def set_endog_sparse(y, x, w, yend, q, w_lags, lag_q):
    """
    Same as set_endog, but with a sparse object passed as weights instead of W object.

    Fixed: Python-2-only raise syntax and ``== None`` comparison (which is
    ambiguous for arrays) replaced with an identity test.
    """
    # Spatial lag of y via direct sparse multiplication.
    yl = w * y
    # spatial and non-spatial instruments
    if issubclass(type(yend), np.ndarray):
        if lag_q:
            lag_vars = sphstack(x, q)
        else:
            lag_vars = x
        spatial_inst = w * lag_vars
        for i in range(w_lags - 1):
            spatial_inst = sphstack(spatial_inst, w * spatial_inst)
        q = sphstack(q, spatial_inst)
        yend = sphstack(yend, yl)
    elif yend is None:  # spatial instruments only
        q = w * x
        for i in range(w_lags - 1):
            q = sphstack(q, w * q)
        yend = yl
    else:
        raise Exception("invalid value passed to yend")
    return yend, q
+
+
def iter_msg(iteration, max_iter):
    # Report whether the optimizer stopped on the iteration cap or on
    # convergence.
    if iteration == max_iter:
        return "Maximum number of iterations reached."
    return "Convergence threshold (epsilon) reached."
+
+
def sp_att(w, y, predy, w_y, rho):
    # Exogenous part of the prediction (X*beta without the spatial term).
    xb = predy - rho * w_y
    if np.abs(rho) >= 1:
        # Out-of-range rho: the spatial multiplier is not computed.
        #warn = "Warning: Estimate for rho is outside the boundary (-1, 1). Computation of true inverse of W was required (slow)."
        #predy_sp = inverse_prod(w, xb, rho, inv_method="true_inv")
        warn = "*** WARNING: Estimate for spatial lag coefficient is outside the boundary (-1, 1). ***"
        predy_sp = np.zeros(y.shape, float)
        resid_sp = np.zeros(y.shape, float)
    else:
        predy_sp = inverse_prod(w, xb, rho)
        warn = None
        # Note 1: Here if omitting pseudo-R2; If not, see Note 2.
        resid_sp = y - predy_sp
    # resid_sp = y - predy_sp #Note 2: Here if computing true inverse; If not,
    # see Note 1.
    return predy_sp, resid_sp, warn
+
+
def spdot(a, b, array_out=True):
    """
    Matrix multiplication function to deal with sparse and dense objects

    Parameters
    ----------

    a           : array
                  first multiplication factor. Can either be sparse or dense.
    b           : array
                  second multiplication factor. Can either be sparse or dense.
    array_out   : boolean
                  If True (default) the output object is always a np.array

    Returns
    -------

    ab : array
         product of a times b. Sparse if a and b are sparse. Dense otherwise.

    Raises
    ------
    Exception   : when neither operand is an ndarray or CSR/CSC matrix.
    """
    if type(a).__name__ == 'ndarray' and type(b).__name__ == 'ndarray':
        ab = np.dot(a, b)
    elif type(a).__name__ in ('csr_matrix', 'csc_matrix') or \
            type(b).__name__ in ('csr_matrix', 'csc_matrix'):
        ab = a * b
        if array_out and type(ab).__name__ in ('csr_matrix', 'csc_matrix'):
            ab = ab.toarray()
    else:
        # Fixed: Python-2-only ``raise Exception, msg`` statement syntax.
        raise Exception("Invalid format for 'spdot' argument: %s and %s" % (
            type(a).__name__, type(b).__name__))
    return ab
+
+
def spmultiply(a, b, array_out=True):
    """
    Element-wise multiplication function to deal with sparse and dense
    objects. Both objects must be of the same type.

    Parameters
    ----------

    a           : array
                  first multiplication factor. Can either be sparse or dense.
    b           : array
                  second multiplication factor. Can either be sparse or dense.
    array_out   : boolean
                  If True (default) the output object is always a np.array

    Returns
    -------

    ab : array
         elementwise multiplied object. Sparse if a is sparse. Dense otherwise.

    Raises
    ------
    Exception   : when the operands are not both dense or both sparse.
    """
    if type(a).__name__ == 'ndarray' and type(b).__name__ == 'ndarray':
        ab = a * b
    elif type(a).__name__ in ('csr_matrix', 'csc_matrix') \
            and type(b).__name__ in ('csr_matrix', 'csc_matrix'):
        ab = a.multiply(b)
        if array_out and type(ab).__name__ in ('csr_matrix', 'csc_matrix'):
            ab = ab.toarray()
    else:
        # Fixed: Python-2-only ``raise Exception, msg`` statement syntax.
        raise Exception("Invalid format for 'spmultiply' argument: %s and %s" % (
            type(a).__name__, type(b).__name__))
    return ab
+
+
def sphstack(a, b, array_out=False):
    """
    Horizontal stacking of vectors (or matrices) to deal with sparse and dense objects

    Parameters
    ----------

    a           : array or sparse matrix
                  First object.
    b           : array or sparse matrix
                  Object to be stacked next to a
    array_out   : boolean
                  If True the output object is a np.array; if False (default)
                  the output object is an np.array if both inputs are
                  arrays or CSR matrix if at least one input is a CSR matrix

    Returns
    -------

    ab          : array or sparse matrix
                  Horizontally stacked objects

    Raises
    ------
    Exception   : when neither input is an ndarray or CSR matrix.
    """
    if type(a).__name__ == 'ndarray' and type(b).__name__ == 'ndarray':
        ab = np.hstack((a, b))
    elif type(a).__name__ == 'csr_matrix' or type(b).__name__ == 'csr_matrix':
        ab = SP.hstack((a, b), format='csr')
        if array_out:
            if type(ab).__name__ == 'csr_matrix':
                ab = ab.toarray()
    else:
        # Fixed: Python-2-only ``raise Exception, msg`` statement syntax.
        raise Exception("Invalid format for 'sphstack' argument: %s and %s" % (
            type(a).__name__, type(b).__name__))
    return ab
+
+
def spbroadcast(a, b, array_out=False):
    """
    Element-wise multiplication of a matrix and vector to deal with sparse 
    and dense objects

    Parameters
    ----------

    a           : array or sparse matrix
                  Object with one or more columns.
    b           : array
                  Object with only one column
    array_out   : boolean
                  If True the output object is a np.array; if False (default)
                  the output object is an np.array if both inputs are
                  arrays or CSR matrix if at least one input is a CSR matrix

    Returns
    -------

    ab          : array or sparse matrix
                  Element-wise multiplication of a and b
    """
    if type(a).__name__ == 'ndarray' and type(b).__name__ == 'ndarray':
        ab = a * b
    elif type(a).__name__ == 'csr_matrix':
        # Scale row i of ``a`` by b[i] via a sparse diagonal matrix.
        b_mod = SP.lil_matrix((b.shape[0], b.shape[0]))
        b_mod.setdiag(b)
        ab = (a.T * b_mod).T
        if array_out:
            # Fixed: the product above can come back as CSC (transpose of a
            # CSR product); the original checked only for CSR and silently
            # returned a sparse object despite array_out=True.
            if type(ab).__name__ in ('csr_matrix', 'csc_matrix'):
                ab = ab.toarray()
    else:
        # Fixed: Python-2-only ``raise Exception, msg`` statement syntax.
        raise Exception("Invalid format for 'spbroadcast' argument: %s and %s" % (
            type(a).__name__, type(b).__name__))
    return ab
+
+
def spmin(a):
    """
    Minimum value in a matrix or vector to deal with sparse and dense objects

    Parameters
    ----------

    a           : array or sparse matrix
                  Object with one or more columns.

    Returns
    -------

    min a       : int or float
                  minimum value in a

    Notes
    -----
    For sparse inputs only the explicitly stored entries are considered;
    an all-zero sparse matrix (no stored entries) returns 0.
    """
    if type(a).__name__ == 'ndarray':
        return a.min()
    elif type(a).__name__ == 'csr_matrix' or type(a).__name__ == 'csc_matrix':
        try:
            return min(a.data)
        except ValueError:  # min() of an empty data array (no stored entries)
            if np.sum(a.data) == 0:
                return 0
            else:
                raise Exception("Error: could not evaluate the minimum value.")
    else:
        # Fixed: this branch referenced an undefined name ``b`` (NameError)
        # and reported the wrong function name ('spmultiply').
        raise Exception(
            "Invalid format for 'spmin' argument: %s" % type(a).__name__)
+
+
def spmax(a):
    """
    Maximum value in a matrix or vector to deal with sparse and dense objects

    Parameters
    ----------

    a           : array or sparse matrix
                  Object with one or more columns.

    Returns
    -------

    max a       : int or float
                  maximum value in a

    Notes
    -----
    For sparse inputs only the explicitly stored entries are considered;
    an all-zero sparse matrix (no stored entries) returns 0.
    """
    if type(a).__name__ == 'ndarray':
        return a.max()
    elif type(a).__name__ == 'csr_matrix' or type(a).__name__ == 'csc_matrix':
        try:
            return max(a.data)
        except ValueError:  # max() of an empty data array (no stored entries)
            if np.sum(a.data) == 0:
                return 0
            else:
                raise Exception("Error: could not evaluate the maximum value.")
    else:
        # Fixed: this branch referenced an undefined name ``b`` (NameError)
        # and reported the wrong function name ('spmultiply').
        raise Exception(
            "Invalid format for 'spmax' argument: %s" % type(a).__name__)
+
+
def set_warn(reg, warn):
    ''' Groups warning messages for printout.

    Appends "Warning: <warn>\\n" to ``reg.warning``, creating the attribute
    on first use.  Falsy ``warn`` values are ignored.
    '''
    # Fixed: replaced the bare try/except (which could mask unrelated
    # errors) with getattr for the first-use case.
    if warn:
        reg.warning = getattr(reg, 'warning', '') + "Warning: " + warn + "\n"
+
+
def RegressionProps_basic(reg, betas=None, predy=None, u=None, sig2=None, sig2n_k=None, vm=None):
    ''' Set props based on arguments passed.

    Fixed: the original compared numpy arrays with ``!= None``, which
    produces an elementwise result and raises "truth value is ambiguous"
    on modern numpy; ``is not None`` restores the intended identity test.
    The bare ``except`` around the predy fallback is narrowed to the
    missing-attribute case.
    '''
    if betas is not None:
        reg.betas = betas
    if predy is not None:
        reg.predy = predy
    else:
        # Prefer the full design matrix z; fall back to x when absent.
        try:
            reg.predy = spdot(reg.z, reg.betas)
        except AttributeError:
            reg.predy = spdot(reg.x, reg.betas)
    if u is not None:
        reg.u = u
    else:
        reg.u = reg.y - reg.predy
    if sig2 is not None:
        reg.sig2 = sig2
    elif sig2n_k:
        reg.sig2 = np.sum(reg.u ** 2) / (reg.n - reg.k)
    else:
        reg.sig2 = np.sum(reg.u ** 2) / reg.n
    if vm is not None:
        reg.vm = vm
+
+
def _test():
    # Run the doctest examples embedded in this module's docstrings.
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    _test()
diff --git a/pysal/spreg/w_utils.py b/pysal/spreg/w_utils.py
new file mode 100644
index 0000000..e0bf4fa
--- /dev/null
+++ b/pysal/spreg/w_utils.py
@@ -0,0 +1,27 @@
+import numpy as np
+import pysal as ps
+import scipy.sparse as SPARSE
+
+
def symmetrize(w):
    """Generate symmetric matrix that has same eigenvalues as an asymmetric row
    standardized matrix w

    Parameters
    ----------
    w: weights object that has been row standardized

    Returns
    -------
    a sparse symmetric matrix with same eigenvalues as w

    Notes
    -----
    Temporarily switches ``w.transform`` to binary to obtain the row sums,
    then restores the transform the caller had set.  The original saved the
    caller's transform in ``current`` but then hard-coded 'r' on the way
    out, clobbering any non-row-standardized transform.
    """
    current = w.transform
    w.transform = 'B'
    d = w.sparse.sum(axis=1)  # row sum of the binary weights
    d.shape = (w.n,)
    d = np.sqrt(d)
    # D^{-1/2} and D^{1/2} as sparse diagonal matrices.
    Di12 = SPARSE.spdiags(1. / d, [0], w.n, w.n)
    D12 = SPARSE.spdiags(d, [0], w.n, w.n)
    w.transform = current  # restore the caller's transform (was 'r')
    return D12 * w.sparse * Di12
diff --git a/pysal/test_NameSpace.py b/pysal/test_NameSpace.py
new file mode 100644
index 0000000..4ed7b8b
--- /dev/null
+++ b/pysal/test_NameSpace.py
@@ -0,0 +1,51 @@
+import os
+import unittest
+import pysal
+
+
class TestNameSpace(unittest.TestCase):
    """
        This test makes sure we don't remove anything from the pysal NameSpace that
        1.0 users might expect to be there.  1.0 Namespace was taken from the 1.1
        Code sprint wave, with special names removed (__all__, etc)
    """
    def test_contents(self):
        # Snapshot of the public names exposed by pysal 1.0; every one of
        # them must still be present in the current top-level namespace.
        namespace_v1_0 = ['Box_Plot', 'DistanceBand', 'Equal_Interval',
                          'Fisher_Jenks', 'Geary', 'Jenks_Caspall',
                          'Jenks_Caspall_Forced', 'Jenks_Caspall_Sampled',
                          'Join_Counts', 'K_classifiers', 'Kernel',
                          'LISA_Markov', 'Markov', 'Max_P_Classifier',
                          'Maximum_Breaks', 'Maxp', 'Maxp_LISA', 'Moran',
                          'Moran_BV', 'Moran_BV_matrix', 'Moran_Local',
                          'Natural_Breaks', 'Percentiles', 'Quantiles',
                          'SpatialTau', 'Spatial_Markov', 'Std_Mean', 'Theil',
                          'TheilD', 'TheilDSim', 'Theta', 'User_Defined', 'W', 'adaptive_kernelW',
                          'adaptive_kernelW_from_shapefile', 'bin', 'bin1d',
                          'binC', 'buildContiguity', 'cg', 'comb', 'common',
                          'core', 'directional', 'ergodic', 'esda', 'full',
                          'gadf', 'higher_order', 'inequality', 'kernelW',
                          'kernelW_from_shapefile', 'knnW', 'knnW_from_array',
                          'knnW_from_shapefile', 'lag_spatial', 'lat2W',
                          'min_threshold_dist_from_shapefile', 'open',
                          'order', 'quantile', 'queen_from_shapefile',
                          'block_weights', 'region', 'remap_ids',
                          'rook_from_shapefile', 'shimbel', 'spatial_dynamics',
                          'threshold_binaryW_from_array', 'threshold_binaryW_from_shapefile',
                          'threshold_continuousW_from_array', 'threshold_continuousW_from_shapefile',
                          'version', 'w_difference', 'w_intersection', 'w_subset',
                          'w_symmetric_difference', 'w_union', 'weights']

        current_namespace = dir(pysal)
        for item in namespace_v1_0:
            self.assertTrue(item in current_namespace)
        # Newly added names are allowed; report them for human review
        # (Python 2 print statement, byte-preserved from upstream).
        for item in current_namespace:
            if item not in namespace_v1_0 and not item.startswith('__'):
                print item, "added to name space"
+
+
# Load the tests once so ``suite`` is importable by external runners.
suite = unittest.TestLoader().loadTestsFromTestCase(TestNameSpace)

if __name__ == '__main__':
    unittest.main()
    # NOTE(review): unittest.main() calls sys.exit(), so the two lines
    # below are unreachable dead code.
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/version.py b/pysal/version.py
new file mode 100644
index 0000000..7022797
--- /dev/null
+++ b/pysal/version.py
@@ -0,0 +1 @@
+version = "1.9.1"
diff --git a/pysal/weights/Contiguity.py b/pysal/weights/Contiguity.py
new file mode 100644
index 0000000..7a3e66d
--- /dev/null
+++ b/pysal/weights/Contiguity.py
@@ -0,0 +1,97 @@
+"""
+Contiguity based spatial weights
+"""
+
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+__all__ = ['buildContiguity']
+
+import pysal
+from _contW_binning import ContiguityWeights_binning as ContiguityWeights
+from _contW_binning import ContiguityWeightsPolygons
+
+
+WT_TYPE = {'rook': 2, 'queen': 1}  # for _contW_Binning
+
+
def buildContiguity(polygons, criterion="rook", ids=None):
    """
    Build contiguity weights from a source

    Parameters
    ----------

    polygons   : an instance of a pysal geo file handler
                 Any thing returned by pysal.open that is explicitly polygons
    criterion  : string
                 contiguity criterion ("rook","queen")
    ids        : list
                 identifiers for i,j

    Returns
    -------

    w         : W instance
                Contiguity weights object

    Examples
    --------

    >>> w = buildContiguity(pysal.open(pysal.examples.get_path('10740.shp'),'r'))
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [163]
    >>> w[0]
    {1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0}
    >>> w = buildContiguity(pysal.open(pysal.examples.get_path('10740.shp'),'r'),criterion='queen')
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [163]
    >>> w.pct_nonzero
    0.031926364234056544
    >>> w = buildContiguity(pysal.open(pysal.examples.get_path('10740.shp'),'r'),criterion='rook')
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [163]
    >>> w.pct_nonzero
    0.026351084812623275
    >>> fips = pysal.open(pysal.examples.get_path('10740.dbf')).by_col('STFID')
    >>> w = buildContiguity(pysal.open(pysal.examples.get_path('10740.shp'),'r'),ids=fips)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  ['35043940300']
    >>> w['35001000107']
    {'35001003805': 1.0, '35001003721': 1.0, '35001000111': 1.0, '35001000112': 1.0, '35001000108': 1.0}

    Notes
    -----

    The types of sources supported will expand over time.

    See Also
    --------
    pysal.weights.W # need to fix sphinx links

    """
    if ids and len(ids) != len(set(ids)):
        raise ValueError("The argument to the ids parameter contains duplicate entries.")

    wt_type = WT_TYPE[criterion.lower()]
    if not issubclass(type(polygons), pysal.open):
        raise TypeError(
            "Argument must be a FileIO handler or connection string")
    polygons.seek(0)  # Make sure we read from the beginning of the file.
    neighbor_data = ContiguityWeights(polygons, wt_type).w
    neighbors = {}
    if ids:
        # Map raw positional keys onto the user-supplied identifiers,
        # merging any duplicated adjacency entries through sets.
        for key in neighbor_data:
            name = ids[key]
            if name not in neighbors:
                neighbors[name] = set()
            neighbors[name].update(ids[x] for x in neighbor_data[key])
        for name in neighbors:
            neighbors[name] = list(neighbors[name])
    else:
        for key in neighbor_data:
            neighbors[key] = list(neighbor_data[key])
    return pysal.weights.W(neighbors, id_order=ids)
+
diff --git a/pysal/weights/Distance.py b/pysal/weights/Distance.py
new file mode 100644
index 0000000..dc44b41
--- /dev/null
+++ b/pysal/weights/Distance.py
@@ -0,0 +1,532 @@
+"""
+Distance based spatial weights
+"""
+
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+import pysal
+import scipy.spatial
+from pysal.common import KDTree
+from pysal.weights import W
+import scipy.stats
+import numpy as np
+
+__all__ = ["knnW", "Kernel", "DistanceBand"]
+
+
def knnW(data, k=2, p=2, ids=None, pct_unique=0.25):
    """
    Creates nearest neighbor weights matrix based on k nearest
    neighbors.

    Parameters
    ----------

    data       : array (n,k) or KDTree where KDtree.data is array (n,k)
                 n observations on k characteristics used to measure
                 distances between the n objects
    k          : int
                 number of nearest neighbors
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    ids        : list
                 identifiers to attach to each observation
    pct_unique : float
                 threshold percentage of unique points in data. Below this
                 threshold tree is built on unique values only

    Returns
    -------

    w         : W instance
                Weights object with binary weights

    Raises
    ------

    TypeError
        If data is neither an ndarray nor a KDTree.

    Examples
    --------

    >>> x,y=np.indices((5,5))
    >>> x.shape=(25,1)
    >>> y.shape=(25,1)
    >>> data=np.hstack([x,y])
    >>> wnn2=knnW(data,k=2)
    >>> wnn4=knnW(data,k=4)
    >>> set([1,5,6,2]) == set(wnn4.neighbors[0])
    True
    >>> set([0,6,10,1]) == set(wnn4.neighbors[5])
    True
    >>> set([1,5]) == set(wnn2.neighbors[0])
    True
    >>> set([0,6]) == set(wnn2.neighbors[5])
    True
    >>> "%.2f"%wnn2.pct_nonzero
    '0.08'
    >>> wnn4.pct_nonzero
    0.16
    >>> wnn3e=knnW(data,p=2,k=3)
    >>> set([1,5,6]) == set(wnn3e.neighbors[0])
    True
    >>> wnn3m=knnW(data,p=1,k=3)
    >>> a = set([1,5,2])
    >>> b = set([1,5,6])
    >>> c = set([1,5,10])
    >>> w0n = set(wnn3m.neighbors[0])
    >>> a==w0n or b==w0n or c==w0n
    True

    ids

    >>> wnn2 = knnW(data,2)
    >>> wnn2[0]
    {1: 1.0, 5: 1.0}
    >>> wnn2[1]
    {0: 1.0, 2: 1.0}

    now with 1 rather than 0 offset

    >>> wnn2 = knnW(data,2, ids = range(1,26))
    >>> wnn2[1]
    {2: 1.0, 6: 1.0}
    >>> wnn2[2]
    {1: 1.0, 3: 1.0}
    >>> 0 in wnn2.neighbors
    False

    Notes
    -----

    Ties between neighbors of equal distance are arbitrarily broken.

    See Also
    --------
    pysal.weights.W

    """

    if issubclass(type(data), scipy.spatial.KDTree):
        kd = data
        data = kd.data
        # query k+1 because each point is returned as its own neighbor
        nnq = kd.query(data, k=k + 1, p=p)
        info = nnq[1]
    elif type(data).__name__ == 'ndarray':
        # check if unique points are a small fraction of all points
        ind = np.lexsort(data.T)
        # the duplicate mask is aligned with the *sorted* order, so it must
        # be applied to data[ind], not to data in its original order
        mask = np.concatenate(([True], np.any(data[ind[1:]] != data[ind[:-1]], axis=1)))
        u = data[ind][mask]
        pct_u = len(u) * 1. / len(data)
        if pct_u < pct_unique:
            # many coincident points: build the tree on unique values only,
            # then translate tree indices back to original-data indices
            tree = KDTree(u)
            nnq = tree.query(data, k=k + 1, p=p)
            info = nnq[1]
            uid = [np.where((data == ui).all(axis=1))[0][0] for ui in u]
            new_info = np.zeros((len(data), k + 1), 'int')
            for i, row in enumerate(info):
                new_info[i] = [uid[j] for j in row]
            info = new_info
        else:
            kd = KDTree(data)
            # calculate
            nnq = kd.query(data, k=k + 1, p=p)
            info = nnq[1]
    else:
        raise TypeError("data must be an ndarray or a scipy.spatial.KDTree")

    neighbors = {}
    for i, row in enumerate(info):
        row = row.tolist()
        # drop the focal point from its own neighbor list when present;
        # focal is assigned unconditionally so a stale value from the
        # previous iteration is never reused when i is absent from row
        # (possible with coincident points)
        if i in row:
            row.remove(i)
        focal = i
        if ids:
            row = [ids[j] for j in row]
            focal = ids[i]
        neighbors[focal] = row
    return pysal.weights.W(neighbors, id_order=ids)
+
+
class Kernel(W):
    """Spatial weights based on kernel functions

    Parameters
    ----------

    data        : array (n,k) or KDTree where KDtree.data is array (n,k)
                  n observations on k characteristics used to measure
                  distances between the n objects
    bandwidth   : float or array-like (optional)
                  the bandwidth :math:`h_i` for the kernel.
    fixed       : binary
                  If true then :math:`h_i=h \\forall i`. If false then
                  bandwidth is adaptive across observations.
    k           : int
                  the number of nearest neighbors to use for determining
                  bandwidth. For fixed bandwidth, :math:`h_i=max(dknn) \\forall i`
                  where :math:`dknn` is a vector of k-nearest neighbor
                  distances (the distance to the kth nearest neighbor for each
                  observation).  For adaptive bandwidths, :math:`h_i=dknn_i`
    diagonal    : boolean
                  If true, set diagonal weights = 1.0, if false (default),
                  diagonals weights are set to value according to kernel
                  function.
    function    : string {'triangular','uniform','quadratic','quartic','gaussian'}
                  kernel function defined as follows with

                  .. math::

                      z_{i,j} = d_{i,j}/h_i

                  triangular

                  .. math::

                      K(z) = (1 - |z|) \ if |z| \le 1

                  uniform

                  .. math::

                      K(z) = 1/2 \ if |z| \le 1

                  quadratic

                  .. math::

                      K(z) = (3/4)(1-z^2) \ if |z| \le 1

                  quartic

                  .. math::

                      K(z) = (15/16)(1-z^2)^2 \ if |z| \le 1

                  gaussian

                  .. math::

                      K(z) = (2\pi)^{(-1/2)} exp(-z^2 / 2)

    eps         : float
                  adjustment to ensure knn distance range is closed on the
                  knnth observations

    Raises
    ------

    ValueError
        If an unsupported kernel function name is passed.

    Examples
    --------

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> kw=Kernel(points)
    >>> kw.weights[0]
    [1.0, 0.500000049999995, 0.4409830615267465]
    >>> kw.neighbors[0]
    [0, 1, 3]
    >>> kw.bandwidth
    array([[ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002]])
    >>> kw15=Kernel(points,bandwidth=15.0)
    >>> kw15[0]
    {0: 1.0, 1: 0.33333333333333337, 3: 0.2546440075000701}
    >>> kw15.neighbors[0]
    [0, 1, 3]
    >>> kw15.bandwidth
    array([[ 15.],
           [ 15.],
           [ 15.],
           [ 15.],
           [ 15.],
           [ 15.]])

    Adaptive bandwidths user specified

    >>> bw=[25.0,15.0,25.0,16.0,14.5,25.0]
    >>> kwa=Kernel(points,bandwidth=bw)
    >>> kwa.weights[0]
    [1.0, 0.6, 0.552786404500042, 0.10557280900008403]
    >>> kwa.neighbors[0]
    [0, 1, 3, 4]
    >>> kwa.bandwidth
    array([[ 25. ],
           [ 15. ],
           [ 25. ],
           [ 16. ],
           [ 14.5],
           [ 25. ]])

    Endogenous adaptive bandwidths

    >>> kwea=Kernel(points,fixed=False)
    >>> kwea.weights[0]
    [1.0, 0.10557289844279438, 9.99999900663795e-08]
    >>> kwea.neighbors[0]
    [0, 1, 3]
    >>> kwea.bandwidth
    array([[ 11.18034101],
           [ 11.18034101],
           [ 20.000002  ],
           [ 11.18034101],
           [ 14.14213704],
           [ 18.02775818]])

    Endogenous adaptive bandwidths with Gaussian kernel

    >>> kweag=Kernel(points,fixed=False,function='gaussian')
    >>> kweag.weights[0]
    [0.3989422804014327, 0.2674190291577696, 0.2419707487162134]
    >>> kweag.bandwidth
    array([[ 11.18034101],
           [ 11.18034101],
           [ 20.000002  ],
           [ 11.18034101],
           [ 14.14213704],
           [ 18.02775818]])

    Diagonals to 1.0

    >>> kq = Kernel(points,function='gaussian')
    >>> kq.weights
    {0: [0.3989422804014327, 0.35206533556593145, 0.3412334260702758], 1: [0.35206533556593145, 0.3989422804014327, 0.2419707487162134, 0.3412334260702758, 0.31069657591175387], 2: [0.2419707487162134, 0.3989422804014327, 0.31069657591175387], 3: [0.3412334260702758, 0.3412334260702758, 0.3989422804014327, 0.3011374490937829, 0.26575287272131043], 4: [0.31069657591175387, 0.31069657591175387, 0.3011374490937829, 0.3989422804014327, 0.35206533556593145], 5: [0.26575287272131043, 0.35206533556593145, 0.3989422804014327]}
    >>> kqd = Kernel(points, function='gaussian', diagonal=True)
    >>> kqd.weights
    {0: [1.0, 0.35206533556593145, 0.3412334260702758], 1: [0.35206533556593145, 1.0, 0.2419707487162134, 0.3412334260702758, 0.31069657591175387], 2: [0.2419707487162134, 1.0, 0.31069657591175387], 3: [0.3412334260702758, 0.3412334260702758, 1.0, 0.3011374490937829, 0.26575287272131043], 4: [0.31069657591175387, 0.31069657591175387, 0.3011374490937829, 1.0, 0.35206533556593145], 5: [0.26575287272131043, 0.35206533556593145, 1.0]}
    """
    def __init__(self, data, bandwidth=None, fixed=True, k=2,
                 function='triangular', eps=1.0000001, ids=None,
                 diagonal=False):
        if issubclass(type(data), scipy.spatial.KDTree):
            self.kdt = data
            self.data = self.kdt.data
            data = self.data
        else:
            self.data = data
            self.kdt = KDTree(self.data)
        # query k+1 neighbors because each point is its own nearest neighbor
        self.k = k + 1
        self.function = function.lower()
        self.fixed = fixed
        self.eps = eps
        # test against None rather than truthiness: an ndarray bandwidth
        # would raise "truth value of an array is ambiguous" under `if bandwidth:`
        if bandwidth is not None:
            try:
                bandwidth = np.array(bandwidth)
                bandwidth.shape = (len(bandwidth), 1)
            except TypeError:
                # scalar bandwidth: len() raises TypeError; replicate the
                # scalar for every observation
                bandwidth = np.ones((len(data), 1), 'float') * bandwidth
            self.bandwidth = bandwidth
        else:
            self._set_bw()

        self._eval_kernel()
        neighbors, weights = self._k_to_W(ids)
        if diagonal:
            # force each self-weight to exactly 1.0 regardless of the
            # kernel value at distance zero
            for i in neighbors:
                weights[i][neighbors[i].index(i)] = 1.0
        W.__init__(self, neighbors, weights, ids)

    def _k_to_W(self, ids=None):
        # Convert per-point kernel arrays into the neighbors/weights dicts
        # expected by W, translating positions into ids when supplied.
        allneighbors = {}
        weights = {}
        if ids:
            ids = np.array(ids)
        else:
            ids = np.arange(len(self.data))
        for i, neighbors in enumerate(self.kernel):
            if len(self.neigh[i]) == 0:
                allneighbors[ids[i]] = []
                weights[ids[i]] = []
            else:
                allneighbors[ids[i]] = list(ids[self.neigh[i]])
                weights[ids[i]] = self.kernel[i].tolist()
        return allneighbors, weights

    def _set_bw(self):
        # distances (and indices) of the k nearest neighbors, self included
        dmat, neigh = self.kdt.query(self.data, k=self.k)
        if self.fixed:
            # use max knn distance as bandwidth
            bandwidth = dmat.max() * self.eps
            n = len(dmat)
            self.bandwidth = np.ones((n, 1), 'float') * bandwidth
        else:
            # use local max knn distance
            self.bandwidth = dmat.max(axis=1) * self.eps
            self.bandwidth.shape = (self.bandwidth.size, 1)
            # knn neighbor indices were already computed by the query
            # above; reuse them instead of issuing a second identical query
            self.neigh = neigh

    def _eval_kernel(self):
        # get points within bandwidth distance of each point
        if not hasattr(self, 'neigh'):
            kdtq = self.kdt.query_ball_point
            neighbors = [kdtq(self.data[i], r=bwi[0]) for i,
                         bwi in enumerate(self.bandwidth)]
            self.neigh = neighbors
        # standardized distances z = d / h_i for every neighbor of i
        bw = self.bandwidth

        kdtq = self.kdt.query
        z = []
        for i, nids in enumerate(self.neigh):
            di, ni = kdtq(self.data[i], k=len(nids))
            zi = np.array([dict(zip(ni, di))[nid] for nid in nids]) / bw[i]
            z.append(zi)
        zs = z
        # functions follow Anselin and Rey (2010) table 5.4
        if self.function == 'triangular':
            self.kernel = [1 - zi for zi in zs]
        elif self.function == 'uniform':
            self.kernel = [np.ones(zi.shape) * 0.5 for zi in zs]
        elif self.function == 'quadratic':
            self.kernel = [(3. / 4) * (1 - zi ** 2) for zi in zs]
        elif self.function == 'quartic':
            self.kernel = [(15. / 16) * (1 - zi ** 2) ** 2 for zi in zs]
        elif self.function == 'gaussian':
            c = np.pi * 2
            c = c ** (-0.5)
            self.kernel = [c * np.exp(-(zi ** 2) / 2.) for zi in zs]
        else:
            # fail fast: previously this only printed a message and left
            # self.kernel unset, causing an AttributeError later in __init__
            raise ValueError('Unsupported kernel function: %s' % self.function)
+
+
class DistanceBand(W):
    """Spatial weights based on distance band

    Parameters
    ----------

    data        : array (n,k) or KDTree where KDtree.data is array (n,k)
                  n observations on k characteristics used to measure
                  distances between the n objects
    threshold  : float
                 distance band
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    binary     : binary
                 If true w_{ij}=1 if d_{i,j}<=threshold, otherwise w_{i,j}=0
                 If false wij=dij^{alpha}
    alpha      : float
                 distance decay parameter for weight (default -1.0)
                 if alpha is positive the weights will not decline with
                 distance. If binary is True, alpha is ignored

    Raises
    ------

    ValueError
        If data cannot be converted into a numeric array.

    Examples
    --------

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> w=DistanceBand(points,threshold=11.2)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> w.weights
    {0: [1, 1], 1: [1, 1], 2: [], 3: [1, 1], 4: [1], 5: [1]}
    >>> w.neighbors
    {0: [1, 3], 1: [0, 3], 2: [], 3: [1, 0], 4: [5], 5: [4]}
    >>> w=DistanceBand(points,threshold=14.2)
    >>> w.weights
    {0: [1, 1], 1: [1, 1, 1], 2: [1], 3: [1, 1], 4: [1, 1, 1], 5: [1]}
    >>> w.neighbors
    {0: [1, 3], 1: [0, 3, 4], 2: [4], 3: [1, 0], 4: [5, 1, 2], 5: [4]}

    inverse distance weights

    >>> w=DistanceBand(points,threshold=11.2,binary=False)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> w.weights[0]
    [0.10000000000000001, 0.089442719099991588]
    >>> w.neighbors[0]
    [1, 3]
    >>>

    gravity weights

    >>> w=DistanceBand(points,threshold=11.2,binary=False,alpha=-2.)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> w.weights[0]
    [0.01, 0.0079999999999999984]

    Notes
    -----

    this was initially implemented running scipy 0.8.0dev (in epd 6.1).
    earlier versions of scipy (0.7.0) have a logic bug in scipy/sparse/dok.py
    so serge changed line 221 of that file on sal-dev to fix the logic bug

    """
    def __init__(self, data, threshold, p=2, alpha=-1.0, binary=True, ids=None):
        """
        Casting to floats is a work around for a bug in scipy.spatial.  See detail in pysal issue #126
        """
        if issubclass(type(data), scipy.spatial.KDTree):
            self.kd = data
            self.data = self.kd.data
        else:
            try:
                data = np.asarray(data)
                if data.dtype.kind != 'f':
                    data = data.astype(float)
                self.data = data
                self.kd = KDTree(self.data)
            except Exception:
                raise ValueError("Could not make array from data")

        self.p = p
        self.threshold = threshold
        self.binary = binary
        self.alpha = alpha
        self._band()
        neighbors, weights = self._distance_to_W(ids)
        W.__init__(self, neighbors, weights, ids)

    def _band(self):
        """
        find all pairs within threshold
        """
        # NOTE(review): the resulting dok matrix does not store exact-zero
        # distances, so coincident (distinct) points at distance 0 will not
        # appear as neighbors of each other — confirm this is acceptable.
        self.dmat = self.kd.sparse_distance_matrix(
                self.kd, max_distance=self.threshold)

    def _distance_to_W(self, ids=None):
        """Convert the sparse distance matrix into neighbors/weights dicts.

        dmat is keyed by positional indices (0..n-1); when user ids are
        supplied each position must be translated to its id.  The previous
        implementation keyed the dicts by ids but looked them up with
        positional indices, raising KeyError for any ids other than 0..n-1.
        """
        n = self.dmat.shape[0]
        if ids is not None:
            labels = list(ids)
        else:
            labels = list(range(n))
        neighbors = dict([(i, []) for i in labels])
        weights = dict([(i, []) for i in labels])
        for key, weight in self.dmat.items():
            # translate positional matrix indices into observation labels
            i, j = labels[key[0]], labels[key[1]]
            # dmat holds both (i,j) and (j,i); the membership checks
            # deduplicate while registering the tie symmetrically
            if j not in neighbors[i]:
                weights[i].append(1 if self.binary else weight ** self.alpha)
                neighbors[i].append(j)
            if i not in neighbors[j]:
                weights[j].append(1 if self.binary else weight ** self.alpha)
                neighbors[j].append(i)

        return neighbors, weights
+
+
+def _test():
+    import doctest
+    # the following line could be used to define an alternative to the '<BLANKLINE>' flag
+    #doctest.BLANKLINE_MARKER = 'something better than <BLANKLINE>'
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)
+
+if __name__ == '__main__':
+    _test()
diff --git a/pysal/weights/Wsets.py b/pysal/weights/Wsets.py
new file mode 100644
index 0000000..0e10346
--- /dev/null
+++ b/pysal/weights/Wsets.py
@@ -0,0 +1,506 @@
+"""
+Set-like manipulation of weights matrices.
+"""
+
+__author__ = "Sergio J. Rey <srey at asu.edu>, Charles Schmidt <schmidtc at gmail.com>, David Folch <david.folch at asu.edu>, Dani Arribas-Bel <darribas at asu.edu>"
+
+import pysal
+import copy
+from scipy.sparse import isspmatrix_csr
+from numpy import ones
+
+__all__ = ['w_union', 'w_intersection', 'w_difference',
+           'w_symmetric_difference', 'w_subset', 'w_clip']
+
+
def w_union(w1, w2, silent_island_warning=False):
    """Returns a binary weights object, w, that includes all neighbor pairs that
    exist in either w1 or w2.

    Parameters
    ----------

    w1                      : W object
    w2                      : W object
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------

    w       : W object

    Notes
    -----
    ID comparisons are performed using ==, therefore the integer ID 2 is
    equivalent to the float ID 2.0. Returns a matrix with all the unique IDs
    from w1 and w2.

    Examples
    --------

    Construct rook weights matrices for two regions, one is 4x4 (16 areas)
    and the other is 6x4 (24 areas). A union of these two weights matrices
    results in the new weights matrix matching the larger one.

    >>> import pysal
    >>> w1 = pysal.lat2W(4,4)
    >>> w2 = pysal.lat2W(6,4)
    >>> w = pysal.weights.w_union(w1, w2)
    >>> w1[0] == w[0]
    True
    >>> w1.neighbors[15]
    [11, 14]
    >>> w2.neighbors[15]
    [11, 14, 19]
    >>> w.neighbors[15]
    [19, 11, 14]
    >>>

    """
    # copy w1's neighbor lists so the result never aliases w1's internal
    # structures (previously, ids present only in w1 shared list objects
    # with w1.neighbors while w2-only ids were copied — mutating the union
    # could silently mutate w1)
    neighbors = dict([(i, list(neigh)) for i, neigh in w1.neighbors.items()])
    for i in w2.neighbors:
        if i in neighbors:
            add_neigh = set(neighbors[i]).union(set(w2.neighbors[i]))
            neighbors[i] = list(add_neigh)
        else:
            neighbors[i] = copy.copy(w2.neighbors[i])
    return pysal.W(neighbors, silent_island_warning=silent_island_warning)
+
+
def w_intersection(w1, w2, w_shape='w1', silent_island_warning=False):
    """Returns a binary weights object, w, that includes only those neighbor
    pairs that exist in both w1 and w2.

    Parameters
    ----------

    w1                      : W object
    w2                      : W object
    w_shape                 : string
                              Defines the shape of the returned weights matrix. 'w1' returns a
                              matrix with the same IDs as w1; 'all' returns a matrix with all
                              the unique IDs from w1 and w2; and 'min' returns a matrix with
                              only the IDs occurring in both w1 and w2.
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------

    w       : W object

    Notes
    -----
    ID comparisons are performed using ==, therefore the integer ID 2 is
    equivalent to the float ID 2.0.

    Examples
    --------

    Construct rook weights matrices for two regions, one is 4x4 (16 areas)
    and the other is 6x4 (24 areas). An intersection of these two weights
    matrices results in the new weights matrix matching the smaller one.

    >>> import pysal
    >>> w1 = pysal.lat2W(4,4)
    >>> w2 = pysal.lat2W(6,4)
    >>> w = pysal.weights.w_intersection(w1, w2)
    >>> w1[0] == w[0]
    True
    >>> w1.neighbors[15]
    [11, 14]
    >>> w2.neighbors[15]
    [11, 14, 19]
    >>> w.neighbors[15]
    [11, 14]
    >>>

    """
    # determine which observation ids appear in the result
    if w_shape == 'w1':
        neigh_keys = w1.neighbors.keys()
    elif w_shape == 'all':
        neigh_keys = set(w1.neighbors) | set(w2.neighbors)
    elif w_shape == 'min':
        neigh_keys = set(w1.neighbors) & set(w2.neighbors)
    else:
        raise Exception("invalid string passed to w_shape")

    # keep only the ties present in both matrices; ids missing from either
    # matrix become islands
    neighbors = {}
    for key in neigh_keys:
        if key in w1.neighbors and key in w2.neighbors:
            shared = set(w1.neighbors[key]) & set(w2.neighbors[key])
            neighbors[key] = list(shared)
        else:
            neighbors[key] = []

    return pysal.W(neighbors, silent_island_warning=silent_island_warning)
+
+
def w_difference(w1, w2, w_shape='w1', constrained=True, silent_island_warning=False):
    """Returns a binary weights object, w, that includes only neighbor pairs
    in w1 that are not in w2. The w_shape and constrained parameters
    determine which pairs in w1 that are not in w2 are returned.

    Parameters
    ----------

    w1                      : W object
    w2                      : W object
    w_shape                 : string
                              Defines the shape of the returned weights matrix. 'w1' returns a
                              matrix with the same IDs as w1; 'all' returns a matrix with all
                              the unique IDs from w1 and w2; and 'min' returns a matrix with
                              the IDs occurring in w1 and not in w2.
    constrained             : boolean
                              If False then the full set of neighbor pairs in w1 that are
                              not in w2 are returned. If True then those pairs that would
                              not be possible if w_shape='min' are dropped. Ignored if
                              w_shape is set to 'min'.
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------

    w       : W object

    Notes
    -----
    ID comparisons are performed using ==, therefore the integer ID 2 is
    equivalent to the float ID 2.0.

    Examples
    --------

    Construct rook (w2) and queen (w1) weights matrices for two 4x4 regions
    (16 areas). A queen matrix has all the joins a rook matrix does plus joins
    between areas that share a corner. The new matrix formed by the difference
    of rook from queen contains only join at corners (typically called a
    bishop matrix). Note that the difference of queen from rook would result
    in a weights matrix with no joins.

    >>> import pysal
    >>> w1 = pysal.lat2W(4,4,rook=False)
    >>> w2 = pysal.lat2W(4,4,rook=True)
    >>> w = pysal.weights.w_difference(w1, w2, constrained=False)
    >>> w1[0] == w[0]
    False
    >>> w1.neighbors[15]
    [10, 11, 14]
    >>> w2.neighbors[15]
    [11, 14]
    >>> w.neighbors[15]
    [10]
    >>>

    """
    # determine which observation ids appear in the result
    if w_shape == 'w1':
        neigh_keys = w1.neighbors.keys()
    elif w_shape == 'all':
        neigh_keys = set(w1.neighbors) | set(w2.neighbors)
    elif w_shape == 'min':
        neigh_keys = set(w1.neighbors) - set(w2.neighbors)
        if not neigh_keys:
            raise Exception("returned an empty weights matrix")
    else:
        raise Exception("invalid string passed to w_shape")

    # per id, keep w1's ties that w2 does not have
    neighbors = {}
    for key in neigh_keys:
        if key not in w1.neighbors:
            neighbors[key] = []
        elif key in w2.neighbors:
            remaining = set(w1.neighbors[key]) - set(w2.neighbors[key])
            neighbors[key] = list(remaining)
        else:
            neighbors[key] = copy.copy(w1.neighbors[key])

    if constrained or w_shape == 'min':
        # restrict ties to ids that would survive under w_shape='min';
        # everything else becomes an island
        valid_keys = set(w1.neighbors) - set(w2.neighbors)
        for key in set(neighbors) - valid_keys:
            neighbors[key] = []
        for key in valid_keys:
            neighbors[key] = list(set(neighbors[key]) & valid_keys)

    return pysal.W(neighbors, silent_island_warning=silent_island_warning)
+
+
def w_symmetric_difference(w1, w2, w_shape='all', constrained=True, silent_island_warning=False):
    """Returns a binary weights object, w, that includes only neighbor pairs
    that are not shared by w1 and w2. The w_shape and constrained parameters
    determine which pairs that are not shared by w1 and w2 are returned.

    Parameters
    ----------

    w1                      : W object
    w2                      : W object
    w_shape                 : string
                              Defines the shape of the returned weights matrix. 'all' returns a
                              matrix with all the unique IDs from w1 and w2; and 'min' returns
                              a matrix with the IDs not shared by w1 and w2.
    constrained             : boolean
                              If False then the full set of neighbor pairs that are not
                              shared by w1 and w2 are returned. If True then those pairs
                              that would not be possible if w_shape='min' are dropped.
                              Ignored if w_shape is set to 'min'.
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------

    w       : W object

    Notes
    -----
    ID comparisons are performed using ==, therefore the integer ID 2 is
    equivalent to the float ID 2.0.

    Examples
    --------

    Construct queen weights matrix for a 4x4 (16 areas) region (w1) and a rook
    matrix for a 6x4 (24 areas) region (w2). The symmetric difference of these
    two matrices (with w_shape set to 'all' and constrained set to False)
    contains the corner joins in the overlap area, all the joins in the
    non-overlap area.

    >>> import pysal
    >>> w1 = pysal.lat2W(4,4,rook=False)
    >>> w2 = pysal.lat2W(6,4,rook=True)
    >>> w = pysal.weights.w_symmetric_difference(w1, w2, constrained=False)
    >>> w1[0] == w[0]
    False
    >>> w1.neighbors[15]
    [10, 11, 14]
    >>> w2.neighbors[15]
    [11, 14, 19]
    >>> w.neighbors[15]
    [10, 19]
    >>>

    """
    # Select which observation ids appear in the result.
    if w_shape == 'all':
        neigh_keys = set(w1.neighbors.keys()).union(set(w2.neighbors.keys()))
    elif w_shape == 'min':
        neigh_keys = set(w1.neighbors.keys(
        )).symmetric_difference(set(w2.neighbors.keys()))
    else:
        raise Exception("invalid string passed to w_shape")

    # For each id, take the symmetric difference of its neighbor sets;
    # ids present in only one matrix keep that matrix's ties (copied so
    # the result does not alias the input's internal lists).
    neighbors = {}
    for i in neigh_keys:
        if i in w1.neighbors:
            if i in w2.neighbors:
                add_neigh = set(w1.neighbors[i]).symmetric_difference(
                    set(w2.neighbors[i]))
                neighbors[i] = list(add_neigh)
            else:
                neighbors[i] = copy.copy(w1.neighbors[i])
        elif i in w2.neighbors:
            neighbors[i] = copy.copy(w2.neighbors[i])
        else:
            neighbors[i] = []

    if constrained or w_shape == 'min':
        # NOTE(review): constrained_keys is computed with difference()
        # ("ids in w1 but not in w2"), mirroring w_difference, even though
        # this function's 'min' semantics use symmetric_difference() of the
        # key sets. Confirm whether this asymmetry is intentional.
        constrained_keys = set(
            w1.neighbors.keys()).difference(set(w2.neighbors.keys()))
        island_keys = set(neighbors.keys()).difference(constrained_keys)
        for i in island_keys:
            neighbors[i] = []
        for i in constrained_keys:
            neighbors[i] = list(
                set(neighbors[i]).intersection(constrained_keys))

    return pysal.W(neighbors, silent_island_warning=silent_island_warning)
+
+
def w_subset(w1, ids, silent_island_warning=False):
    """Returns a binary weights object, w, that includes only those
    observations in ids.

    Parameters
    ----------

    w1                      : W object
    ids                     : list
                              A list containing the IDs to be include in the returned weights
                              object.
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------

    w       : W object

    Examples
    --------

    Construct a rook weights matrix for a 6x4 region (24 areas). By default
    PySAL assigns integer IDs to the areas in a region. By passing in a list
    of integers from 0 to 15, the first 16 areas are extracted from the
    previous weights matrix, and only those joins relevant to the new region
    are retained.

    >>> import pysal
    >>> w1 = pysal.lat2W(6,4)
    >>> ids = range(16)
    >>> w = pysal.weights.w_subset(w1, ids)
    >>> w1[0] == w[0]
    True
    >>> w1.neighbors[15]
    [11, 14, 19]
    >>> w.neighbors[15]
    [11, 14]
    >>>

    """
    # restrict each neighbor list to members of the subset; ids unknown to
    # w1 become islands
    id_set = set(ids)
    neighbors = {}
    for member in ids:
        if member in w1.neighbors:
            neighbors[member] = list(id_set & set(w1.neighbors[member]))
        else:
            neighbors[member] = []

    return pysal.W(neighbors, id_order=ids, silent_island_warning=silent_island_warning)
+
+
def w_clip(w1, w2, outSP=True, silent_island_warning=False):
    '''
    Clip a continuous W object (w1) with a different W object (w2) so only cells where
    w2 has a non-zero value remain with non-zero values in w1

    Checks on w1 and w2 are performed to make sure they conform to the
    appropriate format and, if not, they are converted.

    Parameters
    ----------
    w1                      : pysal.W, scipy.sparse.csr.csr_matrix
                              Potentially continuous weights matrix to be clipped. The clipped
                              matrix wc will have at most the same elements as w1.
    w2                      : pysal.W, scipy.sparse.csr.csr_matrix
                              Weights matrix to use as shell to clip w1. Automatically
                              converted to binary format. Only non-zero elements in w2 will be
                              kept non-zero in wc. NOTE: assumed to be of the same shape as w1
    outSP                   : boolean
                              If True (default) return sparse version of the clipped W, if
                              False, return pysal.W object of the clipped matrix
    silent_island_warning   : boolean
                              If True, suppress the per-observation island warnings
                              printed when converting back to a W (default False)

    Returns
    -------
    wc      : pysal.W, scipy.sparse.csr.csr_matrix
              Clipped W object (sparse if outSP=True). It inherits
              ``id_order`` from w1.

    Examples
    --------
    >>> import pysal as ps

    First create a W object from a lattice using queen contiguity and
    row-standardize it (note that these weights will stay when we clip the
    object, but they will not necessarily represent a row-standardization
    anymore):

    >>> w1 = ps.lat2W(3, 2, rook=False)
    >>> w1.transform = 'R'

    We will clip that geography assuming observations 0, 2, 3 and 4 belong to
    one group and 1, 5 belong to another group and we don't want both groups
    to interact with each other in our weights (i.e. w_ij = 0 if i and j in
    different groups). For that, we use the following method:

    >>> w2 = ps.block_weights(['r1', 'r2', 'r1', 'r1', 'r1', 'r2'])

    To illustrate that w2 will only be considered as binary even when the
    object passed is not, we can row-standardize it

    >>> w2.transform = 'R'

    The clipped object ``wc`` will contain only the spatial queen
    relationships that occur within one group ('r1' or 'r2') but will have
    gotten rid of those that happen across groups

    >>> wcs = ps.weights.Wsets.w_clip(w1, w2, outSP=True)

    This will create a sparse object (recommended when n is large).

    >>> wcs.sparse.toarray()
    array([[ 0.        ,  0.        ,  0.33333333,  0.33333333,  0.        ,
             0.        ],
           [ 0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
             0.        ],
           [ 0.2       ,  0.        ,  0.        ,  0.2       ,  0.2       ,
             0.        ],
           [ 0.2       ,  0.        ,  0.2       ,  0.        ,  0.2       ,
             0.        ],
           [ 0.        ,  0.        ,  0.33333333,  0.33333333,  0.        ,
             0.        ],
           [ 0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
             0.        ]])

    If we wanted an original W object, we can control that with the argument
    ``outSP``:

    >>> wc = ps.weights.Wsets.w_clip(w1, w2, outSP=False)
    WARNING: there are 2 disconnected observations
    Island ids:  [1, 5]
    >>> wc.full()[0]
    array([[ 0.        ,  0.        ,  0.33333333,  0.33333333,  0.        ,
             0.        ],
           [ 0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
             0.        ],
           [ 0.2       ,  0.        ,  0.        ,  0.2       ,  0.2       ,
             0.        ],
           [ 0.2       ,  0.        ,  0.2       ,  0.        ,  0.2       ,
             0.        ],
           [ 0.        ,  0.        ,  0.33333333,  0.33333333,  0.        ,
             0.        ],
           [ 0.        ,  0.        ,  0.        ,  0.        ,  0.        ,
             0.        ]])

    You can check they are actually the same:

    >>> wcs.sparse.toarray() == wc.full()[0]
    array([[ True,  True,  True,  True,  True,  True],
           [ True,  True,  True,  True,  True,  True],
           [ True,  True,  True,  True,  True,  True],
           [ True,  True,  True,  True,  True,  True],
           [ True,  True,  True,  True,  True,  True],
           [ True,  True,  True,  True,  True,  True]], dtype=bool)

    '''
    # Record the id ordering of w1 before converting it away from a W,
    # normalizing a falsy id_order to None.
    if not w1.id_order:
        w1.id_order = None
    id_order = w1.id_order
    # Work on sparse CSR representations of both weights objects.
    if not isspmatrix_csr(w1):
        w1 = w1.sparse
    if not isspmatrix_csr(w2):
        w2 = w2.sparse
    # Force w2 to binary so it acts purely as a mask on w1's values.
    w2.data = ones(w2.data.shape)
    # Element-wise product keeps w1's values only where w2 is non-zero.
    clipped = pysal.weights.WSP(w1.multiply(w2), id_order=id_order)
    if outSP:
        return clipped
    return pysal.weights.WSP2W(clipped, silent_island_warning=silent_island_warning)
+
+
diff --git a/pysal/weights/__init__.py b/pysal/weights/__init__.py
new file mode 100644
index 0000000..2eafa27
--- /dev/null
+++ b/pysal/weights/__init__.py
@@ -0,0 +1,12 @@
+"""
+:mod:`weights` --- Spatial Weights
+==================================
+
+"""
+from weights import *
+from util import *
+from Distance import *
+from Contiguity import *
+from user import *
+from spatial_lag import *
+from Wsets import *
diff --git a/pysal/weights/_contW_binning.py b/pysal/weights/_contW_binning.py
new file mode 100644
index 0000000..af81b76
--- /dev/null
+++ b/pysal/weights/_contW_binning.py
@@ -0,0 +1,389 @@
+#!/usr/bin/python
+#import math
+import pysal
+from pysal.cg.standalone import get_shared_segments
+
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+__all__ = ["QUEEN", "ROOK", "ContiguityWeights_binning",
+           "ContiguityWeightsPolygons"]
+
+
+import time
+
+# delta to get buckets right
+DELTA = 0.000001
+
+QUEEN = 1
+ROOK = 2
+
+# constants for bucket sizes
+BUCK_SM = 8
+BUCK_LG = 80
+SHP_SMALL = 1000
+
+
def bbcommon(bb, bbother):
    """
    Check whether two bounding boxes overlap (touching edges count as
    overlap).

    Boxes are sequences ordered (west, south, east, north): element 0 is
    west, 1 is south, 2 is east, 3 is north.

    Returns 1 when the boxes share any area or boundary point, 0 otherwise.
    """
    # Separating-axis test: the boxes are disjoint exactly when one lies
    # entirely east/west or entirely north/south of the other.
    ew_apart = (bbother[2] < bb[0]) or (bbother[0] > bb[2])
    ns_apart = (bbother[3] < bb[1]) or (bbother[1] > bb[3])
    if ew_apart or ns_apart:
        return 0
    return 1
+
+
class ContiguityWeights_binning:

    """
    Contiguity using a binning algorithm

    Builds ``self.w``, a dict mapping polygon index -> set of contiguous
    polygon indices, for every polygon in a shapefile.  Candidate neighbor
    pairs are pruned by hashing polygon bounding boxes into row/column
    buckets so only polygons that share a bucket are compared in detail.
    """

    def __init__(self, shpFileObject, wttype):
        # shpFileObject: open pysal shapefile handle (polygon type)
        # wttype: QUEEN (shared vertex) or ROOK (shared edge)
        self.shpFileObject = shpFileObject
        self.wttype = wttype
        self.do_weights()

    def do_weights(self):
        # Populate self.w; returns False (and leaves self.w unset) for
        # non-polygon shapefiles.
        shpFileObject = self.shpFileObject

        if shpFileObject.type != pysal.cg.Polygon:
            return False

        shapebox = shpFileObject.bbox      # bounding box

        numPoly = len(shpFileObject)
        self.numPoly = numPoly

        # bucket size (Python 2 integer division is intended here)
        if (numPoly < SHP_SMALL):
            bucketmin = numPoly / BUCK_SM + 2
        else:
            bucketmin = numPoly / BUCK_LG + 2
            # print 'bucketmin: ', bucketmin
        # bucket length; DELTA pads the extent so the maximum coordinate
        # still maps into the last bucket
        lengthx = ((shapebox[2] + DELTA) - shapebox[0]) / bucketmin
        lengthy = ((shapebox[3] + DELTA) - shapebox[1]) / bucketmin

        # print lengthx, lengthy

        # initialize buckets
        columns = [set() for i in range(bucketmin)]
        rows = [set() for i in range(bucketmin)]

        minbox = shapebox[:2] * 2             # minx,miny,minx,miny
        binWidth = [lengthx, lengthy] * 2      # lenx,leny,lenx,leny
        bbcache = {}
        poly2Column = [set() for i in range(numPoly)]
        poly2Row = [set() for i in range(numPoly)]
        # First pass: register each polygon in every row/column bucket its
        # bounding box overlaps, and cache the bounding boxes.
        for i in range(numPoly):
            shpObj = shpFileObject.get(i)
            bbcache[i] = shpObj.bounding_box[:]
            projBBox = [int((shpObj.bounding_box[:][j] -
                             minbox[j]) / binWidth[j]) for j in xrange(4)]
            for j in range(projBBox[0], projBBox[2] + 1):
                columns[j].add(i)
                poly2Column[i].add(j)
            for j in range(projBBox[1], projBBox[3] + 1):
                rows[j].add(i)
                poly2Row[i].add(j)

        w = {}
        if self.wttype == QUEEN:
            # QUEEN: any shared vertex makes two polygons neighbors
            # loop over polygons rather than bins
            vertCache = {}
            for polyId in xrange(numPoly):
                if polyId not in vertCache:
                    vertCache[polyId] = set(shpFileObject.get(polyId).vertices)
                idRows = poly2Row[polyId]
                idCols = poly2Column[polyId]
                rowPotentialNeighbors = set()
                colPotentialNeighbors = set()
                for row in idRows:
                    rowPotentialNeighbors = rowPotentialNeighbors.union(
                        rows[row])
                for col in idCols:
                    colPotentialNeighbors = colPotentialNeighbors.union(
                        columns[col])
                # candidates must share both a row bucket and a column bucket
                potentialNeighbors = rowPotentialNeighbors.intersection(
                    colPotentialNeighbors)
                if polyId not in w:
                    w[polyId] = set()
                for j in potentialNeighbors:
                    # examine each unordered pair exactly once
                    if polyId < j:
                        if bbcommon(bbcache[polyId], bbcache[j]):
                            if j not in vertCache:
                                vertCache[j] = set(
                                    shpFileObject.get(j).vertices)
                            common = vertCache[
                                polyId].intersection(vertCache[j])
                            if len(common) > 0:
                                w[polyId].add(j)
                                if j not in w:
                                    w[j] = set()
                                w[j].add(polyId)
        elif self.wttype == ROOK:
            # ROOK: a shared edge (consecutive vertex pair) is required
            # check for a shared edge
            edgeCache = {}
            # loop over polygons rather than bins
            for polyId in xrange(numPoly):
                if polyId not in edgeCache:
                    iEdges = {}
                    iVerts = shpFileObject.get(polyId).vertices
                    nv = len(iVerts)
                    ne = nv - 1
                    # record each edge in both orientations so direction
                    # of traversal does not matter
                    for i in xrange(ne):
                        l = iVerts[i]
                        r = iVerts[i + 1]
                        iEdges[(l, r)] = []
                        iEdges[(r, l)] = []
                    edgeCache[polyId] = iEdges
                iEdgeSet = set(edgeCache[polyId].keys())
                idRows = poly2Row[polyId]
                idCols = poly2Column[polyId]
                rowPotentialNeighbors = set()
                colPotentialNeighbors = set()
                for row in idRows:
                    rowPotentialNeighbors = rowPotentialNeighbors.union(
                        rows[row])
                for col in idCols:
                    colPotentialNeighbors = colPotentialNeighbors.union(
                        columns[col])
                potentialNeighbors = rowPotentialNeighbors.intersection(
                    colPotentialNeighbors)
                if polyId not in w:
                    w[polyId] = set()
                for j in potentialNeighbors:
                    if polyId < j:
                        if bbcommon(bbcache[polyId], bbcache[j]):
                            if j not in edgeCache:
                                jVerts = shpFileObject.get(j).vertices
                                jEdges = {}
                                nv = len(jVerts)
                                ne = nv - 1
                                for e in xrange(ne):
                                    l = jVerts[e]
                                    r = jVerts[e + 1]
                                    jEdges[(l, r)] = []
                                    jEdges[(r, l)] = []
                                edgeCache[j] = jEdges
                            # for edge in edgeCache[j]:
                            # any common edge key marks the pair as rook
                            # neighbors
                            if iEdgeSet.intersection(edgeCache[j].keys()):
                                w[polyId].add(j)
                                if j not in w:
                                    w[j] = set()
                                w[j].add(polyId)
                                # break
        else:
            print "Unsupported weight type."

        self.w = w
+
+# Generalize to handle polygon collections - independent of origin file type
+
+
+class ContiguityWeightsPolygons:
+
+    """
+    Contiguity for a collection of polygons using a binning algorithm
+    """
+
+    def __init__(self, collection, wttype=1):
+        """
+
+        Parameters
+        ==========
+
+        collection: PySAL PolygonCollection 
+
+        wttype: int
+                1: Queen
+                2: Rook
+        """
+
+        self.collection = collection
+        self.wttype = wttype
+        self.do_weights()
+
+    def do_weights(self):
+        if self.collection.type != pysal.cg.Polygon:
+            return False
+
+        shapebox = self.collection.bbox      # bounding box
+
+        numPoly = self.collection.n
+        self.numPoly = numPoly
+
+        # bucket size
+        if (numPoly < SHP_SMALL):
+            bucketmin = numPoly / BUCK_SM + 2
+        else:
+            bucketmin = numPoly / BUCK_LG + 2
+            # print 'bucketmin: ', bucketmin
+        # bucket length
+        lengthx = ((shapebox[2] + DELTA) - shapebox[0]) / bucketmin
+        lengthy = ((shapebox[3] + DELTA) - shapebox[1]) / bucketmin
+
+        # print lengthx, lengthy
+
+        # initialize buckets
+        columns = [set() for i in range(bucketmin)]
+        rows = [set() for i in range(bucketmin)]
+
+        minbox = shapebox[:2] * 2             # minx,miny,minx,miny
+        binWidth = [lengthx, lengthy] * 2      # lenx,leny,lenx,leny
+        bbcache = {}
+        poly2Column = [set() for i in range(numPoly)]
+        poly2Row = [set() for i in range(numPoly)]
+        for i in range(numPoly):
+            shpObj = self.collection[i]
+            bbcache[i] = shpObj.bbox[:]
+            projBBox = [int((shpObj.bbox[:][j] -
+                             minbox[j]) / binWidth[j]) for j in xrange(4)]
+            for j in range(projBBox[0], projBBox[2] + 1):
+                columns[j].add(i)
+                poly2Column[i].add(j)
+            for j in range(projBBox[1], projBBox[3] + 1):
+                rows[j].add(i)
+                poly2Row[i].add(j)
+
+        w = {}
+        if self.wttype == QUEEN:
+            # loop over polygons rather than bins
+            vertCache = {}
+            for polyId in xrange(numPoly):
+                if polyId not in vertCache:
+                    vertCache[polyId] = set(self.collection[polyId].vertices)
+                idRows = poly2Row[polyId]
+                idCols = poly2Column[polyId]
+                rowPotentialNeighbors = set()
+                colPotentialNeighbors = set()
+                for row in idRows:
+                    rowPotentialNeighbors = rowPotentialNeighbors.union(
+                        rows[row])
+                for col in idCols:
+                    colPotentialNeighbors = colPotentialNeighbors.union(
+                        columns[col])
+                potentialNeighbors = rowPotentialNeighbors.intersection(
+                    colPotentialNeighbors)
+                if polyId not in w:
+                    w[polyId] = set()
+                for j in potentialNeighbors:
+                    if polyId < j:
+                        if j not in vertCache:
+                            vertCache[j] = set(self.collection[j].vertices)
+                        if bbcommon(bbcache[polyId], bbcache[j]):
+                            vertCache[j] = set(self.collection[j].vertices)
+                            common = vertCache[
+                                polyId].intersection(vertCache[j])
+                            if len(common) > 0:
+                                w[polyId].add(j)
+                                if j not in w:
+                                    w[j] = set()
+                                w[j].add(polyId)
+        elif self.wttype == ROOK:
+            # check for a shared edge
+            edgeCache = {}
+            # loop over polygons rather than bins
+            for polyId in xrange(numPoly):
+                if polyId not in edgeCache:
+                    iEdges = {}
+                    iVerts = shpFileObject.get(polyId).vertices
+                    nv = len(iVerts)
+                    ne = nv - 1
+                    for i in xrange(ne):
+                        l = iVerts[i]
+                        r = iVerts[i + 1]
+                        iEdges[(l, r)] = []
+                        iEdges[(r, l)] = []
+                    edgeCache[polyId] = iEdges
+                iEdgeSet = set(edgeCache[polyId].keys())
+                idRows = poly2Row[polyId]
+                idCols = poly2Column[polyId]
+                rowPotentialNeighbors = set()
+                colPotentialNeighbors = set()
+                for row in idRows:
+                    rowPotentialNeighbors = rowPotentialNeighbors.union(
+                        rows[row])
+                for col in idCols:
+                    colPotentialNeighbors = colPotentialNeighbors.union(
+                        columns[col])
+                potentialNeighbors = rowPotentialNeighbors.intersection(
+                    colPotentialNeighbors)
+                if polyId not in w:
+                    w[polyId] = set()
+                for j in potentialNeighbors:
+                    if polyId < j:
+                        if bbcommon(bbcache[polyId], bbcache[j]):
+                            if j not in edgeCache:
+                                jVerts = shpFileObject.get(j).vertices
+                                jEdges = {}
+                                nv = len(jVerts)
+                                ne = nv - 1
+                                for e in xrange(ne):
+                                    l = jVerts[e]
+                                    r = jVerts[e + 1]
+                                    jEdges[(l, r)] = []
+                                    jEdges[(r, l)] = []
+                                edgeCache[j] = jEdges
+                            # for edge in edgeCache[j]:
+                            if iEdgeSet.intersection(edgeCache[j].keys()):
+                                w[polyId].add(j)
+                                if j not in w:
+                                    w[j] = set()
+                                w[j].add(polyId)
+                                # break
+        else:
+            print "Unsupported weight type."
+
+        self.w = w
+
if __name__ == "__main__":
    # Ad-hoc benchmark: compare the binning implementation against the
    # rtree implementation (and two pysal convenience builders) on the
    # NAT sample shapefile, printing elapsed wall-clock times.
    import time
    fname = pysal.examples.get_path('NAT.shp')
    print 'QUEEN binning'
    t0 = time.time()
    qb = ContiguityWeights_binning(pysal.open(fname), QUEEN)
    t1 = time.time()
    print "using " + str(fname)
    print "time elapsed for queen... using bins: " + str(t1 - t0)

    t0 = time.time()
    rb = ContiguityWeights_binning(pysal.open(fname), ROOK)
    t1 = time.time()
    print 'Rook binning'
    print "using " + str(fname)
    print "time elapsed for rook... using bins: " + str(t1 - t0)

    from _contW_rtree import ContiguityWeights_rtree

    t0 = time.time()
    rt = ContiguityWeights_rtree(pysal.open(fname), ROOK)
    t1 = time.time()

    print "time elapsed for rook... using rtree: " + str(t1 - t0)
    # sanity check: both algorithms should produce identical neighbor sets
    print rt.w == rb.w

    print 'QUEEN'
    t0 = time.time()
    qt = ContiguityWeights_rtree(pysal.open(fname), QUEEN)
    t1 = time.time()
    print "using " + str(fname)
    print "time elapsed for queen... using rtree: " + str(t1 - t0)
    print qb.w == qt.w

    print 'knn4'
    t0 = time.time()
    knn = pysal.knnW_from_shapefile(fname, k=4)
    t1 = time.time()
    print t1 - t0

    print 'rook from shapefile'
    t0 = time.time()
    # NOTE(review): the ``knn`` name is reused here for rook weights
    knn = pysal.rook_from_shapefile(fname)
    t1 = time.time()
    print t1 - t0
diff --git a/pysal/weights/_contW_rtree.py b/pysal/weights/_contW_rtree.py
new file mode 100644
index 0000000..de28648
--- /dev/null
+++ b/pysal/weights/_contW_rtree.py
@@ -0,0 +1,116 @@
+import pysal.cg.rtree as rtree
+from pysal.cg.standalone import get_shared_segments
+#Order by Degree of connectivity, i.e. rook is more connected then queen.
+QUEEN = 1
+ROOK = 2
+
+__author__ = "Charles R Schmidt <schmidtc at gmail.com>"
+__all__ = ["QUEEN", "ROOK", "ContiguityWeights_rtree"]
+
+Q_TARGET_MEM_SIZE = 250 * 1024 * 1024  # 250mb
+
+
+class _PolyQ(dict):
+    def __init__(self):
+        dict.__init__(self)
+        self.size = 20  # use the first 20 objects to calculate the average Size.
+        self.ids = []
+
+    def __checkSize(self):
+        """
+        Use the objects in the Q to calculate the average size of the objects
+        Adjust Q.size to hold Q_TARGET_MEM_SIZE/avgSize object
+        This is as many average size object that fit into Q_TARGET_MEM_SIZE
+        """
+        if len(self.ids) > 50:
+            return True
+        return False
+
+    def add(self, poly):
+        if poly.id not in self:
+            if len(self.ids) >= self.size:
+                if self.__checkSize():
+                    del self[self.ids.pop(0)]
+            self[poly.id] = poly
+            self.ids.append(poly.id)
+
+
class ContiguityWeights_rtree:
    # Builds self.w, a dict mapping polygon id -> set of neighbor ids, by
    # streaming polygons into an rtree spatial index and testing each new
    # polygon only against previously-indexed polygons whose bounding
    # boxes intersect it.  A _PolyQ cache limits repeated polygon reads.
    def __init__(self, geoObj, joinType=ROOK):
        # geoObj: iterable of polygons (e.g. an open pysal shapefile)
        # joinType: QUEEN (shared vertex) or ROOK (shared edge)
        self.index = rtree.Rtree()
        self.geoObj = geoObj
        self.joinType = joinType
        self.w = {}
        self.Q = _PolyQ()
        self.cache_hits = 0
        self.cache_misses = 0
        self.create()
        #print "Misses: ",self.cache_misses
        #print "Hits: ",self.cache_hits

    def create(self):
        # Assign sequential ids and feed every polygon through append().
        for id, poly in enumerate(self.geoObj):
            poly.id = id
            self.append(poly)

    def append(self, poly):
        # Test poly against all already-indexed polygons whose bounding
        # boxes intersect it, then add poly itself to the index.
        self.Q.add(poly)
        b = poly.bounding_box
        bbox = [b.left, b.lower, b.right, b.upper]
        for id in self.index.intersection(bbox):
            id = int(id)
            # check() returns ROOK (2), QUEEN (1) or False; since ROOK >
            # QUEEN, a rook join also satisfies a queen request.
            if self.check(id, poly) >= self.joinType:
                self.setW(id, poly.id)
        if poly.id not in self.w:  # add the null cases
            self.w[poly.id] = set()
        self.index.add(poly.id, bbox)

    def setW(self, id0, id1):
        "Update the W dict, recording the two polygons as mutual neighbors."
        w = self.w
        if id0 not in w:
            w[id0] = set()
        if id1 not in w:
            w[id1] = set()
        w[id0].add(id1)
        w[id1].add(id0)

    def check(self, id0, poly1):
        "Check whether two polygons are neighbors; returns ROOK, QUEEN or False."
        if id0 in self.Q:
            self.cache_hits += 1
            poly0 = self.Q[id0]
        else:
            self.cache_misses += 1
            poly0 = self.geoObj.get(id0)
            poly0.id = id0
            self.Q.add(poly0)
        common = set(poly0.vertices).intersection(set(poly1.vertices))
        if len(common) > 1 and self.joinType == ROOK:
            #double check rook
            # shared vertices alone are not enough for rook; require an
            # actual shared segment
            if get_shared_segments(poly0, poly1, True):
                return ROOK
            return False
            #for vert in common:
            #    idx = poly0.vertices.index(vert)
            #    IDX = poly1.vertices.index(vert)
            #    try:
            #        if poly0.vertices[idx+1] == poly1.vertices[IDX+1] or poly0.vertices[idx+1] == poly1.vertices[IDX-1]\
            #        or poly0.vertices[idx-1] == poly1.vertices[IDX+1] or poly0.vertices[idx-1] == poly1.vertices[IDX-1]:
            #            return ROOK
            #    except IndexError:
            #        pass
            #return False
        elif len(common) > 0:
            return QUEEN
        else:
            return False
+
if __name__ == '__main__':
    # Ad-hoc timing run: build queen contiguity for the 10740 sample
    # shapefile with the rtree implementation and report elapsed time.
    import pysal
    import time
    t0 = time.time()
    shp = pysal.open(pysal.examples.get_path('10740.shp'), 'r')
    w = ContiguityWeights_rtree(shp, QUEEN)
    t1 = time.time()
    print "Completed in: ", t1 - t0, "seconds using rtree"
diff --git a/pysal/weights/spatial_lag.py b/pysal/weights/spatial_lag.py
new file mode 100644
index 0000000..d877171
--- /dev/null
+++ b/pysal/weights/spatial_lag.py
@@ -0,0 +1,84 @@
+"""
+spatial lag operations
+"""
+__authors__ = "Serge Rey <srey at asu.edu>, David C. Folch <david.folch at asu.edu>"
+__all__ = ['lag_spatial']
+
+
def lag_spatial(w, y):
    """
    Spatial lag operator. If w is row standardized, returns the average of
    each observation's neighbors; if not, returns the weighted sum of each
    observation's neighbors.

    Parameters
    ----------

    w : W
        weights object
    y : array
        numpy array with dimensionality conforming to w (see examples)

    Returns
    -------

    wy : array
         array of numeric values for the spatial lag

    Examples
    --------

    >>> import pysal
    >>> import numpy as np

    Setup a 9x9 binary spatial weights matrix and vector of data; compute the
    spatial lag of the vector.

    >>> w = pysal.lat2W(3, 3)
    >>> y = np.arange(9)
    >>> yl = pysal.lag_spatial(w, y)
    >>> yl
    array([  4.,   6.,   6.,  10.,  16.,  14.,  10.,  18.,  12.])

    Row standardize the weights matrix and recompute the spatial lag

    >>> w.transform = 'r'
    >>> yl = pysal.lag_spatial(w, y)
    >>> yl
    array([ 2.        ,  2.        ,  3.        ,  3.33333333,  4.        ,
            4.66666667,  5.        ,  6.        ,  6.        ])

    Explicitly define data vector as 9x1 and recompute the spatial lag

    >>> y.shape = (9, 1)
    >>> yl = pysal.lag_spatial(w, y)
    >>> yl
    array([[ 2.        ],
           [ 2.        ],
           [ 3.        ],
           [ 3.33333333],
           [ 4.        ],
           [ 4.66666667],
           [ 5.        ],
           [ 6.        ],
           [ 6.        ]])

    Take the spatial lag of a 9x2 data matrix

    >>> yr = np.arange(8, -1, -1)
    >>> yr.shape = (9, 1)
    >>> x = np.hstack((y, yr))
    >>> yl = pysal.lag_spatial(w, x)
    >>> yl
    array([[ 2.        ,  6.        ],
           [ 2.        ,  6.        ],
           [ 3.        ,  5.        ],
           [ 3.33333333,  4.66666667],
           [ 4.        ,  4.        ],
           [ 4.66666667,  3.33333333],
           [ 5.        ,  3.        ],
           [ 6.        ,  2.        ],
           [ 6.        ,  2.        ]])

    """
    # Sparse matrix product of the weights with y; presumably the rows of y
    # are ordered to match w.id_order -- callers should verify alignment.
    return w.sparse * y
diff --git a/pysal/weights/tests/__init__.py b/pysal/weights/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pysal/weights/tests/test_Contiguity.py b/pysal/weights/tests/test_Contiguity.py
new file mode 100644
index 0000000..ca97b91
--- /dev/null
+++ b/pysal/weights/tests/test_Contiguity.py
@@ -0,0 +1,27 @@
+"""Unit test for Contiguity.py"""
+import unittest
+import pysal
+import numpy as np
+
+
class TestContiguity(unittest.TestCase):
    """Unit tests for pysal.buildContiguity."""

    def setUp(self):
        # polygon shapefile shared by every case; each call below re-opens
        # it so the reader starts from the beginning
        self.polyShp = pysal.examples.get_path('10740.shp')

    def test_buildContiguity(self):
        # default criterion
        w_default = pysal.buildContiguity(pysal.open(self.polyShp, 'r'))
        self.assertEqual(w_default[0],
                         {1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0})
        # queen contiguity
        w_queen = pysal.buildContiguity(
            pysal.open(self.polyShp, 'r'), criterion='queen')
        self.assertEqual(w_queen.pct_nonzero, 0.031926364234056544)
        # rook contiguity is sparser than queen
        w_rook = pysal.buildContiguity(
            pysal.open(self.polyShp, 'r'), criterion='rook')
        self.assertEqual(w_rook.pct_nonzero, 0.026351084812623275)
        # user-supplied ids read from the companion dbf
        fips = pysal.open(pysal.examples.get_path('10740.dbf')).by_col('STFID')
        w_ids = pysal.buildContiguity(pysal.open(self.polyShp, 'r'), ids=fips)
        self.assertEqual(w_ids['35001000107'],
                         {'35001003805': 1.0, '35001003721': 1.0,
                          '35001000111': 1.0, '35001000112': 1.0,
                          '35001000108': 1.0})
+
+
# Allow running this test module directly
if __name__ == "__main__":
    unittest.main()
diff --git a/pysal/weights/tests/test_Distance.py b/pysal/weights/tests/test_Distance.py
new file mode 100644
index 0000000..873a664
--- /dev/null
+++ b/pysal/weights/tests/test_Distance.py
@@ -0,0 +1,149 @@
+import os
+import unittest
+import pysal
+import numpy as np
+
+
class TestDistanceWeights(unittest.TestCase):
    # Regression tests for distance-based weights.  The expectations below
    # pin exact float values produced by the pysal implementations; do not
    # round or loosen them.
    def setUp(self):
        np.random.seed(1234)
        self.polyShp = pysal.examples.get_path('columbus.shp')
        self.arcShp = pysal.examples.get_path('stl_hom.shp')
        self.points = [(
            10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]

    def test_knnW(self):
        # k-nearest-neighbor weights on a 5x5 regular grid
        x = np.indices((5, 5))  # NOTE(review): overwritten on the next line
        x, y = np.indices((5, 5))
        x.shape = (25, 1)
        y.shape = (25, 1)
        data = np.hstack([x, y])
        wnn2 = pysal.knnW(data, k=2)
        wnn4 = pysal.knnW(data, k=4)
        wnn4.neighbors[0]
        self.assertEqual(set(wnn4.neighbors[0]), set([1, 5, 6, 2]))
        self.assertEqual(set(wnn2.neighbors[5]), set([0, 6]))
        self.assertEqual(wnn2.pct_nonzero, 0.080000000000000002)
        wnn3e = pysal.knnW(data, p=2, k=3)
        self.assertEqual(set(wnn3e.neighbors[0]), set([1, 5, 6]))
        # knn built directly from a shapefile
        wc = pysal.knnW_from_shapefile(self.polyShp)
        self.assertEqual(wc.pct_nonzero, 0.040816326530612242)
        self.assertEqual(set(wc.neighbors[0]), set([2, 1]))
        wc3 = pysal.knnW_from_shapefile(self.polyShp, k=3)
        self.assertEqual(wc3.weights[1], [1, 1, 1])
        self.assertEqual(set(wc3.neighbors[1]), set([0,3,7]))

    def test_knnW_arc(self):
        # knn weights using arc (great-circle) distances on a sphere
        pts = [x.centroid for x in pysal.open(self.arcShp)]
        dist = pysal.cg.sphere.arcdist  # default radius is Earth KM
        full = np.matrix([[dist(pts[i], pts[j]) for j in xrange(
            len(pts))] for i in xrange(len(pts))])

        kd = pysal.cg.kdtree.KDTree(pts, distance_metric='Arc',
                                    radius=pysal.cg.sphere.RADIUS_EARTH_KM)
        w = pysal.knnW(kd, 4)
        self.assertEqual(set(w.neighbors[4]), set([1,3,9,12]))
        self.assertEqual(set(w.neighbors[40]), set([31,38,45,49]))
        #self.assertTrue((full.argsort()[:, 1:5] == np.array(
        #    [w.neighbors[x] for x in range(len(pts))])).all())

    def test_Kernel(self):
        # fixed-bandwidth kernel weights with the default bandwidth
        kw = pysal.Kernel(self.points)
        self.assertEqual(kw.weights[0], [1.0, 0.50000004999999503,
                                         0.44098306152674649])
        # explicit scalar bandwidth
        kw15 = pysal.Kernel(self.points, bandwidth=15.0)
        self.assertEqual(kw15[0], {0: 1.0, 1: 0.33333333333333337,
                                   3: 0.2546440075000701})
        self.assertEqual(kw15.bandwidth[0], 15.)
        self.assertEqual(kw15.bandwidth[-1], 15.)
        # per-observation bandwidths
        bw = [25.0, 15.0, 25.0, 16.0, 14.5, 25.0]
        kwa = pysal.Kernel(self.points, bandwidth=bw)
        self.assertEqual(kwa.weights[0], [1.0, 0.59999999999999998,
                                          0.55278640450004202,
                                          0.10557280900008403])
        self.assertEqual(kwa.neighbors[0], [0, 1, 3, 4])
        self.assertEqual(kwa.bandwidth[0], 25.)
        self.assertEqual(kwa.bandwidth[1], 15.)
        self.assertEqual(kwa.bandwidth[2], 25.)
        self.assertEqual(kwa.bandwidth[3], 16.)
        self.assertEqual(kwa.bandwidth[4], 14.5)
        self.assertEqual(kwa.bandwidth[5], 25.)
        # adaptive (data-driven) bandwidths
        kwea = pysal.Kernel(self.points, fixed=False)
        self.assertEqual(kwea.weights[0], [1.0, 0.10557289844279438,
                                           9.9999990066379496e-08])
        l = kwea.bandwidth.tolist()
        self.assertEqual(l, [[11.180341005532938], [11.180341005532938],
                             [20.000002000000002], [11.180341005532938],
                             [14.142137037944515], [18.027758180095585]])
        # adaptive bandwidths with a gaussian kernel function
        kweag = pysal.Kernel(self.points, fixed=False, function='gaussian')
        self.assertEqual(kweag.weights[0], [0.3989422804014327,
                                            0.26741902915776961,
                                            0.24197074871621341])
        l = kweag.bandwidth.tolist()
        self.assertEqual(l, [[11.180341005532938], [11.180341005532938],
                            [20.000002000000002], [11.180341005532938],
                            [14.142137037944515], [18.027758180095585]])

        # kernel weights built directly from a shapefile
        kw = pysal.kernelW_from_shapefile(self.polyShp, idVariable='POLYID')
        self.assertEqual(set(kw.weights[1]), set([0.0070787731484506233,
                                         0.2052478782400463,
                                         0.23051223027663237,
                                         1.0
                                         ]))
        kwa = pysal.adaptive_kernelW_from_shapefile(self.polyShp)
        self.assertEqual(kwa.weights[0], [1.0, 0.03178906767736345,
                                          9.9999990066379496e-08])

    def test_threshold(self):
        # minimum threshold distance and continuous distance-band weights
        md = pysal.min_threshold_dist_from_shapefile(self.polyShp)
        self.assertEqual(md, 0.61886415807685413)
        wid = pysal.threshold_continuousW_from_array(self.points, 11.2)
        self.assertEqual(wid.weights[0], [0.10000000000000001,
                                          0.089442719099991588])
        # alpha=-2.0 gives inverse-distance-squared weights
        wid2 = pysal.threshold_continuousW_from_array(
            self.points, 11.2, alpha=-2.0)
        self.assertEqual(wid2.weights[0], [0.01, 0.0079999999999999984])
        w = pysal.threshold_continuousW_from_shapefile(
            self.polyShp, 0.62, idVariable="POLYID")
        self.assertEqual(w.weights[1], [1.6702346893743334,
                                        1.7250729841938093])

    def test_DistanceBand(self):
        """ see issue #126 """
        # unit-distance band on lattice centroids must equal rook contiguity
        w = pysal.rook_from_shapefile(
            pysal.examples.get_path("lattice10x10.shp"))
        polygons = pysal.open(
            pysal.examples.get_path("lattice10x10.shp"), "r").read()
        points1 = [poly.centroid for poly in polygons]
        w1 = pysal.DistanceBand(points1, 1)
        for k in range(w.n):
            self.assertEqual(w[k], w1[k])

    def test_DistanceBand_ints(self):
        """ see issue #126 """
        # same check but with integer coordinates
        w = pysal.rook_from_shapefile(
            pysal.examples.get_path("lattice10x10.shp"))
        polygons = pysal.open(
            pysal.examples.get_path("lattice10x10.shp"), "r").read()
        points2 = [tuple(map(int, poly.vertices[0])) for poly in polygons]
        w2 = pysal.DistanceBand(points2, 1)
        for k in range(w.n):
            self.assertEqual(w[k], w2[k])

    def test_DistanceBand_arc(self):
        # a distance band as wide as the maximum arc distance must
        # reproduce the full pairwise distance matrix
        pts = [x.centroid for x in pysal.open(self.arcShp)]
        dist = pysal.cg.sphere.arcdist  # default radius is Earth KM
        full = np.matrix([[dist(pts[i], pts[j]) for j in xrange(
            len(pts))] for i in xrange(len(pts))])

        kd = pysal.cg.kdtree.KDTree(pts, distance_metric='Arc',
                                    radius=pysal.cg.sphere.RADIUS_EARTH_KM)
        w = pysal.DistanceBand(kd, full.max(), binary=False, alpha=1.0)
        self.assertTrue((w.sparse.todense() == full).all())
+
+
# Test suite and direct-run entry point
suite = unittest.TestLoader().loadTestsFromTestCase(TestDistanceWeights)

if __name__ == '__main__':
    runner = unittest.TextTestRunner()
    runner.run(suite)
diff --git a/pysal/weights/tests/test_Wsets.py b/pysal/weights/tests/test_Wsets.py
new file mode 100644
index 0000000..6c15030
--- /dev/null
+++ b/pysal/weights/tests/test_Wsets.py
@@ -0,0 +1,64 @@
+"""Unit test for Wsets module."""
+import unittest
+import pysal
+
+
class TestWsets(unittest.TestCase):
    """Unit test for Wsets module."""

    def test_w_union(self):
        """Union keeps every neighbor pair present in either input W."""
        w1 = pysal.lat2W(4, 4)
        w2 = pysal.lat2W(6, 4)
        w3 = pysal.weights.Wsets.w_union(w1, w2)
        self.assertEqual(w1[0], w3[0])
        self.assertEqual(w1.neighbors[15], [11, 14])
        self.assertEqual(w2.neighbors[15], [11, 14, 19])
        self.assertEqual(w3.neighbors[15], [19, 11, 14])

    def test_w_intersection(self):
        """Intersection keeps only neighbor pairs present in BOTH inputs.

        Bug fix: this test previously called ``w_union`` and asserted
        union results, so ``w_intersection`` was never exercised.
        """
        w1 = pysal.lat2W(4, 4)
        w2 = pysal.lat2W(6, 4)
        w3 = pysal.weights.Wsets.w_intersection(w1, w2)
        self.assertEqual(w1[0], w3[0])
        self.assertEqual(w1.neighbors[15], [11, 14])
        self.assertEqual(w2.neighbors[15], [11, 14, 19])
        # 19 only appears in w2, so it must drop out of the intersection
        self.assertEqual(sorted(w3.neighbors[15]), [11, 14])

    def test_w_difference(self):
        """Difference keeps pairs in w1 that are absent from w2."""
        w1 = pysal.lat2W(4, 4, rook=False)
        w2 = pysal.lat2W(4, 4, rook=True)
        w3 = pysal.weights.Wsets.w_difference(w1, w2, constrained=False)
        self.assertNotEqual(w1[0], w3[0])
        self.assertEqual(w1.neighbors[15], [10, 11, 14])
        self.assertEqual(w2.neighbors[15], [11, 14])
        self.assertEqual(w3.neighbors[15], [10])

    def test_w_symmetric_difference(self):
        """Symmetric difference keeps pairs in exactly one of the inputs."""
        w1 = pysal.lat2W(4, 4, rook=False)
        w2 = pysal.lat2W(6, 4, rook=True)
        w3 = pysal.weights.Wsets.w_symmetric_difference(
            w1, w2, constrained=False)
        self.assertNotEqual(w1[0], w3[0])
        self.assertEqual(w1.neighbors[15], [10, 11, 14])
        self.assertEqual(w2.neighbors[15], [11, 14, 19])
        self.assertEqual(w3.neighbors[15], [10, 19])

    def test_w_subset(self):
        """Subset restricts a W to the given ids, pruning outside neighbors."""
        w1 = pysal.lat2W(6, 4)
        ids = range(16)
        w2 = pysal.weights.Wsets.w_subset(w1, ids)
        self.assertEqual(w1[0], w2[0])
        self.assertEqual(w1.neighbors[15], [11, 14, 19])
        # 19 is outside the subset, so it is removed from 15's neighbors
        self.assertEqual(w2.neighbors[15], [11, 14])
+
+
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(TestWsets)

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/weights/tests/test__contW_binning.py b/pysal/weights/tests/test__contW_binning.py
new file mode 100644
index 0000000..e798bfc
--- /dev/null
+++ b/pysal/weights/tests/test__contW_binning.py
@@ -0,0 +1,132 @@
+import os
+import unittest
+import pysal
+from pysal.weights._contW_binning import ContiguityWeights_binning, QUEEN, ROOK
+
+
class TestContiguityWeights(unittest.TestCase):
    def setUp(self):
        """Build binning-based QUEEN contiguity weights for virginia.shp."""
        shpObj = pysal.open(pysal.examples.get_path('virginia.shp'), 'r')
        self.binningW = ContiguityWeights_binning(shpObj, QUEEN)
        shpObj.close()

    def test_w_type(self):
        # assert_ is a deprecated alias; use assertTrue
        self.assertTrue(isinstance(self.binningW, ContiguityWeights_binning))

    def test_QUEEN(self):
        self.assertEqual(QUEEN, 1)

    def test_ROOK(self):
        self.assertEqual(ROOK, 2)

    def test_ContiguityWeights_binning(self):
        self.assertTrue(hasattr(self.binningW, 'w'))
        self.assertTrue(issubclass(dict, type(self.binningW.w)))
        self.assertEqual(len(self.binningW.w), 136)

    def _compare_with_geoda(self, galfile, shapefile, wtype, idvariable):
        """Helper: W built here must match a GAL file created with OpenGeoda.

        Parameters
        ----------
        galfile     : example GAL file name created by OpenGeoda
        shapefile   : example shapefile name to build a W from
        wtype       : QUEEN or ROOK
        idvariable  : dbf column holding unique polygon ids
        """
        geodaW = pysal.open(pysal.examples.get_path(galfile), 'r').read()
        pysalWb = self.build_W(
            pysal.examples.get_path(shapefile), wtype, idvariable)
        for key in geodaW.neighbors:
            geoda_neighbors = sorted(int(x) for x in geodaW.neighbors[key])
            pysalb_neighbors = sorted(pysalWb.neighbors[int(key)])
            self.assertEqual(geoda_neighbors, pysalb_neighbors)

    def test_nested_polygons(self):
        self._compare_with_geoda('virginia.gal', 'virginia.shp',
                                 QUEEN, 'POLY_ID')

    def test_true_rook(self):
        self._compare_with_geoda('rook31.gal', 'rook31.shp',
                                 ROOK, 'POLY_ID')

    def test_true_rook2(self):
        self._compare_with_geoda('stl_hom_rook.gal', 'stl_hom.shp',
                                 ROOK, 'POLY_ID_OG')

    def test_true_rook3(self):
        self._compare_with_geoda('sacramentot2.gal', 'sacramentot2.shp',
                                 ROOK, 'POLYID')

    def test_true_rook4(self):
        self._compare_with_geoda('virginia_rook.gal', 'virginia.shp',
                                 ROOK, 'POLY_ID')

    def build_W(self, shapefile, type, idVariable=None):
        """Build a W the hard way with the binning algorithm.

        Returns a pysal.W keyed either by the values of ``idVariable``
        (when given) or by positional polygon index.
        """
        dbname = os.path.splitext(shapefile)[0] + '.dbf'
        db = pysal.open(dbname)
        shpObj = pysal.open(shapefile)
        neighbor_data = ContiguityWeights_binning(shpObj, type).w
        neighbors = {}
        if idVariable:
            ids = db.by_col[idVariable]
            # ids must be unique to serve as W keys
            self.assertEqual(len(ids), len(set(ids)))
            for key in neighbor_data:
                id = ids[key]
                if id not in neighbors:
                    neighbors[id] = set()
                neighbors[id].update([ids[x] for x in neighbor_data[key]])
            for key in neighbors:
                neighbors[key] = list(neighbors[key])
            binningW = pysal.W(neighbors, id_order=ids)
        else:
            # Bug fix: the original referenced an undefined loop variable
            # ``key`` here and built an empty/broken W; copy the positional
            # neighbor data through instead.
            for key in neighbor_data:
                neighbors[key] = list(neighbor_data[key])
            binningW = pysal.W(neighbors)
        return binningW
+
# Running this module directly delegates discovery to unittest.main().
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/weights/tests/test__contW_rtree.py b/pysal/weights/tests/test__contW_rtree.py
new file mode 100644
index 0000000..d6195a0
--- /dev/null
+++ b/pysal/weights/tests/test__contW_rtree.py
@@ -0,0 +1,138 @@
+import os
+import unittest
+import pysal
OK_TO_RUN = True
try:
    from pysal.weights._contW_rtree import ContiguityWeights_rtree, QUEEN, ROOK
except ImportError:
    # rtree is optional: flag the suite as not runnable instead of failing.
    OK_TO_RUN = False
    print("Cannot test rtree contiguity weights, rtree not installed")
+
+
class TestRtreeContiguityWeights(unittest.TestCase):
    def setUp(self):
        """Build rtree-based QUEEN contiguity weights for virginia.shp."""
        shpObj = pysal.open(pysal.examples.get_path('virginia.shp'), 'r')
        self.rtreeW = ContiguityWeights_rtree(shpObj, QUEEN)
        shpObj.close()

    def test_w_type(self):
        # assert_ is a deprecated alias; use assertTrue
        self.assertTrue(isinstance(self.rtreeW, ContiguityWeights_rtree))

    def test_QUEEN(self):
        self.assertEqual(QUEEN, 1)

    def test_ROOK(self):
        self.assertEqual(ROOK, 2)

    def test_ContiguityWeights_rtree(self):
        self.assertTrue(hasattr(self.rtreeW, 'w'))
        self.assertTrue(issubclass(dict, type(self.rtreeW.w)))
        self.assertEqual(len(self.rtreeW.w), 136)

    def _compare_with_geoda(self, galfile, shapefile, wtype, idvariable):
        """Helper: W built here must match a GAL file created with OpenGeoda.

        Parameters
        ----------
        galfile     : example GAL file name created by OpenGeoda
        shapefile   : example shapefile name to build a W from
        wtype       : QUEEN or ROOK
        idvariable  : dbf column holding unique polygon ids
        """
        geodaW = pysal.open(pysal.examples.get_path(galfile), 'r').read()
        pysalWr = self.build_W(
            pysal.examples.get_path(shapefile), wtype, idvariable)
        for key in geodaW.neighbors:
            geoda_neighbors = sorted(int(x) for x in geodaW.neighbors[key])
            pysalr_neighbors = sorted(pysalWr.neighbors[int(key)])
            self.assertEqual(geoda_neighbors, pysalr_neighbors)

    def test_nested_polygons(self):
        self._compare_with_geoda('virginia.gal', 'virginia.shp',
                                 QUEEN, 'POLY_ID')

    def test_true_rook(self):
        self._compare_with_geoda('rook31.gal', 'rook31.shp',
                                 ROOK, 'POLY_ID')

    def test_true_rook2(self):
        self._compare_with_geoda('stl_hom_rook.gal', 'stl_hom.shp',
                                 ROOK, 'POLY_ID_OG')

    def test_true_rook3(self):
        self._compare_with_geoda('sacramentot2.gal', 'sacramentot2.shp',
                                 ROOK, 'POLYID')

    def test_true_rook4(self):
        self._compare_with_geoda('virginia_rook.gal', 'virginia.shp',
                                 ROOK, 'POLY_ID')

    def build_W(self, shapefile, type, idVariable=None):
        """Build a W the hard way with the rtree algorithm.

        Returns a pysal.W keyed either by the values of ``idVariable``
        (when given) or by positional polygon index.
        """
        dbname = os.path.splitext(shapefile)[0] + '.dbf'
        db = pysal.open(dbname)
        shpObj = pysal.open(shapefile)
        neighbor_data = ContiguityWeights_rtree(shpObj, type).w
        neighbors = {}
        if idVariable:
            ids = db.by_col[idVariable]
            # ids must be unique to serve as W keys
            self.assertEqual(len(ids), len(set(ids)))
            for key in neighbor_data:
                id = ids[key]
                if id not in neighbors:
                    neighbors[id] = set()
                neighbors[id].update([ids[x] for x in neighbor_data[key]])
            for key in neighbors:
                neighbors[key] = list(neighbors[key])
            rtreeW = pysal.W(neighbors, id_order=ids)
        else:
            # Bug fix: the original referenced an undefined loop variable
            # ``key`` here and built an empty/broken W; copy the positional
            # neighbor data through instead.
            for key in neighbor_data:
                neighbors[key] = list(neighbor_data[key])
            rtreeW = pysal.W(neighbors)
        shpObj.seek(0)
        return rtreeW
+
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(TestRtreeContiguityWeights)

if __name__ == '__main__' and OK_TO_RUN:
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/weights/tests/test_spatial_lag.py b/pysal/weights/tests/test_spatial_lag.py
new file mode 100644
index 0000000..8883857
--- /dev/null
+++ b/pysal/weights/tests/test_spatial_lag.py
@@ -0,0 +1,41 @@
+
+import os
+import unittest
+import pysal
+import numpy as np
+
+
class Testlag_spatial(unittest.TestCase):
    def setUp(self):
        """Create a small 3-observation W plus a matching data vector."""
        self.neighbors = {'c': ['b'], 'b': ['c', 'a'], 'a': ['b']}
        # Bug fix: this assignment was duplicated in the original setUp.
        self.weights = {'c': [1.0], 'b': [1.0, 1.0], 'a': [1.0]}
        self.id_order = ['a', 'b', 'c']
        self.w = pysal.W(self.neighbors, self.weights, self.id_order)
        self.y = np.array([0, 1, 2])

    def test_lag_spatial(self):
        """Spatial lag tracks id_order changes and W transformations."""
        yl = pysal.lag_spatial(self.w, self.y)
        np.testing.assert_array_almost_equal(yl, [1., 2., 1.])
        # reordering ids must reorder the lag consistently
        self.w.id_order = ['b', 'c', 'a']
        y = np.array([1, 2, 0])
        yl = pysal.lag_spatial(self.w, y)
        np.testing.assert_array_almost_equal(yl, [2., 1., 1.])
        w = pysal.lat2W(3, 3)
        y = np.arange(9)
        yl = pysal.lag_spatial(w, y)
        ylc = np.array([4., 6., 6., 10., 16., 14., 10., 18., 12.])
        np.testing.assert_array_almost_equal(yl, ylc)
        # row-standardization turns the lag into a neighbor average
        w.transform = 'r'
        yl = pysal.lag_spatial(w, y)
        ylc = np.array(
            [2., 2., 3., 3.33333333, 4.,
             4.66666667, 5., 6., 6.])
        np.testing.assert_array_almost_equal(yl, ylc)
+
+
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(Testlag_spatial)

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/weights/tests/test_user.py b/pysal/weights/tests/test_user.py
new file mode 100644
index 0000000..bac1480
--- /dev/null
+++ b/pysal/weights/tests/test_user.py
@@ -0,0 +1,161 @@
+import os
+import unittest
+import pysal
+import numpy as np
+
+
class Testuser(unittest.TestCase):
    def setUp(self):
        self.wq = pysal.queen_from_shapefile(
            pysal.examples.get_path("columbus.shp"))
        self.wr = pysal.rook_from_shapefile(
            pysal.examples.get_path("columbus.shp"))

    def test_queen_from_shapefile(self):
        self.assertAlmostEqual(self.wq.pct_nonzero, 0.098292378175760101)

    def test_rook_from_shapefile(self):
        self.assertAlmostEqual(self.wr.pct_nonzero, 0.083298625572678045)

    def test_knnW_from_array(self):
        # regular 5x5 grid of points
        x, y = np.indices((5, 5))
        x.shape = (25, 1)
        y.shape = (25, 1)
        data = np.hstack([x, y])
        wnn2 = pysal.knnW_from_array(data, k=2)
        wnn4 = pysal.knnW_from_array(data, k=4)
        self.assertEqual(set(wnn4.neighbors[0]), set([1, 5, 6, 2]))
        self.assertEqual(set(wnn4.neighbors[5]), set([0, 6, 10, 1]))
        self.assertEqual(set(wnn2.neighbors[0]), set([1, 5]))
        self.assertEqual(set(wnn2.neighbors[5]), set([0, 6]))
        self.assertAlmostEqual(wnn2.pct_nonzero, 0.080000000000000002)
        self.assertAlmostEqual(wnn4.pct_nonzero, 0.16)
        # (removed a duplicated recomputation of wnn4 and its repeated assert)
        # p=2 Euclidean vs p=1 Manhattan metric yield different neighbor sets
        wnn3e = pysal.knnW(data, p=2, k=3)
        self.assertEqual(set(wnn3e.neighbors[0]), set([1, 5, 6]))
        wnn3m = pysal.knnW(data, p=1, k=3)
        self.assertEqual(set(wnn3m.neighbors[0]), set([1, 5, 2]))

    def test_knnW_from_shapefile(self):
        wc = pysal.knnW_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.assertAlmostEqual(wc.pct_nonzero, 0.040816326530612242)
        wc3 = pysal.knnW_from_shapefile(pysal.examples.get_path(
            "columbus.shp"), k=3)
        self.assertEqual(wc3.weights[1], [1, 1, 1])
        self.assertEqual(set(wc3.neighbors[1]), set([3, 0, 7]))
        self.assertEqual(set(wc.neighbors[0]), set([2, 1]))
        w = pysal.knnW_from_shapefile(pysal.examples.get_path('juvenile.shp'))
        self.assertAlmostEqual(w.pct_nonzero, 0.011904761904761904)
        w1 = pysal.knnW_from_shapefile(
            pysal.examples.get_path('juvenile.shp'), k=1)
        self.assertAlmostEqual(w1.pct_nonzero, 0.0059523809523809521)

    def test_threshold_binaryW_from_array(self):
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        w = pysal.threshold_binaryW_from_array(points, threshold=11.2)
        # point 2 is isolated at this threshold
        self.assertEqual(w.weights, {0: [1, 1], 1: [1, 1], 2: [],
                                     3: [1, 1], 4: [1], 5: [1]})
        self.assertEqual(w.neighbors, {0: [1, 3], 1: [0, 3], 2: [],
                                       3: [1, 0], 4: [5], 5: [4]})

    def test_threshold_binaryW_from_shapefile(self):
        w = pysal.threshold_binaryW_from_shapefile(pysal.examples.get_path(
            "columbus.shp"), 0.62, idVariable="POLYID")
        self.assertEqual(w.weights[1], [1, 1])

    def test_threshold_continuousW_from_array(self):
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        wid = pysal.threshold_continuousW_from_array(points, 11.2)
        self.assertEqual(wid.weights[0], [0.10000000000000001,
                                          0.089442719099991588])
        # alpha controls the inverse-distance exponent
        wid2 = pysal.threshold_continuousW_from_array(points, 11.2, alpha=-2.0)
        self.assertEqual(wid2.weights[0], [0.01, 0.0079999999999999984])

    def test_threshold_continuousW_from_shapefile(self):
        w = pysal.threshold_continuousW_from_shapefile(pysal.examples.get_path(
            "columbus.shp"), 0.62, idVariable="POLYID")
        self.assertEqual(
            w.weights[1], [1.6702346893743334, 1.7250729841938093])

    def test_kernelW(self):
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        kw = pysal.kernelW(points)
        self.assertEqual(kw.weights[0], [1.0, 0.50000004999999503,
                                         0.44098306152674649])
        self.assertEqual(kw.neighbors[0], [0, 1, 3])
        # fixed bandwidth: the same value for every observation
        np.testing.assert_array_almost_equal(
            kw.bandwidth, np.array([[20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002],
                                    [20.000002]]))

    def test_min_threshold_dist_from_shapefile(self):
        f = pysal.examples.get_path('columbus.shp')
        min_d = pysal.min_threshold_dist_from_shapefile(f)
        self.assertAlmostEqual(min_d, 0.61886415807685413)

    def test_kernelW_from_shapefile(self):
        kw = pysal.kernelW_from_shapefile(pysal.examples.get_path(
            'columbus.shp'), idVariable='POLYID')
        self.assertEqual(set(kw.weights[1]), set([0.0070787731484506233,
                                                  0.2052478782400463,
                                                  0.23051223027663237,
                                                  1.0
                                                  ]))
        np.testing.assert_array_almost_equal(
            kw.bandwidth[:3], np.array([[0.75333961], [0.75333961],
                                        [0.75333961]]))

    def test_adaptive_kernelW(self):
        points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
        bw = [25.0, 15.0, 25.0, 16.0, 14.5, 25.0]
        kwa = pysal.adaptive_kernelW(points, bandwidths=bw)
        self.assertEqual(kwa.weights[0], [1.0, 0.59999999999999998,
                                          0.55278640450004202,
                                          0.10557280900008403])
        self.assertEqual(kwa.neighbors[0], [0, 1, 3, 4])
        np.testing.assert_array_almost_equal(kwa.bandwidth,
                                             np.array([[25.], [15.], [25.],
                                                      [16.], [14.5], [25.]]))

        kweag = pysal.adaptive_kernelW(points, function='gaussian')
        self.assertEqual(
            kweag.weights[0], [0.3989422804014327, 0.26741902915776961,
                               0.24197074871621341])
        np.testing.assert_array_almost_equal(kweag.bandwidth,
                                             np.array([[11.18034101],
                                                       [11.18034101],
                                                       [20.000002],
                                                       [11.18034101],
                                                       [14.14213704],
                                                       [18.02775818]]))

    def test_adaptive_kernelW_from_shapefile(self):
        kwa = pysal.adaptive_kernelW_from_shapefile(
            pysal.examples.get_path('columbus.shp'))
        self.assertEqual(kwa.weights[0], [1.0, 0.03178906767736345,
                                          9.9999990066379496e-08])
        np.testing.assert_array_almost_equal(kwa.bandwidth[:3],
                                             np.array([[0.59871832],
                                                       [0.59871832],
                                                       [0.56095647]]))

    def test_build_lattice_shapefile(self):
        of = "lattice.shp"
        pysal.build_lattice_shapefile(20, 20, of)
        w = pysal.rook_from_shapefile(of)
        self.assertEqual(w.n, 400)
        # NOTE(review): only .shp/.shx are removed here; if
        # build_lattice_shapefile also writes a .dbf it is left behind —
        # confirm against the implementation.
        os.remove('lattice.shp')
        os.remove('lattice.shx')
+
+
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(Testuser)

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/weights/tests/test_util.py b/pysal/weights/tests/test_util.py
new file mode 100644
index 0000000..5ab0573
--- /dev/null
+++ b/pysal/weights/tests/test_util.py
@@ -0,0 +1,186 @@
+"""Unit test for util.py"""
+import pysal
+from pysal.common import *
+import pysal.weights
+import numpy as np
+from scipy import sparse, float32
+from scipy.spatial import KDTree
+import os
+import gc
+
+
class Testutil(unittest.TestCase):
    def setUp(self):
        self.w = pysal.rook_from_shapefile(
            pysal.examples.get_path('10740.shp'))

    def test_lat2W(self):
        w9 = pysal.lat2W(3, 3)
        self.assertEqual(w9.pct_nonzero, 0.29629629629629628)
        self.assertEqual(w9[0], {1: 1.0, 3: 1.0})
        self.assertEqual(w9[3], {0: 1.0, 4: 1.0, 6: 1.0})

    def test_lat2SW(self):
        w9 = pysal.weights.lat2SW(3, 3)
        rows, cols = w9.shape
        n = rows * cols
        pct_nonzero = w9.nnz / float(n)
        self.assertEqual(pct_nonzero, 0.29629629629629628)
        # full rook adjacency matrix of the 3x3 lattice
        expected = [[0, 1, 0, 1, 0, 0, 0, 0, 0],
                    [1, 0, 1, 0, 1, 0, 0, 0, 0],
                    [0, 1, 0, 0, 0, 1, 0, 0, 0],
                    [1, 0, 0, 0, 1, 0, 1, 0, 0],
                    [0, 1, 0, 1, 0, 1, 0, 1, 0],
                    [0, 0, 1, 0, 1, 0, 0, 0, 1],
                    [0, 0, 0, 1, 0, 0, 0, 1, 0],
                    [0, 0, 0, 0, 1, 0, 1, 0, 1],
                    [0, 0, 0, 0, 0, 1, 0, 1, 0]]
        self.assertEqual(w9.todense().tolist(), expected)

    def test_block_weights(self):
        # Removed dead code: the original built `regimes` with np.ones and
        # slice assignments, then immediately overwrote it with this
        # identical literal array.
        regimes = np.array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
                            2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 1., 3., 3.,
                            3., 3.])
        w = pysal.block_weights(regimes)
        ww0 = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
        self.assertEqual(w.weights[0], ww0)
        wn0 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 20]
        self.assertEqual(w.neighbors[0], wn0)
        # regimes may also be arbitrary labels
        regimes = ['n', 'n', 's', 's', 'e', 'e', 'w', 'w', 'e']
        w = pysal.block_weights(regimes)
        wn = {0: [1], 1: [0], 2: [3], 3: [2], 4: [5, 8], 5: [4, 8],
              6: [7], 7: [6], 8: [4, 5]}
        self.assertEqual(w.neighbors, wn)

    def test_comb(self):
        x = range(4)
        combos = [c for c in pysal.comb(x, 2)]
        expected = [[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]
        self.assertEqual(combos, expected)

    def test_order(self):
        w3 = pysal.order(self.w, kmax=3)
        w3105 = [1, -1, 1, 2, 1]
        self.assertEqual(w3105, w3[1][0:5])

    def test_higher_order(self):
        w10 = pysal.lat2W(10, 10)
        w10_2 = pysal.higher_order(w10, 2)
        w10_20 = {2: 1.0, 11: 1.0, 20: 1.0}
        self.assertEqual(w10_20, w10_2[0])
        w5 = pysal.lat2W()
        w50 = {1: 1.0, 5: 1.0}
        self.assertEqual(w50, w5[0])
        w51 = {0: 1.0, 2: 1.0, 6: 1.0}
        self.assertEqual(w51, w5[1])
        w5_2 = pysal.higher_order(w5, 2)
        w5_20 = {2: 1.0, 10: 1.0, 6: 1.0}
        self.assertEqual(w5_20, w5_2[0])

    def test_shimbel(self):
        w5 = pysal.lat2W()
        w5_shimbel = pysal.shimbel(w5)
        w5_shimbel024 = 8
        self.assertEqual(w5_shimbel024, w5_shimbel[0][24])
        w5_shimbel004 = [-1, 1, 2, 3]
        self.assertEqual(w5_shimbel004, w5_shimbel[0][0:4])

    def test_full(self):
        neighbors = {'first': ['second'],
                     'second': ['first', 'third'],
                     'third': ['second']}
        weights = {'first': [1], 'second': [1, 1], 'third': [1]}
        w = pysal.W(neighbors, weights)
        wf, ids = pysal.full(w)
        wfo = np.array([[0., 1., 0.], [1., 0., 1.], [0., 1., 0.]])
        np.testing.assert_array_almost_equal(wfo, wf, decimal=8)
        idso = ['first', 'second', 'third']
        self.assertEqual(idso, ids)

    def test_full2W(self):
        # random off-diagonal entries, zero diagonal
        a = np.zeros((4, 4))
        for i in range(len(a)):
            for j in range(len(a[i])):
                if i != j:
                    a[i, j] = np.random.random(1)
        w = pysal.weights.util.full2W(a)
        np.testing.assert_array_equal(w.full()[0], a)
        ids = ['myID0', 'myID1', 'myID2', 'myID3']
        w = pysal.weights.util.full2W(a, ids=ids)
        # (removed a no-op trailing comparison statement)
        np.testing.assert_array_equal(w.full()[0], a)

    def test_WSP2W(self):
        sp = pysal.weights.lat2SW(2, 5)
        wsp = pysal.weights.WSP(sp)
        w = pysal.weights.WSP2W(wsp)
        self.assertEqual(w.n, 10)
        self.assertEqual(w[0], {1: 1, 5: 1})
        w = pysal.open(pysal.examples.get_path('sids2.gal'), 'r').read()
        wsp = pysal.weights.WSP(w.sparse, w.id_order)
        w = pysal.weights.WSP2W(wsp)
        self.assertEqual(w.n, 100)
        self.assertEqual(w['37135'], {'37001': 1.0, '37033': 1.0,
                                      '37037': 1.0, '37063': 1.0, '37145': 1.0})

    def test_insert_diagonal(self):
        # default inserts 1.0 on the diagonal
        w1 = pysal.weights.insert_diagonal(self.w)
        r1 = {0: 1.0, 1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0}
        self.assertEqual(w1[0], r1)
        # scalar diagonal
        w1 = pysal.weights.insert_diagonal(self.w, 20)
        r1 = {0: 20, 1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0}
        self.assertEqual(w1[0], r1)
        # per-observation diagonal vector
        diag = np.arange(100, 100 + self.w.n)
        w1 = pysal.weights.insert_diagonal(self.w, diag)
        r1 = {0: 100, 1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0}
        self.assertEqual(w1[0], r1)

    def test_remap_ids(self):
        w = pysal.lat2W(3, 2)
        wid_order = [0, 1, 2, 3, 4, 5]
        self.assertEqual(wid_order, w.id_order)
        wneighbors0 = [2, 1]
        self.assertEqual(wneighbors0, w.neighbors[0])
        old_to_new = {0: 'a', 1: 'b', 2: 'c', 3: 'd', 4: 'e', 5: 'f'}
        w_new = pysal.remap_ids(w, old_to_new)
        w_newid_order = ['a', 'b', 'c', 'd', 'e', 'f']
        self.assertEqual(w_newid_order, w_new.id_order)
        w_newdneighborsa = ['c', 'b']
        self.assertEqual(w_newdneighborsa, w_new.neighbors['a'])

    def test_get_ids(self):
        polyids = pysal.weights.util.get_ids(
            pysal.examples.get_path('columbus.shp'), "POLYID")
        polyids5 = [1, 2, 3, 4, 5]
        self.assertEqual(polyids5, polyids[:5])

    def test_get_points_array_from_shapefile(self):
        xy = pysal.weights.util.get_points_array_from_shapefile(
            pysal.examples.get_path('juvenile.shp'))
        xy3 = np.array([[94., 93.], [80., 95.], [79., 90.]])
        np.testing.assert_array_almost_equal(xy3, xy[:3], decimal=8)
        xy = pysal.weights.util.get_points_array_from_shapefile(
            pysal.examples.get_path('columbus.shp'))
        xy3 = np.array([[8.82721847, 14.36907602], [8.33265837,
                                                    14.03162401], [9.01226541, 13.81971908]])
        np.testing.assert_array_almost_equal(xy3, xy[:3], decimal=8)

    def test_min_threshold_distance(self):
        x, y = np.indices((5, 5))
        x.shape = (25, 1)
        y.shape = (25, 1)
        data = np.hstack([x, y])
        mint = 1.0
        self.assertEqual(
            mint, pysal.weights.util.min_threshold_distance(data))
+
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(Testutil)

if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
diff --git a/pysal/weights/tests/test_weights.py b/pysal/weights/tests/test_weights.py
new file mode 100644
index 0000000..66d6a7b
--- /dev/null
+++ b/pysal/weights/tests/test_weights.py
@@ -0,0 +1,451 @@
+import unittest
+import pysal
+import numpy as np
+
+NPTA3E = np.testing.assert_array_almost_equal
+
+
class TestW(unittest.TestCase):
    """Unit tests for the core spatial weights class ``pysal.W``."""

    def setUp(self):
        from pysal import rook_from_shapefile
        self.w = rook_from_shapefile(pysal.examples.get_path('10740.shp'))

        # Rook contiguity structure of a 3x3 regular lattice.
        self.neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5],
                          3: [0, 6, 4], 4: [1, 3, 7, 5], 5: [2, 4, 8],
                          6: [3, 7], 7: [4, 6, 8], 8: [5, 7]}
        self.weights = {0: [1, 1], 1: [1, 1, 1], 2: [1, 1],
                        3: [1, 1, 1], 4: [1, 1, 1, 1], 5: [1, 1, 1],
                        6: [1, 1], 7: [1, 1, 1], 8: [1, 1]}

        self.w3x3 = pysal.lat2W(3, 3)

    def test_W(self):
        w = pysal.W(self.neighbors, self.weights)
        self.assertEqual(w.pct_nonzero, 0.29629629629629628)

    def test___getitem__(self):
        self.assertEqual(
            self.w[0], {1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0})

    def test___init__(self):
        w = pysal.W(self.neighbors, self.weights)
        self.assertEqual(w.pct_nonzero, 0.29629629629629628)

    def test___iter__(self):
        w = pysal.lat2W(3, 3)
        res = {}
        for i, wi in enumerate(w):
            res[i] = wi
        self.assertEqual(res[0], (0, {1: 1.0, 3: 1.0}))
        self.assertEqual(res[8], (8, {5: 1.0, 7: 1.0}))

    def test_asymmetries(self):
        w = pysal.lat2W(3, 3)
        w.transform = 'r'
        result = w.asymmetry()
        self.assertEqual(result, [(0, 1), (0, 3), (1, 0), (1, 2), (1, 4),
                                  (2, 1), (2, 5), (3, 0), (3, 4), (3, 6),
                                  (4, 1), (4, 3), (4, 5), (4, 7), (5, 2),
                                  (5, 4), (5, 8), (6, 3), (6, 7), (7, 4),
                                  (7, 6), (7, 8), (8, 5), (8, 7)])

    def test_asymmetry(self):
        w = pysal.lat2W(3, 3)
        self.assertEqual(w.asymmetry(), [])
        w.transform = 'r'
        self.assertFalse(w.asymmetry() == [])

    def test_cardinalities(self):
        w = pysal.lat2W(3, 3)
        self.assertEqual(w.cardinalities, {0: 2, 1: 3, 2: 2, 3: 3, 4: 4,
                                           5: 3, 6: 2, 7: 3, 8: 2})

    def test_diagW2(self):
        NPTA3E(self.w3x3.diagW2, np.array([2., 3., 2., 3., 4., 3., 2.,
                                           3., 2.]))

    def test_diagWtW(self):
        # BUG FIX: this test previously re-checked diagW2 instead of
        # diagWtW.  For a symmetric binary W the two diagonals coincide,
        # so the expected values are unchanged.
        NPTA3E(self.w3x3.diagWtW, np.array([2., 3., 2., 3., 4., 3., 2.,
                                            3., 2.]))

    def test_diagWtW_WW(self):
        NPTA3E(self.w3x3.diagWtW_WW, np.array([4., 6., 4., 6., 8.,
                                               6., 4., 6., 4.]))

    def test_full(self):
        wf = np.array([[0., 1., 0., 1., 0., 0., 0., 0., 0.],
                       [1., 0., 1., 0., 1., 0., 0., 0., 0.],
                       [0., 1., 0., 0., 0., 1., 0., 0., 0.],
                       [1., 0., 0., 0., 1., 0., 1., 0., 0.],
                       [0., 1., 0., 1., 0., 1., 0., 1., 0.],
                       [0., 0., 1., 0., 1., 0., 0., 0., 1.],
                       [0., 0., 0., 1., 0., 0., 0., 1., 0.],
                       [0., 0., 0., 0., 1., 0., 1., 0., 1.],
                       [0., 0., 0., 0., 0., 1., 0., 1., 0.]])
        ids = range(9)

        wf1, ids1 = self.w3x3.full()
        NPTA3E(wf1, wf)
        self.assertEqual(ids1, ids)

    def test_get_transform(self):
        self.assertEqual(self.w3x3.transform, 'O')
        self.w3x3.transform = 'r'
        self.assertEqual(self.w3x3.transform, 'R')
        self.w3x3.transform = 'b'

    def test_higher_order(self):
        weights = {0: [1.0, 1.0, 1.0], 1: [1.0, 1.0, 1.0],
                   2: [1.0, 1.0, 1.0], 3: [1.0, 1.0, 1.0],
                   4: [1.0, 1.0, 1.0, 1.0], 5: [1.0, 1.0, 1.0],
                   6: [1.0, 1.0, 1.0], 7: [1.0, 1.0, 1.0],
                   8: [1.0, 1.0, 1.0]}
        neighbors = {0: [4, 6, 2], 1: [3, 5, 7], 2: [8, 0, 4],
                     3: [7, 1, 5], 4: [8, 0, 2, 6], 5: [1, 3, 7],
                     6: [4, 0, 8], 7: [3, 1, 5], 8: [6, 2, 4]}
        w2 = pysal.higher_order(self.w3x3, 2)
        self.assertEqual(w2.neighbors, neighbors)
        self.assertEqual(w2.weights, weights)

    def test_histogram(self):
        hist = [(0, 1), (1, 1), (2, 4), (3, 20), (4, 57), (5, 44), (6, 36),
                (7, 15), (8, 7), (9, 1), (10, 6), (11, 0), (12, 2), (13, 0),
                (14, 0), (15, 1)]
        self.assertEqual(self.w.histogram, hist)

    def test_id2i(self):
        id2i = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8}
        self.assertEqual(self.w3x3.id2i, id2i)

    def test_id_order_set(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c': ['b']})
        self.assertFalse(w.id_order_set)

    def test_islands(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c':
                               ['b'], 'd': []})
        self.assertEqual(w.islands, ['d'])
        self.assertEqual(self.w3x3.islands, [])

    def test_max_neighbors(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c':
                               ['b'], 'd': []})
        self.assertEqual(w.max_neighbors, 2)
        self.assertEqual(self.w3x3.max_neighbors, 4)

    def test_mean_neighbors(self):
        w = pysal.lat2W()
        self.assertEqual(w.mean_neighbors, 3.2)

    def test_min_neighbors(self):
        w = pysal.lat2W()
        self.assertEqual(w.min_neighbors, 2)

    def test_n(self):
        w = pysal.lat2W()
        self.assertEqual(w.n, 25)

    def test_neighbor_offsets(self):
        d = {0: [3, 1],
             1: [0, 4, 2],
             2: [1, 5],
             3: [0, 6, 4],
             4: [1, 3, 7, 5],
             5: [2, 4, 8],
             6: [3, 7],
             7: [4, 6, 8],
             8: [5, 7]}

        self.assertEqual(self.w3x3.neighbor_offsets, d)

    def test_nonzero(self):
        self.assertEqual(self.w3x3.nonzero, 24)

    def test_order(self):
        w = pysal.lat2W(3, 3)
        o = {0: [-1, 1, 2, 1, 2, 3, 2, 3, 0],
             1: [1, -1, 1, 2, 1, 2, 3, 2, 3],
             2: [2, 1, -1, 3, 2, 1, 0, 3, 2],
             3: [1, 2, 3, -1, 1, 2, 1, 2, 3],
             4: [2, 1, 2, 1, -1, 1, 2, 1, 2],
             5: [3, 2, 1, 2, 1, -1, 3, 2, 1],
             6: [2, 3, 0, 1, 2, 3, -1, 1, 2],
             7: [3, 2, 3, 2, 1, 2, 1, -1, 1],
             8: [0, 3, 2, 3, 2, 1, 2, 1, -1]}
        self.assertEqual(pysal.order(w), o)

    def test_pct_nonzero(self):
        self.assertEqual(self.w3x3.pct_nonzero, 0.29629629629629628)

    def test_s0(self):
        self.assertEqual(self.w3x3.s0, 24.0)

    def test_s1(self):
        self.assertEqual(self.w3x3.s1, 48.0)

    def test_s2(self):
        self.assertEqual(self.w3x3.s2, 272.0)

    def test_s2array(self):
        s2a = np.array([[16.], [36.], [16.], [36.],
                        [64.], [36.], [16.], [36.], [16.]])
        NPTA3E(self.w3x3.s2array, s2a)

    def test_sd(self):
        self.assertEqual(self.w3x3.sd, 0.66666666666666663)

    def test_set_transform(self):
        w = pysal.lat2W(2, 2)
        self.assertEqual(w.transform, 'O')
        self.assertEqual(w.weights[0], [1.0, 1.0])
        w.transform = 'r'
        self.assertEqual(w.weights[0], [0.5, 0.5])

    def test_shimbel(self):
        d = {0: [-1, 1, 2, 1, 2, 3, 2, 3, 4],
             1: [1, -1, 1, 2, 1, 2, 3, 2, 3],
             2: [2, 1, -1, 3, 2, 1, 4, 3, 2],
             3: [1, 2, 3, -1, 1, 2, 1, 2, 3],
             4: [2, 1, 2, 1, -1, 1, 2, 1, 2],
             5: [3, 2, 1, 2, 1, -1, 3, 2, 1],
             6: [2, 3, 4, 1, 2, 3, -1, 1, 2],
             7: [3, 2, 3, 2, 1, 2, 1, -1, 1],
             8: [4, 3, 2, 3, 2, 1, 2, 1, -1]}
        self.assertEqual(pysal.shimbel(self.w3x3), d)

    def test_sparse(self):
        self.assertEqual(self.w3x3.sparse.nnz, 24)

    def test_trcW2(self):
        self.assertEqual(self.w3x3.trcW2, 24.)

    def test_trcWtW(self):
        self.assertEqual(self.w3x3.trcWtW, 24.)

    def test_trcWtW_WW(self):
        self.assertEqual(self.w3x3.trcWtW_WW, 48.)
+
+
class Test_WSP_Back_To_W(unittest.TestCase):
    """Round-trip tests: W -> WSP -> W must preserve W functionality."""

    def setUp(self):
        from pysal import rook_from_shapefile
        self.w = rook_from_shapefile(pysal.examples.get_path('10740.shp'))
        wsp = pysal.weights.WSP(self.w.sparse, self.w.id_order)
        self.w = pysal.weights.WSP2W(wsp)

        # Rook contiguity structure of a 3x3 regular lattice.
        self.neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5],
                          3: [0, 6, 4], 4: [1, 3, 7, 5], 5: [2, 4, 8],
                          6: [3, 7], 7: [4, 6, 8], 8: [5, 7]}
        self.weights = {0: [1, 1], 1: [1, 1, 1], 2: [1, 1],
                        3: [1, 1, 1], 4: [1, 1, 1, 1], 5: [1, 1, 1],
                        6: [1, 1], 7: [1, 1, 1], 8: [1, 1]}

        self.w3x3 = pysal.lat2W(3, 3)
        w3x3 = pysal.weights.WSP(self.w3x3.sparse, self.w3x3.id_order)
        self.w3x3 = pysal.weights.WSP2W(w3x3)

    def test_W(self):
        w = pysal.W(self.neighbors, self.weights)
        self.assertEqual(w.pct_nonzero, 0.29629629629629628)

    def test___getitem__(self):
        self.assertEqual(
            self.w[0], {1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0})

    def test___init__(self):
        w = pysal.W(self.neighbors, self.weights)
        self.assertEqual(w.pct_nonzero, 0.29629629629629628)

    def test___iter__(self):
        w = pysal.lat2W(3, 3)
        res = {}
        for i, wi in enumerate(w):
            res[i] = wi
        self.assertEqual(res[0], (0, {1: 1.0, 3: 1.0}))
        self.assertEqual(res[8], (8, {5: 1.0, 7: 1.0}))

    def test_asymmetries(self):
        w = pysal.lat2W(3, 3)
        w.transform = 'r'
        result = w.asymmetry()
        self.assertEqual(result, [(0, 1), (0, 3), (1, 0), (1, 2), (1, 4),
                                  (2, 1), (2, 5), (3, 0), (3, 4), (3, 6),
                                  (4, 1), (4, 3), (4, 5), (4, 7), (5, 2),
                                  (5, 4), (5, 8), (6, 3), (6, 7), (7, 4),
                                  (7, 6), (7, 8), (8, 5), (8, 7)])

    def test_asymmetry(self):
        w = pysal.lat2W(3, 3)
        self.assertEqual(w.asymmetry(), [])
        w.transform = 'r'
        self.assertFalse(w.asymmetry() == [])

    def test_cardinalities(self):
        w = pysal.lat2W(3, 3)
        self.assertEqual(w.cardinalities, {0: 2, 1: 3, 2: 2, 3: 3, 4: 4,
                                           5: 3, 6: 2, 7: 3, 8: 2})

    def test_diagW2(self):
        NPTA3E(self.w3x3.diagW2, np.array([2., 3., 2., 3., 4., 3., 2.,
                                           3., 2.]))

    def test_diagWtW(self):
        # BUG FIX: this test previously re-checked diagW2 instead of
        # diagWtW.  For a symmetric binary W the two diagonals coincide,
        # so the expected values are unchanged.
        NPTA3E(self.w3x3.diagWtW, np.array([2., 3., 2., 3., 4., 3., 2.,
                                            3., 2.]))

    def test_diagWtW_WW(self):
        NPTA3E(self.w3x3.diagWtW_WW, np.array([4., 6., 4., 6., 8.,
                                               6., 4., 6., 4.]))

    def test_full(self):
        wf = np.array([[0., 1., 0., 1., 0., 0., 0., 0., 0.],
                       [1., 0., 1., 0., 1., 0., 0., 0., 0.],
                       [0., 1., 0., 0., 0., 1., 0., 0., 0.],
                       [1., 0., 0., 0., 1., 0., 1., 0., 0.],
                       [0., 1., 0., 1., 0., 1., 0., 1., 0.],
                       [0., 0., 1., 0., 1., 0., 0., 0., 1.],
                       [0., 0., 0., 1., 0., 0., 0., 1., 0.],
                       [0., 0., 0., 0., 1., 0., 1., 0., 1.],
                       [0., 0., 0., 0., 0., 1., 0., 1., 0.]])
        ids = range(9)

        wf1, ids1 = self.w3x3.full()
        NPTA3E(wf1, wf)
        self.assertEqual(ids1, ids)

    def test_get_transform(self):
        self.assertEqual(self.w3x3.transform, 'O')
        self.w3x3.transform = 'r'
        self.assertEqual(self.w3x3.transform, 'R')
        self.w3x3.transform = 'b'

    def test_higher_order(self):
        weights = {0: [1.0, 1.0, 1.0], 1: [1.0, 1.0, 1.0],
                   2: [1.0, 1.0, 1.0], 3: [1.0, 1.0, 1.0],
                   4: [1.0, 1.0, 1.0, 1.0], 5: [1.0, 1.0, 1.0],
                   6: [1.0, 1.0, 1.0], 7: [1.0, 1.0, 1.0],
                   8: [1.0, 1.0, 1.0]}
        neighbors = {0: [4, 6, 2], 1: [3, 5, 7], 2: [8, 0, 4],
                     3: [7, 1, 5], 4: [8, 0, 2, 6], 5: [1, 3, 7],
                     6: [4, 0, 8], 7: [3, 1, 5], 8: [6, 2, 4]}
        w2 = pysal.higher_order(self.w3x3, 2)
        self.assertEqual(w2.neighbors, neighbors)
        self.assertEqual(w2.weights, weights)

    def test_histogram(self):
        hist = [(0, 1), (1, 1), (2, 4), (3, 20), (4, 57), (5, 44), (6, 36),
                (7, 15), (8, 7), (9, 1), (10, 6), (11, 0), (12, 2), (13, 0),
                (14, 0), (15, 1)]
        self.assertEqual(self.w.histogram, hist)

    def test_id2i(self):
        id2i = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8}
        self.assertEqual(self.w3x3.id2i, id2i)

    def test_id_order_set(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c': ['b']})
        self.assertFalse(w.id_order_set)

    def test_islands(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c':
                               ['b'], 'd': []})
        self.assertEqual(w.islands, ['d'])
        self.assertEqual(self.w3x3.islands, [])

    def test_max_neighbors(self):
        w = pysal.W(neighbors={'a': ['b'], 'b': ['a', 'c'], 'c':
                               ['b'], 'd': []})
        self.assertEqual(w.max_neighbors, 2)
        self.assertEqual(self.w3x3.max_neighbors, 4)

    def test_mean_neighbors(self):
        w = pysal.lat2W()
        self.assertEqual(w.mean_neighbors, 3.2)

    def test_min_neighbors(self):
        w = pysal.lat2W()
        self.assertEqual(w.min_neighbors, 2)

    def test_n(self):
        w = pysal.lat2W()
        self.assertEqual(w.n, 25)

    def test_nonzero(self):
        self.assertEqual(self.w3x3.nonzero, 24)

    def test_order(self):
        w = pysal.lat2W(3, 3)
        o = {0: [-1, 1, 2, 1, 2, 3, 2, 3, 0],
             1: [1, -1, 1, 2, 1, 2, 3, 2, 3],
             2: [2, 1, -1, 3, 2, 1, 0, 3, 2],
             3: [1, 2, 3, -1, 1, 2, 1, 2, 3],
             4: [2, 1, 2, 1, -1, 1, 2, 1, 2],
             5: [3, 2, 1, 2, 1, -1, 3, 2, 1],
             6: [2, 3, 0, 1, 2, 3, -1, 1, 2],
             7: [3, 2, 3, 2, 1, 2, 1, -1, 1],
             8: [0, 3, 2, 3, 2, 1, 2, 1, -1]}
        self.assertEqual(pysal.order(w), o)

    def test_pct_nonzero(self):
        self.assertEqual(self.w3x3.pct_nonzero, 0.29629629629629628)

    def test_s0(self):
        self.assertEqual(self.w3x3.s0, 24.0)

    def test_s1(self):
        self.assertEqual(self.w3x3.s1, 48.0)

    def test_s2(self):
        self.assertEqual(self.w3x3.s2, 272.0)

    def test_s2array(self):
        s2a = np.array([[16.], [36.], [16.], [36.],
                        [64.], [36.], [16.], [36.], [16.]])
        NPTA3E(self.w3x3.s2array, s2a)

    def test_sd(self):
        self.assertEqual(self.w3x3.sd, 0.66666666666666663)

    def test_set_transform(self):
        w = pysal.lat2W(2, 2)
        self.assertEqual(w.transform, 'O')
        self.assertEqual(w.weights[0], [1.0, 1.0])
        w.transform = 'r'
        self.assertEqual(w.weights[0], [0.5, 0.5])

    def test_shimbel(self):
        d = {0: [-1, 1, 2, 1, 2, 3, 2, 3, 4],
             1: [1, -1, 1, 2, 1, 2, 3, 2, 3],
             2: [2, 1, -1, 3, 2, 1, 4, 3, 2],
             3: [1, 2, 3, -1, 1, 2, 1, 2, 3],
             4: [2, 1, 2, 1, -1, 1, 2, 1, 2],
             5: [3, 2, 1, 2, 1, -1, 3, 2, 1],
             6: [2, 3, 4, 1, 2, 3, -1, 1, 2],
             7: [3, 2, 3, 2, 1, 2, 1, -1, 1],
             8: [4, 3, 2, 3, 2, 1, 2, 1, -1]}
        self.assertEqual(pysal.shimbel(self.w3x3), d)

    def test_sparse(self):
        self.assertEqual(self.w3x3.sparse.nnz, 24)

    def test_trcW2(self):
        self.assertEqual(self.w3x3.trcW2, 24.)

    def test_trcWtW(self):
        self.assertEqual(self.w3x3.trcWtW, 24.)

    def test_trcWtW_WW(self):
        self.assertEqual(self.w3x3.trcWtW_WW, 48.)
+
+
class TestWSP(unittest.TestCase):
    """Unit tests for the sparse spatial weights class ``pysal.weights.WSP``."""

    def setUp(self):
        # BUG FIX: removed an unused ``from pysal import rook_from_shapefile``
        # import that was never referenced in this class.
        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
        self.wsp = pysal.weights.WSP(self.w.sparse, self.w.id_order)
        w3x3 = pysal.lat2W(3, 3)
        self.w3x3 = pysal.weights.WSP(w3x3.sparse)

    def test_WSP(self):
        # The WSP wrapper must preserve id order, size and matrix content.
        self.assertEqual(self.w.id_order, self.wsp.id_order)
        self.assertEqual(self.w.n, self.wsp.n)
        np.testing.assert_array_equal(
            self.w.sparse.todense(), self.wsp.sparse.todense())

    def test_diagWtW_WW(self):
        NPTA3E(self.w3x3.diagWtW_WW, np.array([4., 6., 4., 6., 8.,
                                               6., 4., 6., 4.]))

    def test_trcWtW_WW(self):
        self.assertEqual(self.w3x3.trcWtW_WW, 48.)

    def test_s0(self):
        self.assertEqual(self.w3x3.s0, 24.0)
+
+
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
diff --git a/pysal/weights/user.py b/pysal/weights/user.py
new file mode 100644
index 0000000..133b8c2
--- /dev/null
+++ b/pysal/weights/user.py
@@ -0,0 +1,1123 @@
+"""
+Convenience functions for the construction of spatial weights based on
+contiguity and distance criteria
+"""
+
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+__all__ = ['queen_from_shapefile', 'rook_from_shapefile', 'knnW_from_array', 'knnW_from_shapefile', 'threshold_binaryW_from_array', 'threshold_binaryW_from_shapefile', 'threshold_continuousW_from_array', 'threshold_continuousW_from_shapefile', 'kernelW', 'kernelW_from_shapefile', 'adaptive_kernelW', 'adaptive_kernelW_from_shapefile', 'min_threshold_dist_from_shapefile', 'build_lattice_shapefile']
+
+import pysal
+from Contiguity import buildContiguity
+from Distance import knnW, Kernel, DistanceBand
+from util import get_ids, get_points_array_from_shapefile, min_threshold_distance
+import numpy as np
+
def queen_from_shapefile(shapefile, idVariable=None, sparse=False):
    """
    Build queen contiguity weights from a polygon shapefile.

    Parameters
    ----------

    shapefile   : string
                  name of polygon shapefile including suffix.
    idVariable  : string
                  name of a column in the shapefile's DBF to use for ids.
    sparse    : boolean
                If True return WSP instance
                If False return W instance
    Returns
    -------

    w            : W
                   instance of spatial weights

    Examples
    --------
    >>> wq=queen_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> "%.3f"%wq.pct_nonzero
    '0.098'
    >>> wq=queen_from_shapefile(pysal.examples.get_path("columbus.shp"),"POLYID")
    >>> "%.3f"%wq.pct_nonzero
    '0.098'
    >>> wq=queen_from_shapefile(pysal.examples.get_path("columbus.shp"), sparse=True)
    >>> pct_sp = wq.sparse.nnz *1. / wq.n**2
    >>> "%.3f"%pct_sp
    '0.098'

    Notes
    -----

    Two polygons are queen neighbors when their definitions share at least
    one vertex.

    See Also
    --------
    :class:`pysal.weights.W`

    """
    shp = pysal.open(shapefile)
    w = buildContiguity(shp, criterion='queen')
    # Re-key the observations when a DBF id column was requested.
    ids = get_ids(shapefile, idVariable) if idVariable else None
    if ids is not None:
        w.remap_ids(ids)
    shp.close()
    w.set_shapefile(shapefile, idVariable)

    if sparse:
        w = pysal.weights.WSP(w.sparse, id_order=ids)

    return w
+
+
def rook_from_shapefile(shapefile, idVariable=None, sparse=False):
    """
    Rook contiguity weights from a polygon shapefile

    Parameters
    ----------

    shapefile : string
                name of polygon shapefile including suffix.
    idVariable: string
                name of a column in the shapefile's DBF to use for ids.
    sparse    : boolean
                If True return WSP instance
                If False return W instance

    Returns
    -------

    w          : W
                 instance of spatial weights

    Examples
    --------
    >>> wr=rook_from_shapefile(pysal.examples.get_path("columbus.shp"), "POLYID")
    >>> "%.3f"%wr.pct_nonzero
    '0.083'
    >>> wr=rook_from_shapefile(pysal.examples.get_path("columbus.shp"), sparse=True)
    >>> pct_sp = wr.sparse.nnz *1. / wr.n**2
    >>> "%.3f"%pct_sp
    '0.083'

    Notes
    -----

    Rook contiguity defines as neighbors any pair of polygons that share a
    common edge in their polygon definitions.

    See Also
    --------
    :class:`pysal.weights.W`

    """
    shp = pysal.open(shapefile)
    w = buildContiguity(shp, criterion='rook')
    if idVariable:
        # Re-key the observations by the requested DBF column.
        ids = get_ids(shapefile, idVariable)
        w.remap_ids(ids)
    else:
        ids = None
    shp.close()
    w.set_shapefile(shapefile, idVariable)

    if sparse:
        w = pysal.weights.WSP(w.sparse, id_order=ids)

    return w
+
+
def spw_from_gal(galfile):
    """
    Sparse weights object from a gal file

    Parameters
    ----------

    galfile : string
              name of gal file including suffix

    Returns
    -------

    spw     : WSP
              sparse weights object; its ``sparse`` attribute is a scipy
              sparse matrix in CSR format and its ``id_order`` holds the
              identifiers for the rows/cols of the matrix.

    Notes
    -----
    The previous docstring advertised a ``(spw, ids)`` pair, but the file
    is read with ``sparse=True`` and a single object is returned, as the
    example below demonstrates.

    Examples
    --------

    >>> spw = pysal.weights.user.spw_from_gal(pysal.examples.get_path("sids2.gal"))
    >>> spw.sparse.nnz
    462
    """

    return pysal.open(galfile, 'r').read(sparse=True)
+
+# Distance based weights
+
+
def knnW_from_array(array, k=2, p=2, ids=None, radius=None):
    """
    Nearest neighbor weights from a numpy array

    Parameters
    ----------

    array      : array (n,m)
                 attribute data, n observations on m attributes
                 (the docstring previously called this parameter ``data``,
                 which does not match the signature)
    k          : int
                 number of nearest neighbors
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    ids        : list
                 identifiers to attach to each observation
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with binary weights

    Examples
    --------
    >>> import numpy as np
    >>> x,y=np.indices((5,5))
    >>> x.shape=(25,1)
    >>> y.shape=(25,1)
    >>> data=np.hstack([x,y])
    >>> wnn2=knnW_from_array(data,k=2)
    >>> wnn4=knnW_from_array(data,k=4)
    >>> set([1, 5, 6, 2]) == set(wnn4.neighbors[0])
    True
    >>> set([0, 1, 10, 6]) == set(wnn4.neighbors[5])
    True
    >>> set([1, 5]) == set(wnn2.neighbors[0])
    True
    >>> set([0,6]) == set(wnn2.neighbors[5])
    True
    >>> "%.2f"%wnn2.pct_nonzero
    '0.08'
    >>> wnn4.pct_nonzero
    0.16
    >>> wnn4=knnW_from_array(data,k=4)
    >>> set([ 1,5,6,2]) == set(wnn4.neighbors[0])
    True
    >>> wnn4=knnW_from_array(data,k=4)
    >>> wnn3e=knnW(data,p=2,k=3)
    >>> set([1,5,6]) == set(wnn3e.neighbors[0])
    True
    >>> wnn3m=knnW(data,p=1,k=3)
    >>> set([1,5,2]) == set(wnn3m.neighbors[0])
    True

    Notes
    -----

    Ties between neighbors of equal distance are arbitrarily broken.

    See Also
    --------
    :class:`pysal.weights.W`

    """
    if radius is not None:
        # Wrap the coordinates in an arc-distance KDTree; knnW accepts it
        # in place of the raw array.
        array = pysal.cg.KDTree(array, distance_metric='Arc', radius=radius)
    return knnW(array, k=k, p=p, ids=ids)
+
+
def knnW_from_shapefile(shapefile, k=2, p=2, idVariable=None, radius=None):
    """
    Nearest neighbor weights from a shapefile

    Parameters
    ----------

    shapefile  : string
                 shapefile name with shp suffix
    k          : int
                 number of nearest neighbors
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    idVariable : string
                 name of a column in the shapefile's DBF to use for ids
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with binary weights

    Examples
    --------

    Polygon shapefile

    >>> wc=knnW_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> "%.4f"%wc.pct_nonzero
    '0.0408'
    >>> set([2,1]) == set(wc.neighbors[0])
    True
    >>> wc3=pysal.knnW_from_shapefile(pysal.examples.get_path("columbus.shp"),k=3)
    >>> set(wc3.neighbors[0]) == set([2,1,3])
    True
    >>> set(wc3.neighbors[2]) == set([4,3,0])
    True

    1 offset rather than 0 offset

    >>> wc3_1=knnW_from_shapefile(pysal.examples.get_path("columbus.shp"),k=3,idVariable="POLYID")
    >>> set([4,3,2]) == set(wc3_1.neighbors[1])
    True
    >>> wc3_1.weights[2]
    [1.0, 1.0, 1.0]
    >>> set([4,1,8]) == set(wc3_1.neighbors[2])
    True

    Point shapefile

    >>> w=knnW_from_shapefile(pysal.examples.get_path("juvenile.shp"))
    >>> w.pct_nonzero
    0.011904761904761904
    >>> w1=knnW_from_shapefile(pysal.examples.get_path("juvenile.shp"),k=1)
    >>> "%.3f"%w1.pct_nonzero
    '0.006'
    >>>

    Notes
    -----

    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    Ties between neighbors of equal distance are arbitrarily broken.

    See Also
    --------
    :class:`pysal.weights.W`

    """

    pts = get_points_array_from_shapefile(shapefile)
    if radius is not None:
        # Arc distances requested: hand knnW a spherical KDTree instead.
        pts = pysal.cg.KDTree(pts, distance_metric='Arc', radius=radius)
    if not idVariable:
        return knnW(pts, k=k, p=p)
    return knnW(pts, k=k, p=p, ids=get_ids(shapefile, idVariable))
+
+
def threshold_binaryW_from_array(array, threshold, p=2, radius=None):
    """
    Binary weights based on a distance threshold

    Parameters
    ----------

    array      : array (n,m)
                 attribute data, n observations on m attributes
    threshold  : float
                 distance band
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with binary weights

    Examples
    --------
    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> w=threshold_binaryW_from_array(points,threshold=11.2)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> w.weights
    {0: [1, 1], 1: [1, 1], 2: [], 3: [1, 1], 4: [1], 5: [1]}
    >>> w.neighbors
    {0: [1, 3], 1: [0, 3], 2: [], 3: [1, 0], 4: [5], 5: [4]}
    >>>
    """
    pts = array
    if radius is not None:
        # Arc distances requested: wrap the points in a spherical KDTree.
        pts = pysal.cg.KDTree(array, distance_metric='Arc', radius=radius)
    return DistanceBand(pts, threshold=threshold, p=p)
+
+
def threshold_binaryW_from_shapefile(shapefile, threshold, p=2, idVariable=None, radius=None):
    """
    Threshold distance based binary weights from a shapefile

    Parameters
    ----------

    shapefile  : string
                 shapefile name with shp suffix
    threshold  : float
                 distance band
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    idVariable : string
                 name of a column in the shapefile's DBF to use for ids
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with binary weights

    Examples
    --------
    >>> w = threshold_binaryW_from_shapefile(pysal.examples.get_path("columbus.shp"),0.62,idVariable="POLYID")
    >>> w.weights[1]
    [1, 1]

    Notes
    -----
    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    """
    data = get_points_array_from_shapefile(shapefile)
    if radius is not None:
        data = pysal.cg.KDTree(data, distance_metric='Arc', radius=radius)
    if not idVariable:
        # No id column requested: delegate to the array-based builder.
        return threshold_binaryW_from_array(data, threshold, p=p)
    w = DistanceBand(data, threshold=threshold, p=p)
    w.remap_ids(get_ids(shapefile, idVariable))
    return w
+
+
def threshold_continuousW_from_array(array, threshold, p=2,
                                     alpha=-1, radius=None):

    """
    Continuous weights based on a distance threshold

    Parameters
    ----------

    array      : array (n,m)
                 attribute data, n observations on m attributes
    threshold  : float
                 distance band
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    alpha      : float
                 distance decay parameter for weight (default -1.0)
                 if alpha is positive the weights will not decline with
                 distance.
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with continuous weights

    Examples
    --------

    inverse distance weights

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> wid=threshold_continuousW_from_array(points,11.2)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> wid.weights[0]
    [0.10000000000000001, 0.089442719099991588]

    gravity weights

    >>> wid2=threshold_continuousW_from_array(points,11.2,alpha=-2.0)
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [2]
    >>> wid2.weights[0]
    [0.01, 0.0079999999999999984]

    """
    if radius is not None:
        # Arc distances requested: wrap the points in a spherical KDTree.
        array = pysal.cg.KDTree(array, distance_metric='Arc', radius=radius)
    return DistanceBand(array, threshold=threshold, p=p, alpha=alpha,
                        binary=False)
+
+
def threshold_continuousW_from_shapefile(shapefile, threshold, p=2,
                                         alpha=-1, idVariable=None, radius=None):
    """
    Threshold distance based continuous weights from a shapefile

    Parameters
    ----------

    shapefile  : string
                 shapefile name with shp suffix
    threshold  : float
                 distance band
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance
    alpha      : float
                 distance decay parameter for weight (default -1.0)
                 if alpha is positive the weights will not decline with
                 distance.
    idVariable : string
                 name of a column in the shapefile's DBF to use for ids
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.

    Returns
    -------

    w         : W instance
                Weights object with continuous weights

    Examples
    --------
    >>> w = threshold_continuousW_from_shapefile(pysal.examples.get_path("columbus.shp"),0.62,idVariable="POLYID")
    >>> w.weights[1]
    [1.6702346893743334, 1.7250729841938093]

    Notes
    -----
    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    """
    data = get_points_array_from_shapefile(shapefile)
    if radius is not None:
        data = pysal.cg.KDTree(data, distance_metric='Arc', radius=radius)
    if idVariable:
        ids = get_ids(shapefile, idVariable)
        w = DistanceBand(data, threshold=threshold, p=p, alpha=alpha,
                         binary=False)
        w.remap_ids(ids)
    else:
        w = threshold_continuousW_from_array(data, threshold, p=p,
                                             alpha=alpha)
    w.set_shapefile(shapefile, idVariable)
    return w
+
+
+# Kernel Weights
+
+
def kernelW(points, k=2, function='triangular', fixed=True,
        radius=None, diagonal=False):
    """
    Kernel based weights

    Parameters
    ----------

    points      : array (n,k)
                  n observations on k characteristics used to measure
                  distances between the n objects
    k           : int
                  number of nearest neighbors used to determine the
                  bandwidth; the bandwidth is taken as
                  :math:`h_i=max(dknn) \\forall i` where :math:`dknn` is
                  the vector of distances to each observation's kth
                  nearest neighbor
    function    : string {'triangular','uniform','quadratic','epanechnikov',
                  'quartic','bisquare','gaussian'}
                  kernel function K applied to
                  :math:`z_{i,j} = d_{i,j}/h_i`, e.g. triangular
                  :math:`K(z) = (1 - |z|) \\ if |z| \\le 1` or gaussian
                  :math:`K(z) = (2\\pi)^{(-1/2)} exp(-z^2 / 2)`
    fixed       : binary
                  If true then :math:`h_i=h \\forall i`. If false then
                  bandwidth is adaptive across observations.
    radius      : If supplied arc_distances will be calculated
                  based on the given radius. p will be ignored.
    diagonal    : boolean
                  If true, set diagonal weights = 1.0, if false (default)
                  diagonal weights are set to value according to kernel
                  function

    Returns
    -------

    w           : W
                  instance of spatial weights

    Examples
    --------
    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> kw=kernelW(points)
    >>> kw.weights[0]
    [1.0, 0.500000049999995, 0.4409830615267465]
    >>> kw.neighbors[0]
    [0, 1, 3]
    >>> kw.bandwidth
    array([[ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002],
           [ 20.000002]])

    use different k

    >>> kw=kernelW(points,k=3)
    >>> kw.neighbors[0]
    [0, 1, 3, 4]

    Diagonals to 1.0

    >>> kqd = kernelW(points, function='gaussian', diagonal=True)
    >>> kqd.weights[0]
    [1.0, 0.35206533556593145, 0.3412334260702758]

    """
    # Reinterpret the coordinates on a sphere when a radius is supplied.
    if radius is not None:
        points = pysal.cg.KDTree(points, distance_metric='Arc', radius=radius)
    return Kernel(points, k=k, function=function, fixed=fixed,
                  diagonal=diagonal)
+
+
def kernelW_from_shapefile(shapefile, k=2, function='triangular',
        idVariable=None, fixed=True, radius=None, diagonal=False):
    """
    Kernel based weights from a shapefile

    Parameters
    ----------

    shapefile   : string
                  shapefile name with shp suffix
    k           : int
                  number of nearest neighbors used to determine the
                  bandwidth; the bandwidth is taken as
                  :math:`h_i=max(dknn) \\forall i` where :math:`dknn` is
                  the vector of distances to each observation's kth
                  nearest neighbor
    function    : string {'triangular','uniform','quadratic','epanechnikov',
                  'quartic','bisquare','gaussian'}
                  kernel function K applied to
                  :math:`z_{i,j} = d_{i,j}/h_i`, e.g. triangular
                  :math:`K(z) = (1 - |z|) \\ if |z| \\le 1` or gaussian
                  :math:`K(z) = (2\\pi)^{(-1/2)} exp(-z^2 / 2)`
    idVariable  : string
                  name of a column in the shapefile's DBF to use for ids
    fixed       : binary
                  If true then :math:`h_i=h \\forall i`. If false then
                  bandwidth is adaptive across observations.
    radius      : If supplied arc_distances will be calculated
                  based on the given radius. p will be ignored.
    diagonal    : boolean
                  If true, set diagonal weights = 1.0, if false (default)
                  diagonal weights are set to value according to kernel
                  function

    Returns
    -------

    w           : W
                  instance of spatial weights

    Examples
    --------
    >>> kwd = pysal.kernelW_from_shapefile(pysal.examples.get_path("columbus.shp"),idVariable='POLYID', function = 'gaussian', diagonal = True)
    >>> set(kwd.neighbors[1]) == set([4, 2, 3, 1])
    True
    >>> set(kwd.weights[1]) == set( [0.2436835517263174, 0.29090631630909874, 0.29671172124745776, 1.0])
    True

    Notes
    -----
    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    """
    points = get_points_array_from_shapefile(shapefile)
    # Reinterpret the coordinates on a sphere when a radius is supplied.
    if radius is not None:
        points = pysal.cg.KDTree(points, distance_metric='Arc', radius=radius)
    if not idVariable:
        return kernelW(points, k=k, function=function, fixed=fixed,
                       diagonal=diagonal)
    ids = get_ids(shapefile, idVariable)
    return Kernel(points, k=k, function=function, fixed=fixed, ids=ids,
                  diagonal=diagonal)
+
+
def adaptive_kernelW(points, bandwidths=None, k=2, function='triangular',
        radius=None, diagonal=False):
    """
    Kernel weights with adaptive bandwidths

    Parameters
    ----------

    points      : array (n,k)
                  n observations on k characteristics used to measure
                  distances between the n objects
    bandwidths  : float or array-like (optional)
                  the bandwidth :math:`h_i` for the kernel;
                  if not specified, k is used to determine the adaptive
                  bandwidth
    k           : int
                  number of nearest neighbors used to determine the
                  bandwidth; for adaptive bandwidths,
                  :math:`h_i=dknn_i`, the distance to observation i's
                  kth nearest neighbor
    function    : string {'triangular','uniform','quadratic','quartic','gaussian'}
                  kernel function K applied to
                  :math:`z_{i,j} = d_{i,j}/h_i`, e.g. triangular
                  :math:`K(z) = (1 - |z|) \\ if |z| \\le 1` or gaussian
                  :math:`K(z) = (2\\pi)^{(-1/2)} exp(-z^2 / 2)`
    radius      : If supplied arc_distances will be calculated
                  based on the given radius. p will be ignored.
    diagonal    : boolean
                  If true, set diagonal weights = 1.0, if false (default)
                  diagonal weights are set to value according to kernel
                  function

    Returns
    -------

    w           : W
                  instance of spatial weights

    Examples
    --------

    User specified bandwidths

    >>> points=[(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> bw=[25.0,15.0,25.0,16.0,14.5,25.0]
    >>> kwa=adaptive_kernelW(points,bandwidths=bw)
    >>> kwa.weights[0]
    [1.0, 0.6, 0.552786404500042, 0.10557280900008403]
    >>> kwa.neighbors[0]
    [0, 1, 3, 4]

    Endogenous adaptive bandwidths

    >>> kwea=adaptive_kernelW(points)
    >>> kwea.weights[0]
    [1.0, 0.10557289844279438, 9.99999900663795e-08]
    >>> kwea.neighbors[0]
    [0, 1, 3]

    with diagonal

    >>> kweagd = pysal.adaptive_kernelW(points, function='gaussian', diagonal=True)
    >>> kweagd.weights[0]
    [1.0, 0.2674190291577696, 0.2419707487162134]

    """
    # Reinterpret the coordinates on a sphere when a radius is supplied.
    if radius is not None:
        points = pysal.cg.KDTree(points, distance_metric='Arc', radius=radius)
    return Kernel(points, k=k, function=function, fixed=False,
                  bandwidth=bandwidths, diagonal=diagonal)
+
+
def adaptive_kernelW_from_shapefile(shapefile, bandwidths=None, k=2, function='triangular',
                                    idVariable=None, radius=None,
                                    diagonal = False):
    """
    Kernel weights with adaptive bandwidths from a shapefile

    Parameters
    ----------

    shapefile   : string
                  shapefile name with shp suffix
    bandwidths  : float or array-like (optional)
                  the bandwidth :math:`h_i` for the kernel;
                  if not specified, k is used to determine the adaptive
                  bandwidth
    k           : int
                  number of nearest neighbors used to determine the
                  bandwidth; for adaptive bandwidths,
                  :math:`h_i=dknn_i`, the distance to observation i's
                  kth nearest neighbor
    function    : string {'triangular','uniform','quadratic','quartic','gaussian'}
                  kernel function K applied to
                  :math:`z_{i,j} = d_{i,j}/h_i`, e.g. triangular
                  :math:`K(z) = (1 - |z|) \\ if |z| \\le 1` or gaussian
                  :math:`K(z) = (2\\pi)^{(-1/2)} exp(-z^2 / 2)`
    idVariable  : string
                  name of a column in the shapefile's DBF to use for ids
    radius      : If supplied arc_distances will be calculated
                  based on the given radius. p will be ignored.
    diagonal    : boolean
                  If true, set diagonal weights = 1.0, if false (default)
                  diagonal weights are set to value according to kernel
                  function

    Returns
    -------

    w           : W
                  instance of spatial weights

    Examples
    --------
    >>> kwa = pysal.adaptive_kernelW_from_shapefile(pysal.examples.get_path("columbus.shp"), function='gaussian')
    >>> kwa.neighbors[0]
    [0, 2, 1]
    >>> kwa.weights[0]
    [0.3989422804014327, 0.24966013701844503, 0.2419707487162134]

    Notes
    -----
    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    """
    points = get_points_array_from_shapefile(shapefile)
    # Reinterpret the coordinates on a sphere when a radius is supplied.
    if radius is not None:
        points = pysal.cg.KDTree(points, distance_metric='Arc', radius=radius)
    if not idVariable:
        return adaptive_kernelW(points, bandwidths=bandwidths, k=k,
                                function=function, diagonal=diagonal)
    ids = get_ids(shapefile, idVariable)
    return Kernel(points, k=k, function=function, fixed=False,
                  bandwidth=bandwidths, ids=ids, diagonal=diagonal)
+
+
def min_threshold_dist_from_shapefile(shapefile, radius=None, p=2):
    """
    Minimum threshold distance for the observations in a shapefile

    Parameters
    ----------

    shapefile  : string
                 shapefile name with shp suffix
    radius     : If supplied arc_distances will be calculated
                 based on the given radius. p will be ignored.
    p          : float
                 Minkowski p-norm distance metric parameter:
                 1<=p<=infinity
                 2: Euclidean distance
                 1: Manhattan distance

    Returns
    -------
    d            : float
                   minimum nearest neighbor distance between the n observations

    Examples
    --------
    >>> md = min_threshold_dist_from_shapefile(pysal.examples.get_path("columbus.shp"))
    >>> md
    0.61886415807685413
    >>> min_threshold_dist_from_shapefile(pysal.examples.get_path("stl_hom.shp"), pysal.cg.sphere.RADIUS_EARTH_MILES)
    31.846942936393717

    Notes
    -----
    Supports polygon or point shapefiles. For polygon shapefiles, distance is
    based on polygon centroids. Distances are defined using coordinates in
    shapefile which are assumed to be projected and not geographical
    coordinates.

    """
    points = get_points_array_from_shapefile(shapefile)
    # Reinterpret the coordinates on a sphere when a radius is supplied.
    if radius is not None:
        points = pysal.cg.KDTree(points, distance_metric='Arc', radius=radius)
    return min_threshold_distance(points, p)
+
+
def build_lattice_shapefile(nrows, ncols, outFileName):
    """
    Build a lattice shapefile with nrows rows and ncols cols

    Writes one unit-square polygon per lattice cell to ``outFileName``
    and a companion DBF with a single integer 'ID' field numbering the
    cells in row order.

    Parameters
    ----------

    nrows       : int
                  Number of rows
    ncols       : int
                  Number of cols
    outFileName : str
                  shapefile name with shp suffix

    Returns
    -------
    None

    Raises
    ------
    ValueError
        if outFileName does not end with '.shp'
    """
    if not outFileName.endswith('.shp'):
        raise ValueError("outFileName must end with .shp")
    o = pysal.open(outFileName, 'w')
    # Swap only the final ".shp" suffix (validated above).  The previous
    # split(".")[0] truncated any path containing other dots, e.g.
    # "./out/v1.2/grid.shp" became ".dbf".
    dbf_name = outFileName[:-4] + ".dbf"
    d = pysal.open(dbf_name, 'w')
    d.header = [ 'ID' ]
    d.field_spec = [ ('N', 8, 0) ]
    c = 0
    for i in xrange(nrows):
        for j in xrange(ncols):
            # Corners of cell (i, j): lower-left, upper-left,
            # upper-right, lower-right, closing back at lower-left.
            ll = i, j
            ul = i, j + 1
            ur = i + 1, j + 1
            lr = i + 1, j
            o.write(pysal.cg.Polygon([ll, ul, ur, lr, ll]))
            d.write([c])
            c += 1
    d.close()
    o.close()
+
+def _test():
+    import doctest
+    # the following line could be used to define an alternative to the '<BLANKLINE>' flag
+    #doctest.BLANKLINE_MARKER = 'something better than <BLANKLINE>'
+    start_suppress = np.get_printoptions()['suppress']
+    np.set_printoptions(suppress=True)
+    doctest.testmod()
+    np.set_printoptions(suppress=start_suppress)    
+
+if __name__ == '__main__':
+    _test()
+
+
diff --git a/pysal/weights/util.py b/pysal/weights/util.py
new file mode 100644
index 0000000..572df3f
--- /dev/null
+++ b/pysal/weights/util.py
@@ -0,0 +1,1197 @@
+import pysal
+from pysal.common import *
+import pysal.weights
+import numpy as np
+from scipy import sparse, float32
+import scipy.spatial
+import os
+import operator
+import scipy
+
+__all__ = ['lat2W', 'block_weights', 'comb', 'order', 'higher_order',
+           'shimbel', 'remap_ids', 'full2W', 'full', 'WSP2W',
+           'insert_diagonal', 'get_ids', 'get_points_array_from_shapefile',
+           'min_threshold_distance', 'lat2SW', 'w_local_cluster',
+           'higher_order_sp', 'hexLat2W', 'regime_weights']
+
+
+def hexLat2W(nrows=5, ncols=5):
+    """
+    Create a W object for a hexagonal lattice.
+
+    Parameters
+    ----------
+
+    nrows   : int
+              number of rows
+    ncols   : int
+              number of columns
+
+    Returns
+    -------
+
+    w : W
+        instance of spatial weights class W
+
+    Notes
+    -----
+
+    Observations are row ordered: first k observations are in row 0, next k in row 1, and so on.
+
+    Construction is based on shifting every other column of a regular lattice
+    down 1/2 of a cell.
+
+    Examples
+    --------
+
+    >>> import pysal as ps
+    >>> w = ps.lat2W()
+    >>> w.neighbors[1]
+    [0, 6, 2]
+    >>> w.neighbors[21]
+    [16, 20, 22]
+    >>> wh = ps.hexLat2W()
+    >>> wh.neighbors[1]
+    [0, 6, 2, 5, 7]
+    >>> wh.neighbors[21]
+    [16, 20, 22]
+    >>>
+    """
+
+    if nrows == 1 or ncols == 1:
+        print "Hexagon lattice requires at least 2 rows and columns"
+        print "Returning a linear contiguity structure"
+        return lat2W(nrows, ncols)
+
+    n = nrows * ncols
+    rid = [i / ncols for i in xrange(n)]
+    cid = [i % ncols for i in xrange(n)]
+    r1 = nrows - 1
+    c1 = ncols - 1
+
+    w = lat2W(nrows, ncols).neighbors
+    for i in xrange(n):
+        odd = cid[i] % 2
+        if odd:
+            if rid[i] < r1:  # odd col index above last row
+                # new sw neighbor
+                if cid[i] > 0:
+                    j = i + ncols - 1
+                    w[i] = w.get(i, []) + [j]
+                # new se neighbor
+                if cid[i] < c1:
+                    j = i + ncols + 1
+                    w[i] = w.get(i, []) + [j]
+
+        else:  # even col
+            # nw
+            jnw = [i - ncols - 1]
+            # ne
+            jne = [i - ncols + 1]
+            if rid[i] > 0:
+                w[i]
+                if cid[i] == 0:
+                    w[i] = w.get(i, []) + jne
+                elif cid[i] == c1:
+                    w[i] = w.get(i, []) + jnw
+                else:
+                    w[i] = w.get(i, []) + jne
+                    w[i] = w.get(i, []) + jnw
+
+
+    return pysal.weights.W(w)
+
+
def lat2W(nrows=5, ncols=5, rook=True, id_type='int'):
    """
    Create a W object for a regular lattice.

    Parameters
    ----------

    nrows   : int
              number of rows
    ncols   : int
              number of columns
    rook    : boolean
              type of contiguity. Default is rook. For queen, rook =False
    id_type : string
              string defining the type of IDs to use in the final W object;
              options are 'int' (0, 1, 2 ...; default), 'float' (0.0,
              1.0, 2.0, ...) and 'string' ('id0', 'id1', 'id2', ...)

    Returns
    -------

    w : W
        instance of spatial weights class W

    Notes
    -----

    Observations are row ordered: first k observations are in row 0, next k in row 1, and so on.

    Examples
    --------

    >>> from pysal import lat2W
    >>> w9 = lat2W(3,3)
    >>> "%.3f"%w9.pct_nonzero
    '0.296'
    >>> w9[0]
    {1: 1.0, 3: 1.0}
    >>> w9[3]
    {0: 1.0, 4: 1.0, 6: 1.0}
    >>>
    """
    n = nrows * ncols
    r1 = nrows - 1
    c1 = ncols - 1
    rid = [i / ncols for i in xrange(n)]  # row id of each cell
    cid = [i % ncols for i in xrange(n)]  # column id of each cell
    w = {}
    r = below = 0
    # Link each cell to the cell below it and to its right; symmetric
    # entries are added as we go, so the last cell needs no iteration.
    for i in xrange(n - 1):
        if rid[i] < r1:
            below = rid[i] + 1
            r = below * ncols + cid[i]
            w[i] = w.get(i, []) + [r]
            w[r] = w.get(r, []) + [i]
        if cid[i] < c1:
            right = cid[i] + 1
            c = rid[i] * ncols + right
            w[i] = w.get(i, []) + [c]
            w[c] = w.get(c, []) + [i]
        if not rook:
            # southeast bishop
            if cid[i] < c1 and rid[i] < r1:
                r = (rid[i] + 1) * ncols + 1 + cid[i]
                w[i] = w.get(i, []) + [r]
                w[r] = w.get(r, []) + [i]
            # southwest bishop
            if cid[i] > 0 and rid[i] < r1:
                r = (rid[i] + 1) * ncols - 1 + cid[i]
                w[i] = w.get(i, []) + [r]
                w[r] = w.get(r, []) + [i]

    # (An unused local `neighbors = {}` from the original was removed.)
    weights = {}
    for key in w:
        weights[key] = [1.] * len(w[key])
    ids = range(n)
    if id_type == 'string':
        ids = ['id' + str(i) for i in ids]
    elif id_type == 'float':
        ids = [i * 1. for i in ids]
    if id_type == 'string' or id_type == 'float':
        # Re-key both the adjacency and the weights with converted ids.
        id_dict = dict(zip(range(n), ids))
        alt_w = {}
        alt_weights = {}
        for i in w:
            values = [id_dict[j] for j in w[i]]
            key = id_dict[i]
            alt_w[key] = values
            alt_weights[key] = weights[i]
        w = alt_w
        weights = alt_weights
    return pysal.weights.W(w, weights, ids=ids, id_order=ids[:])
+
def regime_weights(regimes):
    """
    Construct spatial weights for regime neighbors.

    Deprecated alias for :func:`block_weights`: observations sharing a
    regime id are treated as mutual neighbors (e.g. all counties in the
    same state in a national data set).

    Parameters
    ----------
    regimes : list or array
           ids of which regime an observation belongs to

    Returns
    -------

    W : spatial weights instance

    Examples
    --------

    >>> from pysal import regime_weights
    >>> regimes = ['n','n','s','s','e','e','w','w','e']
    >>> w = regime_weights(regimes)
    PendingDepricationWarning: regime_weights will be renamed to block_weights in PySAL 2.0
    >>> w.neighbors
    {0: [1], 1: [0], 2: [3], 3: [2], 4: [5, 8], 5: [4, 8], 6: [7], 7: [6], 8: [4, 5]}

    Notes
    -----
    regime_weights will be deprecated in PySAL 2.0 and renamed to block_weights.

    """
    # The message text (including its historical spelling) is preserved
    # byte-for-byte: doctests match against this exact output.
    print ("PendingDepricationWarning: regime_weights will be "
           "renamed to block_weights in PySAL 2.0")
    return block_weights(regimes)
+
+
+
def block_weights(regimes):
    """
    Construct spatial weights for regime neighbors.

    Block contiguity structures are relevant when defining neighbor relations
    based on membership in a regime. For example, all counties belonging to
    the same state could be defined as neighbors, in an analysis of all
    counties in the US.

    Parameters
    ----------
    regimes : list or array
           ids of which regime an observation belongs to

    Returns
    -------

    W : spatial weights instance

    Examples
    --------

    >>> from pysal import block_weights
    >>> import numpy as np
    >>> regimes = np.ones(25)
    >>> regimes[range(10,20)] = 2
    >>> regimes[range(21,25)] = 3
    >>> w = block_weights(regimes)
    >>> w.weights[0]
    [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
    >>> w.neighbors[0]
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 20]
    >>> regimes = ['n','n','s','s','e','e','w','w','e']
    >>> w = block_weights(regimes)
    >>> w.neighbors
    {0: [1], 1: [0], 2: [3], 3: [2], 4: [5, 8], 5: [4, 8], 6: [7], 7: [6], 8: [4, 5]}
    """
    regimes = np.array(regimes)
    neighbors = {}
    # Within each regime every member neighbors every other member.
    for regime in np.unique(regimes):
        members = np.nonzero(regimes == regime)[0]
        for member in members:
            neighbors[member] = members[members != member].tolist()
    return pysal.weights.W(neighbors)
+
+
def comb(items, n=None):
    """
    Combinations of size n taken from items

    Parameters
    ----------

    items : sequence
    n     : integer
            size of combinations to take from items

    Returns
    -------

    implicit : generator
               combinations of size n taken from items

    Examples
    --------
    >>> x = range(4)
    >>> for c in comb(x, 2):
    ...     print c
    ...
    [0, 1]
    [0, 2]
    [0, 3]
    [1, 2]
    [1, 3]
    [2, 3]

    """
    # Default: combinations of the full length of the sequence.
    size = len(items) if n is None else n
    for idx in range(len(items)):
        # One-element slice keeps the sequence type (list, str, ...).
        head = items[idx:idx + 1]
        if size == 1:
            yield head
        else:
            # Recurse on the remainder, prefixing each result with head.
            for tail in comb(items[idx + 1:], size - 1):
                yield head + tail
+
+
def order(w, kmax=3):
    """
    Determine the non-redundant order of contiguity up to a specific
    order.

    Parameters
    ----------

    w       : W
              spatial weights object

    kmax    : int
              maximum order of contiguity

    Returns
    -------

    info : dictionary
           observation id is the key, value is a list of contiguity
           orders with a negative 1 in the ith position

    Notes
    -----
    Implements the algorithm in Anselin and Smirnov (1996) [1]_

    Examples
    --------
    >>> from pysal import rook_from_shapefile as rfs
    >>> w = rfs(pysal.examples.get_path('10740.shp'))
    WARNING: there is one disconnected observation (no neighbors)
    Island id:  [163]
    >>> w3 = order(w, kmax = 3)
    >>> w3[1][0:5]
    [1, -1, 1, 2, 1]

    """
    ids = w.id_order
    # Map each id to its position once; the original used repeated
    # list.index() calls, an O(n) lookup inside O(n*k) loops.
    idx = dict((id_, i) for i, id_ in enumerate(ids))
    info = {}
    for id_ in ids:
        # s[j] = contiguity order of observation j relative to id_
        # (-1 marks id_ itself, 0 marks not-yet-reached).
        s = [0] * w.n
        s[idx[id_]] = -1
        for j in w.neighbors[id_]:
            s[idx[j]] = 1
        k = 1
        while k < kmax:
            knext = k + 1
            if s.count(k):
                # get neighbors of order k
                js = [ids[j] for j, val in enumerate(s) if val == k]
                # their first-order neighbors not yet assigned an order
                # become order k+1
                for j in js:
                    for neighbor in w.neighbors[j]:
                        nid = idx[neighbor]
                        if s[nid] == 0:
                            s[nid] = knext
            k = knext
        info[id_] = s
    return info
+
+
def higher_order(w, k=2):
    """
    Contiguity weights object of order k

    Parameters
    ----------

    w     : W
            spatial weights object
    k     : int
            order of contiguity

    Returns
    -------

    implicit : W
               spatial weights object

    Notes
    -----
    Convenience wrapper around higher_order_sp with its default
    shortest-path semantics: i and j are k-order neighbors iff the
    shortest path from i to j has length exactly k.

    Examples
    --------
    >>> from pysal import lat2W, higher_order
    >>> w10 = lat2W(10, 10)
    >>> w10_2 = higher_order(w10, 2)
    >>> w10_2[0]
    {2: 1.0, 11: 1.0, 20: 1.0}
    >>> w5 = lat2W()
    >>> w5[0]
    {1: 1.0, 5: 1.0}
    >>> w5[1]
    {0: 1.0, 2: 1.0, 6: 1.0}
    >>> w5_2 = higher_order(w5,2)
    >>> w5_2[0]
    {10: 1.0, 2: 1.0, 6: 1.0}
    """
    # delegate to the sparse implementation (shortest_path=True,
    # diagonal=False are its defaults)
    wk = higher_order_sp(w, k)
    return wk
+
+
def higher_order_sp(w, k=2, shortest_path=True, diagonal=False):
    """
    Contiguity weights for either a sparse W or pysal.weights.W for order k

    Parameters
    ----------

    w : W instance or scipy.sparse.csr_matrix

    k : int
        order of contiguity

    shortest_path : boolean

                   True: i,j are k-order neighbors if the shortest path for
                   i,j is k

                   False: i,j are k-order neighbors if there is a path from
                   i,j of length k

    diagonal : boolean

                False: remove k-order (i,j) joins when i==j

                True:  keep k-order (i,j) joins when i==j

    Returns
    -------

    wk : [W instance | WSP instance] type matches type of w argument

    Raises
    ------
    TypeError
        if w is neither a pysal W nor a CSR sparse matrix. (Previously a
        message was printed and None returned, silently hiding the error
        from callers.)

    Notes
    -----
    Lower order contiguities are removed.

    Examples
    --------

    >>> import pysal
    >>> w25 = pysal.lat2W(5,5)
    >>> w25.n
    25
    >>> w25[0]
    {1: 1.0, 5: 1.0}
    >>> w25_2 = pysal.weights.util.higher_order_sp(w25, 2)
    >>> w25_2[0]
    {10: 1.0, 2: 1.0, 6: 1.0}
    >>> w25_2 = pysal.weights.util.higher_order_sp(w25, 2, diagonal=True)
    >>> w25_2[0]
    {0: 1.0, 10: 1.0, 2: 1.0, 6: 1.0}
    >>> w25_3 = pysal.weights.util.higher_order_sp(w25, 3)
    >>> w25_3[0]
    {15: 1.0, 3: 1.0, 11: 1.0, 7: 1.0}
    >>> w25_3 = pysal.weights.util.higher_order_sp(w25, 3, shortest_path=False)
    >>> w25_3[0]
    {1: 1.0, 3: 1.0, 5: 1.0, 7: 1.0, 11: 1.0, 15: 1.0}

    """
    id_order = None
    if isinstance(w, pysal.weights.weights.W):
        id_order = w.id_order
        w = w.sparse
    elif not scipy.sparse.isspmatrix_csr(w):
        raise TypeError("Unsupported sparse argument.")

    # nonzero entries of W**k are pairs joined by some path of length k
    wk = w ** k
    rk, ck = wk.nonzero()
    sk = set(zip(rk, ck))

    if shortest_path:
        # drop pairs already connected at any lower order
        for j in range(1, k):
            wj = w ** j
            rj, cj = wj.nonzero()
            sk.difference_update(zip(rj, cj))

    if not diagonal:
        sk = set([(i, j) for i, j in sk if i != j])

    if id_order:
        # rebuild a W keyed by the user's ids
        d = dict([(i, []) for i in id_order])
        for pair in sk:
            i, j = pair
            d[id_order[i]].append(id_order[j])
        return pysal.W(neighbors=d)
    else:
        d = {}
        for pair in sk:
            i, j = pair
            d.setdefault(i, []).append(j)
        return pysal.weights.WSP(pysal.W(neighbors=d).sparse)
+
+
def w_local_cluster(w):
    """
    Local clustering coefficients for each unit as a node in a graph. [ws]_

    Parameters
    ----------

    w   : W
          spatial weights object

    Returns
    -------

    c     : array (w.n,1)
            local clustering coefficients

    Notes
    -----

    The local clustering coefficient :math:`c_i` quantifies how close the
    neighbors of observation :math:`i` are to being a clique:

            .. math::

               c_i = | \{w_{j,k}\} |/ (k_i(k_i - 1)): j,k \in N_i

    where :math:`N_i` is the set of neighbors to :math:`i`, :math:`k_i =
    |N_i|` and :math:`\{w_{j,k}\}` is the set of non-zero elements of the
    weights between pairs in :math:`N_i`.

    References
    ----------

    .. [ws] Watts, D.J. and S.H. Strogatz (1988) "Collective dynamics of 'small-world' networks". Nature, 393: 440-442.

    Examples
    --------

    >>> w = pysal.lat2W(3,3, rook=False)
    >>> w_local_cluster(w)
    array([[ 1.        ],
           [ 0.6       ],
           [ 1.        ],
           [ 0.6       ],
           [ 0.42857143],
           [ 0.6       ],
           [ 1.        ],
           [ 0.6       ],
           [ 1.        ]])

    """

    c = np.zeros((w.n, 1), float)
    # NOTE(review): W exposes a `transform` property; assigning to
    # `transformation` only creates an unused attribute and does NOT
    # binarize the weights. This looks like it was meant to be
    # `w.transform = 'b'` -- confirm upstream. As written, non-binary
    # input weights are used as-is in the sums below.
    w.transformation = 'b'
    for i, id in enumerate(w.id_order):
        ki = max(w.cardinalities[id], 1)  # deal with islands
        Ni = w.neighbors[id]
        # dense sub-matrix of the joins among i's neighbors
        wi = pysal.w_subset(w, Ni).full()[0]
        c[i] = wi.sum() / (ki * (ki - 1))
    return c
+
+
def shimbel(w):
    """
    Find the Shimbel matrix for first order contiguity matrix.

    Parameters
    ----------
    w     : W
            spatial weights object

    Returns
    -------

    info  : dictionary
            observation id is the key; the value is a list holding the
            shortest contiguity order between that observation and every
            other observation (-1 in its own position). Unreachable
            observations keep a 0 entry.

    Notes
    -----
    The original implementation never terminated for disconnected
    graphs; this version stops once a sweep makes no progress, leaving
    0 for unreachable pairs.

    Examples
    --------
    >>> from pysal import lat2W, shimbel
    >>> w5 = lat2W()
    >>> w5_shimbel = shimbel(w5)
    >>> w5_shimbel[0][24]
    8
    >>> w5_shimbel[0][0:4]
    [-1, 1, 2, 3]
    >>>
    """
    info = {}
    ids = w.id_order
    # one id -> position map instead of O(n) list.index per lookup
    idx = dict((id_i, i) for i, id_i in enumerate(ids))
    for id_ in ids:
        s = [0] * w.n
        s[idx[id_]] = -1
        for j in w.neighbors[id_]:
            s[idx[j]] = 1
        k = 1
        while s.count(0):
            knext = k + 1
            changed = False
            # expand the order-k frontier by one step
            for pos, val in enumerate(s):
                if val == k:
                    for neighbor in w.neighbors[ids[pos]]:
                        nid = idx[neighbor]
                        if s[nid] == 0:
                            s[nid] = knext
                            changed = True
            if not changed:
                break  # remaining zeros are unreachable (disconnected graph)
            k = knext
        info[id_] = s
    return info
+
+
def full(w):
    """
    Generate a full numpy array

    Parameters
    ----------
    w        : W
               spatial weights object

    Returns
    -------

    implicit : tuple
               first element being the full numpy array and second element
               keys being the ids associated with each row in the array.

    Examples
    --------
    >>> from pysal import W, full
    >>> neighbors = {'first':['second'],'second':['first','third'],'third':['second']}
    >>> weights = {'first':[1],'second':[1,1],'third':[1]}
    >>> w = W(neighbors, weights)
    >>> wf, ids = full(w)
    >>> wf
    array([[ 0.,  1.,  0.],
           [ 1.,  0.,  1.],
           [ 0.,  1.,  0.]])
    >>> ids
    ['first', 'second', 'third']
    """
    wfull = np.zeros([w.n, w.n], dtype=float)
    keys = list(w.neighbors.keys())
    if w.id_order:
        keys = w.id_order
    # id -> column map; the previous keys.index(j) per edge was O(n) each
    col = dict((key, i) for i, key in enumerate(keys))
    for i, key in enumerate(keys):
        for j, wij in zip(w.neighbors[key], w.weights[key]):
            wfull[i, col[j]] = wij
    return (wfull, keys)
+
+
def full2W(m, ids=None):
    '''
    Create a PySAL W object from a full array
    ...

    Parameters
    ----------
    m       : array
              nxn array with the full weights matrix
    ids     : list
              User ids assumed to be aligned with m

    Returns
    -------
    w       : W
              PySAL weights object

    Examples
    --------
    >>> import pysal as ps
    >>> import numpy as np
    >>> a = np.zeros((4, 4))
    >>> for i in range(len(a)):
    ...     for j in range(len(a[i])):
    ...         if i!=j:
    ...             a[i, j] = np.random.random(1)
    >>> w = ps.weights.util.full2W(a)
    >>> w.full()[0] == a
    array([[ True,  True,  True,  True],
           [ True,  True,  True,  True],
           [ True,  True,  True,  True],
           [ True,  True,  True,  True]], dtype=bool)
    >>> ids = ['myID0', 'myID1', 'myID2', 'myID3']
    >>> w = ps.weights.util.full2W(a, ids=ids)
    >>> w.full()[0] == a
    array([[ True,  True,  True,  True],
           [ True,  True,  True,  True],
           [ True,  True,  True,  True],
           [ True,  True,  True,  True]], dtype=bool)
    '''
    if m.shape[0] != m.shape[1]:
        raise ValueError('Your array is not square')
    neighbors, weights = {}, {}
    for row_i in range(m.shape[0]):
        row = m[row_i]
        # key each entry by the user id when one was supplied
        key = ids[row_i] if ids else row_i
        nz = row.nonzero()[0]
        weights[key] = list(row[nz])
        if ids:
            neighbors[key] = [ids[j] for j in nz]
        else:
            neighbors[key] = list(nz)
    return pysal.W(neighbors, weights, id_order=ids)
+
+
def WSP2W(wsp, silent_island_warning=False):
    """
    Convert a pysal WSP object (thin weights matrix) to a pysal W object.

    Parameters
    ----------
    wsp                     : WSP
                              PySAL sparse weights object
    silent_island_warning   : boolean
                              Switch to turn off (default on) print statements
                              for every observation with islands

    Returns
    -------
    w       : W
              PySAL weights object

    Examples
    --------
    >>> import pysal
    >>> sp = pysal.weights.lat2SW(2, 5)
    >>> wsp = pysal.weights.WSP(sp)
    >>> wsp.n
    10
    >>> w = pysal.weights.WSP2W(wsp)
    >>> w.n
    10
    >>> print w.full()[0][0]
    [ 0.  1.  0.  0.  0.  1.  0.  0.  0.  0.]

    """
    wsp.sparse  # touch the property so the CSR form is built and cached
    data = wsp.sparse.data
    indptr = wsp.sparse.indptr
    id_order = wsp.id_order
    if id_order:
        # translate column indices into the user's ids
        indices = [id_order[i] for i in wsp.sparse.indices]
    else:
        id_order = range(wsp.n)
        indices = wsp.sparse.indices
    neighbors, weights = {}, {}
    # slice each CSR row into its neighbor/weight lists
    for i in range(wsp.n):
        oid = id_order[i]
        lo, hi = indptr[i], indptr[i + 1]
        neighbors[oid] = indices[lo:hi]
        weights[oid] = data[lo:hi]
    ids = copy.copy(wsp.id_order)
    w = pysal.W(neighbors, weights, ids,
                silent_island_warning=silent_island_warning)
    # reuse the already-built sparse form instead of rebuilding it
    w._sparse = copy.deepcopy(wsp.sparse)
    w._cache['sparse'] = w._sparse
    return w
+
+
def insert_diagonal(w, diagonal=1.0, wsp=False):
    """
    Returns a new weights object with values inserted along the main diagonal.

    Parameters
    ----------
    w        : W
               Spatial weights object

    diagonal : float, int or array
               Defines the value(s) to which the weights matrix diagonal should
               be set. If a constant is passed then each element along the
               diagonal will get this value (default is 1.0). An array of length
               w.n can be passed to set explicit values to each element along
               the diagonal (assumed to be in the same order as w.id_order).

    wsp      : boolean
               If True return a thin weights object of the type WSP, if False
               return the standard W object.

    Returns
    -------
    w        : W
               Spatial weights object

    Examples
    --------
    >>> import pysal
    >>> import numpy as np
    >>> w = pysal.lat2W(5, 5, id_type='string')
    >>> w_const = pysal.weights.insert_diagonal(w)
    >>> w['id0']
    {'id5': 1.0, 'id1': 1.0}
    >>> w_const['id0']
    {'id5': 1.0, 'id0': 1.0, 'id1': 1.0}
    >>> diag = np.arange(100, 125)
    >>> w_var = pysal.weights.insert_diagonal(w, diag)
    >>> w_var['id0']
    {'id5': 1.0, 'id0': 100.0, 'id1': 1.0}

    """
    # numbers.Number replaces operator.isNumberType, which was deprecated
    # in Python 2.6 and removed in Python 3
    import numbers
    w_new = copy.deepcopy(w.sparse)
    w_new = w_new.tolil()  # LIL format allows cheap diagonal assignment
    if isinstance(diagonal, np.ndarray):
        if w.n != diagonal.shape[0]:
            raise Exception("shape of w and diagonal do not match")
        w_new.setdiag(diagonal)
    elif isinstance(diagonal, numbers.Number):
        w_new.setdiag([diagonal] * w.n)
    else:
        raise Exception("Invalid value passed to diagonal")
    w_out = pysal.weights.WSP(w_new, copy.copy(w.id_order))
    if wsp:
        return w_out
    else:
        return WSP2W(w_out)
+
+
def remap_ids(w, old2new, id_order=[]):
    """
    Remaps the IDs in a spatial weights object

    Parameters
    ----------
    w        : W
               Spatial weights object

    old2new  : dictionary
               Dictionary where the keys are the IDs in w (i.e. "old IDs") and
               the values are the IDs to replace them (i.e. "new IDs")

    id_order : list
               An ordered list of new IDs, which defines the order of observations when
               iterating over W. If not set then the id_order in w will be
               used.

    Returns
    -------

    implicit : W
               Spatial weights object with new IDs

    Examples
    --------
    >>> from pysal import lat2W, remap_ids
    >>> w = lat2W(3,2)
    >>> w.id_order
    [0, 1, 2, 3, 4, 5]
    >>> w.neighbors[0]
    [2, 1]
    >>> old_to_new = {0:'a', 1:'b', 2:'c', 3:'d', 4:'e', 5:'f'}
    >>> w_new = remap_ids(w, old_to_new)
    >>> w_new.id_order
    ['a', 'b', 'c', 'd', 'e', 'f']
    >>> w_new.neighbors['a']
    ['c', 'b']

    """
    if not isinstance(w, pysal.weights.W):
        raise Exception("w must be a spatial weights object")
    new_neigh = {}
    new_weights = {}
    # translate every key and every neighbor list through old2new
    for old_id in w.neighbors:
        new_id = old2new[old_id]
        new_neigh[new_id] = [old2new[j] for j in w.neighbors[old_id]]
        new_weights[new_id] = copy.copy(w.weights[old_id])
    # fall back to w's own ordering (translated) when none was given
    if not id_order and w.id_order:
        id_order = [old2new[i] for i in w.id_order]
    if id_order:
        return pysal.weights.W(new_neigh, new_weights, id_order)
    return pysal.weights.W(new_neigh, new_weights)
+
+
def get_ids(shapefile, idVariable):
    """
    Gets the IDs from the DBF file that moves with a given shape file

    Parameters
    ----------
    shapefile    : string
                   name of a shape file including suffix
    idVariable   : string
                   name of a column in the shapefile's DBF to use for ids

    Returns
    -------
    ids          : list
                   a list of IDs

    Raises
    ------
    IOError
        if the DBF companion of the shapefile cannot be opened
    KeyError
        if idVariable is not a column of the DBF

    Examples
    --------
    >>> from pysal.weights.util import get_ids
    >>> polyids = get_ids(pysal.examples.get_path("columbus.shp"), "POLYID")
    >>> polyids[:5]
    [1, 2, 3, 4, 5]
    """

    dbname = os.path.splitext(shapefile)[0] + '.dbf'
    try:
        db = pysal.open(dbname)
    except IOError:
        msg = 'The shapefile "%s" appears to be missing its DBF file. The DBF file "%s" could not be found.' % (
            shapefile, dbname)
        raise IOError(msg)
    # close the DBF on every path; the original leaked the handle when
    # the column lookup failed
    try:
        return db.by_col[idVariable]
    except AttributeError:
        msg = 'The variable "%s" was not found in the DBF file. The DBF contains the following variables: %s.' % (
            idVariable, ','.join(db.header))
        raise KeyError(msg)
    finally:
        db.close()
+
+
def get_points_array_from_shapefile(shapefile):
    """
    Gets a data array of x and y coordinates from a given shape file

    Parameters
    ----------
    shapefile     : string
                    name of a shape file including suffix

    Returns
    -------
    points        : array (n, 2)
                    a data array of x and y coordinates

    Raises
    ------
    ValueError
        if the shapefile contains neither points nor polygons (the
        original raised a confusing NameError in this case)

    Notes
    -----
    If the given shape file includes polygons,
    this function returns x and y coordinates of the polygons' centroids

    Examples
    --------
    Point shapefile

    >>> from pysal.weights.util import get_points_array_from_shapefile
    >>> xy = get_points_array_from_shapefile(pysal.examples.get_path('juvenile.shp'))
    >>> xy[:3]
    array([[ 94.,  93.],
           [ 80.,  95.],
           [ 79.,  90.]])

    Polygon shapefile

    >>> xy = get_points_array_from_shapefile(pysal.examples.get_path('columbus.shp'))
    >>> xy[:3]
    array([[  8.82721847,  14.36907602],
           [  8.33265837,  14.03162401],
           [  9.01226541,  13.81971908]])
    """

    f = pysal.open(shapefile)
    # ensure the file is closed even if reading raises
    try:
        if f.type.__name__ == 'Polygon':
            data = np.array([shape.centroid for shape in f])
        elif f.type.__name__ == 'Point':
            data = np.array([shape for shape in f])
        else:
            raise ValueError(
                'Expected a point or polygon shapefile, got: %s' % f.type.__name__)
    finally:
        f.close()
    return data
+
+
def min_threshold_distance(data, p=2):
    """
    Get the maximum nearest neighbor distance

    Parameters
    ----------

    data    : array (n,k) or KDTree where KDtree.data is array (n,k)
              n observations on k attributes
    p       : float
              Minkowski p-norm distance metric parameter:
              1<=p<=infinity
              2: Euclidean distance
              1: Manhattan distance

    Returns
    -------
    nnd    : float
             maximum nearest neighbor distance between the n observations,
             i.e. the smallest threshold guaranteeing every observation
             at least one neighbor

    Examples
    --------
    >>> from pysal.weights.util import min_threshold_distance
    >>> import numpy as np
    >>> x, y = np.indices((5, 5))
    >>> x.shape = (25, 1)
    >>> y.shape = (25, 1)
    >>> data = np.hstack([x, y])
    >>> min_threshold_distance(data)
    1.0

    """
    if isinstance(data, scipy.spatial.KDTree):
        tree = data
        points = tree.data
    else:
        points = data
        tree = KDTree(points)
    # k=2: the nearest "neighbor" of each point is the point itself, so the
    # second column holds the true nearest-neighbor distances
    distances = tree.query(points, k=2, p=p)[0]
    return distances.max(axis=0)[1]
+
+
def lat2SW(nrows=3, ncols=5, criterion="rook", row_st=False):
    """
    Create a sparse W matrix for a regular lattice.

    Parameters
    ----------

    nrows     : int
                number of rows
    ncols     : int
                number of columns
    criterion : string
                "rook", "queen", or "bishop"; type of contiguity.
                Default is rook.
    row_st    : boolean
                If True, the created sparse W object is row-standardized so
                every row sums up to one. Defaults to False.

    Returns
    -------

    w : scipy.sparse.dia_matrix
        instance of a scipy sparse matrix

    Notes
    -----

    Observations are row ordered: first k observations are in row 0, next k in row 1, and so on.
    This method directly creates the W matrix using the structure of the contiguity type:
    only the lower bands are built explicitly and adding the transpose symmetrizes the result.

    Examples
    --------

    >>> from pysal import weights
    >>> w9 = weights.lat2SW(3,3)
    >>> w9[0,1]
    1
    >>> w9[3,6]
    1
    >>> w9r = weights.lat2SW(3,3, row_st=True)
    >>> w9r[3,6]
    0.33333333333333331
    """
    n = nrows * ncols
    diagonals = []
    offsets = []
    if criterion in ("rook", "queen"):
        # horizontal neighbors; zero the entries that would wrap from the
        # end of one row to the start of the next
        d = np.ones((1, n))
        for i in range(ncols - 1, n, ncols):
            d[0, i] = 0
        diagonals.append(d)
        offsets.append(-1)

        # vertical neighbors
        d = np.ones((1, n))
        diagonals.append(d)
        offsets.append(-ncols)

    if criterion in ("queen", "bishop"):
        # diagonal neighbors, with row wrap-around removed
        d = np.ones((1, n))
        for i in range(0, n, ncols):
            d[0, i] = 0
        diagonals.append(d)
        offsets.append(-(ncols - 1))

        d = np.ones((1, n))
        for i in range(ncols - 1, n, ncols):
            d[0, i] = 0
        diagonals.append(d)
        offsets.append(-(ncols + 1))
    data = np.concatenate(diagonals)
    offsets = np.array(offsets)
    m = sparse.dia_matrix((data, offsets), shape=(n, n), dtype=np.int8)
    m = m + m.T  # symmetrize: contiguity runs both ways
    if row_st:
        m = sparse.spdiags(1. / m.sum(1).T, 0, *m.shape) * m
    return m
+
+
def write_gal(file, k=10):
    """
    Write a GAL file for a k x k rook lattice.

    Parameters
    ----------
    file : string
           path of the output file
    k    : int
           lattice dimension; k*k observations are written

    Notes
    -----
    Fixes two bugs in the original: the left neighbor was computed as
    ``i - i`` (always 0) instead of ``i - 1``, and horizontal neighbors
    wrapped across row boundaries.
    """
    n = k * k
    with open(file, 'w') as f:
        f.write("0 %d" % n)
        for i in range(n):
            col = i % k
            neighs = []
            if col > 0:          # left neighbor (was 'i - i', i.e. always 0)
                neighs.append(i - 1)
            if col < k - 1:      # right neighbor, without row wrap-around
                neighs.append(i + 1)
            if i - k >= 0:       # neighbor above
                neighs.append(i - k)
            if i + k < n:        # neighbor below
                neighs.append(i + k)
            f.write("\n%d %d\n" % (i, len(neighs)))
            f.write(" ".join(map(str, neighs)))
+
if __name__ == "__main__":
    from pysal import lat2W

    # Smoke tests: the direct sparse constructor lat2SW must agree with the
    # dense lattice builder lat2W for rook and queen contiguity, on both
    # square and rectangular lattices.
    assert (lat2W(5, 5).sparse.todense() == lat2SW(5, 5).todense()).all()
    assert (lat2W(5, 3).sparse.todense() == lat2SW(5, 3).todense()).all()
    assert (lat2W(5, 3, rook=False).sparse.todense() == lat2SW(5, 3,
                                                               'queen').todense()).all()
    assert (lat2W(50, 50, rook=False).sparse.todense() == lat2SW(50,
                                                                 50, 'queen').todense()).all()
diff --git a/pysal/weights/weights.py b/pysal/weights/weights.py
new file mode 100644
index 0000000..4a0237d
--- /dev/null
+++ b/pysal/weights/weights.py
@@ -0,0 +1,1066 @@
+__all__ = ['W', 'WSP']
+__author__ = "Sergio J. Rey <srey at asu.edu> "
+
+import pysal
+import math
+import numpy as np
+import scipy.sparse
+from os.path import basename as BASENAME
+from pysal.weights import util
+
+
+class W(object):
+    """
+    Spatial weights
+
+    Parameters
+    ----------
+    neighbors       : dictionary
+                      key is region ID, value is a list of neighbor IDS
+                      Example:  {'a':['b'],'b':['a','c'],'c':['b']}
+    weights = None  : dictionary
+                      key is region ID, value is a list of edge weights
+                      If not supplied all edge weights are assumed to have a weight of 1.
+                      Example: {'a':[0.5],'b':[0.5,1.5],'c':[1.5]}
+    id_order = None : list
+                      An ordered list of ids, defines the order of
+                      observations when iterating over W if not set,
+                      lexicographical ordering is used to iterate and the
+                      id_order_set property will return False.  This can be
+                      set after creation by setting the 'id_order' property.
+    silent_island_warning   : boolean 
+                            By default PySAL will print a warning if the
+                            dataset contains any disconnected observations or
+                            islands. To silence this warning set this
+                            parameter to True.
+    ids = None      : list
+                      values to use for keys of the neighbors and weights dicts
+
+    Attributes
+    ----------
+
+    asymmetries
+    cardinalities
+    diagW2
+    diagWtW
+    diagWtW_WW
+    histogram
+    id2i
+    id_order
+    id_order_set
+    islands
+    max_neighbors
+    mean_neighbors
+    min_neighbors
+    n
+    neighbor_offsets
+    nonzero
+    pct_nonzero
+    s0
+    s1
+    s2
+    s2array
+    sd
+    sparse
+    trcW2
+    trcWtW
+    trcWtW_WW
+    transform
+
+    Examples
+    --------
+    >>> from pysal import W, lat2W
+    >>> neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5], 3: [0, 6, 4], 4: [1, 3, 7, 5], 5: [2, 4, 8], 6: [3, 7], 7: [4, 6, 8], 8: [5, 7]}
+    >>> weights = {0: [1, 1], 1: [1, 1, 1], 2: [1, 1], 3: [1, 1, 1], 4: [1, 1, 1, 1], 5: [1, 1, 1], 6: [1, 1], 7: [1, 1, 1], 8: [1, 1]}
+    >>> w = W(neighbors, weights)
+    >>> "%.3f"%w.pct_nonzero
+    '0.296'
+
+    Read from external gal file
+
+    >>> import pysal
+    >>> w = pysal.open(pysal.examples.get_path("stl.gal")).read()
+    >>> w.n
+    78
+    >>> "%.3f"%w.pct_nonzero
+    '0.065'
+
+    Set weights implicitly
+
+    >>> neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5], 3: [0, 6, 4], 4: [1, 3, 7, 5], 5: [2, 4, 8], 6: [3, 7], 7: [4, 6, 8], 8: [5, 7]}
+    >>> w = W(neighbors)
+    >>> "%.3f"%w.pct_nonzero
+    '0.296'
+    >>> w = lat2W(100, 100)
+    >>> w.trcW2
+    39600.0
+    >>> w.trcWtW
+    39600.0
+    >>> w.transform='r'
+    >>> w.trcW2
+    2530.7222222222586
+    >>> w.trcWtW
+    2533.6666666666774
+
+    Cardinality Histogram
+
+    >>> w=pysal.rook_from_shapefile(pysal.examples.get_path("sacramentot2.shp"))
+    >>> w.histogram
+    [(1, 1), (2, 6), (3, 33), (4, 103), (5, 114), (6, 73), (7, 35), (8, 17), (9, 9), (10, 4), (11, 4), (12, 3), (13, 0), (14, 1)]
+
+    Disconnected observations (islands)
+
+    >>> w = pysal.W({1:[0],0:[1],2:[], 3:[]})
+    WARNING: there are 2 disconnected observations
+    Island ids:  [2, 3]
+
+    """
+
    def __init__(self, neighbors, weights=None, id_order=None,
        silent_island_warning=False, ids=None):
        """
        Build a W from a neighbors dict and an optional weights dict.

        NOTE(review): the ``ids`` parameter is accepted here but never
        referenced in this constructor -- confirm whether it can be
        removed upstream.
        """
        self.silent_island_warning = silent_island_warning
        self.transformations = {}
        self.neighbors = neighbors
        # default to binary weights: every listed neighbor gets weight 1.0
        if not weights:
            weights = {}
            for key in neighbors:
                weights[key] = [1.] * len(neighbors[key])
        self.weights = weights
        self.transformations['O'] = self.weights.copy()  # original weights
        self.transform = 'O'
        # without an explicit id_order, iterate in sorted key order and
        # record that the ordering was not user supplied
        if id_order is None:
            self._id_order = self.neighbors.keys()
            self._id_order.sort()
            self._id_order_set = False
        else:
            self._id_order = id_order
            self._id_order_set = True
        self._reset()  # initialize the derived-property cache
        self._n = len(self.weights)
        # warn about disconnected observations unless silenced
        if self.islands and not self.silent_island_warning:
            ni = len(self.islands)
            if ni == 1:
                print "WARNING: there is one disconnected observation (no neighbors)"
                print "Island id: ", self.islands
            else:
                print "WARNING: there are %d disconnected observations" % ni
                print "Island ids: ", self.islands
+
+    def _reset(self):
+        """
+        Reset properties
+        """
+        self._cache = {}
+
+    @property
+    def sparse(self):
+        """
+        Sparse matrix object
+
+        For any matrix manipulations required for w, w.sparse should be
+        used. This is based on scipy.sparse.
+        """
+        if 'sparse' not in self._cache:
+            self._sparse = self._build_sparse()
+            self._cache['sparse'] = self._sparse
+        return self._sparse
+
+    def _build_sparse(self):
+        """
+        construct the sparse attribute
+        """
+
+        row = []
+        col = []
+        data = []
+        id2i = self.id2i
+        for i, neigh_list in self.neighbor_offsets.iteritems():
+            card = self.cardinalities[i]
+            row.extend([id2i[i]] * card)
+            col.extend(neigh_list)
+            data.extend(self.weights[i])
+        row = np.array(row)
+        col = np.array(col)
+        data = np.array(data)
+        s = scipy.sparse.csr_matrix((data, (row, col)), shape=(self.n, self.n))
+        return s
+
+    @property
+    def id2i(self):
+        """
+        Dictionary where the key is an ID and the value is that ID's
+        index in W.id_order.
+        """
+        if 'id2i' not in self._cache:
+            self._id2i = {}
+            for i, id_i in enumerate(self._id_order):
+                self._id2i[id_i] = i
+            self._id2i = self._id2i
+            self._cache['id2i'] = self._id2i
+        return self._id2i
+
+    @property
+    def n(self):
+        """
+        number of units
+        """
+        if "n" not in self._cache:
+            self._n = len(self.neighbors)
+            self._cache['n'] = self._n
+        return self._n
+
+    @property
+    def s0(self):
+        """
+        float
+
+        .. math::
+
+               s0=\sum_i \sum_j w_{i,j}
+
+        """
+        if 's0' not in self._cache:
+            self._s0 = self.sparse.sum()
+            self._cache['s0'] = self._s0
+        return self._s0
+
+    @property
+    def s1(self):
+        """
+        float
+
+        .. math::
+
+               s1=1/2 \sum_i \sum_j (w_{i,j} + w_{j,i})^2
+
+        """
+        if 's1' not in self._cache:
+            t = self.sparse.transpose()
+            t = t + self.sparse
+            t2 = t.multiply(t)  # element-wise square
+            self._s1 = t2.sum() / 2.
+            self._cache['s1'] = self._s1
+        return self._s1
+
+    @property
+    def s2array(self):
+        """
+        individual elements comprising s2
+
+        See Also
+        --------
+        s2
+
+        """
+        if 's2array' not in self._cache:
+            s = self.sparse
+            self._s2array = np.array(s.sum(1) + s.sum(0).transpose()) ** 2
+            self._cache['s2array'] = self._s2array
+        return self._s2array
+
+    @property
+    def s2(self):
+        """
+        float
+
+        .. math::
+
+                s2=\sum_j (\sum_i w_{i,j} + \sum_i w_{j,i})^2
+
+        """
+        if 's2' not in self._cache:
+            self._s2 = self.s2array.sum()
+            self._cache['s2'] = self._s2
+        return self._s2
+
+    @property
+    def trcW2(self):
+        """
+        Trace of :math:`WW`
+
+        See Also
+        --------
+        diagW2
+
+        """
+        if 'trcW2' not in self._cache:
+            self._trcW2 = self.diagW2.sum()
+            self._cache['trcw2'] = self._trcW2
+        return self._trcW2
+
+    @property
+    def diagW2(self):
+        """
+        Diagonal of :math:`WW` : array
+
+        See Also
+        --------
+        trcW2
+
+        """
+        if 'diagw2' not in self._cache:
+            self._diagW2 = (self.sparse * self.sparse).diagonal()
+            self._cache['diagW2'] = self._diagW2
+        return self._diagW2
+
+    @property
+    def diagWtW(self):
+        """
+        Diagonal of :math:`W^{'}W`  : array
+
+        See Also
+        --------
+        trcWtW
+
+        """
+        if 'diagWtW' not in self._cache:
+            self._diagWtW = (self.sparse.transpose() * self.sparse).diagonal()
+            self._cache['diagWtW'] = self._diagWtW
+        return self._diagWtW
+
+    @property
+    def trcWtW(self):
+        """
+        Trace of :math:`W^{'}W`  : float
+
+        See Also
+        --------
+        diagWtW
+
+        """
+        if 'trcWtW' not in self._cache:
+            self._trcWtW = self.diagWtW.sum()
+            self._cache['trcWtW'] = self._trcWtW
+        return self._trcWtW
+
+    @property
+    def diagWtW_WW(self):
+        """
+        diagonal of :math:`W^{'}W + WW`
+        """
+        if 'diagWtW_WW' not in self._cache:
+            wt = self.sparse.transpose()
+            w = self.sparse
+            self._diagWtW_WW = (wt * w + w * w).diagonal()
+            self._cache['diagWtW_WW'] = self._diagWtW_WW
+        return self._diagWtW_WW
+
+    @property
+    def trcWtW_WW(self):
+        """
+        trace of :math:`W^{'}W + WW`
+        """
+        if 'trcWtW_WW' not in self._cache:
+            self._trcWtW_WW = self.diagWtW_WW.sum()
+            self._cache['trcWtW_WW'] = self._trcWtW_WW
+        return self._trcWtW_WW
+
@property
def pct_nonzero(self):
    """
    percentage of nonzero weights
    """
    if 'pct_nonzero' in self._cache:
        return self._pct_nonzero
    # nnz over n*n cells; float() forces true division on Python 2.
    self._pct_nonzero = self.sparse.nnz / float(self._n ** 2)
    self._cache['pct_nonzero'] = self._pct_nonzero
    return self._pct_nonzero
+
@property
def cardinalities(self):
    """
    number of neighbors for each observation : dict
    """
    if 'cardinalities' not in self._cache:
        self._cardinalities = {ident: len(self.neighbors[ident])
                               for ident in self._id_order}
        self._cache['cardinalities'] = self._cardinalities
    return self._cardinalities
+
@property
def max_neighbors(self):
    """
    largest number of neighbors
    """
    if 'max_neighbors' in self._cache:
        return self._max_neighbors
    self._max_neighbors = max(self.cardinalities.values())
    self._cache['max_neighbors'] = self._max_neighbors
    return self._max_neighbors
+
@property
def mean_neighbors(self):
    """
    average number of neighbors : float
    """
    if 'mean_neighbors' not in self._cache:
        # list() makes this safe under Python 3, where dict.values()
        # returns a view np.mean cannot reduce; identical on Python 2.
        self._mean_neighbors = np.mean(list(self.cardinalities.values()))
        self._cache['mean_neighbors'] = self._mean_neighbors
    return self._mean_neighbors
+
@property
def min_neighbors(self):
    """
    minimum number of neighbors
    """
    if 'min_neighbors' in self._cache:
        return self._min_neighbors
    self._min_neighbors = min(self.cardinalities.values())
    self._cache['min_neighbors'] = self._min_neighbors
    return self._min_neighbors
+
@property
def nonzero(self):
    """
    number of nonzero weights
    """
    if 'nonzero' in self._cache:
        return self._nonzero
    self._nonzero = self.sparse.nnz
    self._cache['nonzero'] = self._nonzero
    return self._nonzero
+
@property
def sd(self):
    """
    standard deviation of number of neighbors : float
    """
    if 'sd' not in self._cache:
        # list() makes this safe under Python 3, where dict.values()
        # returns a view np.std cannot reduce; identical on Python 2.
        self._sd = np.std(list(self.cardinalities.values()))
        self._cache['sd'] = self._sd
    return self._sd
+
@property
def asymmetries(self):
    """
    list of id pairs with asymmetric weights
    """
    if 'asymmetries' in self._cache:
        return self._asymmetries
    self._asymmetries = self.asymmetry()
    self._cache['asymmetries'] = self._asymmetries
    return self._asymmetries
+
@property
def islands(self):
    """
    list of ids without any neighbors
    """
    if 'islands' in self._cache:
        return self._islands
    self._islands = [ident for ident, count in self.cardinalities.items()
                     if count == 0]
    self._cache['islands'] = self._islands
    return self._islands
+
@property
def histogram(self):
    """
    cardinality histogram as a list of (cardinality, count) pairs, i.e.
    how many observations have each number of neighbors
    """
    if 'histogram' not in self._cache:
        # list() wrappers keep this working on Python 3, where values()
        # is a view and zip() is lazy; results are identical on Python 2.
        # 'bins' (was 'bin') avoids shadowing the builtin.
        ct, bins = np.histogram(list(self.cardinalities.values()),
                                range(self.min_neighbors, self.max_neighbors + 2))
        self._histogram = list(zip(bins, ct))
        self._cache['histogram'] = self._histogram
    return self._histogram
+
+    def __getitem__(self, key):
+        """
+        Allow a dictionary like interaction with the weights class.
+
+        Examples
+        --------
+        >>> from pysal import rook_from_shapefile as rfs
+        >>> w = rfs(pysal.examples.get_path("10740.shp"))
+        WARNING: there is one disconnected observation (no neighbors)
+        Island id:  [163]
+        >>> w[163]
+        {}
+        >>> w[0]
+        {1: 1.0, 4: 1.0, 101: 1.0, 85: 1.0, 5: 1.0}
+        """
+        return dict(zip(self.neighbors[key], self.weights[key]))
+
+    def __iter__(self):
+        """
+        Support iteration over weights
+
+        Examples
+        --------
+        >>> import pysal
+        >>> w=pysal.lat2W(3,3)
+        >>> for i,wi in enumerate(w):
+        ...     print i,wi
+        ...
+        0 (0, {1: 1.0, 3: 1.0})
+        1 (1, {0: 1.0, 2: 1.0, 4: 1.0})
+        2 (2, {1: 1.0, 5: 1.0})
+        3 (3, {0: 1.0, 4: 1.0, 6: 1.0})
+        4 (4, {1: 1.0, 3: 1.0, 5: 1.0, 7: 1.0})
+        5 (5, {8: 1.0, 2: 1.0, 4: 1.0})
+        6 (6, {3: 1.0, 7: 1.0})
+        7 (7, {8: 1.0, 4: 1.0, 6: 1.0})
+        8 (8, {5: 1.0, 7: 1.0})
+        >>>
+        """
+        for i in self._id_order:
+            yield i, dict(zip(self.neighbors[i], self.weights[i]))
+
def remap_ids(self, new_ids):
    '''
    In place modification throughout `W` of id values from `w.id_order` to
    `new_ids` in all
    ...

    Arguments
    ---------

    new_ids     : list/ndarray
                  Aligned list of new ids to be inserted. Note that first
                  element of new_ids will replace first element of
                  w.id_order, second element of new_ids replaces second
                  element of w.id_order and so on.

    Example
    -------

    >>> import pysal as ps
    >>> w = ps.lat2W(3, 3)
    >>> w.id_order
    [0, 1, 2, 3, 4, 5, 6, 7, 8]
    >>> w.neighbors[0]
    [3, 1]
    >>> new_ids = ['id%i'%id for id in w.id_order]
    >>> _ = w.remap_ids(new_ids)
    >>> w.id_order
    ['id0', 'id1', 'id2', 'id3', 'id4', 'id5', 'id6', 'id7', 'id8']
    >>> w.neighbors['id0']
    ['id3', 'id1']
    '''
    # NOTE: old_ids aliases self._id_order (same list object), which
    # matters for the in-place update near the end of this method.
    old_ids = self._id_order
    if len(old_ids) != len(new_ids):
        raise Exception("W.remap_ids: length of `old_ids` does not match \
        that of new_ids")
    if len(set(new_ids)) != len(new_ids):
        raise Exception("W.remap_ids: list `new_ids` contains duplicates") 
    else:
        # Rebuild the three id-keyed structures under the new ids.
        new_neighbors = {}
        new_weights = {}
        # Only the original ('O') transformation is remapped; other cached
        # transformations are discarded implicitly.
        old_transformations = self.transformations['O'].copy()
        new_transformations = {}
        for o,n in zip(old_ids, new_ids):
            o_neighbors = self.neighbors[o]
            o_weights = self.weights[o]
            # O(n^2): list.index() inside a comprehension over neighbors;
            # acceptable for small n, noted but unchanged here.
            n_neighbors = [ new_ids[old_ids.index(j)] for j in o_neighbors]
            new_neighbors[n] = n_neighbors
            # Copy the weight list so old and new dicts do not share state.
            new_weights[n] = o_weights[:]
            new_transformations[n] = old_transformations[o]
        self.neighbors = new_neighbors
        self.weights = new_weights
        self.transformations["O"] = new_transformations

        # Because old_ids IS self._id_order, these are just the positions
        # 0..n-1; the loop then rewrites the id_order list in place
        # through the property getter (no setter validation is triggered).
        id_order = [ self._id_order.index(o) for o in old_ids]
        for i,id_ in enumerate(id_order):
            self.id_order[id_] = new_ids[i]

        # Invalidate all cached derived attributes.
        self._reset()
+
+    def __set_id_order(self, ordered_ids):
+        """
+        Set the iteration order in w.
+
+        W can be iterated over. On construction the iteration order is set to
+        the lexicographic order of the keys in the w.weights dictionary. If a specific order
+        is required it can be set with this method.
+
+        Parameters
+        ----------
+
+        ordered_ids : sequence
+                      identifiers for observations in specified order
+
+        Notes
+        -----
+
+        ordered_ids is checked against the ids implied by the keys in
+        w.weights. If they are not equivalent sets an exception is raised and
+        the iteration order is not changed.
+
+        Examples
+        --------
+
+        >>> import pysal
+        >>> w=pysal.lat2W(3,3)
+        >>> for i,wi in enumerate(w):
+        ...     print i,wi
+        ...
+        0 (0, {1: 1.0, 3: 1.0})
+        1 (1, {0: 1.0, 2: 1.0, 4: 1.0})
+        2 (2, {1: 1.0, 5: 1.0})
+        3 (3, {0: 1.0, 4: 1.0, 6: 1.0})
+        4 (4, {1: 1.0, 3: 1.0, 5: 1.0, 7: 1.0})
+        5 (5, {8: 1.0, 2: 1.0, 4: 1.0})
+        6 (6, {3: 1.0, 7: 1.0})
+        7 (7, {8: 1.0, 4: 1.0, 6: 1.0})
+        8 (8, {5: 1.0, 7: 1.0})
+        >>> w.id_order
+        [0, 1, 2, 3, 4, 5, 6, 7, 8]
+        >>> w.id_order=range(8,-1,-1)
+        >>> w.id_order
+        [8, 7, 6, 5, 4, 3, 2, 1, 0]
+        >>> for i,w_i in enumerate(w):
+        ...     print i,w_i
+        ...
+        0 (8, {5: 1.0, 7: 1.0})
+        1 (7, {8: 1.0, 4: 1.0, 6: 1.0})
+        2 (6, {3: 1.0, 7: 1.0})
+        3 (5, {8: 1.0, 2: 1.0, 4: 1.0})
+        4 (4, {1: 1.0, 3: 1.0, 5: 1.0, 7: 1.0})
+        5 (3, {0: 1.0, 4: 1.0, 6: 1.0})
+        6 (2, {1: 1.0, 5: 1.0})
+        7 (1, {0: 1.0, 2: 1.0, 4: 1.0})
+        8 (0, {1: 1.0, 3: 1.0})
+        >>>
+
+        """
+        if set(self._id_order) == set(ordered_ids):
+            self._id_order = ordered_ids
+            self._id_order_set = True
+            self._reset()
+        else:
+            raise Exception('ordered_ids do not align with W ids')
+
def __get_id_order(self):
    """
    Returns the ids for the observations in the order in which they
    would be encountered if iterating over the weights .
    """
    # Returns the internal list itself (not a copy); mutating it changes
    # iteration order without triggering setter validation or a reset.
    return self._id_order

# Public read/write property: reads return the internal order, writes go
# through __set_id_order, which validates the new ordering.
id_order = property(__get_id_order, __set_id_order)
+
@property
def id_order_set(self):
    """
    Returns True if user has set id_order, False if not.

    Examples
    --------
    >>> from pysal import lat2W
    >>> w=lat2W()
    >>> w.id_order_set
    True
    """
    # Flag maintained by __set_id_order; read-only here.
    return self._id_order_set
+
@property
def neighbor_offsets(self):
    """
    Given the current id_order, neighbor_offsets[id] is the offsets of the
    id's neighbors in id_order

    Examples
    --------
    >>> from pysal import W
    >>> neighbors={'c': ['b'], 'b': ['c', 'a'], 'a': ['b']}
    >>> weights ={'c': [1.0], 'b': [1.0, 1.0], 'a': [1.0]}
    >>> w=W(neighbors,weights)
    >>> w.id_order = ['a','b','c']
    >>> w.neighbor_offsets['b']
    [2, 0]
    """
    if "neighbors_0" not in self._cache:
        self.__neighbors_0 = {}
        id2i = self.id2i
        # .items() instead of .iteritems() keeps this compatible with
        # Python 3 while behaving identically on Python 2.
        for j, neigh_list in self.neighbors.items():
            self.__neighbors_0[j] = [id2i[neigh] for neigh in neigh_list]
        self._cache['neighbors_0'] = self.__neighbors_0
    return self.__neighbors_0
+
def get_transform(self):
    """
    Getter for transform property

    Returns
    -------
    transformation : string (or none)

    Examples
    --------
    >>> from pysal import lat2W
    >>> w=lat2W()
    >>> w.weights[0]
    [1.0, 1.0]
    >>> w.transform
    'O'
    >>> w.transform='r'
    >>> w.weights[0]
    [0.5, 0.5]
    >>> w.transform='b'
    >>> w.weights[0]
    [1.0, 1.0]
    >>>
    """
    # Returns the most recent code passed to set_transform (uppercased
    # there), e.g. 'O', 'R', 'B', 'D', or 'V'.
    return self._transform
+
+    def set_transform(self, value="B"):
+        """
+        Transformations of weights.
+
+        Notes
+        -----
+
+        Transformations are applied only to the value of the weights at
+        instantiation. Chaining of transformations cannot be done on a W
+        instance.
+
+        Parameters
+        ----------
+        transform : string (not case sensitive)
+
+        .. table::
+
+            ================   ======================================================
+            transform string   value
+            ================   ======================================================
+            B                  Binary
+            R                  Row-standardization (global sum=n)
+            D                  Double-standardization (global sum=1)
+            V                  Variance stabilizing
+            O                  Restore original transformation (from instantiation)
+            ================   ======================================================
+
+        Examples
+        --------
+        >>> from pysal import lat2W
+        >>> w=lat2W()
+        >>> w.weights[0]
+        [1.0, 1.0]
+        >>> w.transform
+        'O'
+        >>> w.transform='r'
+        >>> w.weights[0]
+        [0.5, 0.5]
+        >>> w.transform='b'
+        >>> w.weights[0]
+        [1.0, 1.0]
+        >>>
+        """
+        value = value.upper()
+        self._transform = value
+        if value in self.transformations:
+            self.weights = self.transformations[value]
+            self._reset()
+        else:
+            if value == "R":
+                # row standardized weights
+                weights = {}
+                self.weights = self.transformations['O']
+                for i in self.weights:
+                    wijs = self.weights[i]
+                    row_sum = sum(wijs) * 1.0
+                    if row_sum == 0.0:
+                        if not self.silent_island_warning:
+                            print 'WARNING: ', i, ' is an island (no neighbors)'
+                    weights[i] = [wij / row_sum for wij in wijs]
+                weights = weights
+                self.transformations[value] = weights
+                self.weights = weights
+                self._reset()
+            elif value == "D":
+                # doubly-standardized weights
+                # update current chars before doing global sum
+                self._reset()
+                s0 = self.s0
+                ws = 1.0 / s0
+                weights = {}
+                self.weights = self.transformations['O']
+                for i in self.weights:
+                    wijs = self.weights[i]
+                    weights[i] = [wij * ws for wij in wijs]
+                weights = weights
+                self.transformations[value] = weights
+                self.weights = weights
+                self._reset()
+            elif value == "B":
+                # binary transformation
+                weights = {}
+                self.weights = self.transformations['O']
+                for i in self.weights:
+                    wijs = self.weights[i]
+                    weights[i] = [1.0 for wij in wijs]
+                weights = weights
+                self.transformations[value] = weights
+                self.weights = weights
+                self._reset()
+            elif value == "V":
+                # variance stabilizing
+                weights = {}
+                q = {}
+                k = self.cardinalities
+                s = {}
+                Q = 0.0
+                self.weights = self.transformations['O']
+                for i in self.weights:
+                    wijs = self.weights[i]
+                    q[i] = math.sqrt(sum([wij * wij for wij in wijs]))
+                    s[i] = [wij / q[i] for wij in wijs]
+                    Q += sum([si for si in s[i]])
+                nQ = self.n / Q
+                for i in self.weights:
+                    weights[i] = [w * nQ for w in s[i]]
+                weights = weights
+                self.transformations[value] = weights
+                self.weights = weights
+                self._reset()
+            elif value == "O":
+                # put weights back to original transformation
+                weights = {}
+                original = self.transformations[value]
+                self.weights = original
+                self._reset()
+            else:
+                print 'unsupported weights transformation'
+
+    transform = property(get_transform, set_transform)
+
def asymmetry(self, intrinsic=True):
    """
    Asymmetry check

    Parameters
    ----------
    intrinsic: boolean (default=True)

            intrinsic symmetry:
                  :math:`w_{i,j} == w_{j,i}`

            if intrinsic is False:
                symmetry is defined as :math:`i \in N_j \ AND \ j \in N_i` where
                :math:`N_j` is the set of neighbors for j.

    Returns
    -------
    asymmetries : list
                  empty if no asymmetries are found
                  if asymmetries, then a list of (i,j) tuples is returned

    Examples
    --------

    >>> from pysal import lat2W
    >>> w=lat2W(3,3)
    >>> w.asymmetry()
    []
    >>> w.transform='r'
    >>> result = w.asymmetry(intrinsic=False)
    >>> result
    []
    """
    if intrinsic:
        wd = self.sparse.transpose() - self.sparse
    else:
        # Neighbor-set symmetry only: binarize, difference, then restore
        # the caller's transformation.
        transform = self.transform
        self.transform = 'b'
        wd = self.sparse.transpose() - self.sparse
        self.transform = transform

    ids = np.nonzero(wd)
    if len(ids[0]) == 0:
        return []
    else:
        # sorted(zip(...)) works under Python 3, where zip() is lazy and
        # has no .sort(); it returns the same sorted list on Python 2.
        ijs = sorted(zip(ids[0], ids[1]))
        return ijs
+
def full(self):
    """
    Generate a full numpy array

    Returns
    -------

    implicit : tuple
               first element being the full numpy array and second element
               keys being the ids associated with each row in the array.

    Examples
    --------
    >>> from pysal import W
    >>> neighbors={'first':['second'],'second':['first','third'],'third':['second']}
    >>> weights={'first':[1],'second':[1,1],'third':[1]}
    >>> w=W(neighbors,weights)
    >>> wf,ids=w.full()
    >>> wf
    array([[ 0.,  1.,  0.],
           [ 1.,  0.,  1.],
           [ 0.,  1.,  0.]])
    >>> ids
    ['first', 'second', 'third']

    See also
    --------
    full
    """
    # Thin delegate: densification lives in pysal.weights.util.full.
    # NOTE(review): presumably rows follow the current id_order — confirm
    # against util.full; not visible from here.
    return util.full(self)
+
def towsp(self):
    '''
    Generate a WSP object

    Returns
    -------

    implicit : pysal.WSP
               Thin W class

    Examples
    --------
    >>> import pysal as ps
    >>> from pysal import W
    >>> neighbors={'first':['second'],'second':['first','third'],'third':['second']}
    >>> weights={'first':[1],'second':[1,1],'third':[1]}
    >>> w=W(neighbors,weights)
    >>> wsp=w.towsp()
    >>> isinstance(wsp, ps.weights.weights.WSP)
    True
    >>> wsp.n
    3
    >>> wsp.s0
    4

    See also
    --------
    WSP
    '''
    # The WSP wraps this W's sparse matrix under the current id_order;
    # WSP.__init__ calls tocsr(), which returns the same object when the
    # matrix is already CSR, so the data may be shared rather than copied.
    return WSP(self.sparse, self._id_order)
+
def set_shapefile(self, shapefile, idVariable=None, full=False):
    """
    Adding meta data for writing headers of gal and gwt files

    Parameters
    ----------
    shapefile : (string)
                shapefile name used to construct weights

    idVariable : (string)
                name of attribute in shapefile to associate with ids in the weights

    full : (boolean)
            True - write out entire path for shapefile, False
            (default) only base of shapefile without extension
    """
    if not full:
        # Keep only the base name, dropping directory and extension.
        self._shpName = BASENAME(shapefile).split(".")[0]
    else:
        self._shpName = shapefile
    self._varName = idVariable
+
+
class WSP(object):

    """
    Thin W class for spreg

    Parameters
    ----------

    sparse   : scipy sparse object
               NxN object from scipy.sparse

    id_order : list
               An ordered list of ids, assumed to match the ordering in
               sparse.

    Attributes
    ----------

    n
    s0
    trcWtW_WW

    Examples
    --------

    From GAL information

    >>> import scipy.sparse
    >>> import pysal
    >>> rows = [0, 1, 1, 2, 2, 3]
    >>> cols = [1, 0, 2, 1, 3, 3]
    >>> weights =  [1, 0.75, 0.25, 0.9, 0.1, 1]
    >>> sparse = scipy.sparse.csr_matrix((weights, (rows, cols)), shape=(4,4))
    >>> w = pysal.weights.WSP(sparse)
    >>> w.s0
    4.0
    >>> w.trcWtW_WW
    6.3949999999999996
    >>> w.n
    4

    """

    def __init__(self, sparse, id_order=None):
        # Validate input before storing anything.
        if not scipy.sparse.issparse(sparse):
            raise ValueError("must pass a scipy sparse object")
        n_rows, n_cols = sparse.shape
        if n_rows != n_cols:
            raise ValueError("Weights object must be square")
        if id_order:
            if len(id_order) != n_rows:
                raise ValueError(
                    "Number of values in id_order must match shape of sparse")
        # Normalize to CSR for consistent downstream arithmetic.
        self.sparse = sparse.tocsr()
        self.n = n_rows
        self.id_order = id_order
        self._cache = {}

    @property
    def s0(self):
        """
        float

        .. math::

               s0=\sum_i \sum_j w_{i,j}

        """
        if 's0' in self._cache:
            return self._s0
        self._s0 = self.sparse.sum()
        self._cache['s0'] = self._s0
        return self._s0

    @property
    def trcWtW_WW(self):
        """
        trace of :math:`W^{'}W + WW`
        """
        if 'trcWtW_WW' in self._cache:
            return self._trcWtW_WW
        self._trcWtW_WW = self.diagWtW_WW.sum()
        self._cache['trcWtW_WW'] = self._trcWtW_WW
        return self._trcWtW_WW

    @property
    def diagWtW_WW(self):
        """
        diagonal of :math:`W^{'}W + WW`
        """
        if 'diagWtW_WW' in self._cache:
            return self._diagWtW_WW
        w = self.sparse
        self._diagWtW_WW = (w.transpose() * w + w * w).diagonal()
        self._cache['diagWtW_WW'] = self._diagWtW_WW
        return self._diagWtW_WW
diff --git a/pysal/weights/weights_from_geojson.ipynb b/pysal/weights/weights_from_geojson.ipynb
new file mode 100644
index 0000000..e9febb5
--- /dev/null
+++ b/pysal/weights/weights_from_geojson.ipynb
@@ -0,0 +1,311 @@
+{
+ "metadata": {
+  "name": "weights_from_geojson"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "heading",
+     "level": 2,
+     "metadata": {},
+     "source": [
+      "Prototyping the construction of PySAL weights from geojson. Supports local files, uri, and serialized objects\n",
+      "\n"
+     ]
+    },
+    {
+     "cell_type": "raw",
+     "metadata": {},
+     "source": [
+      "Author: Sergio Rey <sjsrey at gmail.com>"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import geojson\n",
+      "import pysal as ps\n",
+      "import urllib2 as urllib"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 1
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "class PolygonCollection:\n",
+      "    def __init__(self, polygons, bbox=None):\n",
+      "        \"\"\"\n",
+      "\n",
+      "        Parameters\n",
+      "        ==========\n",
+      "        polygons: dict\n",
+      "                  key is polygon Id, value is PySAL Polygon object\n",
+      "        bbox: list (optional)\n",
+      "              [left, lower, right, upper]\n",
+      "\n",
+      "        Notes\n",
+      "        =====\n",
+      "        bbox is supported in geojson specification at both the feature and feature collection level. However, not all geojson writers generate the bbox at the feature collection level. \n",
+      "        In those cases, the bbox property will be set on initial access.\n",
+      "\n",
+      "        \"\"\"\n",
+      "              \n",
+      "        self.type=ps.cg.Polygon\n",
+      "        self.n = len(polygons)\n",
+      "        self.polygons = polygons\n",
+      "        if bbox is None:\n",
+      "            self._bbox = None\n",
+      "        else:\n",
+      "            self._bbox = bbox\n",
+      "            \n",
+      "    @property\n",
+      "    def bbox(self):\n",
+      "        bboxes = np.array([self.polygons[p].bbox for p in self.polygons])\n",
+      "        mins = bboxes.min(axis=0)\n",
+      "        maxs = bboxes.max(axis=0)\n",
+      "        self._bbox = [ mins[0], mins[1], maxs[2], maxs[3] ]\n",
+      "        return self._bbox\n",
+      "        \n",
+      "    \n",
+      "    def __getitem__(self, index):\n",
+      "        return self.polygons[index]\n",
+      "            "
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 2
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "def queen_geojson(gjobj):\n",
+      "    \"\"\"\n",
+      "    Constructs a PySAL queen contiguity W from a geojson object\n",
+      "\n",
+      "    \"\"\"\n",
+      "    polys = []\n",
+      "    ids = []\n",
+      "    i = 0\n",
+      "    for feature in gjobj['features']:\n",
+      "        polys.append(ps.cg.asShape(geojson.Polygon(feature['geometry']['coordinates'])))\n",
+      "        ids.append(i)\n",
+      "        i += 1\n",
+      "    polygons = PolygonCollection(dict(zip(ids,polys)))\n",
+      "    neighbors = ps.weights.Contiguity.ContiguityWeightsPolygons(polygons).w\n",
+      "    return ps.W(neighbors)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 3
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "def queen_from_geojson(uri):\n",
+      "    gjobj = geojson.load(urllib.urlopen(uri))\n",
+      "    return queen_geojson(gjobj)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 4
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "def queen_from_geojsons(s):\n",
+      "    return queen_geojson(geojson.loads(s))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 5
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "def queen_from_geojsonf(fileName):\n",
+      "    fp = open(fileName)\n",
+      "    obj = geojson.load(fp)\n",
+      "    fp.close()\n",
+      "    return queen_geojson(obj)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 6
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# from a uri\n",
+      "uri = \"http://toae.org/pub/columbus.json\""
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 7
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wq = queen_from_geojson(uri)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 8
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wq.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 9,
+       "text": [
+        "49"
+       ]
+      }
+     ],
+     "prompt_number": 9
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# from a local file\n",
+      "wf = queen_from_geojsonf(ps.examples.get_path(\"columbus.json\"))"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 10
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wf.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 11,
+       "text": [
+        "49"
+       ]
+      }
+     ],
+     "prompt_number": 11
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# from a serialized geojson object\n",
+      "\n",
+      "fp = open(ps.examples.get_path(\"columbus.json\"))\n",
+      "info = geojson.load(fp)\n",
+      "fp.close()\n",
+      "infos = geojson.dumps(info)\n",
+      "ws = queen_from_geojsons(infos)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [],
+     "prompt_number": 12
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ws.n"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 13,
+       "text": [
+        "49"
+       ]
+      }
+     ],
+     "prompt_number": 13
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wf.neighbors == ws.neighbors"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 14,
+       "text": [
+        "True"
+       ]
+      }
+     ],
+     "prompt_number": 14
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "wq.neighbors == wf.neighbors"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": [
+      {
+       "output_type": "pyout",
+       "prompt_number": 15,
+       "text": [
+        "True"
+       ]
+      }
+     ],
+     "prompt_number": 15
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..f3aca17
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+scipy>=0.11
diff --git a/rtd.txt b/rtd.txt
new file mode 100644
index 0000000..b9d419a
--- /dev/null
+++ b/rtd.txt
@@ -0,0 +1 @@
+sphinxcontrib-napoleon
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..520e354
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,6 @@
+[nosetests]
+ignore-files=collection
+exclude-dir=pysal/contrib
+
+[wheel]
+universal=1
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..dcfa1bd
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+
+try:
+    from setuptools import setup, find_packages
+except ImportError:
+    from distutils.core import setup
+
+import sys
+import shutil
+import os
+if sys.version_info[0] < 3:
+    import __builtin__ as builtins
+else:
+    import builtins
+
+from pysal.version import version as dversion
+
+with open('README.txt') as file:
+    long_description = file.read()
+
+MAJOR = 1
+MINOR = 9
+MICRO = 1
+ISRELEASED = False
+VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+
+
+# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
+# update it when the contents of directories change.
+if os.path.exists('MANIFEST'):
+    os.remove('MANIFEST')
+
+
+def setup_package():
+
+    # Perform 2to3 if needed
+    local_path = os.path.dirname(os.path.abspath(sys.argv[0]))  # get cwd
+    src_path = local_path
+
+    if sys.version_info[0] == 3:
+        src_path = os.path.join(local_path, 'build', 'py3k')
+        sys.path.insert(0, os.path.join(local_path, 'tools'))
+        import py3tool
+        print("Converting to Python3 via 2to3...")
+        py3tool.sync_2to3('pysal', os.path.join(src_path, 'pysal'))
+
+        site_cfg = os.path.join(local_path, 'site.cfg')
+        if os.path.isfile(site_cfg):
+            shutil.copy(site_cfg, src_path)
+
+        # Ugly hack to make pip work with Python 3, see #1857.
+        # Explanation: pip messes with __file__ which interacts badly with the
+        # change in directory due to the 2to3 conversion.  Therefore we restore
+        # __file__ to what it would have been otherwise.
+        global __file__
+        __file__ = os.path.join(os.curdir, os.path.basename(__file__))
+        if '--egg-base' in sys.argv:
+            # Change pip-egg-info entry to absolute path, so pip can find it
+            # after changing directory.
+            idx = sys.argv.index('--egg-base')
+            if sys.argv[idx + 1] == 'pip-egg-info':
+                sys.argv[idx + 1] = os.path.join(local_path, 'pip-egg-info')
+
+    old_path = os.getcwd()
+    os.chdir(src_path)
+    sys.path.insert(0, src_path)
+
+
+    # get all file endings and copy whole file names without a file suffix
+    # assumes nested directories are only down one level
+    example_data_files = set()
+    for i in os.listdir("pysal/examples"):
+        if i.endswith(('py', 'pyc')):
+            continue
+        if not os.path.isdir("pysal/examples/" + i):
+            if "." in i:
+                glob_name = "examples/*." + i.split(".")[-1]
+            else:
+                glob_name = "examples/" + i
+        else:
+            glob_name = "examples/" + i + "/*"
+
+        example_data_files.add(glob_name)
+
+    setup(
+        name='PySAL',
+        version=dversion,
+        description="A library of spatial analysis functions.",
+        long_description=long_description,
+        maintainer="PySAL Developers",
+        maintainer_email='pysal-dev at googlegroups.com',
+        url='http://pysal.org',
+        download_url='https://pypi.python.org/pypi/PySAL',
+        license='BSD',
+        py_modules=['pysal'],
+        test_suite='nose.collector',
+        tests_require=['nose'],
+        keywords='spatial statistics',
+        classifiers=[
+            'Development Status :: 5 - Production/Stable',
+            'Intended Audience :: Science/Research',
+            'Intended Audience :: Developers',
+            'Intended Audience :: Education',
+            'Topic :: Scientific/Engineering',
+            'Topic :: Scientific/Engineering :: GIS',
+            'License :: OSI Approved :: BSD License',
+            'Programming Language :: Python',
+            'Programming Language :: Python :: 2.5',
+            'Programming Language :: Python :: 2.6',
+            'Programming Language :: Python :: 2.7'
+        ],
+        packages=find_packages(exclude=[".meta", "*.meta.*", "meta.*",
+                                        "meta"]),
+        package_data={'pysal': list(example_data_files)},
+        requires=['scipy']
+    )
+
+
+if __name__ == '__main__':
+    setup_package()
diff --git a/tools/cron.py b/tools/cron.py
new file mode 100644
index 0000000..948376d
--- /dev/null
+++ b/tools/cron.py
@@ -0,0 +1,16 @@
+#!/usr/bin/python
+"""Sets and unsets a lock on the local pysal repository."""
+import os, sys
+
+#check lock
+if os.path.exists('/tmp/pysal.lock'):
+    print "LOCK IN PLACE, another process is running perhaps?"
+    sys.exit(1)
+else:
+    lck = open('/tmp/pysal.lock','w')
+    lck.write('%d'%os.getpid())
+    lck.close()
+    lck = True
+    os.system('/Users/stephens/Dropbox/work/Projects/pysal/trunk/tools/test.sh')
+    os.remove('/tmp/pysal.lock')
+
diff --git a/tools/docs.sh b/tools/docs.sh
new file mode 100644
index 0000000..55d0517
--- /dev/null
+++ b/tools/docs.sh
@@ -0,0 +1,39 @@
+#!/bin/bash - 
+
+if [ -f /tmp/pysal.lock ]
+then
+#check repo for changes
+cd /home/pstephen/pysal/
+svn cleanup
+a=$(svnversion)
+svn update -r $((a+1)) 2>&1
+
+if [ $? = "0" ]
+then
+b=$(svnversion) 
+else exit
+fi
+
+if [ "$a" != "$b"  ]
+then 
+export PYTHONPATH=/home/pstephen/pysal/
+find /home/pstephen/pysal/pysal -name "*.pyc" -exec rm '{}' ';'
+
+# run coverage and copy to server
+/usr/local/bin/coverage html -d /home/pstephen/coverage pysal/*.py pysal/cg/*.py pysal/esda/*.py pysal/inequality/*.py pysal/network/*.py pysal/region/*.py pysal/spatial_dynamics/*.py pysal/spreg/*.py pysal/weights/*.py pysal/core/*.py pysal/core/IOHandlers/*.py pysal/core/util/*.py 
+rsync -r --delete /home/pstephen/coverage/ stephens at geodacenter.org:~/coverage
+
+# build new docs and copy to server
+cd /home/pstephen/pysal/doc
+/usr/bin/make clean
+/usr/bin/sphinx-build -Q -b html  -d build/doctrees  source build/html 
+rsync -r --delete /home/pstephen/pysal/doc/build/html/ stephens at geodacenter.org:~/dev
+
+# build source installer and copy to server
+cd /home/pstephen/pysal/
+/usr/bin/python setup.py sdist
+rsync -r --delete /home/pstephen/pysal/dist/pysal*  stephens at geodacenter.org:~/tmp/builds
+
+else echo "`date`, "$a", "$b", Repo unchanged." > /tmp/pysal.log 2>&1
+fi
+fi
diff --git a/tools/github_stats.py b/tools/github_stats.py
new file mode 100644
index 0000000..9fd5263
--- /dev/null
+++ b/tools/github_stats.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+"""Simple tools to query github.com and gather stats about issues.
+"""
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+from __future__ import print_function
+
+import json
+import re
+import sys
+
+from datetime import datetime, timedelta
+from subprocess import check_output
+from urllib2 import urlopen
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
+PER_PAGE = 100
+
+element_pat = re.compile(r'<(.+?)>')
+rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]')
+
+#-----------------------------------------------------------------------------
+# Functions
+#-----------------------------------------------------------------------------
+
+def parse_link_header(headers):
+    link_s = headers.get('link', '')
+    urls = element_pat.findall(link_s)
+    rels = rel_pat.findall(link_s)
+    d = {}
+    for rel,url in zip(rels, urls):
+        d[rel] = url
+    return d
+
+def get_paged_request(url):
+    """get a full list, handling APIv3's paging"""
+    results = []
+    while url:
+        print("fetching %s" % url, file=sys.stderr)
+        f = urlopen(url)
+        results.extend(json.load(f))
+        links = parse_link_header(f.headers)
+        url = links.get('next')
+    return results
+
+def get_issues(project="pysal/pysal", state="closed", pulls=False):
+    """Get a list of the issues from the Github API."""
+    which = 'pulls' if pulls else 'issues'
+    url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (project, which, state, PER_PAGE)
+    return get_paged_request(url)
+
+
+def _parse_datetime(s):
+    """Parse dates in the format returned by the Github API."""
+    if s:
+        return datetime.strptime(s, ISO8601)
+    else:
+        return datetime.fromtimestamp(0)
+
+
+def issues2dict(issues):
+    """Convert a list of issues to a dict, keyed by issue number."""
+    idict = {}
+    for i in issues:
+        idict[i['number']] = i
+    return idict
+
+
+def is_pull_request(issue):
+    """Return True if the given issue is a pull request."""
+    return 'pull_request_url' in issue
+
+
+def issues_closed_since(period=timedelta(days=365), project="pysal/pysal", pulls=False):
+    """Get all issues closed since a particular point in time. period
+can either be a datetime object, or a timedelta object. In the
+latter case, it is used as a time before the present."""
+
+    which = 'pulls' if pulls else 'issues'
+
+    if isinstance(period, timedelta):
+        period = datetime.now() - period
+    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, period.strftime(ISO8601), PER_PAGE)
+    allclosed = get_paged_request(url)
+    # allclosed = get_issues(project=project, state='closed', pulls=pulls, since=period)
+    filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > period]
+
+    # exclude rejected PRs
+    if pulls:
+        filtered = [ pr for pr in filtered if pr['merged_at'] ]
+
+    return filtered
+
+
+def sorted_by_field(issues, field='closed_at', reverse=False):
+    """Return a list of issues sorted by closing date date."""
+    return sorted(issues, key = lambda i:i[field], reverse=reverse)
+
+
+def report(issues, show_urls=False):
+    """Summary report about a list of issues, printing number and title.
+    """
+    # titles may have unicode in them, so we must encode everything below
+    if show_urls:
+        for i in issues:
+            role = 'ghpull' if 'merged_at' in i else 'ghissue'
+            print('* :%s:`%d`: %s' % (role, i['number'],
+                                        i['title'].encode('utf-8')))
+    else:
+        for i in issues:
+            print('* %d: %s' % (i['number'], i['title'].encode('utf-8')))
+
+#-----------------------------------------------------------------------------
+# Main script
+#-----------------------------------------------------------------------------
+
+if __name__ == "__main__":
+    # Whether to add reST urls for all issues in printout.
+    show_urls = True
+
+    # By default, search one month back
+    tag = None
+    if len(sys.argv) > 1:
+        try:
+            days = int(sys.argv[1])
+        except:
+            tag = sys.argv[1]
+    else:
+        tag = check_output(['git', 'describe', '--abbrev=0']).strip()
+
+    if tag:
+        cmd = ['git', 'log', '-1', '--format=%ai', tag]
+        tagday, tz = check_output(cmd).strip().rsplit(' ', 1)
+        since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S")
+    else:
+        since = datetime.now() - timedelta(days=days)
+
+    print("fetching GitHub stats since %s (tag: %s)" % (since, tag), file=sys.stderr)
+    # turn off to play interactively without redownloading, use %run -i
+    if 1:
+        issues = issues_closed_since(since, pulls=False)
+        pulls = issues_closed_since(since, pulls=True)
+
+    # For regular reports, it's nice to show them in reverse chronological order
+    issues = sorted_by_field(issues, reverse=True)
+    pulls = sorted_by_field(pulls, reverse=True)
+
+    n_issues, n_pulls = map(len, (issues, pulls))
+    n_total = n_issues + n_pulls
+
+    # Print summary report we can directly include into release notes.
+    print()
+    since_day = since.strftime("%Y/%m/%d")
+    today = datetime.today().strftime("%Y/%m/%d")
+    print(".. _github-stats:")
+    print('Github stats')
+    print('============')
+    print()
+    print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag))
+    print()
+    print("These lists are automatically generated, and may be incomplete or contain duplicates.")
+    print()
+    if tag:
+        # print git info, in addition to GitHub info:
+        since_tag = tag+'..'
+        cmd = ['git', 'log', '--oneline', since_tag]
+        ncommits = len(check_output(cmd).splitlines())
+
+        author_cmd = ['git', 'log', '--format=* %aN', since_tag]
+        all_authors = check_output(author_cmd).splitlines()
+        unique_authors = sorted(set(all_authors))
+
+        print("The following %i authors contributed %i commits." % (len(unique_authors), ncommits))
+        print()
+        print('\n'.join(unique_authors))
+        print()
+
+    print()
+    print("We closed a total of %d issues, %d pull requests and %d regular issues;\n"
+          "this is the full list (generated with the script \n"
+          ":file:`tools/github_stats.py`):" % (n_total, n_pulls, n_issues))
+    print()
+    print('Pull Requests (%d):\n' % n_pulls)
+    report(pulls, show_urls)
+    print()
+    print('Issues (%d):\n' % n_issues)
+    report(issues, show_urls)
diff --git a/tools/py3tool.py b/tools/py3tool.py
new file mode 100644
index 0000000..23a16a4
--- /dev/null
+++ b/tools/py3tool.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python3
+# -*- python -*-
+"""
+%prog SUBMODULE...
+
+Hack to pipe submodules of Numpy through 2to3 and build them in-place
+one-by-one.
+
+Example usage:
+
+    python3 tools/py3tool.py testing distutils core
+
+This will copy files to _py3k/numpy, add a dummy __init__.py and
+version.py on the top level, and copy and 2to3 the files of the three
+submodules.
+
+When running py3tool again, only changed files are re-processed, which
+makes the test-bugfix cycle faster.
+
+"""
+from optparse import OptionParser
+import shutil
+import os
+import sys
+import re
+import subprocess
+import fnmatch
+
+if os.environ.get('USE_2TO3CACHE'):
+    import lib2to3cache
+
+BASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
+TEMP = os.path.normpath(os.path.join(BASE, '_py3k'))
+
+SCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')
+
+EXTRA_2TO3_FLAGS = {
+    '*/setup.py': '-x import',
+    #'numpy/core/code_generators/generate_umath.py': '-x import',
+    #'numpy/core/code_generators/generate_numpy_api.py': '-x import',
+    #'numpy/core/code_generators/generate_ufunc_api.py': '-x import',
+    #'numpy/core/defchararray.py': '-x unicode',
+    #'numpy/compat/py3k.py': '-x unicode',
+    #'numpy/ma/timer_comparison.py': 'skip',
+    #'numpy/distutils/system_info.py': '-x reduce',
+    #'numpy/f2py/auxfuncs.py': '-x reduce',
+    #'numpy/lib/arrayterator.py': '-x reduce',
+    #'numpy/lib/tests/test_arrayterator.py': '-x reduce',
+    #'numpy/ma/core.py': '-x reduce',
+    #'numpy/ma/tests/test_core.py': '-x reduce',
+    #'numpy/ma/tests/test_old_ma.py': '-x reduce',
+    #'numpy/ma/timer_comparison.py': '-x reduce',
+    #'numpy/oldnumeric/ma.py': '-x reduce',
+}
+
+def main():
+    p = OptionParser(usage=__doc__.strip())
+    p.add_option("--clean", "-c", action="store_true",
+                 help="clean source directory")
+    options, args = p.parse_args()
+
+    if not args:
+        p.error('no submodules given')
+    else:
+        dirs = ['scipy/%s' % x for x in map(os.path.basename, args)]
+
+    # Prepare
+    if not os.path.isdir(TEMP):
+        os.makedirs(TEMP)
+
+    # Set up dummy files (for building only submodules)
+    dummy_files = {
+        '__init__.py': 'from scipy.version import version as __version__',
+        'version.py': 'version = "0.8.0.dev"'
+    }
+
+    for fn, content in dummy_files.items():
+        fn = os.path.join(TEMP, 'scipy', fn)
+        if not os.path.isfile(fn):
+            try:
+                os.makedirs(os.path.dirname(fn))
+            except OSError:
+                pass
+            f = open(fn, 'wb+')
+            f.write(content.encode('ascii'))
+            f.close()
+
+    # Environment
+    pp = [os.path.abspath(TEMP)]
+    def getenv():
+        env = dict(os.environ)
+        env.update({'PYTHONPATH': ':'.join(pp)})
+        return env
+
+    # Copy
+    for d in dirs:
+        src = os.path.join(BASE, d)
+        dst = os.path.join(TEMP, d)
+
+        # Run 2to3
+        sync_2to3(dst=dst,
+                  src=src,
+                  patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),
+                  clean=options.clean)
+
+        # Run setup.py, falling back to Pdb post-mortem on exceptions
+        setup_py = os.path.join(dst, 'setup.py')
+        if os.path.isfile(setup_py):
+            code = """\
+import pdb, sys, traceback
+p = pdb.Pdb()
+try:
+    import __main__
+    __main__.__dict__.update({
+        "__name__": "__main__", "__file__": "setup.py",
+        "__builtins__": __builtins__})
+    fp = open("setup.py", "rb")
+    try:
+        exec(compile(fp.read(), "setup.py", 'exec'))
+    finally:
+        fp.close()
+except SystemExit:
+    raise
+except:
+    traceback.print_exc()
+    t = sys.exc_info()[2]
+    p.interaction(None, t)
+"""
+            ret = subprocess.call([sys.executable, '-c', code,
+                                   'build_ext', '-i'],
+                                  cwd=dst,
+                                  env=getenv())
+            if ret != 0:
+                raise RuntimeError("Build failed.")
+
+        # Run nosetests
+        subprocess.call(['nosetests3', '-v', d], cwd=TEMP)
+
+def custom_mangling(filename):
+    import_mangling = [
+        os.path.join('cluster', '__init__.py'),
+        os.path.join('cluster', 'hierarchy.py'),
+        os.path.join('cluster', 'vq.py'),
+        os.path.join('fftpack', 'basic.py'),
+        os.path.join('fftpack', 'pseudo_diffs.py'),
+        os.path.join('integrate', 'odepack.py'),
+        os.path.join('integrate', 'quadpack.py'),
+        os.path.join('integrate', '_ode.py'),
+        os.path.join('interpolate', 'fitpack.py'),
+        os.path.join('interpolate', 'fitpack2.py'),
+        os.path.join('interpolate', 'interpolate.py'),
+        os.path.join('interpolate', 'interpolate_wrapper.py'),
+        os.path.join('interpolate', 'ndgriddata.py'),
+        os.path.join('io', 'array_import.py'),
+        os.path.join('io', '__init__.py'),
+        os.path.join('io', 'matlab', 'miobase.py'),
+        os.path.join('io', 'matlab', 'mio4.py'),
+        os.path.join('io', 'matlab', 'mio5.py'),
+        os.path.join('io', 'matlab', 'mio5_params.py'),
+        os.path.join('linalg', 'basic.py'),
+        os.path.join('linalg', 'decomp.py'),
+        os.path.join('linalg', 'lapack.py'),
+        os.path.join('linalg', 'flinalg.py'),
+        os.path.join('linalg', 'iterative.py'),
+        os.path.join('linalg', 'misc.py'),
+        os.path.join('lib', 'blas', '__init__.py'),
+        os.path.join('lib', 'lapack', '__init__.py'),
+        os.path.join('ndimage', 'filters.py'),
+        os.path.join('ndimage', 'fourier.py'),
+        os.path.join('ndimage', 'interpolation.py'),
+        os.path.join('ndimage', 'measurements.py'),
+        os.path.join('ndimage', 'morphology.py'),
+        os.path.join('optimize', 'minpack.py'),
+        os.path.join('optimize', 'zeros.py'),
+        os.path.join('optimize', 'lbfgsb.py'),
+        os.path.join('optimize', 'cobyla.py'),
+        os.path.join('optimize', 'slsqp.py'),
+        os.path.join('optimize', 'nnls.py'),
+        os.path.join('signal', '__init__.py'),
+        os.path.join('signal', 'bsplines.py'),
+        os.path.join('signal', 'signaltools.py'),
+        os.path.join('signal', 'fir_filter_design.py'),
+        os.path.join('special', '__init__.py'),
+        os.path.join('special', 'add_newdocs.py'),
+        os.path.join('special', 'basic.py'),
+        os.path.join('special', 'orthogonal.py'),
+        os.path.join('spatial', '__init__.py'),
+        os.path.join('spatial', 'distance.py'),
+        os.path.join('sparse', 'linalg', 'isolve', 'iterative.py'),
+        os.path.join('sparse', 'linalg', 'dsolve', 'linsolve.py'),
+        os.path.join('sparse', 'linalg', 'dsolve', 'umfpack', 'umfpack.py'),
+        os.path.join('sparse', 'linalg', 'eigen', 'arpack', 'arpack.py'),
+        os.path.join('sparse', 'linalg', 'eigen', 'arpack', 'speigs.py'),
+        os.path.join('sparse', 'linalg', 'iterative', 'isolve', 'iterative.py'),
+        os.path.join('stats', 'stats.py'),
+        os.path.join('stats', 'distributions.py'),
+        os.path.join('stats', 'morestats.py'),
+        os.path.join('stats', 'kde.py'),
+        os.path.join('stats', 'mstats_basic.py'),
+    ]
+
+    if any(filename.endswith(x) for x in import_mangling):
+        print(filename)
+        f = open(filename, 'r', encoding='utf-8')
+        text = f.read()
+        f.close()
+        for mod in ['_vq', '_hierarchy_wrap', '_fftpack', 'convolve',
+                    '_flinalg', 'fblas', 'flapack', 'cblas', 'clapack',
+                    'calc_lwork', '_cephes', 'specfun', 'orthogonal_eval',
+                    'lambertw', 'ckdtree', '_distance_wrap', '_logit',
+                    '_minpack', '_zeros', '_lbfgsb', '_cobyla', '_slsqp',
+                    '_nnls',
+                    'sigtools', 'spline', 'spectral',
+                    '_fitpack', 'dfitpack', '_interpolate',
+                    '_odepack', '_quadpack', 'vode', '_dop',
+                    'vonmises_cython',
+                    'futil', 'mvn',
+                    '_nd_image',
+                    'numpyio',
+                    '_superlu', '_arpack', '_iterative', '_umfpack',
+                    'interpnd',
+                    'mio_utils', 'mio5_utils', 'streams'
+                    ]:
+            text = re.sub(r'^(\s*)import %s' % mod,
+                          r'\1from . import %s' % mod,
+                          text, flags=re.M)
+            text = re.sub(r'^(\s*)from %s import' % mod,
+                          r'\1from .%s import' % mod,
+                          text, flags=re.M)
+        #text = text.replace('from matrixlib', 'from .matrixlib')
+        f = open(filename, 'w', encoding='utf-8')
+        f.write(text)
+        f.close()
+
+def walk_sync(dir1, dir2, _seen=None):
+    if _seen is None:
+        seen = {}
+    else:
+        seen = _seen
+
+    if not dir1.endswith(os.path.sep):
+        dir1 = dir1 + os.path.sep
+
+    # Walk through stuff (which we haven't yet gone through) in dir1
+    for root, dirs, files in os.walk(dir1):
+        sub = root[len(dir1):]
+        if sub in seen:
+            dirs = [x for x in dirs if x not in seen[sub][0]]
+            files = [x for x in files if x not in seen[sub][1]]
+            seen[sub][0].extend(dirs)
+            seen[sub][1].extend(files)
+        else:
+            seen[sub] = (dirs, files)
+        if not dirs and not files:
+            continue
+        yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files
+
+    if _seen is None:
+        # Walk through stuff (which we haven't yet gone through) in dir2
+        for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):
+            yield root1, root2, dirs, files
+
+def sync_2to3(src, dst, patchfile=None, clean=False):
+    import lib2to3.main
+    from io import StringIO
+
+    to_convert = []
+
+    for src_dir, dst_dir, dirs, files in walk_sync(src, dst):
+        for fn in dirs + files:
+            src_fn = os.path.join(src_dir, fn)
+            dst_fn = os.path.join(dst_dir, fn)
+
+            # skip temporary etc. files
+            if fn.startswith('.#') or fn.endswith('~'):
+                continue
+
+            # remove non-existing
+            if os.path.exists(dst_fn) and not os.path.exists(src_fn):
+                if clean:
+                    if os.path.isdir(dst_fn):
+                        shutil.rmtree(dst_fn)
+                    else:
+                        os.unlink(dst_fn)
+                continue
+
+            # make directories
+            if os.path.isdir(src_fn):
+                if not os.path.isdir(dst_fn):
+                    os.makedirs(dst_fn)
+                continue
+
+            dst_dir = os.path.dirname(dst_fn)
+            if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):
+                os.makedirs(dst_dir)
+
+            # don't replace up-to-date files
+            try:
+                if os.path.isfile(dst_fn) and \
+                       os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:
+                    continue
+            except OSError:
+                pass
+
+            # copy file
+            shutil.copyfile(src_fn, dst_fn)
+
+            # add .py files to 2to3 list
+            if dst_fn.endswith('.py'):
+                to_convert.append((src_fn, dst_fn))
+
+    # run 2to3
+    flag_sets = {}
+    for fn, dst_fn in to_convert:
+        flag = ''
+        for pat, opt in EXTRA_2TO3_FLAGS.items():
+            if fnmatch.fnmatch(fn, pat):
+                flag = opt
+                break
+        flag_sets.setdefault(flag, []).append(dst_fn)
+
+    if patchfile:
+        p = open(patchfile, 'wb+')
+    else:
+        p = open(os.devnull, 'wb')
+
+    for flags, filenames in flag_sets.items():
+        if flags == 'skip':
+            continue
+
+        _old_stdout = sys.stdout
+        try:
+            sys.stdout = StringIO()
+            lib2to3.main.main("lib2to3.fixes", ['-w', '-n'] + flags.split()+filenames)
+        finally:
+            sys.stdout = _old_stdout
+
+    for fn, dst_fn in to_convert:
+        # perform custom mangling
+        custom_mangling(dst_fn)
+
+    p.close()
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/test.sh b/tools/test.sh
new file mode 100644
index 0000000..6f47437
--- /dev/null
+++ b/tools/test.sh
@@ -0,0 +1,69 @@
+#!/bin/bash - 
+
+if [ -f /tmp/pysal.lock ]
+    then
+    cd /Users/stephens/tmp/pysal/
+    svn cleanup
+    a=$(svnversion)
+    svn update -r $((a+1)) 2>&1
+
+    if [ $? = "0" ]
+        then
+        b=$(svnversion) 
+    else exit
+    fi
+
+    if [ "$a" != "$b"  ]
+        then 
+        #cd pysal
+        #rm -rf /tmp/pysal
+        #svn checkout http://pysal.googlecode.com/svn/trunk pysal
+        export PYTHONPATH=/Users/stephens/tmp/pysal
+        export PATH=/Library/Frameworks/EPD64.framework/Versions/Current/bin:$PATH
+        find pysal -name "*.pyc" -exec rm '{}' ';'
+
+        # setup message header
+        #echo "to: pas at asu.edu" > /tmp/report.txt
+        echo "to: phil.stphns at gmail.com" > /tmp/report.txt
+        echo "from: phil.stphns at gmail.com" >> /tmp/report.txt
+        printf "Subject: PySAL Unittest Results for revision $b" >> /tmp/report.txt
+        echo "" >> /tmp/report.txt 
+
+
+        cd /Users/stephens/tmp/pysal/
+        svn log -r $(svnversion) >> /tmp/report.txt 
+        echo "" >> /tmp/report.txt 
+
+        # print system information
+        python -c 'import os,sys, numpy, scipy; print sys.platform, sys.version; print "Scipy version:", scipy.__version__; print "Numpy version:", numpy.__version__' >> /tmp/report.txt
+        echo "" >> /tmp/report.txt 
+
+        echo "Full Coverage Report --> http://pysal.geodacenter.org/coverage/index.html" >> /tmp/report.txt
+        echo "" >> /tmp/report.txt 
+
+        # execute pep8 stats
+        echo "" >> /tmp/report.txt 
+        echo "PEP 8 Stats" >> /tmp/report.txt 
+        pep8 --statistics -qq . >> /tmp/report.txt
+        echo "" >> /tmp/report.txt 
+
+        # execute nose test framework
+        nosetests pysal/ >> /tmp/report.txt 2>&1
+        #nosetests --with-coverage --cover-html --cover-package=pysal --cover-html-dir=/tmp/coverage pysal/ >> /tmp/report.txt 2>&1
+        #rsync -r --delete /tmp/coverage/ stephens at geodacenter.org:~/coverage
+        echo "" >> /tmp/report.txt 
+
+        # execute sphinx doctest framework
+        cd /Users/stephens/tmp/pysal/doc/
+        /usr/bin/make clean
+        sphinx-build -b doctest -d build/doctrees  source build/doctest >> /tmp/report.txt  2>/dev/null
+        echo "" >> /tmp/report.txt 
+
+
+        # remove instances of a single period on a line which causes sendmail to send now
+        sed "s/^\./\.\./g" /tmp/report.txt > /tmp/report.eml
+        /usr/sbin/sendmail -t < /tmp/report.eml
+
+    else echo "`date`, "$a", "$b", Repo unchanged." > /tmp/pysal.log 2>&1
+  fi
+fi
diff --git a/travis.txt b/travis.txt
new file mode 100644
index 0000000..b7d4c0a
--- /dev/null
+++ b/travis.txt
@@ -0,0 +1,6 @@
+nose-exclude
+nose-progressive
+sphinx
+sphinxcontrib-napoleon
+networkx
+Shapely

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/pysal.git



More information about the Pkg-grass-devel mailing list