[pdal] 03/08: Imported Upstream version 1.0.1

Sebastiaan Couwenberg sebastic@moszumanska.debian.org
Sun Mar 20 03:18:28 UTC 2016


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository pdal.

commit a141e226e6ece58ca60cb59558309950a26af41f
Author: Bas Couwenberg <sebastic@xs4all.nl>
Date:   Tue Mar 15 21:00:28 2016 +0100

    Imported Upstream version 1.0.1
---
 CMakeLists.txt                                    |   6 +-
 HOWTORELEASE.txt                                  |  82 +++
 apps/CMakeLists.txt                               |  15 +-
 apps/pdal-config                                  |  68 ++
 apps/pdal-config.in                               |   3 +-
 doc/download.rst                                  |  13 +-
 package.sh                                        |  51 ++
 scripts/appveyor/install.cmd                      |  29 +
 scripts/bash-completion/README                    |   3 +
 scripts/bash-completion/pdal                      |  20 +
 scripts/ci/before_install.sh                      |  72 ++
 scripts/ci/common.sh                              |  34 +
 scripts/ci/script.sh                              |  61 ++
 scripts/linux-install-scripts/datum.sh            |  39 +
 scripts/linux-install-scripts/geowave             |  43 ++
 scripts/linux-install-scripts/geowave.sh          |  20 +
 scripts/linux-install-scripts/hexer.sh            |   8 +
 scripts/linux-install-scripts/laszip.sh           |   8 +
 scripts/linux-install-scripts/lazperf.sh          |   5 +
 scripts/linux-install-scripts/libgeotiff.sh       |   8 +
 scripts/linux-install-scripts/nitro.sh            |   8 +
 scripts/linux-install-scripts/p2g.sh              |   8 +
 scripts/linux-install-scripts/packages.sh         |  46 ++
 scripts/linux-install-scripts/pcl.sh              |  49 ++
 scripts/linux-install-scripts/pdal.sh             |  47 ++
 scripts/linux-install-scripts/pgpointcloud.sh     |  22 +
 scripts/linux-install-scripts/startup.sh          |   3 +
 scripts/linux-install-scripts/websocketpp.sh      |   7 +
 scripts/vagrant/loadpgpointcloud.xml              |  16 +
 scripts/vagrant/readpgpointcloud.xml              |  14 +
 src/gitsha.cpp                                    |   2 +-
 vendor/gtest-1.7.0/scripts/fuse_gtest_files.py    | 250 +++++++
 vendor/gtest-1.7.0/scripts/gen_gtest_pred_impl.py | 730 ++++++++++++++++++
 vendor/gtest-1.7.0/scripts/gtest-config.in        | 274 +++++++
 vendor/gtest-1.7.0/scripts/pump.py                | 855 ++++++++++++++++++++++
 35 files changed, 2904 insertions(+), 15 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index fddd41e..92a4e96 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -28,7 +28,7 @@ mark_as_advanced(CMAKE_VERBOSE_MAKEFILE)
 
 # the next line is the ONLY place in the entire pdal system where
 # the version info is hard-coded
-set(PDAL_VERSION_STRING "1.0.0" CACHE STRING "PDAL version" FORCE)
+set(PDAL_VERSION_STRING "1.0.1" CACHE STRING "PDAL version" FORCE)
 
 DISSECT_VERSION()
 GET_OS_INFO()
@@ -48,7 +48,7 @@ set(PDAL_BUILD_VERSION "1.0.0")
 # On OSX we reexport the symbols from libpdal_util.dylib into libpdalcpp.dylib
 # See src/CMakeLists.txt for the rest of the magic.
 #
-if (APPLE OR WINDOWS)
+if (APPLE OR WIN32)
     set(PDAL_LIB_NAME pdalcpp)
     set(PDAL_BASE_LIB_NAME pdalcpp)
     set(PDAL_UTIL_LIB_NAME pdal_util)
@@ -341,6 +341,7 @@ list(APPEND CPACK_SOURCE_IGNORE_FILES "Testing")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "PDAL.build/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/bin/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/lib/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/build/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "Makefile")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "CMakeFiles")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "CTestTestfile.cmake")
@@ -348,7 +349,6 @@ list(APPEND CPACK_SOURCE_IGNORE_FILES "/test/data/local/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/doc/doxygen/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/doc/build/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/doc/presentations/")
-list(APPEND CPACK_SOURCE_IGNORE_FILES "/scripts/")
 list(APPEND CPACK_SOURCE_IGNORE_FILES "/cmake/examples/")
 
 include(CPack)
diff --git a/HOWTORELEASE.txt b/HOWTORELEASE.txt
new file mode 100644
index 0000000..4f283c9
--- /dev/null
+++ b/HOWTORELEASE.txt
@@ -0,0 +1,82 @@
+
+Steps for Making a PDAL Release
+==============================================================================
+
+:Author: Howard Butler
+:Contact: howard@hobu.co
+:Date: 09/11/2015
+
+This document describes the process for releasing a new version of PDAL.
+
+General Notes
+------------------------------------------------------------------------------
+
+Release Process
+
+1) Increment Version Numbers
+
+  - CMakeLists.txt
+    * set(PDAL_VERSION_STRING "1.0.0" CACHE STRING "PDAL version")
+    * DISSECT_VERSION() CMake macro will break version down into
+      PDAL_VERSION_MAJOR, PDAL_VERSION_MINOR, PDAL_VERSION_PATCH,
+      and PDAL_CANDIDATE_VERSION strings.
+
+  - Update SO versioning
+    set(PDAL_API_VERSION "1")
+    set(PDAL_BUILD_VERSION "1.0.0")
+    * https://github.com/libspatialindex/libspatialindex/pull/44#issuecomment-57088783
+
+2) Update README to include any relevant info about the release that
+   might have changed.
+
+3) Update ChangeLog with git2cl
+
+  * git2cl . > ChangeLog
+
+4) Build and run the tests.  Really.
+
+    ::
+
+        ctest -V
+
+5) Clone a new tree and issue cmake. The package_source CMake target is
+   aggressive about scooping up every file in the tree to include in the package.
+   It does ok with CMake-specific stuff, but any other cruft in the tree is
+   likely to get dumped into the package.
+
+   ::
+
+        git clone git://github.com/PDAL/PDAL.git pdal2
+        cmake .
+
+6) Make the source distribution. If you are doing a release candidate
+   add an RC tag to the invocation.
+
+  ::
+
+      ./package.sh
+      ./package.sh RC1
+
+
+   package.sh will rename the source files if necessary for the release
+   candidate tag and create .md5 checksum files. This script only works on
+   Linux and OS X.
+
+7) Update doc/download.rst to point at the location of the new release
+
+8) Upload the new release to download.osgeo.org:/osgeo/download/pdal
+
+  ::
+
+        scp PDAL-* hobu@download.osgeo.org:/osgeo/download/pdal
+
+9) Tag the release.  Use the ``-f`` switch if you are retagging because you
+   missed something.
+
+  ::
+
+        git tag 1.0.0
+        git push --tags
+
+
+10) Write the release notes. Email the PDAL mailing list with notice of the release.
+
diff --git a/apps/CMakeLists.txt b/apps/CMakeLists.txt
index 39fe64f..79d2b52 100644
--- a/apps/CMakeLists.txt
+++ b/apps/CMakeLists.txt
@@ -95,13 +95,16 @@ if(UNIX OR APPLE)
         PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ)
 
 
-
     # Autoconf compatibility variables to use the same script source.
     configure_file("${CMAKE_CURRENT_SOURCE_DIR}/pdal-config.in"
-                   "${CMAKE_CURRENT_BINARY_DIR}/pdal-config" @ONLY)
+                   "${CMAKE_CURRENT_SOURCE_DIR}/pdal-config" @ONLY)
+
+    file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/pdal-config"
+         DESTINATION "${PDAL_OUTPUT_BIN_DIR}/"
+         FILE_PERMISSIONS  OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
 
-    install(PROGRAMS "${CMAKE_CURRENT_BINARY_DIR}/pdal-config"
-      DESTINATION "${CMAKE_INSTALL_PREFIX}/bin"
-      PERMISSIONS
-      OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+    install(PROGRAMS "${PDAL_OUTPUT_BIN_DIR}/pdal-config"
+            DESTINATION "${CMAKE_INSTALL_PREFIX}/bin"
+            PERMISSIONS
+            OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
 endif()
diff --git a/apps/pdal-config b/apps/pdal-config
new file mode 100644
index 0000000..4876801
--- /dev/null
+++ b/apps/pdal-config
@@ -0,0 +1,68 @@
+#!/bin/sh
+prefix=/usr/local
+exec_prefix=/usr/local/bin
+libdir=/usr/local/lib
+
+
+usage()
+{
+	cat <<EOF
+Usage: pdal-config [OPTIONS]
+Options:
+	[--cflags]
+	[--cxxflags]
+	[--defines]
+	[--includes]
+	[--libs]
+	[--plugin-dir]
+	[--version]
+EOF
+	exit $1
+}
+
+if test $# -eq 0; then
+	usage 1 1>&2
+fi
+
+case $1 in
+  --libs)
+    echo -L/usr/local/lib -lpdalcpp
+    ;;
+
+  --plugin-dir)
+    echo /usr/local/lib
+    ;;
+
+  --prefix)
+    echo ${prefix}
+     ;;
+
+  --ldflags)
+    echo -L${libdir}
+    ;;
+
+  --defines)
+    echo 
+    ;;
+
+  --includes)
+    echo -I/usr/local/include -I/usr/include -I/usr/include/gdal -I/usr/include/libxml2 -I/usr/include -I/usr/include
+    ;;
+
+  --cflags)
+    echo 
+    ;;
+
+  --cxxflags)
+    echo -Wextra -Wall -Wno-unused-parameter -Wno-unused-variable -Wpointer-arith -Wcast-align -Wcast-qual -Wredundant-decls -Wno-long-long -Wno-unknown-pragmas -isystem /usr/local/include -std=c++11 -std=c++11
+    ;;
+
+  --version)
+    echo 1.0.1
+    ;;
+
+  *)
+    usage 1 1>&2
+    ;;
+
+esac
diff --git a/apps/pdal-config.in b/apps/pdal-config.in
index 286d7b3..d328eef 100644
--- a/apps/pdal-config.in
+++ b/apps/pdal-config.in
@@ -9,11 +9,12 @@ usage()
 	cat <<EOF
 Usage: pdal-config [OPTIONS]
 Options:
-	[--libs]
 	[--cflags]
 	[--cxxflags]
 	[--defines]
 	[--includes]
+	[--libs]
+	[--plugin-dir]
 	[--version]
 EOF
 	exit $1
diff --git a/doc/download.rst b/doc/download.rst
index 8f89849..ee3f43f 100644
--- a/doc/download.rst
+++ b/doc/download.rst
@@ -8,6 +8,11 @@ Download
 Current Release(s)
 ------------------------------------------------------------------------------
 
+* **2015-09-10** `PDAL-1.0.0-src.tar.gz`_ (`md5`_)
+
+.. _`PDAL-1.0.0-src.tar.gz`: http://download.osgeo.org/pdal/PDAL-1.0.0-src.tar.gz
+.. _`md5`: http://download.osgeo.org/pdal/PDAL-1.0.0-src.tar.gz.md5
+.. _`DebianGIS`: http://wiki.debian.org/DebianGis
 
 
 Past Releases
@@ -24,20 +29,20 @@ The main repository for PDAL is located on github at https://github.com/PDAL/PDA
 You can obtain a copy of the active source code by issuing the following command::
 
    git clone git@github.com:PDAL/PDAL.git pdal
-    
+
 
 
 Binaries
 ------------------------------------------------------------------------------
 
-A pre-1.0.0 release of PDAL is available via `OSGeo4W`_, however it is only 
-64-bit at this time. 
+A 1.0.0 release of PDAL is available via `OSGeo4W`_; however, it is only
+64-bit at this time.
 
 RPMs
 ................................................................................
 
 RPMs for PDAL are available at http://pdal.s3-website-us-east-1.amazonaws.com/rpms/
-    
+
 .. _`OSGeo4W`: http://trac.osgeo.org/osgeo4w/
 
 
diff --git a/package.sh b/package.sh
new file mode 100755
index 0000000..ffc7104
--- /dev/null
+++ b/package.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+
+# Build PDAL package, including RC number if specified
+# ./package.sh
+# ./package.sh RC1
+
+RC=$1
+
+
+version=`./bin/pdal-config --version`
+
+package_name="PDAL-"$version"-src"
+
+
+
+if [[ "$OSTYPE" == "linux-gnu" ]]; then
+MD5="md5sum"
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+        # Mac OSX
+MD5="md5"
+fi
+
+make dist
+
+extensions=".tar.gz .tar.bz2"
+for ext in $extensions
+do
+
+    filename=$package_name$ext
+    if [ -n "$RC" ]; then
+
+        rcname="PDAL-"$version$RC$ext
+        echo $rcname
+        cp $filename $rcname
+        `$MD5 $rcname > $rcname.md5`
+    fi
+
+    echo "$MD5 $filename > $filename.md5"
+    `$MD5 $filename > $filename.md5`
+
+done
+
+# name=`echo $filename|cut -d'.' -f1-3`
+# extension=`echo $filename|cut -d'.' -f4-`
+# echo $name
+
+
+# newname="$name$RC.$extension"
+# mv $filename "$newname"
+# `md5sum $newname > $newname.md5`
diff --git a/scripts/appveyor/install.cmd b/scripts/appveyor/install.cmd
new file mode 100644
index 0000000..1217d35
--- /dev/null
+++ b/scripts/appveyor/install.cmd
@@ -0,0 +1,29 @@
+@echo off
+
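+:: Note: PDAL_OPTIONAL_COMPONENTS is substituted directly into CMake ON/OFF
+:: switches below, so it is expected to hold ON or OFF (presumably set per
+:: entry in the AppVeyor build matrix).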
+cmake -G "Visual Studio 11 2012 Win64" ^
+    -DBUILD_PLUGIN_ATTRIBUTE=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DBUILD_PLUGIN_CPD=OFF ^
+    -DBUILD_PLUGIN_GREYHOUND=OFF ^
+    -DBUILD_PLUGIN_HEXBIN=OFF ^
+    -DBUILD_PLUGIN_ICEBRIDGE=OFF ^
+    -DBUILD_PLUGIN_MRSID=OFF ^
+    -DBUILD_PLUGIN_NITF=OFF ^
+    -DBUILD_PLUGIN_OCI=OFF ^
+    -DBUILD_PLUGIN_P2G=OFF ^
+    -DBUILD_PLUGIN_PCL=OFF ^
+    -DBUILD_PLUGIN_PGPOINTCLOUD=OFF ^
+    -DBUILD_PLUGIN_SQLITE=OFF ^
+    -DBUILD_PLUGIN_RIVLIB=OFF ^
+    -DBUILD_PLUGIN_PYTHON=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DENABLE_CTEST=OFF ^
+    -DWITH_APPS=ON ^
+    -DWITH_LAZPERF=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DWITH_GEOTIFF=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DWITH_ICONV=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DWITH_LASZIP=%PDAL_OPTIONAL_COMPONENTS% ^
+    -DWITH_LIBXML2=OFF ^
+    -DWITH_TESTS=ON ^
+    -DNUMPY_INCLUDE_DIR=%OSGEODIR%\apps\python27\lib\site-packages\numpy\core\include ^
+    -DNUMPY_VERSION=1.8.1 ^
+    -Dgtest_force_shared_crt=ON ^
+    .
diff --git a/scripts/bash-completion/README b/scripts/bash-completion/README
new file mode 100644
index 0000000..303ebec
--- /dev/null
+++ b/scripts/bash-completion/README
@@ -0,0 +1,3 @@
+Bash commandline completion for pdal.
+See https://www.gnu.org/software/bash/manual/html_node/Programmable-Completion.html
+On debian-based systems, copy the "pdal" file to /etc/bash_completion.d/
diff --git a/scripts/bash-completion/pdal b/scripts/bash-completion/pdal
new file mode 100644
index 0000000..1811373
--- /dev/null
+++ b/scripts/bash-completion/pdal
@@ -0,0 +1,20 @@
+# PDAL commandline completion for bash
+# See http://www.gnu.org/software/bash/manual/bash.html#Programmable-Completion-Builtins
+
+_pdal()
+{
+	local cur cmds
+    
+	COMPREPLY=()
+	cur=${COMP_WORDS[COMP_CWORD]}
+	cmds=$(pdal --list-commands)
+
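+	# First argument: offer pdal subcommand names; any later argument:
+	# fall back to filename completion in the current directory.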
+	if [ $COMP_CWORD -eq 1 ]; then
+		COMPREPLY=( $(compgen -o filenames -W "$cmds" $cur) )
+	else
+		COMPREPLY=( $(compgen -o filenames -G "${cur}*") )
+	fi
+
+	return 0
+}
+complete -F _pdal pdal
diff --git a/scripts/ci/before_install.sh b/scripts/ci/before_install.sh
new file mode 100755
index 0000000..86f0809
--- /dev/null
+++ b/scripts/ci/before_install.sh
@@ -0,0 +1,72 @@
+#!/bin/bash -e
+# Installs requirements for PDAL
+source ./scripts/ci/common.sh
+
+sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 16126D3A3E5C1192
+sudo mv /etc/apt/sources.list.d/pgdg-source.list* /tmp
+sudo apt-get update -qq
+
+sudo apt-get install \
+    software-properties-common \
+    python-software-properties \
+    libeigen3-dev
+sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable -y
+sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
+sudo add-apt-repository ppa:boost-latest/ppa -y
+sudo add-apt-repository ppa:kalakris/cmake -y
+sudo add-apt-repository ppa:pdal/travis -y
+sudo apt-get update -qq
+
+sudo apt-get -qq remove postgis
+
+# From main
+if [[ $PDAL_CMAKE_GENERATOR == "Ninja" ]]
+then
+    sudo apt-get install ninja-build
+fi
+
+# From ppa:ubuntu-toolchain-r/test
+# Install g++-4.8 (even if we're building clang) for updated libstdc++
+sudo apt-get install g++-4.8
+
+# From ppa:boost-latest/ppa
+sudo apt-get install boost1.55
+
+# From ppa:kalakris/cmake
+sudo apt-get install cmake
+
+# From ppa:ubuntugis/ubuntugis-unstable
+sudo apt-get install \
+    libgdal1h \
+    libgdal-dev
+
+if [[ $PDAL_OPTIONAL_COMPONENTS == "all" ]]
+then
+    # From main
+    sudo apt-get install \
+        libflann-dev \
+        libhdf5-serial-dev \
+        libtiff4-dev \
+        postgresql-server-dev-9.1 \
+        python-numpy
+
+    # From ppa:ubuntugis/ppa
+    sudo apt-get install \
+        libgeotiff-dev \
+        libxml2-dev
+
+    # From ppa:ubuntugis/ubuntugis-unstable
+    sudo apt-get install \
+        libgeos++-dev \
+        libproj-dev
+
+    # From ppa:pdal/travis
+    sudo apt-get install \
+        hexboundary \
+        laz-perf \
+        pgpointcloud \
+        points2grid
+fi
+
+gcc --version
+clang --version
diff --git a/scripts/ci/common.sh b/scripts/ci/common.sh
new file mode 100644
index 0000000..6505da4
--- /dev/null
+++ b/scripts/ci/common.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+if [[ "$TRAVIS" != "true" ]] ; then
+	echo "Running this script makes no sense outside of travis-ci.org"
+	exit 1
+fi
+
+# Functions
+tmstamp() { echo -n "[$(date '+%H:%M:%S')]" ; }
+
+# Environment
+NUMTHREADS=2
+
+# if [[ -f /sys/devices/system/cpu/online ]]; then
+# if [[ "$CXX" == "g++" ]]; then
+#     factor = 1000
+# else
+#     factor = 1500
+# fi
+# 	# Calculates 1.5 times physical threads
+# 	NUMTHREADS=$(( ( $(cut -f 2 -d '-' /sys/devices/system/cpu/online) + 1 ) * $factor / 1000  ))
+# fi
+
+echo "NUMTHREADS = $NUMTHREADS"
+#NUMTHREADS=1 # disable MP
+export NUMTHREADS
+
+# pdal_test segfaults when built against external g++-built boost,
+# and I haven't found a good boost package built with clang yet
+if [[ "$CXX" == "clang++" ]]
+then
+    export PDAL_CMAKE_GENERATOR="Ninja"
+else
+    export PDAL_CMAKE_GENERATOR="Unix Makefiles"
+fi
diff --git a/scripts/ci/script.sh b/scripts/ci/script.sh
new file mode 100755
index 0000000..75f0ffa
--- /dev/null
+++ b/scripts/ci/script.sh
@@ -0,0 +1,61 @@
+#!/bin/bash -e
+# Builds and tests PDAL
+source ./scripts/ci/common.sh
+
+mkdir -p _build || exit 1
+cd _build || exit 1
+
+case "$PDAL_OPTIONAL_COMPONENTS" in
+    all)
+        OPTIONAL_COMPONENT_SWITCH=ON
+        ;;
+    none)
+        OPTIONAL_COMPONENT_SWITCH=OFF
+        ;;
+    *)
+        echo "Unrecognized value for PDAL_OPTIONAL_COMPONENTS=$PDAL_OPTIONAL_COMPONENTS"
+        exit 1
+esac
+
+if [[ "$CXX" == "g++" ]]
+then
+    export CXX="g++-4.8"
+fi
+
+cmake \
+    -DBUILD_PLUGIN_ATTRIBUTE=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_CPD=OFF \
+    -DBUILD_PLUGIN_GREYHOUND=OFF \
+    -DBUILD_PLUGIN_HEXBIN=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_ICEBRIDGE=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_MRSID=OFF \
+    -DBUILD_PLUGIN_NITF=OFF \
+    -DBUILD_PLUGIN_OCI=OFF \
+    -DBUILD_PLUGIN_P2G=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_PCL=OFF \
+    -DBUILD_PLUGIN_PGPOINTCLOUD=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_SQLITE=$OPTIONAL_COMPONENT_SWITCH \
+    -DBUILD_PLUGIN_RIVLIB=OFF \
+    -DBUILD_PLUGIN_PYTHON=$OPTIONAL_COMPONENT_SWITCH \
+    -DENABLE_CTEST=OFF \
+    -DWITH_APPS=ON \
+    -DWITH_LAZPERF=$OPTIONAL_COMPONENT_SWITCH \
+    -DWITH_GEOTIFF=$OPTIONAL_COMPONENT_SWITCH \
+    -DWITH_LASZIP=$OPTIONAL_COMPONENT_SWITCH \
+    -DWITH_TESTS=ON \
+    -G "$PDAL_CMAKE_GENERATOR" \
+    ..
+
+if [[ $PDAL_CMAKE_GENERATOR == "Unix Makefiles" ]]
+then
+    MAKECMD=make
+else
+    MAKECMD=ninja
+fi
+
+# Don't use ninja's default number of threads because it can
+# saturate Travis's available memory.
+${MAKECMD} -j ${NUMTHREADS} && \
+    LD_LIBRARY_PATH=./lib && \
+    sudo PGUSER=postgres ctest -V && \
+    sudo ${MAKECMD} install
diff --git a/scripts/linux-install-scripts/datum.sh b/scripts/linux-install-scripts/datum.sh
new file mode 100755
index 0000000..c4554fe
--- /dev/null
+++ b/scripts/linux-install-scripts/datum.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+shopt -s expand_aliases
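+# unzip-stream extracts a zip archive read from stdin into the directory
+# given as its first argument (default "."), avoiding a temporary file.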
+alias unzip-stream="python -c \"import zipfile,sys,StringIO;zipfile.ZipFile(StringIO.StringIO(sys.stdin.read())).extractall(sys.argv[1] if len(sys.argv) == 2 else '.')\""
+
+GTXS=("http://download.osgeo.org/proj/vdatum/vertcon/vertconc.gtx" 
+      "http://download.osgeo.org/proj/vdatum/vertcon/vertcone.gtx" 
+      "http://download.osgeo.org/proj/vdatum/vertcon/vertconw.gtx" 
+      "http://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx" 
+      "http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx" 
+      )
+
+ZIPS=("http://download.osgeo.org/proj/vdatum/usa_geoid2012.zip"
+      "http://download.osgeo.org/proj/vdatum/usa_geoid2009.zip"
+      "http://download.osgeo.org/proj/vdatum/usa_geoid2003.zip"
+      "http://download.osgeo.org/proj/vdatum/usa_geoid1999.zip"
+      )
+
+extract_path="/home/vagrant/datum"
+export_path="/usr/share/proj/"
+for ZIP in "${ZIPS[@]}"
+do
+    :
+    filename=$(basename "$ZIP")
+    extension="${filename##*.}"
+    filename="${filename%.*}"
+    wget $ZIP -O - | unzip-stream $extract_path
+    mv $extract_path/$filename/* $export_path
+    rm -rf $extract_path/$filename
+done
+
+
+cd $export_path
+for GTX in "${GTXS[@]}"
+do
+    :
+    wget $GTX
+done
+
+chmod -R 775 $export_path
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/geowave b/scripts/linux-install-scripts/geowave
new file mode 100755
index 0000000..28f3de8
--- /dev/null
+++ b/scripts/linux-install-scripts/geowave
@@ -0,0 +1,43 @@
+#!/bin/sh
+SERVICE_NAME=geowave
+PATH_TO_JAR=/home/vagrant/geowave/geowave-deploy/target/geowave-deploy-0.8.5-jace.jar
+CLASS_TO_RUN=mil.nga.giat.geowave.demo.app.GeoWaveDemoApp
+PID_PATH_NAME=/tmp/geowave-pid
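+# Simple SysV-style wrapper: start/stop/restart the GeoWaveDemoApp JVM,
+# using $PID_PATH_NAME both as the PID record and as the "running" flag.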
+case $1 in
+    start)
+        echo "Starting $SERVICE_NAME ..."
+        if [ ! -f $PID_PATH_NAME ]; then
+            nohup java -Dinteractive=false -cp $PATH_TO_JAR $CLASS_TO_RUN /tmp 2>> /dev/null >> /dev/null &
+                        echo $! > $PID_PATH_NAME
+            echo "$SERVICE_NAME started ..."
+        else
+            echo "$SERVICE_NAME is already running ..."
+        fi
+    ;;
+    stop)
+        if [ -f $PID_PATH_NAME ]; then
+            PID=$(cat $PID_PATH_NAME);
+            echo "$SERVICE_NAME stopping ..."
+            kill $PID;
+            echo "$SERVICE_NAME stopped ..."
+            rm $PID_PATH_NAME
+        else
+            echo "$SERVICE_NAME is not running ..."
+        fi
+    ;;
+    restart)
+        if [ -f $PID_PATH_NAME ]; then
+            PID=$(cat $PID_PATH_NAME);
+            echo "$SERVICE_NAME stopping ...";
+            kill $PID;
+            echo "$SERVICE_NAME stopped ...";
+            rm $PID_PATH_NAME
+            echo "$SERVICE_NAME starting ..."
+            nohup java -Dinteractive=false -cp $PATH_TO_JAR $CLASS_TO_RUN /tmp 2>> /dev/null >> /dev/null &
+                        echo $! > $PID_PATH_NAME
+            echo "$SERVICE_NAME started ..."
+        else
+            echo "$SERVICE_NAME is not running ..."
+        fi
+    ;;
+esac 
diff --git a/scripts/linux-install-scripts/geowave.sh b/scripts/linux-install-scripts/geowave.sh
new file mode 100755
index 0000000..e2973d7
--- /dev/null
+++ b/scripts/linux-install-scripts/geowave.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -e
+# Installs GeoWave library
+git clone https://github.com/ngageoint/geowave.git geowave
+cd geowave
+git checkout tags/v0.8.5
+mvn clean package -pl geowave-deploy -am -P generate-jace-proxies,linux-amd64-gcc -DskipITs=true -DskipTests=true
+
+# Configure library paths
+chmod 777 /home/vagrant/geowave/geowave-deploy/target/dependency/jace/libjace.so
+sudo ln -s /home/vagrant/geowave/geowave-deploy/target/dependency/jace/libjace.so /usr/lib/libjace.so
+echo "/usr/lib/jvm/java-7-oracle/jre/lib/amd64" | sudo tee --append /etc/ld.so.conf.d/awt.conf
+echo "/usr/lib/jvm/java-7-oracle/jre/lib/amd64/server" | sudo tee --append /etc/ld.so.conf.d/jvm.conf
+sudo ldconfig
+
+# Install GeoWave as a service and configure to run at startup
+# Note: tr removes carriage returns and copies the file
+sudo tr -d '\r' < /vagrant/scripts/linux-install-scripts/geowave > /etc/init.d/geowave
+sudo chmod 755 /etc/init.d/geowave
+sudo update-rc.d geowave defaults
+sudo service geowave start
diff --git a/scripts/linux-install-scripts/hexer.sh b/scripts/linux-install-scripts/hexer.sh
new file mode 100755
index 0000000..7c0577d
--- /dev/null
+++ b/scripts/linux-install-scripts/hexer.sh
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+# Installs hexer library
+
+git clone https://github.com/hobu/hexer.git
+cd hexer
+cmake . -DCMAKE_INSTALL_PREFIX=/usr -DWITH_GDAL=ON
+make
+sudo make install
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/laszip.sh b/scripts/linux-install-scripts/laszip.sh
new file mode 100755
index 0000000..faddc9d
--- /dev/null
+++ b/scripts/linux-install-scripts/laszip.sh
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+# Installs LASzip library
+
+git clone https://github.com/LASzip/LASzip.git laszip
+cd laszip
+cmake . -DCMAKE_INSTALL_PREFIX=/usr
+make
+sudo make install
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/lazperf.sh b/scripts/linux-install-scripts/lazperf.sh
new file mode 100755
index 0000000..c69a129
--- /dev/null
+++ b/scripts/linux-install-scripts/lazperf.sh
@@ -0,0 +1,5 @@
+#!/bin/bash -e
+# Installs laz-perf library
+
+git clone https://github.com/verma/laz-perf.git
+cd laz-perf;  cmake .; make; sudo make install
diff --git a/scripts/linux-install-scripts/libgeotiff.sh b/scripts/linux-install-scripts/libgeotiff.sh
new file mode 100755
index 0000000..2f867ac
--- /dev/null
+++ b/scripts/linux-install-scripts/libgeotiff.sh
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+# Installs requirements for PDAL
+
+# install libgeotiff from sources
+wget http://download.osgeo.org/geotiff/libgeotiff/libgeotiff-1.4.0.tar.gz
+tar -xzf libgeotiff-1.4.0.tar.gz
+cd libgeotiff-1.4.0
+./configure --prefix=/usr && make && sudo make install
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/nitro.sh b/scripts/linux-install-scripts/nitro.sh
new file mode 100755
index 0000000..fa31c25
--- /dev/null
+++ b/scripts/linux-install-scripts/nitro.sh
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+# Installs NITRO library
+
+git clone https://github.com/hobu/nitro.git
+cd nitro
+cmake . -DCMAKE_INSTALL_PREFIX=/usr
+make
+sudo make install
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/p2g.sh b/scripts/linux-install-scripts/p2g.sh
new file mode 100755
index 0000000..4abb7b9
--- /dev/null
+++ b/scripts/linux-install-scripts/p2g.sh
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+# Installs points2grid library
+
+git clone https://github.com/CRREL/points2grid.git
+cd points2grid
+cmake . -DCMAKE_INSTALL_PREFIX=/usr
+make
+sudo make install
\ No newline at end of file
diff --git a/scripts/linux-install-scripts/packages.sh b/scripts/linux-install-scripts/packages.sh
new file mode 100755
index 0000000..a9196b3
--- /dev/null
+++ b/scripts/linux-install-scripts/packages.sh
@@ -0,0 +1,46 @@
+sudo apt-get update -qq
+sudo apt-get install -y -q build-essential
+sudo apt-get install -y python-software-properties software-properties-common python g++ make cmake wget git
+sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
+sudo add-apt-repository -y ppa:webupd8team/java
+sudo apt-get update -qq
+echo debconf shared/accepted-oracle-license-v1-1 select true | sudo debconf-set-selections
+echo debconf shared/accepted-oracle-license-v1-1 seen true | sudo debconf-set-selections
+sudo apt-get install -y -q  git \
+                           cmake \
+                           libgeos-dev \
+                           libgdal-dev \
+                           libpq-dev \
+                           python-all-dev \
+                           python-numpy \
+                           libproj-dev \
+                           libtiff4-dev \
+                           libxml2-dev \
+                           libboost-all-dev \
+                           libbz2-dev \
+                           libsqlite0-dev \
+                           cmake-curses-gui \
+                           screen \
+                           postgis \
+                           libcunit1-dev \
+                           postgresql-server-dev-9.3 \
+                           postgresql-9.3-postgis-2.1 \
+                           libmsgpack-dev \
+                           libgeos++-dev \
+                           vim \
+                           libeigen3-dev \
+                           libflann-dev \
+                           libglew-dev \
+                           libhdf5-serial-dev \
+                           libjsoncpp-dev \
+                           vtk6 \
+                           libvtk6-dev \
+                           gcc-multilib \
+                           g++-multilib \
+                           libglew-dev \
+                           oracle-java7-installer \
+                           maven \
+                           libc6-i386
+
+
+
diff --git a/scripts/linux-install-scripts/pcl.sh b/scripts/linux-install-scripts/pcl.sh
new file mode 100755
index 0000000..c3703d4
--- /dev/null
+++ b/scripts/linux-install-scripts/pcl.sh
@@ -0,0 +1,49 @@
+NUMTHREADS=2
+if [[ -f /sys/devices/system/cpu/online ]]; then
+	# Calculates 1.5 times physical threads
+	NUMTHREADS=$(( ( $(cut -f 2 -d '-' /sys/devices/system/cpu/online) + 1 ) * 15 / 10  ))
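+	# e.g. /sys/devices/system/cpu/online containing "0-3" gives (3+1)*15/10 = 6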
+fi
+#NUMTHREADS=1 # disable MP
+export NUMTHREADS
+
+
+git clone https://github.com/PointCloudLibrary/pcl.git
+cd pcl
+mkdir build
+cd build
+git fetch origin --tags
+git checkout tags/pcl-1.7.2
+cmake .. \
+    -G "Unix Makefiles" \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DCMAKE_INSTALL_PREFIX=/usr \
+    -DBUILD_outofcore:BOOL=OFF \
+    -DWITH_QT:BOOL=ON \
+    -DWITH_VTK:BOOL=OFF \
+    -DWITH_OPENNI:BOOL=OFF \
+    -DWITH_CUDA:BOOL=OFF \
+    -DWITH_LIBUSB:BOOL=OFF \
+    -DBUILD_people:BOOL=OFF \
+    -DBUILD_surface:BOOL=ON \
+    -DBUILD_tools:BOOL=OFF \
+    -DBUILD_visualization:BOOL=OFF \
+    -DBUILD_sample_consensus:BOOL=ON \
+    -DBUILD_tracking:BOOL=OFF \
+    -DBUILD_stereo:BOOL=OFF \
+    -DBUILD_keypoints:BOOL=OFF \
+    -DBUILD_pipeline:BOOL=ON \
+    -DCMAKE_CXX_FLAGS="-std=c++11" \
+    -DBUILD_io:BOOL=ON \
+    -DBUILD_octree:BOOL=ON \
+    -DBUILD_segmentation:BOOL=ON \
+    -DBUILD_search:BOOL=ON \
+    -DBUILD_geometry:BOOL=ON \
+    -DBUILD_filters:BOOL=ON \
+    -DBUILD_features:BOOL=ON \
+    -DBUILD_kdtree:BOOL=ON \
+    -DBUILD_common:BOOL=ON \
+    -DBUILD_ml:BOOL=ON
+
+make -j $NUMTHREADS
+sudo make install
+
diff --git a/scripts/linux-install-scripts/pdal.sh b/scripts/linux-install-scripts/pdal.sh
new file mode 100644
index 0000000..c20aea2
--- /dev/null
+++ b/scripts/linux-install-scripts/pdal.sh
@@ -0,0 +1,47 @@
+NUMTHREADS=2
+if [[ -f /sys/devices/system/cpu/online ]]; then
+	# Calculates 1.5 times physical threads
+	NUMTHREADS=$(( ( $(cut -f 2 -d '-' /sys/devices/system/cpu/online) + 1 ) * 15 / 10  ))
+fi
+#NUMTHREADS=1 # disable MP
+export NUMTHREADS
+
+git clone https://github.com/PDAL/PDAL.git pdal
+cd pdal
+mkdir build
+cd build
+cmake   -G "Unix Makefiles"  \
+        -DCMAKE_BUILD_TYPE=Release \
+        -DCMAKE_INSTALL_PREFIX=/usr \
+        -DWITH_ICONV=ON \
+        -DBUILD_PLUGIN_PCL=ON \
+        -DWITH_LASZIP=ON \
+        -DWITH_GEOTIFF=ON \
+        -DWITH_LAZPERF=ON \
+        -DWITH_LIBXML2=ON \
+        -DBUILD_PLUGIN_PYTHON=ON \
+        -DBUILD_PLUGIN_ATTRIBUTE=ON \
+        -DBUILD_PLUGIN_HEXBIN=ON \
+        -DBUILD_PLUGIN_ICEBRIDGE=ON \
+        -DBUILD_PLUGIN_NITF=ON \
+        -DBUILD_PLUGIN_P2G=ON \
+        -DBUILD_PLUGIN_PGPOINTCLOUD=ON \
+        -DBUILD_PLUGIN_SQLITE=ON \
+        -DBUILD_PLUGIN_GREYHOUND=ON \
+        -DLAZPERF_INCLUDE_DIR=/home/vagrant/laz-perf \
+        -DJSONCPP_ROOT_DIR=/usr/include/jsoncpp \
+        -DBUILD_PLUGIN_GEOWAVE=ON \
+        -DGEOWAVE_RUNTIME_JAR=/home/vagrant/geowave/geowave-deploy/target/geowave-deploy-0.8.5-jace.jar \
+        -DJACE_INCLUDE_DIR=/home/vagrant/geowave/geowave-deploy/target/dependency/jace/include \
+        -DJACE_LIBRARY=/home/vagrant/geowave/geowave-deploy/target/dependency/jace/libjace.so \
+        -DJACE_RUNTIME_JAR=/home/vagrant/geowave/geowave-deploy/target/dependency/jace-core-runtime-1.2.22.jar \
+        -DJAVA_AWT_INCLUDE_PATH=/usr/lib/jvm/java-7-oracle/include \
+        -DJAVA_AWT_LIBRARY=/usr/lib/jvm/java-7-oracle/jre/lib/amd64/libjawt.so \
+        -DJAVA_INCLUDE_PATH=/usr/lib/jvm/java-7-oracle/include \
+        -DJAVA_INCLUDE_PATH2=/usr/lib/jvm/java-7-oracle/include/linux \
+        -DJAVA_JVM_LIBRARY=/usr/lib/jvm/java-7-oracle/jre/lib/amd64/server/libjvm.so \
+        ..
+
+make -j $NUMTHREADS
+sudo make install
+
diff --git a/scripts/linux-install-scripts/pgpointcloud.sh b/scripts/linux-install-scripts/pgpointcloud.sh
new file mode 100755
index 0000000..5b74684
--- /dev/null
+++ b/scripts/linux-install-scripts/pgpointcloud.sh
@@ -0,0 +1,22 @@
+git clone https://github.com/pramsey/pointcloud.git
+cd pointcloud
+cmake -G "Unix Makefiles" \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DCMAKE_CXX_FLAGS="-fPIC" \
+    -DCMAKE_C_FLAGS="-fPIC"
+make
+sudo make install
+sudo service postgresql start
+sudo -u postgres createuser -s vagrant
+sudo -u postgres createdb points
+echo "CREATE EXTENSION postgis;" | sudo -u vagrant psql -d points -U vagrant
+echo "CREATE EXTENSION pointcloud;" | sudo -u vagrant psql -d points -U vagrant
+echo "CREATE EXTENSION pointcloud_postgis;" | sudo -u vagrant psql -d points -U vagrant
+sudo wget http://liblas.org/samples/st-helens-small.las
+sudo wget https://raw.github.com/PDAL/PDAL/master/scripts/vagrant/loadpgpointcloud.xml
+sudo wget https://raw.github.com/PDAL/PDAL/master/scripts/vagrant/readpgpointcloud.xml
+chmod 777 st-helens-small.las
+chmod 777 readpgpointcloud.xml
+chmod 777 loadpgpointcloud.xml
+sudo -u vagrant PDAL_DRIVER_PATH=/usr/lib pdal pipeline --input loadpgpointcloud.xml
+sudo -u vagrant PDAL_DRIVER_PATH=/usr/lib pdal info --input readpgpointcloud.xml -p 0
diff --git a/scripts/linux-install-scripts/startup.sh b/scripts/linux-install-scripts/startup.sh
new file mode 100755
index 0000000..90ab9cb
--- /dev/null
+++ b/scripts/linux-install-scripts/startup.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+service postgresql start
diff --git a/scripts/linux-install-scripts/websocketpp.sh b/scripts/linux-install-scripts/websocketpp.sh
new file mode 100755
index 0000000..36eb86f
--- /dev/null
+++ b/scripts/linux-install-scripts/websocketpp.sh
@@ -0,0 +1,7 @@
+#!/bin/bash -e
+# Installs websocketpp library
+
+git clone https://github.com/zaphoyd/websocketpp.git
+cd websocketpp
+cmake . -DCMAKE_INSTALL_PREFIX=/usr
+sudo make install
diff --git a/scripts/vagrant/loadpgpointcloud.xml b/scripts/vagrant/loadpgpointcloud.xml
new file mode 100644
index 0000000..87d4659
--- /dev/null
+++ b/scripts/vagrant/loadpgpointcloud.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Pipeline version="1.0">
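+    <!-- PDAL XML pipelines nest inside-out: the innermost Reader feeds the
+         chipper Filter, whose output the pgpointcloud Writer consumes. -->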
+    <Writer type="writers.pgpointcloud">
+        <Option name="connection">dbname='points'</Option>
+        <Option name="table">sthsm</Option>
+        <Option name="srid">26910</Option>
+        <Option name="capacity">600</Option>
+        <Filter type="filters.chipper">
+            <Option name="capacity">600</Option>
+	    <Reader type="readers.las">
+		<Option name="filename">/home/vagrant/pointcloud/st-helens-small.las</Option>
+		<Option name="spatialreference">EPSG:26910</Option>
+	    </Reader>
+        </Filter>
+    </Writer>
+</Pipeline>
diff --git a/scripts/vagrant/readpgpointcloud.xml b/scripts/vagrant/readpgpointcloud.xml
new file mode 100644
index 0000000..121373d
--- /dev/null
+++ b/scripts/vagrant/readpgpointcloud.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Pipeline version="1.0">
+    <Writer type="writers.las">
+        <Option name="filename">st-helens-small-out.las</Option>
+        <Option name="spatialreference">EPSG:26910</Option>
+        <Reader type="readers.pgpointcloud">
+            <Option name="connection">dbname='points'</Option>
+            <Option name="table">sthsm</Option>
+            <Option name="column">pa</Option>
+            <Option name="srid">26910</Option>
+            <Option name="where">PC_Intersects(pa, ST_MakeEnvelope(560037.36, 5114846.45, 562667.31, 5118943.24, 26910))</Option>
+        </Reader>
+    </Writer>
+</Pipeline>
diff --git a/src/gitsha.cpp b/src/gitsha.cpp
index ec6a8a6..bd149e6 100644
--- a/src/gitsha.cpp
+++ b/src/gitsha.cpp
@@ -1,3 +1,3 @@
 #include <pdal/gitsha.h>
-#define GIT_SHA1 "89eea705470bbb83c9109532759a012b2c802670"
+#define GIT_SHA1 "a9d6693901c7f8c7bb8e57f640a2954c9115343f"
 const char g_GIT_SHA1[] = GIT_SHA1;
diff --git a/vendor/gtest-1.7.0/scripts/fuse_gtest_files.py b/vendor/gtest-1.7.0/scripts/fuse_gtest_files.py
new file mode 100755
index 0000000..57ef72f
--- /dev/null
+++ b/vendor/gtest-1.7.0/scripts/fuse_gtest_files.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+#
+# Copyright 2009, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""fuse_gtest_files.py v0.2.0
+Fuses Google Test source code into a .h file and a .cc file.
+
+SYNOPSIS
+       fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
+
+       Scans GTEST_ROOT_DIR for Google Test source code, and generates
+       two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
+       Then you can build your tests by adding OUTPUT_DIR to the include
+       search path and linking with OUTPUT_DIR/gtest/gtest-all.cc.  These
+       two files contain everything you need to use Google Test.  Hence
+       you can "install" Google Test by copying them to wherever you want.
+
+       GTEST_ROOT_DIR can be omitted and defaults to the parent
+       directory of the directory holding this script.
+
+EXAMPLES
+       ./fuse_gtest_files.py fused_gtest
+       ./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
+
+This tool is experimental.  In particular, it assumes that there is no
+conditional inclusion of Google Test headers.  Please report any
+problems to googletestframework@googlegroups.com.  You can read
+http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
+more information.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sets
+import sys
+
+# We assume that this file is in the scripts/ directory in the Google
+# Test root directory.
+DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+
+# Regex for matching '#include "gtest/..."'.
+INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gtest/.+)"')
+
+# Regex for matching '#include "src/..."'.
+INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')
+
+# Where to find the source seed files.
+GTEST_H_SEED = 'include/gtest/gtest.h'
+GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
+GTEST_ALL_CC_SEED = 'src/gtest-all.cc'
+
+# Where to put the generated files.
+GTEST_H_OUTPUT = 'gtest/gtest.h'
+GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
+
+
+def VerifyFileExists(directory, relative_path):
+  """Verifies that the given file exists; aborts on failure.
+
+  relative_path is the file path relative to the given directory.
+  """
+
+  if not os.path.isfile(os.path.join(directory, relative_path)):
+    print 'ERROR: Cannot find %s in directory %s.' % (relative_path,
+                                                      directory)
+    print ('Please either specify a valid project root directory '
+           'or omit it on the command line.')
+    sys.exit(1)
+
+
+def ValidateGTestRootDir(gtest_root):
+  """Makes sure gtest_root points to a valid gtest root directory.
+
+  The function aborts the program on failure.
+  """
+
+  VerifyFileExists(gtest_root, GTEST_H_SEED)
+  VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)
+
+
+def VerifyOutputFile(output_dir, relative_path):
+  """Verifies that the given output file path is valid.
+
+  relative_path is relative to the output_dir directory.
+  """
+
+  # Makes sure the output file either doesn't exist or can be overwritten.
+  output_file = os.path.join(output_dir, relative_path)
+  if os.path.exists(output_file):
+    # TODO(wan@google.com): The following user-interaction doesn't
+    # work with automated processes.  We should provide a way for the
+    # Makefile to force overwriting the files.
+    print ('%s already exists in directory %s - overwrite it? (y/N) ' %
+           (relative_path, output_dir))
+    answer = sys.stdin.readline().strip()
+    if answer not in ['y', 'Y']:
+      print 'ABORTED.'
+      sys.exit(1)
+
+  # Makes sure the directory holding the output file exists; creates
+  # it and all its ancestors if necessary.
+  parent_directory = os.path.dirname(output_file)
+  if not os.path.isdir(parent_directory):
+    os.makedirs(parent_directory)
+
+
+def ValidateOutputDir(output_dir):
+  """Makes sure output_dir points to a valid output directory.
+
+  The function aborts the program on failure.
+  """
+
+  VerifyOutputFile(output_dir, GTEST_H_OUTPUT)
+  VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)
+
+
+def FuseGTestH(gtest_root, output_dir):
+  """Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
+
+  output_file = file(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
+  processed_files = sets.Set()  # Holds all gtest headers we've processed.
+
+  def ProcessFile(gtest_header_path):
+    """Processes the given gtest header file."""
+
+    # We don't process the same header twice.
+    if gtest_header_path in processed_files:
+      return
+
+    processed_files.add(gtest_header_path)
+
+    # Reads each line in the given gtest header.
+    for line in file(os.path.join(gtest_root, gtest_header_path), 'r'):
+      m = INCLUDE_GTEST_FILE_REGEX.match(line)
+      if m:
+        # It's '#include "gtest/..."' - let's process it recursively.
+        ProcessFile('include/' + m.group(1))
+      else:
+        # Otherwise we copy the line unchanged to the output file.
+        output_file.write(line)
+
+  ProcessFile(GTEST_H_SEED)
+  output_file.close()
+
+
+def FuseGTestAllCcToFile(gtest_root, output_file):
+  """Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
+
+  processed_files = sets.Set()
+
+  def ProcessFile(gtest_source_file):
+    """Processes the given gtest source file."""
+
+    # We don't process the same #included file twice.
+    if gtest_source_file in processed_files:
+      return
+
+    processed_files.add(gtest_source_file)
+
+    # Reads each line in the given gtest source file.
+    for line in file(os.path.join(gtest_root, gtest_source_file), 'r'):
+      m = INCLUDE_GTEST_FILE_REGEX.match(line)
+      if m:
+        if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
+          # It's '#include "gtest/gtest-spi.h"'.  This file is not
+          # #included by "gtest/gtest.h", so we need to process it.
+          ProcessFile(GTEST_SPI_H_SEED)
+        else:
+          # It's '#include "gtest/foo.h"' where foo is not gtest-spi.
+          # We treat it as '#include "gtest/gtest.h"', as all other
+          # gtest headers are being fused into gtest.h and cannot be
+          # #included directly.
+
+          # There is no need to #include "gtest/gtest.h" more than once.
+          if not GTEST_H_SEED in processed_files:
+            processed_files.add(GTEST_H_SEED)
+            output_file.write('#include "%s"\n' % (GTEST_H_OUTPUT,))
+      else:
+        m = INCLUDE_SRC_FILE_REGEX.match(line)
+        if m:
+          # It's '#include "src/foo"' - let's process it recursively.
+          ProcessFile(m.group(1))
+        else:
+          output_file.write(line)
+
+  ProcessFile(GTEST_ALL_CC_SEED)
+
+
+def FuseGTestAllCc(gtest_root, output_dir):
+  """Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
+
+  output_file = file(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
+  FuseGTestAllCcToFile(gtest_root, output_file)
+  output_file.close()
+
+
+def FuseGTest(gtest_root, output_dir):
+  """Fuses gtest.h and gtest-all.cc."""
+
+  ValidateGTestRootDir(gtest_root)
+  ValidateOutputDir(output_dir)
+
+  FuseGTestH(gtest_root, output_dir)
+  FuseGTestAllCc(gtest_root, output_dir)
+
+
+def main():
+  argc = len(sys.argv)
+  if argc == 2:
+    # fuse_gtest_files.py OUTPUT_DIR
+    FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
+  elif argc == 3:
+    # fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
+    FuseGTest(sys.argv[1], sys.argv[2])
+  else:
+    print __doc__
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/vendor/gtest-1.7.0/scripts/gen_gtest_pred_impl.py b/vendor/gtest-1.7.0/scripts/gen_gtest_pred_impl.py
new file mode 100755
index 0000000..3e7ab04
--- /dev/null
+++ b/vendor/gtest-1.7.0/scripts/gen_gtest_pred_impl.py
@@ -0,0 +1,730 @@
+#!/usr/bin/env python
+#
+# Copyright 2006, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""gen_gtest_pred_impl.py v0.1
+
+Generates the implementation of Google Test predicate assertions and
+accompanying tests.
+
+Usage:
+
+  gen_gtest_pred_impl.py MAX_ARITY
+
+where MAX_ARITY is a positive integer.
+
+The command generates the implementation of up-to MAX_ARITY-ary
+predicate assertions, and writes it to file gtest_pred_impl.h in the
+directory where the script is.  It also generates the accompanying
+unit test in file gtest_pred_impl_unittest.cc.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import sys
+import time
+
+# Where this script is.
+SCRIPT_DIR = os.path.dirname(sys.argv[0])
+
+# Where to store the generated header.
+HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
+
+# Where to store the generated unit test.
+UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
+
+
+def HeaderPreamble(n):
+  """Returns the preamble for the header file.
+
+  Args:
+    n:  the maximum arity of the predicate macros to be generated.
+  """
+
+  # A map that defines the values used in the preamble template.
+  DEFS = {
+    'today' : time.strftime('%m/%d/%Y'),
+    'year' : time.strftime('%Y'),
+    'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
+    'n' : n
+    }
+
+  return (
+"""// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on %(today)s by command
+// '%(command)s'.  DO NOT EDIT BY HAND!
+//
+// Implements a family of generic predicate assertion macros.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+
+// Makes sure this header is not included before gtest.h.
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+# error Do not include gtest_pred_impl.h directly.  Include gtest.h instead.
+#endif  // GTEST_INCLUDE_GTEST_GTEST_H_
+
+// This header implements a family of generic predicate assertion
+// macros:
+//
+//   ASSERT_PRED_FORMAT1(pred_format, v1)
+//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)
+//   ...
+//
+// where pred_format is a function or functor that takes n (in the
+// case of ASSERT_PRED_FORMATn) values and their source expression
+// text, and returns a testing::AssertionResult.  See the definition
+// of ASSERT_EQ in gtest.h for an example.
+//
+// If you don't care about formatting, you can use the more
+// restrictive version:
+//
+//   ASSERT_PRED1(pred, v1)
+//   ASSERT_PRED2(pred, v1, v2)
+//   ...
+//
+// where pred is an n-ary function or functor that returns bool,
+// and the values v1, v2, ..., must support the << operator for
+// streaming to std::ostream.
+//
+// We also define the EXPECT_* variations.
+//
+// For now we only support predicates whose arity is at most %(n)s.
+// Please email googletestframework@googlegroups.com if you need
+// support for higher arities.
+
+// GTEST_ASSERT_ is the basic statement to which all of the assertions
+// in this file reduce.  Don't use this in your code.
+
+#define GTEST_ASSERT_(expression, on_failure) \\
+  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
+  if (const ::testing::AssertionResult gtest_ar = (expression)) \\
+    ; \\
+  else \\
+    on_failure(gtest_ar.failure_message())
+""" % DEFS)
+
+
+def Arity(n):
+  """Returns the English name of the given arity."""
+
+  if n < 0:
+    return None
+  elif n <= 3:
+    return ['nullary', 'unary', 'binary', 'ternary'][n]
+  else:
+    return '%s-ary' % n
+
+
+def Title(word):
+  """Returns the given word in title case.  The difference between
+  this and string's title() method is that Title('4-ary') is '4-ary'
+  while '4-ary'.title() is '4-Ary'."""
+
+  return word[0].upper() + word[1:]
+
+
+def OneTo(n):
+  """Returns the list [1, 2, 3, ..., n]."""
+
+  return range(1, n + 1)
+
+
+def Iter(n, format, sep=''):
+  """Given a positive integer n, a format string that contains 0 or
+  more '%s' format specs, and optionally a separator string, returns
+  the join of n strings, each formatted with the format string on an
+  iterator ranged from 1 to n.
+
+  Example:
+
+  Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
+  """
+
+  # How many '%s' specs are in format?
+  spec_count = len(format.split('%s')) - 1
+  return sep.join([format % (spec_count * (i,)) for i in OneTo(n)])
+
+
+def ImplementationForArity(n):
+  """Returns the implementation of n-ary predicate assertions."""
+
+  # A map the defines the values used in the implementation template.
+  DEFS = {
+    'n' : str(n),
+    'vs' : Iter(n, 'v%s', sep=', '),
+    'vts' : Iter(n, '#v%s', sep=', '),
+    'arity' : Arity(n),
+    'Arity' : Title(Arity(n))
+    }
+
+  impl = """
+
+// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
+// this in your code.
+template <typename Pred""" % DEFS
+
+  impl += Iter(n, """,
+          typename T%s""")
+
+  impl += """>
+AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS
+
+  impl += Iter(n, """,
+                                  const char* e%s""")
+
+  impl += """,
+                                  Pred pred"""
+
+  impl += Iter(n, """,
+                                  const T%s& v%s""")
+
+  impl += """) {
+  if (pred(%(vs)s)) return AssertionSuccess();
+
+""" % DEFS
+
+  impl += '  return AssertionFailure() << pred_text << "("'
+
+  impl += Iter(n, """
+                            << e%s""", sep=' << ", "')
+
+  impl += ' << ") evaluates to false, where"'
+
+  impl += Iter(n, """
+                            << "\\n" << e%s << " evaluates to " << v%s""")
+
+  impl += """;
+}
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
+// Don't use this in your code.
+#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
+  GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
+                on_failure)
+
+// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
+// this in your code.
+#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
+  GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS
+
+  impl += Iter(n, """, \\
+                                             #v%s""")
+
+  impl += """, \\
+                                             pred"""
+
+  impl += Iter(n, """, \\
+                                             v%s""")
+
+  impl += """), on_failure)
+
+// %(Arity)s predicate assertion macros.
+#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
+  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
+#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
+  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
+#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
+  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
+#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
+  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
+
+""" % DEFS
+
+  return impl
+
+
+def HeaderPostamble():
+  """Returns the postamble for the header file."""
+
+  return """
+
+#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
+"""
+
+
+def GenerateFile(path, content):
+  """Given a file path and a content string, overwrites it with the
+  given content."""
+
+  print 'Updating file %s . . .' % path
+
+  f = file(path, 'w+')
+  print >>f, content,
+  f.close()
+
+  print 'File %s has been updated.' % path
+
+
+def GenerateHeader(n):
+  """Given the maximum arity n, updates the header file that implements
+  the predicate assertions."""
+
+  GenerateFile(HEADER,
+               HeaderPreamble(n)
+               + ''.join([ImplementationForArity(i) for i in OneTo(n)])
+               + HeaderPostamble())
+
+
+def UnitTestPreamble():
+  """Returns the preamble for the unit test file."""
+
+  # A map that defines the values used in the preamble template.
+  DEFS = {
+    'today' : time.strftime('%m/%d/%Y'),
+    'year' : time.strftime('%Y'),
+    'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
+    }
+
+  return (
+"""// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This file is AUTOMATICALLY GENERATED on %(today)s by command
+// '%(command)s'.  DO NOT EDIT BY HAND!
+
+// Regression test for gtest_pred_impl.h
+//
+// This file is generated by a script and quite long.  If you intend to
+// learn how Google Test works by reading its unit tests, read
+// gtest_unittest.cc instead.
+//
+// This is intended as a regression test for the Google Test predicate
+// assertions.  We compile it as part of the gtest_unittest target
+// only to keep the implementation tidy and compact, as it is quite
+// involved to set up the stage for testing Google Test using Google
+// Test itself.
+//
+// Currently, gtest_unittest takes ~11 seconds to run in the testing
+// daemon.  In the future, if it grows too large and needs much more
+// time to finish, we should consider separating this file into a
+// stand-alone regression test.
+
+#include <iostream>
+
+#include "gtest/gtest.h"
+#include "gtest/gtest-spi.h"
+
+// A user-defined data type.
+struct Bool {
+  explicit Bool(int val) : value(val != 0) {}
+
+  bool operator>(int n) const { return value > Bool(n).value; }
+
+  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
+
+  bool operator==(const Bool& rhs) const { return value == rhs.value; }
+
+  bool value;
+};
+
+// Enables Bool to be used in assertions.
+std::ostream& operator<<(std::ostream& os, const Bool& x) {
+  return os << (x.value ? "true" : "false");
+}
+
+""" % DEFS)
+
+
+def TestsForArity(n):
+  """Returns the tests for n-ary predicate assertions."""
+
+  # A map that defines the values used in the template for the tests.
+  DEFS = {
+    'n' : n,
+    'es' : Iter(n, 'e%s', sep=', '),
+    'vs' : Iter(n, 'v%s', sep=', '),
+    'vts' : Iter(n, '#v%s', sep=', '),
+    'tvs' : Iter(n, 'T%s v%s', sep=', '),
+    'int_vs' : Iter(n, 'int v%s', sep=', '),
+    'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
+    'types' : Iter(n, 'typename T%s', sep=', '),
+    'v_sum' : Iter(n, 'v%s', sep=' + '),
+    'arity' : Arity(n),
+    'Arity' : Title(Arity(n)),
+    }
+
+  tests = (
+"""// Sample functions/functors for testing %(arity)s predicate assertions.
+
+// A %(arity)s predicate function.
+template <%(types)s>
+bool PredFunction%(n)s(%(tvs)s) {
+  return %(v_sum)s > 0;
+}
+
+// The following two functions are needed to circumvent a bug in
+// gcc 2.95.3, which sometimes has problems with the above template
+// function.
+bool PredFunction%(n)sInt(%(int_vs)s) {
+  return %(v_sum)s > 0;
+}
+bool PredFunction%(n)sBool(%(Bool_vs)s) {
+  return %(v_sum)s > 0;
+}
+""" % DEFS)
+
+  tests += """
+// A %(arity)s predicate functor.
+struct PredFunctor%(n)s {
+  template <%(types)s>
+  bool operator()(""" % DEFS
+
+  tests += Iter(n, 'const T%s& v%s', sep=""",
+                  """)
+
+  tests += """) {
+    return %(v_sum)s > 0;
+  }
+};
+""" % DEFS
+
+  tests += """
+// A %(arity)s predicate-formatter function.
+template <%(types)s>
+testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
+
+  tests += Iter(n, 'const char* e%s', sep=""",
+                                             """)
+
+  tests += Iter(n, """,
+                                             const T%s& v%s""")
+
+  tests += """) {
+  if (PredFunction%(n)s(%(vs)s))
+    return testing::AssertionSuccess();
+
+  return testing::AssertionFailure()
+      << """ % DEFS
+
+  tests += Iter(n, 'e%s', sep=' << " + " << ')
+
+  tests += """
+      << " is expected to be positive, but evaluates to "
+      << %(v_sum)s << ".";
+}
+""" % DEFS
+
+  tests += """
+// A %(arity)s predicate-formatter functor.
+struct PredFormatFunctor%(n)s {
+  template <%(types)s>
+  testing::AssertionResult operator()(""" % DEFS
+
+  tests += Iter(n, 'const char* e%s', sep=""",
+                                      """)
+
+  tests += Iter(n, """,
+                                      const T%s& v%s""")
+
+  tests += """) const {
+    return PredFormatFunction%(n)s(%(es)s, %(vs)s);
+  }
+};
+""" % DEFS
+
+  tests += """
+// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
+
+class Predicate%(n)sTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    expected_to_finish_ = true;
+    finished_ = false;""" % DEFS
+
+  tests += """
+    """ + Iter(n, 'n%s_ = ') + """0;
+  }
+"""
+
+  tests += """
+  virtual void TearDown() {
+    // Verifies that each of the predicate's arguments was evaluated
+    // exactly once."""
+
+  tests += ''.join(["""
+    EXPECT_EQ(1, n%s_) <<
+        "The predicate assertion didn't evaluate argument %s "
+        "exactly once.";""" % (i, i + 1) for i in OneTo(n)])
+
+  tests += """
+
+    // Verifies that the control flow in the test function is as expected.
+    if (expected_to_finish_ && !finished_) {
+      FAIL() << "The predicate assertion unexpectedly aborted the test.";
+    } else if (!expected_to_finish_ && finished_) {
+      FAIL() << "The failed predicate assertion didn't abort the test "
+                "as expected.";
+    }
+  }
+
+  // true iff the test function is expected to run to completion.
+  static bool expected_to_finish_;
+
+  // true iff the test function ran to completion.
+  static bool finished_;
+""" % DEFS
+
+  tests += Iter(n, """
+  static int n%s_;""")
+
+  tests += """
+};
+
+bool Predicate%(n)sTest::expected_to_finish_;
+bool Predicate%(n)sTest::finished_;
+""" % DEFS
+
+  tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
+""") % DEFS
+
+  tests += """
+typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
+typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
+typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
+typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
+""" % DEFS
+
+  def GenTest(use_format, use_assert, expect_failure,
+              use_functor, use_user_type):
+    """Returns the test for a predicate assertion macro.
+
+    Args:
+      use_format:     true iff the assertion is a *_PRED_FORMAT*.
+      use_assert:     true iff the assertion is an ASSERT_*.
+      expect_failure: true iff the assertion is expected to fail.
+      use_functor:    true iff the first argument of the assertion is
+                      a functor (as opposed to a function).
+      use_user_type:  true iff the predicate functor/function takes
+                      argument(s) of a user-defined type.
+
+    Example:
+
+      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
+      of a successful EXPECT_PRED_FORMATn() that takes a functor
+      whose arguments have built-in types."""
+
+    if use_assert:
+      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
+                        # that identifier here.
+    else:
+      assrt = 'EXPECT'
+
+    assertion = assrt + '_PRED'
+
+    if use_format:
+      pred_format = 'PredFormat'
+      assertion += '_FORMAT'
+    else:
+      pred_format = 'Pred'
+
+    assertion += '%(n)s' % DEFS
+
+    if use_functor:
+      pred_format_type = 'functor'
+      pred_format += 'Functor%(n)s()'
+    else:
+      pred_format_type = 'function'
+      pred_format += 'Function%(n)s'
+      if not use_format:
+        if use_user_type:
+          pred_format += 'Bool'
+        else:
+          pred_format += 'Int'
+
+    test_name = pred_format_type.title()
+
+    if use_user_type:
+      arg_type = 'user-defined type (Bool)'
+      test_name += 'OnUserType'
+      if expect_failure:
+        arg = 'Bool(n%s_++)'
+      else:
+        arg = 'Bool(++n%s_)'
+    else:
+      arg_type = 'built-in type (int)'
+      test_name += 'OnBuiltInType'
+      if expect_failure:
+        arg = 'n%s_++'
+      else:
+        arg = '++n%s_'
+
+    if expect_failure:
+      successful_or_failed = 'failed'
+      expected_or_not = 'expected.'
+      test_name +=  'Failure'
+    else:
+      successful_or_failed = 'successful'
+      expected_or_not = 'UNEXPECTED!'
+      test_name +=  'Success'
+
+    # A map that defines the values used in the test template.
+    defs = DEFS.copy()
+    defs.update({
+      'assert' : assrt,
+      'assertion' : assertion,
+      'test_name' : test_name,
+      'pf_type' : pred_format_type,
+      'pf' : pred_format,
+      'arg_type' : arg_type,
+      'arg' : arg,
+      'successful' : successful_or_failed,
+      'expected' : expected_or_not,
+      })
+
+    test = """
+// Tests a %(successful)s %(assertion)s where the
+// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
+TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
+
+    indent = (len(assertion) + 3)*' '
+    extra_indent = ''
+
+    if expect_failure:
+      extra_indent = '  '
+      if use_assert:
+        test += """
+  expected_to_finish_ = false;
+  EXPECT_FATAL_FAILURE({  // NOLINT"""
+      else:
+        test += """
+  EXPECT_NONFATAL_FAILURE({  // NOLINT"""
+
+    test += '\n' + extra_indent + """  %(assertion)s(%(pf)s""" % defs
+
+    test = test % defs
+    test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
+    test += ');\n' + extra_indent + '  finished_ = true;\n'
+
+    if expect_failure:
+      test += '  }, "");\n'
+
+    test += '}\n'
+    return test
+
+  # Generates tests for all 2**5 = 32 combinations.
+  tests += ''.join([GenTest(use_format, use_assert, expect_failure,
+                            use_functor, use_user_type)
+                    for use_format in [0, 1]
+                    for use_assert in [0, 1]
+                    for expect_failure in [0, 1]
+                    for use_functor in [0, 1]
+                    for use_user_type in [0, 1]
+                    ])
+
+  return tests
+
+
+def UnitTestPostamble():
+  """Returns the postamble for the tests."""
+
+  return ''
+
+
+def GenerateUnitTest(n):
+  """Returns the tests for up-to n-ary predicate assertions."""
+
+  GenerateFile(UNIT_TEST,
+               UnitTestPreamble()
+               + ''.join([TestsForArity(i) for i in OneTo(n)])
+               + UnitTestPostamble())
+
+
+def _Main():
+  """The entry point of the script.  Generates the header file and its
+  unit test."""
+
+  if len(sys.argv) != 2:
+    print __doc__
+    print 'Author: ' + __author__
+    sys.exit(1)
+
+  n = int(sys.argv[1])
+  GenerateHeader(n)
+  GenerateUnitTest(n)
+
+
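+# Illustrative usage (inferred from _Main above): running
+# "gen_gtest_pred_impl.py 5" regenerates gtest_pred_impl.h and its
+# regression test with predicate assertions of arity 1 through 5.
+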
+if __name__ == '__main__':
+  _Main()
diff --git a/vendor/gtest-1.7.0/scripts/gtest-config.in b/vendor/gtest-1.7.0/scripts/gtest-config.in
new file mode 100755
index 0000000..780f843
--- /dev/null
+++ b/vendor/gtest-1.7.0/scripts/gtest-config.in
@@ -0,0 +1,274 @@
+#!/bin/sh
+
+# These variables are automatically filled in by the configure script.
+name="@PACKAGE_TARNAME@"
+version="@PACKAGE_VERSION@"
+
+show_usage()
+{
+  echo "Usage: gtest-config [OPTIONS...]"
+}
+
+show_help()
+{
+  show_usage
+  cat <<\EOF
+
+The `gtest-config' script provides access to the necessary compile and linking
+flags to connect with the Google C++ Testing Framework, both in a build prior
+to installation, and on the system proper after installation. The installation
+overrides may be issued in combination with any other queries, but will only
+affect installation queries if called on a built but not installed gtest. The
+installation queries may not be issued with any other types of queries, and
+only one installation query may be made at a time. Version queries and
+compiler flag queries may each be combined as desired, but the two kinds may
+not be mixed with one another. Different version queries are always combined
+with logical "and" semantics, and only the last of any particular query is
+used while all previous ones are ignored. All versions must be specified as a
+sequence of numbers separated by periods. Compiler flag queries output the
+union of the sets of flags when combined.
+
+ Examples:
+  gtest-config --min-version=1.0 || echo "Insufficient Google Test version."
+
+  g++ $(gtest-config --cppflags --cxxflags) -o foo.o -c foo.cpp
+  g++ $(gtest-config --ldflags --libs) -o foo foo.o
+
+  # When using a built but not installed Google Test:
+  g++ $(../../my_gtest_build/scripts/gtest-config ...) ...
+
+  # When using an installed Google Test, but with installation overrides:
+  export GTEST_PREFIX="/opt"
+  g++ $(gtest-config --libdir="/opt/lib64" ...) ...
+
+ Help:
+  --usage                    brief usage information
+  --help                     display this help message
+
+ Installation Overrides:
+  --prefix=<dir>             overrides the installation prefix
+  --exec-prefix=<dir>        overrides the executable installation prefix
+  --libdir=<dir>             overrides the library installation prefix
+  --includedir=<dir>         overrides the header file installation prefix
+
+ Installation Queries:
+  --prefix                   installation prefix
+  --exec-prefix              executable installation prefix
+  --libdir                   library installation directory
+  --includedir               header file installation directory
+  --version                  the version of the Google Test installation
+
+ Version Queries:
+  --min-version=VERSION      return 0 if the version is at least VERSION
+  --exact-version=VERSION    return 0 if the version is exactly VERSION
+  --max-version=VERSION      return 0 if the version is at most VERSION
+
+ Compilation Flag Queries:
+  --cppflags                 compile flags specific to the C-like preprocessors
+  --cxxflags                 compile flags appropriate for C++ programs
+  --ldflags                  linker flags
+  --libs                     libraries for linking
+
+EOF
+}
+
+# This function bounds our version with a min and a max. It uses some clever
+# POSIX-compliant variable expansion to portably do all the work in the shell
+# and avoid any dependency on a particular "sed" or "awk" implementation.
+# Note that it only ever compares the first 3 components of a version; further
+# components will be cleanly stripped off. All versions must be
+# unadorned, so "v1.0" will *not* work. The minimum version must be in $1, and
+# the max in $2. TODO(chandlerc@google.com): If this ever breaks, we should
+# investigate expanding this via autom4te from AS_VERSION_COMPARE rather than
+# continuing to maintain our own shell version.
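+#
+# For example, with version="1.7.0": ${version%%.*} strips the longest suffix
+# matching ".*" and yields "1", while ${version#*.} strips the shortest prefix
+# matching "*." and yields "7.0"; applying %%.* to that result in turn yields
+# the minor version "7".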
+check_versions()
+{
+  major_version=${version%%.*}
+  minor_version="0"
+  point_version="0"
+  if test "${version#*.}" != "${version}"; then
+    minor_version=${version#*.}
+    minor_version=${minor_version%%.*}
+  fi
+  if test "${version#*.*.}" != "${version}"; then
+    point_version=${version#*.*.}
+    point_version=${point_version%%.*}
+  fi
+
+  min_version="$1"
+  min_major_version=${min_version%%.*}
+  min_minor_version="0"
+  min_point_version="0"
+  if test "${min_version#*.}" != "${min_version}"; then
+    min_minor_version=${min_version#*.}
+    min_minor_version=${min_minor_version%%.*}
+  fi
+  if test "${min_version#*.*.}" != "${min_version}"; then
+    min_point_version=${min_version#*.*.}
+    min_point_version=${min_point_version%%.*}
+  fi
+
+  max_version="$2"
+  max_major_version=${max_version%%.*}
+  max_minor_version="0"
+  max_point_version="0"
+  if test "${max_version#*.}" != "${max_version}"; then
+    max_minor_version=${max_version#*.}
+    max_minor_version=${max_minor_version%%.*}
+  fi
+  if test "${max_version#*.*.}" != "${max_version}"; then
+    max_point_version=${max_version#*.*.}
+    max_point_version=${max_point_version%%.*}
+  fi
+
+  test $(($major_version)) -lt $(($min_major_version)) && exit 1
+  if test $(($major_version)) -eq $(($min_major_version)); then
+    test $(($minor_version)) -lt $(($min_minor_version)) && exit 1
+    if test $(($minor_version)) -eq $(($min_minor_version)); then
+      test $(($point_version)) -lt $(($min_point_version)) && exit 1
+    fi
+  fi
+
+  test $(($major_version)) -gt $(($max_major_version)) && exit 1
+  if test $(($major_version)) -eq $(($max_major_version)); then
+    test $(($minor_version)) -gt $(($max_minor_version)) && exit 1
+    if test $(($minor_version)) -eq $(($max_minor_version)); then
+      test $(($point_version)) -gt $(($max_point_version)) && exit 1
+    fi
+  fi
+
+  exit 0
+}
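+
+# For example (illustrative), "check_versions 1.6.0 9999.9999.9999" exits the
+# script with status 0 exactly when the configured version is at least 1.6.0.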
+
+# Show the usage line when no arguments are specified.
+if test $# -eq 0; then
+  show_usage
+  exit 1
+fi
+
+while test $# -gt 0; do
+  case $1 in
+    --usage)          show_usage;         exit 0;;
+    --help)           show_help;          exit 0;;
+
+    # Installation overrides
+    --prefix=*)       GTEST_PREFIX=${1#--prefix=};;
+    --exec-prefix=*)  GTEST_EXEC_PREFIX=${1#--exec-prefix=};;
+    --libdir=*)       GTEST_LIBDIR=${1#--libdir=};;
+    --includedir=*)   GTEST_INCLUDEDIR=${1#--includedir=};;
+
+    # Installation queries
+    --prefix|--exec-prefix|--libdir|--includedir|--version)
+      if test -n "${do_query}"; then
+        show_usage
+        exit 1
+      fi
+      do_query=${1#--}
+      ;;
+
+    # Version checking
+    --min-version=*)
+      do_check_versions=yes
+      min_version=${1#--min-version=}
+      ;;
+    --max-version=*)
+      do_check_versions=yes
+      max_version=${1#--max-version=}
+      ;;
+    --exact-version=*)
+      do_check_versions=yes
+      exact_version=${1#--exact-version=}
+      ;;
+
+    # Compiler flag output
+    --cppflags)       echo_cppflags=yes;;
+    --cxxflags)       echo_cxxflags=yes;;
+    --ldflags)        echo_ldflags=yes;;
+    --libs)           echo_libs=yes;;
+
+    # Everything else is an error
+    *)                show_usage;         exit 1;;
+  esac
+  shift
+done
+
+# These have defaults filled in by the configure script but can also be
+# overridden by environment variables or command line parameters.
+prefix="${GTEST_PREFIX:- at prefix@}"
+exec_prefix="${GTEST_EXEC_PREFIX:- at exec_prefix@}"
+libdir="${GTEST_LIBDIR:- at libdir@}"
+includedir="${GTEST_INCLUDEDIR:- at includedir@}"
+
+# We try to detect if our binary is not located at its installed location. If
+# it's not, we provide variables pointing to the source and build tree rather
+# than to the install tree. This allows building against a just-built gtest
+# rather than an installed gtest.
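+#
+# For example (illustrative), with bindir="/usr/bin" and the script running
+# from "/home/user/gtest-build/scripts", stripping the bindir suffix leaves
+# the path unchanged, so the build-tree branch below is taken.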
+bindir="@bindir@"
+this_relative_bindir=`dirname $0`
+this_bindir=`cd ${this_relative_bindir}; pwd -P`
+if test "${this_bindir}" = "${this_bindir%${bindir}}"; then
+  # The path to the script doesn't end in the bindir sequence from Autoconf,
+  # so assume that we are in a build tree.
+  build_dir=`dirname ${this_bindir}`
+  src_dir=`cd ${this_bindir}; cd @top_srcdir@; pwd -P`
+
+  # TODO(chandlerc@google.com): This is a dangerous dependency on libtool; we
+  # should work to remove it, and/or remove libtool altogether, replacing it
+  # with direct references to the library and a link path.
+  gtest_libs="${build_dir}/lib/libgtest.la @PTHREAD_CFLAGS@ @PTHREAD_LIBS@"
+  gtest_ldflags=""
+
+  # We provide hooks to include from either the source or build dir, where the
+  # build dir is always preferred. This will potentially allow us to write
+  # build rules for generated headers and have them automatically be preferred
+  # over provided versions.
+  gtest_cppflags="-I${build_dir}/include -I${src_dir}/include"
+  gtest_cxxflags="@PTHREAD_CFLAGS@"
+else
+  # We're using an installed gtest, although it may be staged under some
+  # prefix. Assume (as our own libraries do) that we can resolve the prefix
+  # and that the libraries are present in the dynamic link paths.
+  gtest_ldflags="-L${libdir}"
+  gtest_libs="-l${name} @PTHREAD_CFLAGS@ @PTHREAD_LIBS@"
+  gtest_cppflags="-I${includedir}"
+  gtest_cxxflags="@PTHREAD_CFLAGS@"
+fi
+
+# Do an installation query if requested.
+if test -n "$do_query"; then
+  case $do_query in
+    prefix)           echo $prefix;       exit 0;;
+    exec-prefix)      echo $exec_prefix;  exit 0;;
+    libdir)           echo $libdir;       exit 0;;
+    includedir)       echo $includedir;   exit 0;;
+    version)          echo $version;      exit 0;;
+    *)                show_usage;         exit 1;;
+  esac
+fi
+
+# Do a version check if requested.
+if test "$do_check_versions" = "yes"; then
+  # Make sure we didn't receive a bad combination of parameters.
+  test "$echo_cppflags" = "yes" && show_usage && exit 1
+  test "$echo_cxxflags" = "yes" && show_usage && exit 1
+  test "$echo_ldflags" = "yes"  && show_usage && exit 1
+  test "$echo_libs" = "yes"     && show_usage && exit 1
+
+  if test "$exact_version" != ""; then
+    check_versions $exact_version $exact_version
+    # unreachable
+  else
+    check_versions ${min_version:-0.0.0} ${max_version:-9999.9999.9999}
+    # unreachable
+  fi
+fi
+
+# Do the output in the correct order so that these can be used inline in
+# a compiler invocation.
+output=""
+test "$echo_cppflags" = "yes" && output="$output $gtest_cppflags"
+test "$echo_cxxflags" = "yes" && output="$output $gtest_cxxflags"
+test "$echo_ldflags" = "yes"  && output="$output $gtest_ldflags"
+test "$echo_libs" = "yes"     && output="$output $gtest_libs"
+echo $output
+
+exit 0
diff --git a/vendor/gtest-1.7.0/scripts/pump.py b/vendor/gtest-1.7.0/scripts/pump.py
new file mode 100755
index 0000000..5efb653
--- /dev/null
+++ b/vendor/gtest-1.7.0/scripts/pump.py
@@ -0,0 +1,855 @@
+#!/usr/bin/env python
+#
+# Copyright 2008, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""pump v0.2.0 - Pretty Useful for Meta Programming.
+
+A tool for preprocessor meta programming.  Useful for generating
+repetitive boilerplate code.  Especially useful for writing C++
+classes, functions, macros, and templates that need to work with
+various numbers of arguments.
+
+USAGE:
+       pump.py SOURCE_FILE
+
+EXAMPLES:
+       pump.py foo.cc.pump
+         Converts foo.cc.pump to foo.cc.
+
+GRAMMAR:
+       CODE ::= ATOMIC_CODE*
+       ATOMIC_CODE ::= $var ID = EXPRESSION
+           | $var ID = [[ CODE ]]
+           | $range ID EXPRESSION..EXPRESSION
+           | $for ID SEPARATOR [[ CODE ]]
+           | $($)
+           | $ID
+           | $(EXPRESSION)
+           | $if EXPRESSION [[ CODE ]] ELSE_BRANCH
+           | [[ CODE ]]
+           | RAW_CODE
+       SEPARATOR ::= RAW_CODE | EMPTY
+       ELSE_BRANCH ::= $else [[ CODE ]]
+           | $elif EXPRESSION [[ CODE ]] ELSE_BRANCH
+           | EMPTY
+       EXPRESSION has Python syntax.
+"""
+
+__author__ = 'wan@google.com (Zhanyong Wan)'
+
+import os
+import re
+import sys
+
+
+TOKEN_TABLE = [
+    (re.compile(r'\$var\s+'), '$var'),
+    (re.compile(r'\$elif\s+'), '$elif'),
+    (re.compile(r'\$else\s+'), '$else'),
+    (re.compile(r'\$for\s+'), '$for'),
+    (re.compile(r'\$if\s+'), '$if'),
+    (re.compile(r'\$range\s+'), '$range'),
+    (re.compile(r'\$[_A-Za-z]\w*'), '$id'),
+    (re.compile(r'\$\(\$\)'), '$($)'),
+    (re.compile(r'\$'), '$'),
+    (re.compile(r'\[\[\n?'), '[['),
+    (re.compile(r'\]\]\n?'), ']]'),
+    ]
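+
+# For example (illustrative), tokenizing the line "$var n = 3" yields a
+# '$var' token, an id token "n", an '=' token, and an exp token "3".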
+
+
+class Cursor:
+  """Represents a position (line and column) in a text file."""
+
+  def __init__(self, line=-1, column=-1):
+    self.line = line
+    self.column = column
+
+  def __eq__(self, rhs):
+    return self.line == rhs.line and self.column == rhs.column
+
+  def __ne__(self, rhs):
+    return not self == rhs
+
+  def __lt__(self, rhs):
+    return self.line < rhs.line or (
+        self.line == rhs.line and self.column < rhs.column)
+
+  def __le__(self, rhs):
+    return self < rhs or self == rhs
+
+  def __gt__(self, rhs):
+    return rhs < self
+
+  def __ge__(self, rhs):
+    return rhs <= self
+
+  def __str__(self):
+    if self == Eof():
+      return 'EOF'
+    else:
+      return '%s(%s)' % (self.line + 1, self.column)
+
+  def __add__(self, offset):
+    return Cursor(self.line, self.column + offset)
+
+  def __sub__(self, offset):
+    return Cursor(self.line, self.column - offset)
+
+  def Clone(self):
+    """Returns a copy of self."""
+
+    return Cursor(self.line, self.column)
+
+
+# Special cursor to indicate the end-of-file.
+def Eof():
+  """Returns the special cursor to denote the end-of-file."""
+  return Cursor(-1, -1)
+
+
+class Token:
+  """Represents a token in a Pump source file."""
+
+  def __init__(self, start=None, end=None, value=None, token_type=None):
+    if start is None:
+      self.start = Eof()
+    else:
+      self.start = start
+    if end is None:
+      self.end = Eof()
+    else:
+      self.end = end
+    self.value = value
+    self.token_type = token_type
+
+  def __str__(self):
+    return 'Token @%s: \'%s\' type=%s' % (
+        self.start, self.value, self.token_type)
+
+  def Clone(self):
+    """Returns a copy of self."""
+
+    return Token(self.start.Clone(), self.end.Clone(), self.value,
+                 self.token_type)
+
+
+def StartsWith(lines, pos, string):
+  """Returns True iff the given position in lines starts with 'string'."""
+
+  return lines[pos.line][pos.column:].startswith(string)
+
+
+def FindFirstInLine(line, token_table):
+  best_match_start = -1
+  for (regex, token_type) in token_table:
+    m = regex.search(line)
+    if m:
+      # We found a match for this regex in the line.
+      if best_match_start < 0 or m.start() < best_match_start:
+        best_match_start = m.start()
+        best_match_length = m.end() - m.start()
+        best_match_token_type = token_type
+
+  if best_match_start < 0:
+    return None
+
+  return (best_match_start, best_match_length, best_match_token_type)
+
+
+def FindFirst(lines, token_table, cursor):
+  """Finds the first occurrence of any string in strings in lines."""
+
+  start = cursor.Clone()
+  cur_line_number = cursor.line
+  for line in lines[start.line:]:
+    if cur_line_number == start.line:
+      line = line[start.column:]
+    m = FindFirstInLine(line, token_table)
+    if m:
+      # We found a regex in line.
+      (start_column, length, token_type) = m
+      if cur_line_number == start.line:
+        start_column += start.column
+      found_start = Cursor(cur_line_number, start_column)
+      found_end = found_start + length
+      return MakeToken(lines, found_start, found_end, token_type)
+    cur_line_number += 1
+  # We failed to find any token in lines.
+  return None
+
+
+def SubString(lines, start, end):
+  """Returns a substring in lines."""
+
+  if end == Eof():
+    end = Cursor(len(lines) - 1, len(lines[-1]))
+
+  if start >= end:
+    return ''
+
+  if start.line == end.line:
+    return lines[start.line][start.column:end.column]
+
+  result_lines = ([lines[start.line][start.column:]] +
+                  lines[start.line + 1:end.line] +
+                  [lines[end.line][:end.column]])
+  return ''.join(result_lines)
+
+
+def StripMetaComments(str):
+  """Strip meta comments from each line in the given string."""
+
+  # First, completely remove lines containing nothing but a meta
+  # comment, including the trailing \n.  The (?m) flag is needed so that
+  # ^ matches at the start of every line, not just the start of the string.
+  str = re.sub(r'(?m)^\s*\$\$.*\n', '', str)
+
+  # Then, remove meta comments from contentful lines.
+  return re.sub(r'\s*\$\$.*', '', str)
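+
+# For example (illustrative), StripMetaComments('x = 1  $$ set x\n') returns
+# 'x = 1\n', while a line consisting of nothing but a meta comment is removed
+# together with its trailing newline.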
+
+
+def MakeToken(lines, start, end, token_type):
+  """Creates a new instance of Token."""
+
+  return Token(start, end, SubString(lines, start, end), token_type)
+
+
+def ParseToken(lines, pos, regex, token_type):
+  line = lines[pos.line][pos.column:]
+  m = regex.search(line)
+  if m and not m.start():
+    return MakeToken(lines, pos, pos + m.end(), token_type)
+  else:
+    print 'ERROR: %s expected at %s.' % (token_type, pos)
+    sys.exit(1)
+
+
+ID_REGEX = re.compile(r'[_A-Za-z]\w*')
+EQ_REGEX = re.compile(r'=')
+REST_OF_LINE_REGEX = re.compile(r'.*?(?=$|\$\$)')
+OPTIONAL_WHITE_SPACES_REGEX = re.compile(r'\s*')
+WHITE_SPACE_REGEX = re.compile(r'\s')
+DOT_DOT_REGEX = re.compile(r'\.\.')
+
+
+def Skip(lines, pos, regex):
+  line = lines[pos.line][pos.column:]
+  m = re.search(regex, line)
+  if m and not m.start():
+    return pos + m.end()
+  else:
+    return pos
+
+
+def SkipUntil(lines, pos, regex, token_type):
+  line = lines[pos.line][pos.column:]
+  m = re.search(regex, line)
+  if m:
+    return pos + m.start()
+  else:
+    print ('ERROR: %s expected on line %s after column %s.' %
+           (token_type, pos.line + 1, pos.column))
+    sys.exit(1)
+
+
+def ParseExpTokenInParens(lines, pos):
+  def ParseInParens(pos):
+    pos = Skip(lines, pos, OPTIONAL_WHITE_SPACES_REGEX)
+    pos = Skip(lines, pos, r'\(')
+    pos = Parse(pos)
+    pos = Skip(lines, pos, r'\)')
+    return pos
+
+  def Parse(pos):
+    pos = SkipUntil(lines, pos, r'\(|\)', ')')
+    if SubString(lines, pos, pos + 1) == '(':
+      pos = Parse(pos + 1)
+      pos = Skip(lines, pos, r'\)')
+      return Parse(pos)
+    else:
+      return pos
+
+  start = pos.Clone()
+  pos = ParseInParens(pos)
+  return MakeToken(lines, start, pos, 'exp')
+
+
+def RStripNewLineFromToken(token):
+  if token.value.endswith('\n'):
+    return Token(token.start, token.end, token.value[:-1], token.token_type)
+  else:
+    return token
+
+
+def TokenizeLines(lines, pos):
+  while True:
+    found = FindFirst(lines, TOKEN_TABLE, pos)
+    if not found:
+      yield MakeToken(lines, pos, Eof(), 'code')
+      return
+
+    if found.start == pos:
+      prev_token = None
+      prev_token_rstripped = None
+    else:
+      prev_token = MakeToken(lines, pos, found.start, 'code')
+      prev_token_rstripped = RStripNewLineFromToken(prev_token)
+
+    if found.token_type == '$var':
+      if prev_token_rstripped:
+        yield prev_token_rstripped
+      yield found
+      id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+      yield id_token
+      pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
+
+      eq_token = ParseToken(lines, pos, EQ_REGEX, '=')
+      yield eq_token
+      pos = Skip(lines, eq_token.end, r'\s*')
+
+      if SubString(lines, pos, pos + 2) != '[[':
+        exp_token = ParseToken(lines, pos, REST_OF_LINE_REGEX, 'exp')
+        yield exp_token
+        pos = Cursor(exp_token.end.line + 1, 0)
+    elif found.token_type == '$for':
+      if prev_token_rstripped:
+        yield prev_token_rstripped
+      yield found
+      id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+      yield id_token
+      pos = Skip(lines, id_token.end, WHITE_SPACE_REGEX)
+    elif found.token_type == '$range':
+      if prev_token_rstripped:
+        yield prev_token_rstripped
+      yield found
+      id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
+      yield id_token
+      pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
+
+      dots_pos = SkipUntil(lines, pos, DOT_DOT_REGEX, '..')
+      yield MakeToken(lines, pos, dots_pos, 'exp')
+      yield MakeToken(lines, dots_pos, dots_pos + 2, '..')
+      pos = dots_pos + 2
+      new_pos = Cursor(pos.line + 1, 0)
+      yield MakeToken(lines, pos, new_pos, 'exp')
+      pos = new_pos
+    elif found.token_type == '$':
+      if prev_token:
+        yield prev_token
+      yield found
+      exp_token = ParseExpTokenInParens(lines, found.end)
+      yield exp_token
+      pos = exp_token.end
+    elif (found.token_type == ']]' or found.token_type == '$if' or
+          found.token_type == '$elif' or found.token_type == '$else'):
+      if prev_token_rstripped:
+        yield prev_token_rstripped
+      yield found
+      pos = found.end
+    else:
+      if prev_token:
+        yield prev_token
+      yield found
+      pos = found.end
+
+
+def Tokenize(s):
+  """A generator that yields the tokens in the given string."""
+  if s != '':
+    lines = s.splitlines(True)
+    for token in TokenizeLines(lines, Cursor(0, 0)):
+      yield token
+
+
+class CodeNode:
+  def __init__(self, atomic_code_list=None):
+    self.atomic_code = atomic_code_list
+
+
+class VarNode:
+  def __init__(self, identifier=None, atomic_code=None):
+    self.identifier = identifier
+    self.atomic_code = atomic_code
+
+
+class RangeNode:
+  def __init__(self, identifier=None, exp1=None, exp2=None):
+    self.identifier = identifier
+    self.exp1 = exp1
+    self.exp2 = exp2
+
+
+class ForNode:
+  def __init__(self, identifier=None, sep=None, code=None):
+    self.identifier = identifier
+    self.sep = sep
+    self.code = code
+
+
+class ElseNode:
+  def __init__(self, else_branch=None):
+    self.else_branch = else_branch
+
+
+class IfNode:
+  def __init__(self, exp=None, then_branch=None, else_branch=None):
+    self.exp = exp
+    self.then_branch = then_branch
+    self.else_branch = else_branch
+
+
+class RawCodeNode:
+  def __init__(self, token=None):
+    self.raw_code = token
+
+
+class LiteralDollarNode:
+  def __init__(self, token):
+    self.token = token
+
+
+class ExpNode:
+  def __init__(self, token, python_exp):
+    self.token = token
+    self.python_exp = python_exp
+
+
+def PopFront(a_list):
+  head = a_list[0]
+  a_list[:1] = []
+  return head
+
+
+def PushFront(a_list, elem):
+  a_list[:0] = [elem]
+
+
+def PopToken(a_list, token_type=None):
+  token = PopFront(a_list)
+  if token_type is not None and token.token_type != token_type:
+    print 'ERROR: %s expected at %s' % (token_type, token.start)
+    print 'ERROR: %s found instead' % (token,)
+    sys.exit(1)
+
+  return token
+
+
+def PeekToken(a_list):
+  if not a_list:
+    return None
+
+  return a_list[0]
+
+
+def ParseExpNode(token):
+  python_exp = re.sub(r'([_A-Za-z]\w*)', r'self.GetValue("\1")', token.value)
+  return ExpNode(token, python_exp)
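+
+# For example (illustrative), a token with value 'i + 1' yields the
+# python_exp 'self.GetValue("i") + 1', which Env.EvalExp later evaluates
+# with the current environment bound to 'self'.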
+
+
+def ParseElseNode(tokens):
+  def Pop(token_type=None):
+    return PopToken(tokens, token_type)
+
+  next_token = PeekToken(tokens)
+  if not next_token:
+    return None
+  if next_token.token_type == '$else':
+    Pop('$else')
+    Pop('[[')
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    return code_node
+  elif next_token.token_type == '$elif':
+    Pop('$elif')
+    exp = Pop('code')
+    Pop('[[')
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    inner_else_node = ParseElseNode(tokens)
+    return CodeNode([IfNode(ParseExpNode(exp), code_node, inner_else_node)])
+  elif not next_token.value.strip():
+    Pop('code')
+    return ParseElseNode(tokens)
+  else:
+    return None
+
+
+def ParseAtomicCodeNode(tokens):
+  def Pop(token_type=None):
+    return PopToken(tokens, token_type)
+
+  head = PopFront(tokens)
+  t = head.token_type
+  if t == 'code':
+    return RawCodeNode(head)
+  elif t == '$var':
+    id_token = Pop('id')
+    Pop('=')
+    next_token = PeekToken(tokens)
+    if next_token.token_type == 'exp':
+      exp_token = Pop()
+      return VarNode(id_token, ParseExpNode(exp_token))
+    Pop('[[')
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    return VarNode(id_token, code_node)
+  elif t == '$for':
+    id_token = Pop('id')
+    next_token = PeekToken(tokens)
+    if next_token.token_type == 'code':
+      sep_token = next_token
+      Pop('code')
+    else:
+      sep_token = None
+    Pop('[[')
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    return ForNode(id_token, sep_token, code_node)
+  elif t == '$if':
+    exp_token = Pop('code')
+    Pop('[[')
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    else_node = ParseElseNode(tokens)
+    return IfNode(ParseExpNode(exp_token), code_node, else_node)
+  elif t == '$range':
+    id_token = Pop('id')
+    exp1_token = Pop('exp')
+    Pop('..')
+    exp2_token = Pop('exp')
+    return RangeNode(id_token, ParseExpNode(exp1_token),
+                     ParseExpNode(exp2_token))
+  elif t == '$id':
+    return ParseExpNode(Token(head.start + 1, head.end, head.value[1:], 'id'))
+  elif t == '$($)':
+    return LiteralDollarNode(head)
+  elif t == '$':
+    exp_token = Pop('exp')
+    return ParseExpNode(exp_token)
+  elif t == '[[':
+    code_node = ParseCodeNode(tokens)
+    Pop(']]')
+    return code_node
+  else:
+    PushFront(tokens, head)
+    return None
+
+
+def ParseCodeNode(tokens):
+  atomic_code_list = []
+  while True:
+    if not tokens:
+      break
+    atomic_code_node = ParseAtomicCodeNode(tokens)
+    if atomic_code_node:
+      atomic_code_list.append(atomic_code_node)
+    else:
+      break
+  return CodeNode(atomic_code_list)
+
+
+def ParseToAST(pump_src_text):
+  """Convert the given Pump source text into an AST."""
+  tokens = list(Tokenize(pump_src_text))
+  code_node = ParseCodeNode(tokens)
+  return code_node
+
+
+class Env:
+  def __init__(self):
+    self.variables = []
+    self.ranges = []
+
+  def Clone(self):
+    clone = Env()
+    clone.variables = self.variables[:]
+    clone.ranges = self.ranges[:]
+    return clone
+
+  def PushVariable(self, var, value):
+    # If value looks like an int, store it as an int.
+    try:
+      int_value = int(value)
+      if ('%s' % int_value) == value:
+        value = int_value
+    except Exception:
+      pass
+    self.variables[:0] = [(var, value)]
+
+  def PopVariable(self):
+    self.variables[:1] = []
+
+  def PushRange(self, var, lower, upper):
+    self.ranges[:0] = [(var, lower, upper)]
+
+  def PopRange(self):
+    self.ranges[:1] = []
+
+  def GetValue(self, identifier):
+    for (var, value) in self.variables:
+      if identifier == var:
+        return value
+
+    print 'ERROR: meta variable %s is undefined.' % (identifier,)
+    sys.exit(1)
+
+  def EvalExp(self, exp):
+    try:
+      result = eval(exp.python_exp)
+    except Exception, e:
+      print 'ERROR: caught exception %s: %s' % (e.__class__.__name__, e)
+      print ('ERROR: failed to evaluate meta expression %s at %s' %
+             (exp.python_exp, exp.token.start))
+      sys.exit(1)
+    return result
+
+  def GetRange(self, identifier):
+    for (var, lower, upper) in self.ranges:
+      if identifier == var:
+        return (lower, upper)
+
+    print 'ERROR: range %s is undefined.' % (identifier,)
+    sys.exit(1)
+
+
+class Output:
+  def __init__(self):
+    self.string = ''
+
+  def GetLastLine(self):
+    index = self.string.rfind('\n')
+    if index < 0:
+      return ''
+
+    return self.string[index + 1:]
+
+  def Append(self, s):
+    self.string += s
+
+
+def RunAtomicCode(env, node, output):
+  if isinstance(node, VarNode):
+    identifier = node.identifier.value.strip()
+    result = Output()
+    RunAtomicCode(env.Clone(), node.atomic_code, result)
+    value = result.string
+    env.PushVariable(identifier, value)
+  elif isinstance(node, RangeNode):
+    identifier = node.identifier.value.strip()
+    lower = int(env.EvalExp(node.exp1))
+    upper = int(env.EvalExp(node.exp2))
+    env.PushRange(identifier, lower, upper)
+  elif isinstance(node, ForNode):
+    identifier = node.identifier.value.strip()
+    if node.sep is None:
+      sep = ''
+    else:
+      sep = node.sep.value
+    (lower, upper) = env.GetRange(identifier)
+    for i in range(lower, upper + 1):
+      new_env = env.Clone()
+      new_env.PushVariable(identifier, i)
+      RunCode(new_env, node.code, output)
+      if i != upper:
+        output.Append(sep)
+  elif isinstance(node, RawCodeNode):
+    output.Append(node.raw_code.value)
+  elif isinstance(node, IfNode):
+    cond = env.EvalExp(node.exp)
+    if cond:
+      RunCode(env.Clone(), node.then_branch, output)
+    elif node.else_branch is not None:
+      RunCode(env.Clone(), node.else_branch, output)
+  elif isinstance(node, ExpNode):
+    value = env.EvalExp(node)
+    output.Append('%s' % (value,))
+  elif isinstance(node, LiteralDollarNode):
+    output.Append('$')
+  elif isinstance(node, CodeNode):
+    RunCode(env.Clone(), node, output)
+  else:
+    print 'ERROR: unexpected node type.'
+    print node
+    sys.exit(1)
+
+
+def RunCode(env, code_node, output):
+  for atomic_code in code_node.atomic_code:
+    RunAtomicCode(env, atomic_code, output)
+
+
+def IsSingleLineComment(cur_line):
+  return '//' in cur_line
+
+
+def IsInPreprocessorDirective(prev_lines, cur_line):
+  if cur_line.lstrip().startswith('#'):
+    return True
+  return prev_lines and prev_lines[-1].endswith('\\')
+
+
+def WrapComment(line, output):
+  loc = line.find('//')
+  before_comment = line[:loc].rstrip()
+  if before_comment == '':
+    indent = loc
+  else:
+    output.append(before_comment)
+    indent = len(before_comment) - len(before_comment.lstrip())
+  prefix = indent*' ' + '// '
+  max_len = 80 - len(prefix)
+  comment = line[loc + 2:].strip()
+  segs = [seg for seg in re.split(r'(\w+\W*)', comment) if seg != '']
+  cur_line = ''
+  for seg in segs:
+    if len((cur_line + seg).rstrip()) < max_len:
+      cur_line += seg
+    else:
+      if cur_line.strip() != '':
+        output.append(prefix + cur_line.rstrip())
+      cur_line = seg.lstrip()
+  if cur_line.strip() != '':
+    output.append(prefix + cur_line.strip())
+
+
+def WrapCode(line, line_concat, output):
+  indent = len(line) - len(line.lstrip())
+  prefix = indent*' '  # Prefix of the current line
+  max_len = 80 - indent - len(line_concat)  # Maximum length of the current line
+  new_prefix = prefix + 4*' '  # Prefix of a continuation line
+  new_max_len = max_len - 4  # Maximum length of a continuation line
+  # Prefers to wrap a line after a ',' or ';'.
+  segs = [seg for seg in re.split(r'([^,;]+[,;]?)', line.strip()) if seg != '']
+  cur_line = ''  # The current line without leading spaces.
+  for seg in segs:
+    # If the line is still too long, wrap at a space.
+    while cur_line == '' and len(seg.strip()) > max_len:
+      seg = seg.lstrip()
+      split_at = seg.rfind(' ', 0, max_len)
+      output.append(prefix + seg[:split_at].strip() + line_concat)
+      seg = seg[split_at + 1:]
+      prefix = new_prefix
+      max_len = new_max_len
+
+    if len((cur_line + seg).rstrip()) < max_len:
+      cur_line = (cur_line + seg).lstrip()
+    else:
+      output.append(prefix + cur_line.rstrip() + line_concat)
+      prefix = new_prefix
+      max_len = new_max_len
+      cur_line = seg.lstrip()
+  if cur_line.strip() != '':
+    output.append(prefix + cur_line.strip())
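+
+# For example (illustrative), a 100-column statement is flushed after the
+# last ',' or ';' that still fits within 80 columns, and each continuation
+# line is indented four extra spaces.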
+
+
+def WrapPreprocessorDirective(line, output):
+  WrapCode(line, ' \\', output)
+
+
+def WrapPlainCode(line, output):
+  WrapCode(line, '', output)
+
+
+def IsMultiLineIWYUPragma(line):
+  return re.search(r'/\* IWYU pragma: ', line)
+
+
+def IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
+  return (re.match(r'^#(ifndef|define|endif\s*//)\s*[\w_]+\s*$', line) or
+          re.match(r'^#include\s', line) or
+          # Don't break IWYU pragmas, either; that causes iwyu.py problems.
+          re.search(r'// IWYU pragma: ', line))
+
+
+def WrapLongLine(line, output):
+  line = line.rstrip()
+  if len(line) <= 80:
+    output.append(line)
+  elif IsSingleLineComment(line):
+    if IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
+      # The style guide made an exception to allow long header guard lines,
+      # includes and IWYU pragmas.
+      output.append(line)
+    else:
+      WrapComment(line, output)
+  elif IsInPreprocessorDirective(output, line):
+    if IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
+      # The style guide made an exception to allow long header guard lines,
+      # includes and IWYU pragmas.
+      output.append(line)
+    else:
+      WrapPreprocessorDirective(line, output)
+  elif IsMultiLineIWYUPragma(line):
+    output.append(line)
+  else:
+    WrapPlainCode(line, output)
+
+
+def BeautifyCode(string):
+  lines = string.splitlines()
+  output = []
+  for line in lines:
+    WrapLongLine(line, output)
+  output2 = [line.rstrip() for line in output]
+  return '\n'.join(output2) + '\n'
+
+
+def ConvertFromPumpSource(src_text):
+  """Return the text generated from the given Pump source text."""
+  ast = ParseToAST(StripMetaComments(src_text))
+  output = Output()
+  RunCode(Env(), ast, output)
+  return BeautifyCode(output.string)
+
+
+def main(argv):
+  if len(argv) == 1:
+    print __doc__
+    sys.exit(1)
+
+  file_path = argv[-1]
+  output_str = ConvertFromPumpSource(file(file_path, 'r').read())
+  if file_path.endswith('.pump'):
+    output_file_path = file_path[:-5]
+  else:
+    output_file_path = '-'
+  if output_file_path == '-':
+    print output_str,
+  else:
+    output_file = file(output_file_path, 'w')
+    output_file.write('// This file was GENERATED by command:\n')
+    output_file.write('//     %s %s\n' %
+                      (os.path.basename(__file__), os.path.basename(file_path)))
+    output_file.write('// DO NOT EDIT BY HAND!!!\n\n')
+    output_file.write(output_str)
+    output_file.close()
+
+
+if __name__ == '__main__':
+  main(sys.argv)

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/pdal.git