[mapnik] 01/07: Imported Upstream version 3.0.7+ds

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Wed Oct 14 07:50:26 UTC 2015


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository mapnik.

commit 32fd7aa88fd49ec7a3b49bd326e62240cd786c53
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Wed Oct 14 08:54:07 2015 +0200

    Imported Upstream version 3.0.7+ds
---
 .gitignore                                         |    2 +-
 CHANGELOG.md                                       |   14 +
 INSTALL.md                                         |    6 +
 SConstruct                                         | 1962 ++++++++++----------
 appveyor.yml                                       |    2 +-
 demo/viewer/mapwidget.cpp                          |   10 +-
 include/mapnik/grid/grid_util.hpp                  |  112 --
 include/mapnik/span_image_filter.hpp               |  158 --
 include/mapnik/version.hpp                         |   11 +-
 plugins/input/csv/csv_utils.hpp                    |    2 +-
 plugins/input/geojson/geojson_datasource.cpp       |    9 +-
 plugins/input/ogr/ogr_datasource.cpp               |   28 +-
 plugins/input/pgraster/pgraster_datasource.cpp     |   21 +-
 plugins/input/postgis/postgis_datasource.cpp       |   23 +-
 plugins/input/postgis/postgis_featureset.cpp       |    4 +
 plugins/input/shape/shape_datasource.cpp           |    9 +-
 plugins/input/sqlite/sqlite_datasource.cpp         |   13 +-
 scripts/build-local.bat                            |    2 +-
 src/map.cpp                                        |   15 +-
 utils/mapnik-config/build.py                       |   18 +
 utils/mapnik-index/build.py                        |    2 +
 utils/mapnik-index/mapnik-index.cpp                |  291 +--
 utils/mapnik-index/process_csv_file.cpp            |  215 +++
 .../mapnik-index/process_csv_file.hpp              |   32 +-
 utils/mapnik-index/process_geojson_file.cpp        |   90 +
 .../mapnik-index/process_geojson_file.hpp          |   32 +-
 utils/{nik2img => mapnik-render}/build.py          |   10 +-
 .../mapnik-render.cpp}                             |    0
 28 files changed, 1482 insertions(+), 1611 deletions(-)

diff --git a/.gitignore b/.gitignore
index ddd8581..2ecdd4b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,7 +34,7 @@ utils/mapnik-index/mapnik-index
 utils/ogrindex/ogrindex
 utils/pgsql2sqlite/pgsql2sqlite
 utils/svg2png/svg2png
-utils/nik2img/nik2img
+utils/mapnik-render/mapnik-render
 demo/python/demo*
 demo/python/map.xml
 tests/data/sqlite/*index
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d4ecd27..bbeea03 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,20 @@ Developers: Please commit along with changes.
 
 For a complete change history, see the git log.
 
+## 3.0.7
+
+Released: October 12, 2015
+
+(Packaged from e161253)
+
+#### Summary
+
+ - Removed `MAPNIK_VERSION_IS_RELEASE` define; `mapnik-config --version` no longer reports `-pre` for non-release versions.
+   Use `mapnik-config --git-revision` instead (https://github.com/mapnik/mapnik/issues/3123)
+ - Renamed `nik2img` command to `mapnik-render`
+ - PostGIS: Fixed handling of all attributes when `key_field_as_attribute=false` (https://github.com/mapnik/mapnik/issues/3120)
+ - PostGIS: Fixed parsing of `key_field_as_attribute` as boolean: now `true/false` can be used in addition to `0/1`
+
 ## 3.0.6
 
 Released: October 7, 2015
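
The two PostGIS items above both concern the `key_field_as_attribute` datasource
parameter: when it is `false`, the key field is used only as the feature id and
is not duplicated as a regular attribute. A minimal sketch of exercising the
option through the separately packaged python-mapnik bindings (the connection
values are placeholders):

    import mapnik

    # 'true'/'false' are accepted as of 3.0.7, in addition to 0/1
    ds = mapnik.PostGIS(dbname='gis',              # placeholder database
                        table='planet_osm_point',  # placeholder table
                        key_field='gid',
                        key_field_as_attribute=False)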
diff --git a/INSTALL.md b/INSTALL.md
index 229a315..0776a6f 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -156,6 +156,12 @@ You can run the Mapnik tests locally (without installing) like:
 
     make test
 
+## Python Bindings
+
+Python bindings are not included by default. You'll need to build and install them separately.
+
+ * Build from source: https://github.com/mapnik/python-mapnik
+
 ## Learning Mapnik
 
 ### Help
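
Once the python-mapnik bindings referenced above are built and installed, a
minimal render loop looks like the sketch below ('map.xml' is a placeholder
stylesheet path):

    import mapnik

    m = mapnik.Map(600, 400)
    mapnik.load_map(m, 'map.xml')  # placeholder stylesheet
    m.zoom_all()
    mapnik.render_to_file(m, 'out.png', 'png')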
diff --git a/SConstruct b/SConstruct
index 0474561..59faf04 100644
--- a/SConstruct
+++ b/SConstruct
@@ -105,28 +105,28 @@ pretty_dep_names = {
 # Core plugin build configuration
 # opts.AddVariables still hardcoded however...
 PLUGINS = { # plugins with external dependencies
-	    # configured by calling project, hence 'path':None
-	    'postgis': {'default':True,'path':None,'inc':'libpq-fe.h','lib':'pq','lang':'C'},
-	    'pgraster': {'default':True,'path':None,'inc':'libpq-fe.h','lib':'pq','lang':'C'},
-	    'gdal':    {'default':True,'path':None,'inc':'gdal_priv.h','lib':'gdal','lang':'C++'},
-	    'ogr':     {'default':True,'path':None,'inc':'ogrsf_frmts.h','lib':'gdal','lang':'C++'},
-	    'sqlite':  {'default':True,'path':'SQLITE','inc':'sqlite3.h','lib':'sqlite3','lang':'C'},
-	    # plugins without external dependencies requiring CheckLibWithHeader...
-	    'shape':   {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
-	    'csv':     {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
-	    'raster':  {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
-	    'geojson': {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
-	    'topojson':{'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'}
-	    }
+            # configured by calling project, hence 'path':None
+            'postgis': {'default':True,'path':None,'inc':'libpq-fe.h','lib':'pq','lang':'C'},
+            'pgraster': {'default':True,'path':None,'inc':'libpq-fe.h','lib':'pq','lang':'C'},
+            'gdal':    {'default':True,'path':None,'inc':'gdal_priv.h','lib':'gdal','lang':'C++'},
+            'ogr':     {'default':True,'path':None,'inc':'ogrsf_frmts.h','lib':'gdal','lang':'C++'},
+            'sqlite':  {'default':True,'path':'SQLITE','inc':'sqlite3.h','lib':'sqlite3','lang':'C'},
+            # plugins without external dependencies requiring CheckLibWithHeader...
+            'shape':   {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
+            'csv':     {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
+            'raster':  {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
+            'geojson': {'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'},
+            'topojson':{'default':True,'path':None,'inc':None,'lib':None,'lang':'C++'}
+            }
 
 
 def init_environment(env):
     env.Decider('MD5-timestamp')
     env.SourceCode(".", None)
     if os.environ.get('RANLIB'):
-	env['RANLIB'] = os.environ['RANLIB']
+        env['RANLIB'] = os.environ['RANLIB']
     if os.environ.get('AR'):
-	env['AR'] = os.environ['AR']
+        env['AR'] = os.environ['AR']
 
 #### SCons build options and initial setup ####
 env = Environment(ENV=os.environ)
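
The PLUGINS dict reindented above feeds the per-plugin dependency checks run
later during configure. A hedged sketch of how one entry is consumed (the real
loop lives elsewhere in SConstruct, outside this diff):

    details = PLUGINS['postgis']
    if details['lib'] and details['inc']:
        # plugins with external deps get a compile/link probe, e.g.
        # CheckLibWithHeader('pq', 'libpq-fe.h', 'C')
        if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
            env['SKIPPED_DEPS'].append('postgis')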
@@ -142,58 +142,58 @@ def color_print(color,text,newline=True):
     # 4 - blue
     text = "\033[9%sm%s\033[0m" % (color,text)
     if not newline:
-	print text,
+        print text,
     else:
-	print text
+        print text
 
 def regular_print(color,text,newline=True):
     if not newline:
-	print text,
+        print text,
     else:
-	print text
+        print text
 
 def call(cmd, silent=False):
     stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
     if not stderr:
-	return stdin.strip()
+        return stdin.strip()
     elif not silent:
-	color_print(1,'Problem encounted with SCons scripts, please post bug report to: https://github.com/mapnik/mapnik/issues \nError was: %s' % stderr)
+        color_print(1,'Problem encountered with SCons scripts, please post bug report to: https://github.com/mapnik/mapnik/issues \nError was: %s' % stderr)
 
 def strip_first(string,find,replace=''):
     if string.startswith(find):
-	return string.replace(find,replace,1)
+        return string.replace(find,replace,1)
     return string
 
 # http://www.scons.org/wiki/InstallTargets
 def create_uninstall_target(env, path, is_glob=False):
     if 'uninstall' in COMMAND_LINE_TARGETS:
-	if is_glob:
-	    all_files = Glob(path,strings=True)
-	    for filei in all_files:
-		env.Command( "uninstall-"+filei, filei,
-		[
-		Delete("$SOURCE"),
-		])
-		env.Alias("uninstall", "uninstall-"+filei)
-	else:
-	    if os.path.exists(path):
-		env.Command( "uninstall-"+path, path,
-		[
-		Delete("$SOURCE"),
-		])
-		env.Alias("uninstall", "uninstall-"+path)
+        if is_glob:
+            all_files = Glob(path,strings=True)
+            for filei in all_files:
+                env.Command( "uninstall-"+filei, filei,
+                [
+                Delete("$SOURCE"),
+                ])
+                env.Alias("uninstall", "uninstall-"+filei)
+        else:
+            if os.path.exists(path):
+                env.Command( "uninstall-"+path, path,
+                [
+                Delete("$SOURCE"),
+                ])
+                env.Alias("uninstall", "uninstall-"+path)
 
 def shortest_name(libs):
     name = '-'*200
     for lib in libs:
-	if len(name) > len(lib):
-	    name = lib
+        if len(name) > len(lib):
+            name = lib
     return name
 
 def rm_path(item,set,_env):
     for i in _env[set]:
-	if i.startswith(item):
-	    _env[set].remove(i)
+        if i.startswith(item):
+            _env[set].remove(i)
 
 def sort_paths(items,priority):
     """Sort paths such that compiling and linking will globally prefer custom or local libs
@@ -217,57 +217,56 @@ def sort_paths(items,priority):
     # parse types of paths into logical/meaningful groups
     # based on commonly encountered lib directories on linux and osx
     for i in items:
-	# internal paths for code kept inside
-	# the mapnik sources
-	if i.startswith('#'):
-	    path_types['internal'].append(i)
-	# Mac OS X user installed frameworks
-	elif '/Library/Frameworks' in i:
-	    path_types['frameworks'].append(i)
-	# various 'local' installs like /usr/local or /opt/local
-	elif 'local' in i or '/sw' in i:
-	    if '/usr/local' in i:
-		path_types['user'].insert(0,i)
-	    else:
-		path_types['user'].append(i)
-	# key system libs (likely others will fall into 'other')
-	elif '/usr/' in i or '/System' in i or i.startswith('/lib'):
-	    path_types['system'].append(i)
-	# anything not yet matched...
-	# likely a combo of rare system lib paths and
-	# very custom user paths that should ideally be
-	# in 'user'
-	else:
-	    path_types['other'].append(i)
+        # internal paths for code kept inside
+        # the mapnik sources
+        if i.startswith('#'):
+            path_types['internal'].append(i)
+        # Mac OS X user installed frameworks
+        elif '/Library/Frameworks' in i:
+            path_types['frameworks'].append(i)
+        # various 'local' installs like /usr/local or /opt/local
+        elif 'local' in i or '/sw' in i:
+            if '/usr/local' in i:
+                path_types['user'].insert(0,i)
+            else:
+                path_types['user'].append(i)
+        # key system libs (likely others will fall into 'other')
+        elif '/usr/' in i or '/System' in i or i.startswith('/lib'):
+            path_types['system'].append(i)
+        # anything not yet matched...
+        # likely a combo of rare system lib paths and
+        # very custom user paths that should ideally be
+        # in 'user'
+        else:
+            path_types['other'].append(i)
     # build up new list based on priority list
     for path in priority:
-	if path_types.has_key(path):
-	    dirs = path_types[path]
-	    new.extend(dirs)
-	    path_types.pop(path)
-	else:
-	    color_print(1,'\nSorry, "%s" is NOT a valid value for option "LINK_PRIORITY": values include: %s' % (path,','.join(path_types.keys())))
-	    color_print(1,'\tinternal: the local directory of the Mapnik sources (prefix #) (eg. used to link internal agg)')
-	    color_print(1,'\tframeworks: on osx the /Library/Frameworks directory')
-	    color_print(1,'\tuser: any path with "local" or "/sw" inside it')
-	    color_print(1,'\tsystem: any path not yet matched with "/usr/","/lib", or "/System" (osx) inside it')
-	    color_print(1,'\tother: any paths you specified not matched by criteria used to parse the others')
-	    color_print(1,'\tother: any paths you specified not matched by criteria used to parse the others')
-	    color_print(1,'The Default priority is: %s' % ','.join(DEFAULT_LINK_PRIORITY))
-	    color_print(1,'Any priority groups not listed will be appended to the list at the end')
-	    Exit(1)
+        if path_types.has_key(path):
+            dirs = path_types[path]
+            new.extend(dirs)
+            path_types.pop(path)
+        else:
+            color_print(1,'\nSorry, "%s" is NOT a valid value for option "LINK_PRIORITY": values include: %s' % (path,','.join(path_types.keys())))
+            color_print(1,'\tinternal: the local directory of the Mapnik sources (prefix #) (eg. used to link internal agg)')
+            color_print(1,'\tframeworks: on osx the /Library/Frameworks directory')
+            color_print(1,'\tuser: any path with "local" or "/sw" inside it')
+            color_print(1,'\tsystem: any path not yet matched with "/usr/","/lib", or "/System" (osx) inside it')
+            color_print(1,'\tother: any paths you specified not matched by criteria used to parse the others')
+            color_print(1,'The Default priority is: %s' % ','.join(DEFAULT_LINK_PRIORITY))
+            color_print(1,'Any priority groups not listed will be appended to the list at the end')
+            Exit(1)
     # append remaining paths potentially not requested
     # by any custom priority list defined by user
     for k,v in path_types.items():
-	new.extend(v)
+        new.extend(v)
     return new
 
 def pretty_dep(dep):
     pretty = pretty_dep_names.get(dep)
     if pretty:
-	return '%s (%s)' % (dep,pretty)
+        return '%s (%s)' % (dep,pretty)
     elif 'boost' in dep:
-	return '%s (%s)' % (dep,'more info see: https://github.com/mapnik/mapnik/wiki/Mapnik-Installation & http://www.boost.org')
+        return '%s (%s)' % (dep,'more info see: https://github.com/mapnik/mapnik/wiki/Mapnik-Installation & http://www.boost.org')
     return dep
 
 
@@ -400,9 +400,9 @@ opts.AddVariables(
     BoolVariable('DEMO', 'Compile demo c++ application', 'True'),
     BoolVariable('PGSQL2SQLITE', 'Compile and install a utility to convert postgres tables to sqlite', 'False'),
     BoolVariable('SHAPEINDEX', 'Compile and install a utility to generate shapefile indexes in the custom format (.index) Mapnik supports', 'True'),
-    BoolVariable('MAPNIK_INDEX', 'Compile and install a utility to generate file indexes for CSV and GeoJSON in the custom format (.index) Mapnik supports', 'True'),
+    BoolVariable('MAPNIK_INDEX', 'Compile and install a utility to generate spatial indexes for CSV and GeoJSON in the custom format (.index) Mapnik supports', 'True'),
     BoolVariable('SVG2PNG', 'Compile and install a utility to generate render an svg file to a png on the command line', 'False'),
-    BoolVariable('NIK2IMG', 'Compile and install a utility to generate render a map to an image', 'True'),
+    BoolVariable('MAPNIK_RENDER', 'Compile and install a utility to render a map to an image', 'True'),
     BoolVariable('COLOR_PRINT', 'Print build status information in color', 'True'),
     BoolVariable('BIGINT', 'Compile support for 64-bit integers in mapnik::value', 'True'),
     )
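
With the rename above, the flag changes on the scons command line as well; the
old NIK2IMG option becomes, for example:

    python scons/scons.py configure MAPNIK_RENDER=True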
@@ -411,74 +411,74 @@ opts.AddVariables(
 # these include all scons core variables as well as custom
 # env variables needed in SConscript files
 pickle_store = [# Scons internal variables
-	'CC', # compiler user to check if c deps compile during configure
-	'CXX', # C++ compiler to compile mapnik
-	'CFLAGS',
-	'CPPDEFINES',
-	'CPPFLAGS', # c preprocessor flags
-	'CPPPATH',
-	'CXXFLAGS', # C++ flags built up during configure
-	'LIBPATH',
-	'LIBS',
-	'LINKFLAGS',
-	'CUSTOM_LDFLAGS', # user submitted
-	'CUSTOM_DEFINES', # user submitted
-	'CUSTOM_CXXFLAGS', # user submitted
-	'CUSTOM_CFLAGS', # user submitted
-	'MAPNIK_LIB_NAME',
-	'LINK',
-	'RUNTIME_LINK',
-	# Mapnik's SConstruct build variables
-	'PLUGINS',
-	'ABI_VERSION',
-	'MAPNIK_VERSION_STRING',
-	'MAPNIK_VERSION',
-	'PLATFORM',
-	'BOOST_ABI',
-	'BOOST_APPEND',
-	'LIBDIR_SCHEMA',
-	'REQUESTED_PLUGINS',
-	'COLOR_PRINT',
-	'HAS_CAIRO',
-	'MAPNIK_HAS_DLFCN',
-	'HAS_PYCAIRO',
-	'PYCAIRO_PATHS',
-	'HAS_LIBXML2',
-	'PKG_CONFIG_PATH',
-	'PATH',
-	'PATH_REMOVE',
-	'PATH_REPLACE',
-	'MAPNIK_LIB_DIR',
-	'MAPNIK_LIB_DIR_DEST',
-	'INSTALL_PREFIX',
-	'MAPNIK_INPUT_PLUGINS',
-	'MAPNIK_INPUT_PLUGINS_DEST',
-	'MAPNIK_FONTS',
-	'MAPNIK_FONTS_DEST',
-	'MAPNIK_BUNDLED_SHARE_DIRECTORY',
-	'MAPNIK_LIB_BASE',
-	'MAPNIK_LIB_BASE_DEST',
-	'EXTRA_FREETYPE_LIBS',
-	'LIBMAPNIK_CPPATHS',
-	'LIBMAPNIK_DEFINES',
-	'LIBMAPNIK_CXXFLAGS',
-	'CAIRO_LIBPATHS',
-	'CAIRO_ALL_LIBS',
-	'CAIRO_CPPPATHS',
-	'GRID_RENDERER',
-	'SVG_RENDERER',
-	'SQLITE_LINKFLAGS',
-	'BOOST_LIB_VERSION_FROM_HEADER',
-	'BIGINT',
-	'HOST'
-	]
+        'CC', # compiler used to check if c deps compile during configure
+        'CXX', # C++ compiler to compile mapnik
+        'CFLAGS',
+        'CPPDEFINES',
+        'CPPFLAGS', # c preprocessor flags
+        'CPPPATH',
+        'CXXFLAGS', # C++ flags built up during configure
+        'LIBPATH',
+        'LIBS',
+        'LINKFLAGS',
+        'CUSTOM_LDFLAGS', # user submitted
+        'CUSTOM_DEFINES', # user submitted
+        'CUSTOM_CXXFLAGS', # user submitted
+        'CUSTOM_CFLAGS', # user submitted
+        'MAPNIK_LIB_NAME',
+        'LINK',
+        'RUNTIME_LINK',
+        # Mapnik's SConstruct build variables
+        'PLUGINS',
+        'ABI_VERSION',
+        'MAPNIK_VERSION_STRING',
+        'MAPNIK_VERSION',
+        'PLATFORM',
+        'BOOST_ABI',
+        'BOOST_APPEND',
+        'LIBDIR_SCHEMA',
+        'REQUESTED_PLUGINS',
+        'COLOR_PRINT',
+        'HAS_CAIRO',
+        'MAPNIK_HAS_DLFCN',
+        'HAS_PYCAIRO',
+        'PYCAIRO_PATHS',
+        'HAS_LIBXML2',
+        'PKG_CONFIG_PATH',
+        'PATH',
+        'PATH_REMOVE',
+        'PATH_REPLACE',
+        'MAPNIK_LIB_DIR',
+        'MAPNIK_LIB_DIR_DEST',
+        'INSTALL_PREFIX',
+        'MAPNIK_INPUT_PLUGINS',
+        'MAPNIK_INPUT_PLUGINS_DEST',
+        'MAPNIK_FONTS',
+        'MAPNIK_FONTS_DEST',
+        'MAPNIK_BUNDLED_SHARE_DIRECTORY',
+        'MAPNIK_LIB_BASE',
+        'MAPNIK_LIB_BASE_DEST',
+        'EXTRA_FREETYPE_LIBS',
+        'LIBMAPNIK_CPPATHS',
+        'LIBMAPNIK_DEFINES',
+        'LIBMAPNIK_CXXFLAGS',
+        'CAIRO_LIBPATHS',
+        'CAIRO_ALL_LIBS',
+        'CAIRO_CPPPATHS',
+        'GRID_RENDERER',
+        'SVG_RENDERER',
+        'SQLITE_LINKFLAGS',
+        'BOOST_LIB_VERSION_FROM_HEADER',
+        'BIGINT',
+        'HOST'
+        ]
 
 # Add all other user configurable options to pickle pickle_store
 # We add here more options than are needed for the build stage
 # but helpful so that scons -h shows the exact cached options
 for opt in opts.options:
     if opt.key not in pickle_store:
-	pickle_store.append(opt.key)
+        pickle_store.append(opt.key)
 
 # Method of adding configure behavior to Scons adapted from:
 # http://freeorion.svn.sourceforge.net/svnroot/freeorion/trunk/FreeOrion/SConstruct
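
For context on pickle_store: the keys listed above are persisted after a
successful configure so later builds can skip it (the read side appears in the
next hunk). The write side is outside this diff; a hedged sketch of what it
presumably does:

    import pickle

    # persist only the whitelisted keys from the configured environment
    with open(SCONS_CONFIGURE_CACHE, 'w') as f:
        pickle.dump(dict((k, env[k]) for k in pickle_store if k in env), f)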
@@ -510,24 +510,24 @@ opts.Update(env)
 # if we are not configuring overwrite environment with pickled settings
 if not force_configure:
     if os.path.exists(SCONS_CONFIGURE_CACHE):
-	try:
-	    pickled_environment = open(SCONS_CONFIGURE_CACHE, 'r')
-	    pickled_values = pickle.load(pickled_environment)
-	    for key, value in pickled_values.items():
-		env[key] = value
-	    preconfigured = True
-	except:
-	    preconfigured = False
+        try:
+            pickled_environment = open(SCONS_CONFIGURE_CACHE, 'r')
+            pickled_values = pickle.load(pickled_environment)
+            for key, value in pickled_values.items():
+                env[key] = value
+            preconfigured = True
+        except:
+            preconfigured = False
     else:
-	preconfigured = False
+        preconfigured = False
 
 # check for missing keys in pickled settings
 # which can occur when keys are added or changed between
 # rebuilds, e.g. for folks following trunk
 for opt in pickle_store:
     if not opt in env:
-	#print 'missing opt', opt
-	preconfigured = False
+        #print 'missing opt', opt
+        preconfigured = False
 
 # if custom arguments are supplied make sure to accept them
 if opts.args:
@@ -540,8 +540,8 @@ if opts.args:
 
 elif preconfigured:
     if not HELP_REQUESTED:
-	color_print(4,'Using previous successful configuration...')
-	color_print(4,'Re-configure by running "python scons/scons.py configure".')
+        color_print(4,'Using previous successful configuration...')
+        color_print(4,'Re-configure by running "python scons/scons.py configure".')
 
 if env.has_key('COLOR_PRINT') and env['COLOR_PRINT'] == False:
     color_print = regular_print
@@ -557,11 +557,11 @@ def prioritize_paths(context,silent=True):
     env = context.env
     prefs = env['LINK_PRIORITY'].split(',')
     if not silent:
-	context.Message( 'Sorting lib and inc compiler paths...')
+        context.Message( 'Sorting lib and inc compiler paths...')
     env['LIBPATH'] = sort_paths(env['LIBPATH'],prefs)
     env['CPPPATH'] = sort_paths(env['CPPPATH'],prefs)
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     ret = context.Result( True )
     return ret
 
@@ -588,42 +588,42 @@ def parse_config(context, config, checks='--libs --cflags'):
     tool = config.lower().replace('_','-')
     toolname = tool
     if config in ('GDAL_CONFIG'):
-	toolname += ' %s' % checks
+        toolname += ' %s' % checks
     context.Message( 'Checking for %s... ' % toolname)
     cmd = '%s %s' % (env[config],checks)
     ret = context.TryAction(cmd)[0]
     parsed = False
     if ret:
-	try:
-	    if 'gdal-config' in cmd:
-		env.ParseConfig(cmd)
-		# hack for potential -framework GDAL syntax
-		# which will not end up being added to env['LIBS']
-		# and thus breaks knowledge below that gdal worked
-		# TODO - upgrade our scons logic to support Framework linking
-		if env['PLATFORM'] == 'Darwin':
-		    value = call(cmd,silent=True)
-		    if value and '-framework GDAL' in value:
-			env['LIBS'].append('gdal')
-			if os.path.exists('/Library/Frameworks/GDAL.framework/unix/lib'):
-			    env['LIBPATH'].insert(0,'/Library/Frameworks/GDAL.framework/unix/lib')
-		    if 'GDAL' in env.get('FRAMEWORKS',[]):
-			env["FRAMEWORKS"].remove("GDAL")
-	    else:
-		env.ParseConfig(cmd)
-	    parsed = True
-	except OSError, e:
-	    ret = False
-	    print ' (xml2-config not found!)'
+        try:
+            if 'gdal-config' in cmd:
+                env.ParseConfig(cmd)
+                # hack for potential -framework GDAL syntax
+                # which will not end up being added to env['LIBS']
+                # and thus breaks knowledge below that gdal worked
+                # TODO - upgrade our scons logic to support Framework linking
+                if env['PLATFORM'] == 'Darwin':
+                    value = call(cmd,silent=True)
+                    if value and '-framework GDAL' in value:
+                        env['LIBS'].append('gdal')
+                        if os.path.exists('/Library/Frameworks/GDAL.framework/unix/lib'):
+                            env['LIBPATH'].insert(0,'/Library/Frameworks/GDAL.framework/unix/lib')
+                    if 'GDAL' in env.get('FRAMEWORKS',[]):
+                        env["FRAMEWORKS"].remove("GDAL")
+            else:
+                env.ParseConfig(cmd)
+            parsed = True
+        except OSError, e:
+            ret = False
+            print ' (xml2-config not found!)'
     if not parsed:
-	if config in ('GDAL_CONFIG'):
-	    # optional deps...
-	    if tool not in env['SKIPPED_DEPS']:
-		env['SKIPPED_DEPS'].append(tool)
-	    conf.rollback_option(config)
-	else: # freetype and libxml2, not optional
-	    if tool not in env['MISSING_DEPS']:
-		env['MISSING_DEPS'].append(tool)
+        if config in ('GDAL_CONFIG'):
+            # optional deps...
+            if tool not in env['SKIPPED_DEPS']:
+                env['SKIPPED_DEPS'].append(tool)
+            conf.rollback_option(config)
+        else: # freetype and libxml2, not optional
+            if tool not in env['MISSING_DEPS']:
+                env['MISSING_DEPS'].append(tool)
     context.Result( ret )
     return ret
 
@@ -636,22 +636,22 @@ def get_pkg_lib(context, config, lib):
     ret = context.TryAction(cmd)[0]
     parsed = False
     if ret:
-	try:
-	    value = call(cmd,silent=True)
-	    if ' ' in value:
-		parts = value.split(' ')
-		if len(parts) > 1:
-		    value = parts[1]
-	    libnames = re.findall(libpattern,value)
-	    if libnames:
-		libname = libnames[0]
-	    else:
-		# osx 1.8 install gives '-framework GDAL'
-		libname = 'gdal'
-	except Exception, e:
-	    ret = False
-	    print ' unable to determine library name:'# %s' % str(e)
-	    return None
+        try:
+            value = call(cmd,silent=True)
+            if ' ' in value:
+                parts = value.split(' ')
+                if len(parts) > 1:
+                    value = parts[1]
+            libnames = re.findall(libpattern,value)
+            if libnames:
+                libname = libnames[0]
+            else:
+                # osx 1.8 install gives '-framework GDAL'
+                libname = 'gdal'
+        except Exception, e:
+            ret = False
+            print ' unable to determine library name:'# %s' % str(e)
+            return None
     context.Result( libname )
     return libname
 
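get_pkg_lib above pulls a library name out of config-script output with
re.findall(libpattern, value); libpattern is defined outside this diff, but
assuming a conventional -l pattern the extraction step amounts to:

    import re

    value = '-L/usr/lib -lgdal'                     # placeholder gdal-config output
    libnames = re.findall(r'-l([\w\-\.]+)', value)  # assumed pattern
    # libnames == ['gdal']
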
@@ -662,15 +662,15 @@ def parse_pg_config(context, config):
     context.Message( 'Checking for %s... ' % tool)
     ret = context.TryAction(env[config])[0]
     if ret:
-	lib_path = call('%s --libdir' % env[config])
-	inc_path = call('%s --includedir' % env[config])
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
-	lpq = env['PLUGINS']['postgis']['lib']
-	env.Append(LIBS = lpq)
+        lib_path = call('%s --libdir' % env[config])
+        inc_path = call('%s --includedir' % env[config])
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
+        lpq = env['PLUGINS']['postgis']['lib']
+        env.Append(LIBS = lpq)
     else:
-	env['SKIPPED_DEPS'].append(tool)
-	conf.rollback_option(config)
+        env['SKIPPED_DEPS'].append(tool)
+        conf.rollback_option(config)
     context.Result( ret )
     return ret
 
@@ -679,8 +679,8 @@ def ogr_enabled(context):
     context.Message( 'Checking if gdal is ogr enabled... ')
     ret = context.TryAction('%s --ogr-enabled' % env['GDAL_CONFIG'])[0]
     if not ret:
-	if 'ogr' not in env['SKIPPED_DEPS']:
-	    env['SKIPPED_DEPS'].append('ogr')
+        if 'ogr' not in env['SKIPPED_DEPS']:
+            env['SKIPPED_DEPS'].append('ogr')
     context.Result( ret )
     return ret
 
@@ -688,8 +688,8 @@ def rollback_option(context,variable):
     global opts
     env = context.env
     for item in opts.options:
-	if item.key == variable:
-	    env[variable] = item.default
+        if item.key == variable:
+            env[variable] = item.default
 
 def FindBoost(context, prefixes, thread_flag):
     """Routine to auto-find boost header dir, lib dir, and library naming structure.
@@ -704,72 +704,72 @@ def FindBoost(context, prefixes, thread_flag):
     env['BOOST_APPEND'] = str()
 
     if env['THREADING'] == 'multi':
-	search_lib = 'libboost_thread'
+        search_lib = 'libboost_thread'
     else:
-	search_lib = 'libboost_filesystem'
+        search_lib = 'libboost_filesystem'
 
     # note: must call normpath to strip trailing slash otherwise dirname
     # does not remove 'lib' and 'include'
     prefixes.insert(0,os.path.dirname(os.path.normpath(env['BOOST_INCLUDES'])))
     prefixes.insert(0,os.path.dirname(os.path.normpath(env['BOOST_LIBS'])))
     for searchDir in prefixes:
-	libItems = glob(os.path.join(searchDir, env['LIBDIR_SCHEMA'], '%s*.*' % search_lib))
-	if not libItems:
-	    libItems = glob(os.path.join(searchDir, 'lib/%s*.*' % search_lib))
-	incItems = glob(os.path.join(searchDir, 'include/boost*/'))
-	if len(libItems) >= 1 and len(incItems) >= 1:
-	    BOOST_LIB_DIR = os.path.dirname(libItems[0])
-	    BOOST_INCLUDE_DIR = incItems[0].rstrip('boost/')
-	    shortest_lib_name = shortest_name(libItems)
-	    match = re.search(r'%s(.*)\..*' % search_lib, shortest_lib_name)
-	    if hasattr(match,'groups'):
-		BOOST_APPEND = match.groups()[0]
-	    break
+        libItems = glob(os.path.join(searchDir, env['LIBDIR_SCHEMA'], '%s*.*' % search_lib))
+        if not libItems:
+            libItems = glob(os.path.join(searchDir, 'lib/%s*.*' % search_lib))
+        incItems = glob(os.path.join(searchDir, 'include/boost*/'))
+        if len(libItems) >= 1 and len(incItems) >= 1:
+            BOOST_LIB_DIR = os.path.dirname(libItems[0])
+            BOOST_INCLUDE_DIR = incItems[0].rstrip('boost/')
+            shortest_lib_name = shortest_name(libItems)
+            match = re.search(r'%s(.*)\..*' % search_lib, shortest_lib_name)
+            if hasattr(match,'groups'):
+                BOOST_APPEND = match.groups()[0]
+            break
 
     msg = str()
 
     if BOOST_LIB_DIR:
-	msg += '\nFound boost libs: %s' % BOOST_LIB_DIR
-	env['BOOST_LIBS'] = BOOST_LIB_DIR
+        msg += '\nFound boost libs: %s' % BOOST_LIB_DIR
+        env['BOOST_LIBS'] = BOOST_LIB_DIR
     elif not env['BOOST_LIBS']:
-	env['BOOST_LIBS'] = '/usr/' + env['LIBDIR_SCHEMA']
-	msg += '\nUsing default boost lib dir: %s' % env['BOOST_LIBS']
+        env['BOOST_LIBS'] = '/usr/' + env['LIBDIR_SCHEMA']
+        msg += '\nUsing default boost lib dir: %s' % env['BOOST_LIBS']
     else:
-	msg += '\nUsing boost lib dir: %s' % env['BOOST_LIBS']
+        msg += '\nUsing boost lib dir: %s' % env['BOOST_LIBS']
 
     if BOOST_INCLUDE_DIR:
-	msg += '\nFound boost headers: %s' % BOOST_INCLUDE_DIR
-	env['BOOST_INCLUDES'] = BOOST_INCLUDE_DIR
+        msg += '\nFound boost headers: %s' % BOOST_INCLUDE_DIR
+        env['BOOST_INCLUDES'] = BOOST_INCLUDE_DIR
     elif not env['BOOST_INCLUDES']:
-	env['BOOST_INCLUDES'] = '/usr/include'
-	msg += '\nUsing default boost include dir: %s' % env['BOOST_INCLUDES']
+        env['BOOST_INCLUDES'] = '/usr/include'
+        msg += '\nUsing default boost include dir: %s' % env['BOOST_INCLUDES']
     else:
-	msg += '\nUsing boost include dir: %s' % env['BOOST_INCLUDES']
+        msg += '\nUsing boost include dir: %s' % env['BOOST_INCLUDES']
 
     if not env['BOOST_TOOLKIT'] and not env['BOOST_ABI'] and not env['BOOST_VERSION']:
-	if BOOST_APPEND:
-	    msg += '\nFound boost lib name extension: %s' % BOOST_APPEND
-	    env['BOOST_APPEND'] = BOOST_APPEND
+        if BOOST_APPEND:
+            msg += '\nFound boost lib name extension: %s' % BOOST_APPEND
+            env['BOOST_APPEND'] = BOOST_APPEND
     else:
-	# Creating BOOST_APPEND according to the Boost library naming order,
-	# which goes <toolset>-<threading>-<abi>-<version>. See:
-	#  http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html#library-naming
-	append_params = ['']
-	if env['BOOST_TOOLKIT']: append_params.append(env['BOOST_TOOLKIT'])
-	if thread_flag: append_params.append(thread_flag)
-	if env['BOOST_ABI']: append_params.append(env['BOOST_ABI'])
-	if env['BOOST_VERSION']: append_params.append(env['BOOST_VERSION'])
-
-	# Constructing the BOOST_APPEND setting that will be used to find the
-	# Boost libraries.
-	if len(append_params) > 1:
-	    env['BOOST_APPEND'] = '-'.join(append_params)
-	msg += '\nFound boost lib name extension: %s' % env['BOOST_APPEND']
+        # Creating BOOST_APPEND according to the Boost library naming order,
+        # which goes <toolset>-<threading>-<abi>-<version>. See:
+        #  http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html#library-naming
+        append_params = ['']
+        if env['BOOST_TOOLKIT']: append_params.append(env['BOOST_TOOLKIT'])
+        if thread_flag: append_params.append(thread_flag)
+        if env['BOOST_ABI']: append_params.append(env['BOOST_ABI'])
+        if env['BOOST_VERSION']: append_params.append(env['BOOST_VERSION'])
+
+        # Constructing the BOOST_APPEND setting that will be used to find the
+        # Boost libraries.
+        if len(append_params) > 1:
+            env['BOOST_APPEND'] = '-'.join(append_params)
+        msg += '\nFound boost lib name extension: %s' % env['BOOST_APPEND']
 
     env.AppendUnique(CPPPATH = fix_path(env['BOOST_INCLUDES']))
     env.AppendUnique(LIBPATH = fix_path(env['BOOST_LIBS']))
     if env['COLOR_PRINT']:
-	msg = "\033[94m%s\033[0m" % (msg)
+        msg = "\033[94m%s\033[0m" % (msg)
     ret = context.Result(msg)
     return ret
 
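The BOOST_APPEND construction above is easier to follow with concrete, purely
illustrative values:

    # illustrative toolkit/threading/version values, not detected ones
    append_params = ['', 'gcc', 'mt', '1_49']
    BOOST_APPEND = '-'.join(append_params)  # '-gcc-mt-1_49'
    # which selects library names such as libboost_thread-gcc-mt-1_49.so
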
@@ -778,14 +778,14 @@ def CheckBoost(context, version, silent=False):
     v_arr = version.split(".")
     version_n = 0
     if len(v_arr) > 0:
-	version_n += int(v_arr[0])*100000
+        version_n += int(v_arr[0])*100000
     if len(v_arr) > 1:
-	version_n += int(v_arr[1])*100
+        version_n += int(v_arr[1])*100
     if len(v_arr) > 2:
-	version_n += int(v_arr[2])
+        version_n += int(v_arr[2])
 
     if not silent:
-	context.Message('Checking for Boost version >= %s... ' % (version))
+        context.Message('Checking for Boost version >= %s... ' % (version))
     ret = context.TryRun("""
 
 #include <boost/version.hpp>
@@ -797,13 +797,13 @@ int main()
 
 """ % version_n, '.cpp')[0]
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret)
     return ret
 
 def CheckCairoHasFreetype(context, silent=False):
     if not silent:
-	context.Message('Checking for cairo freetype font support ... ')
+        context.Message('Checking for cairo freetype font support ... ')
     context.env.AppendUnique(CPPPATH=copy(env['CAIRO_CPPPATHS']))
 
     ret = context.TryRun("""
@@ -821,15 +821,15 @@ int main()
 
 """, '.cpp')[0]
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret)
     for item in env['CAIRO_CPPPATHS']:
-	rm_path(item,'CPPPATH',context.env)
+        rm_path(item,'CPPPATH',context.env)
     return ret
 
 def CheckHasDlfcn(context, silent=False):
     if not silent:
-	context.Message('Checking for dlfcn.h support ... ')
+        context.Message('Checking for dlfcn.h support ... ')
     ret = context.TryCompile("""
 
 #include <dlfcn.h>
@@ -841,7 +841,7 @@ int main()
 
 """, '.cpp')
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret)
     return ret
 
@@ -866,7 +866,7 @@ return 0;
 
 def CheckBoostScopedEnum(context, silent=False):
     if not silent:
-	context.Message('Checking whether Boost was compiled with C++11 scoped enums ... ')
+        context.Message('Checking whether Boost was compiled with C++11 scoped enums ... ')
     ret = context.TryLink("""
 #include <boost/filesystem.hpp>
 
@@ -878,7 +878,7 @@ int main()
 }
 """, '.cpp')
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret)
     return ret
 
@@ -900,13 +900,13 @@ int main()
     context.did_show_result=1
     result = ret[1].strip()
     if not result:
-	context.Result('error, could not get major and minor version from unicode/uversion.h')
-	return False
+        context.Result('error, could not get major and minor version from unicode/uversion.h')
+        return False
 
     major, minor = map(int,result.split('.'))
     if major >= 4 and minor >= 0:
-	color_print(4,'found: icu %s' % result)
-	return True
+        color_print(4,'found: icu %s' % result)
+        return True
 
     color_print(1,'\nFound insufficient icu version... %s' % result)
     return False
@@ -929,24 +929,24 @@ int main()
     context.did_show_result=1
     result = ret[1].strip()
     if not result:
-	context.Result('error, could not get version from hb.h')
-	return False
+        context.Result('error, could not get version from hb.h')
+        return False
 
     items = result.split(';')
     if items[0] == '1':
-	color_print(4,'found: HarfBuzz %s' % items[1])
-	return True
+        color_print(4,'found: HarfBuzz %s' % items[1])
+        return True
 
     color_print(1,'\nHarfbuzz >= %s required but found ... %s' % (HARFBUZZ_MIN_VERSION_STRING,items[1]))
     return False
 
 def boost_regex_has_icu(context):
     if env['RUNTIME_LINK'] == 'static':
-	# re-order icu libs to ensure linux linker is happy
-	for lib_name in ['icui18n',env['ICU_LIB_NAME'],'icudata']:
-	    if lib_name in context.env['LIBS']:
-		context.env['LIBS'].remove(lib_name)
-	    context.env.Append(LIBS=lib_name)
+        # re-order icu libs to ensure linux linker is happy
+        for lib_name in ['icui18n',env['ICU_LIB_NAME'],'icudata']:
+            if lib_name in context.env['LIBS']:
+                context.env['LIBS'].remove(lib_name)
+            context.env.Append(LIBS=lib_name)
     ret = context.TryRun("""
 
 #include <boost/regex/icu.hpp>
@@ -956,12 +956,12 @@ int main()
 {
     U_NAMESPACE_QUALIFIER UnicodeString ustr;
     try {
-	boost::u32regex pattern = boost::make_u32regex(ustr);
+        boost::u32regex pattern = boost::make_u32regex(ustr);
     }
     // an exception is fine, still indicates support is
     // likely compiled into regex
     catch (...) {
-	return 0;
+        return 0;
     }
     return 0;
 }
@@ -970,7 +970,7 @@ int main()
     context.Message('Checking if boost_regex was built with ICU unicode support... ')
     context.Result(ret[0])
     if ret[0]:
-	return True
+        return True
     return False
 
 def sqlite_has_rtree(context, silent=False):
@@ -992,20 +992,20 @@ int main()
     rc = sqlite3_open(":memory:", &db);
     if (rc != SQLITE_OK)
     {
-	printf("error 1: %s\\n", sqlite3_errmsg(db));
+        printf("error 1: %s\\n", sqlite3_errmsg(db));
     }
     const char * sql = "create virtual table foo using rtree(pkid, xmin, xmax, ymin, ymax)";
     rc = sqlite3_exec(db, sql, 0, 0, 0);
     if (rc != SQLITE_OK)
     {
-	printf("error 2: %s\\n", sqlite3_errmsg(db));
-	sqlite3_close(db);
+        printf("error 2: %s\\n", sqlite3_errmsg(db));
+        sqlite3_close(db);
     }
     else
     {
-	printf("yes, has rtree!\\n");
-	sqlite3_close(db);
-	return 0;
+        printf("yes, has rtree!\\n");
+        sqlite3_close(db);
+        return 0;
     }
 
     return -1;
@@ -1013,12 +1013,12 @@ int main()
 
 """, '.c')
     if not silent:
-	context.Message('Checking if SQLite supports RTREE... ')
+        context.Message('Checking if SQLite supports RTREE... ')
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret[0])
     if ret[0]:
-	return True
+        return True
     return False
 
 def supports_cxx11(context,silent=False):
@@ -1035,54 +1035,48 @@ int main()
 
 """, '.cpp')
     if not silent:
-	context.Message('Checking if compiler (%s) supports -std=c++11 flag... ' % context.env.get('CXX','CXX'))
+        context.Message('Checking if compiler (%s) supports -std=c++11 flag... ' % context.env.get('CXX','CXX'))
     if silent:
-	context.did_show_result=1
+        context.did_show_result=1
     context.Result(ret[0])
     if ret[0]:
-	return True
+        return True
     return False
 
 
 
 conf_tests = { 'prioritize_paths'      : prioritize_paths,
-	       'CheckPKGConfig'        : CheckPKGConfig,
-	       'CheckPKG'              : CheckPKG,
-	       'CheckPKGVersion'       : CheckPKGVersion,
-	       'FindBoost'             : FindBoost,
-	       'CheckBoost'            : CheckBoost,
-	       'CheckCairoHasFreetype' : CheckCairoHasFreetype,
-	       'CheckHasDlfcn'         : CheckHasDlfcn,
-	       'GetBoostLibVersion'    : GetBoostLibVersion,
-	       'parse_config'          : parse_config,
-	       'parse_pg_config'       : parse_pg_config,
-	       'ogr_enabled'           : ogr_enabled,
-	       'get_pkg_lib'           : get_pkg_lib,
-	       'rollback_option'       : rollback_option,
-	       'icu_at_least_four_two' : icu_at_least_four_two,
-	       'harfbuzz_version'      : harfbuzz_version,
-	       'boost_regex_has_icu'   : boost_regex_has_icu,
-	       'sqlite_has_rtree'      : sqlite_has_rtree,
-	       'supports_cxx11'        : supports_cxx11,
-	       'CheckBoostScopedEnum'  : CheckBoostScopedEnum,
-	       }
+               'CheckPKGConfig'        : CheckPKGConfig,
+               'CheckPKG'              : CheckPKG,
+               'CheckPKGVersion'       : CheckPKGVersion,
+               'FindBoost'             : FindBoost,
+               'CheckBoost'            : CheckBoost,
+               'CheckCairoHasFreetype' : CheckCairoHasFreetype,
+               'CheckHasDlfcn'         : CheckHasDlfcn,
+               'GetBoostLibVersion'    : GetBoostLibVersion,
+               'parse_config'          : parse_config,
+               'parse_pg_config'       : parse_pg_config,
+               'ogr_enabled'           : ogr_enabled,
+               'get_pkg_lib'           : get_pkg_lib,
+               'rollback_option'       : rollback_option,
+               'icu_at_least_four_two' : icu_at_least_four_two,
+               'harfbuzz_version'      : harfbuzz_version,
+               'boost_regex_has_icu'   : boost_regex_has_icu,
+               'sqlite_has_rtree'      : sqlite_has_rtree,
+               'supports_cxx11'        : supports_cxx11,
+               'CheckBoostScopedEnum'  : CheckBoostScopedEnum,
+               }
 
 def GetMapnikLibVersion():
     ver = []
-    is_pre = False
     for line in open('include/mapnik/version.hpp').readlines():
-	if line.startswith('#define MAPNIK_MAJOR_VERSION'):
-	    ver.append(line.split(' ')[2].strip())
-	if line.startswith('#define MAPNIK_MINOR_VERSION'):
-	    ver.append(line.split(' ')[2].strip())
-	if line.startswith('#define MAPNIK_PATCH_VERSION'):
-	    ver.append(line.split(' ')[2].strip())
-	if line.startswith('#define MAPNIK_VERSION_IS_RELEASE'):
-	    if line.split(' ')[2].strip() == "0":
-		is_pre = True
+        if line.startswith('#define MAPNIK_MAJOR_VERSION'):
+            ver.append(line.split(' ')[2].strip())
+        if line.startswith('#define MAPNIK_MINOR_VERSION'):
+            ver.append(line.split(' ')[2].strip())
+        if line.startswith('#define MAPNIK_PATCH_VERSION'):
+            ver.append(line.split(' ')[2].strip())
     version_string = ".".join(ver)
-    if is_pre:
-	version_string += '-pre'
     return version_string
 
 if not preconfigured:
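
GetMapnikLibVersion above now derives the version string solely from the three
version defines in include/mapnik/version.hpp (the -pre suffix logic is gone,
matching the changelog entry). Each define is parsed like this:

    line = '#define MAPNIK_MAJOR_VERSION 3\n'  # as found in a 3.0.7 version.hpp
    line.split(' ')[2].strip()                 # -> '3'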
@@ -1090,40 +1084,40 @@ if not preconfigured:
     color_print(4,'Configuring build environment...')
 
     if not env['FAST']:
-	SetCacheMode('force')
+        SetCacheMode('force')
 
     if env['USE_CONFIG']:
-	if not env['CONFIG'].endswith('.py'):
-	    color_print(1,'SCons CONFIG file specified is not a python file, will not be read...')
-	else:
-	    # Accept more than one file as comma-delimited list
-	    user_confs = env['CONFIG'].split(',')
-	    # If they exist add the files to the existing `opts`
-	    for conf in user_confs:
-		if os.path.exists(conf):
-		    opts.files.append(conf)
-		    color_print(4,"SCons CONFIG found: '%s', variables will be inherited..." % conf)
-		    optfile = file(conf)
-		    #print optfile.read().replace("\n", " ").replace("'","").replace(" = ","=")
-		    optfile.close()
-
-		elif not conf == SCONS_LOCAL_CONFIG:
-		    # if default missing, no worries
-		    # but if the default is overridden and the file is not found, give warning
-		    color_print(1,"SCons CONFIG not found: '%s'" % conf)
-	    # Recreate the base environment using modified `opts`
-	    env = Environment(ENV=os.environ,options=opts)
-	    init_environment(env)
-	    env['USE_CONFIG'] = True
+        if not env['CONFIG'].endswith('.py'):
+            color_print(1,'SCons CONFIG file specified is not a python file, will not be read...')
+        else:
+            # Accept more than one file as comma-delimited list
+            user_confs = env['CONFIG'].split(',')
+            # If they exist add the files to the existing `opts`
+            for conf in user_confs:
+                if os.path.exists(conf):
+                    opts.files.append(conf)
+                    color_print(4,"SCons CONFIG found: '%s', variables will be inherited..." % conf)
+                    optfile = file(conf)
+                    #print optfile.read().replace("\n", " ").replace("'","").replace(" = ","=")
+                    optfile.close()
+
+                elif not conf == SCONS_LOCAL_CONFIG:
+                    # if default missing, no worries
+                    # but if the default is overridden and the file is not found, give warning
+                    color_print(1,"SCons CONFIG not found: '%s'" % conf)
+            # Recreate the base environment using modified `opts`
+            env = Environment(ENV=os.environ,options=opts)
+            init_environment(env)
+            env['USE_CONFIG'] = True
     else:
-	color_print(4,'SCons USE_CONFIG specified as false, will not inherit variables python config file...')
+        color_print(4,'SCons USE_CONFIG specified as false, will not inherit variables from a python config file...')
 
     conf = Configure(env, custom_tests = conf_tests)
 
     if env['DEBUG']:
-	mode = 'debug mode'
+        mode = 'debug mode'
     else:
-	mode = 'release mode'
+        mode = 'release mode'
 
     env['PLATFORM'] = platform.uname()[0]
     color_print(4,"Configuring on %s in *%s*..." % (env['PLATFORM'],mode))
@@ -1148,7 +1142,7 @@ if not preconfigured:
     # previously a leading / was expected for LIB_DIR_NAME
     # now strip it to ensure expected behavior
     if env['LIB_DIR_NAME'].startswith(os.path.sep):
-	env['LIB_DIR_NAME'] = strip_first(env['LIB_DIR_NAME'],os.path.sep)
+        env['LIB_DIR_NAME'] = strip_first(env['LIB_DIR_NAME'],os.path.sep)
 
     # base install location
     env['MAPNIK_LIB_BASE'] = os.path.join(env['PREFIX'],env['LIBDIR_SCHEMA'])
@@ -1158,9 +1152,9 @@ if not preconfigured:
     env['MAPNIK_INPUT_PLUGINS'] = os.path.join(env['MAPNIK_LIB_DIR'],'input')
     # fonts sub directory
     if env['SYSTEM_FONTS']:
-	env['MAPNIK_FONTS'] = os.path.normpath(env['SYSTEM_FONTS'])
+        env['MAPNIK_FONTS'] = os.path.normpath(env['SYSTEM_FONTS'])
     else:
-	env['MAPNIK_FONTS'] = os.path.join(env['MAPNIK_LIB_DIR'],'fonts')
+        env['MAPNIK_FONTS'] = os.path.join(env['MAPNIK_LIB_DIR'],'fonts')
 
     # install prefix is a pre-pended base location to
     # re-route the install and only intended for package building
@@ -1173,9 +1167,9 @@ if not preconfigured:
     env['MAPNIK_LIB_DIR_DEST'] =  os.path.join(env['MAPNIK_LIB_BASE_DEST'],env['LIB_DIR_NAME'])
     env['MAPNIK_INPUT_PLUGINS_DEST'] = os.path.join(env['MAPNIK_LIB_DIR_DEST'],'input')
     if env['SYSTEM_FONTS']:
-	env['MAPNIK_FONTS_DEST'] = os.path.normpath(env['SYSTEM_FONTS'])
+        env['MAPNIK_FONTS_DEST'] = os.path.normpath(env['SYSTEM_FONTS'])
     else:
-	env['MAPNIK_FONTS_DEST'] = os.path.join(env['MAPNIK_LIB_DIR_DEST'],'fonts')
+        env['MAPNIK_FONTS_DEST'] = os.path.join(env['MAPNIK_LIB_DIR_DEST'],'fonts')
 
     if env['LINKING'] == 'static':
        env['MAPNIK_LIB_NAME'] = '${LIBPREFIX}${MAPNIK_NAME}${LIBSUFFIX}'
@@ -1183,15 +1177,15 @@ if not preconfigured:
        env['MAPNIK_LIB_NAME'] = '${SHLIBPREFIX}${MAPNIK_NAME}${SHLIBSUFFIX}'
 
     if env['PKG_CONFIG_PATH']:
-	env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
-	# otherwise this variable == os.environ["PKG_CONFIG_PATH"]
+        env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
+        # otherwise this variable == os.environ["PKG_CONFIG_PATH"]
 
     if env['PATH']:
-	env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
+        env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
 
     if env['SYSTEM_FONTS']:
-	if not os.path.isdir(env['SYSTEM_FONTS']):
-	    color_print(1,'Warning: Directory specified for SYSTEM_FONTS does not exist!')
+        if not os.path.isdir(env['SYSTEM_FONTS']):
+            color_print(1,'Warning: Directory specified for SYSTEM_FONTS does not exist!')
 
     # Set up for libraries and headers dependency checks
     env['CPPPATH'] = ['#include']
@@ -1199,8 +1193,8 @@ if not preconfigured:
 
     # set any custom cxxflags and ldflags to come first
     if sys.platform == 'darwin' and not env['HOST']:
-	DEFAULT_CXX11_CXXFLAGS += ' -stdlib=libc++'
-	DEFAULT_CXX11_LINKFLAGS = ' -stdlib=libc++'
+        DEFAULT_CXX11_CXXFLAGS += ' -stdlib=libc++'
+        DEFAULT_CXX11_LINKFLAGS = ' -stdlib=libc++'
     env.Append(CPPDEFINES = env['CUSTOM_DEFINES'])
     env.Append(CXXFLAGS = DEFAULT_CXX11_CXXFLAGS)
     env.Append(CXXFLAGS = env['CUSTOM_CXXFLAGS'])
@@ -1212,11 +1206,11 @@ if not preconfigured:
 
     thread_suffix = 'mt'
     if env['PLATFORM'] == 'FreeBSD':
-	thread_suffix = ''
-	env.Append(LIBS = 'pthread')
+        thread_suffix = ''
+        env.Append(LIBS = 'pthread')
 
     if env['SHAPE_MEMORY_MAPPED_FILE']:
-	env.Append(CPPDEFINES = '-DSHAPE_MEMORY_MAPPED_FILE')
+        env.Append(CPPDEFINES = '-DSHAPE_MEMORY_MAPPED_FILE')
 
     # allow for mac osx /usr/lib/libicucore.dylib compatibility
     # requires custom supplied headers since Apple does not include them
@@ -1225,365 +1219,365 @@ if not preconfigured:
     # http://www.opensource.apple.com/tarballs/ICU/
     # then copy the headers to a location that mapnik will find
     if 'core' in env['ICU_LIB_NAME']:
-	env.Append(CPPDEFINES = '-DU_HIDE_DRAFT_API')
-	env.Append(CPPDEFINES = '-DUDISABLE_RENAMING')
-	if os.path.exists(env['ICU_LIB_NAME']):
-	    #-sICU_LINK=" -L/usr/lib -licucore
-	    env['ICU_LIB_NAME'] = os.path.basename(env['ICU_LIB_NAME']).replace('.dylib','').replace('lib','')
+        env.Append(CPPDEFINES = '-DU_HIDE_DRAFT_API')
+        env.Append(CPPDEFINES = '-DUDISABLE_RENAMING')
+        if os.path.exists(env['ICU_LIB_NAME']):
+            #-sICU_LINK=" -L/usr/lib -licucore
+            env['ICU_LIB_NAME'] = os.path.basename(env['ICU_LIB_NAME']).replace('.dylib','').replace('lib','')
 
     # Adding the required prerequisite library directories to the include path for
     # compiling and the library path for linking, respectively.
     for required in ('ICU', 'SQLITE', 'HB'):
-	inc_path = env['%s_INCLUDES' % required]
-	lib_path = env['%s_LIBS' % required]
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        inc_path = env['%s_INCLUDES' % required]
+        lib_path = env['%s_LIBS' % required]
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
 
     REQUIRED_LIBSHEADERS = [
-	['z', 'zlib.h', True,'C'],
-	[env['ICU_LIB_NAME'],'unicode/unistr.h',True,'C++'],
-	['harfbuzz', 'harfbuzz/hb.h',True,'C++']
+        ['z', 'zlib.h', True,'C'],
+        [env['ICU_LIB_NAME'],'unicode/unistr.h',True,'C++'],
+        ['harfbuzz', 'harfbuzz/hb.h',True,'C++']
     ]
 
     if env.get('FREETYPE_LIBS') or env.get('FREETYPE_INCLUDES'):
-	REQUIRED_LIBSHEADERS.insert(0,['freetype','ft2build.h',True,'C'])
-	if env.get('FREETYPE_INCLUDES'):
-	    inc_path = env['FREETYPE_INCLUDES']
-	    env.AppendUnique(CPPPATH = fix_path(inc_path))
-	if env.get('FREETYPE_LIBS'):
-	    lib_path = env['FREETYPE_LIBS']
-	    env.AppendUnique(LIBPATH = fix_path(lib_path))
+        REQUIRED_LIBSHEADERS.insert(0,['freetype','ft2build.h',True,'C'])
+        if env.get('FREETYPE_INCLUDES'):
+            inc_path = env['FREETYPE_INCLUDES']
+            env.AppendUnique(CPPPATH = fix_path(inc_path))
+        if env.get('FREETYPE_LIBS'):
+            lib_path = env['FREETYPE_LIBS']
+            env.AppendUnique(LIBPATH = fix_path(lib_path))
     elif conf.parse_config('FREETYPE_CONFIG'):
-	# check if freetype links to bz2
-	if env['RUNTIME_LINK'] == 'static':
-	    temp_env = env.Clone()
-	    temp_env['LIBS'] = []
-	    try:
-		# TODO - freetype-config accepts --static as of v2.5.3
-		temp_env.ParseConfig('%s --libs' % env['FREETYPE_CONFIG'])
-		if 'bz2' in temp_env['LIBS']:
-		    env['EXTRA_FREETYPE_LIBS'].append('bz2')
-	    except OSError,e:
-		pass
+        # check if freetype links to bz2
+        if env['RUNTIME_LINK'] == 'static':
+            temp_env = env.Clone()
+            temp_env['LIBS'] = []
+            try:
+                # TODO - freetype-config accepts --static as of v2.5.3
+                temp_env.ParseConfig('%s --libs' % env['FREETYPE_CONFIG'])
+                if 'bz2' in temp_env['LIBS']:
+                    env['EXTRA_FREETYPE_LIBS'].append('bz2')
+            except OSError,e:
+                pass
 
     # libxml2 should be optional but is currently not
     # https://github.com/mapnik/mapnik/issues/913
     if env.get('XMLPARSER') and env['XMLPARSER'] == 'libxml2':
-	if env.get('XML2_LIBS') or env.get('XML2_INCLUDES'):
-	    OPTIONAL_LIBSHEADERS.insert(0,['libxml2','libxml/parser.h',True,'C'])
-	    if env.get('XML2_INCLUDES'):
-		inc_path = env['XML2_INCLUDES']
-		env.AppendUnique(CPPPATH = fix_path(inc_path))
-	    if env.get('XML2_LIBS'):
-		lib_path = env['XML2_LIBS']
-		env.AppendUnique(LIBPATH = fix_path(lib_path))
-	elif conf.parse_config('XML2_CONFIG',checks='--cflags'):
-	    env['HAS_LIBXML2'] = True
-	else:
-	    env['MISSING_DEPS'].append('libxml2')
+        if env.get('XML2_LIBS') or env.get('XML2_INCLUDES'):
+            OPTIONAL_LIBSHEADERS.insert(0,['libxml2','libxml/parser.h',True,'C'])
+            if env.get('XML2_INCLUDES'):
+                inc_path = env['XML2_INCLUDES']
+                env.AppendUnique(CPPPATH = fix_path(inc_path))
+            if env.get('XML2_LIBS'):
+                lib_path = env['XML2_LIBS']
+                env.AppendUnique(LIBPATH = fix_path(lib_path))
+        elif conf.parse_config('XML2_CONFIG',checks='--cflags'):
+            env['HAS_LIBXML2'] = True
+        else:
+            env['MISSING_DEPS'].append('libxml2')
 
     if not env['HOST']:
-	if conf.CheckHasDlfcn():
-	    env.Append(CPPDEFINES = '-DMAPNIK_HAS_DLCFN')
-	else:
-	    env['SKIPPED_DEPS'].extend(['dlfcn'])
+        if conf.CheckHasDlfcn():
+            env.Append(CPPDEFINES = '-DMAPNIK_HAS_DLCFN')
+        else:
+            env['SKIPPED_DEPS'].extend(['dlfcn'])
 
     OPTIONAL_LIBSHEADERS = []
 
     if env['JPEG']:
-	OPTIONAL_LIBSHEADERS.append(['jpeg', ['stdio.h', 'jpeglib.h'], False,'C','-DHAVE_JPEG'])
-	inc_path = env['%s_INCLUDES' % 'JPEG']
-	lib_path = env['%s_LIBS' % 'JPEG']
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        OPTIONAL_LIBSHEADERS.append(['jpeg', ['stdio.h', 'jpeglib.h'], False,'C','-DHAVE_JPEG'])
+        inc_path = env['%s_INCLUDES' % 'JPEG']
+        lib_path = env['%s_LIBS' % 'JPEG']
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
     else:
-	env['SKIPPED_DEPS'].extend(['jpeg'])
+        env['SKIPPED_DEPS'].extend(['jpeg'])
 
     if env['PROJ']:
-	OPTIONAL_LIBSHEADERS.append(['proj', 'proj_api.h', False,'C','-DMAPNIK_USE_PROJ4'])
-	inc_path = env['%s_INCLUDES' % 'PROJ']
-	lib_path = env['%s_LIBS' % 'PROJ']
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        OPTIONAL_LIBSHEADERS.append(['proj', 'proj_api.h', False,'C','-DMAPNIK_USE_PROJ4'])
+        inc_path = env['%s_INCLUDES' % 'PROJ']
+        lib_path = env['%s_LIBS' % 'PROJ']
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
     else:
-	env['SKIPPED_DEPS'].extend(['proj'])
+        env['SKIPPED_DEPS'].extend(['proj'])
 
     if env['PNG']:
-	OPTIONAL_LIBSHEADERS.append(['png', 'png.h', False,'C','-DHAVE_PNG'])
-	inc_path = env['%s_INCLUDES' % 'PNG']
-	lib_path = env['%s_LIBS' % 'PNG']
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        OPTIONAL_LIBSHEADERS.append(['png', 'png.h', False,'C','-DHAVE_PNG'])
+        inc_path = env['%s_INCLUDES' % 'PNG']
+        lib_path = env['%s_LIBS' % 'PNG']
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
     else:
-	env['SKIPPED_DEPS'].extend(['png'])
+        env['SKIPPED_DEPS'].extend(['png'])
 
     if env['WEBP']:
-	OPTIONAL_LIBSHEADERS.append(['webp', 'webp/decode.h', False,'C','-DHAVE_WEBP'])
-	inc_path = env['%s_INCLUDES' % 'WEBP']
-	lib_path = env['%s_LIBS' % 'WEBP']
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        OPTIONAL_LIBSHEADERS.append(['webp', 'webp/decode.h', False,'C','-DHAVE_WEBP'])
+        inc_path = env['%s_INCLUDES' % 'WEBP']
+        lib_path = env['%s_LIBS' % 'WEBP']
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
     else:
-	env['SKIPPED_DEPS'].extend(['webp'])
+        env['SKIPPED_DEPS'].extend(['webp'])
 
     if env['TIFF']:
-	OPTIONAL_LIBSHEADERS.append(['tiff', 'tiff.h', False,'C','-DHAVE_TIFF'])
-	inc_path = env['%s_INCLUDES' % 'TIFF']
-	lib_path = env['%s_LIBS' % 'TIFF']
-	env.AppendUnique(CPPPATH = fix_path(inc_path))
-	env.AppendUnique(LIBPATH = fix_path(lib_path))
+        OPTIONAL_LIBSHEADERS.append(['tiff', 'tiff.h', False,'C','-DHAVE_TIFF'])
+        inc_path = env['%s_INCLUDES' % 'TIFF']
+        lib_path = env['%s_LIBS' % 'TIFF']
+        env.AppendUnique(CPPPATH = fix_path(inc_path))
+        env.AppendUnique(LIBPATH = fix_path(lib_path))
     else:
-	env['SKIPPED_DEPS'].extend(['tiff'])
+        env['SKIPPED_DEPS'].extend(['tiff'])
 
     # if requested, sort LIBPATH and CPPPATH before running CheckLibWithHeader tests
     if env['PRIORITIZE_LINKING']:
-	conf.prioritize_paths(silent=True)
+        conf.prioritize_paths(silent=True)
 
     # test for C++11 support, which is required
     if not env['HOST'] and not conf.supports_cxx11():
-	color_print(1,"C++ compiler does not support C++11 standard (-std=c++11), which is required. Please upgrade your compiler to at least g++ 4.7 (ideally 4.8)")
-	Exit(1)
+        color_print(1,"C++ compiler does not support C++11 standard (-std=c++11), which is required. Please upgrade your compiler to at least g++ 4.7 (ideally 4.8)")
+        Exit(1)
 
     if not env['HOST']:
-	for libname, headers, required, lang in REQUIRED_LIBSHEADERS:
-	    if not conf.CheckLibWithHeader(libname, headers, lang):
-		if required:
-		    color_print(1, 'Could not find required header or shared library for %s' % libname)
-		    env['MISSING_DEPS'].append(libname)
-		else:
-		    color_print(4, 'Could not find optional header or shared library for %s' % libname)
-		    env['SKIPPED_DEPS'].append(libname)
-	    else:
-		if libname == env['ICU_LIB_NAME']:
-		    if env['ICU_LIB_NAME'] not in env['MISSING_DEPS']:
-			if not conf.icu_at_least_four_two():
-			    # expression_string.cpp and map.cpp use fromUTF* function only available in >= ICU 4.2
-			    env['MISSING_DEPS'].append(env['ICU_LIB_NAME'])
-		elif libname == 'harfbuzz':
-		    if not conf.harfbuzz_version():
-			env['SKIPPED_DEPS'].append('harfbuzz-min-version')
+        for libname, headers, required, lang in REQUIRED_LIBSHEADERS:
+            if not conf.CheckLibWithHeader(libname, headers, lang):
+                if required:
+                    color_print(1, 'Could not find required header or shared library for %s' % libname)
+                    env['MISSING_DEPS'].append(libname)
+                else:
+                    color_print(4, 'Could not find optional header or shared library for %s' % libname)
+                    env['SKIPPED_DEPS'].append(libname)
+            else:
+                if libname == env['ICU_LIB_NAME']:
+                    if env['ICU_LIB_NAME'] not in env['MISSING_DEPS']:
+                        if not conf.icu_at_least_four_two():
+                            # expression_string.cpp and map.cpp use fromUTF* functions only available in ICU >= 4.2
+                            env['MISSING_DEPS'].append(env['ICU_LIB_NAME'])
+                elif libname == 'harfbuzz':
+                    if not conf.harfbuzz_version():
+                        env['SKIPPED_DEPS'].append('harfbuzz-min-version')
 
     if env['BIGINT']:
-	env.Append(CPPDEFINES = '-DBIGINT')
+        env.Append(CPPDEFINES = '-DBIGINT')
 
     if env['THREADING'] == 'multi':
-	thread_flag = thread_suffix
+        thread_flag = thread_suffix
     else:
-	thread_flag = ''
+        thread_flag = ''
 
     conf.FindBoost(BOOST_SEARCH_PREFIXES,thread_flag)
 
     has_boost_devel = True
     if not env['HOST']:
-	if not conf.CheckHeader(header='boost/version.hpp',language='C++'):
-	    env['MISSING_DEPS'].append('boost development headers')
-	    has_boost_devel = False
+        if not conf.CheckHeader(header='boost/version.hpp',language='C++'):
+            env['MISSING_DEPS'].append('boost development headers')
+            has_boost_devel = False
 
     if has_boost_devel:
-	if not env['HOST']:
-	    env['BOOST_LIB_VERSION_FROM_HEADER'] = conf.GetBoostLibVersion()
-
-	# The other required boost headers.
-	BOOST_LIBSHEADERS = [
-	    ['system', 'boost/system/system_error.hpp', True],
-	    ['filesystem', 'boost/filesystem/operations.hpp', True],
-	    ['regex', 'boost/regex.hpp', True],
-	    ['program_options', 'boost/program_options.hpp', False]
-	]
-
-	if env['THREADING'] == 'multi':
-	    BOOST_LIBSHEADERS.append(['thread', 'boost/thread/mutex.hpp', True])
-	    # on solaris the configure checks for boost_thread
-	    # require the -pthreads flag to be able to check for
-	    # threading support, so we add as a global library instead
-	    # of attaching to cxxflags after configure
-	    if env['PLATFORM'] == 'SunOS':
-		env.Append(CXXFLAGS = '-pthreads')
-
-	# if requested, sort LIBPATH and CPPPATH before running CheckLibWithHeader tests
-	if env['PRIORITIZE_LINKING']:
-	    conf.prioritize_paths(silent=True)
-
-	if not env['HOST']:
-	    # if the user is not setting custom boost configuration
-	    # enforce boost version greater than or equal to BOOST_MIN_VERSION
-	    if not conf.CheckBoost(BOOST_MIN_VERSION):
-		color_print(4,'Found boost lib version... %s' % env.get('BOOST_LIB_VERSION_FROM_HEADER') )
-		color_print(1,'Boost version %s or greater is required' % BOOST_MIN_VERSION)
-		if not env['BOOST_VERSION']:
-		    env['MISSING_DEPS'].append('boost version >= %s' % BOOST_MIN_VERSION)
-	    else:
-		color_print(4,'Found boost lib version... %s' % env.get('BOOST_LIB_VERSION_FROM_HEADER') )
-
-	if not env['HOST']:
-	    for count, libinfo in enumerate(BOOST_LIBSHEADERS):
-		if not conf.CheckLibWithHeader('boost_%s%s' % (libinfo[0],env['BOOST_APPEND']), libinfo[1], 'C++'):
-		    if libinfo[2]:
-			color_print(1,'Could not find required header or shared library for boost %s' % libinfo[0])
-			env['MISSING_DEPS'].append('boost ' + libinfo[0])
-		    else:
-			color_print(4,'Could not find optional header or shared library for boost %s' % libinfo[0])
-			env['SKIPPED_DEPS'].append('boost ' + libinfo[0])
-
-	# Boost versions before 1.57 are broken when the system package and
-	# Mapnik are compiled against different standards. On Ubuntu 14.04
-	# using boost 1.54, it breaks scoped enums. It's a bit of a hack to
-	# just turn it off like this, but seems the only available work-
-	# around. See https://svn.boost.org/trac/boost/ticket/6779 for more
-	# details.
-	boost_version = [int(x) for x in env.get('BOOST_LIB_VERSION_FROM_HEADER').split('_')]
-	if not conf.CheckBoostScopedEnum():
-	    if boost_version < [1, 51]:
-		env.Append(CXXFLAGS = '-DBOOST_NO_SCOPED_ENUMS')
-	    elif boost_version < [1, 57]:
-		env.Append(CXXFLAGS = '-DBOOST_NO_CXX11_SCOPED_ENUMS')
+        if not env['HOST']:
+            env['BOOST_LIB_VERSION_FROM_HEADER'] = conf.GetBoostLibVersion()
+
+        # The other required boost headers.
+        BOOST_LIBSHEADERS = [
+            ['system', 'boost/system/system_error.hpp', True],
+            ['filesystem', 'boost/filesystem/operations.hpp', True],
+            ['regex', 'boost/regex.hpp', True],
+            ['program_options', 'boost/program_options.hpp', False]
+        ]
+
+        if env['THREADING'] == 'multi':
+            BOOST_LIBSHEADERS.append(['thread', 'boost/thread/mutex.hpp', True])
+            # on Solaris the configure checks for boost_thread
+            # require the -pthreads flag in order to detect threading
+            # support, so we add the flag globally here instead of
+            # attaching it to cxxflags after configure
+            if env['PLATFORM'] == 'SunOS':
+                env.Append(CXXFLAGS = '-pthreads')
+
+        # if requested, sort LIBPATH and CPPPATH before running CheckLibWithHeader tests
+        if env['PRIORITIZE_LINKING']:
+            conf.prioritize_paths(silent=True)
+
+        if not env['HOST']:
+            # if the user is not setting custom boost configuration
+            # enforce boost version greater than or equal to BOOST_MIN_VERSION
+            if not conf.CheckBoost(BOOST_MIN_VERSION):
+                color_print(4,'Found boost lib version... %s' % env.get('BOOST_LIB_VERSION_FROM_HEADER') )
+                color_print(1,'Boost version %s or greater is required' % BOOST_MIN_VERSION)
+                if not env['BOOST_VERSION']:
+                    env['MISSING_DEPS'].append('boost version >= %s' % BOOST_MIN_VERSION)
+            else:
+                color_print(4,'Found boost lib version... %s' % env.get('BOOST_LIB_VERSION_FROM_HEADER') )
+
+        if not env['HOST']:
+            for count, libinfo in enumerate(BOOST_LIBSHEADERS):
+                if not conf.CheckLibWithHeader('boost_%s%s' % (libinfo[0],env['BOOST_APPEND']), libinfo[1], 'C++'):
+                    if libinfo[2]:
+                        color_print(1,'Could not find required header or shared library for boost %s' % libinfo[0])
+                        env['MISSING_DEPS'].append('boost ' + libinfo[0])
+                    else:
+                        color_print(4,'Could not find optional header or shared library for boost %s' % libinfo[0])
+                        env['SKIPPED_DEPS'].append('boost ' + libinfo[0])
+
+        # Boost versions before 1.57 are broken when the system package and
+        # Mapnik are compiled against different standards. On Ubuntu 14.04
+        # using boost 1.54, it breaks scoped enums. It's a bit of a hack to
+        # just turn it off like this, but it seems to be the only available
+        # workaround. See https://svn.boost.org/trac/boost/ticket/6779 for more
+        # details.
+        boost_version = [int(x) for x in env.get('BOOST_LIB_VERSION_FROM_HEADER').split('_')]
+        if not conf.CheckBoostScopedEnum():
+            if boost_version < [1, 51]:
+                env.Append(CXXFLAGS = '-DBOOST_NO_SCOPED_ENUMS')
+            elif boost_version < [1, 57]:
+                env.Append(CXXFLAGS = '-DBOOST_NO_CXX11_SCOPED_ENUMS')
 
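
A note on the version test above: BOOST_LIB_VERSION_FROM_HEADER is a string
such as '1_54', so splitting on '_' yields a list of integers that Python
compares element-wise. A minimal illustration (the literal '1_54' is assumed
purely for the example):

    boost_version = [int(x) for x in '1_54'.split('_')]  # -> [1, 54]
    # list comparison is lexicographic, so this is a genuine version
    # test rather than a string comparison
    assert boost_version < [1, 57]
    assert not boost_version < [1, 51]
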
     if not env['HOST'] and env['ICU_LIB_NAME'] not in env['MISSING_DEPS']:
-	# http://lists.boost.org/Archives/boost/2009/03/150076.php
-	# we need libicui18n if using static boost libraries, so it is
-	# important to try this check with the library linked
-	if conf.boost_regex_has_icu():
-	    # TODO - should avoid having this be globally defined...
-	    env.Append(CPPDEFINES = '-DBOOST_REGEX_HAS_ICU')
-	else:
-	    env['SKIPPED_DEPS'].append('boost_regex_icu')
-
-	for libname, headers, required, lang, define in OPTIONAL_LIBSHEADERS:
-	    if not env['HOST']:
-		if not conf.CheckLibWithHeader(libname, headers, lang):
-		    if required:
-			color_print(1, 'Could not find required header or shared library for %s' % libname)
-			env['MISSING_DEPS'].append(libname)
-		    else:
-			color_print(4, 'Could not find optional header or shared library for %s' % libname)
-			env['SKIPPED_DEPS'].append(libname)
-		else:
-		    env.Append(CPPDEFINES = define)
-	    else:
-		env.Append(CPPDEFINES = define)
+        # http://lists.boost.org/Archives/boost/2009/03/150076.php
+        # we need libicui18n if using static boost libraries, so it is
+        # important to try this check with the library linked
+        if conf.boost_regex_has_icu():
+            # TODO - should avoid having this be globally defined...
+            env.Append(CPPDEFINES = '-DBOOST_REGEX_HAS_ICU')
+        else:
+            env['SKIPPED_DEPS'].append('boost_regex_icu')
+
+        for libname, headers, required, lang, define in OPTIONAL_LIBSHEADERS:
+            if not env['HOST']:
+                if not conf.CheckLibWithHeader(libname, headers, lang):
+                    if required:
+                        color_print(1, 'Could not find required header or shared library for %s' % libname)
+                        env['MISSING_DEPS'].append(libname)
+                    else:
+                        color_print(4, 'Could not find optional header or shared library for %s' % libname)
+                        env['SKIPPED_DEPS'].append(libname)
+                else:
+                    env.Append(CPPDEFINES = define)
+            else:
+                env.Append(CPPDEFINES = define)
 
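
For reference, the loop above unpacks five fields per OPTIONAL_LIBSHEADERS
entry; its shape, shown with the jpeg entry registered earlier in this hunk:

    libname, headers, required, lang, define = \
        ['jpeg', ['stdio.h', 'jpeglib.h'], False, 'C', '-DHAVE_JPEG']
    # 'headers' may be a single path or a list; 'required' decides whether
    # a failed check lands in MISSING_DEPS or merely SKIPPED_DEPS
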
     env['REQUESTED_PLUGINS'] = [ driver.strip() for driver in Split(env['INPUT_PLUGINS'])]
 
     SQLITE_HAS_RTREE = None
     if env['HOST']:
-	SQLITE_HAS_RTREE = True
+        SQLITE_HAS_RTREE = True
 
     CHECK_PKG_CONFIG = conf.CheckPKGConfig('0.15.0')
 
     if len(env['REQUESTED_PLUGINS']):
-	if env['HOST']:
-	    for plugin in env['REQUESTED_PLUGINS']:
-		details = env['PLUGINS'][plugin]
-		if details['lib']:
-		    env.AppendUnique(LIBS=details['lib'])
-	else:
-	    color_print(4,'Checking for requested plugins dependencies...')
-	    for plugin in env['REQUESTED_PLUGINS']:
-		details = env['PLUGINS'][plugin]
-		if plugin == 'gdal':
-		    if conf.parse_config('GDAL_CONFIG',checks='--libs'):
-			conf.parse_config('GDAL_CONFIG',checks='--cflags')
-			libname = conf.get_pkg_lib('GDAL_CONFIG','gdal')
-			if libname:
-			    if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
-				env['SKIPPED_DEPS'].append('gdal')
-				if libname in env['LIBS']:
-				     env['LIBS'].remove(libname)
-			    else:
-				details['lib'] = libname
-		elif plugin == 'postgis' or plugin == 'pgraster':
-		    if env.get('PG_LIBS') or env.get('PG_INCLUDES'):
-			libname = details['lib']
-			if env.get('PG_INCLUDES'):
-			    inc_path = env['PG_INCLUDES']
-			    env.AppendUnique(CPPPATH = fix_path(inc_path))
-			if env.get('PG_LIBS'):
-			    lib_path = env['PG_LIBS']
-			    env.AppendUnique(LIBPATH = fix_path(lib_path))
-			if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
-			    env['SKIPPED_DEPS'].append(libname)
-			    if libname in env['LIBS']:
-				 env['LIBS'].remove(libname)
-			else:
-			    details['lib'] = libname
-		    else:
-			conf.parse_pg_config('PG_CONFIG')
-		elif plugin == 'ogr':
-		    if conf.ogr_enabled():
-			if conf.parse_config('GDAL_CONFIG',checks='--libs'):
-			    conf.parse_config('GDAL_CONFIG',checks='--cflags')
-			    libname = conf.get_pkg_lib('GDAL_CONFIG','ogr')
-			    if libname:
-				if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
-				    if 'gdal' not in env['SKIPPED_DEPS']:
-					env['SKIPPED_DEPS'].append('gdal')
-				    if libname in env['LIBS']:
-					 env['LIBS'].remove(libname)
-				else:
-				    details['lib'] = libname
-		elif details['path'] and details['lib'] and details['inc']:
-		    backup = env.Clone().Dictionary()
-		    # Note, the 'delete_existing' keyword makes sure that these paths are prepended
-		    # to the beginning of the path list even if they already exist
-		    incpath = env['%s_INCLUDES' % details['path']]
-		    libpath = env['%s_LIBS' % details['path']]
-		    env.PrependUnique(CPPPATH = fix_path(incpath),delete_existing=True)
-		    env.PrependUnique(LIBPATH = fix_path(libpath),delete_existing=True)
-		    if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
-			env.Replace(**backup)
-			env['SKIPPED_DEPS'].append(details['lib'])
-		    if plugin == 'sqlite':
-			sqlite_backup = env.Clone().Dictionary()
-			# if statically linking, on linux we likely
-			# need to link sqlite to pthreads and dl
-			if env['RUNTIME_LINK'] == 'static' and not env['PLATFORM'] == 'Darwin':
-			    if CHECK_PKG_CONFIG and conf.CheckPKG('sqlite3'):
-				sqlite_env = env.Clone()
-				try:
-				    sqlite_env.ParseConfig('pkg-config --static --libs sqlite3')
-				    for lib in sqlite_env['LIBS']:
-					if not lib in env['LIBS']:
-					    env["SQLITE_LINKFLAGS"].append(lib)
-					    env.Append(LIBS=lib)
-				except OSError,e:
-				    for lib in ["sqlite3","dl","pthread"]:
-					if not lib in env['LIBS']:
-					    env["SQLITE_LINKFLAGS"].append("lib")
-					    env.Append(LIBS=lib)
-			    else:
-				for lib in ["sqlite3","dl","pthread"]:
-				    if not lib in env['LIBS']:
-					env["SQLITE_LINKFLAGS"].append("lib")
-					env.Append(LIBS=lib)
-			SQLITE_HAS_RTREE = conf.sqlite_has_rtree()
-			if not SQLITE_HAS_RTREE:
-			    env.Replace(**sqlite_backup)
-			    if details['lib'] in env['LIBS']:
-				env['LIBS'].remove(details['lib'])
-			    env['SKIPPED_DEPS'].append('sqlite_rtree')
-			else:
-			    env.Replace(**sqlite_backup)
-		elif details['lib'] and details['inc']:
-		    if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
-			env['SKIPPED_DEPS'].append(details['lib'])
-
-	    # re-append the local paths for mapnik sources to the beginning of the list
-	    # to make sure they come before any plugins that were 'prepended'
-	    env.PrependUnique(CPPPATH = '#include', delete_existing=True)
-	    env.PrependUnique(LIBPATH = '#src', delete_existing=True)
+        if env['HOST']:
+            for plugin in env['REQUESTED_PLUGINS']:
+                details = env['PLUGINS'][plugin]
+                if details['lib']:
+                    env.AppendUnique(LIBS=details['lib'])
+        else:
+            color_print(4,'Checking for requested plugin dependencies...')
+            for plugin in env['REQUESTED_PLUGINS']:
+                details = env['PLUGINS'][plugin]
+                if plugin == 'gdal':
+                    if conf.parse_config('GDAL_CONFIG',checks='--libs'):
+                        conf.parse_config('GDAL_CONFIG',checks='--cflags')
+                        libname = conf.get_pkg_lib('GDAL_CONFIG','gdal')
+                        if libname:
+                            if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
+                                env['SKIPPED_DEPS'].append('gdal')
+                                if libname in env['LIBS']:
+                                     env['LIBS'].remove(libname)
+                            else:
+                                details['lib'] = libname
+                elif plugin == 'postgis' or plugin == 'pgraster':
+                    if env.get('PG_LIBS') or env.get('PG_INCLUDES'):
+                        libname = details['lib']
+                        if env.get('PG_INCLUDES'):
+                            inc_path = env['PG_INCLUDES']
+                            env.AppendUnique(CPPPATH = fix_path(inc_path))
+                        if env.get('PG_LIBS'):
+                            lib_path = env['PG_LIBS']
+                            env.AppendUnique(LIBPATH = fix_path(lib_path))
+                        if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
+                            env['SKIPPED_DEPS'].append(libname)
+                            if libname in env['LIBS']:
+                                 env['LIBS'].remove(libname)
+                        else:
+                            details['lib'] = libname
+                    else:
+                        conf.parse_pg_config('PG_CONFIG')
+                elif plugin == 'ogr':
+                    if conf.ogr_enabled():
+                        if conf.parse_config('GDAL_CONFIG',checks='--libs'):
+                            conf.parse_config('GDAL_CONFIG',checks='--cflags')
+                            libname = conf.get_pkg_lib('GDAL_CONFIG','ogr')
+                            if libname:
+                                if not conf.CheckLibWithHeader(libname, details['inc'], details['lang']):
+                                    if 'gdal' not in env['SKIPPED_DEPS']:
+                                        env['SKIPPED_DEPS'].append('gdal')
+                                    if libname in env['LIBS']:
+                                         env['LIBS'].remove(libname)
+                                else:
+                                    details['lib'] = libname
+                elif details['path'] and details['lib'] and details['inc']:
+                    backup = env.Clone().Dictionary()
+                    # Note, the 'delete_existing' keyword makes sure that these paths are prepended
+                    # to the beginning of the path list even if they already exist
+                    incpath = env['%s_INCLUDES' % details['path']]
+                    libpath = env['%s_LIBS' % details['path']]
+                    env.PrependUnique(CPPPATH = fix_path(incpath),delete_existing=True)
+                    env.PrependUnique(LIBPATH = fix_path(libpath),delete_existing=True)
+                    if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
+                        env.Replace(**backup)
+                        env['SKIPPED_DEPS'].append(details['lib'])
+                    if plugin == 'sqlite':
+                        sqlite_backup = env.Clone().Dictionary()
+                        # if statically linking, on linux we likely
+                        # need to link sqlite to pthreads and dl
+                        if env['RUNTIME_LINK'] == 'static' and not env['PLATFORM'] == 'Darwin':
+                            if CHECK_PKG_CONFIG and conf.CheckPKG('sqlite3'):
+                                sqlite_env = env.Clone()
+                                try:
+                                    sqlite_env.ParseConfig('pkg-config --static --libs sqlite3')
+                                    for lib in sqlite_env['LIBS']:
+                                        if not lib in env['LIBS']:
+                                            env["SQLITE_LINKFLAGS"].append(lib)
+                                            env.Append(LIBS=lib)
+                                except OSError,e:
+                                    for lib in ["sqlite3","dl","pthread"]:
+                                        if not lib in env['LIBS']:
+                                            env["SQLITE_LINKFLAGS"].append(lib)
+                                            env.Append(LIBS=lib)
+                            else:
+                                for lib in ["sqlite3","dl","pthread"]:
+                                    if not lib in env['LIBS']:
+                                        env["SQLITE_LINKFLAGS"].append(lib)
+                                        env.Append(LIBS=lib)
+                        SQLITE_HAS_RTREE = conf.sqlite_has_rtree()
+                        if not SQLITE_HAS_RTREE:
+                            env.Replace(**sqlite_backup)
+                            if details['lib'] in env['LIBS']:
+                                env['LIBS'].remove(details['lib'])
+                            env['SKIPPED_DEPS'].append('sqlite_rtree')
+                        else:
+                            env.Replace(**sqlite_backup)
+                elif details['lib'] and details['inc']:
+                    if not conf.CheckLibWithHeader(details['lib'], details['inc'], details['lang']):
+                        env['SKIPPED_DEPS'].append(details['lib'])
+
+            # re-append the local paths for mapnik sources to the beginning of the list
+            # to make sure they come before any plugins that were 'prepended'
+            env.PrependUnique(CPPPATH = '#include', delete_existing=True)
+            env.PrependUnique(LIBPATH = '#src', delete_existing=True)
 
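
The sqlite branch above shows a common static-linking pattern: ask pkg-config
for the full set of static link flags, and fall back to a hard-coded list when
pkg-config is missing or fails. A condensed sketch of just that pattern,
assuming a SCons Environment named env:

    sqlite_env = env.Clone()
    try:
        # --static asks pkg-config to include private dependencies
        sqlite_env.ParseConfig('pkg-config --static --libs sqlite3')
        extra = [l for l in sqlite_env['LIBS'] if l not in env['LIBS']]
    except OSError:
        # conservative fallback: sqlite plus the libs it commonly needs
        extra = ['sqlite3', 'dl', 'pthread']
    for lib in extra:
        env.Append(LIBS=lib)
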
     if not env['HOST']:
-	if env['PGSQL2SQLITE']:
-	    if 'sqlite3' not in env['LIBS']:
-		env.AppendUnique(LIBS='sqlite3')
-		env.AppendUnique(CPPPATH = fix_path(env['SQLITE_INCLUDES']))
-		env.AppendUnique(LIBPATH = fix_path(env['SQLITE_LIBS']))
-	    if 'pq' not in env['LIBS']:
-		if not conf.parse_pg_config('PG_CONFIG'):
-		    env['PGSQL2SQLITE'] = False
-	    if not SQLITE_HAS_RTREE:
-		env['SKIPPED_DEPS'].append('pgsql2sqlite_rtree')
-		env['PGSQL2SQLITE'] = False
+        if env['PGSQL2SQLITE']:
+            if 'sqlite3' not in env['LIBS']:
+                env.AppendUnique(LIBS='sqlite3')
+                env.AppendUnique(CPPPATH = fix_path(env['SQLITE_INCLUDES']))
+                env.AppendUnique(LIBPATH = fix_path(env['SQLITE_LIBS']))
+            if 'pq' not in env['LIBS']:
+                if not conf.parse_pg_config('PG_CONFIG'):
+                    env['PGSQL2SQLITE'] = False
+            if not SQLITE_HAS_RTREE:
+                env['SKIPPED_DEPS'].append('pgsql2sqlite_rtree')
+                env['PGSQL2SQLITE'] = False
 
     # we rely on an internal, patched copy of agg with critical fixes
     # prepend to make sure we link locally
@@ -1593,236 +1587,236 @@ if not preconfigured:
     env.Prepend(CPPPATH = '#deps')
 
     if env['CAIRO']:
-	if env['CAIRO_LIBS'] or env['CAIRO_INCLUDES']:
-	    c_inc = env['CAIRO_INCLUDES']
-	    if env['CAIRO_LIBS']:
-		env["CAIRO_LIBPATHS"].append(fix_path(env['CAIRO_LIBS']))
-		if not env['CAIRO_INCLUDES']:
-		    c_inc = env['CAIRO_LIBS'].replace('lib','',1)
-	    if c_inc:
-		c_inc = os.path.normpath(fix_path(env['CAIRO_INCLUDES']))
-		if c_inc.endswith('include'):
-		    c_inc = os.path.dirname(c_inc)
-		env["CAIRO_CPPPATHS"].extend(
-		    [
-		      os.path.join(c_inc,'include/cairo'),
-		      os.path.join(c_inc,'include/pixman-1'),
-		      #os.path.join(c_inc,'include/freetype2'),
-		      #os.path.join(c_inc,'include/libpng'),
-		    ]
-		)
-		env["CAIRO_ALL_LIBS"] = ['cairo']
-		if env['RUNTIME_LINK'] == 'static':
-		    env["CAIRO_ALL_LIBS"].extend(
-			['pixman-1','expat']
-		    )
-		# todo - run actual checkLib?
-		env['HAS_CAIRO'] = True
-	else:
-	    if not CHECK_PKG_CONFIG:
-		env['HAS_CAIRO'] = False
-		env['SKIPPED_DEPS'].append('pkg-config')
-		env['SKIPPED_DEPS'].append('cairo')
-	    elif not conf.CheckPKG('cairo'):
-		env['HAS_CAIRO'] = False
-		env['SKIPPED_DEPS'].append('cairo')
-	    else:
-		print 'Checking for cairo lib and include paths... ',
-		cmd = 'pkg-config --libs --cflags cairo'
-		if env['RUNTIME_LINK'] == 'static':
-		    cmd += ' --static'
-		cairo_env = env.Clone()
-		try:
-		    cairo_env.ParseConfig(cmd)
-		    for lib in cairo_env['LIBS']:
-			if not lib in env['LIBS']:
-			    env["CAIRO_ALL_LIBS"].append(lib)
-		    for lpath in cairo_env['LIBPATH']:
-			if not lpath in env['LIBPATH']:
-			    env["CAIRO_LIBPATHS"].append(lpath)
-		    for inc in cairo_env['CPPPATH']:
-			if not inc in env['CPPPATH']:
-			    env["CAIRO_CPPPATHS"].append(inc)
-		    env['HAS_CAIRO'] = True
-		    print 'yes'
-		except OSError,e:
-		    color_print(1,'no')
-		    env['SKIPPED_DEPS'].append('cairo')
-		    color_print(1,'pkg-config reported: %s' % e)
+        if env['CAIRO_LIBS'] or env['CAIRO_INCLUDES']:
+            c_inc = env['CAIRO_INCLUDES']
+            if env['CAIRO_LIBS']:
+                env["CAIRO_LIBPATHS"].append(fix_path(env['CAIRO_LIBS']))
+                if not env['CAIRO_INCLUDES']:
+                    c_inc = env['CAIRO_LIBS'].replace('lib','',1)
+            if c_inc:
+                c_inc = os.path.normpath(fix_path(c_inc))
+                if c_inc.endswith('include'):
+                    c_inc = os.path.dirname(c_inc)
+                env["CAIRO_CPPPATHS"].extend(
+                    [
+                      os.path.join(c_inc,'include/cairo'),
+                      os.path.join(c_inc,'include/pixman-1'),
+                      #os.path.join(c_inc,'include/freetype2'),
+                      #os.path.join(c_inc,'include/libpng'),
+                    ]
+                )
+                env["CAIRO_ALL_LIBS"] = ['cairo']
+                if env['RUNTIME_LINK'] == 'static':
+                    env["CAIRO_ALL_LIBS"].extend(
+                        ['pixman-1','expat']
+                    )
+                # todo - run actual checkLib?
+                env['HAS_CAIRO'] = True
+        else:
+            if not CHECK_PKG_CONFIG:
+                env['HAS_CAIRO'] = False
+                env['SKIPPED_DEPS'].append('pkg-config')
+                env['SKIPPED_DEPS'].append('cairo')
+            elif not conf.CheckPKG('cairo'):
+                env['HAS_CAIRO'] = False
+                env['SKIPPED_DEPS'].append('cairo')
+            else:
+                print 'Checking for cairo lib and include paths... ',
+                cmd = 'pkg-config --libs --cflags cairo'
+                if env['RUNTIME_LINK'] == 'static':
+                    cmd += ' --static'
+                cairo_env = env.Clone()
+                try:
+                    cairo_env.ParseConfig(cmd)
+                    for lib in cairo_env['LIBS']:
+                        if not lib in env['LIBS']:
+                            env["CAIRO_ALL_LIBS"].append(lib)
+                    for lpath in cairo_env['LIBPATH']:
+                        if not lpath in env['LIBPATH']:
+                            env["CAIRO_LIBPATHS"].append(lpath)
+                    for inc in cairo_env['CPPPATH']:
+                        if not inc in env['CPPPATH']:
+                            env["CAIRO_CPPPATHS"].append(inc)
+                    env['HAS_CAIRO'] = True
+                    print 'yes'
+                except OSError,e:
+                    color_print(1,'no')
+                    env['SKIPPED_DEPS'].append('cairo')
+                    color_print(1,'pkg-config reported: %s' % e)
 
     else:
-	color_print(4,'Not building with cairo support, pass CAIRO=True to enable')
+        color_print(4,'Not building with cairo support, pass CAIRO=True to enable')
 
     if not env['HOST'] and env['HAS_CAIRO']:
-	if not conf.CheckCairoHasFreetype():
-	    env['SKIPPED_DEPS'].append('cairo')
-	    env['HAS_CAIRO'] = False
+        if not conf.CheckCairoHasFreetype():
+            env['SKIPPED_DEPS'].append('cairo')
+            env['HAS_CAIRO'] = False
 
     #### End Config Stage for Required Dependencies ####
 
     if env['MISSING_DEPS']:
-	# if required dependencies are missing, print warnings and then let SCons finish without building or saving local config
-	color_print(1,'\nExiting... the following required dependencies were not found:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['MISSING_DEPS']]))
-	color_print(1,"\nSee '%s' for details on possible problems." % (fix_path(SCONS_LOCAL_LOG)))
-	if env['SKIPPED_DEPS']:
-	    color_print(4,'\nAlso, these OPTIONAL dependencies were not found:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['SKIPPED_DEPS']]))
-	color_print(4,"\nSet custom paths to these libraries and header files on the command-line or in a file called '%s'" % SCONS_LOCAL_CONFIG)
-	color_print(4,"    ie. $ python scons/scons.py BOOST_INCLUDES=/usr/local/include BOOST_LIBS=/usr/local/lib")
-	color_print(4, "\nOnce all required dependencies are found a local '%s' will be saved and then install:" % SCONS_LOCAL_CONFIG)
-	color_print(4,"    $ sudo python scons/scons.py install")
-	color_print(4,"\nTo view available path variables:\n    $ python scons/scons.py --help or -h")
-	color_print(4,'\nTo view overall SCons help options:\n    $ python scons/scons.py --help-options or -H\n')
-	color_print(4,'More info: https://github.com/mapnik/mapnik/wiki/Mapnik-Installation')
-	if not HELP_REQUESTED:
-	    Exit(1)
+        # if required dependencies are missing, print warnings and then let SCons finish without building or saving local config
+        color_print(1,'\nExiting... the following required dependencies were not found:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['MISSING_DEPS']]))
+        color_print(1,"\nSee '%s' for details on possible problems." % (fix_path(SCONS_LOCAL_LOG)))
+        if env['SKIPPED_DEPS']:
+            color_print(4,'\nAlso, these OPTIONAL dependencies were not found:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['SKIPPED_DEPS']]))
+        color_print(4,"\nSet custom paths to these libraries and header files on the command-line or in a file called '%s'" % SCONS_LOCAL_CONFIG)
+        color_print(4,"    e.g. $ python scons/scons.py BOOST_INCLUDES=/usr/local/include BOOST_LIBS=/usr/local/lib")
+        color_print(4, "\nOnce all required dependencies are found, a local '%s' will be saved and you can then install:" % SCONS_LOCAL_CONFIG)
+        color_print(4,"    $ sudo python scons/scons.py install")
+        color_print(4,"\nTo view available path variables:\n    $ python scons/scons.py --help or -h")
+        color_print(4,'\nTo view overall SCons help options:\n    $ python scons/scons.py --help-options or -H\n')
+        color_print(4,'More info: https://github.com/mapnik/mapnik/wiki/Mapnik-Installation')
+        if not HELP_REQUESTED:
+            Exit(1)
     else:
-	# Save the custom variables in a SCONS_LOCAL_CONFIG
-	# that will be reloaded to allow for `install` without re-specifying custom variables
-	color_print(4,"\nAll Required dependencies found!\n")
-	if env['USE_CONFIG']:
-	    if os.path.exists(SCONS_LOCAL_CONFIG):
-		action = 'Overwriting and re-saving'
-		os.unlink(SCONS_LOCAL_CONFIG)
-	    else:
-		action = 'Saving new'
-	    color_print(4,"%s file '%s'..." % (action,SCONS_LOCAL_CONFIG))
-	    color_print(4,"Will hold custom path variables from commandline and python config file(s)...")
-	    opts.Save(SCONS_LOCAL_CONFIG,env)
-	else:
-	  color_print(4,"Did not use user config file, no custom path variables will be saved...")
-
-	if env['SKIPPED_DEPS']:
-	    color_print(4,'\nNote: will build without these OPTIONAL dependencies:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['SKIPPED_DEPS']]))
-	    print
-
-	# fetch the mapnik version header in order to set the
-	# ABI version used to build libmapnik.so on linux in src/build.py
-	abi = GetMapnikLibVersion()
-	abi_no_pre = abi.replace('-pre','').split('.')
-	env['ABI_VERSION'] = abi_no_pre
-	env['MAPNIK_VERSION_STRING'] = abi
-	env['MAPNIK_VERSION'] = str(int(abi_no_pre[0])*100000+int(abi_no_pre[1])*100+int(abi_no_pre[2]))
-
-	# Common DEFINES.
-	env.Append(CPPDEFINES = '-D%s' % env['PLATFORM'].upper())
-	if env['THREADING'] == 'multi':
-	    env.Append(CPPDEFINES = '-DMAPNIK_THREADSAFE')
-
-	if env['NO_ATEXIT']:
-	    env.Append(CPPDEFINES = '-DMAPNIK_NO_ATEXIT')
-
-	# Mac OSX (Darwin) special settings
-	if env['PLATFORM'] == 'Darwin':
-	    pthread = ''
-	else:
-	    pthread = '-pthread'
-
-	# Common debugging flags.
-	# http://lists.fedoraproject.org/pipermail/devel/2010-November/144952.html
-	debug_flags  = ['-g', '-fno-omit-frame-pointer']
-	debug_defines = ['-DDEBUG', '-DMAPNIK_DEBUG']
-	ndebug_defines = ['-DNDEBUG']
-
-	# faster compile
-	# http://www.boost.org/doc/libs/1_47_0/libs/spirit/doc/html/spirit/what_s_new/spirit_2_5.html#spirit.what_s_new.spirit_2_5.breaking_changes
-	env.Append(CPPDEFINES = '-DBOOST_SPIRIT_NO_PREDEFINED_TERMINALS=1')
-	env.Append(CPPDEFINES = '-DBOOST_PHOENIX_NO_PREDEFINED_TERMINALS=1')
-	# c++11 support / https://github.com/mapnik/mapnik/issues/1683
-	#  - upgrade to PHOENIX_V3 since that is needed for c++11 compile
-	env.Append(CPPDEFINES = '-DBOOST_SPIRIT_USE_PHOENIX_V3=1')
-
-	# Enable logging in debug mode (always) and release mode (when specified)
-	if env['DEFAULT_LOG_SEVERITY']:
-	    if env['DEFAULT_LOG_SEVERITY'] not in severities:
-		severities_list = ', '.join(["'%s'" % s for s in severities])
-		color_print(1,"Cannot set default logger severity to '%s', available options are %s." % (env['DEFAULT_LOG_SEVERITY'], severities_list))
-		Exit(1)
-	    else:
-		log_severity = severities.index(env['DEFAULT_LOG_SEVERITY'])
-	else:
-	    severities_list = ', '.join(["'%s'" % s for s in severities])
-	    color_print(1,"No logger severity specified, available options are %s." % severities_list)
-	    Exit(1)
-
-	log_enabled = ['-DMAPNIK_LOG', '-DMAPNIK_DEFAULT_LOG_SEVERITY=%d' % log_severity]
-
-	if env['DEBUG']:
-	    debug_defines += log_enabled
-	else:
-	    if env['ENABLE_LOG']:
-		ndebug_defines += log_enabled
-
-	# Enable statistics reporting
-	if env['ENABLE_STATS']:
-	    debug_defines.append('-DMAPNIK_STATS')
-	    ndebug_defines.append('-DMAPNIK_STATS')
-
-	# Add rdynamic to allow using statics between application and plugins
-	# http://stackoverflow.com/questions/8623657/multiple-instances-of-singleton-across-shared-libraries-on-linux
-	if env['PLATFORM'] != 'Darwin' and env['CXX'] == 'g++':
-	    env.MergeFlags('-rdynamic')
-
-	if env['DEBUG']:
-	    env.Append(CXXFLAGS = debug_flags)
-	    env.Append(CPPDEFINES = debug_defines)
-	else:
-	    env.Append(CPPDEFINES = ndebug_defines)
-
-	# Common flags for g++/clang++ CXX compiler.
-	# TODO: clean up code more to make -Wextra -Wsign-compare -Wsign-conversion -Wconversion viable
-	common_cxx_flags = '-Wall %s %s -ftemplate-depth-300 -Wsign-compare -Wshadow ' % (env['WARNING_CXXFLAGS'], pthread)
-
-	if 'clang++' in env['CXX']:
-	    common_cxx_flags += ' -Wno-unknown-pragmas -Wno-unsequenced '
-	elif 'g++' in env['CXX']:
-	    common_cxx_flags += ' -Wno-pragmas '
-
-	if env['DEBUG']:
-	    env.Append(CXXFLAGS = common_cxx_flags + '-O0')
-	else:
-	    # TODO - add back -fvisibility-inlines-hidden
-	    # https://github.com/mapnik/mapnik/issues/1863
-	    env.Append(CXXFLAGS = common_cxx_flags + '-O%s' % (env['OPTIMIZATION']))
-	if env['DEBUG_UNDEFINED']:
-	    env.Append(CXXFLAGS = '-fsanitize=undefined-trap -fsanitize-undefined-trap-on-error -ftrapv -fwrapv')
-
-	# if requested, sort LIBPATH and CPPPATH one last time before saving...
-	if env['PRIORITIZE_LINKING']:
-	    conf.prioritize_paths(silent=True)
-
-	# finish config stage and pickle results
-	env = conf.Finish()
-	env_cache = open(SCONS_CONFIGURE_CACHE, 'w')
-	pickle_dict = {}
-	for i in pickle_store:
-	    pickle_dict[i] = env.get(i)
-	pickle.dump(pickle_dict,env_cache)
-	env_cache.close()
-	# fix up permissions on configure outputs
-	# this is hackish but avoids potential problems
-	# with a non-root configure following a root install
-	# that also triggered a re-configure
-	try:
-	    os.chmod(SCONS_CONFIGURE_CACHE,0666)
-	except: pass
-	try:
-	    os.chmod(SCONS_LOCAL_CONFIG,0666)
-	except: pass
-	try:
-	    os.chmod('.sconsign.dblite',0666)
-	except: pass
-	try:
-	    os.chmod(SCONS_LOCAL_LOG,0666)
-	except: pass
-	try:
-	    for item in glob('%s/*' % SCONF_TEMP_DIR):
-		os.chmod(item,0666)
-	except: pass
-
-	if 'configure' in command_line_args:
-	    color_print(4,'\nConfigure completed: run `make` to build or `make install`')
-	    if not HELP_REQUESTED:
-		Exit(0)
+        # Save the custom variables in a SCONS_LOCAL_CONFIG
+        # that will be reloaded to allow for `install` without re-specifying custom variables
+        color_print(4,"\nAll Required dependencies found!\n")
+        if env['USE_CONFIG']:
+            if os.path.exists(SCONS_LOCAL_CONFIG):
+                action = 'Overwriting and re-saving'
+                os.unlink(SCONS_LOCAL_CONFIG)
+            else:
+                action = 'Saving new'
+            color_print(4,"%s file '%s'..." % (action,SCONS_LOCAL_CONFIG))
+            color_print(4,"Will hold custom path variables from commandline and python config file(s)...")
+            opts.Save(SCONS_LOCAL_CONFIG,env)
+        else:
+            color_print(4,"Did not use user config file, no custom path variables will be saved...")
+
+        if env['SKIPPED_DEPS']:
+            color_print(4,'\nNote: will build without these OPTIONAL dependencies:\n   - %s' % '\n   - '.join([pretty_dep(dep) for dep in env['SKIPPED_DEPS']]))
+            print
+
+        # fetch the mapnik version header in order to set the
+        # ABI version used to build libmapnik.so on linux in src/build.py
+        abi = GetMapnikLibVersion()
+        abi_split = abi.split('.')
+        env['ABI_VERSION'] = abi_split
+        env['MAPNIK_VERSION_STRING'] = abi
+        env['MAPNIK_VERSION'] = str(int(abi_split[0])*100000+int(abi_split[1])*100+int(abi_split[2]))
+
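
The packed MAPNIK_VERSION above encodes major*100000 + minor*100 + patch; for
example, a version string of '3.0.7' becomes:

    abi_split = '3.0.7'.split('.')        # ['3', '0', '7']
    version = (int(abi_split[0]) * 100000
               + int(abi_split[1]) * 100
               + int(abi_split[2]))
    assert version == 300007              # 3*100000 + 0*100 + 7
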
+        # Common DEFINES.
+        env.Append(CPPDEFINES = '-D%s' % env['PLATFORM'].upper())
+        if env['THREADING'] == 'multi':
+            env.Append(CPPDEFINES = '-DMAPNIK_THREADSAFE')
+
+        if env['NO_ATEXIT']:
+            env.Append(CPPDEFINES = '-DMAPNIK_NO_ATEXIT')
+
+        # Mac OSX (Darwin) special settings
+        if env['PLATFORM'] == 'Darwin':
+            pthread = ''
+        else:
+            pthread = '-pthread'
+
+        # Common debugging flags.
+        # http://lists.fedoraproject.org/pipermail/devel/2010-November/144952.html
+        debug_flags  = ['-g', '-fno-omit-frame-pointer']
+        debug_defines = ['-DDEBUG', '-DMAPNIK_DEBUG']
+        ndebug_defines = ['-DNDEBUG']
+
+        # faster compile
+        # http://www.boost.org/doc/libs/1_47_0/libs/spirit/doc/html/spirit/what_s_new/spirit_2_5.html#spirit.what_s_new.spirit_2_5.breaking_changes
+        env.Append(CPPDEFINES = '-DBOOST_SPIRIT_NO_PREDEFINED_TERMINALS=1')
+        env.Append(CPPDEFINES = '-DBOOST_PHOENIX_NO_PREDEFINED_TERMINALS=1')
+        # c++11 support / https://github.com/mapnik/mapnik/issues/1683
+        #  - upgrade to PHOENIX_V3 since that is needed for c++11 compile
+        env.Append(CPPDEFINES = '-DBOOST_SPIRIT_USE_PHOENIX_V3=1')
+
+        # Enable logging in debug mode (always) and release mode (when specified)
+        if env['DEFAULT_LOG_SEVERITY']:
+            if env['DEFAULT_LOG_SEVERITY'] not in severities:
+                severities_list = ', '.join(["'%s'" % s for s in severities])
+                color_print(1,"Cannot set default logger severity to '%s', available options are %s." % (env['DEFAULT_LOG_SEVERITY'], severities_list))
+                Exit(1)
+            else:
+                log_severity = severities.index(env['DEFAULT_LOG_SEVERITY'])
+        else:
+            severities_list = ', '.join(["'%s'" % s for s in severities])
+            color_print(1,"No logger severity specified, available options are %s." % severities_list)
+            Exit(1)
+
+        log_enabled = ['-DMAPNIK_LOG', '-DMAPNIK_DEFAULT_LOG_SEVERITY=%d' % log_severity]
+
+        if env['DEBUG']:
+            debug_defines += log_enabled
+        else:
+            if env['ENABLE_LOG']:
+                ndebug_defines += log_enabled
+
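
The severity handling above maps a name to its index in the severities list,
and that integer is what -DMAPNIK_DEFAULT_LOG_SEVERITY bakes into the build.
Illustratively (severities is defined earlier in the file; this ordering is
assumed for the example):

    severities = ['debug', 'warn', 'error', 'none']  # assumed ordering
    log_severity = severities.index('error')         # -> 2
    assert ('-DMAPNIK_DEFAULT_LOG_SEVERITY=%d' % log_severity
            == '-DMAPNIK_DEFAULT_LOG_SEVERITY=2')
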
+        # Enable statistics reporting
+        if env['ENABLE_STATS']:
+            debug_defines.append('-DMAPNIK_STATS')
+            ndebug_defines.append('-DMAPNIK_STATS')
+
+        # Add rdynamic to allow using statics between application and plugins
+        # http://stackoverflow.com/questions/8623657/multiple-instances-of-singleton-across-shared-libraries-on-linux
+        if env['PLATFORM'] != 'Darwin' and env['CXX'] == 'g++':
+            env.MergeFlags('-rdynamic')
+
+        if env['DEBUG']:
+            env.Append(CXXFLAGS = debug_flags)
+            env.Append(CPPDEFINES = debug_defines)
+        else:
+            env.Append(CPPDEFINES = ndebug_defines)
+
+        # Common flags for g++/clang++ CXX compiler.
+        # TODO: clean up code more to make -Wextra -Wsign-compare -Wsign-conversion -Wconversion viable
+        common_cxx_flags = '-Wall %s %s -ftemplate-depth-300 -Wsign-compare -Wshadow ' % (env['WARNING_CXXFLAGS'], pthread)
+
+        if 'clang++' in env['CXX']:
+            common_cxx_flags += ' -Wno-unknown-pragmas -Wno-unsequenced '
+        elif 'g++' in env['CXX']:
+            common_cxx_flags += ' -Wno-pragmas '
+
+        if env['DEBUG']:
+            env.Append(CXXFLAGS = common_cxx_flags + '-O0')
+        else:
+            # TODO - add back -fvisibility-inlines-hidden
+            # https://github.com/mapnik/mapnik/issues/1863
+            env.Append(CXXFLAGS = common_cxx_flags + '-O%s' % (env['OPTIMIZATION']))
+        if env['DEBUG_UNDEFINED']:
+            env.Append(CXXFLAGS = '-fsanitize=undefined-trap -fsanitize-undefined-trap-on-error -ftrapv -fwrapv')
+
+        # if requested, sort LIBPATH and CPPPATH one last time before saving...
+        if env['PRIORITIZE_LINKING']:
+            conf.prioritize_paths(silent=True)
+
+        # finish config stage and pickle results
+        env = conf.Finish()
+        env_cache = open(SCONS_CONFIGURE_CACHE, 'w')
+        pickle_dict = {}
+        for i in pickle_store:
+            pickle_dict[i] = env.get(i)
+        pickle.dump(pickle_dict,env_cache)
+        env_cache.close()
+        # fix up permissions on configure outputs
+        # this is hackish but avoids potential problems
+        # with a non-root configure following a root install
+        # that also triggered a re-configure
+        try:
+            os.chmod(SCONS_CONFIGURE_CACHE,0666)
+        except: pass
+        try:
+            os.chmod(SCONS_LOCAL_CONFIG,0666)
+        except: pass
+        try:
+            os.chmod('.sconsign.dblite',0666)
+        except: pass
+        try:
+            os.chmod(SCONS_LOCAL_LOG,0666)
+        except: pass
+        try:
+            for item in glob('%s/*' % SCONF_TEMP_DIR):
+                os.chmod(item,0666)
+        except: pass
+
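
The 0666 literals above are Python 2 octal notation for mode rw-rw-rw-. A
sketch of an equivalent spelling via the stat module, which also parses under
Python 3:

    import stat
    mode = (stat.S_IRUSR | stat.S_IWUSR |
            stat.S_IRGRP | stat.S_IWGRP |
            stat.S_IROTH | stat.S_IWOTH)
    assert mode == 438                    # 438 == 0666 in octal
    # os.chmod(path, mode) is then equivalent to os.chmod(path, 0666)
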
+        if 'configure' in command_line_args:
+            color_print(4,'\nConfigure completed: run `make` to build or `make install`')
+            if not HELP_REQUESTED:
+                Exit(0)
 
 # autogenerate help on default/current SCons options
 Help(opts.GenerateHelpText(env))
@@ -1831,43 +1825,43 @@ Help(opts.GenerateHelpText(env))
 if not HELP_REQUESTED:
 
     if 'uninstall' in COMMAND_LINE_TARGETS:
-	# dummy action in case there is nothing to uninstall, to avoid phony error..
-	env.Alias("uninstall", "")
+        # dummy action in case there is nothing to uninstall, to avoid phony error..
+        # dummy action in case there is nothing to uninstall, to avoid a phony error...
     env['create_uninstall_target'] = create_uninstall_target
 
     if env['PKG_CONFIG_PATH']:
-	env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
-	# otherwise this variable == os.environ["PKG_CONFIG_PATH"]
+        env['ENV']['PKG_CONFIG_PATH'] = fix_path(env['PKG_CONFIG_PATH'])
+        # otherwise this variable == os.environ["PKG_CONFIG_PATH"]
 
     if env['PATH']:
-	env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
+        env['ENV']['PATH'] = fix_path(env['PATH']) + ':' + env['ENV']['PATH']
 
     if env['PATH_REMOVE']:
-	for p in env['PATH_REMOVE'].split(':'):
-	    if p in env['ENV']['PATH']:
-		env['ENV']['PATH'].replace(p,'')
-	    rm_path(p,'LIBPATH',env)
-	    rm_path(p,'CPPPATH',env)
-	    rm_path(p,'CXXFLAGS',env)
-	    rm_path(p,'CAIRO_LIBPATHS',env)
-	    rm_path(p,'CAIRO_CPPPATHS',env)
+        for p in env['PATH_REMOVE'].split(':'):
+            if p in env['ENV']['PATH']:
+                env['ENV']['PATH'].replace(p,'')
+            rm_path(p,'LIBPATH',env)
+            rm_path(p,'CPPPATH',env)
+            rm_path(p,'CXXFLAGS',env)
+            rm_path(p,'CAIRO_LIBPATHS',env)
+            rm_path(p,'CAIRO_CPPPATHS',env)
 
     if env['PATH_REPLACE']:
-	searches,replace = env['PATH_REPLACE'].split(':')
-	for search in searches.split(','):
-	    if search in env['ENV']['PATH']:
-		env['ENV']['PATH'] = os.path.abspath(env['ENV']['PATH'].replace(search,replace))
-	    def replace_path(set,s,r):
-		idx = 0
-		for i in env[set]:
-		    if s in i:
-			env[set][idx] = os.path.abspath(env[set][idx].replace(s,r))
-		    idx +=1
-	    replace_path('LIBPATH',search,replace)
-	    replace_path('CPPPATH',search,replace)
-	    replace_path('CXXFLAGS',search,replace)
-	    replace_path('CAIRO_LIBPATHS',search,replace)
-	    replace_path('CAIRO_CPPPATHS',search,replace)
+        searches,replace = env['PATH_REPLACE'].split(':')
+        for search in searches.split(','):
+            if search in env['ENV']['PATH']:
+                env['ENV']['PATH'] = os.path.abspath(env['ENV']['PATH'].replace(search,replace))
+            def replace_path(set,s,r):
+                idx = 0
+                for i in env[set]:
+                    if s in i:
+                        env[set][idx] = os.path.abspath(env[set][idx].replace(s,r))
+                    idx +=1
+            replace_path('LIBPATH',search,replace)
+            replace_path('CPPPATH',search,replace)
+            replace_path('CXXFLAGS',search,replace)
+            replace_path('CAIRO_LIBPATHS',search,replace)
+            replace_path('CAIRO_CPPPATHS',search,replace)
 
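
The parse above implies PATH_REPLACE uses an 'old1,old2:new' syntax:
comma-separated search prefixes, then a colon, then a single replacement. An
illustrative parse (the paths are invented for the example):

    searches, replace = '/usr/local,/opt/local:/usr'.split(':')
    assert searches.split(',') == ['/usr/local', '/opt/local']
    assert replace == '/usr'
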
     # export env so it is available in build.py files
     Export('env')
@@ -1877,15 +1871,15 @@ if not HELP_REQUESTED:
     Export('plugin_base')
 
     if env['FAST']:
-	# caching is 'auto' by default in SCons
-	# But let's also cache implicit deps...
-	EnsureSConsVersion(0,98)
-	SetOption('implicit_cache', 1)
-	SetOption('max_drift', 1)
+        # caching is 'auto' by default in SCons,
+        # but let's also cache implicit deps...
+        EnsureSConsVersion(0,98)
+        SetOption('implicit_cache', 1)
+        SetOption('max_drift', 1)
 
     # Build agg first, doesn't need anything special
     if env['RUNTIME_LINK'] == 'shared':
-	SConscript('deps/agg/build.py')
+        SConscript('deps/agg/build.py')
 
     # Build spirit grammars
     SConscript('src/json/build.py')
@@ -1906,78 +1900,78 @@ if not HELP_REQUESTED:
     POSTGIS_BUILT = False
     PGRASTER_BUILT = False
     for plugin in env['PLUGINS']:
-	if env['PLUGIN_LINKING'] == 'static' or plugin not in env['REQUESTED_PLUGINS']:
-	    if os.path.exists('plugins/input/%s.input' % plugin):
-		os.unlink('plugins/input/%s.input' % plugin)
-	elif plugin in env['REQUESTED_PLUGINS']:
-	    details = env['PLUGINS'][plugin]
-	    if details['lib'] in env['LIBS']:
-		if env['PLUGIN_LINKING'] == 'shared':
-		    SConscript('plugins/input/%s/build.py' % plugin)
-		# hack to avoid breaking on plugins with the same dep
-		if plugin == 'ogr': OGR_BUILT = True
-		if plugin == 'gdal': GDAL_BUILT = True
-		if plugin == 'postgis': POSTGIS_BUILT = True
-		if plugin == 'pgraster': PGRASTER_BUILT = True
-		if plugin == 'ogr' or plugin == 'gdal':
-		    if GDAL_BUILT and OGR_BUILT:
-			env['LIBS'].remove(details['lib'])
-		elif plugin == 'postgis' or plugin == 'pgraster':
-		    if POSTGIS_BUILT and PGRASTER_BUILT:
-			env['LIBS'].remove(details['lib'])
-		else:
-		    env['LIBS'].remove(details['lib'])
-	    elif not details['lib']:
-		if env['PLUGIN_LINKING'] == 'shared':
-		    # build internal datasource input plugins
-		    SConscript('plugins/input/%s/build.py' % plugin)
-	    else:
-		color_print(1,"Notice: dependencies not met for plugin '%s', not building..." % plugin)
-		if os.path.exists('plugins/input/%s.input' % plugin):
-		    os.unlink('plugins/input/%s.input' % plugin)
+        if env['PLUGIN_LINKING'] == 'static' or plugin not in env['REQUESTED_PLUGINS']:
+            if os.path.exists('plugins/input/%s.input' % plugin):
+                os.unlink('plugins/input/%s.input' % plugin)
+        elif plugin in env['REQUESTED_PLUGINS']:
+            details = env['PLUGINS'][plugin]
+            if details['lib'] in env['LIBS']:
+                if env['PLUGIN_LINKING'] == 'shared':
+                    SConscript('plugins/input/%s/build.py' % plugin)
+                # hack to avoid breaking on plugins with the same dep
+                if plugin == 'ogr': OGR_BUILT = True
+                if plugin == 'gdal': GDAL_BUILT = True
+                if plugin == 'postgis': POSTGIS_BUILT = True
+                if plugin == 'pgraster': PGRASTER_BUILT = True
+                if plugin == 'ogr' or plugin == 'gdal':
+                    if GDAL_BUILT and OGR_BUILT:
+                        env['LIBS'].remove(details['lib'])
+                elif plugin == 'postgis' or plugin == 'pgraster':
+                    if POSTGIS_BUILT and PGRASTER_BUILT:
+                        env['LIBS'].remove(details['lib'])
+                else:
+                    env['LIBS'].remove(details['lib'])
+            elif not details['lib']:
+                if env['PLUGIN_LINKING'] == 'shared':
+                    # build internal datasource input plugins
+                    SConscript('plugins/input/%s/build.py' % plugin)
+            else:
+                color_print(1,"Notice: dependencies not met for plugin '%s', not building..." % plugin)
+                if os.path.exists('plugins/input/%s.input' % plugin):
+                    os.unlink('plugins/input/%s.input' % plugin)
 
     create_uninstall_target(env, env['MAPNIK_LIB_DIR_DEST'], False)
     create_uninstall_target(env, env['MAPNIK_INPUT_PLUGINS_DEST'] , False)
 
     if 'install' in COMMAND_LINE_TARGETS:
-	# if statically linking plugins still make sure
-	# to create the dynamic plugins directory
-	if env['PLUGIN_LINKING'] == 'static':
-	    if not os.path.exists(env['MAPNIK_INPUT_PLUGINS_DEST']):
-		os.makedirs(env['MAPNIK_INPUT_PLUGINS_DEST'])
-	# before installing plugins, wipe out any previously
-	# installed plugins that we are no longer building
-	for plugin in PLUGINS.keys():
-	    plugin_path = os.path.join(env['MAPNIK_INPUT_PLUGINS_DEST'],'%s.input' % plugin)
-	    if os.path.exists(plugin_path):
-		if plugin not in env['REQUESTED_PLUGINS'] or env['PLUGIN_LINKING'] == 'static':
-		    color_print(4,"Notice: removing out of date plugin: '%s'" % plugin_path)
-		    os.unlink(plugin_path)
+        # if statically linking plugins still make sure
+        # to create the dynamic plugins directory
+        if env['PLUGIN_LINKING'] == 'static':
+            if not os.path.exists(env['MAPNIK_INPUT_PLUGINS_DEST']):
+                os.makedirs(env['MAPNIK_INPUT_PLUGINS_DEST'])
+        # before installing plugins, wipe out any previously
+        # installed plugins that we are no longer building
+        for plugin in PLUGINS.keys():
+            plugin_path = os.path.join(env['MAPNIK_INPUT_PLUGINS_DEST'],'%s.input' % plugin)
+            if os.path.exists(plugin_path):
+                if plugin not in env['REQUESTED_PLUGINS'] or env['PLUGIN_LINKING'] == 'static':
+                    color_print(4,"Notice: removing out of date plugin: '%s'" % plugin_path)
+                    os.unlink(plugin_path)
 
     # Build the c++ rundemo app if requested
     if not env['HOST']:
-	if env['DEMO']:
-	    SConscript('demo/c++/build.py')
+        if env['DEMO']:
+            SConscript('demo/c++/build.py')
 
     # Build shapeindex and remove its dependency from the LIBS
     if not env['HOST']:
-	if 'boost_program_options%s' % env['BOOST_APPEND'] in env['LIBS']:
-	    if env['SHAPEINDEX']:
-		SConscript('utils/shapeindex/build.py')
-	    if env['MAPNIK_INDEX']:
-		SConscript('utils/mapnik-index/build.py')
-	    # Build the pgsql2psqlite app if requested
-	    if env['PGSQL2SQLITE']:
-		SConscript('utils/pgsql2sqlite/build.py')
-	    if env['SVG2PNG']:
-		SConscript('utils/svg2png/build.py')
-	    if env['NIK2IMG']:
-		SConscript('utils/nik2img/build.py')
-	    # devtools not ready for public
-	    #SConscript('utils/ogrindex/build.py')
-	    env['LIBS'].remove('boost_program_options%s' % env['BOOST_APPEND'])
-	else :
-	    color_print(1,"WARNING: Cannot find boost_program_options. 'shapeindex' and other command line programs will not be available")
+        if 'boost_program_options%s' % env['BOOST_APPEND'] in env['LIBS']:
+            if env['SHAPEINDEX']:
+                SConscript('utils/shapeindex/build.py')
+            if env['MAPNIK_INDEX']:
+                SConscript('utils/mapnik-index/build.py')
+            # Build the pgsql2sqlite app if requested
+            if env['PGSQL2SQLITE']:
+                SConscript('utils/pgsql2sqlite/build.py')
+            if env['SVG2PNG']:
+                SConscript('utils/svg2png/build.py')
+            if env['MAPNIK_RENDER']:
+                SConscript('utils/mapnik-render/build.py')
+            # devtools not ready for public
+            #SConscript('utils/ogrindex/build.py')
+            env['LIBS'].remove('boost_program_options%s' % env['BOOST_APPEND'])
+        else:
+            color_print(1,"WARNING: Cannot find boost_program_options. 'shapeindex' and other command line programs will not be available")
 
     # Configure fonts and if requested install the bundled DejaVu fonts
     SConscript('fonts/build.py')
@@ -1986,10 +1980,10 @@ if not HELP_REQUESTED:
     SConscript('test/build.py')
 
     if env['BENCHMARK']:
-	SConscript('benchmark/build.py')
+        SConscript('benchmark/build.py')
 
     if os.path.exists('./bindings/python/build.py'):
-	SConscript('./bindings/python/build.py')
+        SConscript('./bindings/python/build.py')
 
     # install mapnik-config script
     SConscript('utils/mapnik-config/build.py')
diff --git a/appveyor.yml b/appveyor.yml
index b227544..6566a9a 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,6 @@
 environment:
   msvs_toolset: 14
-  BOOST_VERSION: 58
+  BOOST_VERSION: 59
   FASTBUILD: 1
   matrix:
     - platform: x64
diff --git a/demo/viewer/mapwidget.cpp b/demo/viewer/mapwidget.cpp
index e35d131..b317cb4 100644
--- a/demo/viewer/mapwidget.cpp
+++ b/demo/viewer/mapwidget.cpp
@@ -180,9 +180,6 @@ void MapWidget::mousePressEvent(QMouseEvent* e)
                    feature_ptr feat  = fs->next();
                    if (feat)
                    {
-
-// FIXME
-#if 0
                        feature_kv_iterator itr(*feat,true);
                        feature_kv_iterator end(*feat);
 
@@ -192,9 +189,10 @@ void MapWidget::mousePressEvent(QMouseEvent* e)
                                                                  std::get<1>(*itr).to_string().c_str()));
                        }
 
+#if 0
                        using path_type = mapnik::transform_path_adapter<mapnik::view_transform,mapnik::vertex_adapter>;
 
-                       for  (unsigned i=0; i<feat->num_geometries();++i)
+                       for (unsigned i = 0; i < feat->num_geometries(); ++i)
                        {
                            mapnik::geometry_type const& geom = feat->get_geometry(i);
                            mapnik::vertex_adapter va(geom);
@@ -219,9 +217,9 @@ void MapWidget::mousePressEvent(QMouseEvent* e)
                                painter.drawPath(qpath);
                                update();
                            }
-               }
+                       }
 #endif
-               }
+                   }
                }
 
                if (info.size() > 0)
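
Note on the hunk above: it re-enables the attribute dump that had been parked behind `#if 0`. A minimal sketch of the same key/value iteration, assuming the mapnik 3.x `feature_kv_iterator` header and the tuple layout used in the viewer code:

    #include <mapnik/feature.hpp>
    #include <mapnik/feature_kv_iterator.hpp>
    #include <iostream>

    // Print every attribute of a feature, as the viewer now collects into 'info'.
    void dump_attributes(mapnik::feature_impl const& feat)
    {
        mapnik::feature_kv_iterator itr(feat, true); // true => position at first attribute
        mapnik::feature_kv_iterator end(feat);
        for (; itr != end; ++itr)
        {
            std::cout << std::get<0>(*itr) << " = "             // attribute name
                      << std::get<1>(*itr).to_string() << "\n"; // mapnik::value as text
        }
    }
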
diff --git a/include/mapnik/grid/grid_util.hpp b/include/mapnik/grid/grid_util.hpp
deleted file mode 100644
index 826d2e1..0000000
--- a/include/mapnik/grid/grid_util.hpp
+++ /dev/null
@@ -1,112 +0,0 @@
-/*****************************************************************************
- *
- * This file is part of Mapnik (c++ mapping toolkit)
- *
- * Copyright (C) 2015 Artem Pavlenko
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
- *
- *****************************************************************************/
-
-#ifndef GRID_UTIL_HPP
-#define GRID_UTIL_HPP
-
-// mapnik
-#include <mapnik/grid/grid.hpp>
-
-namespace mapnik {
-
-/*
- * Nearest neighbor resampling for grids
- */
-
-static inline void scale_grid(mapnik::grid::data_type & target,
-                              const mapnik::grid::data_type & source,
-                              double x_off_f, double y_off_f)
-{
-
-    int source_width=source.width();
-    int source_height=source.height();
-
-    int target_width=target.width();
-    int target_height=target.height();
-
-    if (source_width<1 || source_height<1 ||
-        target_width<1 || target_height<1) return;
-    int x = 0;
-    int y = 0;
-    int xs = 0;
-    int ys = 0;
-    int tw2 = target_width/2;
-    int th2 = target_height/2;
-    int offs_x = rint((source_width-target_width-x_off_f*2*source_width)/2);
-    int offs_y = rint((source_height-target_height-y_off_f*2*source_height)/2);
-    unsigned yprt = 0;
-    unsigned yprt1 = 0;
-    unsigned xprt = 0;
-    unsigned xprt1 = 0;
-
-    //no scaling or subpixel offset
-    if (target_height == source_height && target_width == source_width && offs_x == 0 && offs_y == 0){
-        for (y=0;y<target_height;++y)
-            target.set_row(y,source.get_row(y),target_width);
-        return;
-    }
-
-    for (y=0;y<target_height;++y)
-    {
-        ys = (y*source_height+offs_y)/target_height;
-        int ys1 = ys+1;
-        if (ys1>=source_height)
-            ys1--;
-        if (ys<0)
-            ys=ys1=0;
-        if (source_height/2<target_height)
-            yprt = (y*source_height+offs_y)%target_height;
-        else
-            yprt = th2;
-        yprt1 = target_height-yprt;
-        for (x=0;x<target_width;++x)
-        {
-            xs = (x*source_width+offs_x)/target_width;
-            if (source_width/2<target_width)
-                xprt = (x*source_width+offs_x)%target_width;
-            else
-                xprt = tw2;
-            xprt1 = target_width-xprt;
-            int xs1 = xs+1;
-            if (xs1>=source_width)
-                xs1--;
-            if (xs<0)
-                xs=xs1=0;
-
-            mapnik::grid::value_type a = source(xs,ys);
-            mapnik::grid::value_type b = source(xs1,ys);
-            mapnik::grid::value_type c = source(xs,ys1);
-            mapnik::grid::value_type d = source(xs1,ys1);
-
-            if ((a > 0) && (b > 0))
-                target(x,y) = b;
-            else if ((c > 0) && (d > 0))
-                target(x,y) = d;
-            else
-                target(x,y) = a;
-        }
-    }
-}
-
-}
-
-#endif // GRID_UTIL_HPP
diff --git a/include/mapnik/span_image_filter.hpp b/include/mapnik/span_image_filter.hpp
deleted file mode 100644
index d99c21a..0000000
--- a/include/mapnik/span_image_filter.hpp
+++ /dev/null
@@ -1,158 +0,0 @@
-/*****************************************************************************
- *
- * This file is part of Mapnik (c++ mapping toolkit)
- *
- * Copyright (C) 2015 Artem Pavlenko
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
- *
- *****************************************************************************/
-
-#include <cstdint>
-#include "agg_span_image_filter_rgba.h"
-
-namespace mapnik {
-
-using namespace agg;
-
-template<class Source>
-class span_image_resample_rgba_affine :
-        public span_image_resample_affine<Source>
-{
-public:
-    using source_type = Source;
-    using color_type = typename source_type::color_type;
-    using order_type = typename source_type::order_type;
-    using base_type = span_image_resample_affine<source_type>;
-    using interpolator_type = typename base_type::interpolator_type;
-    using value_type = typename color_type::value_type;
-    using long_type = typename color_type::long_type;
-    enum base_scale_e
-    {
-        base_shift      = color_type::base_shift,
-        base_mask       = color_type::base_mask,
-        downscale_shift = image_filter_shift
-    };
-
-    //--------------------------------------------------------------------
-    span_image_resample_rgba_affine() {}
-    span_image_resample_rgba_affine(source_type& src,
-                                    interpolator_type& inter,
-                                    const image_filter_lut& filter) :
-        base_type(src, inter, filter)
-    {}
-
-
-    //--------------------------------------------------------------------
-    void generate(color_type* span, int x, int y, unsigned len)
-    {
-        base_type::interpolator().begin(x + base_type::filter_dx_dbl(),
-                                        y + base_type::filter_dy_dbl(), len);
-
-        long_type fg[4];
-
-        int diameter     = base_type::filter().diameter();
-        int filter_scale = diameter << image_subpixel_shift;
-        int radius_x     = (diameter * base_type::m_rx) >> 1;
-        int radius_y     = (diameter * base_type::m_ry) >> 1;
-        int len_x_lr     =
-            (diameter * base_type::m_rx + image_subpixel_mask) >>
-            image_subpixel_shift;
-
-        const std::int16_t* weight_array = base_type::filter().weight_array();
-
-        do
-        {
-            base_type::interpolator().coordinates(&x, &y);
-
-            x += base_type::filter_dx_int() - radius_x;
-            y += base_type::filter_dy_int() - radius_y;
-
-            fg[0] = fg[1] = fg[2] = fg[3] = image_filter_scale / 2;
-
-            int y_lr = y >> image_subpixel_shift;
-            int y_hr = ((image_subpixel_mask - (y & image_subpixel_mask)) *
-                        base_type::m_ry_inv) >>
-                image_subpixel_shift;
-            int total_weight = 0;
-            int x_lr = x >> image_subpixel_shift;
-            int x_hr = ((image_subpixel_mask - (x & image_subpixel_mask)) *
-                        base_type::m_rx_inv) >>
-                image_subpixel_shift;
-
-            int x_hr2 = x_hr;
-            const value_type* fg_ptr =
-                (const value_type*)base_type::source().span(x_lr, y_lr, len_x_lr);
-            for(;;)
-            {
-                int weight_y = weight_array[y_hr];
-                x_hr = x_hr2;
-                for(;;)
-                {
-                    int weight = (weight_y * weight_array[x_hr] +
-                                  image_filter_scale / 2) >>
-                        downscale_shift;
-
-                    fg[0] += *fg_ptr++ * weight;
-                    fg[1] += *fg_ptr++ * weight;
-                    fg[2] += *fg_ptr++ * weight;
-                    fg[3] += *fg_ptr   * weight;
-
-                    total_weight += weight;
-                    x_hr  += base_type::m_rx_inv;
-                    if(x_hr >= filter_scale) break;
-                    fg_ptr = (const value_type*)base_type::source().next_x();
-                }
-                y_hr += base_type::m_ry_inv;
-                if(y_hr >= filter_scale) break;
-                fg_ptr = (const value_type*)base_type::source().next_y();
-            }
-
-            if (total_weight)
-            {
-                fg[3] /= total_weight;
-                fg[0] /= total_weight;
-                fg[1] /= total_weight;
-                fg[2] /= total_weight;
-
-                if(fg[0] < 0) fg[0] = 0;
-                if(fg[1] < 0) fg[1] = 0;
-                if(fg[2] < 0) fg[2] = 0;
-                if(fg[3] < 0) fg[3] = 0;
-            }
-            else
-            {
-                fg[0] = 0;
-                fg[1] = 0;
-                fg[2] = 0;
-                fg[3] = 0;
-            }
-
-            if(fg[order_type::R] > base_mask)         fg[order_type::R] = base_mask;
-            if(fg[order_type::G] > base_mask)         fg[order_type::G] = base_mask;
-            if(fg[order_type::B] > base_mask)         fg[order_type::B] = base_mask;
-            if(fg[order_type::A] > base_mask)         fg[order_type::A] = base_mask;
-
-            span->r = (value_type)fg[order_type::R];
-            span->g = (value_type)fg[order_type::G];
-            span->b = (value_type)fg[order_type::B];
-            span->a = (value_type)fg[order_type::A];
-
-            ++span;
-            ++base_type::interpolator();
-        } while(--len);
-    }
-};
-}
diff --git a/include/mapnik/version.hpp b/include/mapnik/version.hpp
index ad784d0..6fcd056 100644
--- a/include/mapnik/version.hpp
+++ b/include/mapnik/version.hpp
@@ -23,11 +23,9 @@
 #ifndef MAPNIK_VERSION_HPP
 #define MAPNIK_VERSION_HPP
 
-#define MAPNIK_VERSION_IS_RELEASE 1
-
 #define MAPNIK_MAJOR_VERSION 3
 #define MAPNIK_MINOR_VERSION 0
-#define MAPNIK_PATCH_VERSION 6
+#define MAPNIK_PATCH_VERSION 7
 
 #define MAPNIK_VERSION (MAPNIK_MAJOR_VERSION*100000) + (MAPNIK_MINOR_VERSION*100) + (MAPNIK_PATCH_VERSION)
 
@@ -36,15 +34,8 @@
 #define MAPNIK_STRINGIFY_HELPER(n) #n
 #endif
 
-#if MAPNIK_VERSION_IS_RELEASE
 #define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
                                 MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
                                 MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION)
 
-#else
-#define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION) "-pre"
-#endif
-
 #endif // MAPNIK_VERSION_HPP
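
With the `-pre` branch gone, `MAPNIK_VERSION_STRING` always matches the three numeric macros, and `MAPNIK_VERSION` stays the single integer downstream code should test: 3.0.7 encodes as 3*100000 + 0*100 + 7 = 300007. A minimal compile-time gate using only the macros defined in this header:

    #include <mapnik/version.hpp>
    #include <iostream>

    int main()
    {
    #if MAPNIK_VERSION >= 300007
        // 3.0.7+: no "-pre" suffix can appear in the version string anymore
        std::cout << "mapnik " << MAPNIK_VERSION_STRING << "\n";
    #else
        std::cout << "older mapnik " << MAPNIK_VERSION_STRING << "\n";
    #endif
        return 0;
    }
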
diff --git a/plugins/input/csv/csv_utils.hpp b/plugins/input/csv/csv_utils.hpp
index 77a2710..2d3cb46 100644
--- a/plugins/input/csv/csv_utils.hpp
+++ b/plugins/input/csv/csv_utils.hpp
@@ -251,7 +251,7 @@ static inline void locate_geometry_column(std::string const& header, std::size_t
     }
 }
 
-static mapnik::geometry::geometry<double> extract_geometry(std::vector<std::string> const& row, geometry_column_locator const& locator)
+static inline mapnik::geometry::geometry<double> extract_geometry(std::vector<std::string> const& row, geometry_column_locator const& locator)
 {
     mapnik::geometry::geometry<double> geom;
     if (locator.type == geometry_column_locator::WKT)
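
The `static` -> `static inline` change matters because `csv_utils.hpp` is now included from several translation units (the CSV plugin and, below, `utils/mapnik-index/process_csv_file.cpp`): a plain `static` function gives each includer its own private copy and trips `-Wunused-function` wherever it goes uncalled, while `static inline` keeps internal linkage and silences that. The pattern reduced to a toy (names illustrative, not mapnik's):

    // helpers.hpp -- safe to include from any number of .cpp files
    static inline int twice(int v)
    {
        return 2 * v; // per-TU copy, but no unused-function noise
    }
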
diff --git a/plugins/input/geojson/geojson_datasource.cpp b/plugins/input/geojson/geojson_datasource.cpp
index 15a9503..ea3f30b 100644
--- a/plugins/input/geojson/geojson_datasource.cpp
+++ b/plugins/input/geojson/geojson_datasource.cpp
@@ -114,7 +114,7 @@ geojson_datasource::geojson_datasource(parameters const& params)
   : datasource(params),
     type_(datasource::Vector),
     desc_(geojson_datasource::name(),
-              *params.get<std::string>("encoding","utf-8")),
+          *params.get<std::string>("encoding","utf-8")),
     filename_(),
     inline_string_(),
     extent_(),
@@ -418,12 +418,9 @@ mapnik::featureset_ptr geojson_datasource::features_at_point(mapnik::coord2d con
     mapnik::box2d<double> query_bbox(pt, pt);
     query_bbox.pad(tol);
     mapnik::query q(query_bbox);
-    std::vector<mapnik::attribute_descriptor> const& desc = desc_.get_descriptors();
-    std::vector<mapnik::attribute_descriptor>::const_iterator itr = desc.begin();
-    std::vector<mapnik::attribute_descriptor>::const_iterator end = desc.end();
-    for ( ;itr!=end;++itr)
+    for (auto const& attr_info : desc_.get_descriptors())
     {
-        q.add_property_name(itr->get_name());
+        q.add_property_name(attr_info.get_name());
     }
     return features(q);
 }
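
This datasource hunk is the first of several below (ogr, pgraster, postgis, shape, sqlite, map.cpp) that replace hand-rolled `const_iterator` begin/end loops with range-`for`. The pattern in isolation, with a stand-in for `attribute_descriptor` (illustrative only):

    #include <iostream>
    #include <string>
    #include <vector>

    struct descriptor // stand-in for mapnik::attribute_descriptor
    {
        std::string name;
        std::string const& get_name() const { return name; }
    };

    int main()
    {
        std::vector<descriptor> desc = {{"name"}, {"population"}};
        for (auto const& attr_info : desc)   // replaces the itr/end boilerplate
            std::cout << attr_info.get_name() << "\n";
    }
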
diff --git a/plugins/input/ogr/ogr_datasource.cpp b/plugins/input/ogr/ogr_datasource.cpp
index 9e2b081..11f80ea 100644
--- a/plugins/input/ogr/ogr_datasource.cpp
+++ b/plugins/input/ogr/ogr_datasource.cpp
@@ -492,12 +492,9 @@ void validate_attribute_names(query const& q, std::vector<attribute_descriptor>
     {
         bool found_name = false;
 
-        std::vector<attribute_descriptor>::const_iterator itr = names.begin();
-        std::vector<attribute_descriptor>::const_iterator end = names.end();
-
-        for (; itr!=end; ++itr)
+        for (auto const& attr_info : names)
         {
-            if (itr->get_name() == *pos)
+            if (attr_info.get_name() == *pos)
             {
                 found_name = true;
                 break;
@@ -508,11 +505,9 @@ void validate_attribute_names(query const& q, std::vector<attribute_descriptor>
         {
             std::ostringstream s;
             s << "OGR Plugin: no attribute named '" << *pos << "'. Valid attributes are: ";
-            std::vector<attribute_descriptor>::const_iterator e_itr = names.begin();
-            std::vector<attribute_descriptor>::const_iterator e_end = names.end();
-            for ( ;e_itr!=e_end;++e_itr)
+            for (auto const& attr_info2 : names)
             {
-                s << e_itr->get_name() << std::endl;
+                s << attr_info2.get_name() << std::endl;
             }
             throw mapnik::datasource_exception(s.str());
         }
@@ -533,10 +528,10 @@ featureset_ptr ogr_datasource::features(query const& q) const
         // feature context (schema)
         mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
 
-        std::vector<attribute_descriptor>::const_iterator itr = desc_ar.begin();
-        std::vector<attribute_descriptor>::const_iterator end = desc_ar.end();
-
-        for (; itr!=end; ++itr) ctx->push(itr->get_name()); // TODO only push query attributes
+        for (auto const& attr_info : desc_ar)
+        {
+            ctx->push(attr_info.get_name()); // TODO only push query attributes
+        }
 
         validate_attribute_names(q, desc_ar);
 
@@ -576,9 +571,10 @@ featureset_ptr ogr_datasource::features_at_point(coord2d const& pt, double tol)
         // feature context (schema)
         mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
 
-        std::vector<attribute_descriptor>::const_iterator itr = desc_ar.begin();
-        std::vector<attribute_descriptor>::const_iterator end = desc_ar.end();
-        for (; itr!=end; ++itr) ctx->push(itr->get_name());
+        for (auto const& attr_info : desc_ar)
+        {
+            ctx->push(attr_info.get_name()); // TODO only push query attributes
+        }
 
         OGRLayer* layer = layer_.layer();
 
diff --git a/plugins/input/pgraster/pgraster_datasource.cpp b/plugins/input/pgraster/pgraster_datasource.cpp
index fb2c4b0..dc3e1a5 100644
--- a/plugins/input/pgraster/pgraster_datasource.cpp
+++ b/plugins/input/pgraster/pgraster_datasource.cpp
@@ -1042,28 +1042,29 @@ featureset_ptr pgraster_datasource::features_at_point(coord2d const& pt, double
             s << "SELECT ST_AsBinary(\"" << geometryColumn_ << "\") AS geom";
 
             mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
-            std::vector<attribute_descriptor>::const_iterator itr = desc_.get_descriptors().begin();
-            std::vector<attribute_descriptor>::const_iterator end = desc_.get_descriptors().end();
+            auto const& desc = desc_.get_descriptors();
 
-            if (! key_field_.empty())
+            if (!key_field_.empty())
             {
                 mapnik::sql_utils::quote_attr(s, key_field_);
                 ctx->push(key_field_);
-                for (; itr != end; ++itr)
+                for (auto const& attr_info : desc)
                 {
-                    if (itr->get_name() != key_field_)
+                    std::string const& name = attr_info.get_name();
+                    if (name != key_field_)
                     {
-                        mapnik::sql_utils::quote_attr(s, itr->get_name());
-                        ctx->push(itr->get_name());
+                        mapnik::sql_utils::quote_attr(s, name);
+                        ctx->push(name);
                     }
                 }
             }
             else
             {
-                for (; itr != end; ++itr)
+                for (auto const& attr_info : desc)
                 {
-                    mapnik::sql_utils::quote_attr(s, itr->get_name());
-                    ctx->push(itr->get_name());
+                    std::string const& name = attr_info.get_name();
+                    mapnik::sql_utils::quote_attr(s, name);
+                    ctx->push(name);
                 }
             }
 
diff --git a/plugins/input/postgis/postgis_datasource.cpp b/plugins/input/postgis/postgis_datasource.cpp
index d1bba46..bfc006b 100644
--- a/plugins/input/postgis/postgis_datasource.cpp
+++ b/plugins/input/postgis/postgis_datasource.cpp
@@ -95,7 +95,7 @@ postgis_datasource::postgis_datasource(parameters const& params)
       // params below are for testing purposes only and may be removed at any time
       intersect_min_scale_(*params.get<mapnik::value_integer>("intersect_min_scale", 0)),
       intersect_max_scale_(*params.get<mapnik::value_integer>("intersect_max_scale", 0)),
-      key_field_as_attribute_(*params.get<mapnik::value_integer>("key_field_as_attribute", true))
+      key_field_as_attribute_(*params.get<mapnik::boolean_type>("key_field_as_attribute", true))
 {
 #ifdef MAPNIK_STATS
     mapnik::progress_timer __stats__(std::clog, "postgis_datasource::init");
@@ -903,31 +903,32 @@ featureset_ptr postgis_datasource::features_at_point(coord2d const& pt, double t
             s << "SELECT ST_AsBinary(\"" << geometryColumn_ << "\") AS geom";
 
             mapnik::context_ptr ctx = std::make_shared<mapnik::context_type>();
-            std::vector<attribute_descriptor>::const_iterator itr = desc_.get_descriptors().begin();
-            std::vector<attribute_descriptor>::const_iterator end = desc_.get_descriptors().end();
+            auto const& desc = desc_.get_descriptors();
 
-            if (! key_field_.empty())
+            if (!key_field_.empty())
             {
                 mapnik::sql_utils::quote_attr(s, key_field_);
                 if (key_field_as_attribute_)
                 {
                     ctx->push(key_field_);
                 }
-                for (; itr != end; ++itr)
+                for (auto const& attr_info : desc)
                 {
-                    if (itr->get_name() != key_field_)
+                    std::string const& name = attr_info.get_name();
+                    if (name != key_field_)
                     {
-                        mapnik::sql_utils::quote_attr(s, itr->get_name());
-                        ctx->push(itr->get_name());
+                        mapnik::sql_utils::quote_attr(s, name);
+                        ctx->push(name);
                     }
                 }
             }
             else
             {
-                for (; itr != end; ++itr)
+                for (auto const& attr_info : desc)
                 {
-                    mapnik::sql_utils::quote_attr(s, itr->get_name());
-                    ctx->push(itr->get_name());
+                    std::string const& name = attr_info.get_name();
+                    mapnik::sql_utils::quote_attr(s, name);
+                    ctx->push(name);
                 }
             }
 
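
The constructor change above is the `key_field_as_attribute` parsing fix from the changelog: read through `mapnik::boolean_type`, the parameter accepts `true`/`false` as well as `0`/`1`, where `mapnik::value_integer` only took numbers. A hedged sketch (header names and the `parameters` map assignment are the usual mapnik 3.x ones, assumed here):

    #include <mapnik/params.hpp>
    #include <mapnik/boolean.hpp>
    #include <iostream>

    int main()
    {
        mapnik::parameters params;
        params["key_field_as_attribute"] = std::string("false"); // XML attributes arrive as strings
        bool v = *params.get<mapnik::boolean_type>("key_field_as_attribute", true);
        std::cout << std::boolalpha << v << "\n"; // false ("0" would parse too)
    }
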
diff --git a/plugins/input/postgis/postgis_featureset.cpp b/plugins/input/postgis/postgis_featureset.cpp
index 9e0db0e..6d417be 100644
--- a/plugins/input/postgis/postgis_featureset.cpp
+++ b/plugins/input/postgis/postgis_featureset.cpp
@@ -128,6 +128,10 @@ feature_ptr postgis_featureset::next()
 
         totalGeomSize_ += size;
         unsigned num_attrs = ctx_->size() + 1;
+        if (!key_field_as_attribute_)
+        {
+            num_attrs++;
+        }
         for (; pos < num_attrs; ++pos)
         {
             std::string name = rs_->getFieldName(pos);
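
The `num_attrs` bump above compensates for the datasource change: the SELECT still lists the key field right after the geometry column even when `key_field_as_attribute=false`, but in that case the key is never pushed into `ctx_`, so `ctx_->size()` undercounts the result columns by one. Schematically (plain arithmetic, not mapnik API):

    #include <iostream>

    int main()
    {
        bool key_field_as_attribute = false;
        int pushed = 3;              // attributes in the feature context (ctx_->size())
        int num_attrs = pushed + 1;  // exclusive bound; column 0 is the geometry
        if (!key_field_as_attribute)
            ++num_attrs;             // key occupies a column but not the context
        std::cout << "columns consumed per row: " << num_attrs << "\n"; // 5
    }
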
diff --git a/plugins/input/shape/shape_datasource.cpp b/plugins/input/shape/shape_datasource.cpp
index 05e24a4..cdde8ee 100644
--- a/plugins/input/shape/shape_datasource.cpp
+++ b/plugins/input/shape/shape_datasource.cpp
@@ -263,15 +263,12 @@ featureset_ptr shape_datasource::features_at_point(coord2d const& pt, double tol
 
     filter_at_point filter(pt,tol);
     // collect all attribute names
-    std::vector<attribute_descriptor> const& desc_vector = desc_.get_descriptors();
-    std::vector<attribute_descriptor>::const_iterator itr = desc_vector.begin();
-    std::vector<attribute_descriptor>::const_iterator end = desc_vector.end();
+    auto const& desc = desc_.get_descriptors();
     std::set<std::string> names;
 
-    while (itr != end)
+    for (auto const& attr_info : desc)
     {
-        names.insert(itr->get_name());
-        ++itr;
+        names.insert(attr_info.get_name());
     }
 
     if (indexed_)
diff --git a/plugins/input/sqlite/sqlite_datasource.cpp b/plugins/input/sqlite/sqlite_datasource.cpp
index 036e31a..939d9d3 100644
--- a/plugins/input/sqlite/sqlite_datasource.cpp
+++ b/plugins/input/sqlite/sqlite_datasource.cpp
@@ -574,16 +574,15 @@ featureset_ptr sqlite_datasource::features_at_point(coord2d const& pt, double to
             ctx->push(key_field_);
         }
 
-        std::vector<attribute_descriptor>::const_iterator itr = desc_.get_descriptors().begin();
-        std::vector<attribute_descriptor>::const_iterator end = desc_.get_descriptors().end();
+        auto const& desc = desc_.get_descriptors();
 
-        for ( ; itr != end; ++itr)
+        for (auto const& attr_info : desc)
         {
-            std::string fld_name = itr->get_name();
-            if (fld_name != key_field_)
+            std::string const& name = attr_info.get_name();
+            if (name != key_field_)
             {
-                s << ",[" << itr->get_name() << "]";
-                ctx->push(itr->get_name());
+                s << ",[" << name << "]";
+                ctx->push(name);
             }
         }
 
diff --git a/scripts/build-local.bat b/scripts/build-local.bat
index 240e70f..5d9affc 100644
--- a/scripts/build-local.bat
+++ b/scripts/build-local.bat
@@ -10,7 +10,7 @@ SET APPVEYOR=true
 SET LOCAL_BUILD_DONT_SKIP_TESTS=true
 
 SET MAPNIK_GIT=3.0.5
-SET BOOST_VERSION=58
+SET BOOST_VERSION=59
 SET FASTBUILD=1
 SET configuration=Release
 SET msvs_toolset=14
diff --git a/src/map.cpp b/src/map.cpp
index a891ebd..81ad147 100644
--- a/src/map.cpp
+++ b/src/map.cpp
@@ -521,21 +521,19 @@ void Map::zoom_all()
         box2d<double> ext;
         bool success = false;
         bool first = true;
-        std::vector<layer>::const_iterator itr = layers_.begin();
-        std::vector<layer>::const_iterator end = layers_.end();
-        while (itr != end)
+        for (auto const& layer : layers_)
         {
-            if (itr->active())
+            if (layer.active())
             {
-                std::string const& layer_srs = itr->srs();
+                std::string const& layer_srs = layer.srs();
                 projection proj1(layer_srs);
                 proj_transform prj_trans(proj0,proj1);
-                box2d<double> layer_ext = itr->envelope();
+                box2d<double> layer_ext = layer.envelope();
                 if (prj_trans.backward(layer_ext, PROJ_ENVELOPE_POINTS))
                 {
                     success = true;
-                    MAPNIK_LOG_DEBUG(map) << "map: Layer " << itr->name() << " original ext=" << itr->envelope();
-                    MAPNIK_LOG_DEBUG(map) << "map: Layer " << itr->name() << " transformed to map srs=" << layer_ext;
+                    MAPNIK_LOG_DEBUG(map) << "map: Layer " << layer.name() << " original ext=" << layer.envelope();
+                    MAPNIK_LOG_DEBUG(map) << "map: Layer " << layer.name() << " transformed to map srs=" << layer_ext;
                     if (first)
                     {
                         ext = layer_ext;
@@ -547,7 +545,6 @@ void Map::zoom_all()
                     }
                 }
             }
-            ++itr;
         }
         if (success)
         {
diff --git a/utils/mapnik-config/build.py b/utils/mapnik-config/build.py
index 60974e7..e74684e 100644
--- a/utils/mapnik-config/build.py
+++ b/utils/mapnik-config/build.py
@@ -29,6 +29,23 @@ Import('env')
 
 config_env = env.Clone()
 
+
+def GetMapnikLibVersion():
+    ver = []
+    for line in open('../../include/mapnik/version.hpp').readlines():
+        if line.startswith('#define MAPNIK_MAJOR_VERSION'):
+            ver.append(line.split(' ')[2].strip())
+        if line.startswith('#define MAPNIK_MINOR_VERSION'):
+            ver.append(line.split(' ')[2].strip())
+        if line.startswith('#define MAPNIK_PATCH_VERSION'):
+            ver.append(line.split(' ')[2].strip())
+    version_string = ".".join(ver)
+    return version_string
+
+if (GetMapnikLibVersion() != config_env['MAPNIK_VERSION_STRING']):
+    print 'Error: version.hpp (%s) does not match cached value (%s): please reconfigure mapnik' % (GetMapnikLibVersion(),config_env['MAPNIK_VERSION_STRING'])
+    Exit(1)
+
 config_variables = '''#!/usr/bin/env bash
 
 ## variables
@@ -153,6 +170,7 @@ target_path = os.path.normpath(os.path.join(config_env['INSTALL_PREFIX'],'bin'))
 full_target = os.path.join(target_path,config_file)
 
 Depends(full_target, env.subst('../../src/%s' % env['MAPNIK_LIB_NAME']))
+Depends(full_target, '../../include/mapnik/version.hpp')
 
 if 'install' in COMMAND_LINE_TARGETS:
     # we must add 'install' catch here because otherwise
diff --git a/utils/mapnik-index/build.py b/utils/mapnik-index/build.py
index 02258ca..3358744 100644
--- a/utils/mapnik-index/build.py
+++ b/utils/mapnik-index/build.py
@@ -30,6 +30,8 @@ program_env = env.Clone()
 source = Split(
     """
     mapnik-index.cpp
+    process_csv_file.cpp
+    process_geojson_file.cpp
     """
     )
 
diff --git a/utils/mapnik-index/mapnik-index.cpp b/utils/mapnik-index/mapnik-index.cpp
index 0f54d4c..c3547d3 100644
--- a/utils/mapnik-index/mapnik-index.cpp
+++ b/utils/mapnik-index/mapnik-index.cpp
@@ -26,10 +26,10 @@
 #include <fstream>
 
 #include <mapnik/util/fs.hpp>
-#include <mapnik/geometry_envelope.hpp>
 #include <mapnik/quad_tree.hpp>
-#include "../../plugins/input/csv/csv_utils.hpp"
 
+#include "process_csv_file.hpp"
+#include "process_geojson_file.hpp"
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wunused-parameter"
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
@@ -37,37 +37,39 @@
 #include <boost/program_options.hpp>
 #pragma GCC diagnostic pop
 
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wshadow"
-#pragma GCC diagnostic ignored "-Wsign-conversion"
-#include <boost/interprocess/mapped_region.hpp>
-#include <boost/interprocess/streams/bufferstream.hpp>
-#pragma GCC diagnostic pop
-#include <mapnik/mapped_memory_cache.hpp>
-#include <boost/version.hpp>
-
 const int DEFAULT_DEPTH = 8;
 const double DEFAULT_RATIO = 0.55;
 
+namespace mapnik { namespace detail {
+
+bool is_csv(std::string const& filename)
+{
+    return boost::iends_with(filename,".csv")
+        || boost::iends_with(filename,".tsv");
+}
+
+bool is_geojson(std::string const& filename)
+{
+    return boost::iends_with(filename,".geojson")
+        || boost::iends_with(filename,".json");
+}
+
+}}
+
 int main (int argc, char** argv)
 {
     //using namespace mapnik;
     namespace po = boost::program_options;
-    using std::string;
-    using std::vector;
-    using std::clog;
-    using std::endl;
-
     bool verbose = false;
     unsigned int depth = DEFAULT_DEPTH;
     double ratio = DEFAULT_RATIO;
-    vector<string> csv_files;
+    std::vector<std::string> files;
     char separator = 0;
     char quote = 0;
     std::string manual_headers;
     try
     {
-        po::options_description desc("csvindex utility");
+        po::options_description desc("Mapnik CSV/GeoJSON index utility");
         desc.add_options()
             ("help,h", "produce usage message")
             ("version,V","print version string")
@@ -77,24 +79,23 @@ int main (int argc, char** argv)
             ("separator,s", po::value<char>(), "CSV columns separator")
             ("quote,q", po::value<char>(), "CSV columns quote")
             ("manual-headers,H", po::value<std::string>(), "CSV manual headers string")
-            ("csv_files",po::value<vector<string> >(),"CSV files to index: file1 file2 ...fileN")
+            ("files",po::value<std::vector<std::string> >(),"Files to index: file1 file2 ...fileN")
             ;
 
         po::positional_options_description p;
-        p.add("csv_files",-1);
+        p.add("files",-1);
         po::variables_map vm;
         po::store(po::command_line_parser(argc, argv).options(desc).positional(p).run(), vm);
         po::notify(vm);
 
         if (vm.count("version"))
         {
-            clog << "version 0.3.0" <<std::endl;
+            std::clog << "version 1.0.0" << std::endl;
             return 1;
         }
-
         if (vm.count("help"))
         {
-            clog << desc << endl;
+            std::clog << desc << std::endl;
             return 1;
         }
         if (vm.count("verbose"))
@@ -121,233 +122,81 @@ int main (int argc, char** argv)
         {
             manual_headers = vm["manual-headers"].as<std::string>();
         }
-        if (vm.count("csv_files"))
+        if (vm.count("files"))
         {
-            csv_files=vm["csv_files"].as< vector<string> >();
+            files=vm["files"].as<std::vector<std::string> >();
         }
     }
     catch (std::exception const& ex)
     {
-        clog << "Error: " << ex.what() << endl;
-        return -1;
+        std::clog << "Error: " << ex.what() << std::endl;
+        return EXIT_FAILURE;
     }
 
-    clog << "max tree depth:" << depth << endl;
-    clog << "split ratio:" << ratio << endl;
+    std::clog << "max tree depth:" << depth << std::endl;
+    std::clog << "split ratio:" << ratio << std::endl;
 
-    if (csv_files.size() == 0)
+    if (files.size() == 0)
     {
-        clog << "no csv files to index" << endl;
-        return 0;
+        std::clog << "no files to index" << std::endl;
+        return EXIT_FAILURE;
     }
 
-    for (auto const& filename : csv_files)
+    using box_type = mapnik::box2d<double>;
+    using item_type = std::pair<box_type, std::pair<std::size_t, std::size_t>>;
+
+    for (auto const& filename : files)
     {
-        clog << "processing " << filename << endl;
-        std::string csvname (filename);
-        if (! mapnik::util::exists (csvname))
+        std::clog << "processing " << filename << std::endl;
+        if (!mapnik::util::exists (filename))
         {
-            clog << "Error : file " << csvname << " does not exist" << endl;
+            std::clog << "Error : file " << filename << " does not exist" << std::endl;
             continue;
         }
-        using file_source_type = boost::interprocess::ibufferstream;
-        file_source_type csv_file;
 
-        mapnik::mapped_region_ptr mapped_region;
-        boost::optional<mapnik::mapped_region_ptr> memory =
-            mapnik::mapped_memory_cache::instance().find(csvname, true);
-        if (memory)
-        {
-            mapped_region = *memory;
-            csv_file.buffer(static_cast<char*>(mapped_region->get_address()),mapped_region->get_size());
-        }
-        else
-        {
-            clog << "Error : cannot mmap " << csvname << endl;
-            continue;
-        }
-        auto file_length = detail::file_length(csv_file);
-        // set back to start
-        csv_file.seekg(0, std::ios::beg);
-        char newline;
-        bool has_newline;
-        char detected_quote;
-        std::tie(newline, has_newline, detected_quote) = detail::autodect_newline_and_quote(csv_file, file_length);
-        if (quote == 0) quote = detected_quote;
-        // set back to start
-        csv_file.seekg(0, std::ios::beg);
-        // get first line
-        std::string csv_line;
-        csv_utils::getline_csv(csv_file, csv_line, newline, quote);
-        if (separator == 0) separator = detail::detect_separator(csv_line);
-        csv_file.seekg(0, std::ios::beg);
-        int line_number = 1;
-        detail::geometry_column_locator locator;
-        std::vector<std::string> headers;
-        std::clog << "Parsing CSV using SEPARATOR=" << separator << " QUOTE=" << quote << std::endl;
-        if (!manual_headers.empty())
-        {
-            std::size_t index = 0;
-            headers = csv_utils::parse_line(manual_headers, separator, quote);
-            for (auto const& header : headers)
-            {
-                detail::locate_geometry_column(header, index++, locator);
-                headers.push_back(header);
-            }
-        }
-        else // parse first line as headers
+        std::vector<item_type> boxes;
+        mapnik::box2d<double> extent;
+        if (mapnik::detail::is_csv(filename))
         {
-            while (csv_utils::getline_csv(csv_file,csv_line,newline, quote))
-            {
-                try
-                {
-                    headers = csv_utils::parse_line(csv_line, separator, quote);
-                    // skip blank lines
-                    if (headers.size() > 0 && headers[0].empty()) ++line_number;
-                    else
-                    {
-                        std::size_t index = 0;
-                        for (auto & header : headers)
-                        {
-                            if (header.empty())
-                            {
-                                // create a placeholder for the empty header
-                                std::ostringstream s;
-                                s << "_" << index;
-                                header = s.str();
-                            }
-                            else
-                            {
-                                detail::locate_geometry_column(header, index, locator);
-                            }
-                            ++index;
-                        }
-                        ++line_number;
-                        break;
-                    }
-                }
-                catch (std::exception const& ex)
-                {
-                    std::string s("CSV index: error parsing headers: ");
-                    s += ex.what();
-                    std::clog << s << std::endl;
-                    return 1;
-                }
-            }
+            auto result = mapnik::detail::process_csv_file(boxes, filename, manual_headers, separator, quote);
+            if (!result.first) continue;
+            extent = result.second;
         }
-
-        if (locator.type == detail::geometry_column_locator::UNKNOWN)
+        else if (mapnik::detail::is_geojson(filename))
         {
-            std::clog << "CSV index: could not detect column headers with the name of wkt, geojson, x/y, or "
-                      << "latitude/longitude - this is required for reading geometry data" << std::endl;
-            return 1;
+            auto result = mapnik::detail::process_geojson_file(boxes, filename);
+            if (!result.first) continue;
+            extent = result.second;
         }
 
-        std::size_t num_headers = headers.size();
-        auto pos = csv_file.tellg();
-
-        // handle rare case of a single line of data and user-provided headers
-        // where a lack of a newline will mean that csv_utils::getline_csv returns false
-        bool is_first_row = false;
-        if (!has_newline)
+        if (extent.valid())
         {
-            csv_file.setstate(std::ios::failbit);
-            pos = 0;
-            if (!csv_line.empty())
+            std::clog << extent << std::endl;
+            mapnik::quad_tree<std::pair<std::size_t, std::size_t>> tree(extent, depth, ratio);
+            for (auto const& item : boxes)
             {
-                is_first_row = true;
+                tree.insert(std::get<1>(item), std::get<0>(item));
             }
-        }
-
-        mapnik::box2d<double> extent;
-        using box_type = mapnik::box2d<double>;
-        using item_type = std::pair<box_type, std::pair<unsigned, unsigned>>;
-        std::vector<item_type> boxes;
 
-        while (is_first_row || csv_utils::getline_csv(csv_file, csv_line, newline, quote))
-        {
-            auto record_offset = pos;
-            auto record_size = csv_line.length();
-            pos = csv_file.tellg();
-            is_first_row = false;
-            // skip blank lines
-            if (record_size <= 10)
+            std::fstream file((filename + ".index").c_str(),
+                              std::ios::in | std::ios::out | std::ios::trunc | std::ios::binary);
+            if (!file)
             {
-                std::string trimmed = csv_line;
-                boost::trim_if(trimmed, boost::algorithm::is_any_of("\",'\r\n "));
-                if (trimmed.empty())
-                {
-                    std::clog << "CSV index: empty row encountered at line: " << line_number << std::endl;
-                    continue;
-                }
+                std::clog << "cannot open index file for writing file \""
+                          << (filename + ".index") << "\"" << std::endl;
             }
-            try
+            else
             {
-                auto values = csv_utils::parse_line(csv_line, separator, quote);
-                unsigned num_fields = values.size();
-                if (num_fields > num_headers || num_fields < num_headers)
-                {
-                    std::ostringstream s;
-                    s << "CSV Index: # of columns("
-                      << num_fields << ") > # of headers("
-                      << num_headers << ") parsed for row " << line_number << "\n";
-                    std::clog << s.str() << std::endl;
-                    return 1;
-                }
-
-                auto geom = detail::extract_geometry(values, locator);
-                if (!geom.is<mapnik::geometry::geometry_empty>())
-                {
-                    auto box = mapnik::geometry::envelope(geom);
-                    if (!extent.valid()) extent = box;
-                    else extent.expand_to_include(box);
-                    boxes.emplace_back(std::move(box), make_pair(record_offset, record_size));
-                }
-                else
-                {
-                    std::ostringstream s;
-                    s << "CSV Index: expected geometry column: could not parse row "
-                      << line_number << " "
-                      << values[locator.index] << "'";
-                    std::clog << s.str() << std::endl;;
-                }
-            }
-            catch (std::exception const& ex)
-            {
-                std::ostringstream s;
-                s << "CSV Index: unexpected error parsing line: " << line_number
-                  << " - found " << headers.size() << " with values like: " << csv_line << "\n"
-                  << " and got error like: " << ex.what();
-                std::clog << s.str() << std::endl;
-                return 1;
+                tree.trim();
+                std::clog <<  "number nodes=" << tree.count() << std::endl;
+                //tree.print();
+                file.exceptions(std::ios::failbit | std::ios::badbit);
+                tree.write(file);
+                file.flush();
+                file.close();
             }
         }
-
-        std::clog << extent << std::endl;
-        mapnik::quad_tree<std::pair<std::size_t, std::size_t>> tree(extent, depth, ratio);
-        for (auto const& item : boxes)
-        {
-            tree.insert(std::get<1>(item), std::get<0>(item));
-        }
-
-        std::fstream file((csvname + ".index").c_str(),
-                          std::ios::in | std::ios::out | std::ios::trunc | std::ios::binary);
-        if (!file)
-        {
-            clog << "cannot open index file for writing file \""
-                 << (csvname + ".index") << "\"" << endl;
-        }
-        else
-        {
-            tree.trim();
-            std::clog <<  "number nodes=" << tree.count() << std::endl;
-            //tree.print();
-            file.exceptions(std::ios::failbit | std::ios::badbit);
-            tree.write(file);
-            file.flush();
-            file.close();
-        }
     }
-    clog << "done!" << endl;
-    return 0;
+    std::clog << "done!" << std::endl;
+    return EXIT_SUCCESS;
 }
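
`mapnik-index` now fans out per file: `is_csv`/`is_geojson` pick the processor by case-insensitive extension, each processor returns a `(success, extent)` pair, and the quad-tree write happens once, here. The dispatch in isolation (self-contained; `boost::iends_with` lives in `<boost/algorithm/string/predicate.hpp>`):

    #include <boost/algorithm/string/predicate.hpp>
    #include <iostream>
    #include <string>

    static bool is_csv(std::string const& f)
    {
        return boost::iends_with(f, ".csv") || boost::iends_with(f, ".tsv");
    }

    static bool is_geojson(std::string const& f)
    {
        return boost::iends_with(f, ".geojson") || boost::iends_with(f, ".json");
    }

    int main()
    {
        for (std::string f : {"points.CSV", "world.GeoJSON", "raster.tif"})
            std::cout << f << " -> "
                      << (is_csv(f) ? "csv" : is_geojson(f) ? "geojson" : "skipped")
                      << "\n";
    }
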
diff --git a/utils/mapnik-index/process_csv_file.cpp b/utils/mapnik-index/process_csv_file.cpp
new file mode 100644
index 0000000..3b4344a
--- /dev/null
+++ b/utils/mapnik-index/process_csv_file.cpp
@@ -0,0 +1,215 @@
+/*****************************************************************************
+ *
+ * This file is part of Mapnik (c++ mapping toolkit)
+ *
+ * Copyright (C) 2015 Artem Pavlenko
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ *
+ *****************************************************************************/
+
+#include "process_csv_file.hpp"
+#include "../../plugins/input/csv/csv_utils.hpp"
+#include <mapnik/geometry_envelope.hpp>
+
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wsign-conversion"
+#include <boost/interprocess/mapped_region.hpp>
+#include <boost/interprocess/streams/bufferstream.hpp>
+#pragma GCC diagnostic pop
+#include <mapnik/mapped_memory_cache.hpp>
+
+namespace mapnik { namespace detail {
+
+template <typename T>
+std::pair<bool,box2d<double>> process_csv_file(T & boxes, std::string const& filename, std::string const& manual_headers, char separator, char quote)
+{
+    using file_source_type = boost::interprocess::ibufferstream;
+    file_source_type csv_file;
+    mapnik::box2d<double> extent;
+    mapnik::mapped_region_ptr mapped_region;
+    boost::optional<mapnik::mapped_region_ptr> memory =
+        mapnik::mapped_memory_cache::instance().find(filename, true);
+    if (memory)
+    {
+        mapped_region = *memory;
+        csv_file.buffer(static_cast<char*>(mapped_region->get_address()),mapped_region->get_size());
+    }
+    else
+    {
+        std::clog << "Error : cannot mmap " << filename << std::endl;
+        return std::make_pair(false, extent);
+    }
+    auto file_length = ::detail::file_length(csv_file);
+    // set back to start
+    csv_file.seekg(0, std::ios::beg);
+    char newline;
+    bool has_newline;
+    char detected_quote;
+    std::tie(newline, has_newline, detected_quote) = ::detail::autodect_newline_and_quote(csv_file, file_length);
+    if (quote == 0) quote = detected_quote;
+    // set back to start
+    csv_file.seekg(0, std::ios::beg);
+    // get first line
+    std::string csv_line;
+    csv_utils::getline_csv(csv_file, csv_line, newline, quote);
+    if (separator == 0) separator = ::detail::detect_separator(csv_line);
+    csv_file.seekg(0, std::ios::beg);
+    int line_number = 1;
+    ::detail::geometry_column_locator locator;
+    std::vector<std::string> headers;
+    std::clog << "Parsing CSV using SEPARATOR=" << separator << " QUOTE=" << quote << std::endl;
+    if (!manual_headers.empty())
+    {
+        std::size_t index = 0;
+        headers = csv_utils::parse_line(manual_headers, separator, quote);
+        for (auto const& header : headers)
+        {
+            // record which header (wkt/geojson/x/y/lat/lon) holds the geometry
+            ::detail::locate_geometry_column(header, index++, locator);
+        }
+    }
+    else // parse first line as headers
+    {
+        while (csv_utils::getline_csv(csv_file,csv_line,newline, quote))
+        {
+            try
+            {
+                headers = csv_utils::parse_line(csv_line, separator, quote);
+                // skip blank lines
+                if (headers.size() > 0 && headers[0].empty()) ++line_number;
+                else
+                {
+                    std::size_t index = 0;
+                    for (auto & header : headers)
+                    {
+                        if (header.empty())
+                        {
+                            // create a placeholder for the empty header
+                            std::ostringstream s;
+                            s << "_" << index;
+                            header = s.str();
+                        }
+                        else
+                        {
+                            ::detail::locate_geometry_column(header, index, locator);
+                        }
+                        ++index;
+                    }
+                    ++line_number;
+                    break;
+                }
+            }
+            catch (std::exception const& ex)
+            {
+                std::string s("CSV index: error parsing headers: ");
+                s += ex.what();
+                std::clog << s << std::endl;
+                return std::make_pair(false, extent);
+            }
+        }
+    }
+
+    if (locator.type == ::detail::geometry_column_locator::UNKNOWN)
+    {
+        std::clog << "CSV index: could not detect column headers with the name of wkt, geojson, x/y, or "
+                  << "latitude/longitude - this is required for reading geometry data" << std::endl;
+        return std::make_pair(false, extent);
+    }
+
+    std::size_t num_headers = headers.size();
+    auto pos = csv_file.tellg();
+
+    // handle rare case of a single line of data and user-provided headers
+    // where a lack of a newline will mean that csv_utils::getline_csv returns false
+    bool is_first_row = false;
+    if (!has_newline)
+    {
+        csv_file.setstate(std::ios::failbit);
+        pos = 0;
+        if (!csv_line.empty())
+        {
+            is_first_row = true;
+        }
+    }
+
+    while (is_first_row || csv_utils::getline_csv(csv_file, csv_line, newline, quote))
+    {
+        auto record_offset = pos;
+        auto record_size = csv_line.length();
+        pos = csv_file.tellg();
+        is_first_row = false;
+        // skip blank lines
+        if (record_size <= 10)
+        {
+            std::string trimmed = csv_line;
+            boost::trim_if(trimmed, boost::algorithm::is_any_of("\",'\r\n "));
+            if (trimmed.empty())
+            {
+                std::clog << "CSV index: empty row encountered at line: " << line_number << std::endl;
+                continue;
+            }
+        }
+        try
+        {
+            auto values = csv_utils::parse_line(csv_line, separator, quote);
+            unsigned num_fields = values.size();
+            if (num_fields != num_headers)
+            {
+                std::ostringstream s;
+                s << "CSV Index: # of columns("
+                  << num_fields << ") != # of headers("
+                  << num_headers << ") parsed for row " << line_number << "\n";
+                std::clog << s.str() << std::endl;
+                return std::make_pair(false, extent);
+            }
+
+            auto geom = ::detail::extract_geometry(values, locator);
+            if (!geom.is<mapnik::geometry::geometry_empty>())
+            {
+                auto box = mapnik::geometry::envelope(geom);
+                if (!extent.valid()) extent = box;
+                else extent.expand_to_include(box);
+                boxes.emplace_back(std::move(box), make_pair(record_offset, record_size));
+            }
+            else
+            {
+                std::ostringstream s;
+                s << "CSV Index: expected geometry column: could not parse row "
+                  << line_number << " "
+                  << values[locator.index] << "'";
+                std::clog << s.str() << std::endl;;
+            }
+        }
+        catch (std::exception const& ex)
+        {
+            std::ostringstream s;
+            s << "CSV Index: unexpected error parsing line: " << line_number
+              << " - found " << headers.size() << " with values like: " << csv_line << "\n"
+              << " and got error like: " << ex.what();
+            std::clog << s.str() << std::endl;
+            return std::make_pair(false, extent);
+        }
+    }
+    return std::make_pair(true, extent);
+}
+
+using box_type = mapnik::box2d<double>;
+using item_type = std::pair<box_type, std::pair<std::size_t, std::size_t>>;
+using boxes_type = std::vector<item_type>;
+template std::pair<bool,box2d<double>> process_csv_file(boxes_type&, std::string const&, std::string const&, char, char);
+
+}}
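
Note the closing lines above: `process_csv_file` is a template defined in the `.cpp`, with one explicit instantiation for `boxes_type`. That keeps `csv_utils.hpp` (and its boost.interprocess baggage) out of everything that merely includes `process_csv_file.hpp`. The pattern reduced to a toy (file names illustrative):

    // count.hpp -- declaration only; includers never see the body
    template <typename T> int count_items(T const& container);

    // count.cpp -- body plus the one instantiation the linker will need
    #include <vector>
    template <typename T> int count_items(T const& container)
    {
        return static_cast<int>(container.size());
    }
    template int count_items(std::vector<int> const&); // explicit instantiation

    // main.cpp can call count_items(std::vector<int>{1, 2, 3}) with only
    // count.hpp in scope.
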
diff --git a/include/mapnik/version.hpp b/utils/mapnik-index/process_csv_file.hpp
similarity index 51%
copy from include/mapnik/version.hpp
copy to utils/mapnik-index/process_csv_file.hpp
index ad784d0..f84393d 100644
--- a/include/mapnik/version.hpp
+++ b/utils/mapnik-index/process_csv_file.hpp
@@ -20,31 +20,17 @@
  *
  *****************************************************************************/
 
-#ifndef MAPNIK_VERSION_HPP
-#define MAPNIK_VERSION_HPP
+#ifndef MAPNIK_UTILS_PROCESS_CSV_FILE_HPP
+#define MAPNIK_UTILS_PROCESS_CSV_FILE_HPP
 
-#define MAPNIK_VERSION_IS_RELEASE 1
+#include <utility>
+#include <mapnik/box2d.hpp>
 
-#define MAPNIK_MAJOR_VERSION 3
-#define MAPNIK_MINOR_VERSION 0
-#define MAPNIK_PATCH_VERSION 6
+namespace mapnik { namespace detail {
 
-#define MAPNIK_VERSION (MAPNIK_MAJOR_VERSION*100000) + (MAPNIK_MINOR_VERSION*100) + (MAPNIK_PATCH_VERSION)
+template <typename T>
+std::pair<bool, box2d<double>> process_csv_file(T & boxes, std::string const& filename, std::string const& manual_headers, char separator, char quote);
 
-#ifndef MAPNIK_STRINGIFY
-#define MAPNIK_STRINGIFY(n) MAPNIK_STRINGIFY_HELPER(n)
-#define MAPNIK_STRINGIFY_HELPER(n) #n
-#endif
+}}
 
-#if MAPNIK_VERSION_IS_RELEASE
-#define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION)
-
-#else
-#define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION) "-pre"
-#endif
-
-#endif // MAPNIK_VERSION_HPP
+#endif // MAPNIK_UTILS_PROCESS_CSV_FILE_HPP
diff --git a/utils/mapnik-index/process_geojson_file.cpp b/utils/mapnik-index/process_geojson_file.cpp
new file mode 100644
index 0000000..ed9adc3
--- /dev/null
+++ b/utils/mapnik-index/process_geojson_file.cpp
@@ -0,0 +1,90 @@
+/*****************************************************************************
+ *
+ * This file is part of Mapnik (c++ mapping toolkit)
+ *
+ * Copyright (C) 2015 Artem Pavlenko
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ *
+ *****************************************************************************/
+
+#include "process_geojson_file.hpp"
+#include <mapnik/geometry.hpp>
+#include <mapnik/geometry_envelope.hpp>
+#include <mapnik/geometry_adapters.hpp>
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wsign-compare"
+#pragma GCC diagnostic ignored "-Wsign-conversion"
+#include <boost/interprocess/mapped_region.hpp>
+#include <boost/interprocess/streams/bufferstream.hpp>
+#include <boost/spirit/include/qi.hpp>
+#pragma GCC diagnostic pop
+#include <mapnik/mapped_memory_cache.hpp>
+#include <mapnik/json/positions_grammar.hpp>
+#include <mapnik/json/extract_bounding_box_grammar_impl.hpp>
+
+namespace {
+using base_iterator_type = char const*;
+const mapnik::json::extract_bounding_box_grammar<base_iterator_type> geojson_datasource_static_bbox_grammar;
+}
+
+namespace mapnik { namespace detail {
+
+template <typename T>
+std::pair<bool,box2d<double>> process_geojson_file(T & boxes, std::string const& filename)
+{
+    mapnik::box2d<double> extent;
+    mapnik::mapped_region_ptr mapped_region;
+    boost::optional<mapnik::mapped_region_ptr> memory =
+        mapnik::mapped_memory_cache::instance().find(filename, true);
+    if (!memory)
+    {
+        std::clog << "Error : cannot mmap " << filename << std::endl;
+        return std::make_pair(false, extent);
+    }
+    else
+    {
+        mapped_region = *memory;
+    }
+    char const* start = reinterpret_cast<char const*>(mapped_region->get_address());
+    char const* end = start + mapped_region->get_size();
+    boost::spirit::standard::space_type space;
+    try
+    {
+        if (!boost::spirit::qi::phrase_parse(start, end, (geojson_datasource_static_bbox_grammar)(boost::phoenix::ref(boxes)), space))
+        {
+            std::clog << "mapnik-index (GeoJSON): could not parse '" << filename << "'" << std::endl;
+            return std::make_pair(false, extent);
+        }
+    }
+    catch (std::exception const& ex)
+    {
+        std::clog << "mapnik-index:" << ex.what() << std::endl;
+    }
+    for (auto const& item : boxes)
+    {
+        if (!extent.valid()) extent = item.first;
+        else extent.expand_to_include(item.first);
+    }
+    return std::make_pair(true, extent);
+}
+
+using box_type = mapnik::box2d<double>;
+using item_type = std::pair<box_type, std::pair<std::size_t, std::size_t>>;
+using boxes_type = std::vector<item_type>;
+template std::pair<bool,box2d<double>> process_geojson_file(boxes_type&, std::string const&);
+
+}}
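
For reference, a minimal driver showing how this new entry point can be
called; the local aliases mirror the explicit instantiation above, while
the main() wrapper itself is hypothetical (the real caller is
utils/mapnik-index/mapnik-index.cpp):

    #include <cstddef>
    #include <iostream>
    #include <utility>
    #include <vector>

    #include <mapnik/box2d.hpp>
    #include "process_geojson_file.hpp"

    int main(int argc, char** argv)
    {
        if (argc < 2) return 1;
        // One (bbox, (offset, length)) entry is collected per feature.
        using box_type = mapnik::box2d<double>;
        using item_type = std::pair<box_type, std::pair<std::size_t, std::size_t>>;
        std::vector<item_type> boxes;
        auto result = mapnik::detail::process_geojson_file(boxes, argv[1]);
        if (!result.first) return 1; // mmap or parse failure
        std::clog << boxes.size() << " features, extent "
                  << result.second.minx() << "," << result.second.miny() << " "
                  << result.second.maxx() << "," << result.second.maxy() << std::endl;
        return 0;
    }
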
diff --git a/include/mapnik/version.hpp b/utils/mapnik-index/process_geojson_file.hpp
similarity index 51%
copy from include/mapnik/version.hpp
copy to utils/mapnik-index/process_geojson_file.hpp
index ad784d0..d7ccd63 100644
--- a/include/mapnik/version.hpp
+++ b/utils/mapnik-index/process_geojson_file.hpp
@@ -20,31 +20,17 @@
  *
  *****************************************************************************/
 
-#ifndef MAPNIK_VERSION_HPP
-#define MAPNIK_VERSION_HPP
+#ifndef MAPNIK_UTILS_PROCESS_GEOJSON_FILE_HPP
+#define MAPNIK_UTILS_PROCESS_GEOJSON_FILE_HPP
 
-#define MAPNIK_VERSION_IS_RELEASE 1
+#include <utility>
+#include <mapnik/box2d.hpp>
 
-#define MAPNIK_MAJOR_VERSION 3
-#define MAPNIK_MINOR_VERSION 0
-#define MAPNIK_PATCH_VERSION 6
+namespace mapnik { namespace detail {
 
-#define MAPNIK_VERSION (MAPNIK_MAJOR_VERSION*100000) + (MAPNIK_MINOR_VERSION*100) + (MAPNIK_PATCH_VERSION)
+template <typename T>
+std::pair<bool, box2d<double>> process_geojson_file(T & boxes, std::string const& filename);
 
-#ifndef MAPNIK_STRINGIFY
-#define MAPNIK_STRINGIFY(n) MAPNIK_STRINGIFY_HELPER(n)
-#define MAPNIK_STRINGIFY_HELPER(n) #n
-#endif
+}}
 
-#if MAPNIK_VERSION_IS_RELEASE
-#define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION)
-
-#else
-#define MAPNIK_VERSION_STRING   MAPNIK_STRINGIFY(MAPNIK_MAJOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_MINOR_VERSION) "." \
-                                MAPNIK_STRINGIFY(MAPNIK_PATCH_VERSION) "-pre"
-#endif
-
-#endif // MAPNIK_VERSION_HPP
+#endif // MAPNIK_UTILS_PROCESS_GEOJSON_FILE_HPP
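
A side note on the version.hpp lines removed above: MAPNIK_VERSION packs
the three components as major*100000 + minor*100 + patch, so 3.0.6
encodes as 300006 and this 3.0.7 release as 300007. A compile-time
sanity check of that encoding, as a sketch:

    #include <mapnik/version.hpp>

    // 3*100000 + 0*100 + 7 == 300007 for the 3.0.7 release
    static_assert(MAPNIK_VERSION == MAPNIK_MAJOR_VERSION * 100000
                                  + MAPNIK_MINOR_VERSION * 100
                                  + MAPNIK_PATCH_VERSION,
                  "unexpected MAPNIK_VERSION encoding");
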
diff --git a/utils/nik2img/build.py b/utils/mapnik-render/build.py
similarity index 74%
rename from utils/nik2img/build.py
rename to utils/mapnik-render/build.py
index 244edbe..5cdaa16 100644
--- a/utils/nik2img/build.py
+++ b/utils/mapnik-render/build.py
@@ -7,7 +7,7 @@ program_env = env.Clone()
 
 source = Split(
     """
-    nik2img.cpp
+    mapnik-render.cpp
     """
     )
 
@@ -24,11 +24,11 @@ libraries.extend(copy(env['LIBMAPNIK_LIBS']))
 if env['RUNTIME_LINK'] == 'static' and env['PLATFORM'] == 'Linux':
     libraries.append('dl')
 
-nik2img = program_env.Program('nik2img', source, LIBS=libraries)
-Depends(nik2img, env.subst('../../src/%s' % env['MAPNIK_LIB_NAME']))
+mapnik_render = program_env.Program('mapnik-render', source, LIBS=libraries)
+Depends(mapnik_render, env.subst('../../src/%s' % env['MAPNIK_LIB_NAME']))
 
 if 'uninstall' not in COMMAND_LINE_TARGETS:
-    env.Install(os.path.join(env['INSTALL_PREFIX'],'bin'), nik2img)
+    env.Install(os.path.join(env['INSTALL_PREFIX'],'bin'), mapnik_render)
     env.Alias('install', os.path.join(env['INSTALL_PREFIX'],'bin'))
 
-env['create_uninstall_target'](env, os.path.join(env['INSTALL_PREFIX'],'bin','nik2img'))
+env['create_uninstall_target'](env, os.path.join(env['INSTALL_PREFIX'],'bin','mapnik-render'))
diff --git a/utils/nik2img/nik2img.cpp b/utils/mapnik-render/mapnik-render.cpp
similarity index 100%
rename from utils/nik2img/nik2img.cpp
rename to utils/mapnik-render/mapnik-render.cpp
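
The renamed tool itself is unchanged (100% similarity). In essence it
loads a map XML and renders it to a PNG via the AGG renderer, roughly as
in this sketch; the fixed map size and the plugin directory below are
assumptions, not the tool's actual option handling:

    #include <mapnik/agg_renderer.hpp>
    #include <mapnik/datasource_cache.hpp>
    #include <mapnik/image.hpp>
    #include <mapnik/image_util.hpp>
    #include <mapnik/load_map.hpp>
    #include <mapnik/map.hpp>

    int main(int argc, char** argv)
    {
        if (argc < 3) return 1; // usage: <styles.xml> <out.png>
        // Plugin directory is an assumption; `mapnik-config --input-plugins`
        // reports the configured one.
        mapnik::datasource_cache::instance().register_datasources("/usr/lib/mapnik/input");
        mapnik::Map map(800, 600);
        mapnik::load_map(map, argv[1]);
        map.zoom_all();
        mapnik::image_rgba8 im(map.width(), map.height());
        mapnik::agg_renderer<mapnik::image_rgba8> ren(map, im);
        ren.apply();
        mapnik::save_to_file(im, argv[2], "png");
        return 0;
    }
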
