[python-mapnik] 01/06: Imported Upstream version 0.0~20150817-da1d9dd

Sebastiaan Couwenberg sebastic at moszumanska.debian.org
Tue Aug 25 21:08:29 UTC 2015


This is an automated email from the git hooks/post-receive script.

sebastic pushed a commit to branch master
in repository python-mapnik.

commit 79d3466b953ec209374d797c7f49b3100446386d
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date:   Tue Aug 25 22:25:11 2015 +0200

    Imported Upstream version 0.0~20150817-da1d9dd
---
 bootstrap.sh                                       |    2 +-
 mapnik/__init__.py                                 |  183 +--
 mapnik/mapnik_settings.py                          |    1 +
 mapnik/printing.py                                 |  955 +++++++++------
 setup.py                                           |  232 ++--
 src/mapnik_color.cpp                               |    2 +
 src/mapnik_coord.cpp                               |    2 +
 src/mapnik_datasource.cpp                          |    2 +
 src/mapnik_datasource_cache.cpp                    |    2 +
 src/mapnik_envelope.cpp                            |    2 +
 src/mapnik_expression.cpp                          |    2 +
 src/mapnik_feature.cpp                             |    2 +
 src/mapnik_featureset.cpp                          |    2 +
 src/mapnik_font_engine.cpp                         |    2 +
 src/mapnik_fontset.cpp                             |    2 +
 src/mapnik_gamma_method.cpp                        |    2 +
 src/mapnik_geometry.cpp                            |    2 +
 src/mapnik_grid.cpp                                |    2 +
 src/mapnik_grid_view.cpp                           |    2 +
 src/mapnik_image.cpp                               |    4 +-
 src/mapnik_image_view.cpp                          |    2 +
 src/mapnik_label_collision_detector.cpp            |    2 +
 src/mapnik_layer.cpp                               |    2 +
 src/mapnik_logger.cpp                              |    2 +
 src/mapnik_map.cpp                                 |    2 +
 src/mapnik_palette.cpp                             |    2 +
 src/mapnik_parameters.cpp                          |    4 +-
 src/mapnik_proj_transform.cpp                      |    2 +
 src/mapnik_projection.cpp                          |    2 +
 src/mapnik_python.cpp                              |    2 +
 src/mapnik_query.cpp                               |    2 +
 src/mapnik_raster_colorizer.cpp                    |    2 +
 src/mapnik_rule.cpp                                |    2 +
 src/mapnik_scaling_method.cpp                      |    2 +
 src/mapnik_style.cpp                               |    2 +
 src/mapnik_symbolizer.cpp                          |    2 +
 src/mapnik_text_placement.cpp                      |    2 +
 src/mapnik_view_transform.cpp                      |    2 +
 src/python_grid_utils.cpp                          |   15 +-
 src/python_to_value.hpp                            |    3 +
 .../agg_rasterizer_integer_overflow_test.py        |  133 +-
 test/python_tests/box2d_test.py                    |   82 +-
 test/python_tests/buffer_clear_test.py             |   40 +-
 test/python_tests/cairo_test.py                    |  213 ++--
 test/python_tests/color_test.py                    |   26 +-
 test/python_tests/compare_test.py                  |  130 +-
 test/python_tests/compositing_test.py              |  238 ++--
 test/python_tests/copy_test.py                     |  115 +-
 test/python_tests/csv_test.py                      |  656 +++++-----
 test/python_tests/datasource_test.py               |   99 +-
 test/python_tests/datasource_xml_template_test.py  |   12 +-
 test/python_tests/extra_map_props_test.py          |   76 +-
 test/python_tests/feature_id_test.py               |   38 +-
 test/python_tests/feature_test.py                  |   82 +-
 test/python_tests/filter_test.py                   |  397 +++---
 test/python_tests/fontset_test.py                  |   18 +-
 test/python_tests/geojson_plugin_test.py           |   73 +-
 test/python_tests/geometry_io_test.py              |  277 +++--
 test/python_tests/grayscale_test.py                |   13 +-
 test/python_tests/image_encoding_speed_test.py     |   47 +-
 test/python_tests/image_filters_test.py            |   38 +-
 test/python_tests/image_test.py                    |  381 +++---
 test/python_tests/image_tiff_test.py               |  397 +++---
 test/python_tests/introspection_test.py            |   26 +-
 test/python_tests/json_feature_properties_test.py  |  156 +--
 test/python_tests/layer_buffer_size_test.py        |   31 +-
 test/python_tests/layer_modification_test.py       |   36 +-
 test/python_tests/layer_test.py                    |   37 +-
 test/python_tests/load_map_test.py                 |   61 +-
 test/python_tests/map_query_test.py                |   90 +-
 test/python_tests/mapnik_logger_test.py            |   17 +-
 test/python_tests/mapnik_test_data_test.py         |   53 +-
 .../python_tests/markers_complex_rendering_test.py |   44 +-
 test/python_tests/memory_datasource_test.py        |   19 +-
 test/python_tests/multi_tile_raster_test.py        |   62 +-
 test/python_tests/object_test.py                   |    3 +-
 test/python_tests/ogr_and_shape_geometries_test.py |   34 +-
 test/python_tests/ogr_test.py                      |  245 +++-
 test/python_tests/osm_test.py                      |   43 +-
 test/python_tests/palette_test.py                  |   62 +-
 test/python_tests/parameters_test.py               |   62 +-
 test/python_tests/pgraster_test.py                 | 1291 +++++++++++---------
 test/python_tests/pickling_test.py                 |   19 +-
 test/python_tests/png_encoding_test.py             |  172 +--
 test/python_tests/pngsuite_test.py                 |   18 +-
 test/python_tests/postgis_test.py                  | 1149 +++++++++--------
 test/python_tests/projection_test.py               |  111 +-
 test/python_tests/python_plugin_test.py            |    2 +-
 test/python_tests/query_test.py                    |   17 +-
 test/python_tests/query_tolerance_test.py          |   25 +-
 test/python_tests/raster_colorizer_test.py         |  125 +-
 test/python_tests/raster_symbolizer_test.py        |  134 +-
 test/python_tests/rasterlite_test.py               |   26 +-
 test/python_tests/render_grid_test.py              |  941 +++++++++++---
 test/python_tests/render_test.py                   |  222 ++--
 test/python_tests/reprojection_test.py             |   93 +-
 test/python_tests/save_map_test.py                 |   47 +-
 test/python_tests/shapefile_test.py                |  112 +-
 test/python_tests/shapeindex_test.py               |   67 +-
 test/python_tests/sqlite_rtree_test.py             |  110 +-
 test/python_tests/sqlite_test.py                   |  841 +++++++++----
 test/python_tests/style_test.py                    |   19 +-
 test/python_tests/topojson_plugin_test.py          |   54 +-
 test/python_tests/utilities.py                     |   63 +-
 test/python_tests/webp_encoding_test.py            |  131 +-
 test/run_tests.py                                  |   61 +-
 test/visual.py                                     |  203 +--
 107 files changed, 7458 insertions(+), 4718 deletions(-)
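
 The bulk of the mapnik/__init__.py and mapnik/printing.py changes below are Python 3 compatibility and PEP 8 cleanups: implicit intra-package imports become explicit relative imports (e.g. "from ._mapnik import *"), and dict.has_key() tests become "in" membership tests. A minimal, self-contained sketch of the environment-lookup idiom the patch switches to (find_font_path is a hypothetical helper used only for illustration, not part of the patch):

     import os

     def find_font_path(default_path):
         # os.environ.has_key('MAPNIK_FONT_DIRECTORY') no longer exists on
         # Python 3; membership testing with "in" works on both 2 and 3.
         if 'MAPNIK_FONT_DIRECTORY' in os.environ:
             return os.environ['MAPNIK_FONT_DIRECTORY']
         return default_path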

diff --git a/bootstrap.sh b/bootstrap.sh
index 806f3f5..3d207b6 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -22,7 +22,7 @@ function install() {
 }
 
 function install_mason_deps() {
-    install mapnik 3.0.0-rc3
+    install mapnik 3.0.0
     install protobuf 2.6.1
     install freetype 2.5.4
     install harfbuzz 2cd5323
diff --git a/mapnik/__init__.py b/mapnik/__init__.py
index 3eef555..25414c0 100644
--- a/mapnik/__init__.py
+++ b/mapnik/__init__.py
@@ -47,6 +47,7 @@ try:
 except ImportError:
     import simplejson as json
 
+
 def bootstrap_env():
     """
     If an optional settings file exists, inherit its
@@ -60,8 +61,9 @@ def bootstrap_env():
 
         env = {'ICU_DATA':'/usr/local/share/icu/'}
     """
-    if os.path.exists(os.path.join(os.path.dirname(__file__),'mapnik_settings.py')):
-        from mapnik_settings import env
+    if os.path.exists(os.path.join(
+            os.path.dirname(__file__), 'mapnik_settings.py')):
+        from .mapnik_settings import env
         process_keys = os.environ.keys()
         for key, value in env.items():
             if key not in process_keys:
@@ -69,40 +71,46 @@ def bootstrap_env():
 
 bootstrap_env()
 
-from _mapnik import *
+from ._mapnik import *
 
-import printing
+from . import printing
 printing.renderer = render
 
 # The base Boost.Python class
 BoostPythonMetaclass = Coord.__class__
 
+
 class _MapnikMetaclass(BoostPythonMetaclass):
+
     def __init__(self, name, bases, dict):
         for b in bases:
             if type(b) not in (self, type):
-                for k,v in list(dict.items()):
+                for k, v in list(dict.items()):
                     if hasattr(b, k):
-                        setattr(b, '_c_'+k, getattr(b, k))
-                    setattr(b,k,v)
+                        setattr(b, '_c_' + k, getattr(b, k))
+                    setattr(b, k, v)
         return type.__init__(self, name, bases, dict)
 
 # metaclass injector compatible with both python 2 and 3
 # http://mikewatkins.ca/2008/11/29/python-2-and-3-metaclasses/
 _injector = _MapnikMetaclass('_injector', (object, ), {})
 
-def Filter(*args,**kwargs):
+
+def Filter(*args, **kwargs):
     warnings.warn("'Filter' is deprecated and will be removed in Mapnik 3.x, use 'Expression' instead",
-    DeprecationWarning, 2)
+                  DeprecationWarning, 2)
     return Expression(*args, **kwargs)
 
+
 class Envelope(Box2d):
+
     def __init__(self, *args, **kwargs):
         warnings.warn("'Envelope' is deprecated and will be removed in Mapnik 3.x, use 'Box2d' instead",
-        DeprecationWarning, 2)
+                      DeprecationWarning, 2)
         Box2d.__init__(self, *args, **kwargs)
 
-class _Coord(Coord,_injector):
+
+class _Coord(Coord, _injector):
     """
     Represents a point with two coordinates (either lon/lat or x/y).
 
@@ -133,6 +141,7 @@ class _Coord(Coord,_injector):
     >>> Coord(10, 10) == Coord(10, 10)
     True
     """
+
     def __repr__(self):
         return 'Coord(%s,%s)' % (self.x, self.y)
 
@@ -175,7 +184,8 @@ class _Coord(Coord,_injector):
         """
         return inverse_(self, projection)
 
-class _Box2d(Box2d,_injector):
+
+class _Box2d(Box2d, _injector):
     """
     Represents a spatial envelope (i.e. bounding box).
 
@@ -205,7 +215,7 @@ class _Box2d(Box2d,_injector):
 
     def __repr__(self):
         return 'Box2d(%s,%s,%s,%s)' % \
-            (self.minx,self.miny,self.maxx,self.maxy)
+            (self.minx, self.miny, self.maxx, self.maxy)
 
     def forward(self, projection):
         """
@@ -229,12 +239,13 @@ class _Box2d(Box2d,_injector):
         """
         return inverse_(self, projection)
 
-class _Projection(Projection,_injector):
+
+class _Projection(Projection, _injector):
 
     def __repr__(self):
         return "Projection('%s')" % self.params()
 
-    def forward(self,obj):
+    def forward(self, obj):
         """
         Projects the given object (Box2d or Coord)
         from the geographic space into the cartesian space.
@@ -243,9 +254,9 @@ class _Projection(Projection,_injector):
           Box2d.forward(self, projection),
           Coord.forward(self, projection).
         """
-        return forward_(obj,self)
+        return forward_(obj, self)
 
-    def inverse(self,obj):
+    def inverse(self, obj):
         """
         Projects the given object (Box2d or Coord)
         from the cartesian space into the geographic space.
@@ -254,45 +265,53 @@ class _Projection(Projection,_injector):
           Box2d.inverse(self, projection),
           Coord.inverse(self, projection).
         """
-        return inverse_(obj,self)
+        return inverse_(obj, self)
+
 
-class _Feature(Feature,_injector):
+class _Feature(Feature, _injector):
     __geo_interface__ = property(lambda self: json.loads(self.to_geojson()))
 
-class _Geometry(Geometry,_injector):
+
+class _Geometry(Geometry, _injector):
     __geo_interface__ = property(lambda self: json.loads(self.to_geojson()))
 
-class _Datasource(Datasource,_injector):
 
-    def all_features(self,fields=None,variables={}):
+class _Datasource(Datasource, _injector):
+
+    def all_features(self, fields=None, variables={}):
         query = Query(self.envelope())
-        query.set_variables(variables);
+        query.set_variables(variables)
         attributes = fields or self.fields()
         for fld in attributes:
             query.add_property_name(fld)
         return self.features(query).features
 
-    def featureset(self,fields=None,variables={}):
+    def featureset(self, fields=None, variables={}):
         query = Query(self.envelope())
-        query.set_variables(variables);
+        query.set_variables(variables)
         attributes = fields or self.fields()
         for fld in attributes:
             query.add_property_name(fld)
         return self.features(query)
 
-class _Color(Color,_injector):
+
+class _Color(Color, _injector):
+
     def __repr__(self):
-        return "Color(R=%d,G=%d,B=%d,A=%d)" % (self.r,self.g,self.b,self.a)
+        return "Color(R=%d,G=%d,B=%d,A=%d)" % (self.r, self.g, self.b, self.a)
+
 
-class _SymbolizerBase(SymbolizerBase,_injector):
-     # back compatibility
-     @property
-     def filename(self):
-         return self['file']
+class _SymbolizerBase(SymbolizerBase, _injector):
+    # back compatibility
+
+    @property
+    def filename(self):
+        return self['file']
+
+    @filename.setter
+    def filename(self, val):
+        self['file'] = val
 
-     @filename.setter
-     def filename(self, val):
-         self['file'] = val
 
 def _add_symbol_method_to_symbolizers(vars=globals()):
 
@@ -300,7 +319,7 @@ def _add_symbol_method_to_symbolizers(vars=globals()):
         return self
 
     def symbol_for_cls(self):
-        return getattr(self,self.type())()
+        return getattr(self, self.type())()
 
     for name, obj in vars.items():
         if name.endswith('Symbolizer') and not name.startswith('_'):
@@ -308,9 +327,10 @@ def _add_symbol_method_to_symbolizers(vars=globals()):
                 symbol = symbol_for_cls
             else:
                 symbol = symbol_for_subcls
-            type('dummy', (obj,_injector), {'symbol': symbol})
+            type('dummy', (obj, _injector), {'symbol': symbol})
 _add_symbol_method_to_symbolizers()
 
+
 def Datasource(**keywords):
     """Wrapper around CreateDatasource.
 
@@ -329,6 +349,7 @@ def Datasource(**keywords):
 
 # convenience factory methods
 
+
 def Shapefile(**keywords):
     """Create a Shapefile Datasource.
 
@@ -348,6 +369,7 @@ def Shapefile(**keywords):
     keywords['type'] = 'shape'
     return CreateDatasource(keywords)
 
+
 def CSV(**keywords):
     """Create a CSV Datasource.
 
@@ -378,6 +400,7 @@ def CSV(**keywords):
     keywords['type'] = 'csv'
     return CreateDatasource(keywords)
 
+
 def GeoJSON(**keywords):
     """Create a GeoJSON Datasource.
 
@@ -395,6 +418,7 @@ def GeoJSON(**keywords):
     keywords['type'] = 'geojson'
     return CreateDatasource(keywords)
 
+
 def PostGIS(**keywords):
     """Create a PostGIS Datasource.
 
@@ -437,6 +461,7 @@ def PostGIS(**keywords):
     keywords['type'] = 'postgis'
     return CreateDatasource(keywords)
 
+
 def PgRaster(**keywords):
     """Create a PgRaster Datasource.
 
@@ -483,6 +508,7 @@ def PgRaster(**keywords):
     keywords['type'] = 'pgraster'
     return CreateDatasource(keywords)
 
+
 def Raster(**keywords):
     """Create a Raster (Tiff) Datasource.
 
@@ -514,6 +540,7 @@ def Raster(**keywords):
     keywords['type'] = 'raster'
     return CreateDatasource(keywords)
 
+
 def Gdal(**keywords):
     """Create a GDAL Raster Datasource.
 
@@ -534,9 +561,11 @@ def Gdal(**keywords):
     keywords['type'] = 'gdal'
     if 'bbox' in keywords:
         if isinstance(keywords['bbox'], (tuple, list)):
-            keywords['bbox'] = ','.join([str(item) for item in keywords['bbox']])
+            keywords['bbox'] = ','.join([str(item)
+                                         for item in keywords['bbox']])
     return CreateDatasource(keywords)
 
+
 def Occi(**keywords):
     """Create a Oracle Spatial (10g) Vector Datasource.
 
@@ -566,6 +595,7 @@ def Occi(**keywords):
     keywords['type'] = 'occi'
     return CreateDatasource(keywords)
 
+
 def Ogr(**keywords):
     """Create a OGR Vector Datasource.
 
@@ -588,6 +618,7 @@ def Ogr(**keywords):
     keywords['type'] = 'ogr'
     return CreateDatasource(keywords)
 
+
 def SQLite(**keywords):
     """Create a SQLite Datasource.
 
@@ -616,6 +647,7 @@ def SQLite(**keywords):
     keywords['type'] = 'sqlite'
     return CreateDatasource(keywords)
 
+
 def Rasterlite(**keywords):
     """Create a Rasterlite Datasource.
 
@@ -636,6 +668,7 @@ def Rasterlite(**keywords):
     keywords['type'] = 'rasterlite'
     return CreateDatasource(keywords)
 
+
 def Osm(**keywords):
     """Create a Osm Datasource.
 
@@ -658,6 +691,7 @@ def Osm(**keywords):
     keywords['type'] = 'osm'
     return CreateDatasource(keywords)
 
+
 def Python(**keywords):
     """Create a Python Datasource.
 
@@ -669,6 +703,7 @@ def Python(**keywords):
     keywords['type'] = 'python'
     return CreateDatasource(keywords)
 
+
 def MemoryDatasource(**keywords):
     """Create a Memory Datasource.
 
@@ -676,9 +711,10 @@ def MemoryDatasource(**keywords):
         (TODO)
     """
     params = Parameters()
-    params.append(Parameter('type','memory'))
+    params.append(Parameter('type', 'memory'))
     return MemoryDatasourceBase(params)
 
+
 class PythonDatasource(object):
     """A base class for a Python data source.
 
@@ -687,6 +723,7 @@ class PythonDatasource(object):
       geometry_type -- one of the DataGeometryType enumeration values, default Point
       data_type -- one of the DataType enumerations, default Vector
     """
+
     def __init__(self, envelope=None, geometry_type=None, data_type=None):
         self.envelope = envelope or Box2d(-180, -90, 180, 90)
         self.geometry_type = geometry_type or DataGeometryType.Point
@@ -770,14 +807,17 @@ class PythonDatasource(object):
 
         return itertools.imap(make_it, features, itertools.count(1))
 
-class _TextSymbolizer(TextSymbolizer,_injector):
+
+class _TextSymbolizer(TextSymbolizer, _injector):
+
     @property
     def name(self):
         if isinstance(self.properties.format_tree, FormattingText):
             return self.properties.format_tree.text
         else:
             # There is no single expression which could be returned as name
-            raise RuntimeError("TextSymbolizer uses complex formatting features, but old compatibility interface is used to access it. Use self.properties.format_tree instead.")
+            raise RuntimeError(
+                "TextSymbolizer uses complex formatting features, but old compatibility interface is used to access it. Use self.properties.format_tree instead.")
 
     @name.setter
     def name(self, name):
@@ -799,7 +839,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def face_name(self, face_name):
         self.format.face_name = face_name
 
-
     @property
     def fontset(self):
         return self.format.fontset
@@ -808,7 +847,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def fontset(self, fontset):
         self.format.fontset = fontset
 
-
     @property
     def character_spacing(self):
         return self.format.character_spacing
@@ -817,7 +855,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def character_spacing(self, character_spacing):
         self.format.character_spacing = character_spacing
 
-
     @property
     def line_spacing(self):
         return self.format.line_spacing
@@ -826,7 +863,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def line_spacing(self, line_spacing):
         self.format.line_spacing = line_spacing
 
-
     @property
     def text_opacity(self):
         return self.format.text_opacity
@@ -835,7 +871,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def text_opacity(self, text_opacity):
         self.format.text_opacity = text_opacity
 
-
     @property
     def wrap_before(self):
         return self.format.wrap_before
@@ -844,7 +879,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def wrap_before(self, wrap_before):
         self.format.wrap_before = wrap_before
 
-
     @property
     def text_transform(self):
         return self.format.text_transform
@@ -853,7 +887,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def text_transform(self, text_transform):
         self.format.text_transform = text_transform
 
-
     @property
     def fill(self):
         return self.format.fill
@@ -862,7 +895,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def fill(self, fill):
         self.format.fill = fill
 
-
     @property
     def halo_fill(self):
         return self.format.halo_fill
@@ -871,8 +903,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def halo_fill(self, halo_fill):
         self.format.halo_fill = halo_fill
 
-
-
     @property
     def halo_radius(self):
         return self.format.halo_radius
@@ -881,7 +911,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def halo_radius(self, halo_radius):
         self.format.halo_radius = halo_radius
 
-
     @property
     def label_placement(self):
         return self.properties.label_placement
@@ -890,8 +919,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def label_placement(self, label_placement):
         self.properties.label_placement = label_placement
 
-
-
     @property
     def horizontal_alignment(self):
         return self.properties.horizontal_alignment
@@ -900,8 +927,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def horizontal_alignment(self, horizontal_alignment):
         self.properties.horizontal_alignment = horizontal_alignment
 
-
-
     @property
     def justify_alignment(self):
         return self.properties.justify_alignment
@@ -910,8 +935,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def justify_alignment(self, justify_alignment):
         self.properties.justify_alignment = justify_alignment
 
-
-
     @property
     def vertical_alignment(self):
         return self.properties.vertical_alignment
@@ -920,8 +943,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def vertical_alignment(self, vertical_alignment):
         self.properties.vertical_alignment = vertical_alignment
 
-
-
     @property
     def orientation(self):
         return self.properties.orientation
@@ -930,8 +951,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def orientation(self, orientation):
         self.properties.orientation = orientation
 
-
-
     @property
     def displacement(self):
         return self.properties.displacement
@@ -940,8 +959,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def displacement(self, displacement):
         self.properties.displacement = displacement
 
-
-
     @property
     def label_spacing(self):
         return self.properties.label_spacing
@@ -950,8 +967,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def label_spacing(self, label_spacing):
         self.properties.label_spacing = label_spacing
 
-
-
     @property
     def label_position_tolerance(self):
         return self.properties.label_position_tolerance
@@ -960,8 +975,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def label_position_tolerance(self, label_position_tolerance):
         self.properties.label_position_tolerance = label_position_tolerance
 
-
-
     @property
     def avoid_edges(self):
         return self.properties.avoid_edges
@@ -970,8 +983,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def avoid_edges(self, avoid_edges):
         self.properties.avoid_edges = avoid_edges
 
-
-
     @property
     def minimum_distance(self):
         return self.properties.minimum_distance
@@ -980,8 +991,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def minimum_distance(self, minimum_distance):
         self.properties.minimum_distance = minimum_distance
 
-
-
     @property
     def minimum_padding(self):
         return self.properties.minimum_padding
@@ -990,8 +999,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def minimum_padding(self, minimum_padding):
         self.properties.minimum_padding = minimum_padding
 
-
-
     @property
     def minimum_path_length(self):
         return self.properties.minimum_path_length
@@ -1000,8 +1007,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def minimum_path_length(self, minimum_path_length):
         self.properties.minimum_path_length = minimum_path_length
 
-
-
     @property
     def maximum_angle_char_delta(self):
         return self.properties.maximum_angle_char_delta
@@ -1010,7 +1015,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def maximum_angle_char_delta(self, maximum_angle_char_delta):
         self.properties.maximum_angle_char_delta = maximum_angle_char_delta
 
-
     @property
     def allow_overlap(self):
         return self.properties.allow_overlap
@@ -1019,8 +1023,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def allow_overlap(self, allow_overlap):
         self.properties.allow_overlap = allow_overlap
 
-
-
     @property
     def text_ratio(self):
         return self.properties.text_ratio
@@ -1029,8 +1031,6 @@ class _TextSymbolizer(TextSymbolizer,_injector):
     def text_ratio(self, text_ratio):
         self.properties.text_ratio = text_ratio
 
-
-
     @property
     def wrap_width(self):
         return self.properties.wrap_width
@@ -1043,26 +1043,29 @@ class _TextSymbolizer(TextSymbolizer,_injector):
 def mapnik_version_from_string(version_string):
     """Return the Mapnik version from a string."""
     n = version_string.split('.')
-    return (int(n[0]) * 100000) + (int(n[1]) * 100) + (int(n[2]));
+    return (int(n[0]) * 100000) + (int(n[1]) * 100) + (int(n[2]))
+
 
 def register_plugins(path=None):
     """Register plugins located by specified path"""
     if not path:
-        if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'):
+        if 'MAPNIK_INPUT_PLUGINS_DIRECTORY' in os.environ:
             path = os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY')
         else:
-            from paths import inputpluginspath
+            from .paths import inputpluginspath
             path = inputpluginspath
     DatasourceCache.register_datasources(path)
 
-def register_fonts(path=None,valid_extensions=['.ttf','.otf','.ttc','.pfa','.pfb','.ttc','.dfont','.woff']):
+
+def register_fonts(path=None, valid_extensions=[
+                   '.ttf', '.otf', '.ttc', '.pfa', '.pfb', '.ttc', '.dfont', '.woff']):
     """Recursively register fonts using path argument as base directory"""
     if not path:
-       if os.environ.has_key('MAPNIK_FONT_DIRECTORY'):
-           path = os.environ.get('MAPNIK_FONT_DIRECTORY')
-       else:
-           from paths import fontscollectionpath
-           path = fontscollectionpath
+        if 'MAPNIK_FONT_DIRECTORY' in os.environ:
+            path = os.environ.get('MAPNIK_FONT_DIRECTORY')
+        else:
+            from .paths import fontscollectionpath
+            path = fontscollectionpath
     for dirpath, _, filenames in os.walk(path):
         for filename in filenames:
             if os.path.splitext(filename.lower())[1] in valid_extensions:
diff --git a/mapnik/mapnik_settings.py b/mapnik/mapnik_settings.py
index 6c48cea..a5170cf 100644
--- a/mapnik/mapnik_settings.py
+++ b/mapnik/mapnik_settings.py
@@ -1,4 +1,5 @@
 import os
+
 mapnik_data_dir = os.path.dirname(os.path.realpath(__file__))
 
 env = {}
diff --git a/mapnik/printing.py b/mapnik/printing.py
index e61f7c0..4519e79 100644
--- a/mapnik/printing.py
+++ b/mapnik/printing.py
@@ -15,13 +15,15 @@ page.render_map(m,"my_output_file.pdf")
 see the documentation of mapnik.printing.PDFPrinter() for options
 
 """
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
 
-from . import render, Map, Box2d, Layer, Feature, Projection, Coord, Style, Geometry
 import math
 import os
 import tempfile
 
+from . import (Box2d, Coord, Feature, Geometry, Layer, Map, Projection, Style,
+               render)
+
 try:
     import cairo
     HAS_PYCAIRO_MODULE = True
@@ -41,6 +43,7 @@ try:
 except ImportError:
     HAS_PYPDF = False
 
+
 class centering:
     """Style of centering to use with the map, the default is constrained
 
@@ -52,142 +55,152 @@ class centering:
     horizontal:
     both:
     """
-    none=0
-    constrained=1
-    unconstrained=2
-    vertical=3
-    horizontal=4
-    both=5
+    none = 0
+    constrained = 1
+    unconstrained = 2
+    vertical = 3
+    horizontal = 4
+    both = 5
 
 """Some predefined page sizes custom sizes can also be passed
 a tuple of the page width and height in meters"""
 pagesizes = {
-    "a0": (0.841000,1.189000),
-    "a0l": (1.189000,0.841000),
-    "b0": (1.000000,1.414000),
-    "b0l": (1.414000,1.000000),
-    "c0": (0.917000,1.297000),
-    "c0l": (1.297000,0.917000),
-    "a1": (0.594000,0.841000),
-    "a1l": (0.841000,0.594000),
-    "b1": (0.707000,1.000000),
-    "b1l": (1.000000,0.707000),
-    "c1": (0.648000,0.917000),
-    "c1l": (0.917000,0.648000),
-    "a2": (0.420000,0.594000),
-    "a2l": (0.594000,0.420000),
-    "b2": (0.500000,0.707000),
-    "b2l": (0.707000,0.500000),
-    "c2": (0.458000,0.648000),
-    "c2l": (0.648000,0.458000),
-    "a3": (0.297000,0.420000),
-    "a3l": (0.420000,0.297000),
-    "b3": (0.353000,0.500000),
-    "b3l": (0.500000,0.353000),
-    "c3": (0.324000,0.458000),
-    "c3l": (0.458000,0.324000),
-    "a4": (0.210000,0.297000),
-    "a4l": (0.297000,0.210000),
-    "b4": (0.250000,0.353000),
-    "b4l": (0.353000,0.250000),
-    "c4": (0.229000,0.324000),
-    "c4l": (0.324000,0.229000),
-    "a5": (0.148000,0.210000),
-    "a5l": (0.210000,0.148000),
-    "b5": (0.176000,0.250000),
-    "b5l": (0.250000,0.176000),
-    "c5": (0.162000,0.229000),
-    "c5l": (0.229000,0.162000),
-    "a6": (0.105000,0.148000),
-    "a6l": (0.148000,0.105000),
-    "b6": (0.125000,0.176000),
-    "b6l": (0.176000,0.125000),
-    "c6": (0.114000,0.162000),
-    "c6l": (0.162000,0.114000),
-    "a7": (0.074000,0.105000),
-    "a7l": (0.105000,0.074000),
-    "b7": (0.088000,0.125000),
-    "b7l": (0.125000,0.088000),
-    "c7": (0.081000,0.114000),
-    "c7l": (0.114000,0.081000),
-    "a8": (0.052000,0.074000),
-    "a8l": (0.074000,0.052000),
-    "b8": (0.062000,0.088000),
-    "b8l": (0.088000,0.062000),
-    "c8": (0.057000,0.081000),
-    "c8l": (0.081000,0.057000),
-    "a9": (0.037000,0.052000),
-    "a9l": (0.052000,0.037000),
-    "b9": (0.044000,0.062000),
-    "b9l": (0.062000,0.044000),
-    "c9": (0.040000,0.057000),
-    "c9l": (0.057000,0.040000),
-    "a10": (0.026000,0.037000),
-    "a10l": (0.037000,0.026000),
-    "b10": (0.031000,0.044000),
-    "b10l": (0.044000,0.031000),
-    "c10": (0.028000,0.040000),
-    "c10l": (0.040000,0.028000),
-    "letter": (0.216,0.279),
-    "letterl": (0.279,0.216),
-    "legal": (0.216,0.356),
-    "legall": (0.356,0.216),
+    "a0": (0.841000, 1.189000),
+    "a0l": (1.189000, 0.841000),
+    "b0": (1.000000, 1.414000),
+    "b0l": (1.414000, 1.000000),
+    "c0": (0.917000, 1.297000),
+    "c0l": (1.297000, 0.917000),
+    "a1": (0.594000, 0.841000),
+    "a1l": (0.841000, 0.594000),
+    "b1": (0.707000, 1.000000),
+    "b1l": (1.000000, 0.707000),
+    "c1": (0.648000, 0.917000),
+    "c1l": (0.917000, 0.648000),
+    "a2": (0.420000, 0.594000),
+    "a2l": (0.594000, 0.420000),
+    "b2": (0.500000, 0.707000),
+    "b2l": (0.707000, 0.500000),
+    "c2": (0.458000, 0.648000),
+    "c2l": (0.648000, 0.458000),
+    "a3": (0.297000, 0.420000),
+    "a3l": (0.420000, 0.297000),
+    "b3": (0.353000, 0.500000),
+    "b3l": (0.500000, 0.353000),
+    "c3": (0.324000, 0.458000),
+    "c3l": (0.458000, 0.324000),
+    "a4": (0.210000, 0.297000),
+    "a4l": (0.297000, 0.210000),
+    "b4": (0.250000, 0.353000),
+    "b4l": (0.353000, 0.250000),
+    "c4": (0.229000, 0.324000),
+    "c4l": (0.324000, 0.229000),
+    "a5": (0.148000, 0.210000),
+    "a5l": (0.210000, 0.148000),
+    "b5": (0.176000, 0.250000),
+    "b5l": (0.250000, 0.176000),
+    "c5": (0.162000, 0.229000),
+    "c5l": (0.229000, 0.162000),
+    "a6": (0.105000, 0.148000),
+    "a6l": (0.148000, 0.105000),
+    "b6": (0.125000, 0.176000),
+    "b6l": (0.176000, 0.125000),
+    "c6": (0.114000, 0.162000),
+    "c6l": (0.162000, 0.114000),
+    "a7": (0.074000, 0.105000),
+    "a7l": (0.105000, 0.074000),
+    "b7": (0.088000, 0.125000),
+    "b7l": (0.125000, 0.088000),
+    "c7": (0.081000, 0.114000),
+    "c7l": (0.114000, 0.081000),
+    "a8": (0.052000, 0.074000),
+    "a8l": (0.074000, 0.052000),
+    "b8": (0.062000, 0.088000),
+    "b8l": (0.088000, 0.062000),
+    "c8": (0.057000, 0.081000),
+    "c8l": (0.081000, 0.057000),
+    "a9": (0.037000, 0.052000),
+    "a9l": (0.052000, 0.037000),
+    "b9": (0.044000, 0.062000),
+    "b9l": (0.062000, 0.044000),
+    "c9": (0.040000, 0.057000),
+    "c9l": (0.057000, 0.040000),
+    "a10": (0.026000, 0.037000),
+    "a10l": (0.037000, 0.026000),
+    "b10": (0.031000, 0.044000),
+    "b10l": (0.044000, 0.031000),
+    "c10": (0.028000, 0.040000),
+    "c10l": (0.040000, 0.028000),
+    "letter": (0.216, 0.279),
+    "letterl": (0.279, 0.216),
+    "legal": (0.216, 0.356),
+    "legall": (0.356, 0.216),
 }
 """size of a pt in meters"""
-pt_size=0.0254/72.0
+pt_size = 0.0254 / 72.0
+
 
 def m2pt(x):
     """convert distance from meters to points"""
-    return x/pt_size
+    return x / pt_size
+
 
 def pt2m(x):
     """convert distance from points to meters"""
-    return x*pt_size
+    return x * pt_size
+
 
 def m2in(x):
     """convert distance from meters to inches"""
-    return x/0.0254
+    return x / 0.0254
+
 
-def m2px(x,resolution):
+def m2px(x, resolution):
     """convert distance from meters to pixels at the given resolution in DPI/PPI"""
-    return m2in(x)*resolution
+    return m2in(x) * resolution
+
 
 class resolutions:
     """some predefined resolutions in DPI"""
-    dpi72=72
-    dpi150=150
-    dpi300=300
-    dpi600=600
+    dpi72 = 72
+    dpi150 = 150
+    dpi300 = 300
+    dpi600 = 600
+
 
 def any_scale(scale):
     """Scale helper function that allows any scale"""
     return scale
 
-def sequence_scale(scale,scale_sequence):
+
+def sequence_scale(scale, scale_sequence):
     """Default scale helper, this rounds scale to a 'sensible' value"""
     factor = math.floor(math.log10(scale))
-    norm = scale/(10**factor)
+    norm = scale / (10**factor)
 
     for s in scale_sequence:
         if norm <= s:
-            return s*10**factor
-    return scale_sequence[0]*10**(factor+1)
+            return s * 10**factor
+    return scale_sequence[0] * 10**(factor + 1)
+
 
 def default_scale(scale):
     """Default scale helper, this rounds scale to a 'sensible' value"""
-    return sequence_scale(scale, (1,1.25,1.5,1.75,2,2.5,3,4,5,6,7.5,8,9,10))
+    return sequence_scale(scale, (1, 1.25, 1.5, 1.75, 2,
+                                  2.5, 3, 4, 5, 6, 7.5, 8, 9, 10))
+
 
 def deg_min_sec_scale(scale):
-    for x in (1.0/3600,
-              2.0/3600,
-              5.0/3600,
-              10.0/3600,
-              30.0/3600,
-              1.0/60,
-              2.0/60,
-              5.0/60,
-              10.0/60,
-              30.0/60,
+    for x in (1.0 / 3600,
+              2.0 / 3600,
+              5.0 / 3600,
+              10.0 / 3600,
+              30.0 / 3600,
+              1.0 / 60,
+              2.0 / 60,
+              5.0 / 60,
+              10.0 / 60,
+              30.0 / 60,
               1,
               2,
               5,
@@ -200,21 +213,24 @@ def deg_min_sec_scale(scale):
     else:
         return x
 
+
 def format_deg_min_sec(value):
     deg = math.floor(value)
-    min = math.floor((value-deg)/(1.0/60))
-    sec = int((value - deg*1.0/60)/1.0/3600)
-    return "%d°%d'%d\"" % (deg,min,sec)
+    min = math.floor((value - deg) / (1.0 / 60))
+    sec = int((value - deg * 1.0 / 60) / 1.0 / 3600)
+    return "%d°%d'%d\"" % (deg, min, sec)
+
 
-def round_grid_generator(first,last,step):
-        val = (math.floor(first / step) + 1) * step
+def round_grid_generator(first, last, step):
+    val = (math.floor(first / step) + 1) * step
+    yield val
+    while val < last:
+        val += step
         yield val
-        while val < last:
-            val += step
-            yield val
 
 
-def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse_all_but_last=True):
+def convert_pdf_pages_to_layers(
+        filename, output_name=None, layer_names=(), reverse_all_but_last=True):
     """
     opens the given multipage PDF and converts each page to be a layer in a single page PDF
     layer_names should be a sequence of the user visible names of the layers, if not given
@@ -225,7 +241,6 @@ def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse
 
     requires pyPdf >= 1.13 to be available"""
 
-
     if not HAS_PYPDF:
         raise Exception("pyPdf Not available")
 
@@ -233,14 +248,16 @@ def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse
     if output_name:
         outfile = file(output_name, 'wb')
     else:
-        (outfd,outfilename) = tempfile.mkstemp(dir=os.path.dirname(filename))
-        outfile = os.fdopen(outfd,'wb')
+        (outfd, outfilename) = tempfile.mkstemp(dir=os.path.dirname(filename))
+        outfile = os.fdopen(outfd, 'wb')
 
     i = pyPdf.PdfFileReader(infile)
     o = pyPdf.PdfFileWriter()
 
     template_page_size = i.pages[0].mediaBox
-    op = o.addBlankPage(width=template_page_size.getWidth(),height=template_page_size.getHeight())
+    op = o.addBlankPage(
+        width=template_page_size.getWidth(),
+        height=template_page_size.getHeight())
 
     contentkey = pyPdf.generic.NameObject('/Contents')
     resourcekey = pyPdf.generic.NameObject('/Resources')
@@ -257,20 +274,24 @@ def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse
         ocgstart._data = "/OC %s BDC\n" % ocgname
         ocgend = pyPdf.generic.DecodedStreamObject()
         ocgend._data = "EMC\n"
-        if isinstance(p['/Contents'],pyPdf.generic.ArrayObject):
-            p[pyPdf.generic.NameObject('/Contents')].insert(0,ocgstart)
+        if isinstance(p['/Contents'], pyPdf.generic.ArrayObject):
+            p[pyPdf.generic.NameObject('/Contents')].insert(0, ocgstart)
             p[pyPdf.generic.NameObject('/Contents')].append(ocgend)
         else:
-            p[pyPdf.generic.NameObject('/Contents')] = pyPdf.generic.ArrayObject((ocgstart,p['/Contents'],ocgend))
+            p[pyPdf.generic.NameObject(
+                '/Contents')] = pyPdf.generic.ArrayObject((ocgstart, p['/Contents'], ocgend))
 
         op.mergePage(p)
 
         ocg = pyPdf.generic.DictionaryObject()
-        ocg[pyPdf.generic.NameObject('/Type')] = pyPdf.generic.NameObject('/OCG')
+        ocg[pyPdf.generic.NameObject(
+            '/Type')] = pyPdf.generic.NameObject('/OCG')
         if len(layer_names) > i:
-            ocg[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject(layer_names[i])
+            ocg[pyPdf.generic.NameObject(
+                '/Name')] = pyPdf.generic.TextStringObject(layer_names[i])
         else:
-            ocg[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject('Layer %d' % (i+1))
+            ocg[pyPdf.generic.NameObject(
+                '/Name')] = pyPdf.generic.TextStringObject('Layer %d' % (i + 1))
         indirect_ocg = o._addObject(ocg)
         properties[ocgname] = indirect_ocg
         ocgs.append(indirect_ocg)
@@ -280,19 +301,24 @@ def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse
     ocproperties = pyPdf.generic.DictionaryObject()
     ocproperties[pyPdf.generic.NameObject('/OCGs')] = ocgs
     defaultview = pyPdf.generic.DictionaryObject()
-    defaultview[pyPdf.generic.NameObject('/Name')] = pyPdf.generic.TextStringObject('Default')
-    defaultview[pyPdf.generic.NameObject('/BaseState ')] = pyPdf.generic.NameObject('/ON ')
+    defaultview[pyPdf.generic.NameObject(
+        '/Name')] = pyPdf.generic.TextStringObject('Default')
+    defaultview[pyPdf.generic.NameObject(
+        '/BaseState ')] = pyPdf.generic.NameObject('/ON ')
     defaultview[pyPdf.generic.NameObject('/ON')] = ocgs
     if reverse_all_but_last:
-        defaultview[pyPdf.generic.NameObject('/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs[:-1]))
+        defaultview[pyPdf.generic.NameObject(
+            '/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs[:-1]))
         defaultview[pyPdf.generic.NameObject('/Order')].append(ocgs[-1])
     else:
-        defaultview[pyPdf.generic.NameObject('/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs))
+        defaultview[pyPdf.generic.NameObject(
+            '/Order')] = pyPdf.generic.ArrayObject(reversed(ocgs))
     defaultview[pyPdf.generic.NameObject('/OFF')] = pyPdf.generic.ArrayObject()
 
     ocproperties[pyPdf.generic.NameObject('/D')] = o._addObject(defaultview)
 
-    o._root.getObject()[pyPdf.generic.NameObject('/OCProperties')] = o._addObject(ocproperties)
+    o._root.getObject()[pyPdf.generic.NameObject(
+        '/OCProperties')] = o._addObject(ocproperties)
 
     o.write(outfile)
 
@@ -302,10 +328,12 @@ def convert_pdf_pages_to_layers(filename,output_name=None,layer_names=(),reverse
     if not output_name:
         os.rename(outfilename, filename)
 
+
 class PDFPrinter:
     """Main class for creating PDF print outs, basically contruct an instance
     with appropriate options and then call render_map with your mapnik map
     """
+
     def __init__(self,
                  pagesize=pagesizes["a4"],
                  margin=0.005,
@@ -356,16 +384,18 @@ class PDFPrinter:
         self.map_box = None
         self.scale = None
 
-        # don't both to round the scale if they are not preserving the aspect ratio
+        # don't both to round the scale if they are not preserving the aspect
+        # ratio
         if not preserve_aspect:
             self._scale = any_scale
 
         if percent_box:
-            self._box = Box2d(percent_box[0]*pagesize[0],percent_box[1]*pagesize[1],
-                         percent_box[2]*pagesize[0],percent_box[3]*pagesize[1])
+            self._box = Box2d(percent_box[0] * pagesize[0], percent_box[1] * pagesize[1],
+                              percent_box[2] * pagesize[0], percent_box[3] * pagesize[1])
 
         if not HAS_PYCAIRO_MODULE:
-            raise Exception("PDF rendering only available when pycairo is available")
+            raise Exception(
+                "PDF rendering only available when pycairo is available")
 
         self.font_name = "DejaVu Sans"
 
@@ -375,9 +405,13 @@ class PDFPrinter:
             self._s = None
 
         if self._use_ocg_layers:
-            convert_pdf_pages_to_layers(self._filename,layer_names=self._layer_names + ["Legend and Information"],reverse_all_but_last=True)
+            convert_pdf_pages_to_layers(
+                self._filename,
+                layer_names=self._layer_names +
+                ["Legend and Information"],
+                reverse_all_but_last=True)
 
-    def add_geospatial_pdf_header(self,m,filename,epsg=None,wkt=None):
+    def add_geospatial_pdf_header(self, m, filename, epsg=None, wkt=None):
         """ Postprocessing step to add geospatial PDF information to PDF file as per
         PDF standard 1.7 extension level 3 (also in draft PDF v2 standard at time of writing)
 
@@ -385,64 +419,74 @@ class PDFPrinter:
 
         Should be called *after* the page has had .finish() called"""
         if HAS_PYPDF and (epsg or wkt):
-            infile=file(filename,'rb')
-            (outfd,outfilename) = tempfile.mkstemp(dir=os.path.dirname(filename))
-            outfile = os.fdopen(outfd,'wb')
+            infile = file(filename, 'rb')
+            (outfd, outfilename) = tempfile.mkstemp(
+                dir=os.path.dirname(filename))
+            outfile = os.fdopen(outfd, 'wb')
 
-            i=pyPdf.PdfFileReader(infile)
-            o=pyPdf.PdfFileWriter()
+            i = pyPdf.PdfFileReader(infile)
+            o = pyPdf.PdfFileWriter()
 
             # preserve OCProperties at document root if we have one
-            if i.trailer['/Root'].has_key(pyPdf.generic.NameObject('/OCProperties')):
-                o._root.getObject()[pyPdf.generic.NameObject('/OCProperties')] = i.trailer['/Root'].getObject()[pyPdf.generic.NameObject('/OCProperties')]
+            if pyPdf.generic.NameObject('/OCProperties') in i.trailer['/Root']:
+                o._root.getObject()[pyPdf.generic.NameObject('/OCProperties')] = i.trailer[
+                    '/Root'].getObject()[pyPdf.generic.NameObject('/OCProperties')]
 
             for p in i.pages:
                 gcs = pyPdf.generic.DictionaryObject()
-                gcs[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/PROJCS')
+                gcs[pyPdf.generic.NameObject(
+                    '/Type')] = pyPdf.generic.NameObject('/PROJCS')
                 if epsg:
-                    gcs[pyPdf.generic.NameObject('/EPSG')]=pyPdf.generic.NumberObject(int(epsg))
+                    gcs[pyPdf.generic.NameObject(
+                        '/EPSG')] = pyPdf.generic.NumberObject(int(epsg))
                 if wkt:
-                    gcs[pyPdf.generic.NameObject('/WKT')]=pyPdf.generic.TextStringObject(wkt)
+                    gcs[pyPdf.generic.NameObject(
+                        '/WKT')] = pyPdf.generic.TextStringObject(wkt)
 
                 measure = pyPdf.generic.DictionaryObject()
-                measure[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/Measure')
-                measure[pyPdf.generic.NameObject('/Subtype')]=pyPdf.generic.NameObject('/GEO')
-                measure[pyPdf.generic.NameObject('/GCS')]=gcs
-                bounds=pyPdf.generic.ArrayObject()
-                for x in (0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0):
+                measure[pyPdf.generic.NameObject(
+                    '/Type')] = pyPdf.generic.NameObject('/Measure')
+                measure[pyPdf.generic.NameObject(
+                    '/Subtype')] = pyPdf.generic.NameObject('/GEO')
+                measure[pyPdf.generic.NameObject('/GCS')] = gcs
+                bounds = pyPdf.generic.ArrayObject()
+                for x in (0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0):
                     bounds.append(pyPdf.generic.FloatObject(str(x)))
-                measure[pyPdf.generic.NameObject('/Bounds')]=bounds
-                measure[pyPdf.generic.NameObject('/LPTS')]=bounds
-                gpts=pyPdf.generic.ArrayObject()
-
-                proj=Projection(m.srs)
-                env=m.envelope()
-                for x in ((env.minx, env.miny), (env.minx, env.maxy), (env.maxx, env.maxy), (env.maxx, env.miny)):
-                    latlon_corner=proj.inverse(Coord(*x))
+                measure[pyPdf.generic.NameObject('/Bounds')] = bounds
+                measure[pyPdf.generic.NameObject('/LPTS')] = bounds
+                gpts = pyPdf.generic.ArrayObject()
+
+                proj = Projection(m.srs)
+                env = m.envelope()
+                for x in ((env.minx, env.miny), (env.minx, env.maxy),
+                          (env.maxx, env.maxy), (env.maxx, env.miny)):
+                    latlon_corner = proj.inverse(Coord(*x))
                     # these are in lat,lon order according to the standard
-                    gpts.append(pyPdf.generic.FloatObject(str(latlon_corner.y)))
-                    gpts.append(pyPdf.generic.FloatObject(str(latlon_corner.x)))
-                measure[pyPdf.generic.NameObject('/GPTS')]=gpts
+                    gpts.append(pyPdf.generic.FloatObject(
+                        str(latlon_corner.y)))
+                    gpts.append(pyPdf.generic.FloatObject(
+                        str(latlon_corner.x)))
+                measure[pyPdf.generic.NameObject('/GPTS')] = gpts
 
-                vp=pyPdf.generic.DictionaryObject()
-                vp[pyPdf.generic.NameObject('/Type')]=pyPdf.generic.NameObject('/Viewport')
-                bbox=pyPdf.generic.ArrayObject()
+                vp = pyPdf.generic.DictionaryObject()
+                vp[pyPdf.generic.NameObject(
+                    '/Type')] = pyPdf.generic.NameObject('/Viewport')
+                bbox = pyPdf.generic.ArrayObject()
 
                 for x in self.map_box:
                     bbox.append(pyPdf.generic.FloatObject(str(x)))
-                vp[pyPdf.generic.NameObject('/BBox')]=bbox
-                vp[pyPdf.generic.NameObject('/Measure')]=measure
+                vp[pyPdf.generic.NameObject('/BBox')] = bbox
+                vp[pyPdf.generic.NameObject('/Measure')] = measure
 
                 vpa = pyPdf.generic.ArrayObject()
                 vpa.append(vp)
-                p[pyPdf.generic.NameObject('/VP')]=vpa
+                p[pyPdf.generic.NameObject('/VP')] = vpa
                 o.addPage(p)
 
             o.write(outfile)
-            infile=None
+            infile = None
             outfile.close()
-            os.rename(outfilename,filename)
-
+            os.rename(outfilename, filename)
 
     def get_context(self):
         """allow access so that extra 'bits' can be rendered to the page directly"""
@@ -457,16 +501,17 @@ class PDFPrinter:
     def get_margin(self):
         return self._margin
 
-    def write_text(self,ctx,text,box_width=None,size=10, fill_color=(0.0, 0.0, 0.0), alignment=None):
+    def write_text(self, ctx, text, box_width=None, size=10,
+                   fill_color=(0.0, 0.0, 0.0), alignment=None):
         if HAS_PANGOCAIRO_MODULE:
-            (attr,t,accel) = pango.parse_markup(text)
+            (attr, t, accel) = pango.parse_markup(text)
             pctx = pangocairo.CairoContext(ctx)
             l = pctx.create_layout()
             l.set_attributes(attr)
-            fd = pango.FontDescription("%s %d" % (self.font_name,size))
+            fd = pango.FontDescription("%s %d" % (self.font_name, size))
             l.set_font_description(fd)
             if box_width:
-                l.set_width(int(box_width*pango.SCALE))
+                l.set_width(int(box_width * pango.SCALE))
             if alignment:
                 l.set_alignment(alignment)
             pctx.update_layout(l)
@@ -476,12 +521,15 @@ class PDFPrinter:
             return l.get_pixel_extents()[0]
 
         else:
-            ctx.rel_move_to(0,size)
-            ctx.select_font_face(self.font_name, cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
+            ctx.rel_move_to(0, size)
+            ctx.select_font_face(
+                self.font_name,
+                cairo.FONT_SLANT_NORMAL,
+                cairo.FONT_WEIGHT_NORMAL)
             ctx.set_font_size(size)
             ctx.show_text(text)
-            ctx.rel_move_to(0,size)
-            return (0,0,len(text)*size,size)
+            ctx.rel_move_to(0, size)
+            return (0, 0, len(text) * size, size)
 
     def _get_context(self):
         if HAS_PANGOCAIRO_MODULE:
@@ -495,9 +543,15 @@ class PDFPrinter:
         in page coordinates (i.e. meters)
         """
         # take off our page margins
-        render_area = Box2d(self._margin,self._margin,self._pagesize[0]-self._margin,self._pagesize[1]-self._margin)
-
-        #then if user specified a box to render get intersection with that
+        render_area = Box2d(
+            self._margin,
+            self._margin,
+            self._pagesize[0] -
+            self._margin,
+            self._pagesize[1] -
+            self._margin)
+
+        # then if user specified a box to render get intersection with that
         if self._box:
             return render_area.intersect(self._box)
 
@@ -506,118 +560,130 @@ class PDFPrinter:
     def _get_render_area_size(self):
         """Get the width and height (in meters) of the area we can render the map to, returned as a tuple"""
         render_area = self._get_render_area()
-        return (render_area.width(),render_area.height())
+        return (render_area.width(), render_area.height())
 
-    def _is_h_contrained(self,m):
+    def _is_h_contrained(self, m):
         """Test if the map size is constrained on the horizontal or vertical axes"""
         available_area = self._get_render_area_size()
-        map_aspect = m.envelope().width()/m.envelope().height()
-        page_aspect = available_area[0]/available_area[1]
+        map_aspect = m.envelope().width() / m.envelope().height()
+        page_aspect = available_area[0] / available_area[1]
 
         return map_aspect > page_aspect
 
-    def _get_meta_info_corner(self,render_size,m):
+    def _get_meta_info_corner(self, render_size, m):
         """Get the corner (in page coordinates) of a possibly
         sensible place to render metadata such as a legend or scale"""
-        (x,y) = self._get_render_corner(render_size,m)
+        (x, y) = self._get_render_corner(render_size, m)
         if self._is_h_contrained(m):
-            y += render_size[1]+0.005
+            y += render_size[1] + 0.005
             x = self._margin
         else:
-            x += render_size[0]+0.005
+            x += render_size[0] + 0.005
             y = self._margin
 
-        return (x,y)
+        return (x, y)
 
-    def _get_render_corner(self,render_size,m):
+    def _get_render_corner(self, render_size, m):
         """Get the corner of the box we should render our map into"""
         available_area = self._get_render_area()
 
-        x=available_area[0]
-        y=available_area[1]
+        x = available_area[0]
+        y = available_area[1]
 
         h_is_contrained = self._is_h_contrained(m)
 
         if (self._centering == centering.both or
-            self._centering == centering.horizontal or
-            (self._centering == centering.constrained and h_is_contrained) or
-            (self._centering == centering.unconstrained and not h_is_contrained)):
-            x+=(available_area.width()-render_size[0])/2
+                self._centering == centering.horizontal or
+                (self._centering == centering.constrained and h_is_contrained) or
+                (self._centering == centering.unconstrained and not h_is_contrained)):
+            x += (available_area.width() - render_size[0]) / 2
 
         if (self._centering == centering.both or
-            self._centering == centering.vertical or
-            (self._centering == centering.constrained and not h_is_contrained) or
-            (self._centering == centering.unconstrained and h_is_contrained)):
-            y+=(available_area.height()-render_size[1])/2
-        return (x,y)
+                self._centering == centering.vertical or
+                (self._centering == centering.constrained and not h_is_contrained) or
+                (self._centering == centering.unconstrained and h_is_contrained)):
+            y += (available_area.height() - render_size[1]) / 2
+        return (x, y)
 
     def _get_map_pixel_size(self, width_page_m, height_page_m):
         """for a given map size in paper coordinates return a tuple of the map 'pixel' size we
         should create at the defined resolution"""
-        return (int(m2px(width_page_m,self._resolution)), int(m2px(height_page_m,self._resolution)))
+        return (int(m2px(width_page_m, self._resolution)),
+                int(m2px(height_page_m, self._resolution)))
 
-    def render_map(self,m, filename):
+    def render_map(self, m, filename):
         """Render the given map to filename"""
 
         # store this for later so we can post process the PDF
         self._filename = filename
 
-        # work out the best scale to render out map at given the available space
-        (eff_width,eff_height) = self._get_render_area_size()
-        map_aspect = m.envelope().width()/m.envelope().height()
-        page_aspect = eff_width/eff_height
+        # work out the best scale to render out map at given the available
+        # space
+        (eff_width, eff_height) = self._get_render_area_size()
+        map_aspect = m.envelope().width() / m.envelope().height()
+        page_aspect = eff_width / eff_height
 
-        scalex=m.envelope().width()/eff_width
-        scaley=m.envelope().height()/eff_height
+        scalex = m.envelope().width() / eff_width
+        scaley = m.envelope().height() / eff_height
 
-        scale=max(scalex,scaley)
+        scale = max(scalex, scaley)
 
-        rounded_mapscale=self._scale(scale)
-        scalefactor = scale/rounded_mapscale
-        mapw=eff_width*scalefactor
-        maph=eff_height*scalefactor
+        rounded_mapscale = self._scale(scale)
+        scalefactor = scale / rounded_mapscale
+        mapw = eff_width * scalefactor
+        maph = eff_height * scalefactor
         if self._preserve_aspect:
             if map_aspect > page_aspect:
-                maph=mapw*(1/map_aspect)
+                maph = mapw * (1 / map_aspect)
             else:
-                mapw=maph*map_aspect
+                mapw = maph * map_aspect
 
-        # set the map size so that raster elements render at the correct resolution
-        m.resize(*self._get_map_pixel_size(mapw,maph))
+        # set the map size so that raster elements render at the correct
+        # resolution
+        m.resize(*self._get_map_pixel_size(mapw, maph))
         # calculate the translation for the map starting point
-        (tx,ty) = self._get_render_corner((mapw,maph),m)
+        (tx, ty) = self._get_render_corner((mapw, maph), m)
 
         # create our cairo surface and context and then render the map into it
-        self._s = cairo.PDFSurface(filename, m2pt(self._pagesize[0]),m2pt(self._pagesize[1]))
-        ctx=cairo.Context(self._s)
+        self._s = cairo.PDFSurface(
+            filename, m2pt(
+                self._pagesize[0]), m2pt(
+                self._pagesize[1]))
+        ctx = cairo.Context(self._s)
 
         for l in m.layers:
             # extract the layer names for naming layers if we use OCG
             self._layer_names.append(l.name)
 
-            layer_map = Map(m.width,m.height,m.srs)
+            layer_map = Map(m.width, m.height, m.srs)
             layer_map.layers.append(l)
             for s in l.styles:
-                layer_map.append_style(s,m.find_style(s))
+                layer_map.append_style(s, m.find_style(s))
             layer_map.zoom_to_box(m.envelope())
 
             def render_map():
                 ctx.save()
-                ctx.translate(m2pt(tx),m2pt(ty))
-                #cairo defaults to 72dpi
-                ctx.scale(72.0/self._resolution,72.0/self._resolution)
+                ctx.translate(m2pt(tx), m2pt(ty))
+                # cairo defaults to 72dpi
+                ctx.scale(72.0 / self._resolution, 72.0 / self._resolution)
                 render(layer_map, ctx)
                 ctx.restore()
 
             # antimeridian
             render_map()
-            if self._is_latlon and (m.envelope().minx < -180 or m.envelope().maxx > 180):
+            if self._is_latlon and (
+                    m.envelope().minx < -180 or m.envelope().maxx > 180):
                 old_env = m.envelope()
                 if m.envelope().minx < -180:
                     delta = 360
                 else:
                     delta = -360
-                m.zoom_to_box(Box2d(old_env.minx+delta,old_env.miny,old_env.maxx+delta,old_env.maxy))
+                m.zoom_to_box(
+                    Box2d(
+                        old_env.minx + delta,
+                        old_env.miny,
+                        old_env.maxx + delta,
+                        old_env.maxy))
                 render_map()
                 # restore the original env
                 m.zoom_to_box(old_env)
@@ -626,77 +692,118 @@ class PDFPrinter:
                 self._s.show_page()
 
         self.scale = rounded_mapscale
-        self.map_box = Box2d(tx,ty,tx+mapw,ty+maph)
+        self.map_box = Box2d(tx, ty, tx + mapw, ty + maph)
 
-    def render_on_map_lat_lon_grid(self,m,dec_degrees=True):
+    def render_on_map_lat_lon_grid(self, m, dec_degrees=True):
         # don't render lat_lon grid if we are already in latlon
         if self._is_latlon:
             return
-        p2=Projection(m.srs)
+        p2 = Projection(m.srs)
 
         latlon_bounds = p2.inverse(m.envelope())
         if p2.inverse(m.envelope().center()).x > latlon_bounds.maxx:
-            latlon_bounds = Box2d(latlon_bounds.maxx,latlon_bounds.miny,latlon_bounds.minx+360,latlon_bounds.maxy)
+            latlon_bounds = Box2d(
+                latlon_bounds.maxx,
+                latlon_bounds.miny,
+                latlon_bounds.minx + 360,
+                latlon_bounds.maxy)
 
         if p2.inverse(m.envelope().center()).y > latlon_bounds.maxy:
-            latlon_bounds = Box2d(latlon_bounds.miny,latlon_bounds.maxy,latlon_bounds.maxx,latlon_bounds.miny+360)
+            latlon_bounds = Box2d(
+                latlon_bounds.miny,
+                latlon_bounds.maxy,
+                latlon_bounds.maxx,
+                latlon_bounds.miny + 360)
 
         latlon_mapwidth = latlon_bounds.width()
         # render an extra 20% so we generally won't miss the ends of lines
-        latlon_buffer = 0.2*latlon_mapwidth
+        latlon_buffer = 0.2 * latlon_mapwidth
         if dec_degrees:
-            latlon_divsize = default_scale(latlon_mapwidth/7.0)
+            latlon_divsize = default_scale(latlon_mapwidth / 7.0)
         else:
-            latlon_divsize = deg_min_sec_scale(latlon_mapwidth/7.0)
-        latlon_interpsize = latlon_mapwidth/m.width
-
-        self._render_lat_lon_axis(m,p2,latlon_bounds.minx,latlon_bounds.maxx,latlon_bounds.miny,latlon_bounds.maxy,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,True)
-        self._render_lat_lon_axis(m,p2,latlon_bounds.miny,latlon_bounds.maxy,latlon_bounds.minx,latlon_bounds.maxx,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,False)
-
-    def _render_lat_lon_axis(self,m,p2,x1,x2,y1,y2,latlon_buffer,latlon_interpsize,latlon_divsize,dec_degrees,is_x_axis):
-        ctx=cairo.Context(self._s)
-        ctx.set_source_rgb(1,0,0)
+            latlon_divsize = deg_min_sec_scale(latlon_mapwidth / 7.0)
+        latlon_interpsize = latlon_mapwidth / m.width
+
+        self._render_lat_lon_axis(
+            m,
+            p2,
+            latlon_bounds.minx,
+            latlon_bounds.maxx,
+            latlon_bounds.miny,
+            latlon_bounds.maxy,
+            latlon_buffer,
+            latlon_interpsize,
+            latlon_divsize,
+            dec_degrees,
+            True)
+        self._render_lat_lon_axis(
+            m,
+            p2,
+            latlon_bounds.miny,
+            latlon_bounds.maxy,
+            latlon_bounds.minx,
+            latlon_bounds.maxx,
+            latlon_buffer,
+            latlon_interpsize,
+            latlon_divsize,
+            dec_degrees,
+            False)
+
+    def _render_lat_lon_axis(self, m, p2, x1, x2, y1, y2, latlon_buffer,
+                             latlon_interpsize, latlon_divsize, dec_degrees, is_x_axis):
+        ctx = cairo.Context(self._s)
+        ctx.set_source_rgb(1, 0, 0)
         ctx.set_line_width(1)
         latlon_labelsize = 6
 
-        ctx.translate(m2pt(self.map_box.minx),m2pt(self.map_box.miny))
-        ctx.rectangle(0,0,m2pt(self.map_box.width()),m2pt(self.map_box.height()))
+        ctx.translate(m2pt(self.map_box.minx), m2pt(self.map_box.miny))
+        ctx.rectangle(
+            0, 0, m2pt(
+                self.map_box.width()), m2pt(
+                self.map_box.height()))
         ctx.clip()
 
-        ctx.select_font_face("DejaVu", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
+        ctx.select_font_face(
+            "DejaVu",
+            cairo.FONT_SLANT_NORMAL,
+            cairo.FONT_WEIGHT_NORMAL)
         ctx.set_font_size(latlon_labelsize)
 
         box_top = self.map_box.height()
         if not is_x_axis:
-            ctx.translate(m2pt(self.map_box.width()/2),m2pt(self.map_box.height()/2))
-            ctx.rotate(-math.pi/2)
-            ctx.translate(-m2pt(self.map_box.height()/2),-m2pt(self.map_box.width()/2))
+            ctx.translate(m2pt(self.map_box.width() / 2),
+                          m2pt(self.map_box.height() / 2))
+            ctx.rotate(-math.pi / 2)
+            ctx.translate(-m2pt(self.map_box.height() / 2), -
+                          m2pt(self.map_box.width() / 2))
             box_top = self.map_box.width()
 
-        for xvalue in round_grid_generator(x1 - latlon_buffer,x2 + latlon_buffer,latlon_divsize):
+        for xvalue in round_grid_generator(
+                x1 - latlon_buffer, x2 + latlon_buffer, latlon_divsize):
             yvalue = y1 - latlon_buffer
             start_cross = None
             end_cross = None
-            while yvalue < y2+latlon_buffer:
+            while yvalue < y2 + latlon_buffer:
                 if is_x_axis:
-                    start = m.view_transform().forward(p2.forward(Coord(xvalue,yvalue)))
+                    start = m.view_transform().forward(p2.forward(Coord(xvalue, yvalue)))
                 else:
-                    temp = m.view_transform().forward(p2.forward(Coord(yvalue,xvalue)))
-                    start = Coord(m2pt(self.map_box.height())-temp.y,temp.x)
+                    temp = m.view_transform().forward(p2.forward(Coord(yvalue, xvalue)))
+                    start = Coord(m2pt(self.map_box.height()) - temp.y, temp.x)
                 yvalue += latlon_interpsize
                 if is_x_axis:
-                    end = m.view_transform().forward(p2.forward(Coord(xvalue,yvalue)))
+                    end = m.view_transform().forward(p2.forward(Coord(xvalue, yvalue)))
                 else:
-                    temp = m.view_transform().forward(p2.forward(Coord(yvalue,xvalue)))
-                    end = Coord(m2pt(self.map_box.height())-temp.y,temp.x)
+                    temp = m.view_transform().forward(p2.forward(Coord(yvalue, xvalue)))
+                    end = Coord(m2pt(self.map_box.height()) - temp.y, temp.x)
 
-                ctx.move_to(start.x,start.y)
-                ctx.line_to(end.x,end.y)
+                ctx.move_to(start.x, start.y)
+                ctx.line_to(end.x, end.y)
                 ctx.stroke()
 
-                if cmp(start.y, 0) != cmp(end.y,0):
+                if cmp(start.y, 0) != cmp(end.y, 0):
                     start_cross = end.x
-                if cmp(start.y,m2pt(self.map_box.height())) != cmp(end.y, m2pt(self.map_box.height())):
+                if cmp(start.y, m2pt(self.map_box.height())) != cmp(
+                        end.y, m2pt(self.map_box.height())):
                     end_cross = end.x
 
             if dec_degrees:
@@ -704,91 +811,136 @@ class PDFPrinter:
             else:
                 line_text = format_deg_min_sec(xvalue)
             if start_cross:
-                ctx.move_to(start_cross+2,latlon_labelsize)
+                ctx.move_to(start_cross + 2, latlon_labelsize)
                 ctx.show_text(line_text)
             if end_cross:
-                ctx.move_to(end_cross+2,m2pt(box_top)-2)
+                ctx.move_to(end_cross + 2, m2pt(box_top) - 2)
                 ctx.show_text(line_text)
 
-    def render_on_map_scale(self,m):
-        (div_size,page_div_size) = self._get_sensible_scalebar_size(m)
-
-        first_value_x = (math.floor(m.envelope().minx / div_size) + 1) * div_size
-        first_value_x_percent = (first_value_x-m.envelope().minx)/m.envelope().width()
-        self._render_scale_axis(first_value_x,first_value_x_percent,self.map_box.minx,self.map_box.maxx,page_div_size,div_size,self.map_box.miny,self.map_box.maxy,True)
-
-        first_value_y = (math.floor(m.envelope().miny / div_size) + 1) * div_size
-        first_value_y_percent = (first_value_y-m.envelope().miny)/m.envelope().height()
-        self._render_scale_axis(first_value_y,first_value_y_percent,self.map_box.miny,self.map_box.maxy,page_div_size,div_size,self.map_box.minx,self.map_box.maxx,False)
+    def render_on_map_scale(self, m):
+        (div_size, page_div_size) = self._get_sensible_scalebar_size(m)
+
+        first_value_x = (
+            math.floor(
+                m.envelope().minx / div_size) + 1) * div_size
+        first_value_x_percent = (
+            first_value_x - m.envelope().minx) / m.envelope().width()
+        self._render_scale_axis(
+            first_value_x,
+            first_value_x_percent,
+            self.map_box.minx,
+            self.map_box.maxx,
+            page_div_size,
+            div_size,
+            self.map_box.miny,
+            self.map_box.maxy,
+            True)
+
+        first_value_y = (
+            math.floor(
+                m.envelope().miny / div_size) + 1) * div_size
+        first_value_y_percent = (
+            first_value_y - m.envelope().miny) / m.envelope().height()
+        self._render_scale_axis(
+            first_value_y,
+            first_value_y_percent,
+            self.map_box.miny,
+            self.map_box.maxy,
+            page_div_size,
+            div_size,
+            self.map_box.minx,
+            self.map_box.maxx,
+            False)
 
         if self._use_ocg_layers:
             self._s.show_page()
             self._layer_names.append("Coordinate Grid Overlay")
 
-    def _get_sensible_scalebar_size(self,m,width=-1):
+    def _get_sensible_scalebar_size(self, m, width=-1):
         # aim for about 8 divisions across the map
-        # also make sure we can fit the bar with in page area width if specified
-        div_size = sequence_scale(m.envelope().width()/8, [1,2,5])
-        page_div_size = self.map_box.width()*div_size/m.envelope().width()
+        # also make sure we can fit the bar within the page area width if
+        # specified
+        div_size = sequence_scale(m.envelope().width() / 8, [1, 2, 5])
+        page_div_size = self.map_box.width() * div_size / m.envelope().width()
         while width > 0 and page_div_size > width:
-            div_size /=2
+            div_size /= 2
             page_div_size /= 2
-        return (div_size,page_div_size)
+        return (div_size, page_div_size)
 
-    def _render_box(self,ctx,x,y,w,h,text=None,stroke_color=(0,0,0),fill_color=(0,0,0)):
+    def _render_box(self, ctx, x, y, w, h, text=None,
+                    stroke_color=(0, 0, 0), fill_color=(0, 0, 0)):
         ctx.set_line_width(1)
         ctx.set_source_rgb(*fill_color)
-        ctx.rectangle(x,y,w,h)
+        ctx.rectangle(x, y, w, h)
         ctx.fill()
 
         ctx.set_source_rgb(*stroke_color)
-        ctx.rectangle(x,y,w,h)
+        ctx.rectangle(x, y, w, h)
         ctx.stroke()
 
         if text:
-            ctx.move_to(x+1,y)
-            self.write_text(ctx,text,fill_color=[1-z for z in fill_color],size=h-2)
+            ctx.move_to(x + 1, y)
+            self.write_text(
+                ctx, text, fill_color=[
+                    1 - z for z in fill_color], size=h - 2)
 
-    def _render_scale_axis(self,first,first_percent,start,end,page_div_size,div_size,boundary_start,boundary_end,is_x_axis):
+    def _render_scale_axis(self, first, first_percent, start, end,
+                           page_div_size, div_size, boundary_start, boundary_end, is_x_axis):
         prev = start
         text = None
-        fill=(0,0,0)
-        border_size=8
-        value = first_percent * (end-start) + start
-        label_value = first-div_size
+        fill = (0, 0, 0)
+        border_size = 8
+        value = first_percent * (end - start) + start
+        label_value = first - div_size
         if self._is_latlon and label_value < -180:
             label_value += 360
 
-        ctx=cairo.Context(self._s)
+        ctx = cairo.Context(self._s)
 
         if not is_x_axis:
-            ctx.translate(m2pt(self.map_box.center().x),m2pt(self.map_box.center().y))
-            ctx.rotate(-math.pi/2)
-            ctx.translate(-m2pt(self.map_box.center().y),-m2pt(self.map_box.center().x))
+            ctx.translate(
+                m2pt(
+                    self.map_box.center().x), m2pt(
+                    self.map_box.center().y))
+            ctx.rotate(-math.pi / 2)
+            ctx.translate(-m2pt(self.map_box.center().y), -
+                          m2pt(self.map_box.center().x))
 
         while value < end:
-            ctx.move_to(m2pt(value),m2pt(boundary_start))
-            ctx.line_to(m2pt(value),m2pt(boundary_end))
-            ctx.set_source_rgb(0.5,0.5,0.5)
+            ctx.move_to(m2pt(value), m2pt(boundary_start))
+            ctx.line_to(m2pt(value), m2pt(boundary_end))
+            ctx.set_source_rgb(0.5, 0.5, 0.5)
             ctx.set_line_width(1)
             ctx.stroke()
 
-            for bar in (m2pt(boundary_start)-border_size,m2pt(boundary_end)):
-                self._render_box(ctx,m2pt(prev),bar,m2pt(value-prev),border_size,text,fill_color=fill)
+            for bar in (m2pt(boundary_start) - border_size,
+                        m2pt(boundary_end)):
+                self._render_box(
+                    ctx,
+                    m2pt(prev),
+                    bar,
+                    m2pt(
+                        value -
+                        prev),
+                    border_size,
+                    text,
+                    fill_color=fill)
 
             prev = value
-            value+=page_div_size
-            fill = [1-z for z in fill]
+            value += page_div_size
+            fill = [1 - z for z in fill]
             label_value += div_size
             if self._is_latlon and label_value > 180:
                 label_value -= 360
             text = "%d" % label_value
         else:
-            for bar in (m2pt(boundary_start)-border_size,m2pt(boundary_end)):
-                self._render_box(ctx,m2pt(prev),bar,m2pt(end-prev),border_size,fill_color=fill)
-
+            for bar in (m2pt(boundary_start) - border_size,
+                        m2pt(boundary_end)):
+                self._render_box(
+                    ctx, m2pt(prev), bar, m2pt(
+                        end - prev), border_size, fill_color=fill)
 
-    def render_scale(self,m,ctx=None,width=0.05):
+    def render_scale(self, m, ctx=None, width=0.05):
         """ m: map to render scale for
         ctx: A cairo context to render the scale to. If this is None (the default) then
             automatically create a context and choose the best location for the scale bar.
@@ -797,20 +949,21 @@ class PDFPrinter:
         will return the size of the rendered scale block in pts
         """
 
-        (w,h) = (0,0)
+        (w, h) = (0, 0)
 
         # don't render scale if we are lat lon
         # don't report scale if we have warped the aspect ratio
         if self._preserve_aspect and not self._is_latlon:
-            bar_size=8.0
-            box_count=3
+            bar_size = 8.0
+            box_count = 3
             if ctx is None:
-                ctx=cairo.Context(self._s)
-                (tx,ty) = self._get_meta_info_corner((self.map_box.width(),self.map_box.height()),m)
-                ctx.translate(tx,ty)
-
-            (div_size,page_div_size) = self._get_sensible_scalebar_size(m, width/box_count)
+                ctx = cairo.Context(self._s)
+                (tx, ty) = self._get_meta_info_corner(
+                    (self.map_box.width(), self.map_box.height()), m)
+                ctx.translate(tx, ty)
 
+            (div_size, page_div_size) = self._get_sensible_scalebar_size(
+                m, width / box_count)
 
             div_unit = "m"
             if div_size > 1000:
@@ -820,36 +973,52 @@ class PDFPrinter:
             text = "0%s" % div_unit
             ctx.save()
             if width > 0:
-                ctx.translate(m2pt(width-box_count*page_div_size)/2,0)
+                ctx.translate(m2pt(width - box_count * page_div_size) / 2, 0)
             for ii in range(box_count):
-                fill=(ii%2,)*3
-                self._render_box(ctx, m2pt(ii*page_div_size), h, m2pt(page_div_size), bar_size, text, fill_color=fill)
-                fill = [1-z for z in fill]
-                text = "%g%s" % ((ii+1)*div_size,div_unit)
-            #else:
+                fill = (ii % 2,) * 3
+                self._render_box(
+                    ctx,
+                    m2pt(
+                        ii *
+                        page_div_size),
+                    h,
+                    m2pt(page_div_size),
+                    bar_size,
+                    text,
+                    fill_color=fill)
+                fill = [1 - z for z in fill]
+                text = "%g%s" % ((ii + 1) * div_size, div_unit)
+            # else:
             #    self._render_box(ctx, m2pt(box_count*page_div_size), h, m2pt(page_div_size), bar_size, text, fill_color=(1,1,1), stroke_color=(1,1,1))
-            w = (box_count)*page_div_size
+            w = (box_count) * page_div_size
             h += bar_size
             ctx.restore()
 
             if width > 0:
-                box_width=m2pt(width)
+                box_width = m2pt(width)
             else:
                 box_width = None
 
-            font_size=6
-            ctx.move_to(0,h)
+            font_size = 6
+            ctx.move_to(0, h)
             if HAS_PANGOCAIRO_MODULE:
                 alignment = pango.ALIGN_CENTER
             else:
                 alignment = None
 
-            text_ext=self.write_text(ctx,"Scale 1:%d" % self.scale,box_width=box_width,size=font_size, alignment=alignment)
-            h+=text_ext[3]+2
+            text_ext = self.write_text(
+                ctx,
+                "Scale 1:%d" %
+                self.scale,
+                box_width=box_width,
+                size=font_size,
+                alignment=alignment)
+            h += text_ext[3] + 2
 
-        return (w,h)
+        return (w, h)
 
-    def render_legend(self,m, page_break=False, ctx=None, collumns=1,width=None, height=None, item_per_rule=False, attribution={}, legend_item_box_size=(0.015,0.0075)):
+    def render_legend(self, m, page_break=False, ctx=None, collumns=1, width=None, height=None,
+                      item_per_rule=False, attribution={}, legend_item_box_size=(0.015, 0.0075)):
         """ m: map to render legend for
         ctx: A cairo context to render the legend to. If this is None (the default) then
             automatically create a context and choose the best location for the legend.
@@ -862,20 +1031,21 @@ class PDFPrinter:
         will return the size of the rendered block in pts
         """
 
-        (w,h) = (0,0)
+        (w, h) = (0, 0)
         if self._s:
             if ctx is None:
-                ctx=cairo.Context(self._s)
-                (tx,ty) = self._get_meta_info_corner((self.map_box.width(),self.map_box.height()),m)
-                ctx.translate(m2pt(tx),m2pt(ty))
-                width = self._pagesize[0]-2*tx
-                height = self._pagesize[1]-self._margin-ty
-
-            x=0
-            y=0
+                ctx = cairo.Context(self._s)
+                (tx, ty) = self._get_meta_info_corner(
+                    (self.map_box.width(), self.map_box.height()), m)
+                ctx.translate(m2pt(tx), m2pt(ty))
+                width = self._pagesize[0] - 2 * tx
+                height = self._pagesize[1] - self._margin - ty
+
+            x = 0
+            y = 0
             if width:
-                cwidth = width/collumns
-                w=m2pt(width)
+                cwidth = width / collumns
+                w = m2pt(width)
             else:
                 cwidth = None
             current_collumn = 0
@@ -883,7 +1053,7 @@ class PDFPrinter:
             processed_layers = []
             for l in reversed(m.layers):
                 have_layer_header = False
-                added_styles={}
+                added_styles = {}
                 layer_title = l.name
                 if layer_title in processed_layers:
                     continue
@@ -899,10 +1069,11 @@ class PDFPrinter:
                             st = m.find_style(s)
                             for r in st.rules:
                                 # we need to do the scale test here as well so we don't
-                                # add unused scale rules to the legend description
+                                # add unused scale rules to the legend
+                                # description
                                 if ((not r.filter) or r.filter.evaluate(f) == '1') and \
-                                    r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale:
-                                    active_rules.append((s,r.name))
+                                        r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale:
+                                    active_rules.append((s, r.name))
                                     if r.filter and str(r.filter) != "true":
                                         if len(rule_text) > 0:
                                             rule_text += " AND "
@@ -911,58 +1082,65 @@ class PDFPrinter:
                                         else:
                                             rule_text += str(r.filter)
                         active_rules = tuple(active_rules)
-                        if added_styles.has_key(active_rules):
+                        if active_rules in added_styles:
                             continue
 
-                        added_styles[active_rules] = (f,rule_text)
+                        added_styles[active_rules] = (f, rule_text)
                         if not item_per_rule:
                             break
                     else:
-                        added_styles[l] = (None,None)
+                        added_styles[l] = (None, None)
 
-                legend_items = added_styles.keys()
-                legend_items.sort()
+                legend_items = sorted(added_styles.keys())
                 for li in legend_items:
                     if True:
-                        (f,rule_text) = added_styles[li]
-
-
-                        legend_map_size = (int(m2pt(legend_item_box_size[0])),int(m2pt(legend_item_box_size[1])))
-                        lemap=Map(legend_map_size[0],legend_map_size[1],srs=m.srs)
+                        (f, rule_text) = added_styles[li]
+
+                        legend_map_size = (int(m2pt(legend_item_box_size[0])), int(
+                            m2pt(legend_item_box_size[1])))
+                        lemap = Map(
+                            legend_map_size[0],
+                            legend_map_size[1],
+                            srs=m.srs)
                         if m.background:
                             lemap.background = m.background
                         # the buffer is needed to ensure that text labels that overflow the edge of the
                         # map still render for the legend
-                        lemap.buffer_size=1000
+                        lemap.buffer_size = 1000
                         for s in l.styles:
-                            sty=m.find_style(s)
+                            sty = m.find_style(s)
                             lestyle = Style()
                             for r in sty.rules:
                                 for sym in r.symbols:
                                     try:
-                                        sym.avoid_edges=False
+                                        sym.avoid_edges = False
                                     except:
-                                        print "**** Cant set avoid edges for rule", r.name
+                                        print(
+                                            "**** Cant set avoid edges for rule", r.name)
                                 if r.min_scale <= m.scale_denominator() and m.scale_denominator() < r.max_scale:
                                     lerule = r
                                     lerule.min_scale = 0
                                     lerule.max_scale = float("inf")
                                     lestyle.rules.append(lerule)
-                            lemap.append_style(s,lestyle)
+                            lemap.append_style(s, lestyle)
 
                         ds = MemoryDatasource()
                         if f is None:
-                            ds=l.datasource
+                            ds = l.datasource
                             layer_srs = l.srs
                         elif f.envelope().width() == 0:
-                            ds.add_feature(Feature(f.id(),Geometry2d.from_wkt("POINT(0 0)"),**f.attributes))
-                            lemap.zoom_to_box(Box2d(-1,-1,1,1))
+                            ds.add_feature(
+                                Feature(
+                                    f.id(),
+                                    Geometry2d.from_wkt("POINT(0 0)"),
+                                    **f.attributes))
+                            lemap.zoom_to_box(Box2d(-1, -1, 1, 1))
                             layer_srs = m.srs
                         else:
                             ds.add_feature(f)
                             layer_srs = l.srs
 
-                        lelayer = Layer("LegendLayer",layer_srs)
+                        lelayer = Layer("LegendLayer", layer_srs)
                         lelayer.datasource = ds
                         for s in l.styles:
                             lelayer.styles.append(s)
@@ -976,52 +1154,73 @@ class PDFPrinter:
                         if not have_layer_header:
                             item_size += 8
 
-                        if y+item_size > m2pt(height):
+                        if y + item_size > m2pt(height):
                             current_collumn += 1
-                            y=0
+                            y = 0
                             if current_collumn >= collumns:
                                 if page_break:
                                     self._s.show_page()
-                                    x=0
+                                    x = 0
                                     current_collumn = 0
                                 else:
                                     break
 
                         if not have_layer_header and item_per_rule:
-                            ctx.move_to(x+m2pt(current_collumn*cwidth),y)
-                            e=self.write_text(ctx, l.name, m2pt(cwidth), 8)
-                            y+=e[3]+2
+                            ctx.move_to(x + m2pt(current_collumn * cwidth), y)
+                            e = self.write_text(ctx, l.name, m2pt(cwidth), 8)
+                            y += e[3] + 2
                             have_layer_header = True
                         ctx.save()
-                        ctx.translate(x+m2pt(current_collumn*cwidth),y)
-                        #extra save around map render as it sets up a clip box and doesn't clear it
+                        ctx.translate(x + m2pt(current_collumn * cwidth), y)
+                        # extra save around map render as it sets up a clip box
+                        # and doesn't clear it
                         ctx.save()
                         render(lemap, ctx)
                         ctx.restore()
 
-                        ctx.rectangle(0,0,*legend_map_size)
-                        ctx.set_source_rgb(0.5,0.5,0.5)
+                        ctx.rectangle(0, 0, *legend_map_size)
+                        ctx.set_source_rgb(0.5, 0.5, 0.5)
                         ctx.set_line_width(1)
                         ctx.stroke()
                         ctx.restore()
 
-                        ctx.move_to(x+legend_map_size[0]+m2pt(current_collumn*cwidth)+2,y)
+                        ctx.move_to(
+                            x +
+                            legend_map_size[0] +
+                            m2pt(
+                                current_collumn *
+                                cwidth) +
+                            2,
+                            y)
                         legend_entry_size = legend_map_size[1]
                         legend_text_size = 0
                         if not item_per_rule:
                             rule_text = layer_title
                         if rule_text:
-                            e=self.write_text(ctx, rule_text, m2pt(cwidth-legend_item_box_size[0]-0.005), 6)
+                            e = self.write_text(
+                                ctx, rule_text, m2pt(
+                                    cwidth - legend_item_box_size[0] - 0.005), 6)
                             legend_text_size += e[3]
-                            ctx.rel_move_to(0,e[3])
-                        if attribution.has_key(layer_title):
-                            e=self.write_text(ctx, attribution[layer_title], m2pt(cwidth-legend_item_box_size[0]-0.005), 6, fill_color=(0.5,0.5,0.5))
+                            ctx.rel_move_to(0, e[3])
+                        if layer_title in attribution:
+                            e = self.write_text(
+                                ctx,
+                                attribution[layer_title],
+                                m2pt(
+                                    cwidth -
+                                    legend_item_box_size[0] -
+                                    0.005),
+                                6,
+                                fill_color=(
+                                    0.5,
+                                    0.5,
+                                    0.5))
                             legend_text_size += e[3]
 
                         if legend_text_size > legend_entry_size:
-                            legend_entry_size=legend_text_size
+                            legend_entry_size = legend_text_size
 
-                        y+=legend_entry_size +2
+                        y += legend_entry_size + 2
                         if y > h:
                             h = y
-        return (w,h)
+        return (w, h)
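
The printing.py changes above are mostly whitespace and line-wrapping fixes, but the logic in render_map is worth spelling out: it divides the map envelope by the printable area to get a raw scale denominator, rounds it via self._scale, and then scales the render area by scale/rounded_scale so the printed map sits exactly at the rounded denominator. The sketch below only illustrates that rounding idea for a map whose SRS units are metres; pick_scale and the local m2px are illustrative stand-ins, the 1/2/5 rounding mirrors the sequence_scale(..., [1, 2, 5]) call used for the scale bar, and the real default_scale in mapnik.printing may differ in detail.

import math

def m2px(meters, resolution):
    # paper metres -> device pixels at the given dpi (assumes 1 in = 0.0254 m)
    return meters * resolution / 0.0254

def pick_scale(env_w, env_h, page_w, page_h):
    # raw denominator needed to fit the envelope into the printable area
    scale = max(env_w / page_w, env_h / page_h)
    # round up to the next 1/2/5 * 10^n, like a map series scale
    exponent = math.floor(math.log10(scale))
    mantissa = scale / 10 ** exponent
    for nice in (1, 2, 5, 10):
        if mantissa <= nice:
            return nice * 10 ** exponent
    return scale

# a 40 km x 30 km extent on a ~0.247 m x 0.167 m printable area -> 1:200000
print(pick_scale(40000.0, 30000.0, 0.247, 0.167))
print(int(m2px(0.247, 300)))  # ~2917 px across at 300 dpi
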
diff --git a/setup.py b/setup.py
index 85757e6..15a70fa 100755
--- a/setup.py
+++ b/setup.py
@@ -1,23 +1,41 @@
 #! /usr/bin/env python
 
-from distutils import sysconfig
-from setuptools import setup, Extension
 import os
+import re
+import shutil
 import subprocess
 import sys
-import shutil
-import re
+from distutils import sysconfig
+
+from setuptools import Extension, setup
+
+PYTHON3 = sys.version_info[0] == 3
+
+
+# Utils
+def check_output(args):
+    output = subprocess.check_output(args)
+    if PYTHON3:
+        # check_output returns bytes in PYTHON3.
+        output = output.decode()
+    return output.rstrip('\n')
+
 
 cflags = sysconfig.get_config_var('CFLAGS')
-sysconfig._config_vars['CFLAGS'] = re.sub(' +', ' ', cflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
+sysconfig._config_vars['CFLAGS'] = re.sub(
+    ' +', ' ', cflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
 opt = sysconfig.get_config_var('OPT')
-sysconfig._config_vars['OPT'] = re.sub(' +', ' ', opt.replace('-g', '').replace('-Os', ''))
+sysconfig._config_vars['OPT'] = re.sub(
+    ' +', ' ', opt.replace('-g', '').replace('-Os', ''))
 ldshared = sysconfig.get_config_var('LDSHARED')
-sysconfig._config_vars['LDSHARED'] = re.sub(' +', ' ', ldshared.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
+sysconfig._config_vars['LDSHARED'] = re.sub(
+    ' +', ' ', ldshared.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
 ldflags = sysconfig.get_config_var('LDFLAGS')
-sysconfig._config_vars['LDFLAGS'] = re.sub(' +', ' ', ldflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
+sysconfig._config_vars['LDFLAGS'] = re.sub(
+    ' +', ' ', ldflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
 pycflags = sysconfig.get_config_var('PY_CFLAGS')
-sysconfig._config_vars['PY_CFLAGS'] = re.sub(' +', ' ', pycflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
+sysconfig._config_vars['PY_CFLAGS'] = re.sub(
+    ' +', ' ', pycflags.replace('-g', '').replace('-Os', '').replace('-arch i386', ''))
 sysconfig._config_vars['CFLAGSFORSHARED'] = ''
 os.environ['ARCHFLAGS'] = ''
 
@@ -35,13 +53,13 @@ boost_system_lib = os.environ.get("BOOST_SYSTEM_LIB", 'boost_system-mt')
 boost_thread_lib = os.environ.get("BOOST_THREAD_LIB", 'boost_thread-mt')
 
 try:
-    linkflags = subprocess.check_output([mapnik_config, '--libs']).rstrip('\n').split(' ')
+    linkflags = check_output([mapnik_config, '--libs']).split(' ')
     lib_path = linkflags[0][2:]
-    linkflags.extend(subprocess.check_output([mapnik_config, '--ldflags']).rstrip('\n').split(' '))
+    linkflags.extend(check_output([mapnik_config, '--ldflags']).split(' '))
 except:
-    raise Exception("Failed to find proper linking flags from mapnik config");
+    raise Exception("Failed to find proper linking flags from mapnik config")
 
-## Dynamically make the mapnik/paths.py file if it doesn't exist.
+# Dynamically make the mapnik/paths.py file if it doesn't exist.
 if os.path.isfile('mapnik/paths.py'):
     create_paths = False
 else:
@@ -55,61 +73,68 @@ if mason_build:
         if sys.platform == 'darwin':
             base_f = 'libmapnik.dylib'
         else:
-            base_f = 'libmapnik.so.3.0'   
-        f = os.path.join(lib_path, base_f) 
+            base_f = 'libmapnik.so.3.0'
+        f = os.path.join(lib_path, base_f)
         shutil.copyfile(f, os.path.join('mapnik', base_f))
     except shutil.Error:
         pass
-    input_plugin_path = subprocess.check_output([mapnik_config, '--input-plugins']).rstrip('\n')
+    input_plugin_path = check_output([mapnik_config, '--input-plugins'])
     input_plugin_files = os.listdir(input_plugin_path)
-    input_plugin_files = [os.path.join(input_plugin_path, f) for f in input_plugin_files]
-    if not os.path.exists(os.path.join('mapnik','plugins','input')):
-        os.makedirs(os.path.join('mapnik','plugins', 'input'))
+    input_plugin_files = [os.path.join(
+        input_plugin_path, f) for f in input_plugin_files]
+    if not os.path.exists(os.path.join('mapnik', 'plugins', 'input')):
+        os.makedirs(os.path.join('mapnik', 'plugins', 'input'))
     for f in input_plugin_files:
         try:
-            shutil.copyfile(f, os.path.join('mapnik', 'plugins', 'input', os.path.basename(f)))
+            shutil.copyfile(f, os.path.join(
+                'mapnik', 'plugins', 'input', os.path.basename(f)))
         except shutil.Error:
             pass
-    font_path = subprocess.check_output([mapnik_config, '--fonts']).rstrip('\n')
+    font_path = check_output([mapnik_config, '--fonts'])
     font_files = os.listdir(font_path)
     font_files = [os.path.join(font_path, f) for f in font_files]
-    if not os.path.exists(os.path.join('mapnik','plugins','fonts')):
-        os.makedirs(os.path.join('mapnik','plugins','fonts'))
+    if not os.path.exists(os.path.join('mapnik', 'plugins', 'fonts')):
+        os.makedirs(os.path.join('mapnik', 'plugins', 'fonts'))
     for f in font_files:
         try:
-            shutil.copyfile(f, os.path.join('mapnik','plugins','fonts', os.path.basename(f)))
+            shutil.copyfile(f, os.path.join(
+                'mapnik', 'plugins', 'fonts', os.path.basename(f)))
         except shutil.Error:
             pass
     if create_paths:
-        f_paths.write('mapniklibpath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "plugins")\n')
+        f_paths.write(
+            'mapniklibpath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "plugins")\n')
 elif create_paths:
-    f_paths.write("mapniklibpath = '"+lib_path+"/mapnik'\n")
+    f_paths.write("mapniklibpath = '" + lib_path + "/mapnik'\n")
     f_paths.write('mapniklibpath = os.path.normpath(mapniklibpath)\n')
 
 if create_paths:
     f_paths.write("inputpluginspath = os.path.join(mapniklibpath,'input')\n")
-    f_paths.write("fontscollectionpath = os.path.join(mapniklibpath,'fonts')\n")
-    f_paths.write("__all__ = [mapniklibpath,inputpluginspath,fontscollectionpath]\n")
+    f_paths.write(
+        "fontscollectionpath = os.path.join(mapniklibpath,'fonts')\n")
+    f_paths.write(
+        "__all__ = [mapniklibpath,inputpluginspath,fontscollectionpath]\n")
     f_paths.close()
 
 
 if not mason_build:
-    icu_path = subprocess.check_output([mapnik_config, '--icu-data']).rstrip('\n')
+    icu_path = check_output([mapnik_config, '--icu-data'])
 else:
     icu_path = 'mason_packages/.link/share/icu/'
 if icu_path:
     icu_files = os.listdir(icu_path)
     icu_files = [os.path.join(icu_path, f) for f in icu_files]
-    if not os.path.exists(os.path.join('mapnik','plugins','icu')):
-        os.makedirs(os.path.join('mapnik','plugins','icu'))
+    if not os.path.exists(os.path.join('mapnik', 'plugins', 'icu')):
+        os.makedirs(os.path.join('mapnik', 'plugins', 'icu'))
     for f in icu_files:
         try:
-            shutil.copyfile(f, os.path.join('mapnik','plugins','icu', os.path.basename(f)))
+            shutil.copyfile(f, os.path.join(
+                'mapnik', 'plugins', 'icu', os.path.basename(f)))
         except shutil.Error:
             pass
 
 if not mason_build:
-    gdal_path = subprocess.check_output([mapnik_config, '--gdal-data']).rstrip('\n')
+    gdal_path = check_output([mapnik_config, '--gdal-data'])
 else:
     gdal_path = 'mason_packages/.link/share/gdal/'
     if os.path.exists('mason_packages/.link/share/gdal/gdal/'):
@@ -117,16 +142,17 @@ else:
 if gdal_path:
     gdal_files = os.listdir(gdal_path)
     gdal_files = [os.path.join(gdal_path, f) for f in gdal_files]
-    if not os.path.exists(os.path.join('mapnik','plugins','gdal')):
-        os.makedirs(os.path.join('mapnik','plugins','gdal'))
+    if not os.path.exists(os.path.join('mapnik', 'plugins', 'gdal')):
+        os.makedirs(os.path.join('mapnik', 'plugins', 'gdal'))
     for f in gdal_files:
         try:
-            shutil.copyfile(f, os.path.join('mapnik','plugins','gdal', os.path.basename(f)))
+            shutil.copyfile(f, os.path.join(
+                'mapnik', 'plugins', 'gdal', os.path.basename(f)))
         except shutil.Error:
             pass
 
 if not mason_build:
-    proj_path = subprocess.check_output([mapnik_config, '--proj-lib']).rstrip('\n')
+    proj_path = check_output([mapnik_config, '--proj-lib'])
 else:
     proj_path = 'mason_packages/.link/share/proj/'
     if os.path.exists('mason_packages/.link/share/proj/proj/'):
@@ -134,95 +160,99 @@ else:
 if proj_path:
     proj_files = os.listdir(proj_path)
     proj_files = [os.path.join(proj_path, f) for f in proj_files]
-    if not os.path.exists(os.path.join('mapnik','plugins','proj')):
-        os.makedirs(os.path.join('mapnik','plugins','proj'))
+    if not os.path.exists(os.path.join('mapnik', 'plugins', 'proj')):
+        os.makedirs(os.path.join('mapnik', 'plugins', 'proj'))
     for f in proj_files:
         try:
-            shutil.copyfile(f, os.path.join('mapnik','plugins','proj', os.path.basename(f)))
+            shutil.copyfile(f, os.path.join(
+                'mapnik', 'plugins', 'proj', os.path.basename(f)))
         except shutil.Error:
             pass
 
-extra_comp_args = subprocess.check_output([mapnik_config, '--cflags']).rstrip('\n').split(' ')
+extra_comp_args = check_output([mapnik_config, '--cflags']).split(' ')
 
 if sys.platform == 'darwin':
     extra_comp_args.append('-mmacosx-version-min=10.8')
+    # silence a warning coming from boost python macros which
+    # is hard to silence via pragma
+    extra_comp_args.append('-Wno-parentheses-equality')
     linkflags.append('-mmacosx-version-min=10.8')
 else:
-    linkflags.append('-lrt') 
-    linkflags.append('-Wl,-z,origin') 
+    linkflags.append('-lrt')
+    linkflags.append('-Wl,-z,origin')
     linkflags.append('-Wl,-rpath=$ORIGIN')
 
-if os.environ.get("CC",False) == False:
-    os.environ["CC"] = subprocess.check_output([mapnik_config, '--cxx']).rstrip('\n')
-if os.environ.get("CXX",False) == False:
-    os.environ["CXX"] = subprocess.check_output([mapnik_config, '--cxx']).rstrip('\n')
+if os.environ.get("CC", False) == False:
+    os.environ["CC"] = check_output([mapnik_config, '--cxx'])
+if os.environ.get("CXX", False) == False:
+    os.environ["CXX"] = check_output([mapnik_config, '--cxx'])
 
 setup(
-    name = "mapnik",
-    version = "0.1",
-    packages = ['mapnik'],
-    author = "Blake Thompson",
-    author_email = "flippmoke at gmail.com",
-    description = "Python bindings for Mapnik",
-    license = "GNU LESSER GENERAL PUBLIC LICENSE",
-    keywords = "mapnik mapbox mapping carteography",
-    url = "http://mapnik.org/", 
-    tests_require = [
+    name="mapnik",
+    version="0.1",
+    packages=['mapnik'],
+    author="Blake Thompson",
+    author_email="flippmoke at gmail.com",
+    description="Python bindings for Mapnik",
+    license="GNU LESSER GENERAL PUBLIC LICENSE",
+    keywords="mapnik mapbox mapping carteography",
+    url="http://mapnik.org/",
+    tests_require=[
         'nose',
     ],
-    package_data = {
+    package_data={
         'mapnik': ['libmapnik.*', 'plugins/*/*'],
     },
-    test_suite = 'nose.collector',
-    ext_modules = [
+    test_suite='nose.collector',
+    ext_modules=[
         Extension('mapnik._mapnik', [
-                'src/mapnik_color.cpp',
-                'src/mapnik_coord.cpp',
-                'src/mapnik_datasource.cpp',
-                'src/mapnik_datasource_cache.cpp',
-                'src/mapnik_envelope.cpp',
-                'src/mapnik_expression.cpp',
-                'src/mapnik_feature.cpp',
-                'src/mapnik_featureset.cpp',
-                'src/mapnik_font_engine.cpp',
-                'src/mapnik_fontset.cpp',
-                'src/mapnik_gamma_method.cpp',
-                'src/mapnik_geometry.cpp',
-                'src/mapnik_grid.cpp',
-                'src/mapnik_grid_view.cpp',
-                'src/mapnik_image.cpp',
-                'src/mapnik_image_view.cpp',
-                'src/mapnik_label_collision_detector.cpp',
-                'src/mapnik_layer.cpp',
-                'src/mapnik_logger.cpp',
-                'src/mapnik_map.cpp',
-                'src/mapnik_palette.cpp',
-                'src/mapnik_parameters.cpp',
-                'src/mapnik_proj_transform.cpp',
-                'src/mapnik_projection.cpp',
-                'src/mapnik_python.cpp',
-                'src/mapnik_query.cpp',
-                'src/mapnik_raster_colorizer.cpp',
-                'src/mapnik_rule.cpp',
-                'src/mapnik_scaling_method.cpp',
-                'src/mapnik_style.cpp',
-                'src/mapnik_svg_generator_grammar.cpp',
-                'src/mapnik_symbolizer.cpp',
-                'src/mapnik_text_placement.cpp',
-                'src/mapnik_view_transform.cpp',
-                'src/python_grid_utils.cpp',
-            ],
+            'src/mapnik_color.cpp',
+            'src/mapnik_coord.cpp',
+            'src/mapnik_datasource.cpp',
+            'src/mapnik_datasource_cache.cpp',
+            'src/mapnik_envelope.cpp',
+            'src/mapnik_expression.cpp',
+            'src/mapnik_feature.cpp',
+            'src/mapnik_featureset.cpp',
+            'src/mapnik_font_engine.cpp',
+            'src/mapnik_fontset.cpp',
+            'src/mapnik_gamma_method.cpp',
+            'src/mapnik_geometry.cpp',
+            'src/mapnik_grid.cpp',
+            'src/mapnik_grid_view.cpp',
+            'src/mapnik_image.cpp',
+            'src/mapnik_image_view.cpp',
+            'src/mapnik_label_collision_detector.cpp',
+            'src/mapnik_layer.cpp',
+            'src/mapnik_logger.cpp',
+            'src/mapnik_map.cpp',
+            'src/mapnik_palette.cpp',
+            'src/mapnik_parameters.cpp',
+            'src/mapnik_proj_transform.cpp',
+            'src/mapnik_projection.cpp',
+            'src/mapnik_python.cpp',
+            'src/mapnik_query.cpp',
+            'src/mapnik_raster_colorizer.cpp',
+            'src/mapnik_rule.cpp',
+            'src/mapnik_scaling_method.cpp',
+            'src/mapnik_style.cpp',
+            'src/mapnik_svg_generator_grammar.cpp',
+            'src/mapnik_symbolizer.cpp',
+            'src/mapnik_text_placement.cpp',
+            'src/mapnik_view_transform.cpp',
+            'src/python_grid_utils.cpp',
+        ],
             language='c++',
-            libraries = [
-                'mapnik', 
+            libraries=[
+                'mapnik',
                 'mapnik-wkt',
                 'mapnik-json',
                 boost_python_lib,
                 boost_thread_lib,
                 boost_system_lib
-            ],
-            extra_compile_args = extra_comp_args,
-            extra_link_args = linkflags,
+        ],
+            extra_compile_args=extra_comp_args,
+            extra_link_args=linkflags,
         )
     ]
 )
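
One substantive change hiding in the setup.py reformatting is the new check_output helper: subprocess.check_output returns bytes under Python 3, so every mapnik-config query is now decoded and stripped of its trailing newline in one place instead of calling .rstrip('\n') at each call site. A minimal standalone sketch of the same pattern follows; it assumes a mapnik-config binary is on PATH, and --cflags is just one of the queries the real setup.py makes.

import subprocess
import sys

def check_output(args):
    output = subprocess.check_output(args)
    if sys.version_info[0] == 3:
        # bytes under Python 3, str under Python 2
        output = output.decode()
    return output.rstrip('\n')

if __name__ == '__main__':
    mapnik_config = 'mapnik-config'
    # same shape as the setup.py calls for --libs, --ldflags, --cflags, ...
    print(check_output([mapnik_config, '--cflags']).split(' '))
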
diff --git a/src/mapnik_color.cpp b/src/mapnik_color.cpp
index 4ab765e..df98a4a 100644
--- a/src/mapnik_color.cpp
+++ b/src/mapnik_color.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_coord.cpp b/src/mapnik_coord.cpp
index 13b8961..e985938 100644
--- a/src/mapnik_coord.cpp
+++ b/src/mapnik_coord.cpp
@@ -28,6 +28,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_datasource.cpp b/src/mapnik_datasource.cpp
index 41cd790..c35e5c0 100644
--- a/src/mapnik_datasource.cpp
+++ b/src/mapnik_datasource.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_datasource_cache.cpp b/src/mapnik_datasource_cache.cpp
index 5439991..77accef 100644
--- a/src/mapnik_datasource_cache.cpp
+++ b/src/mapnik_datasource_cache.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_envelope.cpp b/src/mapnik_envelope.cpp
index 3964053..5bbfb2c 100644
--- a/src/mapnik_envelope.cpp
+++ b/src/mapnik_envelope.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
 
diff --git a/src/mapnik_expression.cpp b/src/mapnik_expression.cpp
index 60c4a44..714c1a3 100644
--- a/src/mapnik_expression.cpp
+++ b/src/mapnik_expression.cpp
@@ -30,6 +30,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_feature.cpp b/src/mapnik_feature.cpp
index 16d9d17..d77b9ee 100644
--- a/src/mapnik_feature.cpp
+++ b/src/mapnik_feature.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/iterator.hpp>
diff --git a/src/mapnik_featureset.cpp b/src/mapnik_featureset.cpp
index 8e9ddf6..e19348f 100644
--- a/src/mapnik_featureset.cpp
+++ b/src/mapnik_featureset.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_font_engine.cpp b/src/mapnik_font_engine.cpp
index dfc89f4..4eafb1f 100644
--- a/src/mapnik_font_engine.cpp
+++ b/src/mapnik_font_engine.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_fontset.cpp b/src/mapnik_fontset.cpp
index 651efd1..26c294d 100644
--- a/src/mapnik_fontset.cpp
+++ b/src/mapnik_fontset.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_gamma_method.cpp b/src/mapnik_gamma_method.cpp
index c1849cc..591270e 100644
--- a/src/mapnik_gamma_method.cpp
+++ b/src/mapnik_gamma_method.cpp
@@ -28,6 +28,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_geometry.cpp b/src/mapnik_geometry.cpp
index d2ec6c1..f0306da 100644
--- a/src/mapnik_geometry.cpp
+++ b/src/mapnik_geometry.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/def.hpp>
diff --git a/src/mapnik_grid.cpp b/src/mapnik_grid.cpp
index 1147ac2..6cb8bd3 100644
--- a/src/mapnik_grid.cpp
+++ b/src/mapnik_grid.cpp
@@ -31,6 +31,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/module.hpp>
diff --git a/src/mapnik_grid_view.cpp b/src/mapnik_grid_view.cpp
index 230ccc0..4874756 100644
--- a/src/mapnik_grid_view.cpp
+++ b/src/mapnik_grid_view.cpp
@@ -31,6 +31,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/module.hpp>
diff --git a/src/mapnik_image.cpp b/src/mapnik_image.cpp
index 89ae397..da62680 100644
--- a/src/mapnik_image.cpp
+++ b/src/mapnik_image.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/module.hpp>
@@ -143,7 +145,7 @@ std::shared_ptr<image_any> copy(mapnik::image_any const& im, mapnik::image_dtype
     return std::make_shared<image_any>(mapnik::image_copy(im, type, offset, scaling));
 }
 
-unsigned compare(mapnik::image_any const& im1, mapnik::image_any const& im2, double threshold, bool alpha)
+std::size_t compare(mapnik::image_any const& im1, mapnik::image_any const& im2, double threshold, bool alpha)
 {
     return mapnik::compare(im1, im2, threshold, alpha);
 }
diff --git a/src/mapnik_image_view.cpp b/src/mapnik_image_view.cpp
index 1086cda..762d3da 100644
--- a/src/mapnik_image_view.cpp
+++ b/src/mapnik_image_view.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/module.hpp>
diff --git a/src/mapnik_label_collision_detector.cpp b/src/mapnik_label_collision_detector.cpp
index 0d7ab8f..c844bed 100644
--- a/src/mapnik_label_collision_detector.cpp
+++ b/src/mapnik_label_collision_detector.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/module.hpp>
diff --git a/src/mapnik_layer.cpp b/src/mapnik_layer.cpp
index 9836a2b..d28bf4b 100644
--- a/src/mapnik_layer.cpp
+++ b/src/mapnik_layer.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/suite/indexing/vector_indexing_suite.hpp>
diff --git a/src/mapnik_logger.cpp b/src/mapnik_logger.cpp
index 8fc7c32..16baf0f 100644
--- a/src/mapnik_logger.cpp
+++ b/src/mapnik_logger.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_map.cpp b/src/mapnik_map.cpp
index 3f3719f..3320c12 100644
--- a/src/mapnik_map.cpp
+++ b/src/mapnik_map.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_palette.cpp b/src/mapnik_palette.cpp
index 33bc23a..97c78f1 100644
--- a/src/mapnik_palette.cpp
+++ b/src/mapnik_palette.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_parameters.cpp b/src/mapnik_parameters.cpp
index fb58f3d..0863782 100644
--- a/src/mapnik_parameters.cpp
+++ b/src/mapnik_parameters.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
@@ -163,7 +165,7 @@ mapnik::parameter get_params_by_index(mapnik::parameters const& p, int index)
     throw boost::python::error_already_set();
 }
 
-unsigned get_params_size(mapnik::parameters const& p)
+std::size_t get_params_size(mapnik::parameters const& p)
 {
     return p.size();
 }
diff --git a/src/mapnik_proj_transform.cpp b/src/mapnik_proj_transform.cpp
index c4b0091..560abbf 100644
--- a/src/mapnik_proj_transform.cpp
+++ b/src/mapnik_proj_transform.cpp
@@ -30,6 +30,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/noncopyable.hpp>
diff --git a/src/mapnik_projection.cpp b/src/mapnik_projection.cpp
index 15b62a6..a136cd0 100644
--- a/src/mapnik_projection.cpp
+++ b/src/mapnik_projection.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_python.cpp b/src/mapnik_python.cpp
index 3cc80cc..7639493 100644
--- a/src/mapnik_python.cpp
+++ b/src/mapnik_python.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 #include "python_to_value.hpp"
 #include <boost/python/args.hpp>        // for keywords, arg, etc
 #include <boost/python/converter/from_python.hpp>
diff --git a/src/mapnik_query.cpp b/src/mapnik_query.cpp
index 8d77eef..5510b7b 100644
--- a/src/mapnik_query.cpp
+++ b/src/mapnik_query.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include "python_to_value.hpp"
 #include <boost/python.hpp>
diff --git a/src/mapnik_raster_colorizer.cpp b/src/mapnik_raster_colorizer.cpp
index a57d2d6..c7aef73 100644
--- a/src/mapnik_raster_colorizer.cpp
+++ b/src/mapnik_raster_colorizer.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/suite/indexing/vector_indexing_suite.hpp>
diff --git a/src/mapnik_rule.cpp b/src/mapnik_rule.cpp
index b319597..4a309e6 100644
--- a/src/mapnik_rule.cpp
+++ b/src/mapnik_rule.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/implicit.hpp>
diff --git a/src/mapnik_scaling_method.cpp b/src/mapnik_scaling_method.cpp
index 24522bb..8c91828 100644
--- a/src/mapnik_scaling_method.cpp
+++ b/src/mapnik_scaling_method.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/mapnik_style.cpp b/src/mapnik_style.cpp
index da0f47b..aa12615 100644
--- a/src/mapnik_style.cpp
+++ b/src/mapnik_style.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/suite/indexing/vector_indexing_suite.hpp>
diff --git a/src/mapnik_symbolizer.cpp b/src/mapnik_symbolizer.cpp
index 4195494..aa66420 100644
--- a/src/mapnik_symbolizer.cpp
+++ b/src/mapnik_symbolizer.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/suite/indexing/map_indexing_suite.hpp>
diff --git a/src/mapnik_text_placement.cpp b/src/mapnik_text_placement.cpp
index 7bab210..522f3bc 100644
--- a/src/mapnik_text_placement.cpp
+++ b/src/mapnik_text_placement.cpp
@@ -29,6 +29,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #include <boost/python/stl_iterator.hpp>
diff --git a/src/mapnik_view_transform.cpp b/src/mapnik_view_transform.cpp
index 8dc177c..b3e6105 100644
--- a/src/mapnik_view_transform.cpp
+++ b/src/mapnik_view_transform.cpp
@@ -28,6 +28,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
diff --git a/src/python_grid_utils.cpp b/src/python_grid_utils.cpp
index 62dba2b..df4ea52 100644
--- a/src/python_grid_utils.cpp
+++ b/src/python_grid_utils.cpp
@@ -30,6 +30,8 @@
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
@@ -69,13 +71,13 @@ void grid2utf(T const& grid_type,
     // start counting at utf8 codepoint 32, aka space character
     std::uint16_t codepoint = 32;
 
-    unsigned array_size = data.width();
-    for (unsigned y = 0; y < data.height(); ++y)
+    std::size_t array_size = data.width();
+    for (std::size_t y = 0; y < data.height(); ++y)
     {
         std::uint16_t idx = 0;
         const std::unique_ptr<Py_UNICODE[]> line(new Py_UNICODE[array_size]);
         typename T::value_type const* row = data.get_row(y);
-        for (unsigned x = 0; x < data.width(); ++x)
+        for (std::size_t x = 0; x < data.width(); ++x)
         {
             typename T::value_type feature_id = row[x];
             feature_pos = feature_keys.find(feature_id);
@@ -199,14 +201,13 @@ void grid2utf2(T const& grid_type,
     mapnik::grid::data_type target(data.width()/resolution,data.height()/resolution);
     mapnik::scale_grid(target,grid_type.data(),0.0,0.0);
 
-    unsigned array_size = target.width();
-    for (unsigned y = 0; y < target.height(); ++y)
+    std::size_t array_size = target.width();
+    for (std::size_t y = 0; y < target.height(); ++y)
     {
         uint16_t idx = 0;
         const std::unique_ptr<Py_UNICODE[]> line(new Py_UNICODE[array_size]);
         mapnik::grid::value_type * row = target.get_row(y);
-        unsigned x;
-        for (x = 0; x < target.width(); ++x)
+        for (std::size_t x = 0; x < target.width(); ++x)
         {
             feature_pos = feature_keys.find(row[x]);
             if (feature_pos != feature_keys.end())
diff --git a/src/python_to_value.hpp b/src/python_to_value.hpp
index 89ac66d..d8c63ee 100644
--- a/src/python_to_value.hpp
+++ b/src/python_to_value.hpp
@@ -27,7 +27,10 @@
 #pragma GCC diagnostic ignored "-Wunused-parameter"
 #pragma GCC diagnostic ignored "-Wunused-local-typedef"
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
+#pragma GCC diagnostic ignored "-Wunused-parameter"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+
 #include <boost/python.hpp>
 #pragma GCC diagnostic pop
 
diff --git a/test/python_tests/agg_rasterizer_integer_overflow_test.py b/test/python_tests/agg_rasterizer_integer_overflow_test.py
index bfd8128..af705e3 100644
--- a/test/python_tests/agg_rasterizer_integer_overflow_test.py
+++ b/test/python_tests/agg_rasterizer_integer_overflow_test.py
@@ -1,71 +1,90 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import json
+
 from nose.tools import eq_
-from utilities import run_all
+
 import mapnik
-import json
+
+from .utilities import run_all
 
 # geojson box of the world
-geojson  = { "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -17963313.143242701888084, -6300857.11560364998877 ], [ -17963313.143242701888084, 13071343.332991421222687 ], [ 7396658.353099936619401, 13071343.332991421222687 ], [ 7396658.353099936619401, -6300857.11560364998877 ], [ -17963313.143242701888084, -6300857.11560364998877 ] ] ] } }
+geojson = {"type": "Feature",
+           "properties": {},
+           "geometry": {"type": "Polygon",
+                        "coordinates": [[[-17963313.143242701888084,
+                                          -6300857.11560364998877],
+                                         [-17963313.143242701888084,
+                                          13071343.332991421222687],
+                                         [7396658.353099936619401,
+                                          13071343.332991421222687],
+                                         [7396658.353099936619401,
+                                          -6300857.11560364998877],
+                                         [-17963313.143242701888084,
+                                          -6300857.11560364998877]]]}}
+
 
 def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_memory():
-  expected_color = mapnik.Color('white')
-  projection = '+init=epsg:4326'
-  ds = mapnik.MemoryDatasource()
-  context = mapnik.Context()
-  feat = mapnik.Feature.from_geojson(json.dumps(geojson),context)
-  ds.add_feature(feat)
-  s = mapnik.Style()
-  r = mapnik.Rule()
-  sym = mapnik.PolygonSymbolizer()
-  sym.fill = expected_color
-  r.symbols.append(sym)
-  s.rules.append(r)
-  lyr = mapnik.Layer('Layer',projection)
-  lyr.datasource = ds
-  lyr.styles.append('style')
-  m = mapnik.Map(256,256,projection)
-  m.background_color = mapnik.Color('green')
-  m.append_style('style',s)
-  m.layers.append(lyr)
-  # 17/20864/45265.png
-  m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6197514.253362091,-13657768.213995293,6198125.749588372))
-  # works 15/5216/11316.png
-  #m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
-  im = mapnik.Image(256,256)
-  mapnik.render(m,im)
-  eq_(im.get_pixel(128,128),expected_color.packed())
+    expected_color = mapnik.Color('white')
+    projection = '+init=epsg:4326'
+    ds = mapnik.MemoryDatasource()
+    context = mapnik.Context()
+    feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
+    ds.add_feature(feat)
+    s = mapnik.Style()
+    r = mapnik.Rule()
+    sym = mapnik.PolygonSymbolizer()
+    sym.fill = expected_color
+    r.symbols.append(sym)
+    s.rules.append(r)
+    lyr = mapnik.Layer('Layer', projection)
+    lyr.datasource = ds
+    lyr.styles.append('style')
+    m = mapnik.Map(256, 256, projection)
+    m.background_color = mapnik.Color('green')
+    m.append_style('style', s)
+    m.layers.append(lyr)
+    # 17/20864/45265.png
+    m.zoom_to_box(mapnik.Box2d(-13658379.710221574,
+                               6197514.253362091, -13657768.213995293, 6198125.749588372))
+    # works 15/5216/11316.png
+    # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
+    im = mapnik.Image(256, 256)
+    mapnik.render(m, im)
+    eq_(im.get_pixel(128, 128), expected_color.packed())
+
 
 def test_that_coordinates_do_not_overflow_and_polygon_is_rendered_csv():
-  expected_color = mapnik.Color('white')
-  projection = '+init=epsg:4326'
-  ds = mapnik.MemoryDatasource()
-  context = mapnik.Context()
-  feat = mapnik.Feature.from_geojson(json.dumps(geojson),context)
-  ds.add_feature(feat)
-  geojson_string = "geojson\n'%s'" % json.dumps(geojson['geometry'])
-  ds = mapnik.Datasource(**{'type':'csv','inline':geojson_string})
-  s = mapnik.Style()
-  r = mapnik.Rule()
-  sym = mapnik.PolygonSymbolizer()
-  sym.fill = expected_color
-  r.symbols.append(sym)
-  s.rules.append(r)
-  lyr = mapnik.Layer('Layer',projection)
-  lyr.datasource = ds
-  lyr.styles.append('style')
-  m = mapnik.Map(256,256,projection)
-  m.background_color = mapnik.Color('green')
-  m.append_style('style',s)
-  m.layers.append(lyr)
-  # 17/20864/45265.png
-  m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6197514.253362091,-13657768.213995293,6198125.749588372))
-  # works 15/5216/11316.png
-  #m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
-  im = mapnik.Image(256,256)
-  mapnik.render(m,im)
-  eq_(im.get_pixel(128,128),expected_color.packed())
+    expected_color = mapnik.Color('white')
+    projection = '+init=epsg:4326'
+    ds = mapnik.MemoryDatasource()
+    context = mapnik.Context()
+    feat = mapnik.Feature.from_geojson(json.dumps(geojson), context)
+    ds.add_feature(feat)
+    geojson_string = "geojson\n'%s'" % json.dumps(geojson['geometry'])
+    ds = mapnik.Datasource(**{'type': 'csv', 'inline': geojson_string})
+    s = mapnik.Style()
+    r = mapnik.Rule()
+    sym = mapnik.PolygonSymbolizer()
+    sym.fill = expected_color
+    r.symbols.append(sym)
+    s.rules.append(r)
+    lyr = mapnik.Layer('Layer', projection)
+    lyr.datasource = ds
+    lyr.styles.append('style')
+    m = mapnik.Map(256, 256, projection)
+    m.background_color = mapnik.Color('green')
+    m.append_style('style', s)
+    m.layers.append(lyr)
+    # 17/20864/45265.png
+    m.zoom_to_box(mapnik.Box2d(-13658379.710221574,
+                               6197514.253362091, -13657768.213995293, 6198125.749588372))
+    # works 15/5216/11316.png
+    # m.zoom_to_box(mapnik.Box2d(-13658379.710221574,6195679.764683247,-13655933.72531645,6198125.749588372))
+    im = mapnik.Image(256, 256)
+    mapnik.render(m, im)
+    eq_(im.get_pixel(128, 128), expected_color.packed())
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/box2d_test.py b/test/python_tests/box2d_test.py
index c441002..7fe0a9f 100644
--- a/test/python_tests/box2d_test.py
+++ b/test/python_tests/box2d_test.py
@@ -1,16 +1,20 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,assert_true,assert_almost_equal,assert_false
-from utilities import run_all
+from nose.tools import assert_almost_equal, assert_false, assert_true, eq_
+
 import mapnik
 
+from .utilities import run_all
+
+
 def test_coord_init():
     c = mapnik.Coord(100, 100)
 
     eq_(c.x, 100)
     eq_(c.y, 100)
 
+
 def test_coord_multiplication():
     c = mapnik.Coord(100, 100)
     c *= 2
@@ -18,6 +22,7 @@ def test_coord_multiplication():
     eq_(c.x, 200)
     eq_(c.y, 200)
 
+
 def test_envelope_init():
     e = mapnik.Box2d(100, 100, 200, 200)
 
@@ -42,26 +47,27 @@ def test_envelope_init():
     eq_(e.maxx, 200)
     eq_(e.maxy, 200)
 
-    eq_(e[0],100)
-    eq_(e[1],100)
-    eq_(e[2],200)
-    eq_(e[3],200)
-    eq_(e[0],e[-4])
-    eq_(e[1],e[-3])
-    eq_(e[2],e[-2])
-    eq_(e[3],e[-1])
+    eq_(e[0], 100)
+    eq_(e[1], 100)
+    eq_(e[2], 200)
+    eq_(e[3], 200)
+    eq_(e[0], e[-4])
+    eq_(e[1], e[-3])
+    eq_(e[2], e[-2])
+    eq_(e[3], e[-1])
 
     c = e.center()
 
     eq_(c.x, 150)
     eq_(c.y, 150)
 
+
 def test_envelope_static_init():
     e = mapnik.Box2d.from_string('100 100 200 200')
     e2 = mapnik.Box2d.from_string('100,100,200,200')
     e3 = mapnik.Box2d.from_string('100 , 100 , 200 , 200')
-    eq_(e,e2)
-    eq_(e,e3)
+    eq_(e, e2)
+    eq_(e, e3)
 
     assert_true(e.contains(100, 100))
     assert_true(e.contains(100, 200))
@@ -84,35 +90,36 @@ def test_envelope_static_init():
     eq_(e.maxx, 200)
     eq_(e.maxy, 200)
 
-    eq_(e[0],100)
-    eq_(e[1],100)
-    eq_(e[2],200)
-    eq_(e[3],200)
-    eq_(e[0],e[-4])
-    eq_(e[1],e[-3])
-    eq_(e[2],e[-2])
-    eq_(e[3],e[-1])
+    eq_(e[0], 100)
+    eq_(e[1], 100)
+    eq_(e[2], 200)
+    eq_(e[3], 200)
+    eq_(e[0], e[-4])
+    eq_(e[1], e[-3])
+    eq_(e[2], e[-2])
+    eq_(e[3], e[-1])
 
     c = e.center()
 
     eq_(c.x, 150)
     eq_(c.y, 150)
 
+
 def test_envelope_multiplication():
     # no width then no impact of multiplication
     a = mapnik.Box2d(100, 100, 100, 100)
     a *= 5
-    eq_(a.minx,100)
-    eq_(a.miny,100)
-    eq_(a.maxx,100)
-    eq_(a.maxy,100)
+    eq_(a.minx, 100)
+    eq_(a.miny, 100)
+    eq_(a.maxx, 100)
+    eq_(a.maxy, 100)
 
     a = mapnik.Box2d(100.0, 100.0, 100.0, 100.0)
     a *= 5
-    eq_(a.minx,100)
-    eq_(a.miny,100)
-    eq_(a.maxx,100)
-    eq_(a.maxy,100)
+    eq_(a.minx, 100)
+    eq_(a.miny, 100)
+    eq_(a.maxx, 100)
+    eq_(a.maxy, 100)
 
     a = mapnik.Box2d(100.0, 100.0, 100.001, 100.001)
     a *= 5
@@ -123,10 +130,10 @@ def test_envelope_multiplication():
 
     e = mapnik.Box2d(100, 100, 200, 200)
     e *= 2
-    eq_(e.minx,50)
-    eq_(e.miny,50)
-    eq_(e.maxx,250)
-    eq_(e.maxy,250)
+    eq_(e.minx, 50)
+    eq_(e.miny, 50)
+    eq_(e.maxx, 250)
+    eq_(e.maxy, 250)
 
     assert_true(e.contains(50, 50))
     assert_true(e.contains(50, 250))
@@ -154,23 +161,24 @@ def test_envelope_multiplication():
     eq_(c.x, 150)
     eq_(c.y, 150)
 
+
 def test_envelope_clipping():
-    e1 = mapnik.Box2d(-180,-90,180,90)
-    e2 = mapnik.Box2d(-120,40,-110,48)
+    e1 = mapnik.Box2d(-180, -90, 180, 90)
+    e2 = mapnik.Box2d(-120, 40, -110, 48)
     e1.clip(e2)
-    eq_(e1,e2)
+    eq_(e1, e2)
 
     # madagascar in merc
     e1 = mapnik.Box2d(4772116.5490, -2744395.0631, 5765186.4203, -1609458.0673)
     e2 = mapnik.Box2d(5124338.3753, -2240522.1727, 5207501.8621, -2130452.8520)
     e1.clip(e2)
-    eq_(e1,e2)
+    eq_(e1, e2)
 
     # nz in lon/lat
     e1 = mapnik.Box2d(163.8062, -47.1897, 179.3628, -33.9069)
     e2 = mapnik.Box2d(173.7378, -39.6395, 174.4849, -38.9252)
     e1.clip(e2)
-    eq_(e1,e2)
+    eq_(e1, e2)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/buffer_clear_test.py b/test/python_tests/buffer_clear_test.py
index b4b3bda..b94e9e4 100644
--- a/test/python_tests/buffer_clear_test.py
+++ b/test/python_tests/buffer_clear_test.py
@@ -1,32 +1,40 @@
-import os, mapnik
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_clearing_image_data():
-    im = mapnik.Image(256,256)
+    im = mapnik.Image(256, 256)
     # make sure it equals itself
     bytes = im.tostring()
-    eq_(im.tostring(),bytes)
+    eq_(im.tostring(), bytes)
     # set background, then clear
     im.fill(mapnik.Color('green'))
-    eq_(im.tostring()!=bytes,True)
+    eq_(im.tostring() != bytes, True)
     # clear image, should now equal original
     im.clear()
-    eq_(im.tostring(),bytes)
+    eq_(im.tostring(), bytes)
+
 
 def make_map():
     ds = mapnik.MemoryDatasource()
     context = mapnik.Context()
     context.push('Name')
     pixel_key = 1
-    f = mapnik.Feature(context,pixel_key)
+    f = mapnik.Feature(context, pixel_key)
     f['Name'] = str(pixel_key)
-    f.geometry=mapnik.Geometry.from_wkt('POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))')
+    f.geometry = mapnik.Geometry.from_wkt(
+        'POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))')
     ds.add_feature(f)
     s = mapnik.Style()
     r = mapnik.Rule()
@@ -36,25 +44,25 @@ def make_map():
     lyr = mapnik.Layer('Places')
     lyr.datasource = ds
     lyr.styles.append('places_labels')
-    width,height = 256,256
-    m = mapnik.Map(width,height)
-    m.append_style('places_labels',s)
+    width, height = 256, 256
+    m = mapnik.Map(width, height)
+    m.append_style('places_labels', s)
     m.layers.append(lyr)
     m.zoom_all()
     return m
 
 if mapnik.has_grid_renderer():
     def test_clearing_grid_data():
-        g = mapnik.Grid(256,256)
+        g = mapnik.Grid(256, 256)
         utf = g.encode()
         # make sure it equals itself
-        eq_(g.encode(),utf)
+        eq_(g.encode(), utf)
         m = make_map()
-        mapnik.render_layer(m,g,layer=0,fields=['__id__','Name'])
-        eq_(g.encode()!=utf,True)
+        mapnik.render_layer(m, g, layer=0, fields=['__id__', 'Name'])
+        eq_(g.encode() != utf, True)
         # clear grid, should now match original
         g.clear()
-        eq_(g.encode(),utf)
+        eq_(g.encode(), utf)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/cairo_test.py b/test/python_tests/cairo_test.py
index 3c626d4..c6c25a3 100644
--- a/test/python_tests/cairo_test.py
+++ b/test/python_tests/cairo_test.py
@@ -1,23 +1,30 @@
 #!/usr/bin/env python
 
+from __future__ import print_function
+
 import os
 import shutil
-import mapnik
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def make_tmp_map():
-    m = mapnik.Map(512,512)
+    m = mapnik.Map(512, 512)
     m.background_color = mapnik.Color('steelblue')
     ds = mapnik.MemoryDatasource()
     context = mapnik.Context()
     context.push('Name')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     f['Name'] = 'Hello'
     f.geometry = mapnik.Geometry.from_wkt('POINT (0 0)')
     ds.add_feature(f)
@@ -30,49 +37,56 @@ def make_tmp_map():
     lyr = mapnik.Layer('Layer')
     lyr.datasource = ds
     lyr.styles.append('style')
-    m.append_style('style',s)
+    m.append_style('style', s)
     m.layers.append(lyr)
     return m
 
-def draw_title(m,ctx,text,size=10,color=mapnik.Color('black')):
+
+def draw_title(m, ctx, text, size=10, color=mapnik.Color('black')):
     """ Draw a Map Title near the top of a page."""
-    middle = m.width/2.0
+    middle = m.width / 2.0
     ctx.set_source_rgba(*cairo_color(color))
-    ctx.select_font_face("DejaVu Sans Book", cairo.FONT_SLANT_NORMAL, cairo.FONT_WEIGHT_NORMAL)
+    ctx.select_font_face(
+        "DejaVu Sans Book",
+        cairo.FONT_SLANT_NORMAL,
+        cairo.FONT_WEIGHT_NORMAL)
     ctx.set_font_size(size)
     x_bearing, y_bearing, width, height = ctx.text_extents(text)[:4]
     ctx.move_to(middle - width / 2 - x_bearing, 20.0 - height / 2 - y_bearing)
     ctx.show_text(text)
 
-def draw_neatline(m,ctx):
-    w,h = m.width, m.height
+
+def draw_neatline(m, ctx):
+    w, h = m.width, m.height
     ctx.set_source_rgba(*cairo_color(mapnik.Color('black')))
     outline = [
-      [0,0],[w,0],[w,h],[0,h]
+        [0, 0], [w, 0], [w, h], [0, h]
     ]
     ctx.set_line_width(1)
-    for idx,pt in enumerate(outline):
+    for idx, pt in enumerate(outline):
         if (idx == 0):
-          ctx.move_to(*pt)
+            ctx.move_to(*pt)
         else:
-          ctx.line_to(*pt)
+            ctx.line_to(*pt)
     ctx.close_path()
     inset = 6
     inline = [
-      [inset,inset],[w-inset,inset],[w-inset,h-inset],[inset,h-inset]
+        [inset, inset], [w - inset, inset], [w -
+                                             inset, h - inset], [inset, h - inset]
     ]
-    ctx.set_line_width(inset/2)
-    for idx,pt in enumerate(inline):
+    ctx.set_line_width(inset / 2)
+    for idx, pt in enumerate(inline):
         if (idx == 0):
-          ctx.move_to(*pt)
+            ctx.move_to(*pt)
         else:
-          ctx.line_to(*pt)
+            ctx.line_to(*pt)
     ctx.close_path()
     ctx.stroke()
 
+
 def cairo_color(c):
     """ Return a Cairo color tuple from a Mapnik Color."""
-    ctx_c = (c.r/255.0,c.g/255.0,c.b/255.0,c.a/255.0)
+    ctx_c = (c.r / 255.0, c.g / 255.0, c.b / 255.0, c.a / 255.0)
     return ctx_c
 
 if mapnik.has_pycairo():
@@ -80,116 +94,147 @@ if mapnik.has_pycairo():
 
     def test_passing_pycairo_context_svg():
         m = make_tmp_map()
-        m.zoom_to_box(mapnik.Box2d(-180,-90,180,90))
+        m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
         test_cairo_file = '/tmp/mapnik-cairo-context-test.svg'
         surface = cairo.SVGSurface(test_cairo_file, m.width, m.height)
         expected_cairo_file = './images/pycairo/cairo-cairo-expected.svg'
         context = cairo.Context(surface)
-        mapnik.render(m,context)
-        draw_title(m,context,"Hello Map",size=20)
-        draw_neatline(m,context)
+        mapnik.render(m, context)
+        draw_title(m, context, "Hello Map", size=20)
+        draw_neatline(m, context)
         surface.finish()
         if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'):
-            print 'generated expected cairo surface file %s' % expected_cairo_file
-            shutil.copy(test_cairo_file,expected_cairo_file)
-        diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size)
-        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file)
-        eq_( diff < 1500, True, msg)
+            print('generated expected cairo surface file', expected_cairo_file)
+            shutil.copy(test_cairo_file, expected_cairo_file)
+        diff = abs(
+            os.stat(expected_cairo_file).st_size -
+            os.stat(test_cairo_file).st_size)
+        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+            diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+        eq_(diff < 1500, True, msg)
         os.remove(test_cairo_file)
 
     def test_passing_pycairo_context_pdf():
         m = make_tmp_map()
-        m.zoom_to_box(mapnik.Box2d(-180,-90,180,90))
+        m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
         test_cairo_file = '/tmp/mapnik-cairo-context-test.pdf'
         surface = cairo.PDFSurface(test_cairo_file, m.width, m.height)
         expected_cairo_file = './images/pycairo/cairo-cairo-expected.pdf'
         context = cairo.Context(surface)
-        mapnik.render(m,context)
-        draw_title(m,context,"Hello Map",size=20)
-        draw_neatline(m,context)
+        mapnik.render(m, context)
+        draw_title(m, context, "Hello Map", size=20)
+        draw_neatline(m, context)
         surface.finish()
         if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'):
-            print 'generated expected cairo surface file %s' % expected_cairo_file
-            shutil.copy(test_cairo_file,expected_cairo_file)
-        diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size)
-        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file)
-        eq_( diff < 1500, True, msg)
+            print('generated expected cairo surface file', expected_cairo_file)
+            shutil.copy(test_cairo_file, expected_cairo_file)
+        diff = abs(
+            os.stat(expected_cairo_file).st_size -
+            os.stat(test_cairo_file).st_size)
+        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+            diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+        eq_(diff < 1500, True, msg)
         os.remove(test_cairo_file)
 
     def test_passing_pycairo_context_png():
         m = make_tmp_map()
-        m.zoom_to_box(mapnik.Box2d(-180,-90,180,90))
+        m.zoom_to_box(mapnik.Box2d(-180, -90, 180, 90))
         test_cairo_file = '/tmp/mapnik-cairo-context-test.png'
         surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, m.width, m.height)
         expected_cairo_file = './images/pycairo/cairo-cairo-expected.png'
         expected_cairo_file2 = './images/pycairo/cairo-cairo-expected-reduced.png'
         context = cairo.Context(surface)
-        mapnik.render(m,context)
-        draw_title(m,context,"Hello Map",size=20)
-        draw_neatline(m,context)
+        mapnik.render(m, context)
+        draw_title(m, context, "Hello Map", size=20)
+        draw_neatline(m, context)
         surface.write_to_png(test_cairo_file)
-        reduced_color_image = test_cairo_file.replace('png','-mapnik.png')
+        reduced_color_image = test_cairo_file.replace('png', '-mapnik.png')
         im = mapnik.Image.from_cairo(surface)
-        im.save(reduced_color_image,'png8')
+        im.save(reduced_color_image, 'png8')
         surface.finish()
         if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'):
-            print 'generated expected cairo surface file %s' % expected_cairo_file
-            shutil.copy(test_cairo_file,expected_cairo_file)
-        diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size)
-        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file)
-        eq_( diff < 500, True, msg)
+            print('generated expected cairo surface file', expected_cairo_file)
+            shutil.copy(test_cairo_file, expected_cairo_file)
+        diff = abs(
+            os.stat(expected_cairo_file).st_size -
+            os.stat(test_cairo_file).st_size)
+        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+            diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+        eq_(diff < 500, True, msg)
         os.remove(test_cairo_file)
-        if not os.path.exists(expected_cairo_file2) or os.environ.get('UPDATE'):
-            print 'generated expected cairo surface file %s' % expected_cairo_file2
-            shutil.copy(reduced_color_image,expected_cairo_file2)
-        diff = abs(os.stat(expected_cairo_file2).st_size-os.stat(reduced_color_image).st_size)
-        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,reduced_color_image,'tests/python_tests/'+ expected_cairo_file2)
-        eq_( diff < 500, True, msg)
+        if not os.path.exists(
+                expected_cairo_file2) or os.environ.get('UPDATE'):
+            print(
+                'generated expected cairo surface file',
+                expected_cairo_file2)
+            shutil.copy(reduced_color_image, expected_cairo_file2)
+        diff = abs(
+            os.stat(expected_cairo_file2).st_size -
+            os.stat(reduced_color_image).st_size)
+        msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+            diff, reduced_color_image, 'tests/python_tests/' + expected_cairo_file2)
+        eq_(diff < 500, True, msg)
         os.remove(reduced_color_image)
 
     if 'sqlite' in mapnik.DatasourceCache.plugin_names():
-        def _pycairo_surface(type,sym):
-                test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % (sym,type)
-                expected_cairo_file = './images/pycairo/cairo-surface-expected.%s.%s' % (sym,type)
-                m = mapnik.Map(256,256)
-                mapnik.load_map(m,'../data/good_maps/%s_symbolizer.xml' % sym)
-                m.zoom_all()
-                if hasattr(cairo,'%sSurface' % type.upper()):
-                    surface = getattr(cairo,'%sSurface' % type.upper())(test_cairo_file, m.width,m.height)
-                    mapnik.render(m, surface)
-                    surface.finish()
-                    if not os.path.exists(expected_cairo_file) or os.environ.get('UPDATE'):
-                        print 'generated expected cairo surface file %s' % expected_cairo_file
-                        shutil.copy(test_cairo_file,expected_cairo_file)
-                    diff = abs(os.stat(expected_cairo_file).st_size-os.stat(test_cairo_file).st_size)
-                    msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (diff,test_cairo_file,'tests/python_tests/'+ expected_cairo_file)
-                    if os.uname()[0] == 'Darwin':
-                        eq_( diff < 2100, True, msg)
-                    else:
-                        eq_( diff < 23000, True, msg)
-                    os.remove(test_cairo_file)
-                    return True
+        def _pycairo_surface(type, sym):
+            test_cairo_file = '/tmp/mapnik-cairo-surface-test.%s.%s' % (
+                sym, type)
+            expected_cairo_file = './images/pycairo/cairo-surface-expected.%s.%s' % (
+                sym, type)
+            m = mapnik.Map(256, 256)
+            mapnik.load_map(m, '../data/good_maps/%s_symbolizer.xml' % sym)
+            m.zoom_all()
+            if hasattr(cairo, '%sSurface' % type.upper()):
+                surface = getattr(
+                    cairo,
+                    '%sSurface' %
+                    type.upper())(
+                    test_cairo_file,
+                    m.width,
+                    m.height)
+                mapnik.render(m, surface)
+                surface.finish()
+                if not os.path.exists(
+                        expected_cairo_file) or os.environ.get('UPDATE'):
+                    print(
+                        'generated expected cairo surface file',
+                        expected_cairo_file)
+                    shutil.copy(test_cairo_file, expected_cairo_file)
+                diff = abs(
+                    os.stat(expected_cairo_file).st_size -
+                    os.stat(test_cairo_file).st_size)
+                msg = 'diff in size (%s) between actual (%s) and expected(%s)' % (
+                    diff, test_cairo_file, 'tests/python_tests/' + expected_cairo_file)
+                if os.uname()[0] == 'Darwin':
+                    eq_(diff < 2100, True, msg)
                 else:
-                    print 'skipping cairo.%s test since surface is not available' % type.upper()
-                    return True
+                    eq_(diff < 23000, True, msg)
+                os.remove(test_cairo_file)
+                return True
+            else:
+                print(
+                    'skipping cairo.%s test since surface is not available' %
+                    type.upper())
+                return True
 
         def test_pycairo_svg_surface1():
-            eq_(_pycairo_surface('svg','point'),True)
+            eq_(_pycairo_surface('svg', 'point'), True)
 
         def test_pycairo_svg_surface2():
-            eq_(_pycairo_surface('svg','building'),True)
+            eq_(_pycairo_surface('svg', 'building'), True)
 
         def test_pycairo_svg_surface3():
-            eq_(_pycairo_surface('svg','polygon'),True)
+            eq_(_pycairo_surface('svg', 'polygon'), True)
 
         def test_pycairo_pdf_surface1():
-            eq_(_pycairo_surface('pdf','point'),True)
+            eq_(_pycairo_surface('pdf', 'point'), True)
 
         def test_pycairo_pdf_surface2():
-            eq_(_pycairo_surface('pdf','building'),True)
+            eq_(_pycairo_surface('pdf', 'building'), True)
 
         def test_pycairo_pdf_surface3():
-            eq_(_pycairo_surface('pdf','polygon'),True)
+            eq_(_pycairo_surface('pdf', 'polygon'), True)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/color_test.py b/test/python_tests/color_test.py
index 900faf1..4288431 100644
--- a/test/python_tests/color_test.py
+++ b/test/python_tests/color_test.py
@@ -1,17 +1,21 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import sys
-import os, mapnik
-from timeit import Timer, time
-from nose.tools import *
-from utilities import execution_path, run_all, get_unique_colors
+import os
+
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_color_init():
     c = mapnik.Color(12, 128, 255)
     eq_(c.r, 12)
@@ -25,7 +29,7 @@ def test_color_init():
     eq_(c.b, 64)
     eq_(c.a, 128)
     eq_(False, c.get_premultiplied())
-    c = mapnik.Color(16, 32, 64, 128,True)
+    c = mapnik.Color(16, 32, 64, 128, True)
     eq_(c.r, 16)
     eq_(c.g, 32)
     eq_(c.b, 64)
@@ -74,6 +78,7 @@ def test_color_init():
     eq_(rgba_int, c.packed())
     eq_(True, c.get_premultiplied())
 
+
 def test_color_properties():
     c = mapnik.Color(16, 32, 64, 128)
     eq_(c.r, 16)
@@ -89,6 +94,7 @@ def test_color_properties():
     c.a = 128
     eq_(c.a, 128)
 
+
 def test_color_premultiply():
     c = mapnik.Color(16, 33, 255, 128)
     eq_(c.premultiply(), True)
@@ -105,10 +111,10 @@ def test_color_premultiply():
     c.demultiply()
     c.demultiply()
     # This will not return the same values as before but we expect that
-    eq_(c.r,15)
-    eq_(c.g,33)
-    eq_(c.b,255)
-    eq_(c.a,128)
+    eq_(c.r, 15)
+    eq_(c.g, 33)
+    eq_(c.b, 255)
+    eq_(c.a, 128)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/compare_test.py b/test/python_tests/compare_test.py
index f4b6563..bb8397a 100644
--- a/test/python_tests/compare_test.py
+++ b/test/python_tests/compare_test.py
@@ -1,111 +1,123 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
-from nose.tools import *
-from utilities import execution_path, run_all
+import os
+
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_another_compare():
-    im = mapnik.Image(5,5)
-    im2 = mapnik.Image(5,5)
+    im = mapnik.Image(5, 5)
+    im2 = mapnik.Image(5, 5)
     im2.fill(mapnik.Color('rgba(255,255,255,0)'))
-    eq_(im.compare(im2,16), im.width() * im.height())
+    eq_(im.compare(im2, 16), im.width() * im.height())
+
 
 def test_compare_rgba8():
-    im = mapnik.Image(5,5,mapnik.ImageType.rgba8)
-    im.fill(mapnik.Color(0,0,0,0))
+    im = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+    im.fill(mapnik.Color(0, 0, 0, 0))
     eq_(im.compare(im), 0)
-    im2 = mapnik.Image(5,5,mapnik.ImageType.rgba8)
-    im2.fill(mapnik.Color(0,0,0,0))
+    im2 = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+    im2.fill(mapnik.Color(0, 0, 0, 0))
     eq_(im.compare(im2), 0)
     eq_(im2.compare(im), 0)
-    im2.fill(mapnik.Color(0,0,0,12))
+    im2.fill(mapnik.Color(0, 0, 0, 12))
     eq_(im.compare(im2), 25)
     eq_(im.compare(im2, 0, False), 0)
-    im3 = mapnik.Image(5,5,mapnik.ImageType.rgba8)
-    im3.set_pixel(0,0, mapnik.Color(0,0,0,0))
-    im3.set_pixel(0,1, mapnik.Color(1,1,1,1))
-    im3.set_pixel(1,0, mapnik.Color(2,2,2,2))
-    im3.set_pixel(1,1, mapnik.Color(3,3,3,3))
+    im3 = mapnik.Image(5, 5, mapnik.ImageType.rgba8)
+    im3.set_pixel(0, 0, mapnik.Color(0, 0, 0, 0))
+    im3.set_pixel(0, 1, mapnik.Color(1, 1, 1, 1))
+    im3.set_pixel(1, 0, mapnik.Color(2, 2, 2, 2))
+    im3.set_pixel(1, 1, mapnik.Color(3, 3, 3, 3))
     eq_(im.compare(im3), 3)
-    eq_(im.compare(im3,1),2)
-    eq_(im.compare(im3,2),1)
-    eq_(im.compare(im3,3),0)
+    eq_(im.compare(im3, 1), 2)
+    eq_(im.compare(im3, 2), 1)
+    eq_(im.compare(im3, 3), 0)
+
 
 def test_compare_2_image():
-    im = mapnik.Image(5,5)
-    im.set_pixel(0,0, mapnik.Color(254, 254, 254, 254))
-    im.set_pixel(4,4, mapnik.Color('white'))
-    im2 = mapnik.Image(5,5)
-    eq_(im2.compare(im,16), 2)
+    im = mapnik.Image(5, 5)
+    im.set_pixel(0, 0, mapnik.Color(254, 254, 254, 254))
+    im.set_pixel(4, 4, mapnik.Color('white'))
+    im2 = mapnik.Image(5, 5)
+    eq_(im2.compare(im, 16), 2)
+
 
 def test_compare_dimensions():
-    im = mapnik.Image(2,2)
-    im2 = mapnik.Image(3,3)
+    im = mapnik.Image(2, 2)
+    im2 = mapnik.Image(3, 3)
     eq_(im.compare(im2), 4)
     eq_(im2.compare(im), 9)
 
+
 def test_compare_gray8():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray8)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray8)
     im.fill(0)
     eq_(im.compare(im), 0)
-    im2 = mapnik.Image(2,2,mapnik.ImageType.gray8)
+    im2 = mapnik.Image(2, 2, mapnik.ImageType.gray8)
     im2.fill(0)
     eq_(im.compare(im2), 0)
     eq_(im2.compare(im), 0)
     eq_(im.compare(im2, 0, False), 0)
-    im3 = mapnik.Image(2,2,mapnik.ImageType.gray8)
-    im3.set_pixel(0,0,0)
-    im3.set_pixel(0,1,1)
-    im3.set_pixel(1,0,2)
-    im3.set_pixel(1,1,3)
-    eq_(im.compare(im3),3)
-    eq_(im.compare(im3,1),2)
-    eq_(im.compare(im3,2),1)
-    eq_(im.compare(im3,3),0)
+    im3 = mapnik.Image(2, 2, mapnik.ImageType.gray8)
+    im3.set_pixel(0, 0, 0)
+    im3.set_pixel(0, 1, 1)
+    im3.set_pixel(1, 0, 2)
+    im3.set_pixel(1, 1, 3)
+    eq_(im.compare(im3), 3)
+    eq_(im.compare(im3, 1), 2)
+    eq_(im.compare(im3, 2), 1)
+    eq_(im.compare(im3, 3), 0)
+
 
 def test_compare_gray16():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray16)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
     im.fill(0)
     eq_(im.compare(im), 0)
-    im2 = mapnik.Image(2,2,mapnik.ImageType.gray16)
+    im2 = mapnik.Image(2, 2, mapnik.ImageType.gray16)
     im2.fill(0)
     eq_(im.compare(im2), 0)
     eq_(im2.compare(im), 0)
     eq_(im.compare(im2, 0, False), 0)
-    im3 = mapnik.Image(2,2,mapnik.ImageType.gray16)
-    im3.set_pixel(0,0,0)
-    im3.set_pixel(0,1,1)
-    im3.set_pixel(1,0,2)
-    im3.set_pixel(1,1,3)
-    eq_(im.compare(im3),3)
-    eq_(im.compare(im3,1),2)
-    eq_(im.compare(im3,2),1)
-    eq_(im.compare(im3,3),0)
+    im3 = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+    im3.set_pixel(0, 0, 0)
+    im3.set_pixel(0, 1, 1)
+    im3.set_pixel(1, 0, 2)
+    im3.set_pixel(1, 1, 3)
+    eq_(im.compare(im3), 3)
+    eq_(im.compare(im3, 1), 2)
+    eq_(im.compare(im3, 2), 1)
+    eq_(im.compare(im3, 3), 0)
+
 
 def test_compare_gray32f():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray32f)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
     im.fill(0.5)
     eq_(im.compare(im), 0)
-    im2 = mapnik.Image(2,2,mapnik.ImageType.gray32f)
+    im2 = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
     im2.fill(0.5)
     eq_(im.compare(im2), 0)
     eq_(im2.compare(im), 0)
     eq_(im.compare(im2, 0, False), 0)
-    im3 = mapnik.Image(2,2,mapnik.ImageType.gray32f)
-    im3.set_pixel(0,0,0.5)
-    im3.set_pixel(0,1,1.5)
-    im3.set_pixel(1,0,2.5)
-    im3.set_pixel(1,1,3.5)
-    eq_(im.compare(im3),3)
-    eq_(im.compare(im3,1.0),2)
-    eq_(im.compare(im3,2.0),1)
-    eq_(im.compare(im3,3.0),0)
+    im3 = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
+    im3.set_pixel(0, 0, 0.5)
+    im3.set_pixel(0, 1, 1.5)
+    im3.set_pixel(1, 0, 2.5)
+    im3.set_pixel(1, 1, 3.5)
+    eq_(im.compare(im3), 3)
+    eq_(im.compare(im3, 1.0), 2)
+    eq_(im.compare(im3, 2.0), 1)
+    eq_(im.compare(im3, 3.0), 0)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/compositing_test.py b/test/python_tests/compositing_test.py
index a0c8255..ac09ef9 100644
--- a/test/python_tests/compositing_test.py
+++ b/test/python_tests/compositing_test.py
@@ -1,25 +1,36 @@
-#encoding: utf8
+# encoding: utf8
+
+from __future__ import print_function
 
-from nose.tools import eq_
 import os
-from utilities import execution_path, run_all
-from utilities import get_unique_colors, pixel2channels, side_by_side_image
+
+from nose.tools import eq_
+
 import mapnik
 
+from .utilities import (execution_path, get_unique_colors, pixel2channels,
+                        run_all, side_by_side_image)
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
-def is_pre(color,alpha):
-    return (color*255.0/alpha) <= 255
 
-def debug_image(image,step=2):
-    for x in range(0,image.width(),step):
-        for y in range(0,image.height(),step):
-            pixel = image.get_pixel(x,y)
-            red,green,blue,alpha = pixel2channels(pixel)
-            print "rgba(%s,%s,%s,%s) at %s,%s" % (red,green,blue,alpha,x,y)
+def is_pre(color, alpha):
+    return (color * 255.0 / alpha) <= 255
+
+
+def debug_image(image, step=2):
+    for x in range(0, image.width(), step):
+        for y in range(0, image.height(), step):
+            pixel = image.get_pixel(x, y)
+            red, green, blue, alpha = pixel2channels(pixel)
+            print(
+                "rgba(%s,%s,%s,%s) at %s,%s" %
+                (red, green, blue, alpha, x, y))
+
 
 def replace_style(m, name, style):
     m.remove_style(name)
@@ -27,52 +38,60 @@ def replace_style(m, name, style):
 
 # note: it is impossible to know for all pixel colors
 # we can only detect likely cases of non premultiplied colors
+
+
 def validate_pixels_are_not_premultiplied(image):
     over_alpha = False
     transparent = True
     fully_opaque = True
-    for x in range(0,image.width(),2):
-        for y in range(0,image.height(),2):
-            pixel = image.get_pixel(x,y)
-            red,green,blue,alpha = pixel2channels(pixel)
+    for x in range(0, image.width(), 2):
+        for y in range(0, image.height(), 2):
+            pixel = image.get_pixel(x, y)
+            red, green, blue, alpha = pixel2channels(pixel)
             if alpha > 0:
                 transparent = False
                 if alpha < 255:
                     fully_opaque = False
-                color_max = max(red,green,blue)
+                color_max = max(red, green, blue)
                 if color_max > alpha:
                     over_alpha = True
     return over_alpha or transparent or fully_opaque
 
+
 def validate_pixels_are_not_premultiplied2(image):
     looks_not_multiplied = False
-    for x in range(0,image.width(),2):
-        for y in range(0,image.height(),2):
-            pixel = image.get_pixel(x,y)
-            red,green,blue,alpha = pixel2channels(pixel)
-            #each value of the color channels will never be bigger than that of the alpha channel.
+    for x in range(0, image.width(), 2):
+        for y in range(0, image.height(), 2):
+            pixel = image.get_pixel(x, y)
+            red, green, blue, alpha = pixel2channels(pixel)
+            # each value of the color channels will never be bigger than that
+            # of the alpha channel.
             if alpha > 0:
                 if red > 0 and red > alpha:
-                    print 'red: %s, a: %s' % (red,alpha)
+                    print('red: %s, a: %s' % (red, alpha))
                     looks_not_multiplied = True
     return looks_not_multiplied
 
+
 def validate_pixels_are_premultiplied(image):
     bad_pixels = []
-    for x in range(0,image.width(),2):
-        for y in range(0,image.height(),2):
-            pixel = image.get_pixel(x,y)
-            red,green,blue,alpha = pixel2channels(pixel)
+    for x in range(0, image.width(), 2):
+        for y in range(0, image.height(), 2):
+            pixel = image.get_pixel(x, y)
+            red, green, blue, alpha = pixel2channels(pixel)
             if alpha > 0:
-                pixel = image.get_pixel(x,y)
-                is_valid = ((0 <= red <= alpha) and is_pre(red,alpha)) \
-                        and ((0 <= green <= alpha) and is_pre(green,alpha)) \
-                        and ((0 <= blue <= alpha) and is_pre(blue,alpha)) \
-                        and (alpha >= 0 and alpha <= 255)
+                pixel = image.get_pixel(x, y)
+                is_valid = ((0 <= red <= alpha) and is_pre(red, alpha)) \
+                    and ((0 <= green <= alpha) and is_pre(green, alpha)) \
+                    and ((0 <= blue <= alpha) and is_pre(blue, alpha)) \
+                    and (alpha >= 0 and alpha <= 255)
                 if not is_valid:
-                    bad_pixels.append("rgba(%s,%s,%s,%s) at %s,%s" % (red,green,blue,alpha,x,y))
+                    bad_pixels.append(
+                        "rgba(%s,%s,%s,%s) at %s,%s" %
+                        (red, green, blue, alpha, x, y))
     num_bad = len(bad_pixels)
-    return (num_bad == 0,bad_pixels)
+    return (num_bad == 0, bad_pixels)
+
 
 def test_compare_images():
     b = mapnik.Image.open('./images/support/b.png')
@@ -83,71 +102,83 @@ def test_compare_images():
     for name in mapnik.CompositeOp.names:
         a = mapnik.Image.open('./images/support/a.png')
         a.premultiply()
-        a.composite(b,getattr(mapnik.CompositeOp,name))
+        a.composite(b, getattr(mapnik.CompositeOp, name))
         actual = '/tmp/mapnik-comp-op-test-' + name + '.png'
         expected = 'images/composited/' + name + '.png'
         valid = validate_pixels_are_premultiplied(a)
         if not valid[0]:
-            fails.append('%s not validly premultiplied!:\n\t %s pixels (%s)' % (name,len(valid[1]),valid[1][0]))
+            fails.append(
+                '%s not validly premultiplied!:\n\t %s pixels (%s)' %
+                (name, len(
+                    valid[1]), valid[1][0]))
         a.demultiply()
         if not validate_pixels_are_not_premultiplied(a):
             fails.append('%s not validly demultiplied' % (name))
-        a.save(actual,'png32')
+        a.save(actual, 'png32')
         if not os.path.exists(expected) or os.environ.get('UPDATE'):
-            print 'generating expected test image: %s' % expected
-            a.save(expected,'png32')
+            print('generating expected test image: %s' % expected)
+            a.save(expected, 'png32')
         expected_im = mapnik.Image.open(expected)
         # compare them
         if a.tostring('png32') == expected_im.tostring('png32'):
             successes.append(name)
         else:
-            fails.append('failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected))
+            fails.append(
+                'failed comparing actual (%s) and expected(%s)' %
+                (actual, 'tests/python_tests/' + expected))
             fail_im = side_by_side_image(expected_im, a)
-            fail_im.save('/tmp/mapnik-comp-op-test-' + name + '.fail.png','png32')
-    eq_(len(successes),num_ops,'\n'+'\n'.join(fails))
+            fail_im.save(
+                '/tmp/mapnik-comp-op-test-' +
+                name +
+                '.fail.png',
+                'png32')
+    eq_(len(successes), num_ops, '\n' + '\n'.join(fails))
     b.demultiply()
     # b will be slightly modified by pre and then de multiplication rounding errors
     # TODO - write test to ensure the image is 99% the same.
     #expected_b = mapnik.Image.open('./images/support/b.png')
-    #b.save('/tmp/mapnik-comp-op-test-original-mask.png')
+    # b.save('/tmp/mapnik-comp-op-test-original-mask.png')
     #eq_(b.tostring('png32'),expected_b.tostring('png32'), '/tmp/mapnik-comp-op-test-original-mask.png is no longer equivalent to original mask: ./images/support/b.png')
 
+
 def test_pre_multiply_status():
     b = mapnik.Image.open('./images/support/b.png')
     # not premultiplied yet, should appear that way
     result = validate_pixels_are_not_premultiplied(b)
-    eq_(result,True)
+    eq_(result, True)
     # not yet premultiplied therefore should return false
     result = validate_pixels_are_premultiplied(b)
-    eq_(result[0],False)
+    eq_(result[0], False)
     # now actually premultiply the pixels
     b.premultiply()
     # now checking if premultiplied should succeed
     result = validate_pixels_are_premultiplied(b)
-    eq_(result[0],True)
+    eq_(result[0], True)
     # should now not appear to look not premultiplied
     result = validate_pixels_are_not_premultiplied(b)
-    eq_(result,False)
+    eq_(result, False)
     # now actually demultiply the pixels
     b.demultiply()
     # should now appear demultiplied
     result = validate_pixels_are_not_premultiplied(b)
-    eq_(result,True)
+    eq_(result, True)
+
 
 def test_pre_multiply_status_of_map1():
-    m = mapnik.Map(256,256)
-    im = mapnik.Image(m.width,m.height)
-    eq_(validate_pixels_are_not_premultiplied(im),True)
-    mapnik.render(m,im)
-    eq_(validate_pixels_are_not_premultiplied(im),True)
+    m = mapnik.Map(256, 256)
+    im = mapnik.Image(m.width, m.height)
+    eq_(validate_pixels_are_not_premultiplied(im), True)
+    mapnik.render(m, im)
+    eq_(validate_pixels_are_not_premultiplied(im), True)
+
 
 def test_pre_multiply_status_of_map2():
-    m = mapnik.Map(256,256)
-    m.background = mapnik.Color(1,1,1,255)
-    im = mapnik.Image(m.width,m.height)
-    eq_(validate_pixels_are_not_premultiplied(im),True)
-    mapnik.render(m,im)
-    eq_(validate_pixels_are_not_premultiplied(im),True)
+    m = mapnik.Map(256, 256)
+    m.background = mapnik.Color(1, 1, 1, 255)
+    im = mapnik.Image(m.width, m.height)
+    eq_(validate_pixels_are_not_premultiplied(im), True)
+    mapnik.render(m, im)
+    eq_(validate_pixels_are_not_premultiplied(im), True)
 
 if 'shape' in mapnik.DatasourceCache.plugin_names():
     def test_style_level_comp_op():
@@ -166,84 +197,97 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
             mapnik.render(m, im)
             actual = '/tmp/mapnik-style-comp-op-' + name + '.png'
             expected = 'images/style-comp-op/' + name + '.png'
-            im.save(actual,'png32')
+            im.save(actual, 'png32')
             if not os.path.exists(expected) or os.environ.get('UPDATE'):
-                print 'generating expected test image: %s' % expected
-                im.save(expected,'png32')
+                print('generating expected test image: %s' % expected)
+                im.save(expected, 'png32')
             expected_im = mapnik.Image.open(expected)
             # compare them
             if im.tostring('png32') == expected_im.tostring('png32'):
                 successes.append(name)
             else:
-                fails.append('failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected))
+                fails.append(
+                    'failed comparing actual (%s) and expected(%s)' %
+                    (actual, 'tests/python_tests/' + expected))
                 fail_im = side_by_side_image(expected_im, im)
-                fail_im.save('/tmp/mapnik-style-comp-op-' + name + '.fail.png','png32')
-        eq_(len(fails), 0, '\n'+'\n'.join(fails))
+                fail_im.save(
+                    '/tmp/mapnik-style-comp-op-' +
+                    name +
+                    '.fail.png',
+                    'png32')
+        eq_(len(fails), 0, '\n' + '\n'.join(fails))
 
     def test_style_level_opacity():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/style_level_opacity_and_blur.xml')
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(
+            m, '../data/good_maps/style_level_opacity_and_blur.xml')
         m.zoom_all()
-        im = mapnik.Image(512,512)
-        mapnik.render(m,im)
+        im = mapnik.Image(512, 512)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-style-level-opacity.png'
         expected = 'images/support/mapnik-style-level-opacity.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'tests/python_tests/' + expected))
+
 
 def test_rounding_and_color_expectations():
-    m = mapnik.Map(1,1)
+    m = mapnik.Map(1, 1)
     m.background = mapnik.Color('rgba(255,255,255,.4999999)')
-    im = mapnik.Image(m.width,m.height)
-    mapnik.render(m,im)
-    eq_(get_unique_colors(im),['rgba(255,255,255,127)'])
-    m = mapnik.Map(1,1)
+    im = mapnik.Image(m.width, m.height)
+    mapnik.render(m, im)
+    eq_(get_unique_colors(im), ['rgba(255,255,255,127)'])
+    m = mapnik.Map(1, 1)
     m.background = mapnik.Color('rgba(255,255,255,.5)')
-    im = mapnik.Image(m.width,m.height)
-    mapnik.render(m,im)
-    eq_(get_unique_colors(im),['rgba(255,255,255,128)'])
+    im = mapnik.Image(m.width, m.height)
+    mapnik.render(m, im)
+    eq_(get_unique_colors(im), ['rgba(255,255,255,128)'])
     im_file = mapnik.Image.open('../data/images/stripes_pattern.png')
-    eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
+    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
     # should have no effect
     im_file.premultiply()
-    eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
+    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,255)'])
     im_file.apply_opacity(.5)
     # should have effect now that image has transparency
     im_file.premultiply()
-    eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(37,37,37,127)'])
+    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(37,37,37,127)'])
     # should restore to original nonpremultiplied colors
     im_file.demultiply()
-    eq_(get_unique_colors(im_file),['rgba(0,0,0,0)', 'rgba(74,74,74,127)'])
+    eq_(get_unique_colors(im_file), ['rgba(0,0,0,0)', 'rgba(74,74,74,127)'])
 
 
 def test_background_image_and_background_color():
-    m = mapnik.Map(8,8)
+    m = mapnik.Map(8, 8)
     m.background = mapnik.Color('rgba(255,255,255,.5)')
     m.background_image = '../data/images/stripes_pattern.png'
-    im = mapnik.Image(m.width,m.height)
-    mapnik.render(m,im)
-    eq_(get_unique_colors(im),['rgba(255,255,255,128)', 'rgba(74,74,74,255)'])
+    im = mapnik.Image(m.width, m.height)
+    mapnik.render(m, im)
+    eq_(get_unique_colors(im), ['rgba(255,255,255,128)', 'rgba(74,74,74,255)'])
+
 
 def test_background_image_with_alpha_and_background_color():
-    m = mapnik.Map(10,10)
+    m = mapnik.Map(10, 10)
     m.background = mapnik.Color('rgba(255,255,255,.5)')
     m.background_image = '../data/images/yellow_half_trans.png'
-    im = mapnik.Image(m.width,m.height)
-    mapnik.render(m,im)
-    eq_(get_unique_colors(im),['rgba(255,255,85,191)'])
+    im = mapnik.Image(m.width, m.height)
+    mapnik.render(m, im)
+    eq_(get_unique_colors(im), ['rgba(255,255,85,191)'])
+
 
 def test_background_image_with_alpha_and_background_color_against_composited_control():
-    m = mapnik.Map(10,10)
+    m = mapnik.Map(10, 10)
     m.background = mapnik.Color('rgba(255,255,255,.5)')
     m.background_image = '../data/images/yellow_half_trans.png'
-    im = mapnik.Image(m.width,m.height)
-    mapnik.render(m,im)
+    im = mapnik.Image(m.width, m.height)
+    mapnik.render(m, im)
     # create and composite the expected result
-    im1 = mapnik.Image(10,10)
+    im1 = mapnik.Image(10, 10)
     im1.fill(mapnik.Color('rgba(255,255,255,.5)'))
     im1.premultiply()
-    im2 = mapnik.Image(10,10)
+    im2 = mapnik.Image(10, 10)
     im2.fill(mapnik.Color('rgba(255,255,0,.5)'))
     im2.premultiply()
     im1.composite(im2)
@@ -251,7 +295,7 @@ def test_background_image_with_alpha_and_background_color_against_composited_con
     # compare image rendered (compositing in `agg_renderer<T>::setup`)
     # vs image composited via python bindings
     #raise Todo("looks like we need to investigate PNG color rounding when saving")
-    #eq_(get_unique_colors(im),get_unique_colors(im1))
+    # eq_(get_unique_colors(im),get_unique_colors(im1))
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/copy_test.py b/test/python_tests/copy_test.py
index d3cf9b1..b4aa45d 100644
--- a/test/python_tests/copy_test.py
+++ b/test/python_tests/copy_test.py
@@ -1,47 +1,55 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
-from nose.tools import *
-from utilities import execution_path, run_all
+import os
+
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_image_16_8_simple():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray16)
-    im.set_pixel(0,0, 256)
-    im.set_pixel(0,1, 999)
-    im.set_pixel(1,0, 5)
-    im.set_pixel(1,1, 2)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+    im.set_pixel(0, 0, 256)
+    im.set_pixel(0, 1, 999)
+    im.set_pixel(1, 0, 5)
+    im.set_pixel(1, 1, 2)
     im2 = im.copy(mapnik.ImageType.gray8)
-    eq_(im2.get_pixel(0,0), 255)
-    eq_(im2.get_pixel(0,1), 255)
-    eq_(im2.get_pixel(1,0), 5)
-    eq_(im2.get_pixel(1,1), 2)
+    eq_(im2.get_pixel(0, 0), 255)
+    eq_(im2.get_pixel(0, 1), 255)
+    eq_(im2.get_pixel(1, 0), 5)
+    eq_(im2.get_pixel(1, 1), 2)
     # Cast back!
     im = im2.copy(mapnik.ImageType.gray16)
-    eq_(im.get_pixel(0,0), 255)
-    eq_(im.get_pixel(0,1), 255)
-    eq_(im.get_pixel(1,0), 5)
-    eq_(im.get_pixel(1,1), 2)
+    eq_(im.get_pixel(0, 0), 255)
+    eq_(im.get_pixel(0, 1), 255)
+    eq_(im.get_pixel(1, 0), 5)
+    eq_(im.get_pixel(1, 1), 2)
+
 
 def test_image_32f_8_simple():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray32f)
-    im.set_pixel(0,0, 120.1234)
-    im.set_pixel(0,1, -23.4)
-    im.set_pixel(1,0, 120.6)
-    im.set_pixel(1,1, 360.2)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray32f)
+    im.set_pixel(0, 0, 120.1234)
+    im.set_pixel(0, 1, -23.4)
+    im.set_pixel(1, 0, 120.6)
+    im.set_pixel(1, 1, 360.2)
     im2 = im.copy(mapnik.ImageType.gray8)
-    eq_(im2.get_pixel(0,0), 120)
-    eq_(im2.get_pixel(0,1), 0)
-    eq_(im2.get_pixel(1,0), 120) # Notice this is truncated!
-    eq_(im2.get_pixel(1,1), 255)
+    eq_(im2.get_pixel(0, 0), 120)
+    eq_(im2.get_pixel(0, 1), 0)
+    eq_(im2.get_pixel(1, 0), 120)  # Notice this is truncated!
+    eq_(im2.get_pixel(1, 1), 255)
+
 
 def test_image_offset_and_scale():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray16)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
     eq_(im.offset, 0.0)
     eq_(im.scaling, 1.0)
     im.offset = 1.0
@@ -49,44 +57,47 @@ def test_image_offset_and_scale():
     eq_(im.offset, 1.0)
     eq_(im.scaling, 2.0)
 
+
 def test_image_16_8_scale_and_offset():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray16)
-    im.set_pixel(0,0, 256)
-    im.set_pixel(0,1, 258)
-    im.set_pixel(1,0, 99999)
-    im.set_pixel(1,1, 615)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+    im.set_pixel(0, 0, 256)
+    im.set_pixel(0, 1, 258)
+    im.set_pixel(1, 0, 99999)
+    im.set_pixel(1, 1, 615)
     offset = 255
     scaling = 3
     im2 = im.copy(mapnik.ImageType.gray8, offset, scaling)
-    eq_(im2.get_pixel(0,0), 0)
-    eq_(im2.get_pixel(0,1), 1)
-    eq_(im2.get_pixel(1,0), 255)
-    eq_(im2.get_pixel(1,1), 120)
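+    # the expected values below follow (value - offset) / scaling, clamped to
+    # the target range: (615 - 255) / 3 == 120, while 99999 clamps to 255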
+    eq_(im2.get_pixel(0, 0), 0)
+    eq_(im2.get_pixel(0, 1), 1)
+    eq_(im2.get_pixel(1, 0), 255)
+    eq_(im2.get_pixel(1, 1), 120)
     # pixels will be a little off due to offsets in reverting!
     im3 = im2.copy(mapnik.ImageType.gray16)
-    eq_(im3.get_pixel(0,0), 255) # Rounding error with ints
-    eq_(im3.get_pixel(0,1), 258) # same
-    eq_(im3.get_pixel(1,0), 1020) # The other one was way out of range for our scale/offset
-    eq_(im3.get_pixel(1,1), 615) # same 
+    eq_(im3.get_pixel(0, 0), 255)  # Rounding error with ints
+    eq_(im3.get_pixel(0, 1), 258)  # same
+    # The other one was way out of range for our scale/offset
+    eq_(im3.get_pixel(1, 0), 1020)
+    eq_(im3.get_pixel(1, 1), 615)  # same
+
 
 def test_image_16_32f_scale_and_offset():
-    im = mapnik.Image(2,2,mapnik.ImageType.gray16)
-    im.set_pixel(0,0, 256)
-    im.set_pixel(0,1, 258)
-    im.set_pixel(1,0, 0)
-    im.set_pixel(1,1, 615)
+    im = mapnik.Image(2, 2, mapnik.ImageType.gray16)
+    im.set_pixel(0, 0, 256)
+    im.set_pixel(0, 1, 258)
+    im.set_pixel(1, 0, 0)
+    im.set_pixel(1, 1, 615)
     offset = 255
     scaling = 3.2
     im2 = im.copy(mapnik.ImageType.gray32f, offset, scaling)
-    eq_(im2.get_pixel(0,0), 0.3125)
-    eq_(im2.get_pixel(0,1), 0.9375)
-    eq_(im2.get_pixel(1,0), -79.6875)
-    eq_(im2.get_pixel(1,1), 112.5)
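+    # same (value - offset) / scaling conversion, but gray32f keeps the exact
+    # floats, so copying back to gray16 below restores the original values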
+    eq_(im2.get_pixel(0, 0), 0.3125)
+    eq_(im2.get_pixel(0, 1), 0.9375)
+    eq_(im2.get_pixel(1, 0), -79.6875)
+    eq_(im2.get_pixel(1, 1), 112.5)
     im3 = im2.copy(mapnik.ImageType.gray16)
-    eq_(im3.get_pixel(0,0), 256) 
-    eq_(im3.get_pixel(0,1), 258)
-    eq_(im3.get_pixel(1,0), 0) 
-    eq_(im3.get_pixel(1,1), 615) 
+    eq_(im3.get_pixel(0, 0), 256)
+    eq_(im3.get_pixel(0, 1), 258)
+    eq_(im3.get_pixel(1, 0), 0)
+    eq_(im3.get_pixel(1, 1), 615)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/csv_test.py b/test/python_tests/csv_test.py
index 5011f57..c6bffa0 100644
--- a/test/python_tests/csv_test.py
+++ b/test/python_tests/csv_test.py
@@ -1,28 +1,38 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+from __future__ import print_function
+
 import glob
-from nose.tools import eq_,raises
-from utilities import execution_path
+import os
+
+from nose.tools import eq_, raises
+
+import mapnik
+
+from .utilities import execution_path
 
-import os, mapnik
 
 default_logging_severity = mapnik.logger.get_severity()
 
+
 def setup():
-    # make the tests silent since we intentially test error conditions that are noisy
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    # make the tests silent since we intentionally test error conditions
+    # that are noisy
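+    # getattr is needed because "None" is a keyword in Python 3, so the
+    # attribute can no longer be spelled mapnik.severity_type.None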
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def teardown():
     mapnik.logger.set_severity(default_logging_severity)
 
 if 'csv' in mapnik.DatasourceCache.plugin_names():
 
     def get_csv_ds(filename):
-        return mapnik.Datasource(type='csv',file=os.path.join('../data/csv/',filename))
+        return mapnik.Datasource(
+            type='csv', file=os.path.join('../data/csv/', filename))
 
     def test_broken_files(visual=False):
         broken = glob.glob("../data/csv/fails/*.*")
@@ -34,232 +44,262 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
         for csv in broken:
             if visual:
                 try:
-                    mapnik.Datasource(type='csv',file=csv,strict=True)
-                    print '\x1b[33mfailed: should have thrown\x1b[0m',csv
+                    mapnik.Datasource(type='csv', file=csv, strict=True)
+                    print('\x1b[33mfailed: should have thrown\x1b[0m', csv)
                 except Exception:
-                    print '\x1b[1;32m✓ \x1b[0m', csv
+                    print('\x1b[1;32m✓ \x1b[0m', csv)
 
     def test_good_files(visual=False):
         good_files = glob.glob("../data/csv/*.*")
         good_files.extend(glob.glob("../data/csv/warns/*.*"))
-        ignorable = os.path.join('..','data','csv','long_lat.vrt')
+        ignorable = os.path.join('..', 'data', 'csv', 'long_lat.vrt')
         good_files.remove(ignorable)
 
         for csv in good_files:
             if visual:
                 try:
-                    mapnik.Datasource(type='csv',file=csv)
-                    print '\x1b[1;32m✓ \x1b[0m', csv
-                except Exception, e:
-                    print '\x1b[33mfailed: should not have thrown\x1b[0m',csv,str(e)
+                    mapnik.Datasource(type='csv', file=csv)
+                    print('\x1b[1;32m✓ \x1b[0m', csv)
+                except Exception as e:
+                    print(
+                        '\x1b[33mfailed: should not have thrown\x1b[0m',
+                        csv,
+                        str(e))
 
     def test_lon_lat_detection(**kwargs):
         ds = get_csv_ds('lon_lat.csv')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['lon','lat'])
-        eq_(ds.field_types(),['int','int'])
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['lon', 'lat'])
+        eq_(ds.field_types(), ['int', 'int'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
         fs = ds.features(query)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
         feat = fs.next()
         attr = {'lon': 0, 'lat': 0}
-        eq_(feat.attributes,attr)
+        eq_(feat.attributes, attr)
 
     def test_lng_lat_detection(**kwargs):
         ds = get_csv_ds('lng_lat.csv')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['lng','lat'])
-        eq_(ds.field_types(),['int','int'])
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['lng', 'lat'])
+        eq_(ds.field_types(), ['int', 'int'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
         fs = ds.features(query)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
         feat = fs.next()
         attr = {'lng': 0, 'lat': 0}
-        eq_(feat.attributes,attr)
+        eq_(feat.attributes, attr)
 
     def test_type_detection(**kwargs):
         ds = get_csv_ds('nypd.csv')
-        eq_(ds.fields(),['Precinct','Phone','Address','City','geo_longitude','geo_latitude','geo_accuracy'])
-        eq_(ds.field_types(),['str','str','str','str','float','float','str'])
+        eq_(ds.fields(),
+            ['Precinct',
+             'Phone',
+             'Address',
+             'City',
+             'geo_longitude',
+             'geo_latitude',
+             'geo_accuracy'])
+        eq_(ds.field_types(), ['str', 'str',
+                               'str', 'str', 'float', 'float', 'str'])
         feat = ds.featureset().next()
-        attr = {'City': u'New York, NY', 'geo_accuracy': u'house', 'Phone': u'(212) 334-0711', 'Address': u'19 Elizabeth Street', 'Precinct': u'5th Precinct', 'geo_longitude': -70, 'geo_latitude': 40}
-        eq_(feat.attributes,attr)
-        eq_(len(ds.all_features()),2)
+        attr = {
+            'City': u'New York, NY',
+            'geo_accuracy': u'house',
+            'Phone': u'(212) 334-0711',
+            'Address': u'19 Elizabeth Street',
+            'Precinct': u'5th Precinct',
+            'geo_longitude': -70,
+            'geo_latitude': 40}
+        eq_(feat.attributes, attr)
+        eq_(len(ds.all_features()), 2)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_skipping_blank_rows(**kwargs):
         ds = get_csv_ds('blank_rows.csv')
-        eq_(ds.fields(),['x','y','name'])
-        eq_(ds.field_types(),['int','int','str'])
-        eq_(len(ds.all_features()),2)
+        eq_(ds.fields(), ['x', 'y', 'name'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
+        eq_(len(ds.all_features()), 2)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_empty_rows(**kwargs):
         ds = get_csv_ds('empty_rows.csv')
-        eq_(len(ds.fields()),10)
-        eq_(len(ds.field_types()),10)
-        eq_(ds.fields(),['x', 'y', 'text', 'date', 'integer', 'boolean', 'float', 'time', 'datetime', 'empty_column'])
-        eq_(ds.field_types(),['int', 'int', 'str', 'str', 'int', 'bool', 'float', 'str', 'str', 'str'])
+        eq_(len(ds.fields()), 10)
+        eq_(len(ds.field_types()), 10)
+        eq_(ds.fields(), ['x', 'y', 'text', 'date', 'integer',
+                          'boolean', 'float', 'time', 'datetime', 'empty_column'])
+        eq_(ds.field_types(), ['int', 'int', 'str', 'str',
+                               'int', 'bool', 'float', 'str', 'str', 'str'])
         fs = ds.featureset()
-        attr = {'x': 0, 'empty_column': u'', 'text': u'a b', 'float': 1.0, 'datetime': u'1971-01-01T04:14:00', 'y': 0, 'boolean': True, 'time': u'04:14:00', 'date': u'1971-01-01', 'integer': 40}
+        attr = {
+            'x': 0,
+            'empty_column': u'',
+            'text': u'a b',
+            'float': 1.0,
+            'datetime': u'1971-01-01T04:14:00',
+            'y': 0,
+            'boolean': True,
+            'time': u'04:14:00',
+            'date': u'1971-01-01',
+            'integer': 40}
         first = True
-        for feat in fs:
+        for feat in fs.features:
             if first:
-                first=False
-                eq_(feat.attributes,attr)
-            eq_(len(feat),10)
-            eq_(feat['empty_column'],u'')
+                first = False
+                eq_(feat.attributes, attr)
+            eq_(len(feat), 10)
+            eq_(feat['empty_column'], u'')
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_slashes(**kwargs):
         ds = get_csv_ds('has_attributes_with_slashes.csv')
-        eq_(len(ds.fields()),3)
+        eq_(len(ds.fields()), 3)
         fs = ds.all_features()
-        eq_(fs[0].attributes,{'x':0,'y':0,'name':u'a/a'})
-        eq_(fs[1].attributes,{'x':1,'y':4,'name':u'b/b'})
-        eq_(fs[2].attributes,{'x':10,'y':2.5,'name':u'c/c'})
+        eq_(fs[0].attributes, {'x': 0, 'y': 0, 'name': u'a/a'})
+        eq_(fs[1].attributes, {'x': 1, 'y': 4, 'name': u'b/b'})
+        eq_(fs[2].attributes, {'x': 10, 'y': 2.5, 'name': u'c/c'})
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_wkt_field(**kwargs):
         ds = get_csv_ds('wkt.csv')
-        eq_(len(ds.fields()),1)
-        eq_(ds.fields(),['type'])
-        eq_(ds.field_types(),['str'])
+        eq_(len(ds.fields()), 1)
+        eq_(ds.fields(), ['type'])
+        eq_(ds.field_types(), ['str'])
         fs = ds.all_features()
-        #eq_(len(fs[0].geometries()),1)
-        eq_(fs[0].geometry.type(),mapnik.GeometryType.Point)
-        #eq_(len(fs[1].geometries()),1)
-        eq_(fs[1].geometry.type(),mapnik.GeometryType.LineString)
-        #eq_(len(fs[2].geometries()),1)
-        eq_(fs[2].geometry.type(),mapnik.GeometryType.Polygon)
-        #eq_(len(fs[3].geometries()),1) # one geometry, two parts
-        eq_(fs[3].geometry.type(),mapnik.GeometryType.Polygon)
-        #eq_(len(fs[4].geometries()),4)
-        eq_(fs[4].geometry.type(),mapnik.GeometryType.MultiPoint)
-        #eq_(len(fs[5].geometries()),2)
-        eq_(fs[5].geometry.type(),mapnik.GeometryType.MultiLineString)
-        #eq_(len(fs[6].geometries()),2)
-        eq_(fs[6].geometry.type(),mapnik.GeometryType.MultiPolygon)
-        #eq_(len(fs[7].geometries()),2)
-        eq_(fs[7].geometry.type(),mapnik.GeometryType.MultiPolygon)
+        # eq_(len(fs[0].geometries()),1)
+        eq_(fs[0].geometry.type(), mapnik.GeometryType.Point)
+        # eq_(len(fs[1].geometries()),1)
+        eq_(fs[1].geometry.type(), mapnik.GeometryType.LineString)
+        # eq_(len(fs[2].geometries()),1)
+        eq_(fs[2].geometry.type(), mapnik.GeometryType.Polygon)
+        # eq_(len(fs[3].geometries()),1) # one geometry, two parts
+        eq_(fs[3].geometry.type(), mapnik.GeometryType.Polygon)
+        # eq_(len(fs[4].geometries()),4)
+        eq_(fs[4].geometry.type(), mapnik.GeometryType.MultiPoint)
+        # eq_(len(fs[5].geometries()),2)
+        eq_(fs[5].geometry.type(), mapnik.GeometryType.MultiLineString)
+        # eq_(len(fs[6].geometries()),2)
+        eq_(fs[6].geometry.type(), mapnik.GeometryType.MultiPolygon)
+        # eq_(len(fs[7].geometries()),2)
+        eq_(fs[7].geometry.type(), mapnik.GeometryType.MultiPolygon)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Collection)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Collection)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_handling_of_missing_header(**kwargs):
         ds = get_csv_ds('missing_header.csv')
-        eq_(len(ds.fields()),6)
-        eq_(ds.fields(),['one','two','x','y','_4','aftermissing'])
+        eq_(len(ds.fields()), 6)
+        eq_(ds.fields(), ['one', 'two', 'x', 'y', '_4', 'aftermissing'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['_4'],'missing')
+        eq_(feat['_4'], 'missing')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_handling_of_headers_that_are_numbers(**kwargs):
         ds = get_csv_ds('numbers_for_headers.csv')
-        eq_(len(ds.fields()),5)
-        eq_(ds.fields(),['x','y','1990','1991','1992'])
+        eq_(len(ds.fields()), 5)
+        eq_(ds.fields(), ['x', 'y', '1990', '1991', '1992'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['1990'],1)
-        eq_(feat['1991'],2)
-        eq_(feat['1992'],3)
-        eq_(mapnik.Expression("[1991]=2").evaluate(feat),True)
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['1990'], 1)
+        eq_(feat['1991'], 2)
+        eq_(feat['1992'], 3)
+        eq_(mapnik.Expression("[1991]=2").evaluate(feat), True)
 
     def test_quoted_numbers(**kwargs):
         ds = get_csv_ds('points.csv')
-        eq_(len(ds.fields()),6)
-        eq_(ds.fields(),['lat','long','name','nr','color','placements'])
+        eq_(len(ds.fields()), 6)
+        eq_(ds.fields(), ['lat', 'long', 'name', 'nr', 'color', 'placements'])
         fs = ds.all_features()
-        eq_(fs[0]['placements'],"N,S,E,W,SW,10,5")
-        eq_(fs[1]['placements'],"N,S,E,W,SW,10,5")
-        eq_(fs[2]['placements'],"N,S,E,W,SW,10,5")
-        eq_(fs[3]['placements'],"N,S,E,W,SW,10,5")
-        eq_(fs[4]['placements'],"N,S,E,W,SW,10,5")
+        eq_(fs[0]['placements'], "N,S,E,W,SW,10,5")
+        eq_(fs[1]['placements'], "N,S,E,W,SW,10,5")
+        eq_(fs[2]['placements'], "N,S,E,W,SW,10,5")
+        eq_(fs[3]['placements'], "N,S,E,W,SW,10,5")
+        eq_(fs[4]['placements'], "N,S,E,W,SW,10,5")
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_reading_windows_newlines(**kwargs):
         ds = get_csv_ds('windows_newlines.csv')
-        eq_(len(ds.fields()),3)
+        eq_(len(ds.fields()), 3)
         feats = ds.all_features()
-        eq_(len(feats),1)
+        eq_(len(feats), 1)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],1)
-        eq_(feat['y'],10)
-        eq_(feat['z'],9999.9999)
+        eq_(feat['x'], 1)
+        eq_(feat['y'], 10)
+        eq_(feat['z'], 9999.9999)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_reading_mac_newlines(**kwargs):
         ds = get_csv_ds('mac_newlines.csv')
-        eq_(len(ds.fields()),3)
+        eq_(len(ds.fields()), 3)
         feats = ds.all_features()
-        eq_(len(feats),1)
+        eq_(len(feats), 1)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],1)
-        eq_(feat['y'],10)
-        eq_(feat['z'],9999.9999)
+        eq_(feat['x'], 1)
+        eq_(feat['y'], 10)
+        eq_(feat['z'], 9999.9999)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def check_newlines(filename):
         ds = get_csv_ds(filename)
-        eq_(len(ds.fields()),3)
+        eq_(len(ds.fields()), 3)
         feats = ds.all_features()
-        eq_(len(feats),1)
+        eq_(len(feats), 1)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['line'],'many\n  lines\n  of text\n  with unix newlines')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['line'], 'many\n  lines\n  of text\n  with unix newlines')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_mixed_mac_unix_newlines(**kwargs):
         check_newlines('mac_newlines_with_unix_inline.csv')
@@ -268,11 +308,11 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
         check_newlines('mac_newlines_with_unix_inline_escaped.csv')
 
     # Too hard to support this case
-    #def test_mixed_unix_windows_newlines(**kwargs):
+    # def test_mixed_unix_windows_newlines(**kwargs):
     #    check_newlines('unix_newlines_with_windows_inline.csv')
 
     # Too hard to support this case
-    #def test_mixed_unix_windows_newlines_escaped(**kwargs):
+    # def test_mixed_unix_windows_newlines_escaped(**kwargs):
     #    check_newlines('unix_newlines_with_windows_inline_escaped.csv')
 
     def test_mixed_windows_unix_newlines(**kwargs):
@@ -283,74 +323,74 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
 
     def test_tabs(**kwargs):
         ds = get_csv_ds('tabs_in_csv.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','z'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'z'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],-122)
-        eq_(feat['y'],48)
-        eq_(feat['z'],0)
+        eq_(feat['x'], -122)
+        eq_(feat['y'], 48)
+        eq_(feat['z'], 0)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_separator_pipes(**kwargs):
         ds = get_csv_ds('pipe_delimiters.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','z'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'z'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['z'],'hello')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['z'], 'hello')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_separator_semicolon(**kwargs):
         ds = get_csv_ds('semicolon_delimiters.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','z'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'z'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['z'],'hello')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['z'], 'hello')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_that_null_and_bool_keywords_are_empty_strings(**kwargs):
         ds = get_csv_ds('nulls_and_booleans_as_strings.csv')
-        eq_(len(ds.fields()),4)
-        eq_(ds.fields(),['x','y','null','boolean'])
-        eq_(ds.field_types(),['int', 'int', 'str', 'bool'])
+        eq_(len(ds.fields()), 4)
+        eq_(ds.fields(), ['x', 'y', 'null', 'boolean'])
+        eq_(ds.field_types(), ['int', 'int', 'str', 'bool'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['null'],'null')
-        eq_(feat['boolean'],True)
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['null'], 'null')
+        eq_(feat['boolean'], True)
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['null'],'')
-        eq_(feat['boolean'],False)
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['null'], '')
+        eq_(feat['boolean'], False)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
     @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
         ds = get_csv_ds('lon_lat.csv')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['lon','lat'])
-        eq_(ds.field_types(),['int','int'])
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['lon', 'lat'])
+        eq_(ds.field_types(), ['int', 'int'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
@@ -360,83 +400,83 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
 
     def test_that_leading_zeros_mean_strings(**kwargs):
         ds = get_csv_ds('leading_zeros.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','fips'])
-        eq_(ds.field_types(),['int','int','str'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'fips'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['fips'],'001')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['fips'], '001')
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['fips'],'003')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['fips'], '003')
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['fips'],'005')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['fips'], '005')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_advanced_geometry_detection(**kwargs):
         ds = get_csv_ds('point_wkt.csv')
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
         ds = get_csv_ds('poly_wkt.csv')
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Polygon)
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Polygon)
         ds = get_csv_ds('multi_poly_wkt.csv')
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Polygon)
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Polygon)
         ds = get_csv_ds('line_wkt.csv')
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.LineString)
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.LineString)
 
     def test_creation_of_csv_from_in_memory_string(**kwargs):
         csv_string = '''
            wkt,Name
           "POINT (120.15 48.47)","Winthrop, WA"
-          ''' # csv plugin will test lines <= 10 chars for being fully blank
-        ds = mapnik.Datasource(**{"type":"csv","inline":csv_string})
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
+          '''  # csv plugin will test lines <= 10 chars for being fully blank
+        ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['Name'],u"Winthrop, WA")
+        eq_(feat['Name'], u"Winthrop, WA")
 
     def test_creation_of_csv_from_in_memory_string_with_uft8(**kwargs):
         csv_string = '''
            wkt,Name
           "POINT (120.15 48.47)","Québec"
-          ''' # csv plugin will test lines <= 10 chars for being fully blank
-        ds = mapnik.Datasource(**{"type":"csv","inline":csv_string})
-        eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
+          '''  # csv plugin will test lines <= 10 chars for being fully blank
+        ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+        eq_(ds.describe()['geometry_type'], mapnik.DataGeometryType.Point)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['Name'],u"Québec")
+        eq_(feat['Name'], u"Québec")
 
     def validate_geojson_datasource(ds):
-        eq_(len(ds.fields()),1)
-        eq_(ds.fields(),['type'])
-        eq_(ds.field_types(),['str'])
+        eq_(len(ds.fields()), 1)
+        eq_(ds.fields(), ['type'])
+        eq_(ds.field_types(), ['str'])
         fs = ds.all_features()
-        #eq_(len(fs[0].geometries()),1)
-        eq_(fs[0].geometry.type(),mapnik.GeometryType.Point)
-        #eq_(len(fs[1].geometries()),1)
-        eq_(fs[1].geometry.type(),mapnik.GeometryType.LineString)
-        #eq_(len(fs[2].geometries()),1)
+        # eq_(len(fs[0].geometries()),1)
+        eq_(fs[0].geometry.type(), mapnik.GeometryType.Point)
+        # eq_(len(fs[1].geometries()),1)
+        eq_(fs[1].geometry.type(), mapnik.GeometryType.LineString)
+        # eq_(len(fs[2].geometries()),1)
         eq_(fs[2].geometry.type(), mapnik.GeometryType.Polygon)
-        #eq_(len(fs[3].geometries()),1) # one geometry, two parts
-        eq_(fs[3].geometry.type(),mapnik.GeometryType.Polygon)
-        #eq_(len(fs[4].geometries()),4)
-        eq_(fs[4].geometry.type(),mapnik.GeometryType.MultiPoint)
-        #eq_(len(fs[5].geometries()),2)
-        eq_(fs[5].geometry.type(),mapnik.GeometryType.MultiLineString)
-        #eq_(len(fs[6].geometries()),2)
-        eq_(fs[6].geometry.type(),mapnik.GeometryType.MultiPolygon)
-        #eq_(len(fs[7].geometries()),2)
-        eq_(fs[7].geometry.type(),mapnik.GeometryType.MultiPolygon)
+        # eq_(len(fs[3].geometries()),1) # one geometry, two parts
+        eq_(fs[3].geometry.type(), mapnik.GeometryType.Polygon)
+        # eq_(len(fs[4].geometries()),4)
+        eq_(fs[4].geometry.type(), mapnik.GeometryType.MultiPoint)
+        # eq_(len(fs[5].geometries()),2)
+        eq_(fs[5].geometry.type(), mapnik.GeometryType.MultiLineString)
+        # eq_(len(fs[6].geometries()),2)
+        eq_(fs[6].geometry.type(), mapnik.GeometryType.MultiPolygon)
+        # eq_(len(fs[7].geometries()),2)
+        eq_(fs[7].geometry.type(), mapnik.GeometryType.MultiPolygon)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Collection)
-        eq_(desc['name'],'csv')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Collection)
+        eq_(desc['name'], 'csv')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
 
     def test_json_field1(**kwargs):
         ds = get_csv_ds('geojson_double_quote_escape.csv')
@@ -452,18 +492,18 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
 
     def test_that_blank_undelimited_rows_are_still_parsed(**kwargs):
         ds = get_csv_ds('more_headers_than_column_values.csv')
-        eq_(len(ds.fields()),5)
-        eq_(ds.fields(),['x','y','one', 'two','three'])
-        eq_(ds.field_types(),['int','int','str','str','str'])
+        eq_(len(ds.fields()), 5)
+        eq_(ds.fields(), ['x', 'y', 'one', 'two', 'three'])
+        eq_(ds.field_types(), ['int', 'int', 'str', 'str', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['one'],'')
-        eq_(feat['two'],'')
-        eq_(feat['three'],'')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['one'], '')
+        eq_(feat['two'], '')
+        eq_(feat['three'], '')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
     @raises(RuntimeError)
     def test_that_fewer_headers_than_rows_throws(**kwargs):
@@ -472,132 +512,136 @@ if 'csv' in mapnik.DatasourceCache.plugin_names():
 
     def test_that_feature_id_only_incremented_for_valid_rows(**kwargs):
         ds = mapnik.Datasource(type='csv',
-                               file=os.path.join('../data/csv/warns','feature_id_counting.csv'))
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','id'])
-        eq_(ds.field_types(),['int','int','int'])
+                               file=os.path.join('../data/csv/warns', 'feature_id_counting.csv'))
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'id'])
+        eq_(ds.field_types(), ['int', 'int', 'int'])
         fs = ds.featureset()
         # first
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['id'],1)
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['id'], 1)
         # second, should have skipped bogus one
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['id'],2)
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['id'], 2)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),2)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 2)
 
     def test_dynamically_defining_headers1(**kwargs):
         ds = mapnik.Datasource(type='csv',
-                               file=os.path.join('../data/csv/fails','needs_headers_two_lines.csv'),
+                               file=os.path.join(
+                                   '../data/csv/fails', 'needs_headers_two_lines.csv'),
                                headers='x,y,name')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','name'])
-        eq_(ds.field_types(),['int','int','str'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'name'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['name'],'data_name')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['name'], 'data_name')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),2)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 2)
 
     def test_dynamically_defining_headers2(**kwargs):
         ds = mapnik.Datasource(type='csv',
-                               file=os.path.join('../data/csv/fails','needs_headers_one_line.csv'),
+                               file=os.path.join(
+                                   '../data/csv/fails', 'needs_headers_one_line.csv'),
                                headers='x,y,name')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','name'])
-        eq_(ds.field_types(),['int','int','str'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'name'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['name'],'data_name')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['name'], 'data_name')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),1)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 1)
 
     def test_dynamically_defining_headers3(**kwargs):
         ds = mapnik.Datasource(type='csv',
-                               file=os.path.join('../data/csv/fails','needs_headers_one_line_no_newline.csv'),
+                               file=os.path.join(
+                                   '../data/csv/fails', 'needs_headers_one_line_no_newline.csv'),
                                headers='x,y,name')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','name'])
-        eq_(ds.field_types(),['int','int','str'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'name'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['x'],0)
-        eq_(feat['y'],0)
-        eq_(feat['name'],'data_name')
+        eq_(feat['x'], 0)
+        eq_(feat['y'], 0)
+        eq_(feat['name'], 'data_name')
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),1)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 1)
 
     def test_that_64bit_int_fields_work(**kwargs):
         ds = get_csv_ds('64bit_int.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','bigint'])
-        eq_(ds.field_types(),['int','int','int'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'bigint'])
+        eq_(ds.field_types(), ['int', 'int', 'int'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['bigint'],2147483648)
+        eq_(feat['bigint'], 2147483648)
         feat = fs.next()
-        eq_(feat['bigint'],9223372036854775807)
-        eq_(feat['bigint'],0x7FFFFFFFFFFFFFFF)
+        eq_(feat['bigint'], 9223372036854775807)
+        eq_(feat['bigint'], 0x7FFFFFFFFFFFFFFF)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),2)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 2)
 
     def test_various_number_types(**kwargs):
         ds = get_csv_ds('number_types.csv')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['x','y','floats'])
-        eq_(ds.field_types(),['int','int','float'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['x', 'y', 'floats'])
+        eq_(ds.field_types(), ['int', 'int', 'float'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['floats'],.0)
+        eq_(feat['floats'], .0)
         feat = fs.next()
-        eq_(feat['floats'],+.0)
+        eq_(feat['floats'], +.0)
         feat = fs.next()
-        eq_(feat['floats'],1e-06)
+        eq_(feat['floats'], 1e-06)
         feat = fs.next()
-        eq_(feat['floats'],-1e-06)
+        eq_(feat['floats'], -1e-06)
         feat = fs.next()
-        eq_(feat['floats'],0.000001)
+        eq_(feat['floats'], 0.000001)
         feat = fs.next()
-        eq_(feat['floats'],1.234e+16)
+        eq_(feat['floats'], 1.234e+16)
         feat = fs.next()
-        eq_(feat['floats'],1.234e+16)
+        eq_(feat['floats'], 1.234e+16)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(len(ds.all_features()),8)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(len(ds.all_features()), 8)
 
     def test_manually_supplied_extent(**kwargs):
         csv_string = '''
            wkt,Name
           '''
-        ds = mapnik.Datasource(**{"type":"csv","extent":"-180,-90,180,90","inline":csv_string})
+        ds = mapnik.Datasource(
+            **{"type": "csv", "extent": "-180,-90,180,90", "inline": csv_string})
         b = ds.envelope()
-        eq_(b.minx,-180)
-        eq_(b.miny,-90)
-        eq_(b.maxx,180)
-        eq_(b.maxy,90)
+        eq_(b.minx, -180)
+        eq_(b.miny, -90)
+        eq_(b.maxx, 180)
+        eq_(b.maxy, 90)
 
     def test_inline_geojson(**kwargs):
         csv_string = "geojson\n'{\"coordinates\":[-92.22568,38.59553],\"type\":\"Point\"}'"
-        ds = mapnik.Datasource(**{"type":"csv","inline":csv_string})
-        eq_(len(ds.fields()),0)
-        eq_(ds.fields(),[])
+        ds = mapnik.Datasource(**{"type": "csv", "inline": csv_string})
+        eq_(len(ds.fields()), 0)
+        eq_(ds.fields(), [])
         # FIXME - re-enable after https://github.com/mapnik/mapnik/issues/2319 is fixed
         #fs = ds.featureset()
         #feat = fs.next()
-        #eq_(feat.num_geometries(),1)
+        # eq_(feat.num_geometries(),1)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/datasource_test.py b/test/python_tests/datasource_test.py
index 4ada3dc..f408f46 100644
--- a/test/python_tests/datasource_test.py
+++ b/test/python_tests/datasource_test.py
@@ -1,35 +1,47 @@
 #!/usr/bin/env python
+import os
+import sys
+from itertools import groupby
 
 from nose.tools import eq_, raises
-from utilities import execution_path, run_all
-import os, mapnik
-from itertools import groupby
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
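+# xrange was removed in Python 3; alias it to range so the loops below run
+# unchanged on both Python 2 and 3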
+PYTHON3 = sys.version_info[0] == 3
+if PYTHON3:
+    xrange = range
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_that_datasources_exist():
     if len(mapnik.DatasourceCache.plugin_names()) == 0:
-        print '***NOTICE*** - no datasource plugins have been loaded'
+        print('***NOTICE*** - no datasource plugins have been loaded')
 
 # adapted from raster_symboliser_test#test_dataraster_query_point
+
+
 @raises(RuntimeError)
 def test_vrt_referring_to_missing_files():
     srs = '+init=epsg:32630'
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         lyr = mapnik.Layer('dataraster')
         lyr.datasource = mapnik.Gdal(
-            file = '../data/raster/missing_raster.vrt',
-            band = 1,
-            )
+            file='../data/raster/missing_raster.vrt',
+            band=1,
+        )
         lyr.srs = srs
         _map = mapnik.Map(256, 256, srs)
         _map.layers.append(lyr)
 
         # center of extent of raster
-        x, y = 556113.0,4381428.0 # center of extent of raster
+        x, y = 556113.0, 4381428.0  # center of extent of raster
 
         _map.zoom_all()
 
@@ -61,10 +73,11 @@ def test_field_listing():
         fields = ds.fields()
         eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Polygon)
-        eq_(desc['name'],'shape')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
+        eq_(desc['name'], 'shape')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
+
 
 def test_total_feature_count_shp():
     if 'shape' in mapnik.DatasourceCache.plugin_names():
@@ -73,38 +86,45 @@ def test_total_feature_count_shp():
         num_feats = len(features)
         eq_(num_feats, 10)
 
+
 def test_total_feature_count_json():
     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-        ds = mapnik.Ogr(file='../data/json/points.geojson',layer_by_index=0)
+        ds = mapnik.Ogr(file='../data/json/points.geojson', layer_by_index=0)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_(desc['name'],'ogr')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_(desc['name'], 'ogr')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
         features = ds.all_features()
         num_feats = len(features)
         eq_(num_feats, 5)
 
+
 def test_sqlite_reading():
     if 'sqlite' in mapnik.DatasourceCache.plugin_names():
-        ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',table_by_index=0)
+        ds = mapnik.SQLite(
+            file='../data/sqlite/world.sqlite',
+            table_by_index=0)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Polygon)
-        eq_(desc['name'],'sqlite')
-        eq_(desc['type'],mapnik.DataType.Vector)
-        eq_(desc['encoding'],'utf-8')
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Polygon)
+        eq_(desc['name'], 'sqlite')
+        eq_(desc['type'], mapnik.DataType.Vector)
+        eq_(desc['encoding'], 'utf-8')
         features = ds.all_features()
         num_feats = len(features)
         eq_(num_feats, 245)
 
+
 def test_reading_json_from_string():
-    json = open('../data/json/points.geojson','r').read()
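+    # read the file via a context manager so the handle is closed promptly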
+    with open('../data/json/points.geojson', 'r') as f:
+        json = f.read()
     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-        ds = mapnik.Ogr(file=json,layer_by_index=0)
+        ds = mapnik.Ogr(file=json, layer_by_index=0)
         features = ds.all_features()
         num_feats = len(features)
         eq_(num_feats, 5)
 
+
 def test_feature_envelope():
     if 'shape' in mapnik.DatasourceCache.plugin_names():
         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
@@ -116,6 +136,7 @@ def test_feature_envelope():
             intersects = ds.envelope().contains(env)
             eq_(intersects, True)
 
+
 def test_feature_attributes():
     if 'shape' in mapnik.DatasourceCache.plugin_names():
         ds = mapnik.Shapefile(file='../data/shp/poly.shp')
@@ -123,31 +144,35 @@ def test_feature_attributes():
         feat = features[0]
         attrs = {'PRFEDEA': u'35043411', 'EAS_ID': 168, 'AREA': 215229.266}
         eq_(feat.attributes, attrs)
-        eq_(ds.fields(),['AREA', 'EAS_ID', 'PRFEDEA'])
-        eq_(ds.field_types(),['float','int','str'])
+        eq_(ds.fields(), ['AREA', 'EAS_ID', 'PRFEDEA'])
+        eq_(ds.field_types(), ['float', 'int', 'str'])
+
 
 def test_ogr_layer_by_sql():
     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-        ds = mapnik.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
+        ds = mapnik.Ogr(file='../data/shp/poly.shp',
+                        layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
         features = ds.all_features()
         num_feats = len(features)
         eq_(num_feats, 1)
 
+
 def test_hit_grid():
 
     def rle_encode(l):
         """ encode a list of strings with run-length compression """
-        return ["%d:%s" % (len(list(group)), name) for name, group in groupby(l)]
+        return ["%d:%s" % (len(list(group)), name)
+                for name, group in groupby(l)]
 
-    m = mapnik.Map(256,256);
+    m = mapnik.Map(256, 256)
     try:
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml');
+        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
         m.zoom_all()
         join_field = 'NAME'
-        fg = [] # feature grid
-        for y in range(0, 256, 4):
-            for x in range(0, 256, 4):
-                featureset = m.query_map_point(0,x,y)
+        fg = []  # feature grid
+        for y in xrange(0, 256, 4):
+            for x in xrange(0, 256, 4):
+                featureset = m.query_map_point(0, x, y)
                 added = False
                 for feature in featureset.features:
                     fg.append(feature[join_field])
@@ -155,9 +180,9 @@ def test_hit_grid():
                 if not added:
                     fg.append('')
         hit_list = '|'.join(rle_encode(fg))
-        eq_(hit_list[:16],'730:|2:Greenland')
-        eq_(hit_list[-12:],'1:Chile|812:')
-    except RuntimeError, e:
+        eq_(hit_list[:16], '730:|2:Greenland')
+        eq_(hit_list[-12:], '1:Chile|812:')
+    except RuntimeError as e:
         # only test datasources that we have installed
         if not 'Could not create datasource' in str(e):
             raise RuntimeError(str(e))
diff --git a/test/python_tests/datasource_xml_template_test.py b/test/python_tests/datasource_xml_template_test.py
index 38a73a3..b561d93 100644
--- a/test/python_tests/datasource_xml_template_test.py
+++ b/test/python_tests/datasource_xml_template_test.py
@@ -2,19 +2,23 @@
 # -*- coding: utf-8 -*-
 
 import os
-from utilities import execution_path, run_all
+
 import mapnik
 
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_datasource_template_is_working():
-    m = mapnik.Map(256,256)
+    m = mapnik.Map(256, 256)
     try:
-        mapnik.load_map(m,'../data/good_maps/datasource.xml')
-    except RuntimeError, e:
+        mapnik.load_map(m, '../data/good_maps/datasource.xml')
+    except RuntimeError as e:
         if "Required parameter 'type'" in str(e):
             raise RuntimeError(e)
 
diff --git a/test/python_tests/extra_map_props_test.py b/test/python_tests/extra_map_props_test.py
index 045cddb..ac9e748 100644
--- a/test/python_tests/extra_map_props_test.py
+++ b/test/python_tests/extra_map_props_test.py
@@ -1,53 +1,59 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_arbitrary_parameters_attached_to_map():
-    m = mapnik.Map(256,256)
-    mapnik.load_map(m,'../data/good_maps/extra_arbitary_map_parameters.xml')
-    eq_(len(m.parameters),5)
-    eq_(m.parameters['key'],'value2')
-    eq_(m.parameters['key3'],'value3')
-    eq_(m.parameters['unicode'],u'iván')
-    eq_(m.parameters['integer'],10)
-    eq_(m.parameters['decimal'],.999)
-    m2 = mapnik.Map(256,256)
-    for k,v in m.parameters:
-        m2.parameters.append(mapnik.Parameter(k,v))
-    eq_(len(m2.parameters),5)
-    eq_(m2.parameters['key'],'value2')
-    eq_(m2.parameters['key3'],'value3')
-    eq_(m2.parameters['unicode'],u'iván')
-    eq_(m2.parameters['integer'],10)
-    eq_(m2.parameters['decimal'],.999)
+    m = mapnik.Map(256, 256)
+    mapnik.load_map(m, '../data/good_maps/extra_arbitary_map_parameters.xml')
+    eq_(len(m.parameters), 5)
+    eq_(m.parameters['key'], 'value2')
+    eq_(m.parameters['key3'], 'value3')
+    eq_(m.parameters['unicode'], u'iván')
+    eq_(m.parameters['integer'], 10)
+    eq_(m.parameters['decimal'], .999)
+    m2 = mapnik.Map(256, 256)
+    for k, v in m.parameters:
+        m2.parameters.append(mapnik.Parameter(k, v))
+    eq_(len(m2.parameters), 5)
+    eq_(m2.parameters['key'], 'value2')
+    eq_(m2.parameters['key3'], 'value3')
+    eq_(m2.parameters['unicode'], u'iván')
+    eq_(m2.parameters['integer'], 10)
+    eq_(m2.parameters['decimal'], .999)
     map_string = mapnik.save_map_to_string(m)
-    m3 = mapnik.Map(256,256)
-    mapnik.load_map_from_string(m3,map_string)
-    eq_(len(m3.parameters),5)
-    eq_(m3.parameters['key'],'value2')
-    eq_(m3.parameters['key3'],'value3')
-    eq_(m3.parameters['unicode'],u'iván')
-    eq_(m3.parameters['integer'],10)
-    eq_(m3.parameters['decimal'],.999)
+    m3 = mapnik.Map(256, 256)
+    mapnik.load_map_from_string(m3, map_string)
+    eq_(len(m3.parameters), 5)
+    eq_(m3.parameters['key'], 'value2')
+    eq_(m3.parameters['key3'], 'value3')
+    eq_(m3.parameters['unicode'], u'iván')
+    eq_(m3.parameters['integer'], 10)
+    eq_(m3.parameters['decimal'], .999)
 
 
 def test_serializing_arbitrary_parameters():
-    m = mapnik.Map(256,256)
-    m.parameters.append(mapnik.Parameter('width',m.width))
-    m.parameters.append(mapnik.Parameter('height',m.height))
-
-    m2 = mapnik.Map(1,1)
-    mapnik.load_map_from_string(m2,mapnik.save_map_to_string(m))
-    eq_(m2.parameters['width'],m.width)
-    eq_(m2.parameters['height'],m.height)
+    m = mapnik.Map(256, 256)
+    m.parameters.append(mapnik.Parameter('width', m.width))
+    m.parameters.append(mapnik.Parameter('height', m.height))
+
+    m2 = mapnik.Map(1, 1)
+    mapnik.load_map_from_string(m2, mapnik.save_map_to_string(m))
+    eq_(m2.parameters['width'], m.width)
+    eq_(m2.parameters['height'], m.height)
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/feature_id_test.py b/test/python_tests/feature_id_test.py
index 66c20cc..7f7464e 100644
--- a/test/python_tests/feature_id_test.py
+++ b/test/python_tests/feature_id_test.py
@@ -1,19 +1,29 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
-import itertools
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
+try:
+    import itertools.izip as zip
+except ImportError:
+    pass
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
-def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
+
+def compare_shape_between_mapnik_and_ogr(shapefile, query=None):
     plugins = mapnik.DatasourceCache.plugin_names()
     if 'shape' in plugins and 'ogr' in plugins:
-        ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0)
+        ds1 = mapnik.Ogr(file=shapefile, layer_by_index=0)
         ds2 = mapnik.Shapefile(file=shapefile)
         if query:
             fs1 = ds1.features(query)
@@ -21,29 +31,33 @@ def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
         else:
             fs1 = ds1.featureset()
             fs2 = ds2.featureset()
-        count = 0;
-        for feat1,feat2 in itertools.izip(fs1,fs2):
+        count = 0
+        for feat1, feat2 in zip(fs1.features, fs2.features):
             count += 1
-            eq_(feat1.id(),feat2.id(),
+            eq_(feat1.id(), feat2.id(),
                 '%s : ogr feature id %s "%s" does not equal shapefile feature id %s "%s"'
-                  % (count,feat1.id(),str(feat1.attributes), feat2.id(),str(feat2.attributes)))
+                % (count, feat1.id(), str(feat1.attributes), feat2.id(), str(feat2.attributes)))
     return True
 
 
 def test_shapefile_line_featureset_id():
     compare_shape_between_mapnik_and_ogr('../data/shp/polylines.shp')
 
+
 def test_shapefile_polygon_featureset_id():
     compare_shape_between_mapnik_and_ogr('../data/shp/poly.shp')
 
+
 def test_shapefile_polygon_feature_query_id():
     bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
     query = mapnik.Query(mapnik.Box2d(*bbox))
     if 'ogr' in mapnik.DatasourceCache.plugin_names():
-        ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
+        ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0)
         for fld in ds.fields():
             query.add_property_name(fld)
-        compare_shape_between_mapnik_and_ogr('../data/shp/world_merc.shp',query)
+        compare_shape_between_mapnik_and_ogr(
+            '../data/shp/world_merc.shp', query)
+
 
 def test_feature_hit_count():
     pass
@@ -52,7 +66,7 @@ def test_feature_hit_count():
     #bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
     #bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
     #query = mapnik.Query(mapnik.Box2d(*bbox))
-    #if 'ogr' in mapnik.DatasourceCache.plugin_names():
+    # if 'ogr' in mapnik.DatasourceCache.plugin_names():
     #    ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
     #    for fld in ds1.fields():
     #        query.add_property_name(fld)
diff --git a/test/python_tests/feature_test.py b/test/python_tests/feature_test.py
index 5574cc7..7a544af 100644
--- a/test/python_tests/feature_test.py
+++ b/test/python_tests/feature_test.py
@@ -1,32 +1,38 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,raises
-from utilities import run_all
+from binascii import unhexlify
+
+from nose.tools import eq_, raises
 
 import mapnik
-from binascii import unhexlify
+
+from .utilities import run_all
+
 
 def test_default_constructor():
-    f = mapnik.Feature(mapnik.Context(),1)
-    eq_(f is not None,True)
+    f = mapnik.Feature(mapnik.Context(), 1)
+    eq_(f is not None, True)
+
 
 def test_feature_geo_interface():
     ctx = mapnik.Context()
-    feat = mapnik.Feature(ctx,1)
+    feat = mapnik.Feature(ctx, 1)
     feat.geometry = mapnik.Geometry.from_wkt('Point (0 0)')
-    eq_(feat.__geo_interface__['geometry'],{u'type': u'Point', u'coordinates': [0, 0]})
+    eq_(feat.__geo_interface__['geometry'], {
+        u'type': u'Point', u'coordinates': [0, 0]})
+
 
 def test_python_extended_constructor():
     context = mapnik.Context()
     context.push('foo')
     context.push('foo')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     wkt = 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'
     f.geometry = mapnik.Geometry.from_wkt(wkt)
     f['foo'] = 'bar'
     eq_(f['foo'], 'bar')
-    eq_(f.envelope(),mapnik.Box2d(10.0,10.0,45.0,45.0))
+    eq_(f.envelope(), mapnik.Box2d(10.0, 10.0, 45.0, 45.0))
     # reset
     f['foo'] = u"avión"
     eq_(f['foo'], u"avión")
@@ -35,59 +41,71 @@ def test_python_extended_constructor():
     f['foo'] = True
     eq_(f['foo'], True)
 
+
 def test_add_geom_wkb():
-# POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))
+    # POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))
     wkb = '010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440'
     geometry = mapnik.Geometry.from_wkb(unhexlify(wkb))
-    eq_(geometry.is_valid(), True)
-    eq_(geometry.is_simple(), True)
-    eq_(geometry.envelope(), mapnik.Box2d(10.0,10.0,40.0,40.0))
+    if hasattr(geometry, 'is_valid'):
+        # Those are only available when python-mapnik has been built with
+        # boost >= 1.56.
+        eq_(geometry.is_valid(), True)
+        eq_(geometry.is_simple(), True)
+    eq_(geometry.envelope(), mapnik.Box2d(10.0, 10.0, 40.0, 40.0))
     geometry.correct()
-    # valid after calling correct
-    eq_(geometry.is_valid(), True)
+    if hasattr(geometry, 'is_valid'):
+        # valid after calling correct
+        eq_(geometry.is_valid(), True)
+
 
 def test_feature_expression_evaluation():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     f['name'] = 'a'
-    eq_(f['name'],u'a')
+    eq_(f['name'], u'a')
     expr = mapnik.Expression("[name]='a'")
     evaluated = expr.evaluate(f)
-    eq_(evaluated,True)
+    eq_(evaluated, True)
     num_attributes = len(f)
-    eq_(num_attributes,1)
-    eq_(f.id(),1)
+    eq_(num_attributes, 1)
+    eq_(f.id(), 1)
 
 # https://github.com/mapnik/mapnik/issues/933
+
+
 def test_feature_expression_evaluation_missing_attr():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     f['name'] = u'a'
-    eq_(f['name'],u'a')
+    eq_(f['name'], u'a')
     expr = mapnik.Expression("[fielddoesnotexist]='a'")
-    eq_(f.has_key('fielddoesnotexist'),False)
+    eq_('fielddoesnotexist' in f, False)
     try:
         expr.evaluate(f)
-    except Exception, e:
-        eq_("Key does not exist" in str(e),True)
+    except Exception as e:
+        eq_("Key does not exist" in str(e), True)
     num_attributes = len(f)
-    eq_(num_attributes,1)
-    eq_(f.id(),1)
+    eq_(num_attributes, 1)
+    eq_(f.id(), 1)
 
 # https://github.com/mapnik/mapnik/issues/934
+
+
 def test_feature_expression_evaluation_attr_with_spaces():
     context = mapnik.Context()
     context.push('name with space')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     f['name with space'] = u'a'
-    eq_(f['name with space'],u'a')
+    eq_(f['name with space'], u'a')
     expr = mapnik.Expression("[name with space]='a'")
-    eq_(str(expr),"([name with space]='a')")
-    eq_(expr.evaluate(f),True)
+    eq_(str(expr), "([name with space]='a')")
+    eq_(expr.evaluate(f), True)
 
 # https://github.com/mapnik/mapnik/issues/2390
+
+
 @raises(RuntimeError)
 def test_feature_from_geojson():
     ctx = mapnik.Context()
@@ -104,7 +122,7 @@ def test_feature_from_geojson():
          }
     }
     """
-    mapnik.Feature.from_geojson(inline_string,ctx)
+    mapnik.Feature.from_geojson(inline_string, ctx)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/filter_test.py b/test/python_tests/filter_test.py
index 34845ce..f9a1010 100644
--- a/test/python_tests/filter_test.py
+++ b/test/python_tests/filter_test.py
@@ -1,11 +1,20 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+import sys
+
+from nose.tools import eq_, raises
 
-from nose.tools import eq_,raises
-from utilities import run_all
 import mapnik
 
-if hasattr(mapnik,'Expression'):
+from .utilities import run_all
+
+PYTHON3 = sys.version_info[0] == 3
+if PYTHON3:
+    long = int
+    unicode = str
+
+
+if hasattr(mapnik, 'Expression'):
     mapnik.Filter = mapnik.Expression
 
 map_ = '''<Map>
@@ -50,9 +59,10 @@ map_ = '''<Map>
     </Style>
 </Map>'''
 
+
 def test_filter_init():
-    m = mapnik.Map(1,1)
-    mapnik.load_map_from_string(m,map_)
+    m = mapnik.Map(1, 1)
+    mapnik.load_map_from_string(m, map_)
     filters = []
     filters.append(mapnik.Filter("([region]>=0) and ([region]<=50)"))
     filters.append(mapnik.Filter("(([region]>=0) and ([region]<=50))"))
@@ -86,361 +96,402 @@ def test_filter_init():
 
     first = filters[0]
     for f in filters:
-        eq_(str(first),str(f))
+        eq_(str(first), str(f))
 
     s = m.find_style('s2')
 
-    eq_(s.filter_mode,mapnik.filter_mode.FIRST)
+    eq_(s.filter_mode, mapnik.filter_mode.FIRST)
 
 
 def test_geometry_type_eval():
     # clashing field called 'mapnik::geometry'
     context2 = mapnik.Context()
     context2.push('mapnik::geometry_type')
-    f = mapnik.Feature(context2,0)
+    f = mapnik.Feature(context2, 0)
     f["mapnik::geometry_type"] = 'sneaky'
     expr = mapnik.Expression("[mapnik::geometry_type]")
-    eq_(expr.evaluate(f),0)
+    eq_(expr.evaluate(f), 0)
 
     expr = mapnik.Expression("[mapnik::geometry_type]")
     context = mapnik.Context()
 
     # no geometry
-    f = mapnik.Feature(context,0)
-    eq_(expr.evaluate(f),0)
-    eq_(mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f),True)
+    f = mapnik.Feature(context, 0)
+    eq_(expr.evaluate(f), 0)
+    eq_(mapnik.Expression("[mapnik::geometry_type]=0").evaluate(f), True)
 
     # POINT = 1
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f.geometry = mapnik.Geometry.from_wkt('POINT(10 40)')
-    eq_(expr.evaluate(f),1)
-    eq_(mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f),True)
+    eq_(expr.evaluate(f), 1)
+    eq_(mapnik.Expression("[mapnik::geometry_type]=point").evaluate(f), True)
 
     # LINESTRING = 2
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f.geometry = mapnik.Geometry.from_wkt('LINESTRING (30 10, 10 30, 40 40)')
-    eq_(expr.evaluate(f),2)
-    eq_(mapnik.Expression("[mapnik::geometry_type] = linestring").evaluate(f),True)
+    eq_(expr.evaluate(f), 2)
+    eq_(mapnik.Expression(
+        "[mapnik::geometry_type] = linestring").evaluate(f), True)
 
     # POLYGON = 3
-    f = mapnik.Feature(context,0)
-    f.geometry = mapnik.Geometry.from_wkt('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))')
-    eq_(expr.evaluate(f),3)
-    eq_(mapnik.Expression("[mapnik::geometry_type] = polygon").evaluate(f),True)
+    f = mapnik.Feature(context, 0)
+    f.geometry = mapnik.Geometry.from_wkt(
+        'POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))')
+    eq_(expr.evaluate(f), 3)
+    eq_(mapnik.Expression(
+        "[mapnik::geometry_type] = polygon").evaluate(f), True)
 
     # COLLECTION = 4
-    f = mapnik.Feature(context,0)
-    geom = mapnik.Geometry.from_wkt('GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))')
-    f.geometry = geom;
-    eq_(expr.evaluate(f),4)
-    eq_(mapnik.Expression("[mapnik::geometry_type] = collection").evaluate(f),True)
+    f = mapnik.Feature(context, 0)
+    geom = mapnik.Geometry.from_wkt(
+        'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))')
+    f.geometry = geom
+    eq_(expr.evaluate(f), 4)
+    eq_(mapnik.Expression(
+        "[mapnik::geometry_type] = collection").evaluate(f), True)
+
 
 def test_regex_match():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["name"] = 'test'
     expr = mapnik.Expression("[name].match('test')")
-    eq_(expr.evaluate(f),True) # 1 == True
+    eq_(expr.evaluate(f), True)  # 1 == True
+
 
 def test_unicode_regex_match():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["name"] = 'Québec'
     expr = mapnik.Expression("[name].match('Québec')")
-    eq_(expr.evaluate(f),True) # 1 == True
+    eq_(expr.evaluate(f), True)  # 1 == True
+
 
 def test_regex_replace():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["name"] = 'test'
     expr = mapnik.Expression("[name].replace('(\B)|( )','$1 ')")
-    eq_(expr.evaluate(f),'t e s t')
+    eq_(expr.evaluate(f), 't e s t')
+
 
 def test_unicode_regex_replace_to_str():
     expr = mapnik.Expression("[name].replace('(\B)|( )','$1 ')")
-    eq_(str(expr),"[name].replace('(\B)|( )','$1 ')")
+    eq_(str(expr), "[name].replace('(\B)|( )','$1 ')")
+
 
 def test_unicode_regex_replace():
     context = mapnik.Context()
     context.push('name')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["name"] = 'Québec'
     expr = mapnik.Expression("[name].replace('(\B)|( )','$1 ')")
     # will fail if -DBOOST_REGEX_HAS_ICU is not defined
     eq_(expr.evaluate(f), u'Q u é b e c')
 
+
 def test_float_precision():
     context = mapnik.Context()
     context.push('num')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["num1"] = 1.0000
     f["num2"] = 1.0001
-    eq_(f["num1"],1.0000)
-    eq_(f["num2"],1.0001)
+    eq_(f["num1"], 1.0000)
+    eq_(f["num2"], 1.0001)
     expr = mapnik.Expression("[num1] = 1.0000")
-    eq_(expr.evaluate(f),True)
+    eq_(expr.evaluate(f), True)
     expr = mapnik.Expression("[num1].match('1')")
-    eq_(expr.evaluate(f),True)
+    eq_(expr.evaluate(f), True)
     expr = mapnik.Expression("[num2] = 1.0001")
-    eq_(expr.evaluate(f),True)
+    eq_(expr.evaluate(f), True)
     expr = mapnik.Expression("[num2].match('1.0001')")
-    eq_(expr.evaluate(f),True)
+    eq_(expr.evaluate(f), True)
+
 
 def test_string_matching_on_precision():
     context = mapnik.Context()
     context.push('num')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["num"] = "1.0000"
-    eq_(f["num"],"1.0000")
+    eq_(f["num"], "1.0000")
     expr = mapnik.Expression("[num].match('.*(^0|00)$')")
-    eq_(expr.evaluate(f),True)
+    eq_(expr.evaluate(f), True)
+
 
 def test_creation_of_null_value():
     context = mapnik.Context()
     context.push('nv')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["nv"] = None
-    eq_(f["nv"],None)
-    eq_(f["nv"] is None,True)
+    eq_(f["nv"], None)
+    eq_(f["nv"] is None, True)
     # test boolean
     f["nv"] = 0
-    eq_(f["nv"],0)
-    eq_(f["nv"] is not None,True)
+    eq_(f["nv"], 0)
+    eq_(f["nv"] is not None, True)
+
 
 def test_creation_of_bool():
     context = mapnik.Context()
     context.push('bool')
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["bool"] = True
-    eq_(f["bool"],True)
+    eq_(f["bool"], True)
     # TODO - will become int of 1 do to built in boost python conversion
     # https://github.com/mapnik/mapnik/issues/1873
-    eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),True)
+    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), True)
     f["bool"] = False
-    eq_(f["bool"],False)
-    eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),True)
+    eq_(f["bool"], False)
+    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), True)
     # test NoneType
     f["bool"] = None
-    eq_(f["bool"],None)
-    eq_(isinstance(f["bool"],bool) or isinstance(f["bool"],long),False)
+    eq_(f["bool"], None)
+    eq_(isinstance(f["bool"], bool) or isinstance(f["bool"], long), False)
     # test integer
     f["bool"] = 0
-    eq_(f["bool"],0)
+    eq_(f["bool"], 0)
     # https://github.com/mapnik/mapnik/issues/1873
     # ugh, boost_python's built into converter does not work right
-    #eq_(isinstance(f["bool"],bool),False)
+    # eq_(isinstance(f["bool"],bool),False)
 
 null_equality = [
-  ['hello',False,unicode],
-  [u'',False,unicode],
-  [0,False,long],
-  [123,False,long],
-  [0.0,False,float],
-  [123.123,False,float],
-  [.1,False,float],
-  [False,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
-  [True,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
-  [None,True,None],
-  [2147483648,False,long],
-  [922337203685477580,False,long]
+    ['hello', False, unicode],
+    [u'', False, unicode],
+    [0, False, long],
+    [123, False, long],
+    [0.0, False, float],
+    [123.123, False, float],
+    [.1, False, float],
+    # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+    [False, False, long],
+    # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+    [True, False, long],
+    [None, True, None],
+    [2147483648, False, long],
+    [922337203685477580, False, long]
 ]
 
+
 def test_expressions_with_null_equality():
     for eq in null_equality:
         context = mapnik.Context()
-        f = mapnik.Feature(context,0)
+        f = mapnik.Feature(context, 0)
         f["prop"] = eq[0]
-        eq_(f["prop"],eq[0])
+        eq_(f["prop"], eq[0])
         if eq[0] is None:
             eq_(f["prop"] is None, True)
         else:
-            eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2]))
+            eq_(isinstance(f['prop'], eq[2]), True,
+                '%s is not an instance of %s' % (f['prop'], eq[2]))
         expr = mapnik.Expression("[prop] = null")
-        eq_(expr.evaluate(f),eq[1])
+        eq_(expr.evaluate(f), eq[1])
         expr = mapnik.Expression("[prop] is null")
-        eq_(expr.evaluate(f),eq[1])
+        eq_(expr.evaluate(f), eq[1])
+
 
 def test_expressions_with_null_equality2():
     for eq in null_equality:
         context = mapnik.Context()
-        f = mapnik.Feature(context,0)
+        f = mapnik.Feature(context, 0)
         f["prop"] = eq[0]
-        eq_(f["prop"],eq[0])
+        eq_(f["prop"], eq[0])
         if eq[0] is None:
             eq_(f["prop"] is None, True)
         else:
-            eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2]))
+            eq_(isinstance(f['prop'], eq[2]), True,
+                '%s is not an instance of %s' % (f['prop'], eq[2]))
         # TODO - support `is not` syntax:
         # https://github.com/mapnik/mapnik/issues/796
         expr = mapnik.Expression("not [prop] is null")
-        eq_(expr.evaluate(f),not eq[1])
+        eq_(expr.evaluate(f), not eq[1])
         # https://github.com/mapnik/mapnik/issues/1642
         expr = mapnik.Expression("[prop] != null")
-        eq_(expr.evaluate(f),not eq[1])
+        eq_(expr.evaluate(f), not eq[1])
 
 truthyness = [
-  [u'hello',True,unicode],
-  [u'',False,unicode],
-  [0,False,long],
-  [123,True,long],
-  [0.0,False,float],
-  [123.123,True,float],
-  [.1,True,float],
-  [False,False,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
-  [True,True,long], # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
-  [None,False,None],
-  [2147483648,True,long],
-  [922337203685477580,True,long]
+    [u'hello', True, unicode],
+    [u'', False, unicode],
+    [0, False, long],
+    [123, True, long],
+    [0.0, False, float],
+    [123.123, True, float],
+    [.1, True, float],
+    # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+    [False, False, long],
+    # TODO - should become bool: https://github.com/mapnik/mapnik/issues/1873
+    [True, True, long],
+    [None, False, None],
+    [2147483648, True, long],
+    [922337203685477580, True, long]
 ]
 
+
 def test_expressions_for_thruthyness():
     context = mapnik.Context()
     for eq in truthyness:
-        f = mapnik.Feature(context,0)
+        f = mapnik.Feature(context, 0)
         f["prop"] = eq[0]
-        eq_(f["prop"],eq[0])
+        eq_(f["prop"], eq[0])
         if eq[0] is None:
             eq_(f["prop"] is None, True)
         else:
-            eq_(isinstance(f['prop'],eq[2]),True,'%s is not an instance of %s' % (f['prop'],eq[2]))
+            eq_(isinstance(f['prop'], eq[2]), True,
+                '%s is not an instance of %s' % (f['prop'], eq[2]))
         expr = mapnik.Expression("[prop]")
-        eq_(expr.to_bool(f),eq[1])
+        eq_(expr.to_bool(f), eq[1])
         expr = mapnik.Expression("not [prop]")
-        eq_(expr.to_bool(f),not eq[1])
+        eq_(expr.to_bool(f), not eq[1])
         expr = mapnik.Expression("! [prop]")
-        eq_(expr.to_bool(f),not eq[1])
+        eq_(expr.to_bool(f), not eq[1])
     # also test if feature does not have property at all
-    f2 = mapnik.Feature(context,1)
+    f2 = mapnik.Feature(context, 1)
     # no property existing will return value_null since
     # https://github.com/mapnik/mapnik/commit/562fada9d0f680f59b2d9f396c95320a0d753479#include/mapnik/feature.hpp
-    eq_(f2["prop"] is None,True)
+    eq_(f2["prop"] is None, True)
     expr = mapnik.Expression("[prop]")
-    eq_(expr.evaluate(f2),None)
-    eq_(expr.to_bool(f2),False)
+    eq_(expr.evaluate(f2), None)
+    eq_(expr.to_bool(f2), False)
 
 # https://github.com/mapnik/mapnik/issues/1859
+
+
 def test_if_null_and_empty_string_are_equal():
     context = mapnik.Context()
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["empty"] = u""
     f["null"] = None
     # ensure base assumptions are good
-    eq_(mapnik.Expression("[empty] = ''").to_bool(f),True)
-    eq_(mapnik.Expression("[null] = null").to_bool(f),True)
-    eq_(mapnik.Expression("[empty] != ''").to_bool(f),False)
-    eq_(mapnik.Expression("[null] != null").to_bool(f),False)
+    eq_(mapnik.Expression("[empty] = ''").to_bool(f), True)
+    eq_(mapnik.Expression("[null] = null").to_bool(f), True)
+    eq_(mapnik.Expression("[empty] != ''").to_bool(f), False)
+    eq_(mapnik.Expression("[null] != null").to_bool(f), False)
     # now test expected behavior
-    eq_(mapnik.Expression("[null] = ''").to_bool(f),False)
-    eq_(mapnik.Expression("[empty] = null").to_bool(f),False)
-    eq_(mapnik.Expression("[empty] != null").to_bool(f),True)
+    eq_(mapnik.Expression("[null] = ''").to_bool(f), False)
+    eq_(mapnik.Expression("[empty] = null").to_bool(f), False)
+    eq_(mapnik.Expression("[empty] != null").to_bool(f), True)
     # this one is the back compatibility shim
-    eq_(mapnik.Expression("[null] != ''").to_bool(f),False)
+    eq_(mapnik.Expression("[null] != ''").to_bool(f), False)
+
 
 def test_filtering_nulls_and_empty_strings():
     context = mapnik.Context()
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["prop"] = u"hello"
-    eq_(f["prop"],u"hello")
-    eq_(mapnik.Expression("[prop]").to_bool(f),True)
-    eq_(mapnik.Expression("! [prop]").to_bool(f),False)
-    eq_(mapnik.Expression("[prop] != null").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f),True)
+    eq_(f["prop"], u"hello")
+    eq_(mapnik.Expression("[prop]").to_bool(f), True)
+    eq_(mapnik.Expression("! [prop]").to_bool(f), False)
+    eq_(mapnik.Expression("[prop] != null").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] != ''").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] != null and [prop] != ''").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] != null or [prop] != ''").to_bool(f), True)
     f["prop2"] = u""
-    eq_(f["prop2"],u"")
-    eq_(mapnik.Expression("[prop2]").to_bool(f),False)
-    eq_(mapnik.Expression("! [prop2]").to_bool(f),True)
-    eq_(mapnik.Expression("[prop2] != null").to_bool(f),True)
-    eq_(mapnik.Expression("[prop2] != ''").to_bool(f),False)
-    eq_(mapnik.Expression("[prop2] = ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop2] != null and [prop2] != ''").to_bool(f),False)
+    eq_(f["prop2"], u"")
+    eq_(mapnik.Expression("[prop2]").to_bool(f), False)
+    eq_(mapnik.Expression("! [prop2]").to_bool(f), True)
+    eq_(mapnik.Expression("[prop2] != null").to_bool(f), True)
+    eq_(mapnik.Expression("[prop2] != ''").to_bool(f), False)
+    eq_(mapnik.Expression("[prop2] = ''").to_bool(f), True)
+    eq_(mapnik.Expression("[prop2] != null or [prop2] != ''").to_bool(f), True)
+    eq_(mapnik.Expression(
+        "[prop2] != null and [prop2] != ''").to_bool(f), False)
     f["prop3"] = None
-    eq_(f["prop3"],None)
-    eq_(mapnik.Expression("[prop3]").to_bool(f),False)
-    eq_(mapnik.Expression("! [prop3]").to_bool(f),True)
-    eq_(mapnik.Expression("[prop3] != null").to_bool(f),False)
-    eq_(mapnik.Expression("[prop3] = null").to_bool(f),True)
+    eq_(f["prop3"], None)
+    eq_(mapnik.Expression("[prop3]").to_bool(f), False)
+    eq_(mapnik.Expression("! [prop3]").to_bool(f), True)
+    eq_(mapnik.Expression("[prop3] != null").to_bool(f), False)
+    eq_(mapnik.Expression("[prop3] = null").to_bool(f), True)
 
     # https://github.com/mapnik/mapnik/issues/1859
     #eq_(mapnik.Expression("[prop3] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop3] != ''").to_bool(f),False)
+    eq_(mapnik.Expression("[prop3] != ''").to_bool(f), False)
 
-    eq_(mapnik.Expression("[prop3] = ''").to_bool(f),False)
+    eq_(mapnik.Expression("[prop3] = ''").to_bool(f), False)
 
     # https://github.com/mapnik/mapnik/issues/1859
     #eq_(mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop3] != null or [prop3] != ''").to_bool(f),False)
+    eq_(mapnik.Expression(
+        "[prop3] != null or [prop3] != ''").to_bool(f), False)
 
-    eq_(mapnik.Expression("[prop3] != null and [prop3] != ''").to_bool(f),False)
+    eq_(mapnik.Expression(
+        "[prop3] != null and [prop3] != ''").to_bool(f), False)
     # attr not existing should behave the same as prop3
-    eq_(mapnik.Expression("[prop4]").to_bool(f),False)
-    eq_(mapnik.Expression("! [prop4]").to_bool(f),True)
-    eq_(mapnik.Expression("[prop4] != null").to_bool(f),False)
-    eq_(mapnik.Expression("[prop4] = null").to_bool(f),True)
+    eq_(mapnik.Expression("[prop4]").to_bool(f), False)
+    eq_(mapnik.Expression("! [prop4]").to_bool(f), True)
+    eq_(mapnik.Expression("[prop4] != null").to_bool(f), False)
+    eq_(mapnik.Expression("[prop4] = null").to_bool(f), True)
 
     # https://github.com/mapnik/mapnik/issues/1859
     ##eq_(mapnik.Expression("[prop4] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop4] != ''").to_bool(f),False)
+    eq_(mapnik.Expression("[prop4] != ''").to_bool(f), False)
 
-    eq_(mapnik.Expression("[prop4] = ''").to_bool(f),False)
+    eq_(mapnik.Expression("[prop4] = ''").to_bool(f), False)
 
     # https://github.com/mapnik/mapnik/issues/1859
     ##eq_(mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop4] != null or [prop4] != ''").to_bool(f),False)
+    eq_(mapnik.Expression(
+        "[prop4] != null or [prop4] != ''").to_bool(f), False)
 
-    eq_(mapnik.Expression("[prop4] != null and [prop4] != ''").to_bool(f),False)
+    eq_(mapnik.Expression(
+        "[prop4] != null and [prop4] != ''").to_bool(f), False)
     f["prop5"] = False
-    eq_(f["prop5"],False)
-    eq_(mapnik.Expression("[prop5]").to_bool(f),False)
-    eq_(mapnik.Expression("! [prop5]").to_bool(f),True)
-    eq_(mapnik.Expression("[prop5] != null").to_bool(f),True)
-    eq_(mapnik.Expression("[prop5] = null").to_bool(f),False)
-    eq_(mapnik.Expression("[prop5] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop5] = ''").to_bool(f),False)
-    eq_(mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f),True)
-    eq_(mapnik.Expression("[prop5] != null and [prop5] != ''").to_bool(f),True)
+    eq_(f["prop5"], False)
+    eq_(mapnik.Expression("[prop5]").to_bool(f), False)
+    eq_(mapnik.Expression("! [prop5]").to_bool(f), True)
+    eq_(mapnik.Expression("[prop5] != null").to_bool(f), True)
+    eq_(mapnik.Expression("[prop5] = null").to_bool(f), False)
+    eq_(mapnik.Expression("[prop5] != ''").to_bool(f), True)
+    eq_(mapnik.Expression("[prop5] = ''").to_bool(f), False)
+    eq_(mapnik.Expression("[prop5] != null or [prop5] != ''").to_bool(f), True)
+    eq_(mapnik.Expression(
+        "[prop5] != null and [prop5] != ''").to_bool(f), True)
     # note, we need to do [prop5] != 0 here instead of false due to this bug:
     # https://github.com/mapnik/mapnik/issues/1873
-    eq_(mapnik.Expression("[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f),False)
+    eq_(mapnik.Expression(
+        "[prop5] != null and [prop5] != '' and [prop5] != 0").to_bool(f), False)
 
 # https://github.com/mapnik/mapnik/issues/1872
+
+
 def test_falseyness_comparision():
     context = mapnik.Context()
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["prop"] = 0
-    eq_(mapnik.Expression("[prop]").to_bool(f),False)
-    eq_(mapnik.Expression("[prop] = false").to_bool(f),True)
-    eq_(mapnik.Expression("not [prop] != false").to_bool(f),True)
-    eq_(mapnik.Expression("not [prop] = true").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] = true").to_bool(f),False)
-    eq_(mapnik.Expression("[prop] != true").to_bool(f),True)
-
-# https://github.com/mapnik/mapnik/issues/1806, fixed by https://github.com/mapnik/mapnik/issues/1872
+    eq_(mapnik.Expression("[prop]").to_bool(f), False)
+    eq_(mapnik.Expression("[prop] = false").to_bool(f), True)
+    eq_(mapnik.Expression("not [prop] != false").to_bool(f), True)
+    eq_(mapnik.Expression("not [prop] = true").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] = true").to_bool(f), False)
+    eq_(mapnik.Expression("[prop] != true").to_bool(f), True)
+
+# https://github.com/mapnik/mapnik/issues/1806, fixed by
+# https://github.com/mapnik/mapnik/issues/1872
+
+
 def test_truthyness_comparision():
     context = mapnik.Context()
-    f = mapnik.Feature(context,0)
+    f = mapnik.Feature(context, 0)
     f["prop"] = 1
-    eq_(mapnik.Expression("[prop]").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] = false").to_bool(f),False)
-    eq_(mapnik.Expression("not [prop] != false").to_bool(f),False)
-    eq_(mapnik.Expression("not [prop] = true").to_bool(f),False)
-    eq_(mapnik.Expression("[prop] = true").to_bool(f),True)
-    eq_(mapnik.Expression("[prop] != true").to_bool(f),False)
+    eq_(mapnik.Expression("[prop]").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] = false").to_bool(f), False)
+    eq_(mapnik.Expression("not [prop] != false").to_bool(f), False)
+    eq_(mapnik.Expression("not [prop] = true").to_bool(f), False)
+    eq_(mapnik.Expression("[prop] = true").to_bool(f), True)
+    eq_(mapnik.Expression("[prop] != true").to_bool(f), False)
+
 
 def test_division_by_zero():
     expr = mapnik.Expression('[a]/[b]')
     c = mapnik.Context()
     c.push('a')
     c.push('b')
-    f = mapnik.Feature(c,0);
+    f = mapnik.Feature(c, 0)
     f['a'] = 1
     f['b'] = 0
-    eq_(expr.evaluate(f),None)
+    eq_(expr.evaluate(f), None)
+
 
 @raises(RuntimeError)
 def test_invalid_syntax1():
diff --git a/test/python_tests/fontset_test.py b/test/python_tests/fontset_test.py
index ee8fd7d..0baed51 100644
--- a/test/python_tests/fontset_test.py
+++ b/test/python_tests/fontset_test.py
@@ -1,20 +1,26 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_loading_fontset_from_map():
-    m = mapnik.Map(256,256)
-    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
+    m = mapnik.Map(256, 256)
+    mapnik.load_map(m, '../data/good_maps/fontset.xml', True)
     fs = m.find_fontset('book-fonts')
-    eq_(len(fs.names),2)
-    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
+    eq_(len(fs.names), 2)
+    eq_(list(fs.names), ['DejaVu Sans Book', 'DejaVu Sans Oblique'])
 
 # def test_loading_fontset_from_python():
 #     m = mapnik.Map(256,256)
diff --git a/test/python_tests/geojson_plugin_test.py b/test/python_tests/geojson_plugin_test.py
index ef7c74a..e68b9eb 100644
--- a/test/python_tests/geojson_plugin_test.py
+++ b/test/python_tests/geojson_plugin_test.py
@@ -1,13 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,assert_almost_equal
-from utilities import execution_path, run_all
-import os, mapnik
-try:
-    import json
-except ImportError:
-    import simplejson as json
+import os
+
+from nose.tools import assert_almost_equal, eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -17,7 +18,9 @@ def setup():
 if 'geojson' in mapnik.DatasourceCache.plugin_names():
 
     def test_geojson_init():
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson')
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson')
         e = ds.envelope()
         assert_almost_equal(e.minx, -81.705583, places=7)
         assert_almost_equal(e.miny, 41.480573, places=6)
@@ -25,11 +28,13 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
         assert_almost_equal(e.maxy, 41.480573, places=3)
 
     def test_geojson_properties():
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson')
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson')
         f = ds.features_at_point(ds.envelope().center()).features[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -40,12 +45,14 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
         eq_(f['NOM_FR'], u'Qu\xe9bec')
         eq_(f['NOM_FR'], u'Québec')
 
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson')
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson')
         f = ds.all_features()[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -55,12 +62,16 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
         eq_(f['boolean'], True)
         eq_(f['NOM_FR'], u'Qu\xe9bec')
         eq_(f['NOM_FR'], u'Québec')
+
     def test_large_geojson_properties():
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson',cache_features = False)
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson',
+            cache_features=False)
         f = ds.features_at_point(ds.envelope().center()).features[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -71,12 +82,14 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
         eq_(f['NOM_FR'], u'Qu\xe9bec')
         eq_(f['NOM_FR'], u'Québec')
 
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson')
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson')
         f = ds.all_features()[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -91,17 +104,21 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
         # will silently fail since it is a geometry and needs to be a featurecollection.
         #ds = mapnik.Datasource(type='geojson',inline='{"type":"LineString","coordinates":[[0,0],[10,10]]}')
         # works since it is a featurecollection
-        ds = mapnik.Datasource(type='geojson',inline='{ "type":"FeatureCollection", "features": [ { "type":"Feature", "properties":{"name":"test"}, "geometry": { "type":"LineString","coordinates":[[0,0],[10,10]] } } ]}')
-        eq_(len(ds.fields()),1)
+        ds = mapnik.Datasource(
+            type='geojson',
+            inline='{ "type":"FeatureCollection", "features": [ { "type":"Feature", "properties":{"name":"test"}, "geometry": { "type":"LineString","coordinates":[[0,0],[10,10]] } } ]}')
+        eq_(len(ds.fields()), 1)
         f = ds.all_features()[0]
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.LineString)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.LineString)
         eq_(f['name'], u'test')
 
 #    @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
-        ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.geojson')
-        eq_(len(ds.fields()),7)
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/escaped.geojson')
+        eq_(len(ds.fields()), 7)
         # TODO - this sorting is messed up
         #eq_(ds.fields(),['name', 'int', 'double', 'description', 'boolean', 'NOM_FR'])
         #eq_(ds.field_types(),['str', 'int', 'float', 'str', 'bool', 'str'])
@@ -114,11 +131,13 @@ if 'geojson' in mapnik.DatasourceCache.plugin_names():
 #        fs = ds.features(query)
 
     def test_parsing_feature_collection_with_top_level_properties():
-        ds = mapnik.Datasource(type='geojson',file='../data/json/feature_collection_level_properties.json')
+        ds = mapnik.Datasource(
+            type='geojson',
+            file='../data/json/feature_collection_level_properties.json')
         f = ds.all_features()[0]
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
         eq_(f['feat_name'], u'feat_value')
 
 if __name__ == "__main__":
diff --git a/test/python_tests/geometry_io_test.py b/test/python_tests/geometry_io_test.py
index 58e4f36..e51686c 100644
--- a/test/python_tests/geometry_io_test.py
+++ b/test/python_tests/geometry_io_test.py
@@ -1,52 +1,88 @@
-#encoding: utf8
+# encoding: utf8
 
-from nose.tools import eq_,raises
 import os
-from utilities import execution_path, run_all
-import mapnik
 from binascii import unhexlify
 
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 try:
     import json
 except ImportError:
     import simplejson as json
 
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
 wkts = [
-    [mapnik.GeometryType.Point,"POINT(30 10)", "01010000000000000000003e400000000000002440"],
-    [mapnik.GeometryType.Point,"POINT(30.0 10.0)", "01010000000000000000003e400000000000002440"],
-    [mapnik.GeometryType.Point,"POINT(30.1 10.1)", "01010000009a99999999193e403333333333332440"],
-    [mapnik.GeometryType.LineString,"LINESTRING(30 10,10 30,40 40)", "0102000000030000000000000000003e40000000000000244000000000000024400000000000003e4000000000000044400000000000004440"],
-    [mapnik.GeometryType.Polygon,"POLYGON((30 10,10 20,20 40,40 40,30 10))", "010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440"],
-    [mapnik.GeometryType.Polygon,"POLYGON((35 10,10 20,15 40,45 45,35 10),(20 30,35 35,30 20,20 30))","0103000000020000000500000000000000008041400000000000002440000000000000244000000000000034400000000000002e40000000000000444000000000008046400000000000804640000000000080414000000000000024400400000000000000000034400000000000003e40000000000080414000000000008041400000000000003e40000000000000344000000000000034400000000000003e40"],
-    [mapnik.GeometryType.MultiPoint,"MULTIPOINT((10 40),(40 30),(20 20),(30 10))","010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003e4001010000000000000000003440000000000000344001010000000000000000003e400000000000002440"],
-    [mapnik.GeometryType.MultiLineString,"MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10))","010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440"],
-    [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((30 20,10 40,45 40,30 20)),((15 5,40 10,10 20,5 10,15 5)))","010600000002000000010300000001000000040000000000000000003e40000000000000344000000000000024400000000000004440000000000080464000000000000044400000000000003e400000000000003440010300000001000000050000000000000000002e4000000000000014400000000000004440000000000000244000000000000024400000000000003440000000000000144000000000000024400000000000002e400000000000001440"],
-    [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)))","01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003e40000000000000444000000000000044400103000000020000000600000000000000000034400000000000804140000000000080464000000000000034400000000000003e40000000000000144000000000000024400000000000002440000000000000244000 [...]
-    [mapnik.GeometryType.GeometryCollection,"GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))","01070000000300000001030000000100000005000000000000000000f03f000000000000f03f0000000000000040000000000000f03f00000000000000400000000000000040000000000000f03f0000000000000040000000000000f03f000000000000f03f0101000000000000000000004000000000000008400102000000020000000000000000000040000000000000084000000000000008400000000000001040"],
-    [mapnik.GeometryType.Polygon,"POLYGON((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365),(-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878 7 [...]
-    [mapnik.GeometryType.MultiPolygon,"MULTIPOLYGON(((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365)),((-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766, [...]
+    [mapnik.GeometryType.Point,
+     "POINT(30 10)",
+     "01010000000000000000003e400000000000002440"],
+    [mapnik.GeometryType.Point,
+     "POINT(30.0 10.0)",
+     "01010000000000000000003e400000000000002440"],
+    [mapnik.GeometryType.Point,
+     "POINT(30.1 10.1)",
+     "01010000009a99999999193e403333333333332440"],
+    [mapnik.GeometryType.LineString,
+     "LINESTRING(30 10,10 30,40 40)",
+     "0102000000030000000000000000003e40000000000000244000000000000024400000000000003e4000000000000044400000000000004440"],
+    [mapnik.GeometryType.Polygon,
+     "POLYGON((30 10,10 20,20 40,40 40,30 10))",
+     "010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440"],
+    [mapnik.GeometryType.Polygon,
+     "POLYGON((35 10,10 20,15 40,45 45,35 10),(20 30,35 35,30 20,20 30))",
+     "0103000000020000000500000000000000008041400000000000002440000000000000244000000000000034400000000000002e40000000000000444000000000008046400000000000804640000000000080414000000000000024400400000000000000000034400000000000003e40000000000080414000000000008041400000000000003e40000000000000344000000000000034400000000000003e40"],
+    [mapnik.GeometryType.MultiPoint,
+     "MULTIPOINT((10 40),(40 30),(20 20),(30 10))",
+     "010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003e4001010000000000000000003440000000000000344001010000000000000000003e400000000000002440"],
+    [mapnik.GeometryType.MultiLineString,
+     "MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10))",
+     "010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440"],
+    [mapnik.GeometryType.MultiPolygon,
+     "MULTIPOLYGON(((30 20,10 40,45 40,30 20)),((15 5,40 10,10 20,5 10,15 5)))",
+     "010600000002000000010300000001000000040000000000000000003e40000000000000344000000000000024400000000000004440000000000080464000000000000044400000000000003e400000000000003440010300000001000000050000000000000000002e4000000000000014400000000000004440000000000000244000000000000024400000000000003440000000000000144000000000000024400000000000002e400000000000001440"],
+    [mapnik.GeometryType.MultiPolygon,
+     "MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)))",
+     "01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003e40000000000000444000000000000044400103000000020000000600000000000000000034400000000000804140000000000080464000000000000034400000000000003e4000000000000014400000000000002440000000000000244000000000000024400000000000003e4000000000000034400000000000804140040000000000000000003e40000000000000344000000000000034400000000000003940000000000000344000000000 [...]
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))",
+     "01070000000300000001030000000100000005000000000000000000f03f000000000000f03f0000000000000040000000000000f03f00000000000000400000000000000040000000000000f03f0000000000000040000000000000f03f000000000000f03f0101000000000000000000004000000000000008400102000000020000000000000000000040000000000000084000000000000008400000000000001040"],
+    [mapnik.GeometryType.Polygon, "POLYGON((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365),(-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766,-177.970878  [...]
+     "010300000002000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140944c4eed8c4066c0dfc14f1c40dc51409a97 [...]
+    [mapnik.GeometryType.MultiPolygon, "MULTIPOLYGON(((-178.32319 71.518365,-178.321586 71.518439,-178.259635 71.510688,-178.304862 71.513129,-178.32319 71.518365)),((-178.32319 71.518365,-178.341544 71.517524,-178.32244 71.505439,-178.215323 71.478034,-178.193473 71.47663,-178.147757 71.485175,-178.124442 71.481879,-178.005729 71.448615,-178.017203 71.441413,-178.054191 71.428778,-178.047049 71.425727,-178.033439 71.417792,-178.026236 71.415107,-178.030082 71.413459,-178.039908 71.40766 [...]
+     "010600000002000000010300000001000000050000009e0c8e92574a66c0079964e42ce151403f1bb96e4a4a66c0247ec51a2ee15140b43c0fee4e4866c06b9db81cafe0514062f9f36dc14966c04568041bd7e051409e0c8e92574a66c0079964e42ce15140010300000001000000560000009e0c8e92574a66c0079964e42ce15140a4c4aeeded4a66c049b9fb1c1fe1514083ddb06d514a66c0dec7d11c59e0514074620fede34666c01118eb1b98de514017f549ee304666c0b8921d1b81de51402a58e36cba4466c08e75711b0ddf5140f607ca6dfb4366c04568041bd7de5140717495ee2e4066c0af5fb01bb6dc5140 [...]
 ]
 
 
 geojson = [
-    [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30,10]}'],
-    [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30.0,10.0]}'],
-    [mapnik.GeometryType.Point,'{"type":"Point","coordinates":[30.1,10.1]}'],
-    [mapnik.GeometryType.LineString,'{"type":"LineString","coordinates":[[30.0,10.0],[10.0,30.0],[40.0,40.0]]}'],
-    [mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[30.0,10.0],[10.0,20.0],[20.0,40.0],[40.0,40.0],[30.0,10.0]]]}'],
-    [mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[35.0,10.0],[10.0,20.0],[15.0,40.0],[45.0,45.0],[35.0,10.0]],[[20.0,30.0],[35.0,35.0],[30.0,20.0],[20.0,30.0]]]}'],
-    [mapnik.GeometryType.MultiPoint,'{"type":"MultiPoint","coordinates":[[10.0,40.0],[40.0,30.0],[20.0,20.0],[30.0,10.0]]}'],
-    [mapnik.GeometryType.MultiLineString,'{"type":"MultiLineString","coordinates":[[[10.0,10.0],[20.0,20.0],[10.0,40.0]],[[40.0,40.0],[30.0,30.0],[40.0,20.0],[30.0,10.0]]]}'],
-    [mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[30.0,20.0],[10.0,40.0],[45.0,40.0],[30.0,20.0]]],[[[15.0,5.0],[40.0,10.0],[10.0,20.0],[5.0,10.0],[15.0,5.0]]]]}'],
-    [mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[40.0,40.0],[20.0,45.0],[45.0,30.0],[40.0,40.0]]],[[[20.0,35.0],[45.0,20.0],[30.0,5.0],[10.0,10.0],[10.0,30.0],[20.0,35.0]],[[30.0,20.0],[20.0,25.0],[20.0,15.0],[30.0,20.0]]]]}'],
-    [mapnik.GeometryType.GeometryCollection,'{"type":"GeometryCollection","geometries":[{"type":"Polygon","coordinates":[[[1.0,1.0],[2.0,1.0],[2.0,2.0],[1.0,2.0],[1.0,1.0]]]},{"type":"Point","coordinates":[2.0,3.0]},{"type":"LineString","coordinates":[[2.0,3.0],[3.0,4.0]]}]}'],
-    [mapnik.GeometryType.Polygon,'{"type":"Polygon","coordinates":[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]],[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.415 [...]
-    [mapnik.GeometryType.MultiPolygon,'{"type":"MultiPolygon","coordinates":[[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]]],[[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178. [...]
+    [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30,10]}'],
+    [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30.0,10.0]}'],
+    [mapnik.GeometryType.Point, '{"type":"Point","coordinates":[30.1,10.1]}'],
+    [mapnik.GeometryType.LineString,
+     '{"type":"LineString","coordinates":[[30.0,10.0],[10.0,30.0],[40.0,40.0]]}'],
+    [mapnik.GeometryType.Polygon,
+     '{"type":"Polygon","coordinates":[[[30.0,10.0],[10.0,20.0],[20.0,40.0],[40.0,40.0],[30.0,10.0]]]}'],
+    [mapnik.GeometryType.Polygon,
+     '{"type":"Polygon","coordinates":[[[35.0,10.0],[10.0,20.0],[15.0,40.0],[45.0,45.0],[35.0,10.0]],[[20.0,30.0],[35.0,35.0],[30.0,20.0],[20.0,30.0]]]}'],
+    [mapnik.GeometryType.MultiPoint,
+     '{"type":"MultiPoint","coordinates":[[10.0,40.0],[40.0,30.0],[20.0,20.0],[30.0,10.0]]}'],
+    [mapnik.GeometryType.MultiLineString,
+     '{"type":"MultiLineString","coordinates":[[[10.0,10.0],[20.0,20.0],[10.0,40.0]],[[40.0,40.0],[30.0,30.0],[40.0,20.0],[30.0,10.0]]]}'],
+    [mapnik.GeometryType.MultiPolygon,
+     '{"type":"MultiPolygon","coordinates":[[[[30.0,20.0],[10.0,40.0],[45.0,40.0],[30.0,20.0]]],[[[15.0,5.0],[40.0,10.0],[10.0,20.0],[5.0,10.0],[15.0,5.0]]]]}'],
+    [mapnik.GeometryType.MultiPolygon,
+     '{"type":"MultiPolygon","coordinates":[[[[40.0,40.0],[20.0,45.0],[45.0,30.0],[40.0,40.0]]],[[[20.0,35.0],[45.0,20.0],[30.0,5.0],[10.0,10.0],[10.0,30.0],[20.0,35.0]],[[30.0,20.0],[20.0,25.0],[20.0,15.0],[30.0,20.0]]]]}'],
+    [mapnik.GeometryType.GeometryCollection,
+     '{"type":"GeometryCollection","geometries":[{"type":"Polygon","coordinates":[[[1.0,1.0],[2.0,1.0],[2.0,2.0],[1.0,2.0],[1.0,1.0]]]},{"type":"Point","coordinates":[2.0,3.0]},{"type":"LineString","coordinates":[[2.0,3.0],[3.0,4.0]]}]}'],
+    [mapnik.GeometryType.Polygon, '{"type":"Polygon","coordinates":[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]],[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178.026236,71.41 [...]
+    [mapnik.GeometryType.MultiPolygon, '{"type":"MultiPolygon","coordinates":[[[[-178.32319,71.518365],[-178.321586,71.518439],[-178.259635,71.510688],[-178.304862,71.513129],[-178.32319,71.518365]]],[[[-178.32319,71.518365],[-178.341544,71.517524],[-178.32244,71.505439],[-178.215323,71.478034],[-178.193473,71.47663],[-178.147757,71.485175],[-178.124442,71.481879],[-178.005729,71.448615],[-178.017203,71.441413],[-178.054191,71.428778],[-178.047049,71.425727],[-178.033439,71.417792],[-178 [...]
 ]
 
 geojson_reversed = [
@@ -66,23 +102,24 @@ geojson_reversed = [
 ]
 
 geojson_nulls = [
-  '{ "type": "Feature", "properties": { }, "geometry": null }',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "Point", "coordinates": [] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [] ] ] } }',
-  '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [], "type": "Point" }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [] ], "type": "LineString" }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [ [] ] ], "type": "Polygon" } }',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [],[] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [ [] ] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [] ] ] }}',
-  '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [] ] ] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": null }',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "Point", "coordinates": [] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [] ] ] } }',
+    '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [], "type": "Point" }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [] ], "type": "LineString" }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "coordinates": [ [ [] ] ], "type": "Polygon" } }',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPoint", "coordinates": [ [],[] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiLineString", "coordinates": [ [ [] ] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [] ] ] }}',
+    '{ "type": "Feature", "properties": { }, "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [] ] ] ] }}',
 ]
 
-# valid, but empty wkb's (http://trac.osgeo.org/postgis/wiki/DevWikiEmptyGeometry)
+# valid, but empty wkbs
+# (http://trac.osgeo.org/postgis/wiki/DevWikiEmptyGeometry)
 empty_wkbs = [
     # TODO - this is messed up: round trips as MULTIPOINT EMPTY
     # template_postgis=# select ST_AsText(ST_GeomFromEWKB(decode(encode(ST_GeomFromText('POINT EMPTY'),'hex'),'hex')));
@@ -91,35 +128,48 @@ empty_wkbs = [
     # MULTIPOINT EMPTY
     #(1 row)
     #[ mapnik.GeometryType.Point,              "Point EMPTY", '010400000000000000'],
-    [ mapnik.GeometryType.MultiPoint,         "MULTIPOINT EMPTY", '010400000000000000'],
-    [ mapnik.GeometryType.LineString,         "LINESTRING EMPTY", '010200000000000000'],
-    [ mapnik.GeometryType.LineString,         "LINESTRING EMPTY", '010200000000000000' ],
-    [ mapnik.GeometryType.MultiLineString,    "MULTILINESTRING EMPTY", '010500000000000000'],
-    [ mapnik.GeometryType.Polygon,            "Polygon EMPTY", '010300000000000000'],
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION EMPTY", '010700000000000000'],
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(LINESTRING EMPTY,LINESTRING EMPTY)", '010700000000000000'],
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,POINT EMPTY)", '010700000000000000'],
+    [mapnik.GeometryType.MultiPoint, "MULTIPOINT EMPTY", '010400000000000000'],
+    [mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000'],
+    [mapnik.GeometryType.LineString, "LINESTRING EMPTY", '010200000000000000'],
+    [mapnik.GeometryType.MultiLineString,
+     "MULTILINESTRING EMPTY",
+     '010500000000000000'],
+    [mapnik.GeometryType.Polygon, "Polygon EMPTY", '010300000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+        "GEOMETRYCOLLECTION EMPTY", '010700000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(LINESTRING EMPTY,LINESTRING EMPTY)",
+     '010700000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(POINT EMPTY,POINT EMPTY)",
+     '010700000000000000'],
 ]
 
 partially_empty_wkb = [
     # TODO - currently this is not considered empty
     # even though one part is
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10)),LINESTRING EMPTY)", '010700000002000000010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440010200000000000000'],
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,POINT(0 0))", '010700000002000000010400000000000000010100000000000000000000000000000000000000'],
-    [ mapnik.GeometryType.GeometryCollection, "GEOMETRYCOLLECTION(POINT EMPTY,MULTIPOINT(0 0))", '010700000002000000010400000000000000010400000001000000010100000000000000000000000000000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(MULTILINESTRING((10 10,20 20,10 40),(40 40,30 30,40 20,30 10)),LINESTRING EMPTY)",
+     '010700000002000000010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003e400000000000003e40000000000000444000000000000034400000000000003e400000000000002440010200000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(POINT EMPTY,POINT(0 0))",
+     '010700000002000000010400000000000000010100000000000000000000000000000000000000'],
+    [mapnik.GeometryType.GeometryCollection,
+     "GEOMETRYCOLLECTION(POINT EMPTY,MULTIPOINT(0 0))",
+     '010700000002000000010400000000000000010400000001000000010100000000000000000000000000000000000000'],
 ]
 
 # unsupported types
 unsupported_wkb = [
-    [ "MULTIPOLYGON EMPTY", '010600000000000000'],
-    [ "TRIANGLE EMPTY", '011100000000000000'],
-    [ "CircularString EMPTY", '010800000000000000'],
-    [ "CurvePolygon EMPTY", '010A00000000000000'],
-    [ "CompoundCurve EMPTY", '010900000000000000'],
-    [ "MultiCurve EMPTY", '010B00000000000000'],
-    [ "MultiSurface EMPTY", '010C00000000000000'],
-    [ "PolyhedralSurface EMPTY", '010F00000000000000'],
-    [ "TIN EMPTY", '011000000000000000'],
+    ["MULTIPOLYGON EMPTY", '010600000000000000'],
+    ["TRIANGLE EMPTY", '011100000000000000'],
+    ["CircularString EMPTY", '010800000000000000'],
+    ["CurvePolygon EMPTY", '010A00000000000000'],
+    ["CompoundCurve EMPTY", '010900000000000000'],
+    ["MultiCurve EMPTY", '010B00000000000000'],
+    ["MultiSurface EMPTY", '010C00000000000000'],
+    ["PolyhedralSurface EMPTY", '010F00000000000000'],
+    ["TIN EMPTY", '011000000000000000'],
     # TODO - a few bogus inputs
     # enable if we start range checking to avoid crashing on invalid input?
     # https://github.com/mapnik/mapnik/issues/2236
@@ -128,21 +178,24 @@ unsupported_wkb = [
     #[ "0000", '0104' ],
 ]
 
+
 def test_path_geo_interface():
     geom = mapnik.Geometry.from_wkt('POINT(0 0)')
-    eq_(geom.__geo_interface__,{u'type': u'Point', u'coordinates': [0, 0]})
+    eq_(geom.__geo_interface__, {u'type': u'Point', u'coordinates': [0, 0]})
+
 
 def test_valid_wkb_parsing():
     count = 0
     for wkb in empty_wkbs:
         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
-        eq_(geom.is_empty(),True)
-        eq_(geom.type(),wkb[0])
+        eq_(geom.is_empty(), True)
+        eq_(geom.type(), wkb[0])
 
     for wkb in wkts:
         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
-        eq_(geom.is_empty(),False)
-        eq_(geom.type(),wkb[0])
+        eq_(geom.is_empty(), False)
+        eq_(geom.type(), wkb[0])
+
 
 def test_wkb_parsing_error():
     count = 0
@@ -150,19 +203,22 @@ def test_wkb_parsing_error():
         try:
             geom = mapnik.Geometry.from_wkb(unhexlify(wkb))
             # should not get here
-            eq_(True,False)
+            eq_(True, False)
         except:
             pass
     assert True
 
 # partially empty wkbs don't currently look empty right now
 # since the enclosing container has objects
+
+
 def test_empty_wkb_parsing():
     count = 0
     for wkb in partially_empty_wkb:
         geom = mapnik.Geometry.from_wkb(unhexlify(wkb[2]))
-        eq_(geom.type(),wkb[0])
-        eq_(geom.is_empty(),False)
+        eq_(geom.type(), wkb[0])
+        eq_(geom.is_empty(), False)
+
 
 def test_geojson_parsing():
     geometries = []
@@ -170,102 +226,117 @@ def test_geojson_parsing():
     for j in geojson:
         count += 1
         geometries.append(mapnik.Geometry.from_geojson(j[1]))
-    eq_(count,len(geometries))
+    eq_(count, len(geometries))
+
 
 def test_geojson_parsing_reversed():
-    for idx,j in enumerate(geojson_reversed):
+    for idx, j in enumerate(geojson_reversed):
         g1 = mapnik.Geometry.from_geojson(j)
         g2 = mapnik.Geometry.from_geojson(geojson[idx][1])
         eq_(g1.to_geojson(), g2.to_geojson())
 
 # http://geojson.org/geojson-spec.html#positions
+
+
 def test_geojson_point_positions():
     input_json = '{"type":"Point","coordinates":[30,10]}'
     geom = mapnik.Geometry.from_geojson(input_json)
-    eq_(geom.to_geojson(),input_json)
+    eq_(geom.to_geojson(), input_json)
     # should ignore all but the first two
-    geom = mapnik.Geometry.from_geojson('{"type":"Point","coordinates":[30,10,50,50,50,50]}')
-    eq_(geom.to_geojson(),input_json)
+    geom = mapnik.Geometry.from_geojson(
+        '{"type":"Point","coordinates":[30,10,50,50,50,50]}')
+    eq_(geom.to_geojson(), input_json)
+
 
 def test_geojson_point_positions2():
     input_json = '{"type":"LineString","coordinates":[[30,10],[10,30],[40,40]]}'
     geom = mapnik.Geometry.from_geojson(input_json)
-    eq_(geom.to_geojson(),input_json)
+    eq_(geom.to_geojson(), input_json)
 
     # should ignore all but the first two
-    geom = mapnik.Geometry.from_geojson('{"type":"LineString","coordinates":[[30.0,10.0,0,0,0],[10.0,30.0,0,0,0],[40.0,40.0,0,0,0]]}')
-    eq_(geom.to_geojson(),input_json)
+    geom = mapnik.Geometry.from_geojson(
+        '{"type":"LineString","coordinates":[[30.0,10.0,0,0,0],[10.0,30.0,0,0,0],[40.0,40.0,0,0,0]]}')
+    eq_(geom.to_geojson(), input_json)
 
-def compare_wkb_from_wkt(wkt,type):
+
+def compare_wkb_from_wkt(wkt, type):
     geom = mapnik.Geometry.from_wkt(wkt)
-    eq_(geom.type(),type)
+    eq_(geom.type(), type)
+
 
-def compare_wkt_to_geojson(idx,wkt,num=None):
+def compare_wkt_to_geojson(idx, wkt, num=None):
     geom = mapnik.Geometry.from_wkt(wkt)
     # ensure both have same result
     gj = geom.to_geojson()
-    eq_(len(gj) > 1,True)
+    eq_(len(gj) > 1, True)
     a = json.loads(gj)
     e = json.loads(geojson[idx][1])
-    eq_(a,e)
+    eq_(a, e)
+
 
 def test_wkt_simple():
     for wkt in wkts:
         try:
             geom = mapnik.Geometry.from_wkt(wkt[1])
-            eq_(geom.type(),wkt[0])
-        except RuntimeError, e:
+            eq_(geom.type(), wkt[0])
+        except RuntimeError as e:
             raise RuntimeError('%s %s' % (e, wkt))
 
+
 def test_wkb_simple():
     for wkt in wkts:
         try:
-            compare_wkb_from_wkt(wkt[1],wkt[0])
-        except RuntimeError, e:
+            compare_wkb_from_wkt(wkt[1], wkt[0])
+        except RuntimeError as e:
             raise RuntimeError('%s %s' % (e, wkt))
 
+
 def test_wkt_to_geojson():
     idx = -1
     for wkt in wkts:
         try:
             idx += 1
-            compare_wkt_to_geojson(idx,wkt[1],wkt[0])
-        except RuntimeError, e:
+            compare_wkt_to_geojson(idx, wkt[1], wkt[0])
+        except RuntimeError as e:
             raise RuntimeError('%s %s' % (e, wkt))
 
+
 def test_wkt_rounding():
     # currently fails because we use output precision of 6 - should we make it configurable? https://github.com/mapnik/mapnik/issues/1009
     # if precision is set to 15 still fails due to very subtle rounding issues
     wkt = "POLYGON((7.904185 54.180426,7.89918 54.178168,7.897715 54.182318,7.893565 54.183111,7.890391 54.187567,7.885874 54.19068,7.879893 54.193915,7.894541 54.194647,7.900645 54.19068,7.904185 54.180426))"
     geom = mapnik.Geometry.from_wkt(wkt)
-    eq_(geom.type(),mapnik.GeometryType.Polygon)
+    eq_(geom.type(), mapnik.GeometryType.Polygon)
+
 
 def test_wkt_collection_flattening():
     wkt = 'GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POLYGON((40 40,20 45,45 30,40 40)),POLYGON((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20)),LINESTRING(2 3,3 4))'
     # currently fails as the MULTIPOLYGON inside will be returned as multiple polygons - not a huge deal - should we worry?
     #wkt = "GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),MULTIPOLYGON(((40 40,20 45,45 30,40 40)),((20 35,45 20,30 5,10 10,10 30,20 35),(30 20,20 25,20 15,30 20))),LINESTRING(2 3,3 4))"
     geom = mapnik.Geometry.from_wkt(wkt)
-    eq_(geom.type(),mapnik.GeometryType.GeometryCollection)
+    eq_(geom.type(), mapnik.GeometryType.GeometryCollection)
+
 
 def test_creating_feature_from_geojson():
     json_feat = {
-      "type": "Feature",
-      "geometry": {"type": "Point", "coordinates": [-122,48]},
-      "properties": {"name": "value"}
+        "type": "Feature",
+        "geometry": {"type": "Point", "coordinates": [-122, 48]},
+        "properties": {"name": "value"}
     }
     ctx = mapnik.Context()
-    feat = mapnik.Feature.from_geojson(json.dumps(json_feat),ctx)
-    eq_(feat.id(),1)
-    eq_(feat['name'],u'value')
+    feat = mapnik.Feature.from_geojson(json.dumps(json_feat), ctx)
+    eq_(feat.id(), 1)
+    eq_(feat['name'], u'value')
+
 
 def test_handling_geojson_null_geoms():
     for j in geojson_nulls:
         ctx = mapnik.Context()
-        out_json = mapnik.Feature.from_geojson(j,ctx).to_geojson()
+        out_json = mapnik.Feature.from_geojson(j, ctx).to_geojson()
         expected = '{"type":"Feature","id":1,"geometry":null,"properties":{}}'
-        eq_(out_json,expected)
+        eq_(out_json, expected)
         # ensure it round trips
-        eq_(mapnik.Feature.from_geojson(out_json,ctx).to_geojson(),expected)
+        eq_(mapnik.Feature.from_geojson(out_json, ctx).to_geojson(), expected)
 
 
 if __name__ == "__main__":
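
A side note on the hex literals in the empty_wkbs and unsupported_wkb tables above: for the little-endian ('01') blobs used there, a WKB payload begins with a one-byte byte-order flag, a uint32 geometry type, and a uint32 element count, so each nine-byte entry is just a typed header with a zero count. A minimal standalone sketch (not part of the test module) that decodes such a header:

    import struct
    from binascii import unhexlify

    def wkb_header(hex_str):
        raw = unhexlify(hex_str)
        byte_order = raw[0:1]                  # b'\x01' means little-endian
        geom_type, count = struct.unpack('<II', raw[1:9])
        return byte_order, geom_type, count

    # '010400000000000000' decodes to geometry type 4 (MultiPoint) with a
    # zero element count, i.e. the MULTIPOINT EMPTY entry above.
    print(wkb_header('010400000000000000'))
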
diff --git a/test/python_tests/grayscale_test.py b/test/python_tests/grayscale_test.py
index 2bcf836..fad0192 100644
--- a/test/python_tests/grayscale_test.py
+++ b/test/python_tests/grayscale_test.py
@@ -1,13 +1,16 @@
-import mapnik
 from nose.tools import eq_
-from utilities import run_all
+
+import mapnik
+
+from .utilities import run_all
+
 
 def test_grayscale_conversion():
-    im = mapnik.Image(2,2)
+    im = mapnik.Image(2, 2)
     im.fill(mapnik.Color('white'))
     im.set_grayscale_to_alpha()
-    pixel = im.get_pixel(0,0)
-    eq_((pixel >> 24) & 0xff,255);
+    pixel = im.get_pixel(0, 0)
+    eq_((pixel >> 24) & 0xff, 255)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
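
One note on the grayscale assertion above: get_pixel(x, y) without the colour flag returns the pixel as a packed 32-bit integer, and the test reads the alpha channel out of the top byte. A small sketch of that unpacking (the full channel order shown here is an assumption; only the alpha position is what the test exercises):

    def unpack_rgba(pixel):
        # Assumed rgba8 packing: red in the low byte, alpha in the top byte.
        # Only the alpha position is confirmed by grayscale_test.py.
        r = pixel & 0xff
        g = (pixel >> 8) & 0xff
        b = (pixel >> 16) & 0xff
        a = (pixel >> 24) & 0xff
        return r, g, b, a

    # White run through set_grayscale_to_alpha() ends up fully opaque, which
    # is why the test expects 255 in the alpha byte.
    print(unpack_rgba(0xffffffff))  # (255, 255, 255, 255)
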
diff --git a/test/python_tests/image_encoding_speed_test.py b/test/python_tests/image_encoding_speed_test.py
index 75bbc85..58f0785 100644
--- a/test/python_tests/image_encoding_speed_test.py
+++ b/test/python_tests/image_encoding_speed_test.py
@@ -1,9 +1,13 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
+import os
 from timeit import Timer, time
-from utilities import execution_path, run_all
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -21,7 +25,7 @@ combinations = ['png',
                 'png8:m=h:t=1',
                 'png8:m=h:t=2',
                 'png:z=1',
-                'png:z=1:t=0', # forces rbg, no a
+                'png:z=1:t=0',  # forces rgb, no a
                 'png8:z=1',
                 'png8:z=1:m=o',
                 'png8:z=1:m=h',
@@ -44,17 +48,18 @@ combinations = ['png',
                 'png8:m=h:g=1.0',
                 'png:e=miniz',
                 'png8:e=miniz'
-               ]
+                ]
 
 tiles = [
-'blank',
-'solid',
-'many_colors',
-'aerial_24'
+    'blank',
+    'solid',
+    'many_colors',
+    'aerial_24'
 ]
 
 iterations = 10
 
+
 def do_encoding():
 
     global image
@@ -66,30 +71,30 @@ def do_encoding():
         global image
         image = im
         start = time.time()
-        set = t.repeat(iterations,1)
+        set = t.repeat(iterations, 1)
         elapsed = (time.time() - start)
-        min_ = min(set)*1000
-        avg = (sum(set)/len(set))*1000
+        min_ = min(set) * 1000
+        avg = (sum(set) / len(set)) * 1000
         name = func.__name__ + ' ' + format
-        results[name] = [min_,avg,elapsed*1000,name,len(func())]
+        results[name] = [min_, avg, elapsed * 1000, name, len(func())]
         sortable[name] = [min_]
 
     if 'blank' in tiles:
         def blank():
             return eval('image.tostring("%s")' % c)
-        blank_im = mapnik.Image(512,512)
+        blank_im = mapnik.Image(512, 512)
         for c in combinations:
             t = Timer(blank)
-            run(blank,blank_im,c,t)
+            run(blank, blank_im, c, t)
 
     if 'solid' in tiles:
         def solid():
             return eval('image.tostring("%s")' % c)
-        solid_im = mapnik.Image(512,512)
+        solid_im = mapnik.Image(512, 512)
         solid_im.fill(mapnik.Color("#f2efe9"))
         for c in combinations:
             t = Timer(solid)
-            run(solid,solid_im,c,t)
+            run(solid, solid_im, c, t)
 
     if 'many_colors' in tiles:
         def many_colors():
@@ -98,7 +103,7 @@ def do_encoding():
         many_colors_im = mapnik.Image.open('../data/images/13_4194_2747.png')
         for c in combinations:
             t = Timer(many_colors)
-            run(many_colors,many_colors_im,c,t)
+            run(many_colors, many_colors_im, c, t)
 
     if 'aerial_24' in tiles:
         def aerial_24():
@@ -106,16 +111,18 @@ def do_encoding():
         aerial_24_im = mapnik.Image.open('../data/images/12_654_1580.png')
         for c in combinations:
             t = Timer(aerial_24)
-            run(aerial_24,aerial_24_im,c,t)
+            run(aerial_24, aerial_24_im, c, t)
 
-    for key, value in sorted(sortable.iteritems(), key=lambda (k,v): (v,k)):
+    for key, value in sorted(sortable.items(), key=lambda kv: (kv[1], kv[0])):
         s = results[key]
         min_ = str(s[0])[:6]
         avg = str(s[1])[:6]
         elapsed = str(s[2])[:6]
         name = s[3]
         size = s[4]
-        print 'min: %sms | avg: %sms | total: %sms | len: %s <-- %s' % (min_,avg,elapsed,size,name)
+        print(
+            'min: %sms | avg: %sms | total: %sms | len: %s <-- %s' %
+            (min_, avg, elapsed, size, name))
 
 
 if __name__ == "__main__":
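
One detail worth calling out in the benchmark loop above: sorted() hands its key function a single (key, value) tuple, so the Python 2 tuple-parameter lambda has to become a one-argument lambda that subscripts the tuple. A minimal standalone sketch of the pattern (the names are illustrative, not taken from the module):

    # Sort a {name: [min_ms]} mapping by its fastest time; works on 2 and 3.
    timings = {'png': [5.1], 'png8': [3.4], 'jpeg': [2.9]}
    for name, values in sorted(timings.items(), key=lambda kv: kv[1]):
        print('%s: %.1fms' % (name, values[0]))
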
diff --git a/test/python_tests/image_filters_test.py b/test/python_tests/image_filters_test.py
index 269d64c..7a06db3 100644
--- a/test/python_tests/image_filters_test.py
+++ b/test/python_tests/image_filters_test.py
@@ -1,27 +1,33 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_
-from utilities import execution_path, run_all
-from utilities import side_by_side_image
-import os, mapnik
+import os
 import re
 
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all, side_by_side_image
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def replace_style(m, name, style):
     m.remove_style(name)
     m.append_style(name, style)
 
+
 def test_append():
     s = mapnik.Style()
-    eq_(s.image_filters,'')
+    eq_(s.image_filters, '')
     s.image_filters = 'gray'
-    eq_(s.image_filters,'gray')
+    eq_(s.image_filters, 'gray')
     s.image_filters = 'sharpen'
-    eq_(s.image_filters,'sharpen')
+    eq_(s.image_filters, 'sharpen')
 
 if 'shape' in mapnik.DatasourceCache.plugin_names():
     def test_style_level_image_filter():
@@ -49,19 +55,25 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
             mapnik.render(m, im)
             actual = '/tmp/mapnik-style-image-filter-' + filename + '.png'
             expected = 'images/style-image-filter/' + filename + '.png'
-            im.save(actual,"png32")
+            im.save(actual, "png32")
             if not os.path.exists(expected) or os.environ.get('UPDATE'):
-                print 'generating expected test image: %s' % expected
-                im.save(expected,'png32')
+                print('generating expected test image: %s' % expected)
+                im.save(expected, 'png32')
             expected_im = mapnik.Image.open(expected)
             # compare them
             if im.tostring('png32') == expected_im.tostring('png32'):
                 successes.append(name)
             else:
-                fails.append('failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected))
+                fails.append(
+                    'failed comparing actual (%s) and expected(%s)' %
+                    (actual, 'tests/python_tests/' + expected))
                 fail_im = side_by_side_image(expected_im, im)
-                fail_im.save('/tmp/mapnik-style-image-filter-' + filename + '.fail.png','png32')
-        eq_(len(fails), 0, '\n'+'\n'.join(fails))
+                fail_im.save(
+                    '/tmp/mapnik-style-image-filter-' +
+                    filename +
+                    '.fail.png',
+                    'png32')
+        eq_(len(fails), 0, '\n' + '\n'.join(fails))
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/image_test.py b/test/python_tests/image_test.py
index 189f8be..f25ff39 100644
--- a/test/python_tests/image_test.py
+++ b/test/python_tests/image_test.py
@@ -1,345 +1,390 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
-from nose.tools import eq_,raises, assert_almost_equal
-from utilities import execution_path, run_all, get_unique_colors
+import os
+import sys
+
+from nose.tools import assert_almost_equal, eq_, raises
+
+import mapnik
+
+from .utilities import READ_FLAGS, execution_path, get_unique_colors, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+if PYTHON3:
+    buffer = memoryview
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_type():
     im = mapnik.Image(256, 256)
     eq_(im.get_type(), mapnik.ImageType.rgba8)
     im = mapnik.Image(256, 256, mapnik.ImageType.gray8)
     eq_(im.get_type(), mapnik.ImageType.gray8)
 
+
 def test_image_premultiply():
-    im = mapnik.Image(256,256)
-    eq_(im.premultiplied(),False)
+    im = mapnik.Image(256, 256)
+    eq_(im.premultiplied(), False)
     # Premultiply should return true that it worked
     eq_(im.premultiply(), True)
-    eq_(im.premultiplied(),True)
+    eq_(im.premultiplied(), True)
     # Premultiplying again should return false as nothing should happen
     eq_(im.premultiply(), False)
-    eq_(im.premultiplied(),True)
+    eq_(im.premultiplied(), True)
     # Demultiply should return true that it worked
     eq_(im.demultiply(), True)
-    eq_(im.premultiplied(),False)
+    eq_(im.premultiplied(), False)
     # Demultiply again should not work and return false as it did nothing
     eq_(im.demultiply(), False)
-    eq_(im.premultiplied(),False)
+    eq_(im.premultiplied(), False)
+
 
 def test_image_premultiply_values():
-    im = mapnik.Image(256,256)
+    im = mapnik.Image(256, 256)
     im.fill(mapnik.Color(16, 33, 255, 128))
     im.premultiply()
-    c = im.get_pixel(0,0, True)
+    c = im.get_pixel(0, 0, True)
     eq_(c.r, 8)
     eq_(c.g, 17)
     eq_(c.b, 128)
     eq_(c.a, 128)
     im.demultiply()
-    # Do to the nature of this operation the result will not be exactly the same
-    c = im.get_pixel(0,0,True)
-    eq_(c.r,15)
-    eq_(c.g,33)
-    eq_(c.b,255)
-    eq_(c.a,128)
+    # Due to the nature of this operation the result will not be exactly the
+    # same
+    c = im.get_pixel(0, 0, True)
+    eq_(c.r, 15)
+    eq_(c.g, 33)
+    eq_(c.b, 255)
+    eq_(c.a, 128)
+
 
 def test_apply_opacity():
-    im = mapnik.Image(4,4)
-    im.fill(mapnik.Color(128,128,128,128))
-    im.apply_opacity(0.75);
-    c = im.get_pixel(0,0,True)
-    eq_(c.r,128)
-    eq_(c.g,128)
-    eq_(c.b,128)
-    eq_(c.a,96)
+    im = mapnik.Image(4, 4)
+    im.fill(mapnik.Color(128, 128, 128, 128))
+    im.apply_opacity(0.75)
+    c = im.get_pixel(0, 0, True)
+    eq_(c.r, 128)
+    eq_(c.g, 128)
+    eq_(c.b, 128)
+    eq_(c.a, 96)
+
 
 def test_background():
-    im = mapnik.Image(256,256)
+    im = mapnik.Image(256, 256)
     eq_(im.premultiplied(), False)
-    im.fill(mapnik.Color(32,64,125,128))
+    im.fill(mapnik.Color(32, 64, 125, 128))
     eq_(im.premultiplied(), False)
-    c = im.get_pixel(0,0,True)
+    c = im.get_pixel(0, 0, True)
     eq_(c.get_premultiplied(), False)
-    eq_(c.r,32)
-    eq_(c.g,64)
-    eq_(c.b,125)
-    eq_(c.a,128)
+    eq_(c.r, 32)
+    eq_(c.g, 64)
+    eq_(c.b, 125)
+    eq_(c.a, 128)
     # Now again with a premultiplied alpha
-    im.fill(mapnik.Color(32,64,125,128,True))
+    im.fill(mapnik.Color(32, 64, 125, 128, True))
     eq_(im.premultiplied(), True)
-    c = im.get_pixel(0,0,True)
+    c = im.get_pixel(0, 0, True)
     eq_(c.get_premultiplied(), True)
-    eq_(c.r,32)
-    eq_(c.g,64)
-    eq_(c.b,125)
-    eq_(c.a,128)
+    eq_(c.r, 32)
+    eq_(c.g, 64)
+    eq_(c.b, 125)
+    eq_(c.a, 128)
+
 
 def test_set_and_get_pixel():
     # Create an image that is not premultiplied
-    im = mapnik.Image(256,256)
-    c0 = mapnik.Color(16,33,255,128)
-    c0_pre = mapnik.Color(16,33,255,128, True)
-    im.set_pixel(0,0,c0)
-    im.set_pixel(1,1,c0_pre)
+    im = mapnik.Image(256, 256)
+    c0 = mapnik.Color(16, 33, 255, 128)
+    c0_pre = mapnik.Color(16, 33, 255, 128, True)
+    im.set_pixel(0, 0, c0)
+    im.set_pixel(1, 1, c0_pre)
     # No differences for non premultiplied pixels
-    c1_int = mapnik.Color(im.get_pixel(0,0))
+    c1_int = mapnik.Color(im.get_pixel(0, 0))
     eq_(c0.r, c1_int.r)
     eq_(c0.g, c1_int.g)
     eq_(c0.b, c1_int.b)
     eq_(c0.a, c1_int.a)
-    c1 = im.get_pixel(0,0,True)
+    c1 = im.get_pixel(0, 0, True)
     eq_(c0.r, c1.r)
     eq_(c0.g, c1.g)
     eq_(c0.b, c1.b)
     eq_(c0.a, c1.a)
     # The premultiplied Color should be demultiplied before being applied.
     c0_pre.demultiply()
-    c1_int = mapnik.Color(im.get_pixel(1,1))
+    c1_int = mapnik.Color(im.get_pixel(1, 1))
     eq_(c0_pre.r, c1_int.r)
     eq_(c0_pre.g, c1_int.g)
     eq_(c0_pre.b, c1_int.b)
     eq_(c0_pre.a, c1_int.a)
-    c1 = im.get_pixel(1,1,True)
+    c1 = im.get_pixel(1, 1, True)
     eq_(c0_pre.r, c1.r)
     eq_(c0_pre.g, c1.g)
     eq_(c0_pre.b, c1.b)
     eq_(c0_pre.a, c1.a)
-    
+
     # Now create a new image that is premultiplied
-    im = mapnik.Image(256,256, mapnik.ImageType.rgba8, True, True)
-    c0 = mapnik.Color(16,33,255,128)
-    c0_pre = mapnik.Color(16,33,255,128, True)
-    im.set_pixel(0,0,c0)
-    im.set_pixel(1,1,c0_pre)
-    # It should have put pixels that are the same as premultiplied so premultiply c0
+    im = mapnik.Image(256, 256, mapnik.ImageType.rgba8, True, True)
+    c0 = mapnik.Color(16, 33, 255, 128)
+    c0_pre = mapnik.Color(16, 33, 255, 128, True)
+    im.set_pixel(0, 0, c0)
+    im.set_pixel(1, 1, c0_pre)
+    # It should have put pixels that are the same as premultiplied so
+    # premultiply c0
     c0.premultiply()
-    c1_int = mapnik.Color(im.get_pixel(0,0))
+    c1_int = mapnik.Color(im.get_pixel(0, 0))
     eq_(c0.r, c1_int.r)
     eq_(c0.g, c1_int.g)
     eq_(c0.b, c1_int.b)
     eq_(c0.a, c1_int.a)
-    c1 = im.get_pixel(0,0,True)
+    c1 = im.get_pixel(0, 0, True)
     eq_(c0.r, c1.r)
     eq_(c0.g, c1.g)
     eq_(c0.b, c1.b)
     eq_(c0.a, c1.a)
     # The premultiplied Color should be the same though
-    c1_int = mapnik.Color(im.get_pixel(1,1))
+    c1_int = mapnik.Color(im.get_pixel(1, 1))
     eq_(c0_pre.r, c1_int.r)
     eq_(c0_pre.g, c1_int.g)
     eq_(c0_pre.b, c1_int.b)
     eq_(c0_pre.a, c1_int.a)
-    c1 = im.get_pixel(1,1,True)
+    c1 = im.get_pixel(1, 1, True)
     eq_(c0_pre.r, c1.r)
     eq_(c0_pre.g, c1.g)
     eq_(c0_pre.b, c1.b)
     eq_(c0_pre.a, c1.a)
 
+
 def test_pixel_gray8():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray8)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray8)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), 0)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), 0)
+
 
 def test_pixel_gray8s():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray8s)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray8s)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_gray16():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray16)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray16)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), 0)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), 0)
+
 
 def test_pixel_gray16s():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray16s)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray16s)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_gray32():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray32)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray32)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), 0)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), 0)
+
 
 def test_pixel_gray32s():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray32s)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray32s)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_gray64():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray64)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray64)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), 0)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), 0)
+
 
 def test_pixel_gray64s():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray64s)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray64s)
     val_list = range(20)
     for v in val_list:
-        im.set_pixel(0,0, v)
-        eq_(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        eq_(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        eq_(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        eq_(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_floats():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray32f)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray32f)
     val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1]
     for v in val_list:
-        im.set_pixel(0,0, v)
-        assert_almost_equal(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        assert_almost_equal(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        assert_almost_equal(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        assert_almost_equal(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_doubles():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray64f)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray64f)
     val_list = [0.9, 0.99, 0.999, 0.9999, 0.99999, 1, 1.0001, 1.001, 1.01, 1.1]
     for v in val_list:
-        im.set_pixel(0,0, v)
-        assert_almost_equal(im.get_pixel(0,0), v)
-        im.set_pixel(0,0, -v)
-        assert_almost_equal(im.get_pixel(0,0), -v)
+        im.set_pixel(0, 0, v)
+        assert_almost_equal(im.get_pixel(0, 0), v)
+        im.set_pixel(0, 0, -v)
+        assert_almost_equal(im.get_pixel(0, 0), -v)
+
 
 def test_pixel_overflow():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray8)
-    im.set_pixel(0,0,256)
-    eq_(im.get_pixel(0,0),255)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray8)
+    im.set_pixel(0, 0, 256)
+    eq_(im.get_pixel(0, 0), 255)
+
 
 def test_pixel_underflow():
-    im = mapnik.Image(4,4,mapnik.ImageType.gray8)
-    im.set_pixel(0,0,-1)
-    eq_(im.get_pixel(0,0),0)
-    im = mapnik.Image(4,4,mapnik.ImageType.gray16)
-    im.set_pixel(0,0,-1)
-    eq_(im.get_pixel(0,0),0)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray8)
+    im.set_pixel(0, 0, -1)
+    eq_(im.get_pixel(0, 0), 0)
+    im = mapnik.Image(4, 4, mapnik.ImageType.gray16)
+    im.set_pixel(0, 0, -1)
+    eq_(im.get_pixel(0, 0), 0)
+
 
 @raises(IndexError)
 def test_set_pixel_out_of_range_1():
-    im = mapnik.Image(4,4)
+    im = mapnik.Image(4, 4)
     c = mapnik.Color('blue')
-    im.set_pixel(5,5,c)
+    im.set_pixel(5, 5, c)
+
 
 @raises(OverflowError)
 def test_set_pixel_out_of_range_2():
-    im = mapnik.Image(4,4)
+    im = mapnik.Image(4, 4)
     c = mapnik.Color('blue')
-    im.set_pixel(-1,1,c)
+    im.set_pixel(-1, 1, c)
+
 
 @raises(IndexError)
 def test_get_pixel_out_of_range_1():
-    im = mapnik.Image(4,4)
-    c = im.get_pixel(5,5)
+    im = mapnik.Image(4, 4)
+    c = im.get_pixel(5, 5)
+
 
 @raises(OverflowError)
 def test_get_pixel_out_of_range_2():
-    im = mapnik.Image(4,4)
-    c = im.get_pixel(-1,1)
+    im = mapnik.Image(4, 4)
+    c = im.get_pixel(-1, 1)
+
 
 @raises(IndexError)
 def test_get_pixel_color_out_of_range_1():
-    im = mapnik.Image(4,4)
-    c = im.get_pixel(5,5,True)
+    im = mapnik.Image(4, 4)
+    c = im.get_pixel(5, 5, True)
+
 
 @raises(OverflowError)
 def test_get_pixel_color_out_of_range_2():
-    im = mapnik.Image(4,4)
-    c = im.get_pixel(-1,1,True)
-    
+    im = mapnik.Image(4, 4)
+    c = im.get_pixel(-1, 1, True)
+
+
 def test_set_color_to_alpha():
-    im = mapnik.Image(256,256)
+    im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('rgba(12,12,12,255)'))
     eq_(get_unique_colors(im), ['rgba(12,12,12,255)'])
     im.set_color_to_alpha(mapnik.Color('rgba(12,12,12,0)'))
     eq_(get_unique_colors(im), ['rgba(0,0,0,0)'])
 
+
 @raises(RuntimeError)
 def test_negative_image_dimensions():
-    # TODO - this may have regressed in https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7
-    im = mapnik.Image(-40,40)
+    # TODO - this may have regressed in
+    # https://github.com/mapnik/mapnik/commit/4f3521ac24b61fc8ae8fd344a16dc3a5fdf15af7
+    im = mapnik.Image(-40, 40)
     # should not get here
-    eq_(im.width(),0)
-    eq_(im.height(),0)
+    eq_(im.width(), 0)
+    eq_(im.height(), 0)
+
 
 def test_jpeg_round_trip():
     filepath = '/tmp/mapnik-jpeg-io.jpeg'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(1,2,3,.5)'))
-    im.save(filepath,'jpeg')
+    im.save(filepath, 'jpeg')
     im2 = mapnik.Image.open(filepath)
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
-    eq_(len(im.tostring()),len(im2.tostring()))
-    eq_(len(im.tostring('jpeg')),len(im2.tostring('jpeg')))
-    eq_(len(im.tostring()),len(im3.tostring()))
-    eq_(len(im.tostring('jpeg')),len(im3.tostring('jpeg')))
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
+    eq_(len(im.tostring()), len(im2.tostring()))
+    eq_(len(im.tostring('jpeg')), len(im2.tostring('jpeg')))
+    eq_(len(im.tostring()), len(im3.tostring()))
+    eq_(len(im.tostring('jpeg')), len(im3.tostring('jpeg')))
+
 
 def test_png_round_trip():
     filepath = '/tmp/mapnik-png-io.png'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(1,2,3,.5)'))
-    im.save(filepath,'png')
+    im.save(filepath, 'png')
     im2 = mapnik.Image.open(filepath)
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
-    eq_(len(im.tostring()),len(im2.tostring()))
-    eq_(len(im.tostring('png')),len(im2.tostring('png')))
-    eq_(len(im.tostring('png8')),len(im2.tostring('png8')))
-    eq_(len(im.tostring()),len(im3.tostring()))
-    eq_(len(im.tostring('png')),len(im3.tostring('png')))
-    eq_(len(im.tostring('png8')),len(im3.tostring('png8')))
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
+    eq_(len(im.tostring()), len(im2.tostring()))
+    eq_(len(im.tostring('png')), len(im2.tostring('png')))
+    eq_(len(im.tostring('png8')), len(im2.tostring('png8')))
+    eq_(len(im.tostring()), len(im3.tostring()))
+    eq_(len(im.tostring('png')), len(im3.tostring('png')))
+    eq_(len(im.tostring('png8')), len(im3.tostring('png8')))
+
 
 def test_image_open_from_string():
     filepath = '../data/images/dummy.png'
     im1 = mapnik.Image.open(filepath)
-    im2 = mapnik.Image.fromstring(open(filepath,'rb').read())
-    eq_(im1.width(),im2.width())
+    with open(filepath, READ_FLAGS) as f:
+        im2 = mapnik.Image.fromstring(f.read())
+    eq_(im1.width(), im2.width())
     length = len(im1.tostring())
-    eq_(length,len(im2.tostring()))
-    eq_(len(mapnik.Image.fromstring(im1.tostring('png')).tostring()),length)
-    eq_(len(mapnik.Image.fromstring(im1.tostring('jpeg')).tostring()),length)
-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('png'))).tostring()),length)
-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('jpeg'))).tostring()),length)
+    eq_(length, len(im2.tostring()))
+    eq_(len(mapnik.Image.fromstring(im1.tostring('png')).tostring()), length)
+    eq_(len(mapnik.Image.fromstring(im1.tostring('jpeg')).tostring()), length)
+    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('png'))).tostring()), length)
+    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('jpeg'))).tostring()), length)
 
     # TODO - https://github.com/mapnik/mapnik/issues/1831
-    eq_(len(mapnik.Image.fromstring(im1.tostring('tiff')).tostring()),length)
-    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('tiff'))).tostring()),length)
+    eq_(len(mapnik.Image.fromstring(im1.tostring('tiff')).tostring()), length)
+    eq_(len(mapnik.Image.frombuffer(buffer(im1.tostring('tiff'))).tostring()), length)
 
 if __name__ == "__main__":
     setup()
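
For the premultiply expectations earlier in this file: premultiplication scales each channel by alpha/255, and with simple rounding that reproduces the asserted (8, 17, 128) for an input of (16, 33, 255) at alpha 128; demultiplying scales back by 255/alpha, and the integer rounding in that direction is why red comes back as 15 rather than 16. A rough sketch of the forward step only (mapnik's exact fixed-point rounding may differ by one):

    # Forward premultiplication as in test_image_premultiply_values:
    # each channel is scaled by alpha / 255.
    alpha = 128
    for c in (16, 33, 255):
        print('%d -> %d' % (c, int(round(c * alpha / 255.0))))
    # prints 16 -> 8, 33 -> 17, 255 -> 128
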
diff --git a/test/python_tests/image_tiff_test.py b/test/python_tests/image_tiff_test.py
index e0535d0..a924b27 100644
--- a/test/python_tests/image_tiff_test.py
+++ b/test/python_tests/image_tiff_test.py
@@ -1,334 +1,417 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
 import hashlib
-from nose.tools import eq_, assert_not_equal
-from utilities import execution_path, run_all
+import os
+
+from nose.tools import assert_not_equal, eq_
+
+import mapnik
+
+from .utilities import READ_FLAGS, execution_path, run_all
+
 
 def hashstr(var):
     return hashlib.md5(var).hexdigest()
 
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_tiff_round_trip_scanline():
     filepath = '/tmp/mapnik-tiff-io-scanline.tiff'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(12,255,128,.5)'))
     org_str = hashstr(im.tostring())
-    im.save(filepath,'tiff:method=scanline')
+    im.save(filepath, 'tiff:method=scanline')
     im2 = mapnik.Image.open(filepath)
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
     eq_(hashstr(im.tostring()), org_str)
-    # This won't be the same the first time around because the im is not premultiplied and im2 is
-    assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring()))
-    assert_not_equal(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
+    # This won't be the same the first time around because the im is not
+    # premultiplied and im2 is
+    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+    assert_not_equal(
+        hashstr(
+            im.tostring('tiff:method=scanline')), hashstr(
+            im2.tostring('tiff:method=scanline')))
     # Now premultiply
     im.premultiply()
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
-    eq_(hashstr(im2.tostring()),hashstr(im3.tostring()))
-    eq_(hashstr(im2.tostring('tiff:method=scanline')),hashstr(im3.tostring('tiff:method=scanline')))
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=scanline')),
+        hashstr(im2.tostring('tiff:method=scanline')))
+    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+    eq_(hashstr(im2.tostring('tiff:method=scanline')),
+        hashstr(im3.tostring('tiff:method=scanline')))
+
 
 def test_tiff_round_trip_stripped():
     filepath = '/tmp/mapnik-tiff-io-stripped.tiff'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(12,255,128,.5)'))
     org_str = hashstr(im.tostring())
-    im.save(filepath,'tiff:method=stripped')
+    im.save(filepath, 'tiff:method=stripped')
     im2 = mapnik.Image.open(filepath)
-    im2.save('/tmp/mapnik-tiff-io-stripped2.tiff','tiff:method=stripped')
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
+    im2.save('/tmp/mapnik-tiff-io-stripped2.tiff', 'tiff:method=stripped')
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
     # difference in tags.
-    assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring()))
-    assert_not_equal(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
+    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+    assert_not_equal(
+        hashstr(
+            im.tostring('tiff:method=stripped')), hashstr(
+            im2.tostring('tiff:method=stripped')))
     # Now if we premultiply they will be exactly the same
     im.premultiply()
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
-    eq_(hashstr(im2.tostring()),hashstr(im3.tostring()))
-    # Both of these started out premultiplied, so this round trip should be exactly the same!
-    eq_(hashstr(im2.tostring('tiff:method=stripped')),hashstr(im3.tostring('tiff:method=stripped')))
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=stripped')),
+        hashstr(im2.tostring('tiff:method=stripped')))
+    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+    # Both of these started out premultiplied, so this round trip should be
+    # exactly the same!
+    eq_(hashstr(im2.tostring('tiff:method=stripped')),
+        hashstr(im3.tostring('tiff:method=stripped')))
+
 
 def test_tiff_round_trip_rows_stripped():
     filepath = '/tmp/mapnik-tiff-io-rows_stripped.tiff'
     filepath2 = '/tmp/mapnik-tiff-io-rows_stripped2.tiff'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(12,255,128,.5)'))
-    c = im.get_pixel(0,0,True)
+    c = im.get_pixel(0, 0, True)
     eq_(c.r, 12)
     eq_(c.g, 255)
     eq_(c.b, 128)
     eq_(c.a, 128)
     eq_(c.get_premultiplied(), False)
-    im.save(filepath,'tiff:method=stripped:rows_per_strip=8')
+    im.save(filepath, 'tiff:method=stripped:rows_per_strip=8')
     im2 = mapnik.Image.open(filepath)
-    c2 = im2.get_pixel(0,0,True)
+    c2 = im2.get_pixel(0, 0, True)
     eq_(c2.r, 6)
     eq_(c2.g, 128)
     eq_(c2.b, 64)
     eq_(c2.a, 128)
     eq_(c2.get_premultiplied(), True)
-    im2.save(filepath2,'tiff:method=stripped:rows_per_strip=8')
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
+    im2.save(filepath2, 'tiff:method=stripped:rows_per_strip=8')
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
-    # difference in tags. 
-    assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring()))
-    assert_not_equal(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')))
+    # difference in tags.
+    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+    assert_not_equal(
+        hashstr(
+            im.tostring('tiff:method=stripped:rows_per_strip=8')), hashstr(
+            im2.tostring('tiff:method=stripped:rows_per_strip=8')))
     # Now premultiply the first image and they will be the same!
     im.premultiply()
-    eq_(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')))
-    eq_(hashstr(im2.tostring()),hashstr(im3.tostring()))
-    # Both of these started out premultiplied, so this round trip should be exactly the same!
-    eq_(hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')),hashstr(im3.tostring('tiff:method=stripped:rows_per_strip=8')))
+    eq_(hashstr(im.tostring('tiff:method=stripped:rows_per_strip=8')),
+        hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')))
+    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+    # Both of these started out premultiplied, so this round trip should be
+    # exactly the same!
+    eq_(hashstr(im2.tostring('tiff:method=stripped:rows_per_strip=8')),
+        hashstr(im3.tostring('tiff:method=stripped:rows_per_strip=8')))
+
 
 def test_tiff_round_trip_buffered_tiled():
     filepath = '/tmp/mapnik-tiff-io-buffered-tiled.tiff'
     filepath2 = '/tmp/mapnik-tiff-io-buffered-tiled2.tiff'
     filepath3 = '/tmp/mapnik-tiff-io-buffered-tiled3.tiff'
-    im = mapnik.Image(255,267)
+    im = mapnik.Image(255, 267)
     im.fill(mapnik.Color('rgba(33,255,128,.5)'))
-    c = im.get_pixel(0,0,True)
+    c = im.get_pixel(0, 0, True)
     eq_(c.r, 33)
     eq_(c.g, 255)
     eq_(c.b, 128)
     eq_(c.a, 128)
     eq_(c.get_premultiplied(), False)
-    im.save(filepath,'tiff:method=tiled:tile_width=32:tile_height=32')
+    im.save(filepath, 'tiff:method=tiled:tile_width=32:tile_height=32')
     im2 = mapnik.Image.open(filepath)
-    c2 = im2.get_pixel(0,0,True)
+    c2 = im2.get_pixel(0, 0, True)
     eq_(c2.r, 17)
     eq_(c2.g, 128)
     eq_(c2.b, 64)
     eq_(c2.a, 128)
     eq_(c2.get_premultiplied(), True)
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
     im2.save(filepath2, 'tiff:method=tiled:tile_width=32:tile_height=32')
     im3.save(filepath3, 'tiff:method=tiled:tile_width=32:tile_height=32')
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
     # Because one will end up with UNASSOC alpha tag which internally the TIFF reader will premultiply, the first to string will not be the same due to the
     # difference in tags.
-    assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring()))
-    assert_not_equal(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
+    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+    assert_not_equal(
+        hashstr(
+            im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')), hashstr(
+            im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
     # Now premultiply the first image and they should be the same
     im.premultiply()
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
-    eq_(hashstr(im2.tostring()),hashstr(im3.tostring()))
-    # Both of these started out premultiplied, so this round trip should be exactly the same!
-    eq_(hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),hashstr(im3.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),
+        hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
+    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+    # Both of these started out premultiplied, so this round trip should be
+    # exactly the same!
+    eq_(hashstr(im2.tostring('tiff:method=tiled:tile_width=32:tile_height=32')),
+        hashstr(im3.tostring('tiff:method=tiled:tile_width=32:tile_height=32')))
+
 
 def test_tiff_round_trip_tiled():
     filepath = '/tmp/mapnik-tiff-io-tiled.tiff'
-    im = mapnik.Image(256,256)
+    im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('rgba(1,255,128,.5)'))
-    im.save(filepath,'tiff:method=tiled')
+    im.save(filepath, 'tiff:method=tiled')
     im2 = mapnik.Image.open(filepath)
-    im3 = mapnik.Image.fromstring(open(filepath,'r').read())
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(im.width(),im3.width())
-    eq_(im.height(),im3.height())
+    with open(filepath, READ_FLAGS) as f:
+        im3 = mapnik.Image.fromstring(f.read())
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(im.width(), im3.width())
+    eq_(im.height(), im3.height())
     # Because one will end up with an UNASSOC alpha tag, which the TIFF reader premultiplies internally, the first tostring() will not be the same due to the
     # difference in tags.
-    assert_not_equal(hashstr(im.tostring()),hashstr(im2.tostring()))
-    assert_not_equal(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
+    assert_not_equal(hashstr(im.tostring()), hashstr(im2.tostring()))
+    assert_not_equal(
+        hashstr(
+            im.tostring('tiff:method=tiled')), hashstr(
+            im2.tostring('tiff:method=tiled')))
     # Now premultiply the first image and they will be exactly the same.
     im.premultiply()
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
-    eq_(hashstr(im2.tostring()),hashstr(im3.tostring()))
-    # Both of these started out premultiplied, so this round trip should be exactly the same!
-    eq_(hashstr(im2.tostring('tiff:method=tiled')),hashstr(im3.tostring('tiff:method=tiled')))
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled')),
+        hashstr(im2.tostring('tiff:method=tiled')))
+    eq_(hashstr(im2.tostring()), hashstr(im3.tostring()))
+    # Both of these started out premultiplied, so this round trip should be
+    # exactly the same!
+    eq_(hashstr(im2.tostring('tiff:method=tiled')),
+        hashstr(im3.tostring('tiff:method=tiled')))
 
 
 def test_tiff_rgb8_compare():
     filepath1 = '../data/tiff/ndvi_256x256_rgb8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-rgb8.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff')
+    im.save(filepath2, 'tiff')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff')),hashstr(im2.tostring('tiff')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff')), hashstr(im2.tostring('tiff')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
+
 
 def test_tiff_rgba8_compare_scanline():
     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-rgba8-scanline.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=scanline')
+    im.save(filepath2, 'tiff:method=scanline')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=scanline')),
+        hashstr(im2.tostring('tiff:method=scanline')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
+
 
 def test_tiff_rgba8_compare_stripped():
     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-rgba8-stripped.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=stripped')
+    im.save(filepath2, 'tiff:method=stripped')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=stripped')),
+        hashstr(im2.tostring('tiff:method=stripped')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
+
 
 def test_tiff_rgba8_compare_tiled():
     filepath1 = '../data/tiff/ndvi_256x256_rgba8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-rgba8-stripped.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=tiled')
+    im.save(filepath2, 'tiff:method=tiled')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled')),
+        hashstr(im2.tostring('tiff:method=tiled')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.rgba8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.rgba8).tostring("tiff")), True)
+
 
 def test_tiff_gray8_compare_scanline():
     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray8-scanline.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=scanline')
+    im.save(filepath2, 'tiff:method=scanline')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=scanline')),
+        hashstr(im2.tostring('tiff:method=scanline')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
+
 
 def test_tiff_gray8_compare_stripped():
     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray8-stripped.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=stripped')
+    im.save(filepath2, 'tiff:method=stripped')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=stripped')),
+        hashstr(im2.tostring('tiff:method=stripped')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
+
 
 def test_tiff_gray8_compare_tiled():
     filepath1 = '../data/tiff/ndvi_256x256_gray8_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray8-tiled.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=tiled')
+    im.save(filepath2, 'tiff:method=tiled')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled')),
+        hashstr(im2.tostring('tiff:method=tiled')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray8).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray8).tostring("tiff")), True)
+
 
 def test_tiff_gray16_compare_scanline():
     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray16-scanline.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=scanline')
+    im.save(filepath2, 'tiff:method=scanline')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=scanline')),
+        hashstr(im2.tostring('tiff:method=scanline')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
+
 
 def test_tiff_gray16_compare_stripped():
     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray16-stripped.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=stripped')
+    im.save(filepath2, 'tiff:method=stripped')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=stripped')),
+        hashstr(im2.tostring('tiff:method=stripped')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
+
 
 def test_tiff_gray16_compare_tiled():
     filepath1 = '../data/tiff/ndvi_256x256_gray16_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray16-tiled.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=tiled')
+    im.save(filepath2, 'tiff:method=tiled')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled')),
+        hashstr(im2.tostring('tiff:method=tiled')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray16).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(
+        im.width(), im.height(), mapnik.ImageType.gray16).tostring("tiff")), True)
+
 
 def test_tiff_gray32f_compare_scanline():
     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray32f-scanline.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=scanline')
+    im.save(filepath2, 'tiff:method=scanline')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=scanline')),hashstr(im2.tostring('tiff:method=scanline')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=scanline')),
+        hashstr(im2.tostring('tiff:method=scanline')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
+
 
 def test_tiff_gray32f_compare_stripped():
     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray32f-stripped.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=stripped')
+    im.save(filepath2, 'tiff:method=stripped')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=stripped')),hashstr(im2.tostring('tiff:method=stripped')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=stripped')),
+        hashstr(im2.tostring('tiff:method=stripped')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
+
 
 def test_tiff_gray32f_compare_tiled():
     filepath1 = '../data/tiff/ndvi_256x256_gray32f_striped.tif'
     filepath2 = '/tmp/mapnik-tiff-gray32f-tiled.tiff'
     im = mapnik.Image.open(filepath1)
-    im.save(filepath2,'tiff:method=tiled')
+    im.save(filepath2, 'tiff:method=tiled')
     im2 = mapnik.Image.open(filepath2)
-    eq_(im.width(),im2.width())
-    eq_(im.height(),im2.height())
-    eq_(hashstr(im.tostring()),hashstr(im2.tostring()))
-    eq_(hashstr(im.tostring('tiff:method=tiled')),hashstr(im2.tostring('tiff:method=tiled')))
+    eq_(im.width(), im2.width())
+    eq_(im.height(), im2.height())
+    eq_(hashstr(im.tostring()), hashstr(im2.tostring()))
+    eq_(hashstr(im.tostring('tiff:method=tiled')),
+        hashstr(im2.tostring('tiff:method=tiled')))
     # should not be a blank image
-    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),im.height(),mapnik.ImageType.gray32f).tostring("tiff")),True)
+    eq_(hashstr(im.tostring("tiff")) != hashstr(mapnik.Image(im.width(),
+                                                             im.height(), mapnik.ImageType.gray32f).tostring("tiff")), True)
 
 if __name__ == "__main__":
     setup()
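
The premultiply assertions in the TIFF round-trip tests above come down to simple
alpha arithmetic: an RGBA image saved with unassociated (UNASSOC) alpha has each
channel scaled by alpha/255 when the TIFF reader premultiplies it on load, which
is why rgba(33,255,128,128) reads back as roughly (17,128,64,128). A stdlib-only
sketch of that arithmetic (illustrative only; Mapnik's internal rounding may
differ by one):

    # Scale one 8-bit channel by alpha/255, rounding to nearest.
    def premultiply_channel(value, alpha):
        return (value * alpha + 127) // 255

    def premultiply_rgba(r, g, b, a):
        return (premultiply_channel(r, a),
                premultiply_channel(g, a),
                premultiply_channel(b, a),
                a)

    # rgba(33,255,128,128) -> (17, 128, 64, 128), matching the pixel values
    # the round-trip test expects after the TIFF reader premultiplies.
    print(premultiply_rgba(33, 255, 128, 128))
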
diff --git a/test/python_tests/introspection_test.py b/test/python_tests/introspection_test.py
index afb1cc2..0c1e39d 100644
--- a/test/python_tests/introspection_test.py
+++ b/test/python_tests/introspection_test.py
@@ -1,16 +1,20 @@
 #!/usr/bin/env python
 
 import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
 
 import mapnik
 
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_introspect_symbolizers():
     # create a symbolizer
     p = mapnik.PointSymbolizer()
@@ -20,21 +24,21 @@ def test_introspect_symbolizers():
 
     eq_(p.allow_overlap, True)
     eq_(p.opacity, 0.5)
-    eq_(p.filename,'../data/images/dummy.png')
+    eq_(p.filename, '../data/images/dummy.png')
 
     # make sure the defaults
     # are what we think they are
     eq_(p.allow_overlap, True)
-    eq_(p.opacity,0.5)
-    eq_(p.filename,'../data/images/dummy.png')
+    eq_(p.opacity, 0.5)
+    eq_(p.filename, '../data/images/dummy.png')
 
     # construct objects to hold it
     r = mapnik.Rule()
     r.symbols.append(p)
     s = mapnik.Style()
     s.rules.append(r)
-    m = mapnik.Map(0,0)
-    m.append_style('s',s)
+    m = mapnik.Map(0, 0)
+    m.append_style('s', s)
 
     # try to figure out what is
     # in the map and make sure
@@ -42,19 +46,19 @@ def test_introspect_symbolizers():
 
     s2 = m.find_style('s')
     rules = s2.rules
-    eq_(len(rules),1)
+    eq_(len(rules), 1)
     r2 = rules[0]
     syms = r2.symbols
-    eq_(len(syms),1)
+    eq_(len(syms), 1)
 
-    ## TODO here, we can do...
+    # TODO here, we can do...
     sym = syms[0]
     p2 = sym.extract()
-    assert isinstance(p2,mapnik.PointSymbolizer)
+    assert isinstance(p2, mapnik.PointSymbolizer)
 
     eq_(p2.allow_overlap, True)
     eq_(p2.opacity, 0.5)
-    eq_(p2.filename,'../data/images/dummy.png')
+    eq_(p2.filename, '../data/images/dummy.png')
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/json_feature_properties_test.py b/test/python_tests/json_feature_properties_test.py
index 47f2428..4155745 100644
--- a/test/python_tests/json_feature_properties_test.py
+++ b/test/python_tests/json_feature_properties_test.py
@@ -1,102 +1,112 @@
-#encoding: utf8
+# encoding: utf8
 
 from nose.tools import eq_
+
 import mapnik
-from utilities import run_all
+
+from .utilities import run_all
+
 try:
     import json
 except ImportError:
     import simplejson as json
 
 chars = [
- {
-   "name":"single_quote",
-   "test": "string with ' quote",
-   "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}'
- },
- {
-   "name":"escaped_single_quote",
-   "test":"string with \' quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}'
- },
- {
-   "name":"double_quote",
-   "test":'string with " quote',
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}'
- },
- {
-   "name":"double_quote2",
-   "test":"string with \" quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}'
- },
- {
-   "name":"reverse_solidus", # backslash
-   "test":"string with \\ quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\ quote"}}'
- },
- {
-   "name":"solidus", # forward slash
-   "test":"string with / quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with / quote"}}'
- },
- {
-   "name":"backspace",
-   "test":"string with \b quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\b quote"}}'
- },
- {
-   "name":"formfeed",
-   "test":"string with \f quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\f quote"}}'
- },
- {
-   "name":"newline",
-   "test":"string with \n quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\n quote"}}'
- },
- {
-   "name":"carriage_return",
-   "test":"string with \r quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\r quote"}}'
- },
- {
-   "name":"horiztonal_tab",
-   "test":"string with \t quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\t quote"}}'
- },
- # remainder are c++ reserved, but not json
- {
-   "name":"vert_tab",
-   "test":"string with \v quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u000b quote"}}'
- },
- {
-   "name":"alert",
-   "test":"string with \a quote",
-   "json":'{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \u0007 quote"}}'
- }
+    {
+        "name": "single_quote",
+        "test": "string with ' quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}'
+    },
+    {
+        "name": "escaped_single_quote",
+        "test": "string with \' quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \' quote"}}'
+    },
+    {
+        "name": "double_quote",
+        "test": 'string with " quote',
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}'
+    },
+    {
+        "name": "double_quote2",
+        "test": "string with \" quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\" quote"}}'
+    },
+    {
+        "name": "reverse_solidus",  # backslash
+        "test": "string with \\ quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\\ quote"}}'
+    },
+    {
+        "name": "solidus",  # forward slash
+        "test": "string with / quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with / quote"}}'
+    },
+    {
+        "name": "backspace",
+        "test": "string with \b quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\b quote"}}'
+    },
+    {
+        "name": "formfeed",
+        "test": "string with \f quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\f quote"}}'
+    },
+    {
+        "name": "newline",
+        "test": "string with \n quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\n quote"}}'
+    },
+    {
+        "name": "carriage_return",
+        "test": "string with \r quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\r quote"}}'
+    },
+    {
+        "name": "horiztonal_tab",
+        "test": "string with \t quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\t quote"}}'
+    },
+    # remainder are c++ reserved, but not json
+    {
+        "name": "vert_tab",
+        "test": "string with \v quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u000b quote"}}'
+    },
+    {
+        "name": "alert",
+        "test": "string with \a quote",
+        "json": '{"type":"Feature","id":1,"geometry":null,"properties":{"name":"string with \\u0007 quote"}}'
+    }
 ]
 
 ctx = mapnik.Context()
 ctx.push('name')
 
+
 def test_char_escaping():
     for char in chars:
-        feat = mapnik.Feature(ctx,1)
+        feat = mapnik.Feature(ctx, 1)
         expected = char['test']
         feat["name"] = expected
-        eq_(feat["name"],expected)
+        eq_(feat["name"], expected)
         # confirm the python json module
         # is working as we would expect
         pyjson2 = json.loads(char['json'])
-        eq_(pyjson2['properties']['name'],expected)
+        eq_(pyjson2['properties']['name'], expected)
         # confirm our behavior is the same as python json module
         # for the original string
         geojson_feat_string = feat.to_geojson()
-        eq_(geojson_feat_string,char['json'],"Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" % (geojson_feat_string,char['json'],char['name']))
+        eq_(
+            geojson_feat_string,
+            char['json'],
+            "Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" %
+            (geojson_feat_string,
+             char['json'],
+                char['name']))
         # and the round tripped string
         pyjson = json.loads(geojson_feat_string)
-        eq_(pyjson['properties']['name'],expected)
+        eq_(pyjson['properties']['name'], expected)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
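
The escaping table above lines up with how Python's own json module serializes
control characters: backspace, formfeed, newline, carriage return and tab get
two-character escapes, while characters without a short escape, such as vertical
tab (0x0b) and bell (0x07), are emitted as \u00XX sequences. A small stdlib-only
check of that behaviour:

    import json

    # The same control characters the chars table above exercises.
    samples = {
        "backspace": "string with \b quote",
        "newline": "string with \n quote",
        "vert_tab": "string with \v quote",   # no short escape -> \u000b
        "alert": "string with \a quote",      # no short escape -> \u0007
    }

    for name, value in samples.items():
        print(name, json.dumps(value))
    # backspace "string with \b quote"
    # newline   "string with \n quote"
    # vert_tab  "string with \u000b quote"
    # alert     "string with \u0007 quote"
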
diff --git a/test/python_tests/layer_buffer_size_test.py b/test/python_tests/layer_buffer_size_test.py
index 83765a7..30417a3 100644
--- a/test/python_tests/layer_buffer_size_test.py
+++ b/test/python_tests/layer_buffer_size_test.py
@@ -1,9 +1,13 @@
-#coding=utf8
+# coding=utf8
 import os
-import mapnik
-from utilities import execution_path, run_all
+
 from nose.tools import eq_
 
+import mapnik
+
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
@@ -15,19 +19,22 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
     # override the positive map buffer leading
     # only one point to be rendered in the map
     def test_layer_buffer_size_1():
-        m = mapnik.Map(512,512)
-        eq_(m.buffer_size,0)
-        mapnik.load_map(m,'../data/good_maps/layer_buffer_size_reduction.xml')
-        eq_(m.buffer_size,256)
-        eq_(m.layers[0].buffer_size,-150)
+        m = mapnik.Map(512, 512)
+        eq_(m.buffer_size, 0)
+        mapnik.load_map(m, '../data/good_maps/layer_buffer_size_reduction.xml')
+        eq_(m.buffer_size, 256)
+        eq_(m.layers[0].buffer_size, -150)
         m.zoom_all()
-        im = mapnik.Image(m.width,m.height)
-        mapnik.render(m,im)
+        im = mapnik.Image(m.width, m.height)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-layer-buffer-size.png'
         expected = 'images/support/mapnik-layer-buffer-size.png'
-        im.save(actual,"png32")
+        im.save(actual, "png32")
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'tests/python_tests/' + expected))
 
 
 if __name__ == "__main__":
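
The buffer-size test above depends on the layer-level setting taking precedence
over the map-level one: the XML declares a map buffer of 256 pixels but gives the
layer a negative buffer of -150, which shrinks the query window so that only one
point survives rendering. A short sketch of inspecting those values through the
Python API (assumes mapnik is importable and the test data above is on disk, with
paths relative to test/python_tests as in the original test):

    import mapnik

    m = mapnik.Map(512, 512)
    mapnik.load_map(m, '../data/good_maps/layer_buffer_size_reduction.xml')

    # The map-level buffer is positive; the layer overrides it with a negative
    # value, so features near the edge are clipped instead of pulled in.
    print(m.buffer_size)            # 256
    print(m.layers[0].buffer_size)  # -150
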
diff --git a/test/python_tests/layer_modification_test.py b/test/python_tests/layer_modification_test.py
index 7517ac2..a4af186 100644
--- a/test/python_tests/layer_modification_test.py
+++ b/test/python_tests/layer_modification_test.py
@@ -1,15 +1,20 @@
 #!/usr/bin/env python
 
 import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
+
 import mapnik
 
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_adding_datasource_to_layer():
     map_string = '''<?xml version="1.0" encoding="utf-8"?>
 <Map>
@@ -34,9 +39,9 @@ def test_adding_datasource_to_layer():
         mapnik.load_map_from_string(m, map_string)
 
         # validate it loaded fine
-        eq_(m.layers[0].styles[0],'world_borders_style')
-        eq_(m.layers[0].styles[1],'point_style')
-        eq_(len(m.layers),1)
+        eq_(m.layers[0].styles[0], 'world_borders_style')
+        eq_(m.layers[0].styles[1], 'point_style')
+        eq_(len(m.layers), 1)
 
         # also assign a variable reference to that layer
         # below we will test that this variable references
@@ -44,28 +49,31 @@ def test_adding_datasource_to_layer():
         lyr = m.layers[0]
 
         # ensure that there was no datasource for the layer...
-        eq_(m.layers[0].datasource,None)
-        eq_(lyr.datasource,None)
+        eq_(m.layers[0].datasource, None)
+        eq_(lyr.datasource, None)
 
         # also note that since the srs was blank it defaulted to wgs84
-        eq_(m.layers[0].srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-        eq_(lyr.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+        eq_(m.layers[0].srs,
+            '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+        eq_(lyr.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
 
         # now add a datasource one...
         ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
         m.layers[0].datasource = ds
 
         # now ensure it is attached
-        eq_(m.layers[0].datasource.describe()['name'],"shape")
-        eq_(lyr.datasource.describe()['name'],"shape")
+        eq_(m.layers[0].datasource.describe()['name'], "shape")
+        eq_(lyr.datasource.describe()['name'], "shape")
 
-        # and since we have now added a shapefile in spherical mercator, adjust the projection
+        # and since we have now added a shapefile in spherical mercator, adjust
+        # the projection
         lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
 
         # test that assignment
-        eq_(m.layers[0].srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
-        eq_(lyr.srs,'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
-    except RuntimeError, e:
+        eq_(m.layers[
+            0].srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
+        eq_(lyr.srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
+    except RuntimeError as e:
         # only test datasources that we have installed
         if not 'Could not create datasource' in str(e):
             raise RuntimeError(e)
diff --git a/test/python_tests/layer_test.py b/test/python_tests/layer_test.py
index 00ea434..e303c02 100644
--- a/test/python_tests/layer_test.py
+++ b/test/python_tests/layer_test.py
@@ -2,27 +2,32 @@
 # -*- coding: utf-8 -*-
 
 from nose.tools import eq_
-from utilities import run_all
+
 import mapnik
 
+from .utilities import run_all
+
+
 # Map initialization
+
+
 def test_layer_init():
     l = mapnik.Layer('test')
-    eq_(l.name,'test')
-    eq_(l.srs,'+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
-    eq_(l.envelope(),mapnik.Box2d())
-    eq_(l.clear_label_cache,False)
-    eq_(l.cache_features,False)
-    eq_(l.visible(1),True)
-    eq_(l.active,True)
-    eq_(l.datasource,None)
-    eq_(l.queryable,False)
-    eq_(l.minimum_scale_denominator,0.0)
-    eq_(l.maximum_scale_denominator > 1e+6,True)
-    eq_(l.group_by,"")
-    eq_(l.maximum_extent,None)
-    eq_(l.buffer_size,None)
-    eq_(len(l.styles),0)
+    eq_(l.name, 'test')
+    eq_(l.srs, '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+    eq_(l.envelope(), mapnik.Box2d())
+    eq_(l.clear_label_cache, False)
+    eq_(l.cache_features, False)
+    eq_(l.visible(1), True)
+    eq_(l.active, True)
+    eq_(l.datasource, None)
+    eq_(l.queryable, False)
+    eq_(l.minimum_scale_denominator, 0.0)
+    eq_(l.maximum_scale_denominator > 1e+6, True)
+    eq_(l.group_by, "")
+    eq_(l.maximum_extent, None)
+    eq_(l.buffer_size, None)
+    eq_(len(l.styles), 0)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/load_map_test.py b/test/python_tests/load_map_test.py
index 5eb211e..6e330ed 100644
--- a/test/python_tests/load_map_test.py
+++ b/test/python_tests/load_map_test.py
@@ -1,81 +1,104 @@
 #!/usr/bin/env python
 
+import glob
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
 
-import os, glob, mapnik
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 default_logging_severity = mapnik.logger.get_severity()
 
+
 def setup():
     # make the tests silent to suppress unsupported params from harfbuzz tests
     # TODO: remove this after harfbuzz branch merges
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def teardown():
     mapnik.logger.set_severity(default_logging_severity)
 
+
 def test_broken_files():
     default_logging_severity = mapnik.logger.get_severity()
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     broken_files = glob.glob("../data/broken_maps/*.xml")
-    # Add a filename that doesn't exist 
+    # Add a filename that doesn't exist
     broken_files.append("../data/broken/does_not_exist.xml")
 
-    failures = [];
+    failures = []
     for filename in broken_files:
         try:
             m = mapnik.Map(512, 512)
             strict = True
             mapnik.load_map(m, filename, strict)
-            failures.append('Loading broken map (%s) did not raise RuntimeError!' % filename)
+            failures.append(
+                'Loading broken map (%s) did not raise RuntimeError!' %
+                filename)
         except RuntimeError:
             pass
-    eq_(len(failures),0,'\n'+'\n'.join(failures))
+    eq_(len(failures), 0, '\n' + '\n'.join(failures))
     mapnik.logger.set_severity(default_logging_severity)
 
+
 def test_can_parse_xml_with_deprecated_properties():
     default_logging_severity = mapnik.logger.get_severity()
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     files_with_deprecated_props = glob.glob("../data/deprecated_maps/*.xml")
 
-    failures = [];
+    failures = []
     for filename in files_with_deprecated_props:
         try:
             m = mapnik.Map(512, 512)
             strict = True
             mapnik.load_map(m, filename, strict)
             base_path = os.path.dirname(filename)
-            mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path)
-        except RuntimeError, e:
+            mapnik.load_map_from_string(
+                m,
+                open(
+                    filename,
+                    'rb').read(),
+                strict,
+                base_path)
+        except RuntimeError as e:
             # only test datasources that we have installed
             if not 'Could not create datasource' in str(e) \
                and not 'could not connect' in str(e):
-                failures.append('Failed to load valid map %s (%s)' % (filename,e))
-    eq_(len(failures),0,'\n'+'\n'.join(failures))
+                failures.append(
+                    'Failed to load valid map %s (%s)' %
+                    (filename, e))
+    eq_(len(failures), 0, '\n' + '\n'.join(failures))
     mapnik.logger.set_severity(default_logging_severity)
 
+
 def test_good_files():
     good_files = glob.glob("../data/good_maps/*.xml")
     good_files.extend(glob.glob("../visual_tests/styles/*.xml"))
 
-    failures = [];
+    failures = []
     for filename in good_files:
         try:
             m = mapnik.Map(512, 512)
             strict = True
             mapnik.load_map(m, filename, strict)
             base_path = os.path.dirname(filename)
-            mapnik.load_map_from_string(m,open(filename,'rb').read(),strict,base_path)
-        except RuntimeError, e:
+            with open(filename, 'rb') as f:
+                mapnik.load_map_from_string(m, f.read(), strict, base_path)
+        except RuntimeError as e:
             # only test datasources that we have installed
             if not 'Could not create datasource' in str(e) \
                and not 'could not connect' in str(e):
-                failures.append('Failed to load valid map %s (%s)' % (filename,e))
-    eq_(len(failures),0,'\n'+'\n'.join(failures))
+                failures.append(
+                    'Failed to load valid map %s (%s)' %
+                    (filename, e))
+    eq_(len(failures), 0, '\n' + '\n'.join(failures))
 
 if __name__ == "__main__":
     setup()
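
Several hunks above apply the same Python 3 porting recipe: None is a keyword, so
mapnik.severity_type.None no longer parses and must go through getattr(); the old
"except RuntimeError, e:" form becomes "except RuntimeError as e:"; and bare
open().read() calls become with-blocks so file handles are closed promptly. A
compact, self-contained illustration of those three patterns (a stand-in class is
used here instead of the real severity_type):

    # Stand-in for an enum-like object with a member literally named "None",
    # as mapnik.severity_type has; attribute syntax for it would not even parse.
    class severity_type(object):
        Debug, Warn, Error = 0, 1, 2

    # severity_type.None = 3    # SyntaxError in Python 3
    setattr(severity_type, "None", 3)
    silent = getattr(severity_type, "None")   # the workaround used above
    print(silent)                             # 3

    # Python 3 exception syntax and a context-managed read, as used above.
    try:
        with open("does_not_exist.xml", "rb") as f:
            data = f.read()
    except IOError as e:                      # was "except IOError, e:" in Py2
        print("could not read:", e)
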
diff --git a/test/python_tests/map_query_test.py b/test/python_tests/map_query_test.py
index 4035f7a..ab8335e 100644
--- a/test/python_tests/map_query_test.py
+++ b/test/python_tests/map_query_test.py
@@ -1,8 +1,13 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_,raises,assert_almost_equal
-from utilities import execution_path, run_all
-import os, mapnik
+import os
+
+from nose.tools import assert_almost_equal, eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -10,82 +15,93 @@ def setup():
     os.chdir(execution_path('.'))
 
 # map has no layers
+
+
 @raises(IndexError)
 def test_map_query_throw1():
-    m = mapnik.Map(256,256)
-    m.zoom_to_box(mapnik.Box2d(-1,-1,0,0))
-    m.query_point(0,0,0)
+    m = mapnik.Map(256, 256)
+    m.zoom_to_box(mapnik.Box2d(-1, -1, 0, 0))
+    m.query_point(0, 0, 0)
 
 # only positive indexes
+
+
 @raises(IndexError)
 def test_map_query_throw2():
-    m = mapnik.Map(256,256)
-    m.query_point(-1,0,0)
+    m = mapnik.Map(256, 256)
+    m.query_point(-1, 0, 0)
 
 # map has never been zoomed (nodata)
+
+
 @raises(RuntimeError)
 def test_map_query_throw3():
-    m = mapnik.Map(256,256)
-    m.query_point(0,0,0)
+    m = mapnik.Map(256, 256)
+    m.query_point(0, 0, 0)
 
 if 'shape' in mapnik.DatasourceCache.plugin_names():
     # map has never been zoomed (even with data)
     @raises(RuntimeError)
     def test_map_query_throw4():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml')
-        m.query_point(0,0,0)
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
+        m.query_point(0, 0, 0)
 
     # invalid coords in general (do not intersect)
     @raises(RuntimeError)
     def test_map_query_throw5():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml')
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
         m.zoom_all()
-        m.query_point(0,9999999999999999,9999999999999999)
+        m.query_point(0, 9999999999999999, 9999999999999999)
 
     def test_map_query_works1():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
-        merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+        merc_bounds = mapnik.Box2d(-20037508.34, -
+                                   20037508.34, 20037508.34, 20037508.34)
         m.maximum_extent = merc_bounds
         m.zoom_all()
-        fs = m.query_point(0,-11012435.5376, 4599674.6134) # somewhere in kansas
+        # somewhere in kansas
+        fs = m.query_point(0, -11012435.5376, 4599674.6134)
         feat = fs.next()
-        eq_(feat.attributes['NAME_FORMA'],u'United States of America')
+        eq_(feat.attributes['NAME_FORMA'], u'United States of America')
 
     def test_map_query_works2():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
-        wgs84_bounds = mapnik.Box2d(-179.999999975,-85.0511287776,179.999999975,85.0511287776)
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
+        wgs84_bounds = mapnik.Box2d(-179.999999975, -
+                                    85.0511287776, 179.999999975, 85.0511287776)
         m.maximum_extent = wgs84_bounds
         # caution - will go square due to evil aspect_fix_mode backhandedness
         m.zoom_all()
-        #mapnik.render_to_file(m,'works2.png')
+        # mapnik.render_to_file(m,'works2.png')
         # validate that aspect_fix_mode modified the bbox reasonably
         e = m.envelope()
         assert_almost_equal(e.minx, -179.999999975, places=7)
         assert_almost_equal(e.miny, -167.951396161, places=7)
         assert_almost_equal(e.maxx, 179.999999975, places=7)
         assert_almost_equal(e.maxy, 192.048603789, places=7)
-        fs = m.query_point(0,-98.9264, 38.1432) # somewhere in kansas
+        fs = m.query_point(0, -98.9264, 38.1432)  # somewhere in kansas
         feat = fs.next()
-        eq_(feat.attributes['NAME'],u'United States')
+        eq_(feat.attributes['NAME'], u'United States')
 
     def test_map_query_in_pixels_works1():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
-        merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+        merc_bounds = mapnik.Box2d(-20037508.34, -
+                                   20037508.34, 20037508.34, 20037508.34)
         m.maximum_extent = merc_bounds
         m.zoom_all()
-        fs = m.query_map_point(0,55,100) # somewhere in middle of us
+        fs = m.query_map_point(0, 55, 100)  # somewhere in middle of us
         feat = fs.next()
-        eq_(feat.attributes['NAME_FORMA'],u'United States of America')
+        eq_(feat.attributes['NAME_FORMA'], u'United States of America')
 
     def test_map_query_in_pixels_works2():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
-        wgs84_bounds = mapnik.Box2d(-179.999999975,-85.0511287776,179.999999975,85.0511287776)
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
+        wgs84_bounds = mapnik.Box2d(-179.999999975, -
+                                    85.0511287776, 179.999999975, 85.0511287776)
         m.maximum_extent = wgs84_bounds
         # caution - will go square due to evil aspect_fix_mode backhandedness
         m.zoom_all()
@@ -95,9 +111,9 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
         assert_almost_equal(e.miny, -167.951396161, places=7)
         assert_almost_equal(e.maxx, 179.999999975, places=7)
         assert_almost_equal(e.maxy, 192.048603789, places=7)
-        fs = m.query_map_point(0,55,100) # somewhere in Canada
+        fs = m.query_map_point(0, 55, 100)  # somewhere in Canada
         feat = fs.next()
-        eq_(feat.attributes['NAME'],u'Canada')
+        eq_(feat.attributes['NAME'], u'Canada')
 
 if __name__ == "__main__":
     setup()
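
query_point() and query_map_point() in the tests above take a layer index plus a
coordinate pair (map SRS units for the former, pixel coordinates for the latter)
and return a featureset whose next() yields the features under that point. A
hedged usage sketch, assuming the shape plugin and the reprojection map from the
test are available:

    import mapnik

    m = mapnik.Map(256, 256)
    mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
    m.maximum_extent = mapnik.Box2d(-20037508.34, -20037508.34,
                                    20037508.34, 20037508.34)
    m.zoom_all()

    # Layer index 0, coordinates in the map's SRS (Web Mercator metres here).
    fs = m.query_point(0, -11012435.5376, 4599674.6134)
    feat = fs.next()
    print(feat.attributes['NAME_FORMA'])   # 'United States of America'
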
diff --git a/test/python_tests/mapnik_logger_test.py b/test/python_tests/mapnik_logger_test.py
index c27ff46..8c6c543 100644
--- a/test/python_tests/mapnik_logger_test.py
+++ b/test/python_tests/mapnik_logger_test.py
@@ -1,18 +1,21 @@
 #!/usr/bin/env python
 from nose.tools import eq_
-from utilities import run_all
+
 import mapnik
 
+from .utilities import run_all
+
+
 def test_logger_init():
-    eq_(mapnik.severity_type.Debug,0)
-    eq_(mapnik.severity_type.Warn,1)
-    eq_(mapnik.severity_type.Error,2)
-    eq_(mapnik.severity_type.None,3)
+    eq_(mapnik.severity_type.Debug, 0)
+    eq_(mapnik.severity_type.Warn, 1)
+    eq_(mapnik.severity_type.Error, 2)
+    eq_(getattr(mapnik.severity_type, "None"), 3)
     default = mapnik.logger.get_severity()
     mapnik.logger.set_severity(mapnik.severity_type.Debug)
-    eq_(mapnik.logger.get_severity(),mapnik.severity_type.Debug)
+    eq_(mapnik.logger.get_severity(), mapnik.severity_type.Debug)
     mapnik.logger.set_severity(default)
-    eq_(mapnik.logger.get_severity(),default)
+    eq_(mapnik.logger.get_severity(), default)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/mapnik_test_data_test.py b/test/python_tests/mapnik_test_data_test.py
index b4226e1..c0efff6 100644
--- a/test/python_tests/mapnik_test_data_test.py
+++ b/test/python_tests/mapnik_test_data_test.py
@@ -1,31 +1,40 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from utilities import execution_path, run_all
-import os, mapnik
+from __future__ import print_function
+
+import os
 from glob import glob
 
+import mapnik
+
+from .utilities import execution_path, run_all
+
+
 default_logging_severity = mapnik.logger.get_severity()
 
+
 def setup():
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def teardown():
     mapnik.logger.set_severity(default_logging_severity)
 
 plugin_mapping = {
-    '.csv' : ['csv'],
-    '.json': ['geojson','ogr'],
-    '.tif' : ['gdal'],
+    '.csv': ['csv'],
+    '.json': ['geojson', 'ogr'],
+    '.tif': ['gdal'],
     #'.tif' : ['gdal','raster'],
-    '.kml' : ['ogr'],
-    '.gpx' : ['ogr'],
-    '.vrt' : ['gdal']
+    '.kml': ['ogr'],
+    '.gpx': ['ogr'],
+    '.vrt': ['gdal']
 }
 
+
 def test_opening_data():
     # https://github.com/mapbox/mapnik-test-data
     # cd tests/data
@@ -35,24 +44,24 @@ def test_opening_data():
         for filepath in files:
             ext = os.path.splitext(filepath)[1]
             if plugin_mapping.get(ext):
-                #print 'testing opening %s' % filepath
+                # print 'testing opening %s' % filepath
                 if 'topo' in filepath:
-                    kwargs = {'type': 'ogr','file': filepath}
+                    kwargs = {'type': 'ogr', 'file': filepath}
                     kwargs['layer_by_index'] = 0
                     try:
                         mapnik.Datasource(**kwargs)
-                    except Exception, e:
-                        print 'could not open, %s: %s' % (kwargs,e)
+                    except Exception as e:
+                        print('could not open, %s: %s' % (kwargs, e))
                 else:
-                   for plugin in plugin_mapping[ext]:
-                      kwargs = {'type': plugin,'file': filepath}
-                      if plugin is 'ogr':
-                          kwargs['layer_by_index'] = 0
-                      try:
-                          mapnik.Datasource(**kwargs)
-                      except Exception, e:
-                          print 'could not open, %s: %s' % (kwargs,e)
-            #else:
+                    for plugin in plugin_mapping[ext]:
+                        kwargs = {'type': plugin, 'file': filepath}
+                        if plugin is 'ogr':
+                            kwargs['layer_by_index'] = 0
+                        try:
+                            mapnik.Datasource(**kwargs)
+                        except Exception as e:
+                            print('could not open, %s: %s' % (kwargs, e))
+            # else:
             #    print 'skipping opening %s' % filepath
 
 if __name__ == "__main__":
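
One wrinkle the reindented hunk above preserves is "if plugin is 'ogr':".
Identity comparison against a string literal only works when the interpreter
happens to intern both strings, and CPython 3.8+ issues a SyntaxWarning for it;
equality is the portable spelling. A quick illustration:

    # "is" checks object identity, "==" checks value equality. Interned short
    # literals often compare identical by accident; equality is guaranteed.
    plugin = "ogr"
    print(plugin == "ogr")             # True, always
    print(plugin is "ogr")             # usually True, but not guaranteed

    suffix = "".join(["o", "g", "r"])  # equal value, distinct object
    print(suffix == "ogr")             # True
    print(suffix is "ogr")             # typically False
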
diff --git a/test/python_tests/markers_complex_rendering_test.py b/test/python_tests/markers_complex_rendering_test.py
index efce684..73a65b6 100644
--- a/test/python_tests/markers_complex_rendering_test.py
+++ b/test/python_tests/markers_complex_rendering_test.py
@@ -1,9 +1,13 @@
-#coding=utf8
+# coding=utf8
 import os
-import mapnik
-from utilities import execution_path, run_all
+
 from nose.tools import eq_
 
+import mapnik
+
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
@@ -11,32 +15,38 @@ def setup():
 
 if 'csv' in mapnik.DatasourceCache.plugin_names():
     def test_marker_ellipse_render1():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/marker_ellipse_transform.xml')
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform.xml')
         m.zoom_all()
-        im = mapnik.Image(m.width,m.height)
-        mapnik.render(m,im)
+        im = mapnik.Image(m.width, m.height)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-marker-ellipse-render1.png'
         expected = 'images/support/mapnik-marker-ellipse-render1.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         if os.environ.get('UPDATE'):
-            im.save(expected,'png32')
+            im.save(expected, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'tests/python_tests/' + expected))
 
     def test_marker_ellipse_render2():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/marker_ellipse_transform2.xml')
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/marker_ellipse_transform2.xml')
         m.zoom_all()
-        im = mapnik.Image(m.width,m.height)
-        mapnik.render(m,im)
+        im = mapnik.Image(m.width, m.height)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-marker-ellipse-render2.png'
         expected = 'images/support/mapnik-marker-ellipse-render2.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         if os.environ.get('UPDATE'):
-            im.save(expected,'png32')
+            im.save(expected, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'tests/python_tests/' + expected))
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/memory_datasource_test.py b/test/python_tests/memory_datasource_test.py
index bd82bea..15f9208 100644
--- a/test/python_tests/memory_datasource_test.py
+++ b/test/python_tests/memory_datasource_test.py
@@ -1,32 +1,35 @@
-#encoding: utf8
-import mapnik
-from utilities import run_all
+# encoding: utf8
 from nose.tools import eq_
 
+import mapnik
+
+from .utilities import run_all
+
+
 def test_add_feature():
     md = mapnik.MemoryDatasource()
     eq_(md.num_features(), 0)
     context = mapnik.Context()
     context.push('foo')
-    feature = mapnik.Feature(context,1)
+    feature = mapnik.Feature(context, 1)
     feature['foo'] = 'bar'
     feature.geometry = mapnik.Geometry.from_wkt('POINT(2 3)')
     md.add_feature(feature)
     eq_(md.num_features(), 1)
 
-    featureset = md.features_at_point(mapnik.Coord(2,3))
+    featureset = md.features_at_point(mapnik.Coord(2, 3))
     retrieved = []
 
-    for feat in featureset:
+    for feat in featureset.features:
         retrieved.append(feat)
 
     eq_(len(retrieved), 1)
     f = retrieved[0]
     eq_(f['foo'], 'bar')
 
-    featureset = md.features_at_point(mapnik.Coord(20,30))
+    featureset = md.features_at_point(mapnik.Coord(20, 30))
     retrieved = []
-    for feat in featureset:
+    for feat in featureset.features:
         retrieved.append(feat)
     eq_(len(retrieved), 0)
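
The switch above from "for feat in featureset:" to "for feat in
featureset.features" reflects the featureset object no longer being directly
iterable in these bindings; its .features attribute is what you loop over. A
condensed sketch of the MemoryDatasource calls the test exercises:

    import mapnik

    # Minimal in-memory datasource, mirroring the calls in the test above.
    ctx = mapnik.Context()
    ctx.push('foo')

    feature = mapnik.Feature(ctx, 1)
    feature['foo'] = 'bar'
    feature.geometry = mapnik.Geometry.from_wkt('POINT(2 3)')

    md = mapnik.MemoryDatasource()
    md.add_feature(feature)

    # features_at_point returns a featureset; iterate its .features attribute.
    fs = md.features_at_point(mapnik.Coord(2, 3))
    print([f['foo'] for f in fs.features])   # ['bar']
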
 
diff --git a/test/python_tests/multi_tile_raster_test.py b/test/python_tests/multi_tile_raster_test.py
index 7dda876..9fc9963 100644
--- a/test/python_tests/multi_tile_raster_test.py
+++ b/test/python_tests/multi_tile_raster_test.py
@@ -1,29 +1,35 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_multi_tile_policy():
     srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
     lyr = mapnik.Layer('raster')
     if 'raster' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = mapnik.Raster(
-            file = '../data/raster_tiles/${x}/${y}.tif',
-            lox = -180,
-            loy = -90,
-            hix = 180,
-            hiy = 90,
-            multi = 1,
-            tile_size = 256,
-            x_width = 2,
-            y_width = 2
-            )
+            file='../data/raster_tiles/${x}/${y}.tif',
+            lox=-180,
+            loy=-90,
+            hix=180,
+            hiy=90,
+            multi=1,
+            tile_size=256,
+            x_width=2,
+            y_width=2
+        )
         lyr.srs = srs
         _map = mapnik.Map(256, 256, srs)
         style = mapnik.Style()
@@ -40,28 +46,28 @@ def test_multi_tile_policy():
         mapnik.render(_map, im)
 
         # test green chunk
-        eq_(im.view(0,64,1,1).tostring(), '\x00\xff\x00\xff')
-        eq_(im.view(127,64,1,1).tostring(), '\x00\xff\x00\xff')
-        eq_(im.view(0,127,1,1).tostring(), '\x00\xff\x00\xff')
-        eq_(im.view(127,127,1,1).tostring(), '\x00\xff\x00\xff')
+        eq_(im.view(0, 64, 1, 1).tostring(), '\x00\xff\x00\xff')
+        eq_(im.view(127, 64, 1, 1).tostring(), '\x00\xff\x00\xff')
+        eq_(im.view(0, 127, 1, 1).tostring(), '\x00\xff\x00\xff')
+        eq_(im.view(127, 127, 1, 1).tostring(), '\x00\xff\x00\xff')
 
         # test blue chunk
-        eq_(im.view(128,64,1,1).tostring(), '\x00\x00\xff\xff')
-        eq_(im.view(255,64,1,1).tostring(), '\x00\x00\xff\xff')
-        eq_(im.view(128,127,1,1).tostring(), '\x00\x00\xff\xff')
-        eq_(im.view(255,127,1,1).tostring(), '\x00\x00\xff\xff')
+        eq_(im.view(128, 64, 1, 1).tostring(), '\x00\x00\xff\xff')
+        eq_(im.view(255, 64, 1, 1).tostring(), '\x00\x00\xff\xff')
+        eq_(im.view(128, 127, 1, 1).tostring(), '\x00\x00\xff\xff')
+        eq_(im.view(255, 127, 1, 1).tostring(), '\x00\x00\xff\xff')
 
         # test red chunk
-        eq_(im.view(0,128,1,1).tostring(), '\xff\x00\x00\xff')
-        eq_(im.view(127,128,1,1).tostring(), '\xff\x00\x00\xff')
-        eq_(im.view(0,191,1,1).tostring(), '\xff\x00\x00\xff')
-        eq_(im.view(127,191,1,1).tostring(), '\xff\x00\x00\xff')
+        eq_(im.view(0, 128, 1, 1).tostring(), '\xff\x00\x00\xff')
+        eq_(im.view(127, 128, 1, 1).tostring(), '\xff\x00\x00\xff')
+        eq_(im.view(0, 191, 1, 1).tostring(), '\xff\x00\x00\xff')
+        eq_(im.view(127, 191, 1, 1).tostring(), '\xff\x00\x00\xff')
 
         # test magenta chunk
-        eq_(im.view(128,128,1,1).tostring(), '\xff\x00\xff\xff')
-        eq_(im.view(255,128,1,1).tostring(), '\xff\x00\xff\xff')
-        eq_(im.view(128,191,1,1).tostring(), '\xff\x00\xff\xff')
-        eq_(im.view(255,191,1,1).tostring(), '\xff\x00\xff\xff')
+        eq_(im.view(128, 128, 1, 1).tostring(), '\xff\x00\xff\xff')
+        eq_(im.view(255, 128, 1, 1).tostring(), '\xff\x00\xff\xff')
+        eq_(im.view(128, 191, 1, 1).tostring(), '\xff\x00\xff\xff')
+        eq_(im.view(255, 191, 1, 1).tostring(), '\xff\x00\xff\xff')
 
 if __name__ == "__main__":
     setup()
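
The colour assertions above compare 1x1 image views against 4-byte strings:
im.view(x, y, 1, 1).tostring() returns the raw RGBA bytes of a single pixel, so
'\x00\xff\x00\xff' is opaque green and '\xff\x00\xff\xff' is opaque magenta. A
stdlib-only helper for decoding such a pixel string (byte order assumed RGBA, as
the comments in the test imply):

    # Decode a 4-byte RGBA pixel string like the ones asserted above.
    def describe_pixel(raw):
        r, g, b, a = raw   # indexing bytes yields ints on Python 3
        return "rgba(%d, %d, %d, %d)" % (r, g, b, a)

    print(describe_pixel(b'\x00\xff\x00\xff'))   # rgba(0, 255, 0, 255), green
    print(describe_pixel(b'\xff\x00\xff\xff'))   # rgba(255, 0, 255, 255), magenta
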
diff --git a/test/python_tests/object_test.py b/test/python_tests/object_test.py
index 0f23e71..583a523 100644
--- a/test/python_tests/object_test.py
+++ b/test/python_tests/object_test.py
@@ -502,7 +502,8 @@
 
 #     c1 = mapnik.Color('hsl(0, 100%, 50%)')   # red
 #     c2 = mapnik.Color('hsl(120, 100%, 50%)') # lime
-#     c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid blue
+# c3 = mapnik.Color('hsla(240, 100%, 50%, 0.5)') # semi-transparent solid
+# blue
 
 #     eq_(c1, mapnik.Color('red'))
 #     eq_(c2, mapnik.Color('lime'))
diff --git a/test/python_tests/ogr_and_shape_geometries_test.py b/test/python_tests/ogr_and_shape_geometries_test.py
index 5c6918e..6ca8567 100644
--- a/test/python_tests/ogr_and_shape_geometries_test.py
+++ b/test/python_tests/ogr_and_shape_geometries_test.py
@@ -1,8 +1,18 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
+try:
+    from itertools import izip as zip
+except ImportError:
+    pass
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -14,29 +24,29 @@ polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))",
          "POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))",
          "MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"
          "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))"
-        ]
+         ]
 
 plugins = mapnik.DatasourceCache.plugin_names()
 if 'shape' in plugins and 'ogr' in plugins:
 
     def ensure_geometries_are_interpreted_equivalently(filename):
-        ds1 = mapnik.Ogr(file=filename,layer_by_index=0)
+        ds1 = mapnik.Ogr(file=filename, layer_by_index=0)
         ds2 = mapnik.Shapefile(file=filename)
         fs1 = ds1.featureset()
         fs2 = ds2.featureset()
-        count = 0;
-        import itertools
-        for feat1,feat2 in itertools.izip(fs1, fs2):
+        count = 0
+        for feat1, feat2 in zip(fs1.features, fs2.features):
             count += 1
-            eq_(feat1.attributes,feat2.attributes)
+            eq_(feat1.attributes, feat2.attributes)
             # TODO - revisit this: https://github.com/mapnik/mapnik/issues/1093
             # eq_(feat1.to_geojson(),feat2.to_geojson())
-            #eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())
-            #eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.NDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.NDR))
-            #eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.XDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.XDR))
+            # eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())
+            # eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.NDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.NDR))
+            # eq_(feat1.geometries().to_wkb(mapnik.wkbByteOrder.XDR),feat2.geometries().to_wkb(mapnik.wkbByteOrder.XDR))
 
     def test_simple_polys():
-        ensure_geometries_are_interpreted_equivalently('../data/shp/wkt_poly.shp')
+        ensure_geometries_are_interpreted_equivalently(
+            '../data/shp/wkt_poly.shp')
 
 if __name__ == "__main__":
     setup()
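
As a side note on the izip/zip fallback added near the top of this file: on Python 2 the shim binds the lazy itertools.izip, while on Python 3 the import fails and the already-lazy builtin zip is used instead. A minimal, self-contained sketch of that behaviour (the pairwise_count helper is illustrative only, not part of the test suite):

    try:
        from itertools import izip as zip  # Python 2: lazy pairwise iteration
    except ImportError:
        pass                               # Python 3: builtin zip is already lazy

    def pairwise_count(a, b):
        # Consume the paired iterators without building an intermediate list.
        return sum(1 for _ in zip(a, b))

    assert pairwise_count(range(3), 'xyz') == 3
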
diff --git a/test/python_tests/ogr_test.py b/test/python_tests/ogr_test.py
index 905eda2..ef29037 100644
--- a/test/python_tests/ogr_test.py
+++ b/test/python_tests/ogr_test.py
@@ -1,15 +1,20 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,assert_almost_equal,raises
-from utilities import execution_path, run_all
-import os, mapnik
+import os
+
+from nose.tools import assert_almost_equal, eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
 try:
     import json
 except ImportError:
     import simplejson as json
 
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
@@ -19,21 +24,21 @@ if 'ogr' in mapnik.DatasourceCache.plugin_names():
 
     # Shapefile initialization
     def test_shapefile_init():
-        ds = mapnik.Ogr(file='../data/shp/boundaries.shp',layer_by_index=0)
+        ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
         e = ds.envelope()
         assert_almost_equal(e.minx, -11121.6896651, places=7)
         assert_almost_equal(e.miny, -724724.216526, places=6)
         assert_almost_equal(e.maxx, 2463000.67866, places=5)
         assert_almost_equal(e.maxy, 1649661.267, places=3)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
-        eq_('+proj=lcc' in meta['proj4'],True)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+        eq_('+proj=lcc' in meta['proj4'], True)
 
     # Shapefile properties
     def test_shapefile_properties():
-        ds = mapnik.Ogr(file='../data/shp/boundaries.shp',layer_by_index=0)
+        ds = mapnik.Ogr(file='../data/shp/boundaries.shp', layer_by_index=0)
         f = ds.features_at_point(ds.envelope().center(), 0.001).features[0]
-        eq_(ds.geometry_type(),mapnik.DataGeometryType.Polygon)
+        eq_(ds.geometry_type(), mapnik.DataGeometryType.Polygon)
 
         eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
         eq_(f['COUNTRY'], u'CAN')
@@ -42,7 +47,7 @@ if 'ogr' in mapnik.DatasourceCache.plugin_names():
         eq_(f['Shape_Area'], 1512185733150.0)
         eq_(f['Shape_Leng'], 19218883.724300001)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
         # NOTE: encoding is latin1 but gdal >= 1.9 should now expose utf8 encoded features
         # See SHAPE_ENCODING for overriding: http://gdal.org/ogr/drv_shapefile.html
         # Failure for the NOM_FR field is expected for older gdal
@@ -51,10 +56,22 @@ if 'ogr' in mapnik.DatasourceCache.plugin_names():
 
     @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
-        ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
-        eq_(len(ds.fields()),11)
-        eq_(ds.fields(),['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME', 'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
-        eq_(ds.field_types(),['str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float'])
+        ds = mapnik.Ogr(file='../data/shp/world_merc.shp', layer_by_index=0)
+        eq_(len(ds.fields()), 11)
+        eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
+                          'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
+        eq_(ds.field_types(),
+            ['str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
@@ -63,94 +80,226 @@ if 'ogr' in mapnik.DatasourceCache.plugin_names():
         ds.features(query)
 
     # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member.
-    #def test_handling_of_null_features():
+    # def test_handling_of_null_features():
     #    ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
     #    fs = ds.all_features()
     #    eq_(len(fs),1)
 
     # OGR plugin extent parameter
     def test_ogr_extent_parameter():
-        ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0,extent='-1,-1,1,1')
+        ds = mapnik.Ogr(
+            file='../data/shp/world_merc.shp',
+            layer_by_index=0,
+            extent='-1,-1,1,1')
         e = ds.envelope()
-        eq_(e.minx,-1)
-        eq_(e.miny,-1)
-        eq_(e.maxx,1)
-        eq_(e.maxy,1)
+        eq_(e.minx, -1)
+        eq_(e.miny, -1)
+        eq_(e.maxx, 1)
+        eq_(e.maxy, 1)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
-        eq_('+proj=merc' in meta['proj4'],True)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+        eq_('+proj=merc' in meta['proj4'], True)
 
     def test_ogr_reading_gpx_waypoint():
-        ds = mapnik.Ogr(file='../data/gpx/empty.gpx',layer='waypoints')
+        ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer='waypoints')
         e = ds.envelope()
-        eq_(e.minx,-122)
-        eq_(e.miny,48)
-        eq_(e.maxx,-122)
-        eq_(e.maxy,48)
+        eq_(e.minx, -122)
+        eq_(e.miny, 48)
+        eq_(e.maxx, -122)
+        eq_(e.maxy, 48)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_('+proj=longlat' in meta['proj4'],True)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_('+proj=longlat' in meta['proj4'], True)
 
     def test_ogr_empty_data_should_not_throw():
         default_logging_severity = mapnik.logger.get_severity()
-        mapnik.logger.set_severity(mapnik.severity_type.None)
+        mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
         # use logger to silence expected warnings
         for layer in ['routes', 'tracks', 'route_points', 'track_points']:
-            ds = mapnik.Ogr(file='../data/gpx/empty.gpx',layer=layer)
+            ds = mapnik.Ogr(file='../data/gpx/empty.gpx', layer=layer)
             e = ds.envelope()
-            eq_(e.minx,0)
-            eq_(e.miny,0)
-            eq_(e.maxx,0)
-            eq_(e.maxy,0)
+            eq_(e.minx, 0)
+            eq_(e.miny, 0)
+            eq_(e.maxx, 0)
+            eq_(e.maxy, 0)
         mapnik.logger.set_severity(default_logging_severity)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
-        eq_('+proj=longlat' in meta['proj4'],True)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+        eq_('+proj=longlat' in meta['proj4'], True)
 
     # disabled because OGR prints an annoying error: ERROR 1: Invalid Point object. Missing 'coordinates' member.
-    #def test_handling_of_null_features():
+    # def test_handling_of_null_features():
     #    ds = mapnik.Ogr(file='../data/json/null_feature.geojson',layer_by_index=0)
     #    fs = ds.all_features()
     #    eq_(len(fs),1)
 
     def test_geometry_type():
-        ds = mapnik.Ogr(file='../data/csv/wkt.csv',layer_by_index=0)
+        ds = mapnik.Ogr(file='../data/csv/wkt.csv', layer_by_index=0)
         e = ds.envelope()
         assert_almost_equal(e.minx, 1.0, places=1)
         assert_almost_equal(e.miny, 1.0, places=1)
         assert_almost_equal(e.maxx, 45.0, places=1)
         assert_almost_equal(e.maxy, 45.0, places=1)
         meta = ds.describe()
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
         #eq_('+proj=longlat' in meta['proj4'],True)
         fs = ds.featureset()
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'Point', u'coordinates': [30, 10]}, u'type': u'Feature', u'id': 2, u'properties': {u'type': u'point', u'WKT': u'           POINT (30 10)'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'Point',
+                           u'coordinates': [30,
+                                            10]},
+             u'type': u'Feature',
+             u'id': 2,
+             u'properties': {u'type': u'point',
+                             u'WKT': u'           POINT (30 10)'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'LineString', u'coordinates': [[30, 10], [10, 30], [40, 40]]}, u'type': u'Feature', u'id': 3, u'properties': {u'type': u'linestring', u'WKT': u'      LINESTRING (30 10, 10 30, 40 40)'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'LineString',
+                           u'coordinates': [[30,
+                                             10],
+                                            [10,
+                                             30],
+                                            [40,
+                                             40]]},
+                u'type': u'Feature',
+                u'id': 3,
+                u'properties': {u'type': u'linestring',
+                                u'WKT': u'      LINESTRING (30 10, 10 30, 40 40)'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'Polygon', u'coordinates': [[[30, 10], [40, 40], [20, 40], [10, 20], [30, 10]]]}, u'type': u'Feature', u'id': 4, u'properties': {u'type': u'polygon', u'WKT': u'         POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'Polygon',
+                           u'coordinates': [[[30,
+                                              10],
+                                             [40,
+                                              40],
+                                             [20,
+                                              40],
+                                             [10,
+                                              20],
+                                             [30,
+                                              10]]]},
+                u'type': u'Feature',
+                u'id': 4,
+                u'properties': {u'type': u'polygon',
+                                u'WKT': u'         POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'Polygon', u'coordinates': [[[35, 10], [45, 45], [15, 40], [10, 20], [35, 10]], [[20, 30], [35, 35], [30, 20], [20, 30]]]}, u'type': u'Feature', u'id': 5, u'properties': {u'type': u'polygon', u'WKT': u'         POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}})
+        eq_(
+            actual, {
+                u'geometry': {
+                    u'type': u'Polygon', u'coordinates': [
+                        [
+                            [
+                                35, 10], [
+                                45, 45], [
+                                15, 40], [
+                                    10, 20], [
+                                        35, 10]], [
+                                            [
+                                                20, 30], [
+                                                    35, 35], [
+                                                        30, 20], [
+                                                            20, 30]]]}, u'type': u'Feature', u'id': 5, u'properties': {
+                                                                u'type': u'polygon', u'WKT': u'         POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'MultiPoint', u'coordinates': [[10, 40], [40, 30], [20, 20], [30, 10]]}, u'type': u'Feature', u'id': 6, u'properties': {u'type': u'multipoint', u'WKT': u'      MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'MultiPoint',
+                           u'coordinates': [[10,
+                                             40],
+                                            [40,
+                                             30],
+                                            [20,
+                                             20],
+                                            [30,
+                                             10]]},
+                u'type': u'Feature',
+                u'id': 6,
+                u'properties': {u'type': u'multipoint',
+                                u'WKT': u'      MULTIPOINT ((10 40), (40 30), (20 20), (30 10))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'MultiLineString', u'coordinates': [[[10, 10], [20, 20], [10, 40]], [[40, 40], [30, 30], [40, 20], [30, 10]]]}, u'type': u'Feature', u'id': 7, u'properties': {u'type': u'multilinestring', u'WKT': u' MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'MultiLineString',
+                           u'coordinates': [[[10,
+                                              10],
+                                             [20,
+                                              20],
+                                             [10,
+                                              40]],
+                                            [[40,
+                                              40],
+                                             [30,
+                                                30],
+                                             [40,
+                                                20],
+                                             [30,
+                                                10]]]},
+                u'type': u'Feature',
+                u'id': 7,
+                u'properties': {u'type': u'multilinestring',
+                                u'WKT': u' MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[30, 20], [45, 40], [10, 40], [30, 20]]], [[[15, 5], [40, 10], [10, 20], [5, 10], [15, 5]]]]}, u'type': u'Feature', u'id': 8, u'properties': {u'type': u'multipolygon', u'WKT': u'    MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'MultiPolygon',
+                           u'coordinates': [[[[30,
+                                               20],
+                                              [45,
+                                               40],
+                                              [10,
+                                               40],
+                                              [30,
+                                               20]]],
+                                            [[[15,
+                                               5],
+                                              [40,
+                                                10],
+                                                [10,
+                                                 20],
+                                                [5,
+                                                 10],
+                                                [15,
+                                                 5]]]]},
+                u'type': u'Feature',
+                u'id': 8,
+                u'properties': {u'type': u'multipolygon',
+                                u'WKT': u'    MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [30, 20]]]]}, u'type': u'Feature', u'id': 9, u'properties': {u'type': u'multipolygon', u'WKT': u'    MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}})
+        eq_(actual, {u'geometry': {u'type': u'MultiPolygon', u'coordinates': [[[[40, 40], [20, 45], [45, 30], [40, 40]]], [[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]], [[30, 20], [20, 15], [20, 25], [
+            30, 20]]]]}, u'type': u'Feature', u'id': 9, u'properties': {u'type': u'multipolygon', u'WKT': u'    MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))'}})
         feat = fs.next()
         actual = json.loads(feat.to_geojson())
-        eq_(actual,{u'geometry': {u'type': u'GeometryCollection', u'geometries': [{u'type': u'Polygon', u'coordinates': [[[1, 1], [2, 1], [2, 2], [1, 2], [1, 1]]]}, {u'type': u'Point', u'coordinates': [2, 3]}, {u'type': u'LineString', u'coordinates': [[2, 3], [3, 4]]}]}, u'type': u'Feature', u'id': 10, u'properties': {u'type': u'collection', u'WKT': u'      GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}})
+        eq_(actual,
+            {u'geometry': {u'type': u'GeometryCollection',
+                           u'geometries': [{u'type': u'Polygon',
+                                            u'coordinates': [[[1,
+                                                               1],
+                                                              [2,
+                                                               1],
+                                                              [2,
+                                                               2],
+                                                              [1,
+                                                               2],
+                                                              [1,
+                                                               1]]]},
+                                           {u'type': u'Point',
+                                            u'coordinates': [2,
+                                                             3]},
+                                           {u'type': u'LineString',
+                                            u'coordinates': [[2,
+                                                              3],
+                                                             [3,
+                                                              4]]}]},
+                u'type': u'Feature',
+                u'id': 10,
+                u'properties': {u'type': u'collection',
+                                u'WKT': u'      GEOMETRYCOLLECTION(POLYGON((1 1,2 1,2 2,1 2,1 1)),POINT(2 3),LINESTRING(2 3,3 4))'}})
 
 if __name__ == "__main__":
     setup()
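
One change above that is easy to miss: the logger call now uses getattr(mapnik.severity_type, "None") because None is a keyword in Python 3, so the attribute syntax the Python 2 era tests used is a SyntaxError there. A rough sketch of the pattern, using a hypothetical stand-in class rather than mapnik's real binding:

    # Stand-in namespace; mapnik.severity_type is the real enum-like object.
    class severity_type(object):
        debug, warn, error = 0, 1, 2

    setattr(severity_type, "None", 3)       # a member whose name is a keyword

    # severity_type.None                    # SyntaxError on Python 3
    quiet = getattr(severity_type, "None")  # works on both Python 2 and 3
    assert quiet == 3
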
diff --git a/test/python_tests/osm_test.py b/test/python_tests/osm_test.py
index b9f5196..ef39f4a 100644
--- a/test/python_tests/osm_test.py
+++ b/test/python_tests/osm_test.py
@@ -1,9 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -19,14 +24,14 @@ if 'osm' in mapnik.DatasourceCache.plugin_names():
         e = ds.envelope()
 
         # these are hardcoded in the plugin… ugh
-        eq_(e.minx >= -180.0,True)
-        eq_(e.miny >= -90.0,True)
-        eq_(e.maxx <= 180.0,True)
-        eq_(e.maxy <= 90,True)
+        eq_(e.minx >= -180.0, True)
+        eq_(e.miny >= -90.0, True)
+        eq_(e.maxx <= 180.0, True)
+        eq_(e.maxy <= 90, True)
 
     def test_that_nonexistant_query_field_throws(**kwargs):
         ds = mapnik.Osm(file='../data/osm/nodes.osm')
-        eq_(len(ds.fields()),0)
+        eq_(len(ds.fields()), 0)
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
@@ -36,25 +41,27 @@ if 'osm' in mapnik.DatasourceCache.plugin_names():
 
     def test_that_64bit_int_fields_work():
         ds = mapnik.Osm(file='../data/osm/64bit.osm')
-        eq_(len(ds.fields()),4)
-        eq_(ds.fields(),['bigint', 'highway', 'junction', 'note'])
-        eq_(ds.field_types(),['str', 'str', 'str', 'str'])
+        eq_(len(ds.fields()), 4)
+        eq_(ds.fields(), ['bigint', 'highway', 'junction', 'note'])
+        eq_(ds.field_types(), ['str', 'str', 'str', 'str'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.to_geojson(),'{"type":"Feature","id":890,"geometry":{"type":"Point","coordinates":[-61.7960248,17.1415874]},"properties":{}}')
-        eq_(feat.id(),4294968186)
+        eq_(feat.to_geojson(
+        ), '{"type":"Feature","id":890,"geometry":{"type":"Point","coordinates":[-61.7960248,17.1415874]},"properties":{}}')
+        eq_(feat.id(), 4294968186)
         eq_(feat['bigint'], None)
         feat = fs.next()
-        eq_(feat['bigint'],'9223372036854775807')
+        eq_(feat['bigint'], '9223372036854775807')
 
     def test_reading_ways():
         ds = mapnik.Osm(file='../data/osm/ways.osm')
-        eq_(len(ds.fields()),0)
-        eq_(ds.fields(),[])
-        eq_(ds.field_types(),[])
+        eq_(len(ds.fields()), 0)
+        eq_(ds.fields(), [])
+        eq_(ds.field_types(), [])
         feat = ds.all_features()[4]
-        eq_(feat.to_geojson(),'{"type":"Feature","id":1,"geometry":{"type":"LineString","coordinates":[[0,2],[0,-2]]},"properties":{}}')
-        eq_(feat.id(),1)
+        eq_(feat.to_geojson(
+        ), '{"type":"Feature","id":1,"geometry":{"type":"LineString","coordinates":[[0,2],[0,-2]]},"properties":{}}')
+        eq_(feat.id(), 1)
 
 
 if __name__ == "__main__":
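
A small arithmetic note on the 64-bit id test above: the GeoJSON output keeps id 890 while feat.id() reports 4294968186, and those two values differ by exactly 2**32, consistent with the test's focus on values that overflow 32-bit integers. The check itself:

    geojson_id = 890
    feature_id = 4294968186
    assert feature_id - geojson_id == 2 ** 32  # 4294967296
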
diff --git a/test/python_tests/palette_test.py b/test/python_tests/palette_test.py
index 9b30895..9913f81 100644
--- a/test/python_tests/palette_test.py
+++ b/test/python_tests/palette_test.py
@@ -1,9 +1,17 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import os
+import sys
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -16,38 +24,50 @@ expected_256 = '[Palette 256 colors #272727 #3c3c3c #484847 #564b41 #605243 #6a5
 
 expected_rgb = '[Palette 2 colors #ff00ff #ffffff]'
 
+
 def test_reading_palettes():
-    act = open('../data/palettes/palette64.act','rb')
-    palette = mapnik.Palette(act.read(),'act')
-    eq_(palette.to_string(),expected_64);
-    act = open('../data/palettes/palette256.act','rb')
-    palette = mapnik.Palette(act.read(),'act')
-    eq_(palette.to_string(),expected_256);
-    palette = mapnik.Palette('\xff\x00\xff\xff\xff\xff', 'rgb')
-    eq_(palette.to_string(),expected_rgb);
+    with open('../data/palettes/palette64.act', 'rb') as act:
+        palette = mapnik.Palette(act.read(), 'act')
+    eq_(palette.to_string(), expected_64)
+    with open('../data/palettes/palette256.act', 'rb') as act:
+        palette = mapnik.Palette(act.read(), 'act')
+    eq_(palette.to_string(), expected_256)
+    if PYTHON3:
+        palette = mapnik.Palette(b'\xff\x00\xff\xff\xff\xff', 'rgb')
+    else:
+        palette = mapnik.Palette('\xff\x00\xff\xff\xff\xff', 'rgb')
+    eq_(palette.to_string(), expected_rgb)
 
 if 'shape' in mapnik.DatasourceCache.plugin_names():
 
     def test_render_with_palette():
-        m = mapnik.Map(600,400)
-        mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml')
+        m = mapnik.Map(600, 400)
+        mapnik.load_map(m, '../data/good_maps/agg_poly_gamma_map.xml')
         m.zoom_all()
-        im = mapnik.Image(m.width,m.height)
-        mapnik.render(m,im)
-        act = open('../data/palettes/palette256.act','rb')
-        palette = mapnik.Palette(act.read(),'act')
+        im = mapnik.Image(m.width, m.height)
+        mapnik.render(m, im)
+        with open('../data/palettes/palette256.act', 'rb') as act:
+            palette = mapnik.Palette(act.read(), 'act')
         # test saving directly to filesystem
-        im.save('/tmp/mapnik-palette-test.png','png',palette)
+        im.save('/tmp/mapnik-palette-test.png', 'png', palette)
         expected = './images/support/mapnik-palette-test.png'
         if os.environ.get('UPDATE'):
-            im.save(expected,"png",palette);
+            im.save(expected, "png", palette)
 
         # test saving to a string
-        open('/tmp/mapnik-palette-test2.png','wb').write(im.tostring('png',palette));
+        with open('/tmp/mapnik-palette-test2.png', 'wb') as f:
+            f.write(im.tostring('png', palette))
         # compare the two methods
-        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),mapnik.Image.open('/tmp/mapnik-palette-test2.png').tostring('png32'),'%s not eq to %s' % ('/tmp/mapnik-palette-test.png','/tmp/mapnik-palette-test2.png'))
+        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),
+            mapnik.Image.open(
+                '/tmp/mapnik-palette-test2.png').tostring('png32'),
+            '%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
+                                 '/tmp/mapnik-palette-test2.png'))
         # compare to expected
-        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),mapnik.Image.open(expected).tostring('png32'),'%s not eq to %s' % ('/tmp/mapnik-palette-test.png',expected))
+        eq_(mapnik.Image.open('/tmp/mapnik-palette-test.png').tostring('png32'),
+            mapnik.Image.open(expected).tostring('png32'),
+            '%s not eq to %s' % ('/tmp/mapnik-palette-test.png',
+                                 expected))
 
 if __name__ == "__main__":
     setup()
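
The palette changes above also make the raw 'rgb' palette a bytes literal under Python 3, since it is six packed bytes rather than text. A minimal sketch of that payload, independent of mapnik, showing why it decodes to the two colours named in expected_rgb, magenta #ff00ff and white #ffffff:

    import sys

    PYTHON3 = sys.version_info[0] == 3
    raw = b'\xff\x00\xff\xff\xff\xff' if PYTHON3 else '\xff\x00\xff\xff\xff\xff'
    # Each colour is three bytes: R, G, B.
    colours = [raw[i:i + 3] for i in range(0, len(raw), 3)]
    assert len(colours) == 2               # magenta, then white
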
diff --git a/test/python_tests/parameters_test.py b/test/python_tests/parameters_test.py
index 1587fbd..f6e25b8 100644
--- a/test/python_tests/parameters_test.py
+++ b/test/python_tests/parameters_test.py
@@ -3,57 +3,67 @@
 
 import os
 import sys
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
+
 import mapnik
 
+from .utilities import execution_path, run_all
+
+
 def setup():
     os.chdir(execution_path('.'))
 
+
 def test_parameter_null():
-    p = mapnik.Parameter('key',None)
-    eq_(p[0],'key')
-    eq_(p[1],None)
+    p = mapnik.Parameter('key', None)
+    eq_(p[0], 'key')
+    eq_(p[1], None)
+
 
 def test_parameter_string():
-    p = mapnik.Parameter('key','value')
-    eq_(p[0],'key')
-    eq_(p[1],'value')
+    p = mapnik.Parameter('key', 'value')
+    eq_(p[0], 'key')
+    eq_(p[1], 'value')
+
 
 def test_parameter_unicode():
-    p = mapnik.Parameter('key',u'value')
-    eq_(p[0],'key')
-    eq_(p[1],u'value')
+    p = mapnik.Parameter('key', u'value')
+    eq_(p[0], 'key')
+    eq_(p[1], u'value')
+
 
 def test_parameter_integer():
-    p = mapnik.Parameter('int',sys.maxint)
-    eq_(p[0],'int')
-    eq_(p[1],sys.maxint)
+    p = mapnik.Parameter('int', sys.maxsize)
+    eq_(p[0], 'int')
+    eq_(p[1], sys.maxsize)
+
 
 def test_parameter_double():
-    p = mapnik.Parameter('double',float(sys.maxint))
-    eq_(p[0],'double')
-    eq_(p[1],float(sys.maxint))
+    p = mapnik.Parameter('double', float(sys.maxsize))
+    eq_(p[0], 'double')
+    eq_(p[1], float(sys.maxsize))
+
 
 def test_parameter_boolean():
-    p = mapnik.Parameter('boolean',True)
-    eq_(p[0],'boolean')
-    eq_(p[1],True)
-    eq_(bool(p[1]),True)
+    p = mapnik.Parameter('boolean', True)
+    eq_(p[0], 'boolean')
+    eq_(p[1], True)
+    eq_(bool(p[1]), True)
 
 
 def test_parameters():
     params = mapnik.Parameters()
-    p = mapnik.Parameter('float',1.0777)
-    eq_(p[0],'float')
-    eq_(p[1],1.0777)
+    p = mapnik.Parameter('float', 1.0777)
+    eq_(p[0], 'float')
+    eq_(p[1], 1.0777)
 
     params.append(p)
 
-    eq_(params[0][0],'float')
-    eq_(params[0][1],1.0777)
+    eq_(params[0][0], 'float')
+    eq_(params[0][1], 1.0777)
 
-    eq_(params.get('float'),1.0777)
+    eq_(params.get('float'), 1.0777)
 
 
 if __name__ == "__main__":
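
The integer tests above switch from sys.maxint to sys.maxsize because maxint no longer exists in Python 3, while maxsize is available on both major versions. A tiny sketch of why it still serves as a "large integer" fixture for the int and double parameter tests (the bound shown assumes at least a 32-bit platform):

    import sys

    big = sys.maxsize              # Python 2 and 3
    assert big >= 2 ** 31 - 1      # at least a 32-bit signed maximum
    assert float(big) > 0          # also usable for the double-valued parameter
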
diff --git a/test/python_tests/pgraster_test.py b/test/python_tests/pgraster_test.py
index dc7584f..61a6c60 100644
--- a/test/python_tests/pgraster_test.py
+++ b/test/python_tests/pgraster_test.py
@@ -1,39 +1,48 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_,assert_almost_equal
 import atexit
-import time
-from utilities import execution_path, run_all, side_by_side_image
-from subprocess import Popen, PIPE
-import os, mapnik
-import sys
+import os
 import re
+import sys
+import time
 from binascii import hexlify
+from subprocess import PIPE, Popen
+
+from nose.tools import assert_almost_equal, eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all, side_by_side_image
 
 MAPNIK_TEST_DBNAME = 'mapnik-tmp-pgraster-test-db'
 POSTGIS_TEMPLATE_DBNAME = 'template_postgis'
-DEBUG_OUTPUT=False
+DEBUG_OUTPUT = False
+
 
 def log(msg):
     if DEBUG_OUTPUT:
-      print msg
+        print(msg)
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
-def call(cmd,silent=False):
-    stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
+
+def call(cmd, silent=False):
+    stdin, stderr = Popen(cmd, shell=True, stdout=PIPE,
+                          stderr=PIPE).communicate()
     if not stderr:
         return stdin.strip()
     elif not silent and 'error' in stderr.lower() \
-        or 'not found' in stderr.lower() \
-        or 'could not connect' in stderr.lower() \
-        or 'bad connection' in stderr.lower() \
-        or 'not recognized as an internal' in stderr.lower():
+            or 'not found' in stderr.lower() \
+            or 'could not connect' in stderr.lower() \
+            or 'bad connection' in stderr.lower() \
+            or 'not recognized as an internal' in stderr.lower():
         raise RuntimeError(stderr.strip())
 
+
 def psql_can_connect():
     """Test ability to connect to a postgis template db with no options.
 
@@ -46,14 +55,16 @@ def psql_can_connect():
         call('psql %s -c "select postgis_version()"' % POSTGIS_TEMPLATE_DBNAME)
         return True
     except RuntimeError:
-        print 'Notice: skipping pgraster tests (connection)'
+        print('Notice: skipping pgraster tests (connection)')
         return False
 
+
 def psql_run(cmd):
-  cmd = 'psql --set ON_ERROR_STOP=1 %s -c "%s"' % \
-    (MAPNIK_TEST_DBNAME, cmd.replace('"', '\\"'))
-  log('DEBUG: running ' + cmd)
-  call(cmd)
+    cmd = 'psql --set ON_ERROR_STOP=1 %s -c "%s"' % \
+        (MAPNIK_TEST_DBNAME, cmd.replace('"', '\\"'))
+    log('DEBUG: running ' + cmd)
+    call(cmd)
+
 
 def raster2pgsql_on_path():
     """Test for presence of raster2pgsql on the user path.
@@ -64,9 +75,10 @@ def raster2pgsql_on_path():
         call('raster2pgsql')
         return True
     except RuntimeError:
-        print 'Notice: skipping pgraster tests (raster2pgsql)'
+        print('Notice: skipping pgraster tests (raster2pgsql)')
         return False
 
+
 def createdb_and_dropdb_on_path():
     """Test for presence of dropdb/createdb on user path.
 
@@ -77,687 +89,746 @@ def createdb_and_dropdb_on_path():
         call('dropdb --help')
         return True
     except RuntimeError:
-        print 'Notice: skipping pgraster tests (createdb/dropdb)'
+        print('Notice: skipping pgraster tests (createdb/dropdb)')
         return False
 
+
 def postgis_setup():
-    call('dropdb %s' % MAPNIK_TEST_DBNAME,silent=True)
-    call('createdb -T %s %s' % (POSTGIS_TEMPLATE_DBNAME,MAPNIK_TEST_DBNAME),silent=False)
+    call('dropdb %s' % MAPNIK_TEST_DBNAME, silent=True)
+    call(
+        'createdb -T %s %s' %
+        (POSTGIS_TEMPLATE_DBNAME,
+         MAPNIK_TEST_DBNAME),
+        silent=False)
+
 
 def postgis_takedown():
     pass
     # fails as the db is in use: https://github.com/mapnik/mapnik/issues/960
     #call('dropdb %s' % MAPNIK_TEST_DBNAME)
 
+
 def import_raster(filename, tabname, tilesize, constraint, overview):
-  log('tile: ' + tilesize + ' constraints: ' + str(constraint) \
-      + ' overviews: ' + overview)
-  cmd = 'raster2pgsql -Y -I -q'
-  if constraint:
-    cmd += ' -C'
-  if tilesize:
-    cmd += ' -t ' + tilesize
-  if overview:
-    cmd += ' -l ' + overview
-  cmd += ' %s %s | psql --set ON_ERROR_STOP=1 -q %s' % (os.path.abspath(os.path.normpath(filename)),tabname,MAPNIK_TEST_DBNAME)
-  log('Import call: ' + cmd)
-  call(cmd)
+    log('tile: ' + tilesize + ' constraints: ' + str(constraint)
+        + ' overviews: ' + overview)
+    cmd = 'raster2pgsql -Y -I -q'
+    if constraint:
+        cmd += ' -C'
+    if tilesize:
+        cmd += ' -t ' + tilesize
+    if overview:
+        cmd += ' -l ' + overview
+    cmd += ' %s %s | psql --set ON_ERROR_STOP=1 -q %s' % (
+        os.path.abspath(os.path.normpath(filename)), tabname, MAPNIK_TEST_DBNAME)
+    log('Import call: ' + cmd)
+    call(cmd)
+
 
 def drop_imported(tabname, overview):
-  psql_run('DROP TABLE IF EXISTS "' + tabname + '";')
-  if overview:
-    for of in overview.split(','):
-      psql_run('DROP TABLE IF EXISTS "o_' + of + '_' + tabname + '";')
-
-def compare_images(expected,im):
-  expected = os.path.join(os.path.dirname(expected),os.path.basename(expected).replace(':','_'))
-  if not os.path.exists(expected) or os.environ.get('UPDATE'):
-    print 'generating expected image %s' % expected
-    im.save(expected,'png32')
-  expected_im = mapnik.Image.open(expected)
-  diff = expected.replace('.png','-diff.png')
-  if len(im.tostring("png32")) != len(expected_im.tostring("png32")):
-    compared = side_by_side_image(expected_im, im)
-    compared.save(diff)
-    assert False,'images do not match, check diff at %s' % diff
-  else:
-    if os.path.exists(diff): os.unlink(diff)
-  return True
+    psql_run('DROP TABLE IF EXISTS "' + tabname + '";')
+    if overview:
+        for of in overview.split(','):
+            psql_run('DROP TABLE IF EXISTS "o_' + of + '_' + tabname + '";')
+
+
+def compare_images(expected, im):
+    expected = os.path.join(
+        os.path.dirname(expected),
+        os.path.basename(expected).replace(
+            ':',
+            '_'))
+    if not os.path.exists(expected) or os.environ.get('UPDATE'):
+        print('generating expected image %s' % expected)
+        im.save(expected, 'png32')
+    expected_im = mapnik.Image.open(expected)
+    diff = expected.replace('.png', '-diff.png')
+    if len(im.tostring("png32")) != len(expected_im.tostring("png32")):
+        compared = side_by_side_image(expected_im, im)
+        compared.save(diff)
+        assert False, 'images do not match, check diff at %s' % diff
+    else:
+        if os.path.exists(diff):
+            os.unlink(diff)
+    return True
 
 if 'pgraster' in mapnik.DatasourceCache.plugin_names() \
         and createdb_and_dropdb_on_path() \
         and psql_can_connect() \
         and raster2pgsql_on_path():
 
-    # initialize test database
+        # initialize test database
     postgis_setup()
 
     # [old]dataraster.tif, 2283x1913 int16 single-band
     # dataraster-small.tif, 457x383 int16 single-band
     def _test_dataraster_16bsi_rendering(lbl, overview, rescale, clip):
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table='"dataRaster"',
-        band=1,use_overviews=1 if overview else 0,
-        prescale_rasters=rescale,clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['rid'],1)
-      lyr = mapnik.Layer('dataraster_16bsi')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(-14637, 3903178, 1126863, 4859678)
-      env = lyr.envelope()
-      # As the input size is a prime number both horizontally
-      # and vertically, we expect the extent of the overview
-      # tables to be a pixel wider than the original, whereas
-      # the pixel size in geographical units depends on the
-      # overview factor. So we start with the original pixel size
-      # as base scale and multiply by the overview factor.
-      # NOTE: the overview table extent only grows north and east
-      pixsize = 500 # see gdalinfo dataraster.tif
-      pixsize = 2497 # see gdalinfo dataraster-small.tif
-      tol = pixsize * max(overview.split(',')) if overview else 0
-      assert_almost_equal(env.minx, expenv.minx)
-      assert_almost_equal(env.miny, expenv.miny, delta=tol) 
-      assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
-      assert_almost_equal(env.maxy, expenv.maxy)
-      mm = mapnik.Map(256, 256)
-      style = mapnik.Style()
-      col = mapnik.RasterColorizer();
-      col.default_mode = mapnik.COLORIZER_DISCRETE;
-      col.add_stop(0, mapnik.Color(0x40,0x40,0x40,255));
-      col.add_stop(10, mapnik.Color(0x80,0x80,0x80,255));
-      col.add_stop(20, mapnik.Color(0xa0,0xa0,0xa0,255));
-      sym = mapnik.RasterSymbolizer()
-      sym.colorizer = col
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      # no data
-      eq_(im.view(1,1,1,1).tostring(), '\x00\x00\x00\x00') 
-      eq_(im.view(255,255,1,1).tostring(), '\x00\x00\x00\x00') 
-      eq_(im.view(195,116,1,1).tostring(), '\x00\x00\x00\x00') 
-      # A0A0A0
-      eq_(im.view(100,120,1,1).tostring(), '\xa0\xa0\xa0\xff')
-      eq_(im.view( 75, 80,1,1).tostring(), '\xa0\xa0\xa0\xff')
-      # 808080
-      eq_(im.view( 74,170,1,1).tostring(), '\x80\x80\x80\xff')
-      eq_(im.view( 30, 50,1,1).tostring(), '\x80\x80\x80\xff')
-      # 404040
-      eq_(im.view(190, 70,1,1).tostring(), '\x40\x40\x40\xff')
-      eq_(im.view(140,170,1,1).tostring(), '\x40\x40\x40\xff')
-
-      # Now zoom over a portion of the env (1/10)
-      newenv = mapnik.Box2d(273663,4024478,330738,4072303)
-      mm.zoom_to_box(newenv)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
-      # nodata
-      eq_(hexlify(im.view(255,255,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(200,254,1,1).tostring()), '00000000')
-      # A0A0A0
-      eq_(hexlify(im.view(90,232,1,1).tostring()), 'a0a0a0ff')
-      eq_(hexlify(im.view(96,245,1,1).tostring()), 'a0a0a0ff')
-      # 808080
-      eq_(hexlify(im.view(1,1,1,1).tostring()), '808080ff') 
-      eq_(hexlify(im.view(128,128,1,1).tostring()), '808080ff') 
-      # 404040
-      eq_(hexlify(im.view(255, 0,1,1).tostring()), '404040ff')
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table='"dataRaster"',
+                             band=1, use_overviews=1 if overview else 0,
+                             prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['rid'], 1)
+        lyr = mapnik.Layer('dataraster_16bsi')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(-14637, 3903178, 1126863, 4859678)
+        env = lyr.envelope()
+        # As the input size is a prime number both horizontally
+        # and vertically, we expect the extent of the overview
+        # tables to be a pixel wider than the original, whereas
+        # the pixel size in geographical units depends on the
+        # overview factor. So we start with the original pixel size
+        # as base scale and multiply by the overview factor.
+        # NOTE: the overview table extent only grows north and east
+        pixsize = 500  # see gdalinfo dataraster.tif
+        pixsize = 2497  # see gdalinfo dataraster-small.tif
+        tol = pixsize * max(overview.split(',')) if overview else 0
+        assert_almost_equal(env.minx, expenv.minx)
+        assert_almost_equal(env.miny, expenv.miny, delta=tol)
+        assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
+        assert_almost_equal(env.maxy, expenv.maxy)
+        mm = mapnik.Map(256, 256)
+        style = mapnik.Style()
+        col = mapnik.RasterColorizer()
+        col.default_mode = mapnik.COLORIZER_DISCRETE
+        col.add_stop(0, mapnik.Color(0x40, 0x40, 0x40, 255))
+        col.add_stop(10, mapnik.Color(0x80, 0x80, 0x80, 255))
+        col.add_stop(20, mapnik.Color(0xa0, 0xa0, 0xa0, 255))
+        sym = mapnik.RasterSymbolizer()
+        sym.colorizer = col
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        # no data
+        eq_(im.view(1, 1, 1, 1).tostring(), '\x00\x00\x00\x00')
+        eq_(im.view(255, 255, 1, 1).tostring(), '\x00\x00\x00\x00')
+        eq_(im.view(195, 116, 1, 1).tostring(), '\x00\x00\x00\x00')
+        # A0A0A0
+        eq_(im.view(100, 120, 1, 1).tostring(), '\xa0\xa0\xa0\xff')
+        eq_(im.view(75, 80, 1, 1).tostring(), '\xa0\xa0\xa0\xff')
+        # 808080
+        eq_(im.view(74, 170, 1, 1).tostring(), '\x80\x80\x80\xff')
+        eq_(im.view(30, 50, 1, 1).tostring(), '\x80\x80\x80\xff')
+        # 404040
+        eq_(im.view(190, 70, 1, 1).tostring(), '\x40\x40\x40\xff')
+        eq_(im.view(140, 170, 1, 1).tostring(), '\x40\x40\x40\xff')
+
+        # Now zoom over a portion of the env (1/10)
+        newenv = mapnik.Box2d(273663, 4024478, 330738, 4072303)
+        mm.zoom_to_box(newenv)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
+        # nodata
+        eq_(hexlify(im.view(255, 255, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(200, 254, 1, 1).tostring()), '00000000')
+        # A0A0A0
+        eq_(hexlify(im.view(90, 232, 1, 1).tostring()), 'a0a0a0ff')
+        eq_(hexlify(im.view(96, 245, 1, 1).tostring()), 'a0a0a0ff')
+        # 808080
+        eq_(hexlify(im.view(1, 1, 1, 1).tostring()), '808080ff')
+        eq_(hexlify(im.view(128, 128, 1, 1).tostring()), '808080ff')
+        # 404040
+        eq_(hexlify(im.view(255, 0, 1, 1).tostring()), '404040ff')
 
     def _test_dataraster_16bsi(lbl, tilesize, constraint, overview):
-      import_raster('../data/raster/dataraster-small.tif', 'dataRaster', tilesize, constraint, overview)
-      if constraint:
-        lbl += ' C'
-      if tilesize:
-        lbl += ' T:' + tilesize
-      if overview:
-        lbl += ' O:' + overview
-      for prescale in [0,1]:
-        for clip in [0,1]:
-          _test_dataraster_16bsi_rendering(lbl, overview, prescale, clip)
-      drop_imported('dataRaster', overview)
+        import_raster(
+            '../data/raster/dataraster-small.tif',
+            'dataRaster',
+            tilesize,
+            constraint,
+            overview)
+        if constraint:
+            lbl += ' C'
+        if tilesize:
+            lbl += ' T:' + tilesize
+        if overview:
+            lbl += ' O:' + overview
+        for prescale in [0, 1]:
+            for clip in [0, 1]:
+                _test_dataraster_16bsi_rendering(lbl, overview, prescale, clip)
+        drop_imported('dataRaster', overview)
 
     def test_dataraster_16bsi():
-      #for tilesize in ['','256x256']:
-      for tilesize in ['256x256']:
-        for constraint in [0,1]:
-          #for overview in ['','4','2,16']:
-          for overview in ['','2']:
-            _test_dataraster_16bsi('data_16bsi', tilesize, constraint, overview)
+        # for tilesize in ['','256x256']:
+        for tilesize in ['256x256']:
+            for constraint in [0, 1]:
+                # for overview in ['','4','2,16']:
+                for overview in ['', '2']:
+                    _test_dataraster_16bsi(
+                        'data_16bsi', tilesize, constraint, overview)
 
     # river.tiff, RGBA 8BUI
     def _test_rgba_8bui_rendering(lbl, overview, rescale, clip):
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table='(select * from "River") foo',
-        use_overviews=1 if overview else 0,
-        prescale_rasters=rescale,clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['rid'],1)
-      lyr = mapnik.Layer('rgba_8bui')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(0, -210, 256, 0)
-      env = lyr.envelope()
-      # As the input size is a prime number both horizontally
-      # and vertically, we expect the extent of the overview
-      # tables to be a pixel wider than the original, whereas
-      # the pixel size in geographical units depends on the
-      # overview factor. So we start with the original pixel size
-      # as base scale and multiply by the overview factor.
-      # NOTE: the overview table extent only grows north and east
-      pixsize = 1 # see gdalinfo river.tif
-      tol = pixsize * max(overview.split(',')) if overview else 0
-      assert_almost_equal(env.minx, expenv.minx)
-      assert_almost_equal(env.miny, expenv.miny, delta=tol) 
-      assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
-      assert_almost_equal(env.maxy, expenv.maxy)
-      mm = mapnik.Map(256, 256)
-      style = mapnik.Style()
-      sym = mapnik.RasterSymbolizer()
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      expected = 'images/support/pgraster/%s-%s-%s-%s-box1.png' % (lyr.name,lbl,overview,clip)
-      compare_images(expected,im)
-      # no data
-      eq_(hexlify(im.view(3,3,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(250,250,1,1).tostring()), '00000000') 
-      # full opaque river color
-      eq_(hexlify(im.view(175,118,1,1).tostring()), 'b9d8f8ff') 
-      # half-transparent pixel
-      pxstr = hexlify(im.view(122,138,1,1).tostring())
-      apat = ".*(..)$"
-      match = re.match(apat, pxstr)
-      assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat
-      alpha = match.group(1)
-      assert alpha != 'ff' and alpha != '00', \
-        'unexpected full transparent/opaque pixel: ' + alpha
-
-      # Now zoom over a portion of the env (1/10)
-      newenv = mapnik.Box2d(166,-105,191,-77)
-      mm.zoom_to_box(newenv)
-      t0 = time.time() # we want wall time to include IO waits
-      im = mapnik.Image(mm.width, mm.height)
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
-      expected = 'images/support/pgraster/%s-%s-%s-%s-box2.png' % (lyr.name,lbl,overview,clip)
-      compare_images(expected,im)
-      # no data
-      eq_(hexlify(im.view(255,255,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(200,40,1,1).tostring()), '00000000')
-      # full opaque river color
-      eq_(hexlify(im.view(100,168,1,1).tostring()), 'b9d8f8ff')
-      # half-transparent pixel
-      pxstr = hexlify(im.view(122,138,1,1).tostring())
-      apat = ".*(..)$"
-      match = re.match(apat, pxstr)
-      assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat
-      alpha = match.group(1)
-      assert alpha != 'ff' and alpha != '00', \
-        'unexpected full transparent/opaque pixel: ' + alpha
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table='(select * from "River") foo',
+                             use_overviews=1 if overview else 0,
+                             prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['rid'], 1)
+        lyr = mapnik.Layer('rgba_8bui')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(0, -210, 256, 0)
+        env = lyr.envelope()
+        # As the input size is a prime number both horizontally
+        # and vertically, we expect the extent of the overview
+        # tables to be a pixel wider than the original, whereas
+        # the pixel size in geographical units depends on the
+        # overview factor. So we start with the original pixel size
+        # as base scale and multiply by the overview factor.
+        # NOTE: the overview table extent only grows north and east
+        pixsize = 1  # see gdalinfo river.tif
+        tol = pixsize * max(overview.split(',')) if overview else 0
+        assert_almost_equal(env.minx, expenv.minx)
+        assert_almost_equal(env.miny, expenv.miny, delta=tol)
+        assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
+        assert_almost_equal(env.maxy, expenv.maxy)
+        mm = mapnik.Map(256, 256)
+        style = mapnik.Style()
+        sym = mapnik.RasterSymbolizer()
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        expected = 'images/support/pgraster/%s-%s-%s-%s-box1.png' % (
+            lyr.name, lbl, overview, clip)
+        compare_images(expected, im)
+        # no data
+        eq_(hexlify(im.view(3, 3, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(250, 250, 1, 1).tostring()), '00000000')
+        # full opaque river color
+        eq_(hexlify(im.view(175, 118, 1, 1).tostring()), 'b9d8f8ff')
+        # half-transparent pixel
+        pxstr = hexlify(im.view(122, 138, 1, 1).tostring())
+        apat = ".*(..)$"
+        match = re.match(apat, pxstr)
+        assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat
+        alpha = match.group(1)
+        assert alpha != 'ff' and alpha != '00', \
+            'unexpected full transparent/opaque pixel: ' + alpha
+
+        # Now zoom over a portion of the env (1/10)
+        newenv = mapnik.Box2d(166, -105, 191, -77)
+        mm.zoom_to_box(newenv)
+        t0 = time.time()  # we want wall time to include IO waits
+        im = mapnik.Image(mm.width, mm.height)
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
+        expected = 'images/support/pgraster/%s-%s-%s-%s-box2.png' % (
+            lyr.name, lbl, overview, clip)
+        compare_images(expected, im)
+        # no data
+        eq_(hexlify(im.view(255, 255, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(200, 40, 1, 1).tostring()), '00000000')
+        # full opaque river color
+        eq_(hexlify(im.view(100, 168, 1, 1).tostring()), 'b9d8f8ff')
+        # half-transparent pixel
+        pxstr = hexlify(im.view(122, 138, 1, 1).tostring())
+        apat = ".*(..)$"
+        match = re.match(apat, pxstr)
+        assert match, 'pixel ' + pxstr + ' does not match pattern ' + apat
+        alpha = match.group(1)
+        assert alpha != 'ff' and alpha != '00', \
+            'unexpected full transparent/opaque pixel: ' + alpha
 
     def _test_rgba_8bui(lbl, tilesize, constraint, overview):
-      import_raster('../data/raster/river.tiff', 'River', tilesize, constraint, overview)
-      if constraint:
-        lbl += ' C'
-      if tilesize:
-        lbl += ' T:' + tilesize
-      if overview:
-        lbl += ' O:' + overview
-      for prescale in [0,1]:
-        for clip in [0,1]:
-          _test_rgba_8bui_rendering(lbl, overview, prescale, clip)
-      drop_imported('River', overview)
+        import_raster(
+            '../data/raster/river.tiff',
+            'River',
+            tilesize,
+            constraint,
+            overview)
+        if constraint:
+            lbl += ' C'
+        if tilesize:
+            lbl += ' T:' + tilesize
+        if overview:
+            lbl += ' O:' + overview
+        for prescale in [0, 1]:
+            for clip in [0, 1]:
+                _test_rgba_8bui_rendering(lbl, overview, prescale, clip)
+        drop_imported('River', overview)
 
     def test_rgba_8bui():
-      for tilesize in ['','16x16']:
-        for constraint in [0,1]:
-          for overview in ['2']:
-            _test_rgba_8bui('rgba_8bui', tilesize, constraint, overview)
+        for tilesize in ['', '16x16']:
+            for constraint in [0, 1]:
+                for overview in ['2']:
+                    _test_rgba_8bui(
+                        'rgba_8bui', tilesize, constraint, overview)
 
     # nodata-edge.tif, RGB 8BUI
     def _test_rgb_8bui_rendering(lbl, tnam, overview, rescale, clip):
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME,table=tnam,
-        use_overviews=1 if overview else 0,
-        prescale_rasters=rescale,clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['rid'],1)
-      lyr = mapnik.Layer('rgba_8bui')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(-12329035.7652168,4508650.39854396, \
-                            -12328653.0279471,4508957.34625536)
-      env = lyr.envelope()
-      # As the input size is a prime number both horizontally
-      # and vertically, we expect the extent of the overview
-      # tables to be a pixel wider than the original, whereas
-      # the pixel size in geographical units depends on the
-      # overview factor. So we start with the original pixel size
-      # as base scale and multiply by the overview factor.
-      # NOTE: the overview table extent only grows north and east
-      pixsize = 2 # see gdalinfo nodata-edge.tif
-      tol = pixsize * max(overview.split(',')) if overview else 0
-      assert_almost_equal(env.minx, expenv.minx, places=0)
-      assert_almost_equal(env.miny, expenv.miny, delta=tol)
-      assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
-      assert_almost_equal(env.maxy, expenv.maxy, places=0)
-      mm = mapnik.Map(256, 256)
-      style = mapnik.Style()
-      sym = mapnik.RasterSymbolizer()
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box1.png' % (lyr.name,tnam,lbl,overview,clip)
-      compare_images(expected,im)
-      # no data
-      eq_(hexlify(im.view(3,16,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(128,16,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(250,16,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(3,240,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(128,240,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(250,240,1,1).tostring()), '00000000')
-      # dark brown
-      eq_(hexlify(im.view(174,39,1,1).tostring()), 'c3a698ff') 
-      # dark gray
-      eq_(hexlify(im.view(195,132,1,1).tostring()), '575f62ff') 
-      # Now zoom over a portion of the env (1/10)
-      newenv = mapnik.Box2d(-12329035.7652168, 4508926.651484220, \
-                            -12328997.49148983,4508957.34625536)
-      mm.zoom_to_box(newenv)
-      t0 = time.time() # we want wall time to include IO waits
-      im = mapnik.Image(mm.width, mm.height)
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
-      expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box2.png' % (lyr.name,tnam,lbl,overview,clip)
-      compare_images(expected,im)
-      # no data
-      eq_(hexlify(im.view(3,16,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(128,16,1,1).tostring()), '00000000')
-      eq_(hexlify(im.view(250,16,1,1).tostring()), '00000000')
-      # black
-      eq_(hexlify(im.view(3,42,1,1).tostring()), '000000ff')
-      eq_(hexlify(im.view(3,134,1,1).tostring()), '000000ff')
-      eq_(hexlify(im.view(3,244,1,1).tostring()), '000000ff')
-      # gray
-      eq_(hexlify(im.view(135,157,1,1).tostring()), '4e555bff')
-      # brown
-      eq_(hexlify(im.view(195,223,1,1).tostring()), 'f2cdbaff')
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=tnam,
+                             use_overviews=1 if overview else 0,
+                             prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['rid'], 1)
+        lyr = mapnik.Layer('rgba_8bui')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(-12329035.7652168, 4508650.39854396,
+                              -12328653.0279471, 4508957.34625536)
+        env = lyr.envelope()
+        # As the input size is a prime number both horizontally
+        # and vertically, we expect the extent of the overview
+        # tables to be a pixel wider than the original, whereas
+        # the pixel size in geographical units depends on the
+        # overview factor. So we start with the original pixel size
+        # as base scale and multiply by the overview factor.
+        # NOTE: the overview table extent only grows north and east
+        pixsize = 2  # see gdalinfo nodata-edge.tif
+        tol = pixsize * max(int(x) for x in overview.split(',')) if overview else 0
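+        # tolerance in map units: one source pixel times the largest overview factor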
+        assert_almost_equal(env.minx, expenv.minx, places=0)
+        assert_almost_equal(env.miny, expenv.miny, delta=tol)
+        assert_almost_equal(env.maxx, expenv.maxx, delta=tol)
+        assert_almost_equal(env.maxy, expenv.maxy, places=0)
+        mm = mapnik.Map(256, 256)
+        style = mapnik.Style()
+        sym = mapnik.RasterSymbolizer()
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box1.png' % (
+            lyr.name, tnam, lbl, overview, clip)
+        compare_images(expected, im)
+        # no data
+        eq_(hexlify(im.view(3, 16, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(128, 16, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(250, 16, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(3, 240, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(128, 240, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(250, 240, 1, 1).tostring()), '00000000')
+        # dark brown
+        eq_(hexlify(im.view(174, 39, 1, 1).tostring()), 'c3a698ff')
+        # dark gray
+        eq_(hexlify(im.view(195, 132, 1, 1).tostring()), '575f62ff')
+        # Now zoom over a portion of the env (1/10)
+        newenv = mapnik.Box2d(-12329035.7652168, 4508926.651484220,
+                              -12328997.49148983, 4508957.34625536)
+        mm.zoom_to_box(newenv)
+        t0 = time.time()  # we want wall time to include IO waits
+        im = mapnik.Image(mm.width, mm.height)
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:1/10')
+        expected = 'images/support/pgraster/%s-%s-%s-%s-%s-box2.png' % (
+            lyr.name, tnam, lbl, overview, clip)
+        compare_images(expected, im)
+        # no data
+        eq_(hexlify(im.view(3, 16, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(128, 16, 1, 1).tostring()), '00000000')
+        eq_(hexlify(im.view(250, 16, 1, 1).tostring()), '00000000')
+        # black
+        eq_(hexlify(im.view(3, 42, 1, 1).tostring()), '000000ff')
+        eq_(hexlify(im.view(3, 134, 1, 1).tostring()), '000000ff')
+        eq_(hexlify(im.view(3, 244, 1, 1).tostring()), '000000ff')
+        # gray
+        eq_(hexlify(im.view(135, 157, 1, 1).tostring()), '4e555bff')
+        # brown
+        eq_(hexlify(im.view(195, 223, 1, 1).tostring()), 'f2cdbaff')
 
     def _test_rgb_8bui(lbl, tilesize, constraint, overview):
-      tnam = 'nodataedge'
-      import_raster('../data/raster/nodata-edge.tif', tnam, tilesize, constraint, overview)
-      if constraint:
-        lbl += ' C'
-      if tilesize:
-        lbl += ' T:' + tilesize
-      if overview:
-        lbl += ' O:' + overview
-      for prescale in [0,1]:
-        for clip in [0,1]:
-          _test_rgb_8bui_rendering(lbl, tnam, overview, prescale, clip)
-      #drop_imported(tnam, overview)
+        tnam = 'nodataedge'
+        import_raster(
+            '../data/raster/nodata-edge.tif',
+            tnam,
+            tilesize,
+            constraint,
+            overview)
+        if constraint:
+            lbl += ' C'
+        if tilesize:
+            lbl += ' T:' + tilesize
+        if overview:
+            lbl += ' O:' + overview
+        for prescale in [0, 1]:
+            for clip in [0, 1]:
+                _test_rgb_8bui_rendering(lbl, tnam, overview, prescale, clip)
+        #drop_imported(tnam, overview)
 
     def test_rgb_8bui():
-      for tilesize in ['64x64']:
-        for constraint in [1]:
-          for overview in ['']:
-            _test_rgb_8bui('rgb_8bui', tilesize, constraint, overview)
-
-    def _test_grayscale_subquery(lbl,pixtype,value):
-      #
-      #      3   8   13
-      #    +---+---+---+
-      #  3 | v | v | v |  NOTE: writes different color
-      #    +---+---+---+        in 13,8 and 8,13
-      #  8 | v | v | a |  
-      #    +---+---+---+  
-      # 13 | v | b | v |
-      #    +---+---+---+
-      #
-      val_a = value/3;
-      val_b = val_a*2;
-      sql = "(select 3 as i, " \
-            " ST_SetValues(" \
-            "  ST_SetValues(" \
-            "   ST_AsRaster(" \
-            "    ST_MakeEnvelope(0,0,14,14), " \
-            "    1.0, -1.0, '%s', %s" \
-            "   ), " \
-            "   11, 6, 4, 5, %s::float8" \
-            "  )," \
-            "  6, 11, 5, 4, %s::float8" \
-            " ) as \"R\"" \
-            ") as foo" % (pixtype,value, val_a, val_b)
-      rescale = 0
-      clip = 0
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
-        raster_field='"R"', use_overviews=1,
-        prescale_rasters=rescale,clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['i'],3)
-      lyr = mapnik.Layer('grayscale_subquery')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(0,0,14,14)
-      env = lyr.envelope()
-      assert_almost_equal(env.minx, expenv.minx, places=0)
-      assert_almost_equal(env.miny, expenv.miny, places=0)
-      assert_almost_equal(env.maxx, expenv.maxx, places=0)
-      assert_almost_equal(env.maxy, expenv.maxy, places=0)
-      mm = mapnik.Map(15, 15)
-      style = mapnik.Style()
-      sym = mapnik.RasterSymbolizer()
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (lyr.name,lbl,pixtype,value)
-      compare_images(expected,im)
-      h = format(value, '02x')
-      hex_v = h+h+h+'ff'
-      h = format(val_a, '02x')
-      hex_a = h+h+h+'ff'
-      h = format(val_b, '02x')
-      hex_b = h+h+h+'ff'
-      eq_(hexlify(im.view( 3, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view(13, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 3, 8,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8, 8,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view(13, 8,1,1).tostring()), hex_a);
-      eq_(hexlify(im.view( 3,13,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8,13,1,1).tostring()), hex_b);
-      eq_(hexlify(im.view(13,13,1,1).tostring()), hex_v);
+        for tilesize in ['64x64']:
+            for constraint in [1]:
+                for overview in ['']:
+                    _test_rgb_8bui('rgb_8bui', tilesize, constraint, overview)
+
+    def _test_grayscale_subquery(lbl, pixtype, value):
+        #
+        #      3   8   13
+        #    +---+---+---+
+        #  3 | v | v | v |  NOTE: writes different color
+        #    +---+---+---+        in 13,8 and 8,13
+        #  8 | v | v | a |
+        #    +---+---+---+
+        # 13 | v | b | v |
+        #    +---+---+---+
+        #
+        val_a = value // 3
+        val_b = val_a * 2
+        sql = "(select 3 as i, " \
+              " ST_SetValues(" \
+              "  ST_SetValues(" \
+              "   ST_AsRaster(" \
+              "    ST_MakeEnvelope(0,0,14,14), " \
+              "    1.0, -1.0, '%s', %s" \
+              "   ), " \
+              "   11, 6, 4, 5, %s::float8" \
+              "  )," \
+              "  6, 11, 5, 4, %s::float8" \
+              " ) as \"R\"" \
+              ") as foo" % (pixtype, value, val_a, val_b)
+        rescale = 0
+        clip = 0
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
+                             raster_field='"R"', use_overviews=1,
+                             prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['i'], 3)
+        lyr = mapnik.Layer('grayscale_subquery')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(0, 0, 14, 14)
+        env = lyr.envelope()
+        assert_almost_equal(env.minx, expenv.minx, places=0)
+        assert_almost_equal(env.miny, expenv.miny, places=0)
+        assert_almost_equal(env.maxx, expenv.maxx, places=0)
+        assert_almost_equal(env.maxy, expenv.maxy, places=0)
+        mm = mapnik.Map(15, 15)
+        style = mapnik.Style()
+        sym = mapnik.RasterSymbolizer()
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (
+            lyr.name, lbl, pixtype, value)
+        compare_images(expected, im)
+        h = format(value, '02x')
+        hex_v = h + h + h + 'ff'
+        h = format(val_a, '02x')
+        hex_a = h + h + h + 'ff'
+        h = format(val_b, '02x')
+        hex_b = h + h + h + 'ff'
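+        # expected pixels: the gray value repeated for R, G and B, with opaque alpha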
+        eq_(hexlify(im.view(3, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(13, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(3, 8, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 8, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(13, 8, 1, 1).tostring()), hex_a)
+        eq_(hexlify(im.view(3, 13, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 13, 1, 1).tostring()), hex_b)
+        eq_(hexlify(im.view(13, 13, 1, 1).tostring()), hex_v)
 
     def test_grayscale_2bui_subquery():
-      _test_grayscale_subquery('grayscale_2bui_subquery', '2BUI', 3)
+        _test_grayscale_subquery('grayscale_2bui_subquery', '2BUI', 3)
 
     def test_grayscale_4bui_subquery():
-      _test_grayscale_subquery('grayscale_4bui_subquery', '4BUI', 15)
+        _test_grayscale_subquery('grayscale_4bui_subquery', '4BUI', 15)
 
     def test_grayscale_8bui_subquery():
-      _test_grayscale_subquery('grayscale_8bui_subquery', '8BUI', 63)
+        _test_grayscale_subquery('grayscale_8bui_subquery', '8BUI', 63)
 
     def test_grayscale_8bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_grayscale_subquery('grayscale_8bsi_subquery', '8BSI', 69)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_grayscale_subquery('grayscale_8bsi_subquery', '8BSI', 69)
 
     def test_grayscale_16bui_subquery():
-      _test_grayscale_subquery('grayscale_16bui_subquery', '16BUI', 126)
+        _test_grayscale_subquery('grayscale_16bui_subquery', '16BUI', 126)
 
     def test_grayscale_16bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_grayscale_subquery('grayscale_16bsi_subquery', '16BSI', 144)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_grayscale_subquery('grayscale_16bsi_subquery', '16BSI', 144)
 
     def test_grayscale_32bui_subquery():
-      _test_grayscale_subquery('grayscale_32bui_subquery', '32BUI', 255)
+        _test_grayscale_subquery('grayscale_32bui_subquery', '32BUI', 255)
 
     def test_grayscale_32bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_grayscale_subquery('grayscale_32bsi_subquery', '32BSI', 129)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_grayscale_subquery('grayscale_32bsi_subquery', '32BSI', 129)
 
     def _test_data_subquery(lbl, pixtype, value):
-      #
-      #      3   8   13
-      #    +---+---+---+
-      #  3 | v | v | v |  NOTE: writes different values
-      #    +---+---+---+        in 13,8 and 8,13
-      #  8 | v | v | a |  
-      #    +---+---+---+  
-      # 13 | v | b | v |
-      #    +---+---+---+
-      #
-      val_a = value/3;
-      val_b = val_a*2;
-      sql = "(select 3 as i, " \
-            " ST_SetValues(" \
-            "  ST_SetValues(" \
-            "   ST_AsRaster(" \
-            "    ST_MakeEnvelope(0,0,14,14), " \
-            "    1.0, -1.0, '%s', %s" \
-            "   ), " \
-            "   11, 6, 5, 5, %s::float8" \
-            "  )," \
-            "  6, 11, 5, 5, %s::float8" \
-            " ) as \"R\"" \
-            ") as foo" % (pixtype,value, val_a, val_b)
-      overview = ''
-      rescale = 0
-      clip = 0
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
-        raster_field='R', use_overviews=0 if overview else 0,
-        band=1, prescale_rasters=rescale, clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['i'],3)
-      lyr = mapnik.Layer('data_subquery')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(0,0,14,14)
-      env = lyr.envelope()
-      assert_almost_equal(env.minx, expenv.minx, places=0)
-      assert_almost_equal(env.miny, expenv.miny, places=0)
-      assert_almost_equal(env.maxx, expenv.maxx, places=0)
-      assert_almost_equal(env.maxy, expenv.maxy, places=0)
-      mm = mapnik.Map(15, 15)
-      style = mapnik.Style()
-      col = mapnik.RasterColorizer();
-      col.default_mode = mapnik.COLORIZER_DISCRETE;
-      col.add_stop(val_a, mapnik.Color(0xff,0x00,0x00,255));
-      col.add_stop(val_b, mapnik.Color(0x00,0xff,0x00,255));
-      col.add_stop(value, mapnik.Color(0x00,0x00,0xff,255));
-      sym = mapnik.RasterSymbolizer()
-      sym.colorizer = col
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (lyr.name,lbl,pixtype,value)
-      compare_images(expected,im)
+        #
+        #      3   8   13
+        #    +---+---+---+
+        #  3 | v | v | v |  NOTE: writes different values
+        #    +---+---+---+        in 13,8 and 8,13
+        #  8 | v | v | a |
+        #    +---+---+---+
+        # 13 | v | b | v |
+        #    +---+---+---+
+        #
+        val_a = value // 3
+        val_b = val_a * 2
+        sql = "(select 3 as i, " \
+              " ST_SetValues(" \
+              "  ST_SetValues(" \
+              "   ST_AsRaster(" \
+              "    ST_MakeEnvelope(0,0,14,14), " \
+              "    1.0, -1.0, '%s', %s" \
+              "   ), " \
+              "   11, 6, 5, 5, %s::float8" \
+              "  )," \
+              "  6, 11, 5, 5, %s::float8" \
+              " ) as \"R\"" \
+              ") as foo" % (pixtype, value, val_a, val_b)
+        overview = ''
+        rescale = 0
+        clip = 0
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
+                             raster_field='R', use_overviews=0 if overview else 0,
+                             band=1, prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['i'], 3)
+        lyr = mapnik.Layer('data_subquery')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(0, 0, 14, 14)
+        env = lyr.envelope()
+        assert_almost_equal(env.minx, expenv.minx, places=0)
+        assert_almost_equal(env.miny, expenv.miny, places=0)
+        assert_almost_equal(env.maxx, expenv.maxx, places=0)
+        assert_almost_equal(env.maxy, expenv.maxy, places=0)
+        mm = mapnik.Map(15, 15)
+        style = mapnik.Style()
+        col = mapnik.RasterColorizer()
+        col.default_mode = mapnik.COLORIZER_DISCRETE
+        col.add_stop(val_a, mapnik.Color(0xff, 0x00, 0x00, 255))
+        col.add_stop(val_b, mapnik.Color(0x00, 0xff, 0x00, 255))
+        col.add_stop(value, mapnik.Color(0x00, 0x00, 0xff, 255))
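+        # discrete colorizer: val_a renders red, val_b green and the full value blue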
+        sym = mapnik.RasterSymbolizer()
+        sym.colorizer = col
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        expected = 'images/support/pgraster/%s-%s-%s-%s.png' % (
+            lyr.name, lbl, pixtype, value)
+        compare_images(expected, im)
 
     def test_data_2bui_subquery():
-      _test_data_subquery('data_2bui_subquery', '2BUI', 3)
+        _test_data_subquery('data_2bui_subquery', '2BUI', 3)
 
     def test_data_4bui_subquery():
-      _test_data_subquery('data_4bui_subquery', '4BUI', 15)
+        _test_data_subquery('data_4bui_subquery', '4BUI', 15)
 
     def test_data_8bui_subquery():
-      _test_data_subquery('data_8bui_subquery', '8BUI', 63)
+        _test_data_subquery('data_8bui_subquery', '8BUI', 63)
 
     def test_data_8bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_data_subquery('data_8bsi_subquery', '8BSI', 69)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_data_subquery('data_8bsi_subquery', '8BSI', 69)
 
     def test_data_16bui_subquery():
-      _test_data_subquery('data_16bui_subquery', '16BUI', 126)
+        _test_data_subquery('data_16bui_subquery', '16BUI', 126)
 
     def test_data_16bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_data_subquery('data_16bsi_subquery', '16BSI', 135)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_data_subquery('data_16bsi_subquery', '16BSI', 135)
 
     def test_data_32bui_subquery():
-      _test_data_subquery('data_32bui_subquery', '32BUI', 255)
+        _test_data_subquery('data_32bui_subquery', '32BUI', 255)
 
     def test_data_32bsi_subquery():
-      # NOTE: we're using a positive integer because Mapnik
-      #       does not support negative data values anyway
-      _test_data_subquery('data_32bsi_subquery', '32BSI', 264)
+        # NOTE: we're using a positive integer because Mapnik
+        #       does not support negative data values anyway
+        _test_data_subquery('data_32bsi_subquery', '32BSI', 264)
 
     def test_data_32bf_subquery():
-      _test_data_subquery('data_32bf_subquery', '32BF', 450)
+        _test_data_subquery('data_32bf_subquery', '32BF', 450)
 
     def test_data_64bf_subquery():
-      _test_data_subquery('data_64bf_subquery', '64BF', 3072)
+        _test_data_subquery('data_64bf_subquery', '64BF', 3072)
 
     def _test_rgba_subquery(lbl, pixtype, r, g, b, a, g1, b1):
-      #
-      #      3   8   13
-      #    +---+---+---+
-      #  3 | v | v | h |  NOTE: writes different alpha
-      #    +---+---+---+        in 13,8 and 8,13
-      #  8 | v | v | a |  
-      #    +---+---+---+  
-      # 13 | v | b | v |
-      #    +---+---+---+
-      #
-      sql = "(select 3 as i, " \
-            " ST_SetValues(" \
-            "  ST_SetValues(" \
-            "   ST_AddBand(" \
-            "    ST_AddBand(" \
-            "     ST_AddBand(" \
-            "      ST_AsRaster(" \
-            "       ST_MakeEnvelope(0,0,14,14), " \
-            "       1.0, -1.0, '%s', %s" \
-            "      )," \
-            "      '%s', %d::float" \
-            "     ), " \
-            "     '%s', %d::float" \
-            "    ), " \
-            "    '%s', %d::float" \
-            "   ), " \
-            "   2, 11, 6, 4, 5, %s::float8" \
-            "  )," \
-            "  3, 6, 11, 5, 4, %s::float8" \
-            " ) as r" \
-            ") as foo" % (pixtype, r, pixtype, g, pixtype, b, pixtype, a, g1, b1)
-      overview = ''
-      rescale = 0
-      clip = 0
-      if rescale:
-        lbl += ' Sc'
-      if clip:
-        lbl += ' Cl'
-      ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
-        raster_field='r', use_overviews=0 if overview else 0,
-        prescale_rasters=rescale, clip_rasters=clip)
-      fs = ds.featureset()
-      feature = fs.next()
-      eq_(feature['i'],3)
-      lyr = mapnik.Layer('rgba_subquery')
-      lyr.datasource = ds
-      expenv = mapnik.Box2d(0,0,14,14)
-      env = lyr.envelope()
-      assert_almost_equal(env.minx, expenv.minx, places=0)
-      assert_almost_equal(env.miny, expenv.miny, places=0)
-      assert_almost_equal(env.maxx, expenv.maxx, places=0)
-      assert_almost_equal(env.maxy, expenv.maxy, places=0)
-      mm = mapnik.Map(15, 15)
-      style = mapnik.Style()
-      sym = mapnik.RasterSymbolizer()
-      rule = mapnik.Rule()
-      rule.symbols.append(sym)
-      style.rules.append(rule)
-      mm.append_style('foo', style)
-      lyr.styles.append('foo')
-      mm.layers.append(lyr)
-      mm.zoom_to_box(expenv)
-      im = mapnik.Image(mm.width, mm.height)
-      t0 = time.time() # we want wall time to include IO waits
-      mapnik.render(mm, im)
-      lap = time.time() - t0
-      log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
-      expected = 'images/support/pgraster/%s-%s-%s-%s-%s-%s-%s-%s-%s.png' % (lyr.name,lbl, pixtype, r, g, b, a, g1, b1)
-      compare_images(expected,im)
-      hex_v = format(r << 24 | g  << 16 | b  << 8 | a, '08x')
-      hex_a = format(r << 24 | g1 << 16 | b  << 8 | a, '08x')
-      hex_b = format(r << 24 | g  << 16 | b1 << 8 | a, '08x')
-      eq_(hexlify(im.view( 3, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view(13, 3,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 3, 8,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8, 8,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view(13, 8,1,1).tostring()), hex_a);
-      eq_(hexlify(im.view( 3,13,1,1).tostring()), hex_v);
-      eq_(hexlify(im.view( 8,13,1,1).tostring()), hex_b);
-      eq_(hexlify(im.view(13,13,1,1).tostring()), hex_v);
+        #
+        #      3   8   13
+        #    +---+---+---+
+        #  3 | v | v | v |  NOTE: writes a different green
+        #    +---+---+---+        in 13,8 and a different blue in 8,13
+        #  8 | v | v | a |
+        #    +---+---+---+
+        # 13 | v | b | v |
+        #    +---+---+---+
+        #
+        sql = "(select 3 as i, " \
+              " ST_SetValues(" \
+              "  ST_SetValues(" \
+              "   ST_AddBand(" \
+              "    ST_AddBand(" \
+              "     ST_AddBand(" \
+              "      ST_AsRaster(" \
+              "       ST_MakeEnvelope(0,0,14,14), " \
+              "       1.0, -1.0, '%s', %s" \
+              "      )," \
+              "      '%s', %d::float" \
+              "     ), " \
+              "     '%s', %d::float" \
+              "    ), " \
+              "    '%s', %d::float" \
+              "   ), " \
+              "   2, 11, 6, 4, 5, %s::float8" \
+              "  )," \
+              "  3, 6, 11, 5, 4, %s::float8" \
+              " ) as r" \
+              ") as foo" % (
+                  pixtype,
+                  r,
+                  pixtype,
+                  g,
+                  pixtype,
+                  b,
+                  pixtype,
+                  a,
+                  g1,
+                  b1)
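+        # band 1 (red) comes from ST_AsRaster; ST_AddBand appends the green, blue
+        # and alpha bands; ST_SetValues then overwrites blocks of the green (band 2)
+        # and blue (band 3) bands with g1 and b1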
+        overview = ''
+        rescale = 0
+        clip = 0
+        if rescale:
+            lbl += ' Sc'
+        if clip:
+            lbl += ' Cl'
+        ds = mapnik.PgRaster(dbname=MAPNIK_TEST_DBNAME, table=sql,
+                             raster_field='r', use_overviews=0 if overview else 0,
+                             prescale_rasters=rescale, clip_rasters=clip)
+        fs = ds.featureset()
+        feature = fs.next()
+        eq_(feature['i'], 3)
+        lyr = mapnik.Layer('rgba_subquery')
+        lyr.datasource = ds
+        expenv = mapnik.Box2d(0, 0, 14, 14)
+        env = lyr.envelope()
+        assert_almost_equal(env.minx, expenv.minx, places=0)
+        assert_almost_equal(env.miny, expenv.miny, places=0)
+        assert_almost_equal(env.maxx, expenv.maxx, places=0)
+        assert_almost_equal(env.maxy, expenv.maxy, places=0)
+        mm = mapnik.Map(15, 15)
+        style = mapnik.Style()
+        sym = mapnik.RasterSymbolizer()
+        rule = mapnik.Rule()
+        rule.symbols.append(sym)
+        style.rules.append(rule)
+        mm.append_style('foo', style)
+        lyr.styles.append('foo')
+        mm.layers.append(lyr)
+        mm.zoom_to_box(expenv)
+        im = mapnik.Image(mm.width, mm.height)
+        t0 = time.time()  # we want wall time to include IO waits
+        mapnik.render(mm, im)
+        lap = time.time() - t0
+        log('T ' + str(lap) + ' -- ' + lbl + ' E:full')
+        expected = 'images/support/pgraster/%s-%s-%s-%s-%s-%s-%s-%s-%s.png' % (
+            lyr.name, lbl, pixtype, r, g, b, a, g1, b1)
+        compare_images(expected, im)
+        hex_v = format(r << 24 | g << 16 | b << 8 | a, '08x')
+        hex_a = format(r << 24 | g1 << 16 | b << 8 | a, '08x')
+        hex_b = format(r << 24 | g << 16 | b1 << 8 | a, '08x')
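+        # expected pixels packed as RRGGBBAA hex strings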
+        eq_(hexlify(im.view(3, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(13, 3, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(3, 8, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 8, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(13, 8, 1, 1).tostring()), hex_a)
+        eq_(hexlify(im.view(3, 13, 1, 1).tostring()), hex_v)
+        eq_(hexlify(im.view(8, 13, 1, 1).tostring()), hex_b)
+        eq_(hexlify(im.view(13, 13, 1, 1).tostring()), hex_v)
 
     def test_rgba_8bui_subquery():
-      _test_rgba_subquery('rgba_8bui_subquery', '8BUI', 255, 0, 0, 255, 255, 255)
-
-    #def test_rgba_16bui_subquery():
+        _test_rgba_subquery(
+            'rgba_8bui_subquery',
+            '8BUI',
+            255,
+            0,
+            0,
+            255,
+            255,
+            255)
+
+    # def test_rgba_16bui_subquery():
     #  _test_rgba_subquery('rgba_16bui_subquery', '16BUI', 65535, 0, 0, 65535, 65535, 65535)
 
-    #def test_rgba_32bui_subquery():
+    # def test_rgba_32bui_subquery():
     #  _test_rgba_subquery('rgba_32bui_subquery', '32BUI')
 
     atexit.register(postgis_takedown)
 
+
 def enabled(tname):
-  enabled = len(sys.argv) < 2 or tname in sys.argv
-  if not enabled:
-    print "Skipping " + tname + " as not explicitly enabled"
-  return enabled
+    enabled = len(sys.argv) < 2 or tname in sys.argv
+    if not enabled:
+        print("Skipping " + tname + " as not explicitly enabled")
+    return enabled
 
 if __name__ == "__main__":
     setup()
-    fail = run_all(eval(x) for x in dir() if x.startswith("test_") and enabled(x))
+    fail = run_all(eval(x)
+                   for x in dir() if x.startswith("test_") and enabled(x))
     exit(fail)
diff --git a/test/python_tests/pickling_test.py b/test/python_tests/pickling_test.py
index 7a3572d..a42e4f1 100644
--- a/test/python_tests/pickling_test.py
+++ b/test/python_tests/pickling_test.py
@@ -2,16 +2,21 @@
 # -*- coding: utf-8 -*-
 
 import os
+import pickle
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
 
-import mapnik, pickle
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_color_pickle():
     c = mapnik.Color('blue')
 
@@ -25,19 +30,21 @@ def test_color_pickle():
 
     eq_(pickle.loads(pickle.dumps(c)), c)
 
+
 def test_envelope_pickle():
     e = mapnik.Box2d(100, 100, 200, 200)
 
     eq_(pickle.loads(pickle.dumps(e)), e)
 
+
 def test_parameters_pickle():
     params = mapnik.Parameters()
-    params.append(mapnik.Parameter('oh',str('yeah')))
+    params.append(mapnik.Parameter('oh', str('yeah')))
 
-    params2 = pickle.loads(pickle.dumps(params,pickle.HIGHEST_PROTOCOL))
+    params2 = pickle.loads(pickle.dumps(params, pickle.HIGHEST_PROTOCOL))
 
-    eq_(params[0][0],params2[0][0])
-    eq_(params[0][1],params2[0][1])
+    eq_(params[0][0], params2[0][0])
+    eq_(params[0][1], params2[0][1])
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/png_encoding_test.py b/test/python_tests/png_encoding_test.py
index 568edfd..8650607 100644
--- a/test/python_tests/png_encoding_test.py
+++ b/test/python_tests/png_encoding_test.py
@@ -1,9 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -13,91 +18,92 @@ def setup():
 if mapnik.has_png():
     tmp_dir = '/tmp/mapnik-png/'
     if not os.path.exists(tmp_dir):
-       os.makedirs(tmp_dir)
+        os.makedirs(tmp_dir)
 
     opts = [
-    'png32',
-    'png32:t=0',
-    'png8:m=o',
-    'png8:m=o:c=1',
-    'png8:m=o:t=0',
-    'png8:m=o:c=1:t=0',
-    'png8:m=o:t=1',
-    'png8:m=o:t=2',
-    'png8:m=h',
-    'png8:m=h:c=1',
-    'png8:m=h:t=0',
-    'png8:m=h:c=1:t=0',
-    'png8:m=h:t=1',
-    'png8:m=h:t=2',
-    'png32:e=miniz',
-    'png8:e=miniz'
+        'png32',
+        'png32:t=0',
+        'png8:m=o',
+        'png8:m=o:c=1',
+        'png8:m=o:t=0',
+        'png8:m=o:c=1:t=0',
+        'png8:m=o:t=1',
+        'png8:m=o:t=2',
+        'png8:m=h',
+        'png8:m=h:c=1',
+        'png8:m=h:t=0',
+        'png8:m=h:c=1:t=0',
+        'png8:m=h:t=1',
+        'png8:m=h:t=2',
+        'png32:e=miniz',
+        'png8:e=miniz'
     ]
 
     # Todo - use itertools.product
     #z_opts = range(1,9+1)
     #t_opts = range(0,2+1)
 
-    def gen_filepath(name,format):
-        return os.path.join('images/support/encoding-opts',name+'-'+format.replace(":","+")+'.png')
+    def gen_filepath(name, format):
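+        # expected image filename encodes the encoding options, with ':' replaced by '+'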
+        return os.path.join('images/support/encoding-opts',
+                            name + '-' + format.replace(":", "+") + '.png')
 
     generate = os.environ.get('UPDATE')
 
     def test_expected_encodings():
         # blank image
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         for opt in opts:
-            expected = gen_filepath('solid',opt)
-            actual = os.path.join(tmp_dir,os.path.basename(expected))
+            expected = gen_filepath('solid', opt)
+            actual = os.path.join(tmp_dir, os.path.basename(expected))
             if generate or not os.path.exists(expected):
-              print 'generating expected image %s' % expected
-              im.save(expected,opt)
+                print('generating expected image %s' % expected)
+                im.save(expected, opt)
             else:
-              im.save(actual,opt)
-              eq_(mapnik.Image.open(actual).tostring('png32'),
-                mapnik.Image.open(expected).tostring('png32'),
-                '%s (actual) not == to %s (expected)' % (actual,expected))
+                im.save(actual, opt)
+                eq_(mapnik.Image.open(actual).tostring('png32'),
+                    mapnik.Image.open(expected).tostring('png32'),
+                    '%s (actual) not == to %s (expected)' % (actual, expected))
 
         # solid image
         im.fill(mapnik.Color('green'))
         for opt in opts:
-            expected = gen_filepath('blank',opt)
-            actual = os.path.join(tmp_dir,os.path.basename(expected))
+            expected = gen_filepath('blank', opt)
+            actual = os.path.join(tmp_dir, os.path.basename(expected))
             if generate or not os.path.exists(expected):
-              print 'generating expected image %s' % expected
-              im.save(expected,opt)
+                print('generating expected image %s' % expected)
+                im.save(expected, opt)
             else:
-              im.save(actual,opt)
-              eq_(mapnik.Image.open(actual).tostring('png32'),
-                mapnik.Image.open(expected).tostring('png32'),
-                '%s (actual) not == to %s (expected)' % (actual,expected))
+                im.save(actual, opt)
+                eq_(mapnik.Image.open(actual).tostring('png32'),
+                    mapnik.Image.open(expected).tostring('png32'),
+                    '%s (actual) not == to %s (expected)' % (actual, expected))
 
         # aerial
         im = mapnik.Image.open('./images/support/transparency/aerial_rgba.png')
         for opt in opts:
-            expected = gen_filepath('aerial_rgba',opt)
-            actual = os.path.join(tmp_dir,os.path.basename(expected))
+            expected = gen_filepath('aerial_rgba', opt)
+            actual = os.path.join(tmp_dir, os.path.basename(expected))
             if generate or not os.path.exists(expected):
-              print 'generating expected image %s' % expected
-              im.save(expected,opt)
+                print('generating expected image %s' % expected)
+                im.save(expected, opt)
             else:
-              im.save(actual,opt)
-              eq_(mapnik.Image.open(actual).tostring('png32'),
-                mapnik.Image.open(expected).tostring('png32'),
-                '%s (actual) not == to %s (expected)' % (actual,expected))
+                im.save(actual, opt)
+                eq_(mapnik.Image.open(actual).tostring('png32'),
+                    mapnik.Image.open(expected).tostring('png32'),
+                    '%s (actual) not == to %s (expected)' % (actual, expected))
 
     def test_transparency_levels():
         # create partial transparency image
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         im.fill(mapnik.Color('rgba(255,255,255,.5)'))
         c2 = mapnik.Color('rgba(255,255,0,.2)')
         c3 = mapnik.Color('rgb(0,255,255)')
-        for y in range(0,im.height()/2):
-            for x in range(0,im.width()/2):
-                im.set_pixel(x,y,c2)
-        for y in range(im.height()/2,im.height()):
-            for x in range(im.width()/2,im.width()):
-                im.set_pixel(x,y,c3)
+        for y in range(0, int(im.height() / 2)):
+            for x in range(0, int(im.width() / 2)):
+                im.set_pixel(x, y, c2)
+        for y in range(int(im.height() / 2), im.height()):
+            for x in range(int(im.width() / 2), im.width()):
+                im.set_pixel(x, y, c3)
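+        # result: top-left quadrant semi-transparent yellow, bottom-right opaque
+        # cyan, remaining quadrants half-transparent white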
 
         t0 = tmp_dir + 'white0.png'
         t2 = tmp_dir + 'white2.png'
@@ -105,57 +111,67 @@ if mapnik.has_png():
 
         # octree
         format = 'png8:m=o:t=0'
-        im.save(t0,format)
+        im.save(t0, format)
         im_in = mapnik.Image.open(t0)
         t0_len = len(im_in.tostring(format))
-        eq_(t0_len,len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format)))
+        eq_(t0_len, len(mapnik.Image.open(
+            'images/support/transparency/white0.png').tostring(format)))
         format = 'png8:m=o:t=1'
-        im.save(t1,format)
+        im.save(t1, format)
         im_in = mapnik.Image.open(t1)
         t1_len = len(im_in.tostring(format))
-        eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format)))
+        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+            'images/support/transparency/white1.png').tostring(format)))
         format = 'png8:m=o:t=2'
-        im.save(t2,format)
+        im.save(t2, format)
         im_in = mapnik.Image.open(t2)
         t2_len = len(im_in.tostring(format))
-        eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format)))
+        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+            'images/support/transparency/white2.png').tostring(format)))
 
-        eq_(t0_len < t1_len < t2_len,True)
+        eq_(t0_len < t1_len < t2_len, True)
 
         # hextree
         format = 'png8:m=h:t=0'
-        im.save(t0,format)
+        im.save(t0, format)
         im_in = mapnik.Image.open(t0)
         t0_len = len(im_in.tostring(format))
-        eq_(t0_len,len(mapnik.Image.open('images/support/transparency/white0.png').tostring(format)))
+        eq_(t0_len, len(mapnik.Image.open(
+            'images/support/transparency/white0.png').tostring(format)))
         format = 'png8:m=h:t=1'
-        im.save(t1,format)
+        im.save(t1, format)
         im_in = mapnik.Image.open(t1)
         t1_len = len(im_in.tostring(format))
-        eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white1.png').tostring(format)))
+        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+            'images/support/transparency/white1.png').tostring(format)))
         format = 'png8:m=h:t=2'
-        im.save(t2,format)
+        im.save(t2, format)
         im_in = mapnik.Image.open(t2)
         t2_len = len(im_in.tostring(format))
-        eq_(len(im.tostring(format)),len(mapnik.Image.open('images/support/transparency/white2.png').tostring(format)))
+        eq_(len(im.tostring(format)), len(mapnik.Image.open(
+            'images/support/transparency/white2.png').tostring(format)))
 
-        eq_(t0_len < t1_len < t2_len,True)
+        eq_(t0_len < t1_len < t2_len, True)
 
     def test_transparency_levels_aerial():
         im = mapnik.Image.open('../data/images/12_654_1580.png')
-        im_in = mapnik.Image.open('./images/support/transparency/aerial_rgba.png')
-        eq_(len(im.tostring('png8')),len(im_in.tostring('png8')))
-        eq_(len(im.tostring('png32')),len(im_in.tostring('png32')))
-
-        im_in = mapnik.Image.open('./images/support/transparency/aerial_rgb.png')
-        eq_(len(im.tostring('png32')),len(im_in.tostring('png32')))
-        eq_(len(im.tostring('png32:t=0')),len(im_in.tostring('png32:t=0')))
+        im_in = mapnik.Image.open(
+            './images/support/transparency/aerial_rgba.png')
+        eq_(len(im.tostring('png8')), len(im_in.tostring('png8')))
+        eq_(len(im.tostring('png32')), len(im_in.tostring('png32')))
+
+        im_in = mapnik.Image.open(
+            './images/support/transparency/aerial_rgb.png')
+        eq_(len(im.tostring('png32')), len(im_in.tostring('png32')))
+        eq_(len(im.tostring('png32:t=0')), len(im_in.tostring('png32:t=0')))
         eq_(len(im.tostring('png32:t=0')) == len(im_in.tostring('png32')), False)
-        eq_(len(im.tostring('png8')),len(im_in.tostring('png8')))
-        eq_(len(im.tostring('png8:t=0')),len(im_in.tostring('png8:t=0')))
-        # unlike png32 paletted images without alpha will look the same even if no alpha is forced
+        eq_(len(im.tostring('png8')), len(im_in.tostring('png8')))
+        eq_(len(im.tostring('png8:t=0')), len(im_in.tostring('png8:t=0')))
+        # unlike png32 paletted images without alpha will look the same even if
+        # no alpha is forced
         eq_(len(im.tostring('png8:t=0')) == len(im_in.tostring('png8')), True)
-        eq_(len(im.tostring('png8:t=0:m=o')) == len(im_in.tostring('png8:m=o')), True)
+        eq_(len(im.tostring('png8:t=0:m=o')) ==
+            len(im_in.tostring('png8:m=o')), True)
 
     def test_9_colors_hextree():
         expected = './images/support/encoding-opts/png8-9cols.png'
diff --git a/test/python_tests/pngsuite_test.py b/test/python_tests/pngsuite_test.py
index 4c933eb..8ce517f 100644
--- a/test/python_tests/pngsuite_test.py
+++ b/test/python_tests/pngsuite_test.py
@@ -1,31 +1,41 @@
 #!/usr/bin/env python
 
 import os
-import mapnik
+
 from nose.tools import assert_raises
-from utilities import execution_path, run_all
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
 datadir = '../data/pngsuite'
 
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def assert_broken_file(fname):
     assert_raises(RuntimeError, lambda: mapnik.Image.open(fname))
 
+
 def assert_good_file(fname):
     assert mapnik.Image.open(fname)
 
+
 def get_pngs(good):
-    files = [ x for x in os.listdir(datadir) if x.endswith('.png') ]
-    return [ os.path.join(datadir, x) for x in files if good != x.startswith('x') ]
+    files = [x for x in os.listdir(datadir) if x.endswith('.png')]
+    return [os.path.join(datadir, x)
+            for x in files if good != x.startswith('x')]
+
 
 def test_good_pngs():
     for x in get_pngs(True):
         yield assert_good_file, x
 
+
 def test_broken_pngs():
     for x in get_pngs(False):
         yield assert_broken_file, x
diff --git a/test/python_tests/postgis_test.py b/test/python_tests/postgis_test.py
index 42e40cc..d9a6f10 100644
--- a/test/python_tests/postgis_test.py
+++ b/test/python_tests/postgis_test.py
@@ -1,32 +1,43 @@
 #!/usr/bin/env python
-
-from nose.tools import eq_,raises
 import atexit
-from utilities import execution_path, run_all
-from subprocess import Popen, PIPE
-import os, mapnik
+import os
+import sys
 import threading
+from subprocess import PIPE, Popen
+
+from nose.tools import eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+if PYTHON3:
+    long = int
 
 
 MAPNIK_TEST_DBNAME = 'mapnik-tmp-postgis-test-db'
 POSTGIS_TEMPLATE_DBNAME = 'template_postgis'
-SHAPEFILE = os.path.join(execution_path('.'),'../data/shp/world_merc.shp')
+SHAPEFILE = os.path.join(execution_path('.'), '../data/shp/world_merc.shp')
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
-def call(cmd,silent=False):
-    stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
+
+def call(cmd, silent=False):
+    stdin, stderr = Popen(cmd, shell=True, stdout=PIPE,
+                          stderr=PIPE).communicate()
     if not stderr:
         return stdin.strip()
-    elif not silent and 'error' in stderr.lower() \
-        or 'not found' in stderr.lower() \
-        or 'could not connect' in stderr.lower() \
-        or 'bad connection' in stderr.lower() \
-        or 'not recognized as an internal' in stderr.lower():
-        raise RuntimeError(stderr.strip())
+    msg = str(stderr).lower()
+    if not silent and 'error' in msg\
+       or 'not found' in msg or 'not recognized as an internal' in msg\
+       or 'bad connection' in msg or 'could not connect' in msg:
+        raise RuntimeError(msg.strip())
+
 
 def psql_can_connect():
     """Test ability to connect to a postgis template db with no options.
@@ -40,9 +51,10 @@ def psql_can_connect():
         call('psql %s -c "select postgis_version()"' % POSTGIS_TEMPLATE_DBNAME)
         return True
     except RuntimeError:
-        print 'Notice: skipping postgis tests (connection)'
+        print('Notice: skipping postgis tests (connection)')
         return False
 
+
 def shp2pgsql_on_path():
     """Test for presence of shp2pgsql on the user path.
 
@@ -52,9 +64,10 @@ def shp2pgsql_on_path():
         call('shp2pgsql')
         return True
     except RuntimeError:
-        print 'Notice: skipping postgis tests (shp2pgsql)'
+        print('Notice: skipping postgis tests (shp2pgsql)')
         return False
 
+
 def createdb_and_dropdb_on_path():
     """Test for presence of dropdb/createdb on user path.
 
@@ -65,7 +78,7 @@ def createdb_and_dropdb_on_path():
         call('dropdb --help')
         return True
     except RuntimeError:
-        print 'Notice: skipping postgis tests (createdb/dropdb)'
+        print('Notice: skipping postgis tests (createdb/dropdb)')
         return False
 
 insert_table_1 = """
@@ -196,23 +209,84 @@ INSERT INTO test12(name,geom) values ('MultiPolygonZM',GeomFromEWKT('SRID=4326;M
 
 
 def postgis_setup():
-    call('dropdb %s' % MAPNIK_TEST_DBNAME,silent=True)
-    call('createdb -T %s %s' % (POSTGIS_TEMPLATE_DBNAME,MAPNIK_TEST_DBNAME),silent=False)
-    call('shp2pgsql -s 3857 -g geom -W LATIN1 %s world_merc | psql -q %s' % (SHAPEFILE,MAPNIK_TEST_DBNAME), silent=True)
-    call('''psql -q %s -c "CREATE TABLE \"empty\" (key serial);SELECT AddGeometryColumn('','empty','geom','-1','GEOMETRY',4);"''' % MAPNIK_TEST_DBNAME,silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_1),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_2),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_3),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_4),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_5),silent=False)
-    call("""psql -q %s -c '%s'""" % (MAPNIK_TEST_DBNAME,insert_table_5b),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_6),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_7),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_8),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_9),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_10),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_11),silent=False)
-    call('''psql -q %s -c "%s"''' % (MAPNIK_TEST_DBNAME,insert_table_12),silent=False)
+    call('dropdb %s' % MAPNIK_TEST_DBNAME, silent=True)
+    call(
+        'createdb -T %s %s' %
+        (POSTGIS_TEMPLATE_DBNAME,
+         MAPNIK_TEST_DBNAME),
+        silent=False)
+    call('shp2pgsql -s 3857 -g geom -W LATIN1 %s world_merc | psql -q %s' %
+         (SHAPEFILE, MAPNIK_TEST_DBNAME), silent=True)
+    call(
+        '''psql -q %s -c "CREATE TABLE \"empty\" (key serial);SELECT AddGeometryColumn('','empty','geom','-1','GEOMETRY',4);"''' %
+        MAPNIK_TEST_DBNAME,
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_1),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_2),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_3),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_4),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_5),
+        silent=False)
+    call(
+        """psql -q %s -c '%s'""" %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_5b),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_6),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_7),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_8),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_9),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_10),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_11),
+        silent=False)
+    call(
+        '''psql -q %s -c "%s"''' %
+        (MAPNIK_TEST_DBNAME,
+         insert_table_12),
+        silent=False)
+
 
 def postgis_takedown():
     pass
@@ -228,60 +302,64 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
     postgis_setup()
 
     def test_feature():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc')
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc')
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['gid'],1)
-        eq_(feature['fips'],u'AC')
-        eq_(feature['iso2'],u'AG')
-        eq_(feature['iso3'],u'ATG')
-        eq_(feature['un'],28)
-        eq_(feature['name'],u'Antigua and Barbuda')
-        eq_(feature['area'],44)
-        eq_(feature['pop2005'],83039)
-        eq_(feature['region'],19)
-        eq_(feature['subregion'],29)
-        eq_(feature['lon'],-61.783)
-        eq_(feature['lat'],17.078)
+        eq_(feature['gid'], 1)
+        eq_(feature['fips'], u'AC')
+        eq_(feature['iso2'], u'AG')
+        eq_(feature['iso3'], u'ATG')
+        eq_(feature['un'], 28)
+        eq_(feature['name'], u'Antigua and Barbuda')
+        eq_(feature['area'], 44)
+        eq_(feature['pop2005'], 83039)
+        eq_(feature['region'], 19)
+        eq_(feature['subregion'], 29)
+        eq_(feature['lon'], -61.783)
+        eq_(feature['lat'], 17.078)
         meta = ds.describe()
-        eq_(meta['srid'],3857)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['encoding'],u'UTF8')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
+        eq_(meta['srid'], 3857)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['encoding'], u'UTF8')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
 
     def test_subquery():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(select * from world_merc) as w')
+        ds = mapnik.PostGIS(
+            dbname=MAPNIK_TEST_DBNAME,
+            table='(select * from world_merc) as w')
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['gid'],1)
-        eq_(feature['fips'],u'AC')
-        eq_(feature['iso2'],u'AG')
-        eq_(feature['iso3'],u'ATG')
-        eq_(feature['un'],28)
-        eq_(feature['name'],u'Antigua and Barbuda')
-        eq_(feature['area'],44)
-        eq_(feature['pop2005'],83039)
-        eq_(feature['region'],19)
-        eq_(feature['subregion'],29)
-        eq_(feature['lon'],-61.783)
-        eq_(feature['lat'],17.078)
+        eq_(feature['gid'], 1)
+        eq_(feature['fips'], u'AC')
+        eq_(feature['iso2'], u'AG')
+        eq_(feature['iso3'], u'ATG')
+        eq_(feature['un'], 28)
+        eq_(feature['name'], u'Antigua and Barbuda')
+        eq_(feature['area'], 44)
+        eq_(feature['pop2005'], 83039)
+        eq_(feature['region'], 19)
+        eq_(feature['subregion'], 29)
+        eq_(feature['lon'], -61.783)
+        eq_(feature['lat'], 17.078)
         meta = ds.describe()
-        eq_(meta['srid'],3857)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['encoding'],u'UTF8')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
-
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(select gid,geom,fips as _fips from world_merc) as w')
+        eq_(meta['srid'], 3857)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['encoding'], u'UTF8')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
+
+        ds = mapnik.PostGIS(
+            dbname=MAPNIK_TEST_DBNAME,
+            table='(select gid,geom,fips as _fips from world_merc) as w')
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['gid'],1)
-        eq_(feature['_fips'],u'AC')
-        eq_(len(feature),2)
+        eq_(feature['gid'], 1)
+        eq_(feature['_fips'], u'AC')
+        eq_(len(feature), 2)
         meta = ds.describe()
-        eq_(meta['srid'],3857)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['encoding'],u'UTF8')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Polygon)
+        eq_(meta['srid'], 3857)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['encoding'], u'UTF8')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Polygon)
 
     def test_bad_connection():
         try:
@@ -290,59 +368,59 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
                                 max_size=20,
                                 geometry_field='geom',
                                 user="rolethatdoesnotexist")
-        except Exception, e:
+        except Exception as e:
             assert 'role "rolethatdoesnotexist" does not exist' in str(e)
 
     def test_empty_db():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='empty')
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty')
         fs = ds.featureset()
         feature = None
         try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
+        eq_(feature, None)
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['encoding'],u'UTF8')
-        eq_(meta['geometry_type'],None)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['encoding'], u'UTF8')
+        eq_(meta['geometry_type'], None)
 
     def test_manual_srid():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,srid=99, table='empty')
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, srid=99, table='empty')
         fs = ds.featureset()
         feature = None
         try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
+        eq_(feature, None)
         meta = ds.describe()
-        eq_(meta['srid'],99)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['encoding'],u'UTF8')
-        eq_(meta['geometry_type'],None)
+        eq_(meta['srid'], 99)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['encoding'], u'UTF8')
+        eq_(meta['geometry_type'], None)
 
     def test_geometry_detection():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test',
                             geometry_field='geom')
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
 
         # will fail with postgis 2.0 because it automatically adds a geometry_columns entry
-        #ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test',
+        # ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test',
         #                   geometry_field='geom',
         #                    row_limit=1)
-        #eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
+        # eq_(ds.describe()['geometry_type'],mapnik.DataGeometryType.Point)
 
     @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='empty')
-        eq_(len(ds.fields()),1)
-        eq_(ds.fields(),['key'])
-        eq_(ds.field_types(),['int'])
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='empty')
+        eq_(len(ds.fields()), 1)
+        eq_(ds.fields(), ['key'])
+        eq_(ds.field_types(), ['int'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
@@ -351,270 +429,270 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
         ds.features(query)
 
     def test_auto_detection_of_unique_feature_id_32_bit():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test2',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test2',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        eq_(fs.next()['manual_id'],0)
-        eq_(fs.next()['manual_id'],1)
-        eq_(fs.next()['manual_id'],1000)
-        eq_(fs.next()['manual_id'],-1000)
-        eq_(fs.next()['manual_id'],2147483647)
-        eq_(fs.next()['manual_id'],-2147483648)
+        eq_(fs.next()['manual_id'], 0)
+        eq_(fs.next()['manual_id'], 1)
+        eq_(fs.next()['manual_id'], 1000)
+        eq_(fs.next()['manual_id'], -1000)
+        eq_(fs.next()['manual_id'], 2147483647)
+        eq_(fs.next()['manual_id'], -2147483648)
 
         fs = ds.featureset()
-        eq_(fs.next().id(),0)
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),1000)
-        eq_(fs.next().id(),-1000)
-        eq_(fs.next().id(),2147483647)
-        eq_(fs.next().id(),-2147483648)
+        eq_(fs.next().id(), 0)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 1000)
+        eq_(fs.next().id(), -1000)
+        eq_(fs.next().id(), 2147483647)
+        eq_(fs.next().id(), -2147483648)
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),u'manual_id')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), u'manual_id')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_auto_detection_will_fail_since_no_primary_key():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test3',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3',
                             geometry_field='geom',
                             autodetect_key_field=False)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat['manual_id'],0)
+        eq_(feat['manual_id'], 0)
         # will fail: https://github.com/mapnik/mapnik/issues/895
-        #eq_(feat['non_id'],9223372036854775807)
-        eq_(fs.next()['manual_id'],1)
-        eq_(fs.next()['manual_id'],1000)
-        eq_(fs.next()['manual_id'],-1000)
-        eq_(fs.next()['manual_id'],2147483647)
-        eq_(fs.next()['manual_id'],-2147483648)
+        # eq_(feat['non_id'],9223372036854775807)
+        eq_(fs.next()['manual_id'], 1)
+        eq_(fs.next()['manual_id'], 1000)
+        eq_(fs.next()['manual_id'], -1000)
+        eq_(fs.next()['manual_id'], 2147483647)
+        eq_(fs.next()['manual_id'], -2147483648)
 
         # since no valid primary key will be detected, the fallback
         # is an auto-incrementing counter
         fs = ds.featureset()
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),2)
-        eq_(fs.next().id(),3)
-        eq_(fs.next().id(),4)
-        eq_(fs.next().id(),5)
-        eq_(fs.next().id(),6)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 2)
+        eq_(fs.next().id(), 3)
+        eq_(fs.next().id(), 4)
+        eq_(fs.next().id(), 5)
+        eq_(fs.next().id(), 6)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     @raises(RuntimeError)
     def test_auto_detection_will_fail_and_should_throw():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test3',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test3',
                             geometry_field='geom',
                             autodetect_key_field=True)
         ds.featureset()
 
     def test_auto_detection_of_unique_feature_id_64_bit():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test4',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        eq_(fs.next()['manual_id'],0)
-        eq_(fs.next()['manual_id'],1)
-        eq_(fs.next()['manual_id'],1000)
-        eq_(fs.next()['manual_id'],-1000)
-        eq_(fs.next()['manual_id'],2147483647)
-        eq_(fs.next()['manual_id'],-2147483648)
+        eq_(fs.next()['manual_id'], 0)
+        eq_(fs.next()['manual_id'], 1)
+        eq_(fs.next()['manual_id'], 1000)
+        eq_(fs.next()['manual_id'], -1000)
+        eq_(fs.next()['manual_id'], 2147483647)
+        eq_(fs.next()['manual_id'], -2147483648)
 
         fs = ds.featureset()
-        eq_(fs.next().id(),0)
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),1000)
-        eq_(fs.next().id(),-1000)
-        eq_(fs.next().id(),2147483647)
-        eq_(fs.next().id(),-2147483648)
+        eq_(fs.next().id(), 0)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 1000)
+        eq_(fs.next().id(), -1000)
+        eq_(fs.next().id(), 2147483647)
+        eq_(fs.next().id(), -2147483648)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),u'manual_id')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), u'manual_id')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_disabled_auto_detection_and_subquery():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, 'a'::varchar as name from test2) as t''',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, 'a'::varchar as name from test2) as t''',
                             geometry_field='geom',
                             autodetect_key_field=False)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 1)
+        eq_(feat['name'], 'a')
         feat = fs.next()
-        eq_(feat.id(),2)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 2)
+        eq_(feat['name'], 'a')
         feat = fs.next()
-        eq_(feat.id(),3)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 3)
+        eq_(feat['name'], 'a')
         feat = fs.next()
-        eq_(feat.id(),4)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 4)
+        eq_(feat['name'], 'a')
         feat = fs.next()
-        eq_(feat.id(),5)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 5)
+        eq_(feat['name'], 'a')
         feat = fs.next()
-        eq_(feat.id(),6)
-        eq_(feat['name'],'a')
+        eq_(feat.id(), 6)
+        eq_(feat['name'], 'a')
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_auto_detection_and_subquery_including_key():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, manual_id from test2) as t''',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id from test2) as t''',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        eq_(fs.next()['manual_id'],0)
-        eq_(fs.next()['manual_id'],1)
-        eq_(fs.next()['manual_id'],1000)
-        eq_(fs.next()['manual_id'],-1000)
-        eq_(fs.next()['manual_id'],2147483647)
-        eq_(fs.next()['manual_id'],-2147483648)
+        eq_(fs.next()['manual_id'], 0)
+        eq_(fs.next()['manual_id'], 1)
+        eq_(fs.next()['manual_id'], 1000)
+        eq_(fs.next()['manual_id'], -1000)
+        eq_(fs.next()['manual_id'], 2147483647)
+        eq_(fs.next()['manual_id'], -2147483648)
 
         fs = ds.featureset()
-        eq_(fs.next().id(),0)
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),1000)
-        eq_(fs.next().id(),-1000)
-        eq_(fs.next().id(),2147483647)
-        eq_(fs.next().id(),-2147483648)
+        eq_(fs.next().id(), 0)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 1000)
+        eq_(fs.next().id(), -1000)
+        eq_(fs.next().id(), 2147483647)
+        eq_(fs.next().id(), -2147483648)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),u'manual_id')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), u'manual_id')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     @raises(RuntimeError)
     def test_auto_detection_of_invalid_numeric_primary_key():
-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select geom, manual_id::numeric from test2) as t''',
-                            geometry_field='geom',
-                            autodetect_key_field=True)
+        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select geom, manual_id::numeric from test2) as t''',
+                       geometry_field='geom',
+                       autodetect_key_field=True)
 
     @raises(RuntimeError)
     def test_auto_detection_of_invalid_multiple_keys():
-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''test6''',
-                            geometry_field='geom',
-                            autodetect_key_field=True)
+        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''test6''',
+                       geometry_field='geom',
+                       autodetect_key_field=True)
 
     @raises(RuntimeError)
     def test_auto_detection_of_invalid_multiple_keys_subquery():
-        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''(select first_id,second_id,geom from test6) as t''',
-                            geometry_field='geom',
-                            autodetect_key_field=True)
+        mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''(select first_id,second_id,geom from test6) as t''',
+                       geometry_field='geom',
+                       autodetect_key_field=True)
 
     def test_manually_specified_feature_id_field():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test4',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test4',
                             geometry_field='geom',
                             key_field='manual_id',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        eq_(fs.next()['manual_id'],0)
-        eq_(fs.next()['manual_id'],1)
-        eq_(fs.next()['manual_id'],1000)
-        eq_(fs.next()['manual_id'],-1000)
-        eq_(fs.next()['manual_id'],2147483647)
-        eq_(fs.next()['manual_id'],-2147483648)
+        eq_(fs.next()['manual_id'], 0)
+        eq_(fs.next()['manual_id'], 1)
+        eq_(fs.next()['manual_id'], 1000)
+        eq_(fs.next()['manual_id'], -1000)
+        eq_(fs.next()['manual_id'], 2147483647)
+        eq_(fs.next()['manual_id'], -2147483648)
 
         fs = ds.featureset()
-        eq_(fs.next().id(),0)
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),1000)
-        eq_(fs.next().id(),-1000)
-        eq_(fs.next().id(),2147483647)
-        eq_(fs.next().id(),-2147483648)
+        eq_(fs.next().id(), 0)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 1000)
+        eq_(fs.next().id(), -1000)
+        eq_(fs.next().id(), 2147483647)
+        eq_(fs.next().id(), -2147483648)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),u'manual_id')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), u'manual_id')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_numeric_type_feature_id_field():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test5',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test5',
                             geometry_field='geom',
                             autodetect_key_field=False)
         fs = ds.featureset()
-        eq_(fs.next()['manual_id'],-1)
-        eq_(fs.next()['manual_id'],1)
+        eq_(fs.next()['manual_id'], -1)
+        eq_(fs.next()['manual_id'], 1)
 
         fs = ds.featureset()
-        eq_(fs.next().id(),1)
-        eq_(fs.next().id(),2)
+        eq_(fs.next().id(), 1)
+        eq_(fs.next().id(), 2)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_querying_table_with_mixed_case():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='"tableWithMixedCase"',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='"tableWithMixedCase"',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),u'gid')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), u'gid')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_querying_subquery_with_mixed_case():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(SeLeCt * FrOm "tableWithMixedCase") as MixedCaseQuery',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(SeLeCt * FrOm "tableWithMixedCase") as MixedCaseQuery',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),u'gid')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), u'gid')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_bbox_token_in_subquery1():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
            (SeLeCt * FrOm "tableWithMixedCase" where geom && !bbox! ) as MixedCaseQuery''',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),u'gid')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), u'gid')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_bbox_token_in_subquery2():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
            (SeLeCt * FrOm "tableWithMixedCase" where ST_Intersects(geom,!bbox!) ) as MixedCaseQuery''',
                             geometry_field='geom',
                             autodetect_key_field=True)
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),u'gid')
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), u'gid')
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
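+
+    # Both !bbox! tests above rely on the PostGIS plugin substituting the
+    # extent of the current query for the token; a minimal sketch of driving
+    # that extent explicitly (same datasource ds, hypothetical bounds):
+    #
+    #   query = mapnik.Query(mapnik.Box2d(-180, -90, 180, 90))
+    #   for fld in ds.fields():
+    #       query.add_property_name(fld)
+    #   fs = ds.features(query)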
 
     def test_empty_geom():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test7',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test7',
                             geometry_field='geom')
         fs = ds.featureset()
-        eq_(fs.next()['gid'],1)
+        eq_(fs.next()['gid'], 1)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
 
     def create_ds():
         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
@@ -622,12 +700,12 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
                             max_size=20,
                             geometry_field='geom')
         fs = ds.all_features()
-        eq_(len(fs),8)
+        eq_(len(fs), 8)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
 
     def test_threaded_create(NUM_THREADS=100):
         # run one to start before thread loop
@@ -639,8 +717,8 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
             t = threading.Thread(target=create_ds)
             t.start()
             t.join()
-            runs +=1
-        eq_(runs,NUM_THREADS)
+            runs += 1
+        eq_(runs, NUM_THREADS)
 
     def create_ds_and_error():
         try:
@@ -648,8 +726,8 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
                                 table='asdfasdfasdfasdfasdf',
                                 max_size=20)
             ds.all_features()
-        except Exception, e:
-            eq_('in executeQuery' in str(e),True)
+        except Exception as e:
+            eq_('in executeQuery' in str(e), True)
 
     def test_threaded_create2(NUM_THREADS=10):
         for i in range(NUM_THREADS):
@@ -661,23 +739,23 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
                             table='test8',
                             geometry_field='geom')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['gid','int_field'])
-        eq_(ds.field_types(),['int','int'])
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['gid', 'int_field'])
+        eq_(ds.field_types(), ['int', 'int'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat['gid'],1)
-        eq_(feat['int_field'],2147483648)
+        eq_(feat.id(), 1)
+        eq_(feat['gid'], 1)
+        eq_(feat['int_field'], 2147483648)
         feat = fs.next()
-        eq_(feat.id(),2)
-        eq_(feat['gid'],2)
-        eq_(feat['int_field'],922337203685477580)
+        eq_(feat.id(), 2)
+        eq_(feat['gid'], 2)
+        eq_(feat['int_field'], 922337203685477580)
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_persist_connection_off():
         # NOTE: max_size should be equal to or greater than
@@ -686,195 +764,202 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
         #       default is 20, so we use that value. See
         #       http://github.com/mapnik/mapnik/issues/863
         max_size = 20
-        for i in range(0, max_size+1):
+        for i in range(0, max_size + 1):
             ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
-                              max_size=1, # unused
-                              persist_connection=False,
-                              table='(select ST_MakePoint(0,0) as g, pg_backend_pid() as p, 1 as v) as w',
-                              geometry_field='g')
+                                max_size=1,  # unused
+                                persist_connection=False,
+                                table='(select ST_MakePoint(0,0) as g, pg_backend_pid() as p, 1 as v) as w',
+                                geometry_field='g')
             fs = ds.featureset()
             eq_(fs.next()['v'], 1)
 
             meta = ds.describe()
-            eq_(meta['srid'],-1)
-            eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+            eq_(meta['srid'], -1)
+            eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     def test_null_comparision():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test9',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test9',
                             geometry_field='geom')
         fs = ds.featureset()
         feat = fs.next()
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
-
-        eq_(feat['gid'],1)
-        eq_(feat['name'],'name')
-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] = ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = null").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = true").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = false").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != ''").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != null").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != true").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != false").evaluate(feat),True)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+
+        eq_(feat['gid'], 1)
+        eq_(feat['name'], 'name')
+        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] = ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = null").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != ''").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != null").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
 
         feat = fs.next()
-        eq_(feat['gid'],2)
-        eq_(feat['name'],'')
-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = ''").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] = null").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = true").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = false").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != null").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != true").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != false").evaluate(feat),True)
+        eq_(feat['gid'], 2)
+        eq_(feat['name'], '')
+        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = ''").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] = null").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != null").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
 
         feat = fs.next()
-        eq_(feat['gid'],3)
-        eq_(feat['name'],None) # null
-        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = null").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] = true").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] = false").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat),True)
+        eq_(feat['gid'], 3)
+        eq_(feat['name'], None)  # null
+        eq_(mapnik.Expression("[name] = 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = null").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] = true").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] = false").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != 'name'").evaluate(feat), True)
         # https://github.com/mapnik/mapnik/issues/1859
-        eq_(mapnik.Expression("[name] != ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != null").evaluate(feat),False)
-        eq_(mapnik.Expression("[name] != true").evaluate(feat),True)
-        eq_(mapnik.Expression("[name] != false").evaluate(feat),True)
+        eq_(mapnik.Expression("[name] != ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != null").evaluate(feat), False)
+        eq_(mapnik.Expression("[name] != true").evaluate(feat), True)
+        eq_(mapnik.Expression("[name] != false").evaluate(feat), True)
 
     def test_null_comparision2():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='test10',
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='test10',
                             geometry_field='geom')
         fs = ds.featureset()
         feat = fs.next()
 
         meta = ds.describe()
-        eq_(meta['srid'],-1)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
-
-        eq_(feat['gid'],1)
-        eq_(feat['bool_field'],True)
-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),True) # in 2.1.x used to be False
-        eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),True) # in 2.1.x used to be False
-        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),True)
+        eq_(meta['srid'], -1)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
+
+        eq_(feat['gid'], 1)
+        eq_(feat['bool_field'], True)
+        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] != ''").evaluate(
+            feat), True)  # in 2.1.x used to be False
+        eq_(mapnik.Expression("[bool_field] != null").evaluate(
+            feat), True)  # in 2.1.x used to be False
+        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat), True)
 
         feat = fs.next()
-        eq_(feat['gid'],2)
-        eq_(feat['bool_field'],False)
-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),True) # in 2.1.x used to be False
-        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),False)
+        eq_(feat['gid'], 2)
+        eq_(feat['bool_field'], False)
+        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] != null").evaluate(
+            feat), True)  # in 2.1.x used to be False
+        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat), False)
 
         feat = fs.next()
-        eq_(feat['gid'],3)
-        eq_(feat['bool_field'],None) # null
-        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat),True)
-        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(feat),True)  # in 2.1.x used to be False
+        eq_(feat['gid'], 3)
+        eq_(feat['bool_field'], None)  # null
+        eq_(mapnik.Expression("[bool_field] = 'name'").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = null").evaluate(feat), True)
+        eq_(mapnik.Expression("[bool_field] = true").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] = false").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] != 'name'").evaluate(
+            feat), True)  # in 2.1.x used to be False
         # https://github.com/mapnik/mapnik/issues/1859
-        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] != null").evaluate(feat),False)
-        eq_(mapnik.Expression("[bool_field] != true").evaluate(feat),True) # in 2.1.x used to be False
-        eq_(mapnik.Expression("[bool_field] != false").evaluate(feat),True) # in 2.1.x used to be False
+        eq_(mapnik.Expression("[bool_field] != ''").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] != null").evaluate(feat), False)
+        eq_(mapnik.Expression("[bool_field] != true").evaluate(
+            feat), True)  # in 2.1.x used to be False
+        eq_(mapnik.Expression("[bool_field] != false").evaluate(
+            feat), True)  # in 2.1.x used to be False
 
     # https://github.com/mapnik/mapnik/issues/1816
     def test_exception_message_reporting():
         try:
-            mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='doesnotexist')
-        except Exception, e:
-            eq_(e.message != 'unidentifiable C++ exception', True)
+            mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='doesnotexist')
+        except Exception as e:
+            eq_(str(e) != 'unidentifiable C++ exception', True)
 
     def test_null_id_field():
-        opts = {'type':'postgis',
-                'dbname':MAPNIK_TEST_DBNAME,
-                'geometry_field':'geom',
-                'table':"(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"}
+        opts = {'type': 'postgis',
+                'dbname': MAPNIK_TEST_DBNAME,
+                'geometry_field': 'geom',
+                'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"}
         ds = mapnik.Datasource(**opts)
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.id(),1L)
-        eq_(feat['osm_id'],None)
+        eq_(feat.id(), long(1))
+        eq_(feat['osm_id'], None)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
     @raises(StopIteration)
     def test_null_key_field():
-        opts = {'type':'postgis',
+        opts = {'type': 'postgis',
                 "key_field": 'osm_id',
-                'dbname':MAPNIK_TEST_DBNAME,
-                'geometry_field':'geom',
-                'table':"(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"}
+                'dbname': MAPNIK_TEST_DBNAME,
+                'geometry_field': 'geom',
+                'table': "(select null::bigint as osm_id, GeomFromEWKT('SRID=4326;POINT(0 0)') as geom) as tmp"}
         ds = mapnik.Datasource(**opts)
         fs = ds.featureset()
-        fs.next() ## should throw since key_field is null: StopIteration: No more features.
+        # should throw since key_field is null: StopIteration: No more
+        # features.
+        fs.next()
 
     def test_psql_error_should_not_break_connection_pool():
         # Bad request, will trigger an error when returning result
-        ds_bad = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table="""(SELECT geom as geom,label::int from test11) as failure_table""",
-                            max_async_connection=5,geometry_field='geom',srid=4326)
+        ds_bad = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table="""(SELECT geom as geom,label::int from test11) as failure_table""",
+                                max_async_connection=5, geometry_field='geom', srid=4326)
 
         # Good request
-        ds_good = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table="test",
-                            max_async_connection=5,geometry_field='geom',srid=4326)
+        ds_good = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table="test",
+                                 max_async_connection=5, geometry_field='geom', srid=4326)
 
         # This will/should trigger a PSQL error
         failed = False
         try:
             fs = ds_bad.featureset()
-            for feature in fs:
+            for feature in fs.features:
                 pass
-        except RuntimeError, e:
+        except RuntimeError as e:
             assert 'invalid input syntax for integer' in str(e)
             failed = True
 
-        eq_(failed,True)
+        eq_(failed, True)
 
         # Should be ok
         fs = ds_good.featureset()
         count = 0
-        for feature in fs:
+        for feature in fs.features:
             count += 1
-        eq_(count,8)
-
+        eq_(count, 8)
 
     def test_psql_error_should_give_back_connections_opened_for_lower_layers_to_the_pool():
-        map1 = mapnik.Map(600,300)
+        map1 = mapnik.Map(600, 300)
         s = mapnik.Style()
         r = mapnik.Rule()
         r.symbols.append(mapnik.PolygonSymbolizer())
         s.rules.append(r)
-        map1.append_style('style',s)
+        map1.append_style('style', s)
 
         # This layer will fail after a while
         buggy_s = mapnik.Style()
@@ -882,256 +967,263 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
         buggy_r.symbols.append(mapnik.PolygonSymbolizer())
         buggy_r.filter = mapnik.Filter("[fips] = 'FR'")
         buggy_s.rules.append(buggy_r)
-        map1.append_style('style for buggy layer',buggy_s)
+        map1.append_style('style for buggy layer', buggy_s)
         buggy_layer = mapnik.Layer('this layer is buggy at runtime')
         # We ensure the query will be long enough
-        buggy_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='(SELECT geom as geom, pg_sleep(0.1), fips::int from world_merc) as failure_tabl',
-            max_async_connection=2, max_size=2,asynchronous_request = True, geometry_field='geom')
+        buggy_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='(SELECT geom as geom, pg_sleep(0.1), fips::int from world_merc) as failure_tabl',
+                                                max_async_connection=2, max_size=2, asynchronous_request=True, geometry_field='geom')
         buggy_layer.styles.append('style for buggy layer')
 
-        # The query for this layer will be sent, then the previous layer will raise an exception before results are read
-        forced_canceled_layer = mapnik.Layer('this layer will be canceled when an exception stops map rendering')
-        forced_canceled_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc',
-            max_async_connection=2, max_size=2, asynchronous_request = True, geometry_field='geom')
+        # The query for this layer will be sent, then the previous layer will
+        # raise an exception before results are read
+        forced_canceled_layer = mapnik.Layer(
+            'this layer will be canceled when an exception stops map rendering')
+        forced_canceled_layer.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc',
+                                                          max_async_connection=2, max_size=2, asynchronous_request=True, geometry_field='geom')
         forced_canceled_layer.styles.append('style')
 
         map1.layers.append(buggy_layer)
         map1.layers.append(forced_canceled_layer)
         map1.zoom_all()
-        map2 = mapnik.Map(600,300)
+        map2 = mapnik.Map(600, 300)
         map2.background = mapnik.Color('steelblue')
         s = mapnik.Style()
         r = mapnik.Rule()
         r.symbols.append(mapnik.LineSymbolizer())
         r.symbols.append(mapnik.LineSymbolizer())
         s.rules.append(r)
-        map2.append_style('style',s)
+        map2.append_style('style', s)
         layer1 = mapnik.Layer('layer1')
-        layer1.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='world_merc',
-            max_async_connection=2, max_size=2, asynchronous_request = True, geometry_field='geom')
+        layer1.datasource = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='world_merc',
+                                           max_async_connection=2, max_size=2, asynchronous_request=True, geometry_field='geom')
         layer1.styles.append('style')
         map2.layers.append(layer1)
         map2.zoom_all()
 
         # We expect this to trigger a PSQL error
         try:
-            mapnik.render_to_file(map1,'/tmp/mapnik-postgis-test-map1.png', 'png')
+            mapnik.render_to_file(
+                map1, '/tmp/mapnik-postgis-test-map1.png', 'png')
             # Test must fail if error was not raised just above
-            eq_(False,True)
-        except RuntimeError, e:
+            eq_(False, True)
+        except RuntimeError as e:
             assert 'invalid input syntax for integer' in str(e)
             pass
         # This used to raise an exception before correction of issue 2042
-        mapnik.render_to_file(map2,'/tmp/mapnik-postgis-test-map2.png', 'png')
+        mapnik.render_to_file(map2, '/tmp/mapnik-postgis-test-map2.png', 'png')
 
     def test_handling_of_zm_dimensions():
         ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,
                             table='(select gid,ST_CoordDim(geom) as dim,name,geom from test12) as tmp',
                             geometry_field='geom')
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['gid', 'dim', 'name'])
-        eq_(ds.field_types(),['int', 'int', 'str'])
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['gid', 'dim', 'name'])
+        eq_(ds.field_types(), ['int', 'int', 'str'])
         fs = ds.featureset()
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),None)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), None)
         # Note: this is incorrect because we only check the first couple of geoms
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Point)
 
         # Point (2d)
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat['gid'],1)
-        eq_(feat['dim'],2)
-        eq_(feat['name'],'Point')
-        eq_(feat.geometry.to_wkt(),'POINT(0 0)')
+        eq_(feat.id(), 1)
+        eq_(feat['gid'], 1)
+        eq_(feat['dim'], 2)
+        eq_(feat['name'], 'Point')
+        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
 
         # PointZ
         feat = fs.next()
-        eq_(feat.id(),2)
-        eq_(feat['gid'],2)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'PointZ')
-        eq_(feat.geometry.to_wkt(),'POINT(0 0)')
+        eq_(feat.id(), 2)
+        eq_(feat['gid'], 2)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'PointZ')
+        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
 
         # PointM
         feat = fs.next()
-        eq_(feat.id(),3)
-        eq_(feat['gid'],3)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'PointM')
-        eq_(feat.geometry.to_wkt(),'POINT(0 0)')
+        eq_(feat.id(), 3)
+        eq_(feat['gid'], 3)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'PointM')
+        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
 
         # PointZM
         feat = fs.next()
-        eq_(feat.id(),4)
-        eq_(feat['gid'],4)
-        eq_(feat['dim'],4)
-        eq_(feat['name'],'PointZM')
+        eq_(feat.id(), 4)
+        eq_(feat['gid'], 4)
+        eq_(feat['dim'], 4)
+        eq_(feat['name'], 'PointZM')
 
-        eq_(feat.geometry.to_wkt(),'POINT(0 0)')
+        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
         # MultiPoint
         feat = fs.next()
-        eq_(feat.id(),5)
-        eq_(feat['gid'],5)
-        eq_(feat['dim'],2)
-        eq_(feat['name'],'MultiPoint')
-        eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)')
+        eq_(feat.id(), 5)
+        eq_(feat['gid'], 5)
+        eq_(feat['dim'], 2)
+        eq_(feat['name'], 'MultiPoint')
+        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
 
         # MultiPointZ
         feat = fs.next()
-        eq_(feat.id(),6)
-        eq_(feat['gid'],6)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'MultiPointZ')
-        eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)')
+        eq_(feat.id(), 6)
+        eq_(feat['gid'], 6)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'MultiPointZ')
+        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
 
         # MultiPointM
         feat = fs.next()
-        eq_(feat.id(),7)
-        eq_(feat['gid'],7)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'MultiPointM')
-        eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)')
+        eq_(feat.id(), 7)
+        eq_(feat['gid'], 7)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'MultiPointM')
+        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
 
         # MultiPointZM
         feat = fs.next()
-        eq_(feat.id(),8)
-        eq_(feat['gid'],8)
-        eq_(feat['dim'],4)
-        eq_(feat['name'],'MultiPointZM')
-        eq_(feat.geometry.to_wkt(),'MULTIPOINT(0 0,1 1)')
+        eq_(feat.id(), 8)
+        eq_(feat['gid'], 8)
+        eq_(feat['dim'], 4)
+        eq_(feat['name'], 'MultiPointZM')
+        eq_(feat.geometry.to_wkt(), 'MULTIPOINT(0 0,1 1)')
 
         # LineString
         feat = fs.next()
-        eq_(feat.id(),9)
-        eq_(feat['gid'],9)
-        eq_(feat['dim'],2)
-        eq_(feat['name'],'LineString')
-        eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)')
+        eq_(feat.id(), 9)
+        eq_(feat['gid'], 9)
+        eq_(feat['dim'], 2)
+        eq_(feat['name'], 'LineString')
+        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
 
         # LineStringZ
         feat = fs.next()
-        eq_(feat.id(),10)
-        eq_(feat['gid'],10)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'LineStringZ')
-        eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)')
+        eq_(feat.id(), 10)
+        eq_(feat['gid'], 10)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'LineStringZ')
+        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
 
         # LineStringM
         feat = fs.next()
-        eq_(feat.id(),11)
-        eq_(feat['gid'],11)
-        eq_(feat['dim'],3)
-        eq_(feat['name'],'LineStringM')
-        eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)')
+        eq_(feat.id(), 11)
+        eq_(feat['gid'], 11)
+        eq_(feat['dim'], 3)
+        eq_(feat['name'], 'LineStringM')
+        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
 
         # LineStringZM
         feat = fs.next()
-        eq_(feat.id(),12)
-        eq_(feat['gid'],12)
-        eq_(feat['dim'],4)
-        eq_(feat['name'],'LineStringZM')
-        eq_(feat.geometry.to_wkt(),'LINESTRING(0 0,1 1)')
+        eq_(feat.id(), 12)
+        eq_(feat['gid'], 12)
+        eq_(feat['dim'], 4)
+        eq_(feat['name'], 'LineStringZM')
+        eq_(feat.geometry.to_wkt(), 'LINESTRING(0 0,1 1)')
 
         # Polygon
         feat = fs.next()
-        eq_(feat.id(),13)
-        eq_(feat['gid'],13)
-        eq_(feat['name'],'Polygon')
-        eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))')
+        eq_(feat.id(), 13)
+        eq_(feat['gid'], 13)
+        eq_(feat['name'], 'Polygon')
+        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
 
         # PolygonZ
         feat = fs.next()
-        eq_(feat.id(),14)
-        eq_(feat['gid'],14)
-        eq_(feat['name'],'PolygonZ')
-        eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))')
+        eq_(feat.id(), 14)
+        eq_(feat['gid'], 14)
+        eq_(feat['name'], 'PolygonZ')
+        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
 
         # PolygonM
         feat = fs.next()
-        eq_(feat.id(),15)
-        eq_(feat['gid'],15)
-        eq_(feat['name'],'PolygonM')
-        eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))')
+        eq_(feat.id(), 15)
+        eq_(feat['gid'], 15)
+        eq_(feat['name'], 'PolygonM')
+        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
 
         # PolygonZM
         feat = fs.next()
-        eq_(feat.id(),16)
-        eq_(feat['gid'],16)
-        eq_(feat['name'],'PolygonZM')
-        eq_(feat.geometry.to_wkt(),'POLYGON((0 0,1 1,2 2,0 0))')
+        eq_(feat.id(), 16)
+        eq_(feat['gid'], 16)
+        eq_(feat['name'], 'PolygonZM')
+        eq_(feat.geometry.to_wkt(), 'POLYGON((0 0,1 1,2 2,0 0))')
 
         # MultiLineString
         feat = fs.next()
-        eq_(feat.id(),17)
-        eq_(feat['gid'],17)
-        eq_(feat['name'],'MultiLineString')
-        eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
+        eq_(feat.id(), 17)
+        eq_(feat['gid'], 17)
+        eq_(feat['name'], 'MultiLineString')
+        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
 
         # MultiLineStringZ
         feat = fs.next()
-        eq_(feat.id(),18)
-        eq_(feat['gid'],18)
-        eq_(feat['name'],'MultiLineStringZ')
-        eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
+        eq_(feat.id(), 18)
+        eq_(feat['gid'], 18)
+        eq_(feat['name'], 'MultiLineStringZ')
+        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
 
         # MultiLineStringM
         feat = fs.next()
-        eq_(feat.id(),19)
-        eq_(feat['gid'],19)
-        eq_(feat['name'],'MultiLineStringM')
-        eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
+        eq_(feat.id(), 19)
+        eq_(feat['gid'], 19)
+        eq_(feat['name'], 'MultiLineStringM')
+        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
 
         # MultiLineStringZM
         feat = fs.next()
-        eq_(feat.id(),20)
-        eq_(feat['gid'],20)
-        eq_(feat['name'],'MultiLineStringZM')
-        eq_(feat.geometry.to_wkt(),'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
+        eq_(feat.id(), 20)
+        eq_(feat['gid'], 20)
+        eq_(feat['name'], 'MultiLineStringZM')
+        eq_(feat.geometry.to_wkt(), 'MULTILINESTRING((0 0,1 1),(2 2,3 3))')
 
         # MultiPolygon
         feat = fs.next()
-        eq_(feat.id(),21)
-        eq_(feat['gid'],21)
-        eq_(feat['name'],'MultiPolygon')
-        eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
+        eq_(feat.id(), 21)
+        eq_(feat['gid'], 21)
+        eq_(feat['name'], 'MultiPolygon')
+        eq_(feat.geometry.to_wkt(),
+            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
 
         # MultiPolygonZ
         feat = fs.next()
-        eq_(feat.id(),22)
-        eq_(feat['gid'],22)
-        eq_(feat['name'],'MultiPolygonZ')
-        eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
+        eq_(feat.id(), 22)
+        eq_(feat['gid'], 22)
+        eq_(feat['name'], 'MultiPolygonZ')
+        eq_(feat.geometry.to_wkt(),
+            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
 
         # MultiPolygonM
         feat = fs.next()
-        eq_(feat.id(),23)
-        eq_(feat['gid'],23)
-        eq_(feat['name'],'MultiPolygonM')
-        eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
+        eq_(feat.id(), 23)
+        eq_(feat['gid'], 23)
+        eq_(feat['name'], 'MultiPolygonM')
+        eq_(feat.geometry.to_wkt(),
+            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
 
         # MultiPolygonZM
         feat = fs.next()
-        eq_(feat.id(),24)
-        eq_(feat['gid'],24)
-        eq_(feat['name'],'MultiPolygonZM')
-        eq_(feat.geometry.to_wkt(),'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
+        eq_(feat.id(), 24)
+        eq_(feat['gid'], 24)
+        eq_(feat['name'], 'MultiPolygonZM')
+        eq_(feat.geometry.to_wkt(),
+            'MULTIPOLYGON(((0 0,1 1,2 2,0 0)),((0 0,1 1,2 2,0 0)))')
 
     def test_variable_in_subquery1():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
            (select * from test where @zoom = 30 ) as tmp''',
                             geometry_field='geom', srid=4326,
                             autodetect_key_field=True)
-        fs = ds.featureset(variables={'zoom':30})
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        fs = ds.featureset(variables={'zoom': 30})
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta.get('key_field'),"gid")
-        eq_(meta['geometry_type'],None)
+        eq_(meta['srid'], 4326)
+        eq_(meta.get('key_field'), "gid")
+        eq_(meta['geometry_type'], None)
 
     # currently needs manual `geometry_table` passed
     # to avoid misparse of `geometry_table`
@@ -1139,17 +1231,17 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
     # https://github.com/mapnik/mapnik/issues/2718
     # currently `bogus` would be picked automatically for geometry_table
     def test_broken_parsing_of_comments():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
              (select * FROM test) AS data
              -- select this from bogus''',
                             geometry_table='test')
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection)
+        eq_(meta['srid'], 4326)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
 
     # same
     # to avoid misparse of `geometry_table`
@@ -1157,18 +1249,17 @@ if 'postgis' in mapnik.DatasourceCache.plugin_names() \
     # https://github.com/mapnik/mapnik/issues/2718
     # currently nothing would be picked automatically for geometry_table
     def test_broken_parsing_of_comments():
-        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME,table='''
+        ds = mapnik.PostGIS(dbname=MAPNIK_TEST_DBNAME, table='''
              (select * FROM test) AS data
              -- select this from bogus.''',
                             geometry_table='test')
         fs = ds.featureset()
-        for id in range(1,5):
-            eq_(fs.next().id(),id)
+        for id in range(1, 5):
+            eq_(fs.next().id(), id)
 
         meta = ds.describe()
-        eq_(meta['srid'],4326)
-        eq_(meta['geometry_type'],mapnik.DataGeometryType.Collection)
-
+        eq_(meta['srid'], 4326)
+        eq_(meta['geometry_type'], mapnik.DataGeometryType.Collection)
 
     atexit.register(postgis_takedown)
 
diff --git a/test/python_tests/projection_test.py b/test/python_tests/projection_test.py
index a7bdc14..3166329 100644
--- a/test/python_tests/projection_test.py
+++ b/test/python_tests/projection_test.py
@@ -1,17 +1,24 @@
 #!/usr/bin/env python
+import math
+import sys
 
-from nose.tools import eq_,assert_almost_equal
+from nose.tools import assert_almost_equal, eq_
 
 import mapnik
-import math
-from utilities import run_all, assert_box2d_almost_equal
+
+from .utilities import assert_box2d_almost_equal, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+if PYTHON3:
+    xrange = range
 
 # Tests that exercise map projections.
 
+
 def test_normalizing_definition():
     p = mapnik.Projection('+init=epsg:4326')
     expanded = p.expanded()
-    eq_('+proj=longlat' in expanded,True)
+    eq_('+proj=longlat' in expanded, True)
 
 
 # Trac Ticket #128
@@ -48,69 +55,77 @@ def test_wgs84_inverse_forward():
     assert_almost_equal(e.forward(p).center().y, e.center().y)
     assert_almost_equal(e.forward(p).center().x, e.center().x)
 
-def wgs2merc(lon,lat):
-    x = lon * 20037508.34 / 180;
-    y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180);
-    y = y * 20037508.34 / 180;
-    return [x,y];
-
-def merc2wgs(x,y):
-    x = (x / 20037508.34) * 180;
-    y = (y / 20037508.34) * 180;
-    y = 180 / math.pi * (2 * math.atan(math.exp(y * math.pi/180)) - math.pi/2);
-    if x > 180: x = 180;
-    if x < -180: x = -180;
-    if y > 85.0511: y = 85.0511;
-    if y < -85.0511: y = -85.0511;
-    return [x,y]
-
-#echo -109 37 | cs2cs -f "%.10f" +init=epsg:4326 +to +init=epsg:3857
+
+def wgs2merc(lon, lat):
+    x = lon * 20037508.34 / 180
+    y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180)
+    y = y * 20037508.34 / 180
+    return [x, y]
+
+
+def merc2wgs(x, y):
+    x = (x / 20037508.34) * 180
+    y = (y / 20037508.34) * 180
+    y = 180 / math.pi * \
+        (2 * math.atan(math.exp(y * math.pi / 180)) - math.pi / 2)
+    if x > 180:
+        x = 180
+    if x < -180:
+        x = -180
+    if y > 85.0511:
+        y = 85.0511
+    if y < -85.0511:
+        y = -85.0511
+    return [x, y]
+
+# echo -109 37 | cs2cs -f "%.10f" +init=epsg:4326 +to +init=epsg:3857
 #-12133824.4964668211    4439106.7872505859 0.0000000000
 
-## todo
+# todo
 # benchmarks
 # better well known detection
 # better srs matching with strip/trim
 # python copy to avoid crash
 
+
 def test_proj_transform_between_init_and_literal():
     one = mapnik.Projection('+init=epsg:4326')
     two = mapnik.Projection('+init=epsg:3857')
-    tr1 = mapnik.ProjTransform(one,two)
-    tr1b = mapnik.ProjTransform(two,one)
+    tr1 = mapnik.ProjTransform(one, two)
+    tr1b = mapnik.ProjTransform(two, one)
     wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
     merc = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over'
     src = mapnik.Projection(wgs84)
     dest = mapnik.Projection(merc)
-    tr2 = mapnik.ProjTransform(src,dest)
-    tr2b = mapnik.ProjTransform(dest,src)
-    for x in xrange(-180,180,10):
-        for y in xrange(-60,60,10):
-            coord = mapnik.Coord(x,y)
+    tr2 = mapnik.ProjTransform(src, dest)
+    tr2b = mapnik.ProjTransform(dest, src)
+    for x in xrange(-180, 180, 10):
+        for y in xrange(-60, 60, 10):
+            coord = mapnik.Coord(x, y)
             merc_coord1 = tr1.forward(coord)
             merc_coord2 = tr1b.backward(coord)
             merc_coord3 = tr2.forward(coord)
             merc_coord4 = tr2b.backward(coord)
-            eq_(math.fabs(merc_coord1.x - merc_coord1.x) < 1,True)
-            eq_(math.fabs(merc_coord1.x - merc_coord2.x) < 1,True)
-            eq_(math.fabs(merc_coord1.x - merc_coord3.x) < 1,True)
-            eq_(math.fabs(merc_coord1.x - merc_coord4.x) < 1,True)
-            eq_(math.fabs(merc_coord1.y - merc_coord1.y) < 1,True)
-            eq_(math.fabs(merc_coord1.y - merc_coord2.y) < 1,True)
-            eq_(math.fabs(merc_coord1.y - merc_coord3.y) < 1,True)
-            eq_(math.fabs(merc_coord1.y - merc_coord4.y) < 1,True)
+            eq_(math.fabs(merc_coord1.x - merc_coord1.x) < 1, True)
+            eq_(math.fabs(merc_coord1.x - merc_coord2.x) < 1, True)
+            eq_(math.fabs(merc_coord1.x - merc_coord3.x) < 1, True)
+            eq_(math.fabs(merc_coord1.x - merc_coord4.x) < 1, True)
+            eq_(math.fabs(merc_coord1.y - merc_coord1.y) < 1, True)
+            eq_(math.fabs(merc_coord1.y - merc_coord2.y) < 1, True)
+            eq_(math.fabs(merc_coord1.y - merc_coord3.y) < 1, True)
+            eq_(math.fabs(merc_coord1.y - merc_coord4.y) < 1, True)
             lon_lat_coord1 = tr1.backward(merc_coord1)
             lon_lat_coord2 = tr1b.forward(merc_coord2)
             lon_lat_coord3 = tr2.backward(merc_coord3)
             lon_lat_coord4 = tr2b.forward(merc_coord4)
-            eq_(math.fabs(coord.x - lon_lat_coord1.x) < 1,True)
-            eq_(math.fabs(coord.x - lon_lat_coord2.x) < 1,True)
-            eq_(math.fabs(coord.x - lon_lat_coord3.x) < 1,True)
-            eq_(math.fabs(coord.x - lon_lat_coord4.x) < 1,True)
-            eq_(math.fabs(coord.y - lon_lat_coord1.y) < 1,True)
-            eq_(math.fabs(coord.y - lon_lat_coord2.y) < 1,True)
-            eq_(math.fabs(coord.y - lon_lat_coord3.y) < 1,True)
-            eq_(math.fabs(coord.y - lon_lat_coord4.y) < 1,True)
+            eq_(math.fabs(coord.x - lon_lat_coord1.x) < 1, True)
+            eq_(math.fabs(coord.x - lon_lat_coord2.x) < 1, True)
+            eq_(math.fabs(coord.x - lon_lat_coord3.x) < 1, True)
+            eq_(math.fabs(coord.x - lon_lat_coord4.x) < 1, True)
+            eq_(math.fabs(coord.y - lon_lat_coord1.y) < 1, True)
+            eq_(math.fabs(coord.y - lon_lat_coord2.y) < 1, True)
+            eq_(math.fabs(coord.y - lon_lat_coord3.y) < 1, True)
+            eq_(math.fabs(coord.y - lon_lat_coord4.y) < 1, True)
 
 
 # Github Issue #2648
@@ -124,7 +139,8 @@ def test_proj_antimeridian_bbox():
     prj_trans_rev = mapnik.ProjTransform(prjGeog, prjProj)
 
     # bad = mapnik.Box2d(-177.31453250437079, -62.33374815225163, 178.02778363316355, -24.584597490955804)
-    better = mapnik.Box2d(-180.0, -62.33374815225163, 180.0, -24.584597490955804)
+    better = mapnik.Box2d(-180.0, -62.33374815225163,
+                          180.0, -24.584597490955804)
 
     buffered_query_ext = mapnik.Box2d(274000, 3087000, 3327000, 7173000)
     fwd_ext = prj_trans_fwd.forward(buffered_query_ext, PROJ_ENVELOPE_POINTS)
@@ -136,7 +152,8 @@ def test_proj_antimeridian_bbox():
     assert_box2d_almost_equal(rev_ext, better)
 
     # checks for not being snapped (ie. not antimeridian)
-    normal = mapnik.Box2d(148.766759749,-60.1222810238,159.95484893,-24.9771195151)
+    normal = mapnik.Box2d(148.766759749, -60.1222810238,
+                          159.95484893, -24.9771195151)
     buffered_query_ext = mapnik.Box2d(274000, 3087000, 276000, 7173000)
     fwd_ext = prj_trans_fwd.forward(buffered_query_ext, PROJ_ENVELOPE_POINTS)
     assert_box2d_almost_equal(fwd_ext, normal)
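
The wgs2merc/merc2wgs helpers above are the usual spherical-Mercator forward and
inverse formulas, and the cs2cs comment gives one reference point
(-109 37 -> -12133824.4964668211 4439106.7872505859). A minimal standalone check of
that round trip, using only the standard library; the 1 cm tolerance is an assumption
for illustration, not something taken from the test suite:

    import math

    lon, lat = -109.0, 37.0

    # forward spherical Mercator (same formula as wgs2merc above)
    x = lon * 20037508.34 / 180
    y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180)
    y = y * 20037508.34 / 180

    # agrees with the cs2cs sample quoted in the comment to within ~1 cm
    assert abs(x + 12133824.4964668211) < 0.01
    assert abs(y - 4439106.7872505859) < 0.01

    # inverse transform (merc2wgs) recovers the original coordinate
    lon2 = (x / 20037508.34) * 180
    lat2 = (y / 20037508.34) * 180
    lat2 = 180 / math.pi * (2 * math.atan(math.exp(lat2 * math.pi / 180)) - math.pi / 2)
    assert abs(lon2 - lon) < 1e-9 and abs(lat2 - lat) < 1e-9
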
diff --git a/test/python_tests/python_plugin_test.py b/test/python_tests/python_plugin_test.py
index a39272f..abeca7d 100644
--- a/test/python_tests/python_plugin_test.py
+++ b/test/python_tests/python_plugin_test.py
@@ -62,7 +62,7 @@
 #             def within_circle(p):
 #                 delta_x = p[0] - self.container.centre[0]
 #                 delta_y = p[0] - self.container.centre[0]
-#                 return delta_x*delta_x + delta_y*delta_y < self.radius*self.radius
+# return delta_x*delta_x + delta_y*delta_y < self.radius*self.radius
 
 #             if all(within_circle(p) for p in (tl,tr,bl,br)):
 #                 raise StopIteration()
diff --git a/test/python_tests/query_test.py b/test/python_tests/query_test.py
index 8da3534..d4298b6 100644
--- a/test/python_tests/query_test.py
+++ b/test/python_tests/query_test.py
@@ -1,16 +1,21 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import os, mapnik
+import os
+
+from nose.tools import assert_almost_equal, eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
-from nose.tools import eq_,assert_almost_equal,raises
-from utilities import execution_path, run_all
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_query_init():
     bbox = (-180, -90, 180, 90)
     query = mapnik.Query(mapnik.Box2d(*bbox))
@@ -18,11 +23,13 @@ def test_query_init():
     assert_almost_equal(r[0], 1.0, places=7)
     assert_almost_equal(r[1], 1.0, places=7)
     # https://github.com/mapnik/mapnik/issues/1762
-    eq_(query.property_names,[])
+    eq_(query.property_names, [])
     query.add_property_name('migurski')
-    eq_(query.property_names,['migurski'])
+    eq_(query.property_names, ['migurski'])
 
 # Converting *from* tuples *to* resolutions is not yet supported
+
+
 @raises(TypeError)
 def test_query_resolution():
     bbox = (-180, -90, 180, 90)
diff --git a/test/python_tests/query_tolerance_test.py b/test/python_tests/query_tolerance_test.py
index 97c1b3e..07c5afe 100644
--- a/test/python_tests/query_tolerance_test.py
+++ b/test/python_tests/query_tolerance_test.py
@@ -1,8 +1,13 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import os, mapnik
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -17,26 +22,26 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = ds
         lyr.srs = srs
         _width = 256
-        _map = mapnik.Map(_width,_width, srs)
+        _map = mapnik.Map(_width, _width, srs)
         _map.layers.append(lyr)
         # zoom determines tolerance
         _map.zoom_all()
         _map_env = _map.envelope()
         tol = (_map_env.maxx - _map_env.minx) / _width * 3
         # 0.046875 for arrows.shp and zoom_all
-        eq_(tol,0.046875)
+        eq_(tol, 0.046875)
         # check point really exists
         x, y = 2.0, 4.0
-        features = _map.query_point(0,x,y).features
-        eq_(len(features),1)
+        features = _map.query_point(0, x, y).features
+        eq_(len(features), 1)
         # check inside tolerance limit
         x = 2.0 + tol * 0.9
-        features = _map.query_point(0,x,y).features
-        eq_(len(features),1)
+        features = _map.query_point(0, x, y).features
+        eq_(len(features), 1)
         # check outside tolerance limit
         x = 2.0 + tol * 1.1
-        features = _map.query_point(0,x,y).features
-        eq_(len(features),0)
+        features = _map.query_point(0, x, y).features
+        eq_(len(features), 0)
 
 if __name__ == "__main__":
     setup()
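
The tolerance used above is three pixels' worth of map units at the current zoom:
tol = (maxx - minx) / width * 3. A worked version of that arithmetic, assuming the
4.0-unit extent width implied by the expected value (an inference, not something read
from arrows.shp):

    width_px = 256
    extent_width = 4.0                  # inferred: 0.046875 * 256 / 3
    tol = extent_width / width_px * 3
    assert tol == 0.046875
    # a point offset by 0.9 * tol from a feature is found, one at 1.1 * tol is not
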
diff --git a/test/python_tests/raster_colorizer_test.py b/test/python_tests/raster_colorizer_test.py
index 6fb0102..8ae6982 100644
--- a/test/python_tests/raster_colorizer_test.py
+++ b/test/python_tests/raster_colorizer_test.py
@@ -1,87 +1,104 @@
-#coding=utf8
+# coding=utf8
 import os
-import mapnik
-from utilities import execution_path, run_all
+import sys
+
 from nose.tools import eq_
 
+import mapnik
+
+from .utilities import execution_path, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
-#test discrete colorizer mode
+# test discrete colorizer mode
+
+
 def test_get_color_discrete():
-    #setup
-    colorizer = mapnik.RasterColorizer();
-    colorizer.default_color = mapnik.Color(0,0,0,0);
-    colorizer.default_mode = mapnik.COLORIZER_DISCRETE;
+    # setup
+    colorizer = mapnik.RasterColorizer()
+    colorizer.default_color = mapnik.Color(0, 0, 0, 0)
+    colorizer.default_mode = mapnik.COLORIZER_DISCRETE
+
+    colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100))
+    colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
+
+    # should be default colour
+    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+    eq_(colorizer.get_color(0), mapnik.Color(0, 0, 0, 0))
 
-    colorizer.add_stop(10, mapnik.Color(100,100,100,100));
-    colorizer.add_stop(20, mapnik.Color(200,200,200,200));
+    # now in stop 1
+    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
+    eq_(colorizer.get_color(19), mapnik.Color(100, 100, 100, 100))
 
-    #should be default colour
-    eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0));
-    eq_(colorizer.get_color(0), mapnik.Color(0,0,0,0));
+    # now in stop 2
+    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
+    eq_(colorizer.get_color(1000), mapnik.Color(200, 200, 200, 200))
 
-    #now in stop 1
-    eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100));
-    eq_(colorizer.get_color(19), mapnik.Color(100,100,100,100));
+# test exact colorizer mode
 
-    #now in stop 2
-    eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200));
-    eq_(colorizer.get_color(1000), mapnik.Color(200,200,200,200));
 
-#test exact colorizer mode
 def test_get_color_exact():
-    #setup
-    colorizer = mapnik.RasterColorizer();
-    colorizer.default_color = mapnik.Color(0,0,0,0);
-    colorizer.default_mode = mapnik.COLORIZER_EXACT;
+    # setup
+    colorizer = mapnik.RasterColorizer()
+    colorizer.default_color = mapnik.Color(0, 0, 0, 0)
+    colorizer.default_mode = mapnik.COLORIZER_EXACT
 
-    colorizer.add_stop(10, mapnik.Color(100,100,100,100));
-    colorizer.add_stop(20, mapnik.Color(200,200,200,200));
+    colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100))
+    colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
 
-    #should be default colour
-    eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0));
-    eq_(colorizer.get_color(11), mapnik.Color(0,0,0,0));
-    eq_(colorizer.get_color(20.001), mapnik.Color(0,0,0,0));
+    # should be default colour
+    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+    eq_(colorizer.get_color(11), mapnik.Color(0, 0, 0, 0))
+    eq_(colorizer.get_color(20.001), mapnik.Color(0, 0, 0, 0))
 
-    #should be stop 1
-    eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100));
+    # should be stop 1
+    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
+
+    # should be stop 2
+    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
+
+# test linear colorizer mode
 
-    #should be stop 2
-    eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200));
 
-#test linear colorizer mode
 def test_get_color_linear():
-    #setup
-    colorizer = mapnik.RasterColorizer();
-    colorizer.default_color = mapnik.Color(0,0,0,0);
-    colorizer.default_mode = mapnik.COLORIZER_LINEAR;
+    # setup
+    colorizer = mapnik.RasterColorizer()
+    colorizer.default_color = mapnik.Color(0, 0, 0, 0)
+    colorizer.default_mode = mapnik.COLORIZER_LINEAR
+
+    colorizer.add_stop(10, mapnik.Color(100, 100, 100, 100))
+    colorizer.add_stop(20, mapnik.Color(200, 200, 200, 200))
 
-    colorizer.add_stop(10, mapnik.Color(100,100,100,100));
-    colorizer.add_stop(20, mapnik.Color(200,200,200,200));
+    # should be default colour
+    eq_(colorizer.get_color(-50), mapnik.Color(0, 0, 0, 0))
+    eq_(colorizer.get_color(9.9), mapnik.Color(0, 0, 0, 0))
 
-    #should be default colour
-    eq_(colorizer.get_color(-50), mapnik.Color(0,0,0,0));
-    eq_(colorizer.get_color(9.9), mapnik.Color(0,0,0,0));
+    # should be stop 1
+    eq_(colorizer.get_color(10), mapnik.Color(100, 100, 100, 100))
 
-    #should be stop 1
-    eq_(colorizer.get_color(10), mapnik.Color(100,100,100,100));
+    # should be stop 2
+    eq_(colorizer.get_color(20), mapnik.Color(200, 200, 200, 200))
 
-    #should be stop 2
-    eq_(colorizer.get_color(20), mapnik.Color(200,200,200,200));
+    # half way between stops 1 and 2
+    eq_(colorizer.get_color(15), mapnik.Color(150, 150, 150, 150))
 
-    #half way between stops 1 and 2
-    eq_(colorizer.get_color(15), mapnik.Color(150,150,150,150));
+    # after stop 2
+    eq_(colorizer.get_color(100), mapnik.Color(200, 200, 200, 200))
 
-    #after stop 2
-    eq_(colorizer.get_color(100), mapnik.Color(200,200,200,200));
 
 def test_stop_label():
-    stop = mapnik.ColorizerStop(1, mapnik.COLORIZER_LINEAR, mapnik.Color('red'))
+    stop = mapnik.ColorizerStop(
+        1, mapnik.COLORIZER_LINEAR, mapnik.Color('red'))
     assert not stop.label
-    label = u"32º C".encode('utf8')
+    label = u"32º C"
+    if not PYTHON3:
+        label = label.encode('utf8')
     stop.label = label
     assert stop.label == label, stop.label
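
The linear-mode expectations above follow from straight interpolation between stops:
with stops at 10 -> (100,100,100,100) and 20 -> (200,200,200,200), the value 15 sits
halfway and yields (150,150,150,150); values past the last stop clamp to it, and
values below the first stop fall back to default_color. A minimal per-channel sketch
of the interpolation itself (a hand-rolled illustration, not the mapnik
implementation):

    def lerp_channel(value, stop1, stop2):
        # interpolate one colour channel between two stops; values beyond the
        # last stop clamp to it (below the first stop mapnik uses default_color)
        (v1, c1), (v2, c2) = stop1, stop2
        if value >= v2:
            return c2
        return c1 + (c2 - c1) * (value - v1) / (v2 - v1)

    assert lerp_channel(10, (10, 100), (20, 200)) == 100
    assert lerp_channel(15, (10, 100), (20, 200)) == 150
    assert lerp_channel(100, (10, 100), (20, 200)) == 200  # clamped past the last stop
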
 
diff --git a/test/python_tests/raster_symbolizer_test.py b/test/python_tests/raster_symbolizer_test.py
index 9092118..f6bb053 100644
--- a/test/python_tests/raster_symbolizer_test.py
+++ b/test/python_tests/raster_symbolizer_test.py
@@ -1,9 +1,13 @@
 #!/usr/bin/env python
 
+import os
+
 from nose.tools import eq_
-from utilities import execution_path, run_all, get_unique_colors
 
-import os, mapnik
+import mapnik
+
+from .utilities import execution_path, get_unique_colors, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -16,33 +20,35 @@ def test_dataraster_coloring():
     lyr = mapnik.Layer('dataraster')
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = mapnik.Gdal(
-            file = '../data/raster/dataraster.tif',
-            band = 1,
-            )
+            file='../data/raster/dataraster.tif',
+            band=1,
+        )
         lyr.srs = srs
-        _map = mapnik.Map(256,256, srs)
+        _map = mapnik.Map(256, 256, srs)
         style = mapnik.Style()
         rule = mapnik.Rule()
         sym = mapnik.RasterSymbolizer()
         # Assigning a colorizer to the RasterSymbolizer tells the latter
         # that it should use it to colorize the raw data raster
-        colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color("transparent"))
+        colorizer = mapnik.RasterColorizer(
+            mapnik.COLORIZER_DISCRETE,
+            mapnik.Color("transparent"))
 
         for value, color in [
-            (  0, "#0044cc"),
-            ( 10, "#00cc00"),
-            ( 20, "#ffff00"),
-            ( 30, "#ff7f00"),
-            ( 40, "#ff0000"),
-            ( 50, "#ff007f"),
-            ( 60, "#ff00ff"),
-            ( 70, "#cc00cc"),
-            ( 80, "#990099"),
-            ( 90, "#660066"),
-            ( 200, "transparent"),
+            (0, "#0044cc"),
+            (10, "#00cc00"),
+            (20, "#ffff00"),
+            (30, "#ff7f00"),
+            (40, "#ff0000"),
+            (50, "#ff007f"),
+            (60, "#ff00ff"),
+            (70, "#cc00cc"),
+            (80, "#990099"),
+            (90, "#660066"),
+            (200, "transparent"),
         ]:
             colorizer.add_stop(value, mapnik.Color(color))
-        sym.colorizer = colorizer;
+        sym.colorizer = colorizer
         rule.symbols.append(sym)
         style.rules.append(rule)
         _map.append_style('foo', style)
@@ -50,52 +56,57 @@ def test_dataraster_coloring():
         _map.layers.append(lyr)
         _map.zoom_to_box(lyr.envelope())
 
-        im = mapnik.Image(_map.width,_map.height)
+        im = mapnik.Image(_map.width, _map.height)
         mapnik.render(_map, im)
         expected_file = './images/support/dataraster_coloring.png'
         actual_file = '/tmp/' + os.path.basename(expected_file)
-        im.save(actual_file,'png32')
+        im.save(actual_file, 'png32')
         if not os.path.exists(expected_file) or os.environ.get('UPDATE'):
-            im.save(expected_file,'png32')
+            im.save(expected_file, 'png32')
         actual = mapnik.Image.open(actual_file)
         expected = mapnik.Image.open(expected_file)
-        eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file))
+        eq_(actual.tostring('png32'),
+            expected.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+                                                                expected_file))
+
 
 def test_dataraster_query_point():
     srs = '+init=epsg:32630'
     lyr = mapnik.Layer('dataraster')
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = mapnik.Gdal(
-            file = '../data/raster/dataraster.tif',
-            band = 1,
-            )
+            file='../data/raster/dataraster.tif',
+            band=1,
+        )
         lyr.srs = srs
-        _map = mapnik.Map(256,256, srs)
+        _map = mapnik.Map(256, 256, srs)
         _map.layers.append(lyr)
 
-        x, y = 556113.0,4381428.0 # center of extent of raster
+        x, y = 556113.0, 4381428.0  # center of extent of raster
         _map.zoom_all()
-        features = _map.query_point(0,x,y).features
+        features = _map.query_point(0, x, y).features
         assert len(features) == 1
         feat = features[0]
         center = feat.envelope().center()
-        assert center.x==x and center.y==y, center
+        assert center.x == x and center.y == y, center
         value = feat['value']
         assert value == 18.0, value
 
         # point inside map extent but outside raster extent
         current_box = _map.envelope()
-        current_box.expand_to_include(-427417,4477517)
+        current_box.expand_to_include(-427417, 4477517)
         _map.zoom_to_box(current_box)
-        features = _map.query_point(0,-427417,4477517).features
+        features = _map.query_point(0, -427417, 4477517).features
         assert len(features) == 0
 
         # point inside raster extent with nodata
-        features = _map.query_point(0,126850,4596050).features
+        features = _map.query_point(0, 126850, 4596050).features
         assert len(features) == 0
 
+
 def test_load_save_map():
-    map = mapnik.Map(256,256)
+    map = mapnik.Map(256, 256)
     in_map = "../data/good_maps/raster_symbolizer.xml"
     try:
         mapnik.load_map(map, in_map)
@@ -104,11 +115,12 @@ def test_load_save_map():
         assert 'RasterSymbolizer' in out_map
         assert 'RasterColorizer' in out_map
         assert 'stop' in out_map
-    except RuntimeError, e:
+    except RuntimeError as e:
         # only test datasources that we have installed
         if not 'Could not create datasource' in str(e):
             raise RuntimeError(str(e))
 
+
 def test_raster_with_alpha_blends_correctly_with_background():
     WIDTH = 500
     HEIGHT = 500
@@ -141,7 +153,8 @@ def test_raster_with_alpha_blends_correctly_with_background():
         mapnik.render(map, mim)
         mim.tostring()
         # All white is expected
-        eq_(get_unique_colors(mim),['rgba(254,254,254,255)'])
+        eq_(get_unique_colors(mim), ['rgba(254,254,254,255)'])
+
 
 def test_raster_warping():
     lyrSrs = "+init=epsg:32630"
@@ -149,16 +162,17 @@ def test_raster_warping():
     lyr = mapnik.Layer('dataraster', lyrSrs)
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = mapnik.Gdal(
-            file = '../data/raster/dataraster.tif',
-            band = 1,
-            )
+            file='../data/raster/dataraster.tif',
+            band=1,
+        )
         sym = mapnik.RasterSymbolizer()
-        sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
+        sym.colorizer = mapnik.RasterColorizer(
+            mapnik.COLORIZER_DISCRETE, mapnik.Color(255, 255, 0))
         rule = mapnik.Rule()
         rule.symbols.append(sym)
         style = mapnik.Style()
         style.rules.append(rule)
-        _map = mapnik.Map(256,256, mapSrs)
+        _map = mapnik.Map(256, 256, mapSrs)
         _map.append_style('foo', style)
         lyr.styles.append('foo')
         _map.layers.append(lyr)
@@ -168,16 +182,20 @@ def test_raster_warping():
                                          layer_proj)
         _map.zoom_to_box(prj_trans.backward(lyr.envelope()))
 
-        im = mapnik.Image(_map.width,_map.height)
+        im = mapnik.Image(_map.width, _map.height)
         mapnik.render(_map, im)
         expected_file = './images/support/raster_warping.png'
         actual_file = '/tmp/' + os.path.basename(expected_file)
-        im.save(actual_file,'png32')
+        im.save(actual_file, 'png32')
         if not os.path.exists(expected_file) or os.environ.get('UPDATE'):
-            im.save(expected_file,'png32')
+            im.save(expected_file, 'png32')
         actual = mapnik.Image.open(actual_file)
         expected = mapnik.Image.open(expected_file)
-        eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file))
+        eq_(actual.tostring('png32'),
+            expected.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+                                                                expected_file))
+
 
 def test_raster_warping_does_not_overclip_source():
     lyrSrs = "+init=epsg:32630"
@@ -185,32 +203,36 @@ def test_raster_warping_does_not_overclip_source():
     lyr = mapnik.Layer('dataraster', lyrSrs)
     if 'gdal' in mapnik.DatasourceCache.plugin_names():
         lyr.datasource = mapnik.Gdal(
-            file = '../data/raster/dataraster.tif',
-            band = 1,
-            )
+            file='../data/raster/dataraster.tif',
+            band=1,
+        )
         sym = mapnik.RasterSymbolizer()
-        sym.colorizer = mapnik.RasterColorizer(mapnik.COLORIZER_DISCRETE, mapnik.Color(255,255,0))
+        sym.colorizer = mapnik.RasterColorizer(
+            mapnik.COLORIZER_DISCRETE, mapnik.Color(255, 255, 0))
         rule = mapnik.Rule()
         rule.symbols.append(sym)
         style = mapnik.Style()
         style.rules.append(rule)
-        _map = mapnik.Map(256,256, mapSrs)
-        _map.background=mapnik.Color('white')
+        _map = mapnik.Map(256, 256, mapSrs)
+        _map.background = mapnik.Color('white')
         _map.append_style('foo', style)
         lyr.styles.append('foo')
         _map.layers.append(lyr)
-        _map.zoom_to_box(mapnik.Box2d(3,42,4,43))
+        _map.zoom_to_box(mapnik.Box2d(3, 42, 4, 43))
 
-        im = mapnik.Image(_map.width,_map.height)
+        im = mapnik.Image(_map.width, _map.height)
         mapnik.render(_map, im)
         expected_file = './images/support/raster_warping_does_not_overclip_source.png'
         actual_file = '/tmp/' + os.path.basename(expected_file)
-        im.save(actual_file,'png32')
+        im.save(actual_file, 'png32')
         if not os.path.exists(expected_file) or os.environ.get('UPDATE'):
-            im.save(expected_file,'png32')
+            im.save(expected_file, 'png32')
         actual = mapnik.Image.open(actual_file)
         expected = mapnik.Image.open(expected_file)
-        eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file))
+        eq_(actual.tostring('png32'),
+            expected.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual_file,
+                                                                expected_file))
 
 if __name__ == "__main__":
     setup()
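
Several tests above repeat the same render-and-compare pattern: render the map, save
the actual output under /tmp, regenerate the expected fixture when UPDATE is set, then
compare the png32 encodings byte for byte. A sketch of that pattern pulled into a
helper; the name compare_rendering is illustrative only and not part of the test
suite:

    import os
    import mapnik

    def compare_rendering(_map, expected_file):
        # render, write the actual image next to the expected fixture name,
        # refresh the fixture on demand, then compare encodings byte for byte
        im = mapnik.Image(_map.width, _map.height)
        mapnik.render(_map, im)
        actual_file = '/tmp/' + os.path.basename(expected_file)
        im.save(actual_file, 'png32')
        if not os.path.exists(expected_file) or os.environ.get('UPDATE'):
            im.save(expected_file, 'png32')
        actual = mapnik.Image.open(actual_file)
        expected = mapnik.Image.open(expected_file)
        assert actual.tostring('png32') == expected.tostring('png32'), \
            'failed comparing actual (%s) and expected (%s)' % (actual_file,
                                                                expected_file)
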
diff --git a/test/python_tests/rasterlite_test.py b/test/python_tests/rasterlite_test.py
index b15b157..284def8 100644
--- a/test/python_tests/rasterlite_test.py
+++ b/test/python_tests/rasterlite_test.py
@@ -1,9 +1,13 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_,assert_almost_equal
-from utilities import execution_path, run_all
+import os
+
+from nose.tools import assert_almost_equal, eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
-import os, mapnik
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -15,23 +19,23 @@ if 'rasterlite' in mapnik.DatasourceCache.plugin_names():
 
     def test_rasterlite():
         ds = mapnik.Rasterlite(
-            file = '../data/rasterlite/globe.sqlite',
-            table = 'globe'
-            )
+            file='../data/rasterlite/globe.sqlite',
+            table='globe'
+        )
         e = ds.envelope()
 
-        assert_almost_equal(e.minx,-180, places=5)
+        assert_almost_equal(e.minx, -180, places=5)
         assert_almost_equal(e.miny, -90, places=5)
         assert_almost_equal(e.maxx, 180, places=5)
-        assert_almost_equal(e.maxy,  90, places=5)
-        eq_(len(ds.fields()),0)
+        assert_almost_equal(e.maxy, 90, places=5)
+        eq_(len(ds.fields()), 0)
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
         fs = ds.features(query)
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat.attributes,{})
+        eq_(feat.id(), 1)
+        eq_(feat.attributes, {})
 
 if __name__ == "__main__":
     setup()
diff --git a/test/python_tests/render_grid_test.py b/test/python_tests/render_grid_test.py
index 85c7401..8752fcc 100644
--- a/test/python_tests/render_grid_test.py
+++ b/test/python_tests/render_grid_test.py
@@ -1,110 +1,358 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,raises
-from utilities import execution_path, run_all
-import os, mapnik
+import os
+
+from nose.tools import eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
 try:
     import json
 except ImportError:
     import simplejson as json
 
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
 if mapnik.has_grid_renderer():
-    def show_grids(name,g1,g2):
+    def show_grids(name, g1, g2):
         g1_file = '/tmp/mapnik-%s-actual.json' % name
-        open(g1_file,'w').write(json.dumps(g1,sort_keys=True))
+        with open(g1_file, 'w') as f:
+            f.write(json.dumps(g1, sort_keys=True))
         g2_file = '/tmp/mapnik-%s-expected.json' % name
-        open(g2_file,'w').write(json.dumps(g2,sort_keys=True))
+        with open(g2_file, 'w') as f:
+            f.write(json.dumps(g2, sort_keys=True))
         val = 'JSON does not match  ->\n'
         if g1['grid'] != g2['grid']:
-           val += ' X grid does not match\n'
+            val += ' X grid does not match\n'
         else:
-           val += ' ✓ grid matches\n'
+            val += ' ✓ grid matches\n'
         if g1['data'].keys() != g2['data'].keys():
-           val += ' X data does not match\n'
+            val += ' X data does not match\n'
         else:
-           val += ' ✓ data matches\n'
+            val += ' ✓ data matches\n'
         if g1['keys'] != g2['keys']:
-           val += ' X keys do not\n'
+            val += ' X keys do not\n'
         else:
-           val += ' ✓ keys match\n'
-        val += '\n\t%s\n\t%s' % (g1_file,g2_file)
+            val += ' ✓ keys match\n'
+        val += '\n\t%s\n\t%s' % (g1_file, g2_file)
         return val
 
-    def show_grids2(name,g1,g2):
+    def show_grids2(name, g1, g2):
         g2_expected = '../data/grids/mapnik-%s-actual.json' % name
         if not os.path.exists(g2_expected):
             # create test fixture based on actual results
-            open(g2_expected,'a+').write(json.dumps(g1,sort_keys=True))
+            with open(g2_expected, 'a+') as f:
+                f.write(json.dumps(g1, sort_keys=True))
             return
         g1_file = '/tmp/mapnik-%s-actual.json' % name
-        open(g1_file,'w').write(json.dumps(g1,sort_keys=True))
+        with open(g1_file, 'w') as f:
+            f.write(json.dumps(g1, sort_keys=True))
         val = 'JSON does not match  ->\n'
         if g1['grid'] != g2['grid']:
-           val += ' X grid does not match\n'
+            val += ' X grid does not match\n'
         else:
-           val += ' ✓ grid matches\n'
+            val += ' ✓ grid matches\n'
         if g1['data'].keys() != g2['data'].keys():
-           val += ' X data does not match\n'
+            val += ' X data does not match\n'
         else:
-           val += ' ✓ data matches\n'
+            val += ' ✓ data matches\n'
         if g1['keys'] != g2['keys']:
-           val += ' X keys do not\n'
+            val += ' X keys do not\n'
         else:
-           val += ' ✓ keys match\n'
-        val += '\n\t%s\n\t%s' % (g1_file,g2_expected)
+            val += ' ✓ keys match\n'
+        val += '\n\t%s\n\t%s' % (g1_file, g2_expected)
         return val
-    
+
     # previous rendering using agg ellipse directly
-    grid_correct_new = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "                         [...]
+    grid_correct_new = {
+        "data": {
+            "North East": {
+                "Name": "North East"},
+            "North West": {
+                "Name": "North West"},
+            "South East": {
+                "Name": "South East"},
+            "South West": {
+                "Name": "South West"}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          !!                                  ##                ",
+            "         !!!                                 ###                ",
+            "          !!                                  ##                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          $$                                  %%                ",
+            "         $$$                                  %%                ",
+            "          $$                                  %%                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "North West",
+            "North East",
+            "South West",
+            "South East"]}
 
     # newer rendering using svg
-    grid_correct_new2 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "                        [...]
-
-    grid_correct_new3 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "                        [...]
-
-    def resolve(grid,row,col):
+    grid_correct_new2 = {
+        "data": {
+            "North East": {
+                "Name": "North East"},
+            "North West": {
+                "Name": "North West"},
+            "South East": {
+                "Name": "South East"},
+            "South West": {
+                "Name": "South West"}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          !!                                  ##                ",
+            "         !!!                                 ###                ",
+            "          !!                                  ##                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          $$                                  %%                ",
+            "         $$$                                  %%                ",
+            "          $$                                  %%                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "North West",
+            "North East",
+            "South West",
+            "South East"]}
+
+    grid_correct_new3 = {
+        "data": {
+            "North East": {
+                "Name": "North East"},
+            "North West": {
+                "Name": "North West"},
+            "South East": {
+                "Name": "South East"},
+            "South West": {
+                "Name": "South West"}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          !!                                  ##                ",
+            "         !!!                                 ###                ",
+            "          !!                                  ##                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          $$                                  %%                ",
+            "         $$$                                  %%                ",
+            "          $                                   %%                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "North West",
+            "North East",
+            "South West",
+            "South East"]}
+
+    def resolve(grid, row, col):
         """ Resolve the attributes for a given pixel in a grid.
         """
         row = grid['grid'][row]
         utf_val = row[col]
-        #http://docs.python.org/library/functions.html#ord
+        # http://docs.python.org/library/functions.html#ord
         codepoint = ord(utf_val)
         if (codepoint >= 93):
-            codepoint-=1
+            codepoint -= 1
         if (codepoint >= 35):
-            codepoint-=1
+            codepoint -= 1
         codepoint -= 32
         key = grid['keys'][codepoint]
         return grid['data'].get(key)
 
-
-    def create_grid_map(width,height,sym):
+    def create_grid_map(width, height, sym):
         ds = mapnik.MemoryDatasource()
         context = mapnik.Context()
         context.push('Name')
-        f = mapnik.Feature(context,1)
+        f = mapnik.Feature(context, 1)
         f['Name'] = 'South East'
         f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.60)')
         ds.add_feature(f)
 
-        f = mapnik.Feature(context,2)
+        f = mapnik.Feature(context, 2)
         f['Name'] = 'South West'
         f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.60)')
         ds.add_feature(f)
 
-        f = mapnik.Feature(context,3)
+        f = mapnik.Feature(context, 3)
         f['Name'] = 'North West'
         f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.38)')
         ds.add_feature(f)
 
-        f = mapnik.Feature(context,4)
+        f = mapnik.Feature(context, 4)
         f['Name'] = 'North East'
         f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.38)')
         ds.add_feature(f)
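
resolve() above undoes the UTFGrid character encoding: a key index is shifted up by
32, and the codepoints for '"' (34) and '\' (92) are skipped, which is why the decoder
steps values at 93 and 35 back down before subtracting 32 (so '!' is key 1, '#' key 2,
'$' key 3 and '%' key 4 in the grid fixtures above). A standalone sketch of the
encode/decode pair; the helper names are illustrative and assume the standard UTFGrid
scheme that resolve() inverts:

    def encode_id(index):
        # shift past the control characters and skip '"' (34) and '\' (92)
        codepoint = index + 32
        if codepoint >= 34:
            codepoint += 1
        if codepoint >= 92:
            codepoint += 1
        return chr(codepoint)

    def decode_char(ch):
        # mirror of resolve(): undo the two skips, then the +32 shift
        codepoint = ord(ch)
        if codepoint >= 93:
            codepoint -= 1
        if codepoint >= 35:
            codepoint -= 1
        return codepoint - 32

    assert all(decode_char(encode_id(i)) == i for i in range(256))
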
@@ -116,97 +364,340 @@ if mapnik.has_grid_renderer():
         lyr = mapnik.Layer('Places')
         lyr.datasource = ds
         lyr.styles.append('places_labels')
-        m = mapnik.Map(width,height)
-        m.append_style('places_labels',s)
+        m = mapnik.Map(width, height)
+        m.append_style('places_labels', s)
         m.layers.append(lyr)
         return m
 
-
     def test_render_grid():
         """ test render_grid method"""
-        width,height = 256,256
+        width, height = 256, 256
         sym = mapnik.MarkersSymbolizer()
         sym.width = mapnik.Expression('10')
         sym.height = mapnik.Expression('10')
-        m = create_grid_map(width,height,sym)
-        ul_lonlat = mapnik.Coord(142.30,-38.20)
-        lr_lonlat = mapnik.Coord(143.40,-38.80)
-        m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
+        m = create_grid_map(width, height, sym)
+        ul_lonlat = mapnik.Coord(142.30, -38.20)
+        lr_lonlat = mapnik.Coord(143.40, -38.80)
+        m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
 
         # new method
-        grid = mapnik.Grid(m.width,m.height,key='Name')
-        mapnik.render_layer(m,grid,layer=0,fields=['Name'])
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1,grid_correct_new3,show_grids('new-markers',utf1,grid_correct_new3))
+        grid = mapnik.Grid(m.width, m.height, key='Name')
+        mapnik.render_layer(m, grid, layer=0, fields=['Name'])
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1, grid_correct_new3, show_grids(
+            'new-markers', utf1, grid_correct_new3))
 
         # check a full view is the same as a full image
-        grid_view = grid.view(0,0,width,height)
+        grid_view = grid.view(0, 0, width, height)
         # for kicks check at full res too
-        utf3 = grid.encode('utf',resolution=1)
-        utf4 = grid_view.encode('utf',resolution=1)
-        eq_(utf3['grid'],utf4['grid'])
-        eq_(utf3['keys'],utf4['keys'])
-        eq_(utf3['data'],utf4['data'])
+        utf3 = grid.encode('utf', resolution=1)
+        utf4 = grid_view.encode('utf', resolution=1)
+        eq_(utf3['grid'], utf4['grid'])
+        eq_(utf3['keys'], utf4['keys'])
+        eq_(utf3['data'], utf4['data'])
 
-        eq_(resolve(utf4,0,0),None)
+        eq_(resolve(utf4, 0, 0), None)
 
         # resolve some center points in the
         # resampled view
-        utf5 = grid_view.encode('utf',resolution=4)
-        eq_(resolve(utf5,25,10),{"Name": "North West"})
-        eq_(resolve(utf5,25,46),{"Name": "North East"})
-        eq_(resolve(utf5,38,10),{"Name": "South West"})
-        eq_(resolve(utf5,38,46),{"Name": "South East"})
-
-
-    grid_feat_id = {'keys': ['', '3', '4', '2', '1'], 'data': {'1': {'Name': 'South East'}, '3': {'Name': u'North West'}, '2': {'Name': 'South West'}, '4': {'Name': 'North East'}}, 'grid': ['                                                                ', '                                                                ', '                                                                ', '                                                                ', '                              [...]
-
-    grid_feat_id2 = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "                                                                [...]
-
-    grid_feat_id3 = {"data": {"1": {"Name": "South East", "__id__": 1}, "2": {"Name": "South West", "__id__": 2}, "3": {"Name": "North West", "__id__": 3}, "4": {"Name": "North East", "__id__": 4}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "            [...]
+        utf5 = grid_view.encode('utf', resolution=4)
+        eq_(resolve(utf5, 25, 10), {"Name": "North West"})
+        eq_(resolve(utf5, 25, 46), {"Name": "North East"})
+        eq_(resolve(utf5, 38, 10), {"Name": "South West"})
+        eq_(resolve(utf5, 38, 46), {"Name": "South East"})
+
+    grid_feat_id = {
+        'keys': [
+            '',
+            '3',
+            '4',
+            '2',
+            '1'],
+        'data': {
+            '1': {
+                'Name': 'South East'},
+            '3': {
+                'Name': u'North West'},
+            '2': {
+                'Name': 'South West'},
+            '4': {
+                'Name': 'North East'}},
+        'grid': [
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '          !!                                  ##                ',
+            '         !!!                                 ###                ',
+            '          !!                                  ##                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '         $$$                                  %%                ',
+            '         $$$                                 %%%                ',
+            '          $$                                  %%                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ',
+            '                                                                ']}
+
+    grid_feat_id2 = {
+        "data": {
+            "1": {
+                "Name": "South East"},
+            "2": {
+                "Name": "South West"},
+            "3": {
+                "Name": "North West"},
+            "4": {
+                "Name": "North East"}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          !!                                  ##                ",
+            "         !!!                                 ###                ",
+            "          !!                                  ##                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          $$                                  %%                ",
+            "         $$$                                  %%                ",
+            "          $$                                  %%                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "3",
+            "4",
+            "2",
+            "1"]}
+
+    grid_feat_id3 = {
+        "data": {
+            "1": {
+                "Name": "South East",
+                "__id__": 1},
+            "2": {
+                "Name": "South West",
+                "__id__": 2},
+            "3": {
+                "Name": "North West",
+                "__id__": 3},
+            "4": {
+                "Name": "North East",
+                "__id__": 4}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          !!                                  ##                ",
+            "         !!!                                 ###                ",
+            "          !!                                  ##                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "          $$                                  %%                ",
+            "         $$$                                  %%                ",
+            "          $                                   %%                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "3",
+            "4",
+            "2",
+            "1"]}
 
     def test_render_grid3():
         """ test using feature id"""
-        width,height = 256,256
+        width, height = 256, 256
         sym = mapnik.MarkersSymbolizer()
         sym.width = mapnik.Expression('10')
         sym.height = mapnik.Expression('10')
-        m = create_grid_map(width,height,sym)
-        ul_lonlat = mapnik.Coord(142.30,-38.20)
-        lr_lonlat = mapnik.Coord(143.40,-38.80)
-        m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
-
-        grid = mapnik.Grid(m.width,m.height,key='__id__')
-        mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name'])
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1,grid_feat_id3,show_grids('id-markers',utf1,grid_feat_id3))
+        m = create_grid_map(width, height, sym)
+        ul_lonlat = mapnik.Coord(142.30, -38.20)
+        lr_lonlat = mapnik.Coord(143.40, -38.80)
+        m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
+
+        grid = mapnik.Grid(m.width, m.height, key='__id__')
+        mapnik.render_layer(m, grid, layer=0, fields=['__id__', 'Name'])
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1, grid_feat_id3, show_grids('id-markers', utf1, grid_feat_id3))
         # check a full view is the same as a full image
-        grid_view = grid.view(0,0,width,height)
+        grid_view = grid.view(0, 0, width, height)
         # for kicks check at full res too
-        utf3 = grid.encode('utf',resolution=1)
-        utf4 = grid_view.encode('utf',resolution=1)
-        eq_(utf3['grid'],utf4['grid'])
-        eq_(utf3['keys'],utf4['keys'])
-        eq_(utf3['data'],utf4['data'])
+        utf3 = grid.encode('utf', resolution=1)
+        utf4 = grid_view.encode('utf', resolution=1)
+        eq_(utf3['grid'], utf4['grid'])
+        eq_(utf3['keys'], utf4['keys'])
+        eq_(utf3['data'], utf4['data'])
 
-        eq_(resolve(utf4,0,0),None)
+        eq_(resolve(utf4, 0, 0), None)
 
         # resolve some center points in the
         # resampled view
-        utf5 = grid_view.encode('utf',resolution=4)
-        eq_(resolve(utf5,25,10),{"Name": "North West","__id__": 3})
-        eq_(resolve(utf5,25,46),{"Name": "North East","__id__": 4})
-        eq_(resolve(utf5,38,10),{"Name": "South West","__id__": 2})
-        eq_(resolve(utf5,38,46),{"Name": "South East","__id__": 1})
-
+        utf5 = grid_view.encode('utf', resolution=4)
+        eq_(resolve(utf5, 25, 10), {"Name": "North West", "__id__": 3})
+        eq_(resolve(utf5, 25, 46), {"Name": "North East", "__id__": 4})
+        eq_(resolve(utf5, 38, 10), {"Name": "South West", "__id__": 2})
+        eq_(resolve(utf5, 38, 46), {"Name": "South East", "__id__": 1})
 
     def gen_grid_for_id(pixel_key):
         ds = mapnik.MemoryDatasource()
         context = mapnik.Context()
         context.push('Name')
-        f = mapnik.Feature(context,pixel_key)
+        f = mapnik.Feature(context, pixel_key)
         f['Name'] = str(pixel_key)
-        f.geometry = mapnik.Geometry.from_wkt('POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))')
+        f.geometry = mapnik.Geometry.from_wkt(
+            'POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))')
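+        # the polygon covers the full extent, so after zoom_all() every pixel
+        # of the rendered grid should resolve to this feature's key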
         ds.add_feature(f)
         s = mapnik.Style()
         r = mapnik.Rule()
@@ -216,61 +707,133 @@ if mapnik.has_grid_renderer():
         lyr = mapnik.Layer('Places')
         lyr.datasource = ds
         lyr.styles.append('places_labels')
-        width,height = 256,256
-        m = mapnik.Map(width,height)
-        m.append_style('places_labels',s)
+        width, height = 256, 256
+        m = mapnik.Map(width, height)
+        m.append_style('places_labels', s)
         m.layers.append(lyr)
         m.zoom_all()
-        grid = mapnik.Grid(m.width,m.height,key='__id__')
-        mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name'])
+        grid = mapnik.Grid(m.width, m.height, key='__id__')
+        mapnik.render_layer(m, grid, layer=0, fields=['__id__', 'Name'])
         return grid
 
     def test_negative_id():
         grid = gen_grid_for_id(-1)
-        eq_(grid.get_pixel(128,128),-1)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],['-1'])
+        eq_(grid.get_pixel(128, 128), -1)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], ['-1'])
 
     def test_32bit_int_id():
         int32 = 2147483647
         grid = gen_grid_for_id(int32)
-        eq_(grid.get_pixel(128,128),int32)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],[str(int32)])
+        eq_(grid.get_pixel(128, 128), int32)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], [str(int32)])
         max_neg = -(int32)
         grid = gen_grid_for_id(max_neg)
-        eq_(grid.get_pixel(128,128),max_neg)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],[str(max_neg)])
+        eq_(grid.get_pixel(128, 128), max_neg)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], [str(max_neg)])
 
     def test_64bit_int_id():
         int64 = 0x7FFFFFFFFFFFFFFF
         grid = gen_grid_for_id(int64)
-        eq_(grid.get_pixel(128,128),int64)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],[str(int64)])
+        eq_(grid.get_pixel(128, 128), int64)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], [str(int64)])
         max_neg = -(int64)
         grid = gen_grid_for_id(max_neg)
-        eq_(grid.get_pixel(128,128),max_neg)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],[str(max_neg)])
+        eq_(grid.get_pixel(128, 128), max_neg)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], [str(max_neg)])
 
     def test_id_zero():
         grid = gen_grid_for_id(0)
-        eq_(grid.get_pixel(128,128),0)
-        utf1 = grid.encode('utf',resolution=4)
-        eq_(utf1['keys'],['0'])
-
-    line_expected = {"keys": ["", "1"], "data": {"1": {"Name": "1"}}, "grid": ["                                                               !", "                                                            !!  ", "                                                         !!     ", "                                                      !!        ", "                                                   !!           ", "                                                !!              ", "     [...]
+        eq_(grid.get_pixel(128, 128), 0)
+        utf1 = grid.encode('utf', resolution=4)
+        eq_(utf1['keys'], ['0'])
+
+    line_expected = {
+        "keys": [
+            "",
+            "1"],
+        "data": {
+            "1": {
+                "Name": "1"}},
+        "grid": [
+            "                                                               !",
+            "                                                            !!  ",
+            "                                                         !!     ",
+            "                                                      !!        ",
+            "                                                   !!           ",
+            "                                                !!              ",
+            "                                             !!                 ",
+            "                                          !!                    ",
+            "                                       !!                       ",
+            "                                    !!                          ",
+            "                                 !!                             ",
+            "                              !!                                ",
+            "                           !!                                   ",
+            "                        !!                                      ",
+            "                     !!                                         ",
+            "                  !!                                            ",
+            "               !!                                               ",
+            "            !!                                                  ",
+            "         !!                                                     ",
+            "      !!                                                        ",
+            "   !!                                                           ",
+            "!!                                                              ",
+            " !                                                              ",
+            "  !                                                             ",
+            "   !                                                            ",
+            "    !                                                           ",
+            "     !                                                          ",
+            "      !                                                         ",
+            "       !                                                        ",
+            "        !                                                       ",
+            "         !                                                      ",
+            "          !                                                     ",
+            "           !                                                    ",
+            "            !                                                   ",
+            "             !                                                  ",
+            "              !                                                 ",
+            "               !                                                ",
+            "                !                                               ",
+            "                 !                                              ",
+            "                  !                                             ",
+            "                   !                                            ",
+            "                    !                                           ",
+            "                     !                                          ",
+            "                      !                                         ",
+            "                       !                                        ",
+            "                        !                                       ",
+            "                         !                                      ",
+            "                          !                                     ",
+            "                           !                                    ",
+            "                            !                                   ",
+            "                             !                                  ",
+            "                              !                                 ",
+            "                               !                                ",
+            "                                !                               ",
+            "                                 !                              ",
+            "                                  !                             ",
+            "                                   !                            ",
+            "                                    !                           ",
+            "                                     !                          ",
+            "                                      !                         ",
+            "                                       !                        ",
+            "                                        !                       ",
+            "                                         !                      ",
+            "                                          !                     "]}
 
     def test_line_rendering():
         ds = mapnik.MemoryDatasource()
         context = mapnik.Context()
         context.push('Name')
         pixel_key = 1
-        f = mapnik.Feature(context,pixel_key)
+        f = mapnik.Feature(context, pixel_key)
         f['Name'] = str(pixel_key)
-        f.geometry = mapnik.Geometry.from_wkt('LINESTRING (30 10, 10 30, 40 40)')
+        f.geometry = mapnik.Geometry.from_wkt(
+            'LINESTRING (30 10, 10 30, 40 40)')
         ds.add_feature(f)
         s = mapnik.Style()
         r = mapnik.Rule()
@@ -280,31 +843,111 @@ if mapnik.has_grid_renderer():
         lyr = mapnik.Layer('Places')
         lyr.datasource = ds
         lyr.styles.append('places_labels')
-        width,height = 256,256
-        m = mapnik.Map(width,height)
-        m.append_style('places_labels',s)
+        width, height = 256, 256
+        m = mapnik.Map(width, height)
+        m.append_style('places_labels', s)
         m.layers.append(lyr)
         m.zoom_all()
-        #mapnik.render_to_file(m,'test.png')
-        grid = mapnik.Grid(m.width,m.height,key='__id__')
-        mapnik.render_layer(m,grid,layer=0,fields=['Name'])
+        # mapnik.render_to_file(m,'test.png')
+        grid = mapnik.Grid(m.width, m.height, key='__id__')
+        mapnik.render_layer(m, grid, layer=0, fields=['Name'])
         utf1 = grid.encode()
-        eq_(utf1,line_expected,show_grids('line',utf1,line_expected))
-
-    point_expected = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": ["                                                                ", "                                                                ", "                                                                ", "                                                                ", "                                                               [...]
+        eq_(utf1, line_expected, show_grids('line', utf1, line_expected))
+
+    point_expected = {
+        "data": {
+            "1": {
+                "Name": "South East"},
+            "2": {
+                "Name": "South West"},
+            "3": {
+                "Name": "North West"},
+            "4": {
+                "Name": "North East"}},
+        "grid": [
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "         !!!!                                ####               ",
+            "         !!!!                                ####               ",
+            "         !!!!                                ####               ",
+            "         !!!!                                ####               ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "         $$$$                                %%%%               ",
+            "         $$$$                                %%%%               ",
+            "         $$$$                                %%%%               ",
+            "         $$$$                                %%%%               ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                ",
+            "                                                                "],
+        "keys": [
+            "",
+            "3",
+            "4",
+            "2",
+            "1"]}
 
     def test_point_symbolizer_grid():
-        width,height = 256,256
+        width, height = 256, 256
         sym = mapnik.PointSymbolizer()
         sym.file = '../data/images/dummy.png'
-        m = create_grid_map(width,height,sym)
-        ul_lonlat = mapnik.Coord(142.30,-38.20)
-        lr_lonlat = mapnik.Coord(143.40,-38.80)
-        m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
-        grid = mapnik.Grid(m.width,m.height)
-        mapnik.render_layer(m,grid,layer=0,fields=['Name'])
+        m = create_grid_map(width, height, sym)
+        ul_lonlat = mapnik.Coord(142.30, -38.20)
+        lr_lonlat = mapnik.Coord(143.40, -38.80)
+        m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
+        grid = mapnik.Grid(m.width, m.height)
+        mapnik.render_layer(m, grid, layer=0, fields=['Name'])
         utf1 = grid.encode()
-        eq_(utf1,point_expected,show_grids('point-sym',utf1,point_expected))
+        eq_(utf1, point_expected, show_grids('point-sym', utf1, point_expected))
 
     test_point_symbolizer_grid.requires_data = True
 
@@ -313,19 +956,19 @@ if mapnik.has_grid_renderer():
     @raises(RuntimeError)
     def test_render_to_grid_multiple_times():
         # create map with two layers
-        m = mapnik.Map(256,256)
+        m = mapnik.Map(256, 256)
         s = mapnik.Style()
         r = mapnik.Rule()
         sym = mapnik.MarkersSymbolizer()
         sym.allow_overlap = True
         r.symbols.append(sym)
         s.rules.append(r)
-        m.append_style('points',s)
+        m.append_style('points', s)
 
         # NOTE: we use a csv datasource here
         # because the memorydatasource fails silently for
         # queries requesting fields that do not exist in the datasource
-        ds1 = mapnik.Datasource(**{"type":"csv","inline":'''
+        ds1 = mapnik.Datasource(**{"type": "csv", "inline": '''
           wkt,Name
           "POINT (143.10 -38.60)",South East'''})
         lyr1 = mapnik.Layer('One')
@@ -333,7 +976,7 @@ if mapnik.has_grid_renderer():
         lyr1.styles.append('points')
         m.layers.append(lyr1)
 
-        ds2 = mapnik.Datasource(**{"type":"csv","inline":'''
+        ds2 = mapnik.Datasource(**{"type": "csv", "inline": '''
           wkt,Value
           "POINT (142.48 -38.60)",South West'''})
         lyr2 = mapnik.Layer('Two')
@@ -341,14 +984,14 @@ if mapnik.has_grid_renderer():
         lyr2.styles.append('points')
         m.layers.append(lyr2)
 
-        ul_lonlat = mapnik.Coord(142.30,-38.20)
-        lr_lonlat = mapnik.Coord(143.40,-38.80)
-        m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
-        grid = mapnik.Grid(m.width,m.height)
-        mapnik.render_layer(m,grid,layer=0,fields=['Name'])
+        ul_lonlat = mapnik.Coord(142.30, -38.20)
+        lr_lonlat = mapnik.Coord(143.40, -38.80)
+        m.zoom_to_box(mapnik.Box2d(ul_lonlat, lr_lonlat))
+        grid = mapnik.Grid(m.width, m.height)
+        mapnik.render_layer(m, grid, layer=0, fields=['Name'])
         # should throw right here since Name will be a property now on the `grid` object
         # and it is not found on the second layer
-        mapnik.render_layer(m,grid,layer=1,fields=['Value'])
+        mapnik.render_layer(m, grid, layer=1, fields=['Value'])
         grid.encode()
 
 if __name__ == "__main__":
diff --git a/test/python_tests/render_test.py b/test/python_tests/render_test.py
index 197d010..6e1e3c8 100644
--- a/test/python_tests/render_test.py
+++ b/test/python_tests/render_test.py
@@ -1,77 +1,97 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,raises
+import os
+import sys
 import tempfile
-import os, mapnik
-from utilities import execution_path, run_all
+
+from nose.tools import eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_simplest_render():
     m = mapnik.Map(256, 256)
     im = mapnik.Image(m.width, m.height)
-    eq_(im.painted(),False)
-    eq_(im.is_solid(),True)
+    eq_(im.painted(), False)
+    eq_(im.is_solid(), True)
     mapnik.render(m, im)
-    eq_(im.painted(),False)
-    eq_(im.is_solid(),True)
+    eq_(im.painted(), False)
+    eq_(im.is_solid(), True)
     s = im.tostring()
-    eq_(s, 256 * 256 * '\x00\x00\x00\x00')
+    if PYTHON3:
+        eq_(s, 256 * 256 * b'\x00\x00\x00\x00')
+    else:
+        eq_(s, 256 * 256 * '\x00\x00\x00\x00')
+
 
 def test_render_image_to_string():
     im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('black'))
-    eq_(im.painted(),False)
-    eq_(im.is_solid(),True)
+    eq_(im.painted(), False)
+    eq_(im.is_solid(), True)
     s = im.tostring()
-    eq_(s, 256 * 256 * '\x00\x00\x00\xff')
+    if PYTHON3:
+        eq_(s, 256 * 256 * b'\x00\x00\x00\xff')
+    else:
+        eq_(s, 256 * 256 * '\x00\x00\x00\xff')
+
 
 def test_non_solid_image():
     im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('black'))
-    eq_(im.painted(),False)
-    eq_(im.is_solid(),True)
+    eq_(im.painted(), False)
+    eq_(im.is_solid(), True)
     # set one pixel to a different color
-    im.set_pixel(0,0,mapnik.Color('white'))
-    eq_(im.painted(),False)
-    eq_(im.is_solid(),False)
+    im.set_pixel(0, 0, mapnik.Color('white'))
+    eq_(im.painted(), False)
+    eq_(im.is_solid(), False)
+
 
 def test_non_solid_image_view():
     im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('black'))
-    view = im.view(0,0,256,256)
-    eq_(view.is_solid(),True)
+    view = im.view(0, 0, 256, 256)
+    eq_(view.is_solid(), True)
     # set one pixel to a different color
-    im.set_pixel(0,0,mapnik.Color('white'))
-    eq_(im.is_solid(),False)
+    im.set_pixel(0, 0, mapnik.Color('white'))
+    eq_(im.is_solid(), False)
     # view, since it is the exact dimensions of the image
     # should also be non-solid
-    eq_(view.is_solid(),False)
+    eq_(view.is_solid(), False)
     # but not a view that excludes the single diff pixel
-    view2 = im.view(1,1,256,256)
-    eq_(view2.is_solid(),True)
+    view2 = im.view(1, 1, 256, 256)
+    eq_(view2.is_solid(), True)
+
 
 def test_setting_alpha():
-    w,h = 256,256
-    im1 = mapnik.Image(w,h)
+    w, h = 256, 256
+    im1 = mapnik.Image(w, h)
     # white, half transparent
     c1 = mapnik.Color('rgba(255,255,255,.5)')
     im1.fill(c1)
-    eq_(im1.painted(),False)
-    eq_(im1.is_solid(),True)
+    eq_(im1.painted(), False)
+    eq_(im1.is_solid(), True)
     # pure white
-    im2 = mapnik.Image(w,h)
+    im2 = mapnik.Image(w, h)
     c2 = mapnik.Color('rgba(255,255,255,1)')
     im2.fill(c2)
-    im2.apply_opacity(c1.a/255.0)
-    eq_(im2.painted(),False)
-    eq_(im2.is_solid(),True)
+    im2.apply_opacity(c1.a / 255.0)
+    eq_(im2.painted(), False)
+    eq_(im2.is_solid(), True)
     eq_(len(im1.tostring('png32')), len(im2.tostring('png32')))
 
+
 def test_render_image_to_file():
     im = mapnik.Image(256, 256)
     im.fill(mapnik.Color('black'))
@@ -87,46 +107,52 @@ def test_render_image_to_file():
     else:
         return False
 
-def get_paired_images(w,h,mapfile):
+
+def get_paired_images(w, h, mapfile):
     tmp_map = 'tmp_map.xml'
-    m = mapnik.Map(w,h)
-    mapnik.load_map(m,mapfile)
-    im = mapnik.Image(w,h)
+    m = mapnik.Map(w, h)
+    mapnik.load_map(m, mapfile)
+    im = mapnik.Image(w, h)
     m.zoom_all()
-    mapnik.render(m,im)
-    mapnik.save_map(m,tmp_map)
-    m2 = mapnik.Map(w,h)
-    mapnik.load_map(m2,tmp_map)
-    im2 = mapnik.Image(w,h)
+    mapnik.render(m, im)
+    mapnik.save_map(m, tmp_map)
+    m2 = mapnik.Map(w, h)
+    mapnik.load_map(m2, tmp_map)
+    im2 = mapnik.Image(w, h)
     m2.zoom_all()
-    mapnik.render(m2,im2)
+    mapnik.render(m2, im2)
     os.remove(tmp_map)
-    return im,im2
+    return im, im2
+
 
 def test_render_from_serialization():
     try:
-        im,im2 = get_paired_images(100,100,'../data/good_maps/building_symbolizer.xml')
-        eq_(im.tostring('png32'),im2.tostring('png32'))
+        im, im2 = get_paired_images(
+            100, 100, '../data/good_maps/building_symbolizer.xml')
+        eq_(im.tostring('png32'), im2.tostring('png32'))
 
-        im,im2 = get_paired_images(100,100,'../data/good_maps/polygon_symbolizer.xml')
-        eq_(im.tostring('png32'),im2.tostring('png32'))
-    except RuntimeError, e:
+        im, im2 = get_paired_images(
+            100, 100, '../data/good_maps/polygon_symbolizer.xml')
+        eq_(im.tostring('png32'), im2.tostring('png32'))
+    except RuntimeError as e:
         # only test datasources that we have installed
         if not 'Could not create datasource' in str(e):
             raise RuntimeError(e)
 
+
 def test_render_points():
-    if not mapnik.has_cairo(): return
+    if not mapnik.has_cairo():
+        return
     # create and populate point datasource (WGS84 lat-lon coordinates)
     ds = mapnik.MemoryDatasource()
     context = mapnik.Context()
     context.push('Name')
-    f = mapnik.Feature(context,1)
+    f = mapnik.Feature(context, 1)
     f['Name'] = 'Westernmost Point'
     f.geometry = mapnik.Geometry.from_wkt('POINT (142.48 -38.38)')
     ds.add_feature(f)
 
-    f = mapnik.Feature(context,2)
+    f = mapnik.Feature(context, 2)
     f['Name'] = 'Southernmost Point'
     f.geometry = mapnik.Geometry.from_wkt('POINT (143.10 -38.60)')
     ds.add_feature(f)
@@ -138,46 +164,61 @@ def test_render_points():
     symb.allow_overlap = True
     r.symbols.append(symb)
     s.rules.append(r)
-    lyr = mapnik.Layer('Places','+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
+    lyr = mapnik.Layer(
+        'Places',
+        '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')
     lyr.datasource = ds
     lyr.styles.append('places_labels')
     # latlon bounding box corners
-    ul_lonlat = mapnik.Coord(142.30,-38.20)
-    lr_lonlat = mapnik.Coord(143.40,-38.80)
+    ul_lonlat = mapnik.Coord(142.30, -38.20)
+    lr_lonlat = mapnik.Coord(143.40, -38.80)
     # render for different projections
     projs = {
         'google': '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over',
         'latlon': '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
         'merc': '+proj=merc +datum=WGS84 +k=1.0 +units=m +over +no_defs',
         'utm': '+proj=utm +zone=54 +datum=WGS84'
-        }
-    for projdescr in projs.iterkeys():
+    }
+    for projdescr in projs:
         m = mapnik.Map(1000, 500, projs[projdescr])
-        m.append_style('places_labels',s)
+        m.append_style('places_labels', s)
         m.layers.append(lyr)
         dest_proj = mapnik.Projection(projs[projdescr])
         src_proj = mapnik.Projection('+init=epsg:4326')
-        tr = mapnik.ProjTransform(src_proj,dest_proj)
-        m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat,lr_lonlat)))
-        # Render to SVG so that it can be checked how many points are there with string comparison
-        svg_file = os.path.join(tempfile.gettempdir(), 'mapnik-render-points-%s.svg' % projdescr)
+        tr = mapnik.ProjTransform(src_proj, dest_proj)
+        m.zoom_to_box(tr.forward(mapnik.Box2d(ul_lonlat, lr_lonlat)))
+        # Render to SVG so that it can be checked how many points are there
+        # with string comparison
+        svg_file = os.path.join(
+            tempfile.gettempdir(),
+            'mapnik-render-points-%s.svg' %
+            projdescr)
         mapnik.render_to_file(m, svg_file)
         num_points_present = len(ds.all_features())
-        svg = open(svg_file,'r').read()
+        with open(svg_file, 'r') as f:
+            svg = f.read()
         num_points_rendered = svg.count('<image ')
-        eq_(num_points_present, num_points_rendered, "Not all points were rendered (%d instead of %d) at projection %s" % (num_points_rendered, num_points_present, projdescr))
+        eq_(
+            num_points_present,
+            num_points_rendered,
+            "Not all points were rendered (%d instead of %d) at projection %s" %
+            (num_points_rendered,
+             num_points_present,
+             projdescr))
+
 
 @raises(RuntimeError)
 def test_render_with_scale_factor_zero_throws():
-    m = mapnik.Map(256,256)
+    m = mapnik.Map(256, 256)
     im = mapnik.Image(256, 256)
-    mapnik.render(m,im,0.0)
+    mapnik.render(m, im, 0.0)
+
 
 def test_render_with_detector():
     ds = mapnik.MemoryDatasource()
     context = mapnik.Context()
-    geojson  = '{ "type": "Feature", "geometry": { "type": "Point", "coordinates": [ 0, 0 ] } }'
-    ds.add_feature(mapnik.Feature.from_geojson(geojson,context))
+    geojson = '{ "type": "Feature", "geometry": { "type": "Point", "coordinates": [ 0, 0 ] } }'
+    ds.add_feature(mapnik.Feature.from_geojson(geojson, context))
     s = mapnik.Style()
     r = mapnik.Rule()
     lyr = mapnik.Layer('point')
@@ -187,54 +228,61 @@ def test_render_with_detector():
     symb.allow_overlap = False
     r.symbols.append(symb)
     s.rules.append(r)
-    m = mapnik.Map(256,256)
-    m.append_style('point',s)
+    m = mapnik.Map(256, 256)
+    m.append_style('point', s)
     m.layers.append(lyr)
-    m.zoom_to_box(mapnik.Box2d(-180,-85,180,85))
+    m.zoom_to_box(mapnik.Box2d(-180, -85, 180, 85))
     im = mapnik.Image(256, 256)
-    mapnik.render(m,im)
+    mapnik.render(m, im)
     expected_file = './images/support/marker-in-center.png'
     actual_file = '/tmp/' + os.path.basename(expected_file)
-    #im.save(expected_file,'png8')
-    im.save(actual_file,'png8')
+    # im.save(expected_file,'png8')
+    im.save(actual_file, 'png8')
     actual = mapnik.Image.open(actual_file)
     expected = mapnik.Image.open(expected_file)
-    eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file))
+    eq_(actual.tostring('png32'),
+        expected.tostring('png32'),
+        'failed comparing actual (%s) and expected (%s)' % (actual_file,
+                                                            expected_file))
     # now render with a collision detector that should
     # block out the placement of this point
     detector = mapnik.LabelCollisionDetector(m)
-    eq_(detector.extent(),mapnik.Box2d(-0.0,-0.0,m.width,m.height))
-    eq_(detector.extent(),mapnik.Box2d(-0.0,-0.0,256.0,256.0))
-    eq_(detector.boxes(),[])
+    eq_(detector.extent(), mapnik.Box2d(-0.0, -0.0, m.width, m.height))
+    eq_(detector.extent(), mapnik.Box2d(-0.0, -0.0, 256.0, 256.0))
+    eq_(detector.boxes(), [])
     detector.insert(detector.extent())
-    eq_(detector.boxes(),[detector.extent()])
+    eq_(detector.boxes(), [detector.extent()])
     im2 = mapnik.Image(256, 256)
     mapnik.render_with_detector(m, im2, detector)
     expected_file_collision = './images/support/marker-in-center-not-placed.png'
-    #im2.save(expected_file_collision,'png8')
+    # im2.save(expected_file_collision,'png8')
     actual_file = '/tmp/' + os.path.basename(expected_file_collision)
-    im2.save(actual_file,'png8')
+    im2.save(actual_file, 'png8')
 
 
 if 'shape' in mapnik.DatasourceCache.plugin_names():
 
     def test_render_with_scale_factor():
-        m = mapnik.Map(256,256)
-        mapnik.load_map(m,'../data/good_maps/marker-text-line.xml')
+        m = mapnik.Map(256, 256)
+        mapnik.load_map(m, '../data/good_maps/marker-text-line.xml')
         m.zoom_all()
-        sizes = [.00001,.005,.1,.899,1,1.5,2,5,10,100]
+        sizes = [.00001, .005, .1, .899, 1, 1.5, 2, 5, 10, 100]
         for size in sizes:
             im = mapnik.Image(256, 256)
-            mapnik.render(m,im,size)
+            mapnik.render(m, im, size)
             expected_file = './images/support/marker-text-line-scale-factor-%s.png' % size
             actual_file = '/tmp/' + os.path.basename(expected_file)
-            im.save(actual_file,'png32')
+            im.save(actual_file, 'png32')
             if os.environ.get('UPDATE'):
-                im.save(expected_file,'png32')
-            # we save and re-open here so both png8 images are ready as full color png
+                im.save(expected_file, 'png32')
+            # we save and re-open here so both png8 images are ready as full
+            # color png
             actual = mapnik.Image.open(actual_file)
             expected = mapnik.Image.open(expected_file)
-            eq_(actual.tostring('png32'),expected.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual_file,expected_file))
+            eq_(actual.tostring('png32'),
+                expected.tostring('png32'),
+                'failed comparing actual (%s) and expected (%s)' % (actual_file,
+                                                                    expected_file))
 
 if __name__ == "__main__":
     setup()
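
The hunks above exercise two rendering entry points: mapnik.render_with_detector, which
takes a pre-populated mapnik.LabelCollisionDetector and suppresses any placement that
collides with boxes already inserted into it, and the scale-factor overload of
mapnik.render. A rough, self-contained sketch of both calls, assuming a hypothetical
stylesheet path rather than the test fixtures:

    import mapnik

    m = mapnik.Map(256, 256)
    mapnik.load_map(m, 'map.xml')       # hypothetical stylesheet
    m.zoom_all()

    # plain render, with an optional scale factor (e.g. 2.0 for hi-dpi output)
    im = mapnik.Image(m.width, m.height)
    mapnik.render(m, im, 2.0)

    # render again with a detector whose whole extent is already occupied,
    # so symbol/text placements are blocked
    detector = mapnik.LabelCollisionDetector(m)
    detector.insert(detector.extent())
    im2 = mapnik.Image(m.width, m.height)
    mapnik.render_with_detector(m, im2, detector)
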
diff --git a/test/python_tests/reprojection_test.py b/test/python_tests/reprojection_test.py
index 1382db5..50236b8 100644
--- a/test/python_tests/reprojection_test.py
+++ b/test/python_tests/reprojection_test.py
@@ -1,9 +1,13 @@
-#coding=utf8
+# coding=utf8
 import os
-import mapnik
-from utilities import execution_path, run_all
+
 from nose.tools import eq_
 
+import mapnik
+
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
@@ -13,79 +17,92 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
 
     #@raises(RuntimeError)
     def test_zoom_all_will_fail():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
         m.zoom_all()
 
     def test_zoom_all_will_work_with_max_extent():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
-        merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+        merc_bounds = mapnik.Box2d(-20037508.34, -
+                                   20037508.34, 20037508.34, 20037508.34)
         m.maximum_extent = merc_bounds
         m.zoom_all()
         # note - fixAspectRatio is being called, then re-clipping to maxextent
         # which makes this hard to predict
-        #eq_(m.envelope(),merc_bounds)
+        # eq_(m.envelope(),merc_bounds)
 
         #m = mapnik.Map(512,512)
-        #mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
+        # mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
         #merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
-        #m.zoom_to_box(merc_bounds)
-        #eq_(m.envelope(),merc_bounds)
-
+        # m.zoom_to_box(merc_bounds)
+        # eq_(m.envelope(),merc_bounds)
 
     def test_visual_zoom_all_rendering1():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
-        merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/wgs842merc_reprojection.xml')
+        merc_bounds = mapnik.Box2d(-20037508.34, -
+                                   20037508.34, 20037508.34, 20037508.34)
         m.maximum_extent = merc_bounds
         m.zoom_all()
-        im = mapnik.Image(512,512)
-        mapnik.render(m,im)
+        im = mapnik.Image(512, 512)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-wgs842merc-reprojection-render.png'
         expected = 'images/support/mapnik-wgs842merc-reprojection-render.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'test/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'test/python_tests/' + expected))
 
     def test_visual_zoom_all_rendering2():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/merc2wgs84_reprojection.xml')
         m.zoom_all()
-        im = mapnik.Image(512,512)
-        mapnik.render(m,im)
+        im = mapnik.Image(512, 512)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-merc2wgs84-reprojection-render.png'
         expected = 'images/support/mapnik-merc2wgs84-reprojection-render.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'test/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'test/python_tests/' + expected))
 
     # maximum-extent read from map.xml
     def test_visual_zoom_all_rendering3():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/bounds_clipping.xml')
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/bounds_clipping.xml')
         m.zoom_all()
-        im = mapnik.Image(512,512)
-        mapnik.render(m,im)
+        im = mapnik.Image(512, 512)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-merc2merc-reprojection-render1.png'
         expected = 'images/support/mapnik-merc2merc-reprojection-render1.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'test/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'test/python_tests/' + expected))
 
     # no maximum-extent
     def test_visual_zoom_all_rendering4():
-        m = mapnik.Map(512,512)
-        mapnik.load_map(m,'../data/good_maps/bounds_clipping.xml')
+        m = mapnik.Map(512, 512)
+        mapnik.load_map(m, '../data/good_maps/bounds_clipping.xml')
         m.maximum_extent = None
         m.zoom_all()
-        im = mapnik.Image(512,512)
-        mapnik.render(m,im)
+        im = mapnik.Image(512, 512)
+        mapnik.render(m, im)
         actual = '/tmp/mapnik-merc2merc-reprojection-render2.png'
         expected = 'images/support/mapnik-merc2merc-reprojection-render2.png'
-        im.save(actual,'png32')
+        im.save(actual, 'png32')
         expected_im = mapnik.Image.open(expected)
-        eq_(im.tostring('png32'),expected_im.tostring('png32'), 'failed comparing actual (%s) and expected (%s)' % (actual,'test/python_tests/'+ expected))
+        eq_(im.tostring('png32'),
+            expected_im.tostring('png32'),
+            'failed comparing actual (%s) and expected (%s)' % (actual,
+                                                                'test/python_tests/' + expected))
 
 if __name__ == "__main__":
     setup()
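
The reprojection tests above rely on one detail worth calling out: when layers are
reprojected on the fly, Map.zoom_all() can fail unless a maximum_extent is set, so the
tests clamp the map to the Web Mercator world bounds first. A minimal sketch of that
pattern, with a hypothetical stylesheet path:

    import mapnik

    m = mapnik.Map(512, 512)
    mapnik.load_map(m, 'wgs842merc_reprojection.xml')   # hypothetical path
    m.maximum_extent = mapnik.Box2d(-20037508.34, -20037508.34,
                                    20037508.34, 20037508.34)
    m.zoom_all()                                        # safe once the extent is clamped
    im = mapnik.Image(512, 512)
    mapnik.render(m, im)
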
diff --git a/test/python_tests/save_map_test.py b/test/python_tests/save_map_test.py
index d7c1f03..8e7afd2 100644
--- a/test/python_tests/save_map_test.py
+++ b/test/python_tests/save_map_test.py
@@ -1,50 +1,66 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_
-from utilities import execution_path, run_all
+import glob
+import os
 import tempfile
 
-import os, glob, mapnik
+from nose.tools import eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 default_logging_severity = mapnik.logger.get_severity()
 
+
 def setup():
     # make the tests silent to suppress unsupported params from harfbuzz tests
     # TODO: remove this after harfbuzz branch merges
-    mapnik.logger.set_severity(mapnik.severity_type.None)
+    mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def teardown():
     mapnik.logger.set_severity(default_logging_severity)
 
+
 def compare_map(xml):
     m = mapnik.Map(256, 256)
     absolute_base = os.path.abspath(os.path.dirname(xml))
     try:
         mapnik.load_map(m, xml, False, absolute_base)
-    except RuntimeError, e:
+    except RuntimeError as e:
         # only test datasources that we have installed
         if not 'Could not create datasource' in str(e) \
            and not 'could not connect' in str(e):
             raise RuntimeError(str(e))
         return
-    (handle, test_map) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map1-')
+    (handle, test_map) = tempfile.mkstemp(
+        suffix='.xml', prefix='mapnik-temp-map1-')
     os.close(handle)
-    (handle, test_map2) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map2-')
+    (handle, test_map2) = tempfile.mkstemp(
+        suffix='.xml', prefix='mapnik-temp-map2-')
     os.close(handle)
     if os.path.exists(test_map):
         os.remove(test_map)
     mapnik.save_map(m, test_map)
     new_map = mapnik.Map(256, 256)
-    mapnik.load_map(new_map, test_map,False,absolute_base)
-    open(test_map2,'w').write(mapnik.save_map_to_string(new_map))
-    diff = ' diff -u %s %s' % (os.path.abspath(test_map),os.path.abspath(test_map2))
+    mapnik.load_map(new_map, test_map, False, absolute_base)
+    with open(test_map2, 'w') as f:
+        f.write(mapnik.save_map_to_string(new_map))
+    diff = ' diff -u %s %s' % (os.path.abspath(test_map),
+                               os.path.abspath(test_map2))
     try:
-        eq_(open(test_map).read(),open(test_map2).read())
-    except AssertionError, e:
-        raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (xml,diff))
+        with open(test_map) as f1:
+            with open(test_map2) as f2:
+                eq_(f1.read(), f2.read())
+    except AssertionError as e:
+        raise AssertionError(
+            'serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' %
+            (xml, diff))
 
     if os.path.exists(test_map):
         os.remove(test_map)
@@ -52,11 +68,12 @@ def compare_map(xml):
         # Fail, the map wasn't written
         return False
 
+
 def test_compare_map():
     good_maps = glob.glob("../data/good_maps/*.xml")
     good_maps = [os.path.normpath(p) for p in good_maps]
     # remove one map that round trips CDATA differently, but this is okay
-    ignorable = os.path.join('..','data','good_maps','empty_parameter2.xml')
+    ignorable = os.path.join('..', 'data', 'good_maps', 'empty_parameter2.xml')
     good_maps.remove(ignorable)
     for m in good_maps:
         compare_map(m)
@@ -65,6 +82,8 @@ def test_compare_map():
         compare_map(m)
 
 # TODO - enforce that original xml does not equal first saved xml
+
+
 def test_compare_map_deprecations():
     dep = glob.glob("../data/deprecated_maps/*.xml")
     dep = [os.path.normpath(p) for p in dep]
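
The compare_map() helper above checks that serialization is stable: a map loaded from
XML, saved, reloaded, and saved again should produce identical output. A condensed
sketch of that round trip, assuming a hypothetical input stylesheet:

    import mapnik

    m = mapnik.Map(256, 256)
    mapnik.load_map(m, 'style.xml')                # hypothetical input
    mapnik.save_map(m, '/tmp/roundtrip.xml')       # first serialization

    m2 = mapnik.Map(256, 256)
    mapnik.load_map(m2, '/tmp/roundtrip.xml')
    with open('/tmp/roundtrip.xml') as f:
        assert f.read() == mapnik.save_map_to_string(m2)   # stable after one round trip
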
diff --git a/test/python_tests/shapefile_test.py b/test/python_tests/shapefile_test.py
index eccf30c..f321e61 100644
--- a/test/python_tests/shapefile_test.py
+++ b/test/python_tests/shapefile_test.py
@@ -1,9 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from nose.tools import eq_,assert_almost_equal,raises
-from utilities import execution_path, run_all
-import os, mapnik
+import os
+
+from nose.tools import assert_almost_equal, eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -41,9 +46,21 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
     @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
         ds = mapnik.Shapefile(file='../data/shp/world_merc')
-        eq_(len(ds.fields()),11)
-        eq_(ds.fields(),['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME', 'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
-        eq_(ds.field_types(),['str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float'])
+        eq_(len(ds.fields()), 11)
+        eq_(ds.fields(), ['FIPS', 'ISO2', 'ISO3', 'UN', 'NAME',
+                          'AREA', 'POP2005', 'REGION', 'SUBREGION', 'LON', 'LAT'])
+        eq_(ds.field_types(),
+            ['str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
@@ -53,60 +70,79 @@ if 'shape' in mapnik.DatasourceCache.plugin_names():
 
     def test_dbf_logical_field_is_boolean():
         ds = mapnik.Shapefile(file='../data/shp/long_lat')
-        eq_(len(ds.fields()),7)
-        eq_(ds.fields(),['LONG', 'LAT', 'LOGICAL_TR', 'LOGICAL_FA', 'CHARACTER', 'NUMERIC', 'DATE'])
-        eq_(ds.field_types(),['str', 'str', 'bool', 'bool', 'str', 'float', 'str'])
+        eq_(len(ds.fields()), 7)
+        eq_(ds.fields(), ['LONG', 'LAT', 'LOGICAL_TR',
+                          'LOGICAL_FA', 'CHARACTER', 'NUMERIC', 'DATE'])
+        eq_(ds.field_types(), ['str', 'str',
+                               'bool', 'bool', 'str', 'float', 'str'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
         feat = ds.all_features()[0]
-        eq_(feat.id(),1)
-        eq_(feat['LONG'],'0')
-        eq_(feat['LAT'],'0')
-        eq_(feat['LOGICAL_TR'],True)
-        eq_(feat['LOGICAL_FA'],False)
-        eq_(feat['CHARACTER'],'254')
-        eq_(feat['NUMERIC'],32)
-        eq_(feat['DATE'],'20121202')
+        eq_(feat.id(), 1)
+        eq_(feat['LONG'], '0')
+        eq_(feat['LAT'], '0')
+        eq_(feat['LOGICAL_TR'], True)
+        eq_(feat['LOGICAL_FA'], False)
+        eq_(feat['CHARACTER'], '254')
+        eq_(feat['NUMERIC'], 32)
+        eq_(feat['DATE'], '20121202')
 
     # created by hand in qgis 1.8.0
     def test_shapefile_point2d_from_qgis():
         ds = mapnik.Shapefile(file='../data/shp/points/qgis.shp')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['id','name'])
-        eq_(ds.field_types(),['int','str'])
-        eq_(len(ds.all_features()),3)
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['id', 'name'])
+        eq_(ds.field_types(), ['int', 'str'])
+        eq_(len(ds.all_features()), 3)
 
-    # ogr2ogr tests/data/shp/3dpoint/ogr_zfield.shp tests/data/shp/3dpoint/qgis.shp -zfield id
+    # ogr2ogr tests/data/shp/3dpoint/ogr_zfield.shp
+    # tests/data/shp/3dpoint/qgis.shp -zfield id
     def test_shapefile_point_z_from_qgis():
         ds = mapnik.Shapefile(file='../data/shp/points/ogr_zfield.shp')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['id','name'])
-        eq_(ds.field_types(),['int','str'])
-        eq_(len(ds.all_features()),3)
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['id', 'name'])
+        eq_(ds.field_types(), ['int', 'str'])
+        eq_(len(ds.all_features()), 3)
 
     def test_shapefile_multipoint_from_qgis():
         ds = mapnik.Shapefile(file='../data/shp/points/qgis_multi.shp')
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['id','name'])
-        eq_(ds.field_types(),['int','str'])
-        eq_(len(ds.all_features()),1)
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['id', 'name'])
+        eq_(ds.field_types(), ['int', 'str'])
+        eq_(len(ds.all_features()), 1)
 
     # pointzm from arcinfo
     def test_shapefile_point_zm_from_arcgis():
         ds = mapnik.Shapefile(file='../data/shp/points/poi.shp')
-        eq_(len(ds.fields()),7)
-        eq_(ds.fields(),['interst_id', 'state_d', 'cnty_name', 'latitude', 'longitude', 'Name', 'Website'])
-        eq_(ds.field_types(),['str', 'str', 'str', 'float', 'float', 'str', 'str'])
-        eq_(len(ds.all_features()),17)
+        eq_(len(ds.fields()), 7)
+        eq_(ds.fields(),
+            ['interst_id',
+             'state_d',
+             'cnty_name',
+             'latitude',
+             'longitude',
+             'Name',
+             'Website'])
+        eq_(ds.field_types(), ['str', 'str',
+                               'str', 'float', 'float', 'str', 'str'])
+        eq_(len(ds.all_features()), 17)
 
     # copy of the above with ogr2ogr that makes m record 14 instead of 18
     def test_shapefile_point_zm_from_ogr():
         ds = mapnik.Shapefile(file='../data/shp/points/poi_ogr.shp')
-        eq_(len(ds.fields()),7)
-        eq_(ds.fields(),['interst_id', 'state_d', 'cnty_name', 'latitude', 'longitude', 'Name', 'Website'])
-        eq_(ds.field_types(),['str', 'str', 'str', 'float', 'float', 'str', 'str'])
-        eq_(len(ds.all_features()),17)
+        eq_(len(ds.fields()), 7)
+        eq_(ds.fields(),
+            ['interst_id',
+             'state_d',
+             'cnty_name',
+             'latitude',
+             'longitude',
+             'Name',
+             'Website'])
+        eq_(ds.field_types(), ['str', 'str',
+                               'str', 'float', 'float', 'str', 'str'])
+        eq_(len(ds.all_features()), 17)
 
 if __name__ == "__main__":
     setup()
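
The shapefile tests above all follow the same inspection pattern: open the datasource,
check ds.fields() and ds.field_types() (parallel lists of column names and 'str' /
'int' / 'float' / 'bool' type tags), then build a query over the full envelope. A short
sketch using the world_merc fixture referenced in the tests:

    import mapnik

    ds = mapnik.Shapefile(file='../data/shp/world_merc')
    print(ds.fields())        # e.g. ['FIPS', 'ISO2', 'ISO3', ...]
    print(ds.field_types())   # e.g. ['str', 'str', 'str', 'int', ...]

    query = mapnik.Query(ds.envelope())
    for fld in ds.fields():
        query.add_property_name(fld)   # an unknown field name raises RuntimeError
    features = ds.features(query)
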
diff --git a/test/python_tests/shapeindex_test.py b/test/python_tests/shapeindex_test.py
index 4de19a5..53aa04d 100644
--- a/test/python_tests/shapeindex_test.py
+++ b/test/python_tests/shapeindex_test.py
@@ -1,50 +1,59 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_
-from utilities import execution_path, run_all
-from subprocess import Popen, PIPE
-import shutil
-import os
 import fnmatch
+import os
+import shutil
+from subprocess import PIPE, Popen
+
+from nose.tools import eq_
+
 import mapnik
 
+from .utilities import execution_path, run_all
+
+
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def test_shapeindex():
     # first copy shapefiles to tmp directory
     source_dir = '../data/shp/'
     working_dir = '/tmp/mapnik-shp-tmp/'
     if os.path.exists(working_dir):
-      shutil.rmtree(working_dir)
-    shutil.copytree(source_dir,working_dir)
+        shutil.rmtree(working_dir)
+    shutil.copytree(source_dir, working_dir)
     matches = []
     for root, dirnames, filenames in os.walk('%s' % source_dir):
-      for filename in fnmatch.filter(filenames, '*.shp'):
-          matches.append(os.path.join(root, filename))
+        for filename in fnmatch.filter(filenames, '*.shp'):
+            matches.append(os.path.join(root, filename))
     for shp in matches:
-      source_file = os.path.join(source_dir,os.path.relpath(shp,source_dir))
-      dest_file = os.path.join(working_dir,os.path.relpath(shp,source_dir))
-      ds = mapnik.Shapefile(file=source_file)
-      count = 0;
-      fs = ds.featureset()
-      try:
-        while (fs.next()):
-          count = count+1
-      except StopIteration:
-        pass
-      stdin, stderr = Popen('shapeindex %s' % dest_file, shell=True, stdout=PIPE, stderr=PIPE).communicate()
-      ds2 = mapnik.Shapefile(file=dest_file)
-      count2 = 0;
-      fs = ds.featureset()
-      try:
-        while (fs.next()):
-          count2 = count2+1
-      except StopIteration:
-        pass
-      eq_(count,count2)
+        source_file = os.path.join(
+            source_dir, os.path.relpath(
+                shp, source_dir))
+        dest_file = os.path.join(working_dir, os.path.relpath(shp, source_dir))
+        ds = mapnik.Shapefile(file=source_file)
+        count = 0
+        fs = ds.featureset()
+        try:
+            while (fs.next()):
+                count = count + 1
+        except StopIteration:
+            pass
+        stdin, stderr = Popen(
+            'shapeindex %s' %
+            dest_file, shell=True, stdout=PIPE, stderr=PIPE).communicate()
+        ds2 = mapnik.Shapefile(file=dest_file)
+        count2 = 0
+        fs = ds2.featureset()
+        try:
+            while (fs.next()):
+                count2 = count2 + 1
+        except StopIteration:
+            pass
+        eq_(count, count2)
 
 if __name__ == "__main__":
     setup()
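
The shapeindex test copies the shapefiles to /tmp, counts features by iterating a
featureset until StopIteration, runs the external shapeindex tool on the copy, and
checks that the count is unchanged. A trimmed sketch of that flow for a single,
hypothetical copied file:

    from subprocess import PIPE, Popen

    import mapnik

    def count_features(path):
        ds = mapnik.Shapefile(file=path)
        fs = ds.featureset()
        count = 0
        try:
            while fs.next():
                count += 1
        except StopIteration:
            pass
        return count

    shp = '/tmp/mapnik-shp-tmp/world_merc.shp'          # hypothetical copied path
    before = count_features(shp)
    Popen('shapeindex %s' % shp, shell=True,
          stdout=PIPE, stderr=PIPE).communicate()       # builds the .index sidecar
    after = count_features(shp)
    assert before == after                              # indexing must not change counts
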
diff --git a/test/python_tests/sqlite_rtree_test.py b/test/python_tests/sqlite_rtree_test.py
index 3036e29..9daaf78 100644
--- a/test/python_tests/sqlite_rtree_test.py
+++ b/test/python_tests/sqlite_rtree_test.py
@@ -1,11 +1,17 @@
 #!/usr/bin/env python
+import os
+import sqlite3
+import sys
+import threading
 
 from nose.tools import eq_
-from utilities import execution_path, run_all
-import threading
 
-import os, mapnik
-import sqlite3
+import mapnik
+
+from .utilities import execution_path, run_all
+
+PYTHON3 = sys.version_info[0] == 3
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -15,8 +21,9 @@ def setup():
 NUM_THREADS = 10
 TOTAL = 245
 
-def create_ds(test_db,table):
-    ds = mapnik.SQLite(file=test_db,table=table)
+
+def create_ds(test_db, table):
+    ds = mapnik.SQLite(file=test_db, table=table)
     ds.all_features()
     del ds
 
@@ -24,7 +31,7 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
 
     def test_rtree_creation():
         test_db = '../data/sqlite/world.sqlite'
-        index = test_db +'.index'
+        index = test_db + '.index'
         table = 'world_merc'
 
         if os.path.exists(index):
@@ -32,20 +39,23 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
 
         threads = []
         for i in range(NUM_THREADS):
-            t = threading.Thread(target=create_ds,args=(test_db,table))
+            t = threading.Thread(target=create_ds, args=(test_db, table))
             t.start()
             threads.append(t)
 
         for i in threads:
             i.join()
 
-        eq_(os.path.exists(index),True)
+        eq_(os.path.exists(index), True)
         conn = sqlite3.connect(index)
         cur = conn.cursor()
         try:
-            cur.execute("Select count(*) from idx_%s_GEOMETRY" % table.replace("'",""))
+            cur.execute(
+                "Select count(*) from idx_%s_GEOMETRY" %
+                table.replace(
+                    "'", ""))
             conn.commit()
-            eq_(cur.fetchone()[0],TOTAL)
+            eq_(cur.fetchone()[0], TOTAL)
         except sqlite3.OperationalError:
             # don't worry about testing # of index records if
             # python's sqlite module does not support rtree
@@ -53,31 +63,31 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         cur.close()
         conn.close()
 
-        ds = mapnik.SQLite(file=test_db,table=table)
+        ds = mapnik.SQLite(file=test_db, table=table)
         fs = ds.all_features()
         del ds
-        eq_(len(fs),TOTAL)
+        eq_(len(fs), TOTAL)
         os.unlink(index)
-        ds = mapnik.SQLite(file=test_db,table=table,use_spatial_index=False)
+        ds = mapnik.SQLite(file=test_db, table=table, use_spatial_index=False)
         fs = ds.all_features()
         del ds
-        eq_(len(fs),TOTAL)
-        eq_(os.path.exists(index),False)
+        eq_(len(fs), TOTAL)
+        eq_(os.path.exists(index), False)
 
-        ds = mapnik.SQLite(file=test_db,table=table,use_spatial_index=True)
+        ds = mapnik.SQLite(file=test_db, table=table, use_spatial_index=True)
         fs = ds.all_features()
-        #TODO - this loop is not releasing something
+        # TODO - this loop is not releasing something
         # because it causes the unlink below to fail on windows
         # as the file is still open
-        #for feat in fs:
+        # for feat in fs:
         #    query = mapnik.Query(feat.envelope())
         #    selected = ds.features(query)
         #    eq_(len(selected.features)>=1,True)
         del ds
 
-        eq_(os.path.exists(index),True)
+        eq_(os.path.exists(index), True)
         os.unlink(index)
-    
+
     test_rtree_creation.requires_data = True
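
Two behaviours of the SQLite plugin are being pinned down here: instantiating the
datasource builds an rtree index as a side file next to the database (world.sqlite.index
above), and passing use_spatial_index=False skips that step. A minimal sketch:

    import mapnik

    test_db = '../data/sqlite/world.sqlite'

    ds = mapnik.SQLite(file=test_db, table='world_merc')   # creates world.sqlite.index
    ds.all_features()

    ds = mapnik.SQLite(file=test_db, table='world_merc',
                       use_spatial_index=False)             # no .index file is written
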
 
     def test_geometry_round_trip():
@@ -107,42 +117,48 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         conn.commit()
         cur.close()
 
-        # add a point as wkb (using mapnik) to match how an ogr created db looks
-        x = -122 # longitude
-        y = 48 # latitude
-        wkt = 'POINT(%s %s)' % (x,y)
-        # little endian wkb (mapnik will auto-detect and ready either little or big endian (XDR))
+        # add a point as wkb (using mapnik) to match how an ogr created db
+        # looks
+        x = -122  # longitude
+        y = 48  # latitude
+        wkt = 'POINT(%s %s)' % (x, y)
+        # little endian wkb (mapnik will auto-detect and read either little or
+        # big endian (XDR))
         wkb = mapnik.Geometry.from_wkt(wkt).to_wkb(mapnik.wkbByteOrder.NDR)
-        values = (None,sqlite3.Binary(wkb),"test point")
+        values = (None, sqlite3.Binary(wkb), "test point")
         cur = conn.cursor()
-        cur.execute('''INSERT into "point_table" (id,geometry,name) values (?,?,?)''',values)
+        cur.execute(
+            '''INSERT into "point_table" (id,geometry,name) values (?,?,?)''',
+            values)
         conn.commit()
         cur.close()
         conn.close()
 
-        def make_wkb_point(x,y):
+        def make_wkb_point(x, y):
             import struct
-            byteorder = 1; # little endian
+            byteorder = 1  # little endian
             endianess = ''
             if byteorder == 1:
-               endianess = '<'
+                endianess = '<'
             else:
-               endianess = '>'
-            geom_type = 1; # for a point
-            return struct.pack('%sbldd' % endianess, byteorder, geom_type, x, y)
+                endianess = '>'
+            geom_type = 1  # for a point
+            return struct.pack('%sbldd' % endianess,
+                               byteorder, geom_type, x, y)
 
         # confirm the wkb matches a manually formed wkb
-        wkb2 = make_wkb_point(x,y)
-        eq_(wkb,wkb2)
+        wkb2 = make_wkb_point(x, y)
+        eq_(wkb, wkb2)
 
         # ensure we can read this data back out properly with mapnik
-        ds = mapnik.Datasource(**{'type':'sqlite','file':test_db, 'table':'point_table'})
+        ds = mapnik.Datasource(
+            **{'type': 'sqlite', 'file': test_db, 'table': 'point_table'})
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat['name'],'test point')
-        geom = feat.geometry;
-        eq_(geom.to_wkt(),'POINT(-122 48)')
+        eq_(feat.id(), 1)
+        eq_(feat['name'], 'test point')
+        geom = feat.geometry
+        eq_(geom.to_wkt(), 'POINT(-122 48)')
         del ds
 
         # ensure it matches data read with just sqlite
@@ -153,13 +169,15 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         result = cur.fetchone()
         cur.close()
         feat_id = result[0]
-        eq_(feat_id,1)
+        eq_(feat_id, 1)
         name = result[2]
-        eq_(name,'test point')
+        eq_(name, 'test point')
         geom_wkb_blob = result[1]
-        eq_(str(geom_wkb_blob),geom.to_wkb(mapnik.wkbByteOrder.NDR))
-        new_geom = mapnik.Geometry.from_wkb(str(geom_wkb_blob))
-        eq_(new_geom.to_wkt(),geom.to_wkt())
+        if not PYTHON3:
+            geom_wkb_blob = str(geom_wkb_blob)
+        eq_(geom_wkb_blob, geom.to_wkb(mapnik.wkbByteOrder.NDR))
+        new_geom = mapnik.Geometry.from_wkb(geom_wkb_blob)
+        eq_(new_geom.to_wkt(), geom.to_wkt())
         conn.close()
         os.unlink(test_db)
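
The geometry round trip above is worth spelling out, since it pins down Mapnik's WKB
conventions: to_wkb(mapnik.wkbByteOrder.NDR) emits little-endian WKB, whose layout for a
point is a 1-byte byte-order flag, a 32-bit geometry type, and two doubles. A compact
sketch mirroring the test:

    import struct

    import mapnik

    wkb = mapnik.Geometry.from_wkt('POINT(-122 48)').to_wkb(mapnik.wkbByteOrder.NDR)

    # the same 21 bytes packed by hand (byte order 1 = little endian, type 1 = point)
    assert wkb == struct.pack('<bldd', 1, 1, -122, 48)

    # and back again
    assert mapnik.Geometry.from_wkb(wkb).to_wkt() == 'POINT(-122 48)'
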
 
diff --git a/test/python_tests/sqlite_test.py b/test/python_tests/sqlite_test.py
index 69b8a6d..3d08d87 100644
--- a/test/python_tests/sqlite_test.py
+++ b/test/python_tests/sqlite_test.py
@@ -1,16 +1,20 @@
 #!/usr/bin/env python
 
-from nose.tools import eq_, raises
-from utilities import execution_path, run_all
 import os
+
+from nose.tools import eq_, raises
+
 import mapnik
 
+from .utilities import execution_path, run_all
+
 
 def setup():
     # All of the paths used are relative, if we run the tests
     # from another directory we need to chdir()
     os.chdir(execution_path('.'))
 
+
 def teardown():
     index = '../data/sqlite/world.sqlite.index'
     if os.path.exists(index):
@@ -22,154 +26,270 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         # The point table and index is in the qgis_spatiallite.sqlite
         # database.  If either is not found, then this fails
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='point',
-            attachdb='scratch@qgis_spatiallite.sqlite'
-            )
+                           table='point',
+                           attachdb='scratch@qgis_spatiallite.sqlite'
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['pkuid'],1)
+        eq_(feature['pkuid'], 1)
 
     test_attachdb_with_relative_file.requires_data = True
 
     def test_attachdb_with_multiple_files():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='attachedtest',
-            attachdb='scratch1@:memory:,scratch2@:memory:',
-            initdb='''
+                           table='attachedtest',
+                           attachdb='scratch1@:memory:,scratch2@:memory:',
+                           initdb='''
                 create table scratch1.attachedtest (the_geom);
                 create virtual table scratch2.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
                 insert into scratch2.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
                 '''
-            )
+                           )
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
         # the above should not throw but will result in no features
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_attachdb_with_multiple_files.requires_data = True
 
     def test_attachdb_with_absolute_file():
         # The point table and index is in the qgis_spatiallite.sqlite
         # database.  If either is not found, then this fails
         ds = mapnik.SQLite(file=os.getcwd() + '/../data/sqlite/world.sqlite',
-            table='point',
-            attachdb='scratch@qgis_spatiallite.sqlite'
-            )
+                           table='point',
+                           attachdb='scratch@qgis_spatiallite.sqlite'
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['pkuid'],1)
+        eq_(feature['pkuid'], 1)
 
     test_attachdb_with_absolute_file.requires_data = True
 
     def test_attachdb_with_index():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='attachedtest',
-            attachdb='scratch@:memory:',
-            initdb='''
+                           table='attachedtest',
+                           attachdb='scratch@:memory:',
+                           initdb='''
                 create table scratch.attachedtest (the_geom);
                 create virtual table scratch.idx_attachedtest_the_geom using rtree(pkid,xmin,xmax,ymin,ymax);
                 insert into scratch.idx_attachedtest_the_geom values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
                 '''
-            )
+                           )
 
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_attachdb_with_index.requires_data = True
 
     def test_attachdb_with_explicit_index():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='attachedtest',
-            index_table='myindex',
-            attachdb='scratch@:memory:',
-            initdb='''
+                           table='attachedtest',
+                           index_table='myindex',
+                           attachdb='scratch@:memory:',
+                           initdb='''
                 create table scratch.attachedtest (the_geom);
                 create virtual table scratch.myindex using rtree(pkid,xmin,xmax,ymin,ymax);
                 insert into scratch.myindex values (1,-7799225.5,-7778571.0,1393264.125,1417719.375);
                 '''
-            )
+                           )
         fs = ds.featureset()
         feature = None
         try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_attachdb_with_explicit_index.requires_data = True
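
The attachdb tests above combine three options: attachdb attaches extra databases using
an alias@file syntax (":memory:" works as a file), initdb runs arbitrary SQL before the
datasource is used, and index_table points the plugin at a specific rtree table. A
minimal sketch of attaching a scratch in-memory database, as the tests do:

    import mapnik

    ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
                       table='attachedtest',
                       attachdb='scratch@:memory:',
                       initdb='''
                           create table scratch.attachedtest (the_geom);
                           create virtual table scratch.idx_attachedtest_the_geom
                               using rtree(pkid,xmin,xmax,ymin,ymax);
                       ''')
    fs = ds.featureset()   # the attached table is empty, so fs.next() raises StopIteration
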
 
     def test_attachdb_with_sql_join():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
-            attachdb='busines@business.sqlite'
-            )
-        eq_(len(ds.fields()),29)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int'])
+                           table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
+                           attachdb='busines@business.sqlite'
+                           )
+        eq_(len(ds.fields()), 29)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat',
+             'ISO3:1',
+             '1995',
+             '1996',
+             '1997',
+             '1998',
+             '1999',
+             '2000',
+             '2001',
+             '2002',
+             '2003',
+             '2004',
+             '2005',
+             '2006',
+             '2007',
+             '2008',
+             '2009',
+             '2010'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int'])
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature.id(),1)
+        eq_(feature.id(), 1)
         expected = {
-          1995:0,
-          1996:0,
-          1997:0,
-          1998:0,
-          1999:0,
-          2000:0,
-          2001:0,
-          2002:0,
-          2003:0,
-          2004:0,
-          2005:0,
-          2006:0,
-          2007:0,
-          2008:0,
-          2009:0,
-          2010:0,
-          # this appears to be sqlites way of
-          # automatically handling clashing column names
-          'ISO3:1':'ATG',
-          'OGC_FID':1,
-          'area':44,
-          'fips':u'AC',
-          'iso2':u'AG',
-          'iso3':u'ATG',
-          'lat':17.078,
-          'lon':-61.783,
-          'name':u'Antigua and Barbuda',
-          'pop2005':83039,
-          'region':19,
-          'subregion':29,
-          'un':28
+            1995: 0,
+            1996: 0,
+            1997: 0,
+            1998: 0,
+            1999: 0,
+            2000: 0,
+            2001: 0,
+            2002: 0,
+            2003: 0,
+            2004: 0,
+            2005: 0,
+            2006: 0,
+            2007: 0,
+            2008: 0,
+            2009: 0,
+            2010: 0,
+            # this appears to be sqlite's way of
+            # automatically handling clashing column names
+            'ISO3:1': 'ATG',
+            'OGC_FID': 1,
+            'area': 44,
+            'fips': u'AC',
+            'iso2': u'AG',
+            'iso3': u'ATG',
+            'lat': 17.078,
+            'lon': -61.783,
+            'name': u'Antigua and Barbuda',
+            'pop2005': 83039,
+            'region': 19,
+            'subregion': 29,
+            'un': 28
         }
-        for k,v in expected.items():
+        for k, v in expected.items():
             try:
-                eq_(feature[str(k)],v)
+                eq_(feature[str(k)], v)
             except:
                 #import pdb;pdb.set_trace()
-                print 'invalid key/v %s/%s for: %s' % (k,v,feature)
-    
+                print('invalid key/v %s/%s for: %s' % (k, v, feature))
+
     test_attachdb_with_sql_join.requires_data = True
 
     def test_attachdb_with_sql_join_count():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
-            attachdb='busines@business.sqlite'
-            )
-        eq_(len(ds.fields()),29)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int'])
-        eq_(len(ds.all_features()),100)
-    
+                           table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3 limit 100)',
+                           attachdb='busines@business.sqlite'
+                           )
+        eq_(len(ds.fields()), 29)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat',
+             'ISO3:1',
+             '1995',
+             '1996',
+             '1997',
+             '1998',
+             '1999',
+             '2000',
+             '2001',
+             '2002',
+             '2003',
+             '2004',
+             '2005',
+             '2006',
+             '2007',
+             '2008',
+             '2009',
+             '2010'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int'])
+        eq_(len(ds.all_features()), 100)
+
     test_attachdb_with_sql_join_count.requires_data = True
 
     def test_attachdb_with_sql_join_count2():
@@ -179,14 +299,72 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         select count(*) from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
         '''
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
-            attachdb='busines@business.sqlite'
-            )
-        eq_(len(ds.fields()),29)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int'])
-        eq_(len(ds.all_features()),192)
-    
+                           table='(select * from world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                           attachdb='busines@business.sqlite'
+                           )
+        eq_(len(ds.fields()), 29)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat',
+             'ISO3:1',
+             '1995',
+             '1996',
+             '1997',
+             '1998',
+             '1999',
+             '2000',
+             '2001',
+             '2002',
+             '2003',
+             '2004',
+             '2005',
+             '2006',
+             '2007',
+             '2008',
+             '2009',
+             '2010'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int'])
+        eq_(len(ds.all_features()), 192)
+
     test_attachdb_with_sql_join_count2.requires_data = True
 
     def test_attachdb_with_sql_join_count3():
@@ -194,14 +372,72 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
         '''
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from (select * from world_merc where !intersects!) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
-            attachdb='busines@business.sqlite'
-            )
-        eq_(len(ds.fields()),29)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int'])
-        eq_(len(ds.all_features()),192)
-    
+                           table='(select * from (select * from world_merc where !intersects!) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                           attachdb='busines@business.sqlite'
+                           )
+        eq_(len(ds.fields()), 29)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat',
+             'ISO3:1',
+             '1995',
+             '1996',
+             '1997',
+             '1998',
+             '1999',
+             '2000',
+             '2001',
+             '2002',
+             '2003',
+             '2004',
+             '2005',
+             '2006',
+             '2007',
+             '2008',
+             '2009',
+             '2010'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int'])
+        eq_(len(ds.all_features()), 192)
+
     test_attachdb_with_sql_join_count3.requires_data = True
 
     def test_attachdb_with_sql_join_count4():
@@ -209,14 +445,72 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
         '''
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from (select * from world_merc where !intersects! limit 1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
-            attachdb='busines@business.sqlite'
-            )
-        eq_(len(ds.fields()),29)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat', 'ISO3:1', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float', 'str', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int', 'int'])
-        eq_(len(ds.all_features()),1)
-    
+                           table='(select * from (select * from world_merc where !intersects! limit 1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                           attachdb='busines@business.sqlite'
+                           )
+        eq_(len(ds.fields()), 29)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat',
+             'ISO3:1',
+             '1995',
+             '1996',
+             '1997',
+             '1998',
+             '1999',
+             '2000',
+             '2001',
+             '2002',
+             '2003',
+             '2004',
+             '2005',
+             '2006',
+             '2007',
+             '2008',
+             '2009',
+             '2010'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int',
+             'int'])
+        eq_(len(ds.all_features()), 1)
+
     test_attachdb_with_sql_join_count4.requires_data = True
 
     def test_attachdb_with_sql_join_count5():
@@ -224,162 +518,237 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         select count(*) from (select * from world_merc where 1=1) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3;
         '''
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from (select * from world_merc where !intersects! and 1=2) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
-            attachdb='busines@business.sqlite'
-            )
-        # nothing is able to join to business so we don't pick up business schema
-        eq_(len(ds.fields()),12)
-        eq_(ds.fields(),['OGC_FID', 'fips', 'iso2', 'iso3', 'un', 'name', 'area', 'pop2005', 'region', 'subregion', 'lon', 'lat'])
-        eq_(ds.field_types(),['int', 'str', 'str', 'str', 'int', 'str', 'int', 'int', 'int', 'int', 'float', 'float'])
-        eq_(len(ds.all_features()),0)
-    
+                           table='(select * from (select * from world_merc where !intersects! and 1=2) as world_merc INNER JOIN business on world_merc.iso3 = business.ISO3)',
+                           attachdb='busines@business.sqlite'
+                           )
+        # nothing is able to join to business so we don't pick up business
+        # schema
+        eq_(len(ds.fields()), 12)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'fips',
+             'iso2',
+             'iso3',
+             'un',
+             'name',
+             'area',
+             'pop2005',
+             'region',
+             'subregion',
+             'lon',
+             'lat'])
+        eq_(ds.field_types(),
+            ['int',
+             'str',
+             'str',
+             'str',
+             'int',
+             'str',
+             'int',
+             'int',
+             'int',
+             'int',
+             'float',
+             'float'])
+        eq_(len(ds.all_features()), 0)
+
     test_attachdb_with_sql_join_count5.requires_data = True
 
     def test_subqueries():
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='world_merc',
-            )
+                           table='world_merc',
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['OGC_FID'],1)
-        eq_(feature['fips'],u'AC')
-        eq_(feature['iso2'],u'AG')
-        eq_(feature['iso3'],u'ATG')
-        eq_(feature['un'],28)
-        eq_(feature['name'],u'Antigua and Barbuda')
-        eq_(feature['area'],44)
-        eq_(feature['pop2005'],83039)
-        eq_(feature['region'],19)
-        eq_(feature['subregion'],29)
-        eq_(feature['lon'],-61.783)
-        eq_(feature['lat'],17.078)
+        eq_(feature['OGC_FID'], 1)
+        eq_(feature['fips'], u'AC')
+        eq_(feature['iso2'], u'AG')
+        eq_(feature['iso3'], u'ATG')
+        eq_(feature['un'], 28)
+        eq_(feature['name'], u'Antigua and Barbuda')
+        eq_(feature['area'], 44)
+        eq_(feature['pop2005'], 83039)
+        eq_(feature['region'], 19)
+        eq_(feature['subregion'], 29)
+        eq_(feature['lon'], -61.783)
+        eq_(feature['lat'], 17.078)
 
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select * from world_merc)',
-            )
+                           table='(select * from world_merc)',
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['OGC_FID'],1)
-        eq_(feature['fips'],u'AC')
-        eq_(feature['iso2'],u'AG')
-        eq_(feature['iso3'],u'ATG')
-        eq_(feature['un'],28)
-        eq_(feature['name'],u'Antigua and Barbuda')
-        eq_(feature['area'],44)
-        eq_(feature['pop2005'],83039)
-        eq_(feature['region'],19)
-        eq_(feature['subregion'],29)
-        eq_(feature['lon'],-61.783)
-        eq_(feature['lat'],17.078)
+        eq_(feature['OGC_FID'], 1)
+        eq_(feature['fips'], u'AC')
+        eq_(feature['iso2'], u'AG')
+        eq_(feature['iso3'], u'ATG')
+        eq_(feature['un'], 28)
+        eq_(feature['name'], u'Antigua and Barbuda')
+        eq_(feature['area'], 44)
+        eq_(feature['pop2005'], 83039)
+        eq_(feature['region'], 19)
+        eq_(feature['subregion'], 29)
+        eq_(feature['lon'], -61.783)
+        eq_(feature['lat'], 17.078)
 
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select OGC_FID,GEOMETRY from world_merc)',
-            )
+                           table='(select OGC_FID,GEOMETRY from world_merc)',
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['OGC_FID'],1)
-        eq_(len(feature),1)
+        eq_(feature['OGC_FID'], 1)
+        eq_(len(feature), 1)
 
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select GEOMETRY,OGC_FID,fips from world_merc)',
-            )
+                           table='(select GEOMETRY,OGC_FID,fips from world_merc)',
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['OGC_FID'],1)
-        eq_(feature['fips'],u'AC')
+        eq_(feature['OGC_FID'], 1)
+        eq_(feature['fips'], u'AC')
 
         # same as above, except with alias like postgres requires
         # TODO - should we try to make this work?
-        #ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
+        # ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
         #    table='(select GEOMETRY,rowid as aliased_id,fips from world_merc) as table',
         #    key_field='aliased_id'
         #    )
         #fs = ds.featureset()
         #feature = fs.next()
-        #eq_(feature['aliased_id'],1)
-        #eq_(feature['fips'],u'AC')
+        # eq_(feature['aliased_id'],1)
+        # eq_(feature['fips'],u'AC')
 
         ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',
-            table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)',
-            )
+                           table='(select GEOMETRY,OGC_FID,OGC_FID as rowid,fips from world_merc)',
+                           )
         fs = ds.featureset()
         feature = fs.next()
-        eq_(feature['rowid'],1)
-        eq_(feature['fips'],u'AC')
-    
+        eq_(feature['rowid'], 1)
+        eq_(feature['fips'], u'AC')
+
     test_subqueries.requires_data = True
 
     def test_empty_db():
         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-            table='empty',
-            )
+                           table='empty',
+                           )
         fs = ds.featureset()
         feature = None
         try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
+        eq_(feature, None)
 
     test_empty_db.requires_data = True
 
     @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-            table='empty',
-            )
-        eq_(len(ds.fields()),25)
-        eq_(ds.fields(),['OGC_FID', 'scalerank', 'labelrank', 'featurecla', 'sovereignt', 'sov_a3', 'adm0_dif', 'level', 'type', 'admin', 'adm0_a3', 'geou_dif', 'name', 'abbrev', 'postal', 'name_forma', 'terr_', 'name_sort', 'map_color', 'pop_est', 'gdp_md_est', 'fips_10_', 'iso_a2', 'iso_a3', 'iso_n3'])
-        eq_(ds.field_types(),['int', 'int', 'int', 'str', 'str', 'str', 'float', 'float', 'str', 'str', 'str', 'float', 'str', 'str', 'str', 'str', 'str', 'str', 'float', 'float', 'float', 'float', 'str', 'str', 'float'])
+                           table='empty',
+                           )
+        eq_(len(ds.fields()), 25)
+        eq_(ds.fields(),
+            ['OGC_FID',
+             'scalerank',
+             'labelrank',
+             'featurecla',
+             'sovereignt',
+             'sov_a3',
+             'adm0_dif',
+             'level',
+             'type',
+             'admin',
+             'adm0_a3',
+             'geou_dif',
+             'name',
+             'abbrev',
+             'postal',
+             'name_forma',
+             'terr_',
+             'name_sort',
+             'map_color',
+             'pop_est',
+             'gdp_md_est',
+             'fips_10_',
+             'iso_a2',
+             'iso_a3',
+             'iso_n3'])
+        eq_(ds.field_types(),
+            ['int',
+             'int',
+             'int',
+             'str',
+             'str',
+             'str',
+             'float',
+             'float',
+             'str',
+             'str',
+             'str',
+             'float',
+             'str',
+             'str',
+             'str',
+             'str',
+             'str',
+             'str',
+             'float',
+             'float',
+             'float',
+             'float',
+             'str',
+             'str',
+             'float'])
         query = mapnik.Query(ds.envelope())
         for fld in ds.fields():
             query.add_property_name(fld)
         # also add an invalid one, triggering throw
         query.add_property_name('bogus')
         ds.features(query)
-    
+
     test_that_nonexistant_query_field_throws.requires_data = True
 
     def test_intersects_token1():
         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-            table='(select * from empty where !intersects!)',
-            )
+                           table='(select * from empty where !intersects!)',
+                           )
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_intersects_token1.requires_data = True
 
     def test_intersects_token2():
         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-            table='(select * from empty where "a"!="b" and !intersects!)',
-            )
+                           table='(select * from empty where "a"!="b" and !intersects!)',
+                           )
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_intersects_token2.requires_data = True
 
     def test_intersects_token3():
         ds = mapnik.SQLite(file='../data/sqlite/empty.db',
-            table='(select * from empty where "a"!="b" and !intersects!)',
-            )
+                           table='(select * from empty where "a"!="b" and !intersects!)',
+                           )
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
-    
+        eq_(feature, None)
+
     test_intersects_token3.requires_data = True
 
     # https://github.com/mapnik/mapnik/issues/1537
@@ -388,110 +757,110 @@ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
         # form up an in-memory test db
         wkb = '010100000000000000000000000000000000000000'
         ds = mapnik.SQLite(file=':memory:',
-            table='test1',
-            initdb='''
+                           table='test1',
+                           initdb='''
                 create table test1 (alias TEXT,geometry BLOB);
                 insert into test1 values ("test",x'%s');
                 ''' % wkb,
-            extent='-180,-60,180,60',
-            use_spatial_index=False,
-            key_field='alias'
-        )
-        eq_(len(ds.fields()),1)
-        eq_(ds.fields(),['alias'])
-        eq_(ds.field_types(),['str'])
+                           extent='-180,-60,180,60',
+                           use_spatial_index=False,
+                           key_field='alias'
+                           )
+        eq_(len(ds.fields()), 1)
+        eq_(ds.fields(), ['alias'])
+        eq_(ds.field_types(), ['str'])
         fs = ds.all_features()
-        eq_(len(fs),1)
+        eq_(len(fs), 1)
         feat = fs[0]
-        eq_(feat.id(),0) # should be 1?
-        eq_(feat['alias'],'test')
-        eq_(feat.geometry.to_wkt(),'POINT(0 0)')
+        eq_(feat.id(), 0)  # should be 1?
+        eq_(feat['alias'], 'test')
+        eq_(feat.geometry.to_wkt(), 'POINT(0 0)')
 
     def test_db_with_one_untyped_column():
         # form up an in-memory test db
         wkb = '010100000000000000000000000000000000000000'
         ds = mapnik.SQLite(file=':memory:',
-            table='test1',
-            initdb='''
+                           table='test1',
+                           initdb='''
                 create table test1 (geometry BLOB, untyped);
                 insert into test1 values (x'%s', 'untyped');
             ''' % wkb,
-            extent='-180,-60,180,60',
-            use_spatial_index=False,
-            key_field='rowid'
-        )
+                           extent='-180,-60,180,60',
+                           use_spatial_index=False,
+                           key_field='rowid'
+                           )
 
         # ensure the untyped column is found
-        eq_(len(ds.fields()),2)
-        eq_(ds.fields(),['rowid', 'untyped'])
-        eq_(ds.field_types(),['int', 'str'])
+        eq_(len(ds.fields()), 2)
+        eq_(ds.fields(), ['rowid', 'untyped'])
+        eq_(ds.field_types(), ['int', 'str'])
 
     def test_db_with_one_untyped_column_using_subquery():
         # form up an in-memory test db
         wkb = '010100000000000000000000000000000000000000'
         ds = mapnik.SQLite(file=':memory:',
-            table='(SELECT rowid, geometry, untyped FROM test1)',
-            initdb='''
+                           table='(SELECT rowid, geometry, untyped FROM test1)',
+                           initdb='''
                 create table test1 (geometry BLOB, untyped);
                 insert into test1 values (x'%s', 'untyped');
             ''' % wkb,
-            extent='-180,-60,180,60',
-            use_spatial_index=False,
-            key_field='rowid'
-        )
+                           extent='-180,-60,180,60',
+                           use_spatial_index=False,
+                           key_field='rowid'
+                           )
 
         # ensure the untyped column is found
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['rowid', 'untyped', 'rowid'])
-        eq_(ds.field_types(),['int', 'str', 'int'])
-
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['rowid', 'untyped', 'rowid'])
+        eq_(ds.field_types(), ['int', 'str', 'int'])
 
     def test_that_64bit_int_fields_work():
         ds = mapnik.SQLite(file='../data/sqlite/64bit_int.sqlite',
-            table='int_table',
-            use_spatial_index=False
-        )
-        eq_(len(ds.fields()),3)
-        eq_(ds.fields(),['OGC_FID','id','bigint'])
-        eq_(ds.field_types(),['int','int','int'])
+                           table='int_table',
+                           use_spatial_index=False
+                           )
+        eq_(len(ds.fields()), 3)
+        eq_(ds.fields(), ['OGC_FID', 'id', 'bigint'])
+        eq_(ds.field_types(), ['int', 'int', 'int'])
         fs = ds.featureset()
         feat = fs.next()
-        eq_(feat.id(),1)
-        eq_(feat['OGC_FID'],1)
-        eq_(feat['bigint'],2147483648)
+        eq_(feat.id(), 1)
+        eq_(feat['OGC_FID'], 1)
+        eq_(feat['bigint'], 2147483648)
         feat = fs.next()
-        eq_(feat.id(),2)
-        eq_(feat['OGC_FID'],2)
-        eq_(feat['bigint'],922337203685477580)
+        eq_(feat.id(), 2)
+        eq_(feat['OGC_FID'], 2)
+        eq_(feat['bigint'], 922337203685477580)
 
     test_that_64bit_int_fields_work.requires_data = True
 
     def test_null_id_field():
-        # silence null key warning: https://github.com/mapnik/mapnik/issues/1889
+        # silence null key warning:
+        # https://github.com/mapnik/mapnik/issues/1889
         default_logging_severity = mapnik.logger.get_severity()
-        mapnik.logger.set_severity(mapnik.severity_type.None)
+        mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
         # form up an in-memory test db
         wkb = '010100000000000000000000000000000000000000'
         # note: the osm_id should be declared INTEGER PRIMARY KEY
         # but in this case we intentionally do not make this a valid pkey
         # otherwise sqlite would turn the null into a valid, serial id
         ds = mapnik.SQLite(file=':memory:',
-            table='test1',
-            initdb='''
+                           table='test1',
+                           initdb='''
                 create table test1 (osm_id INTEGER,geometry BLOB);
                 insert into test1 values (null,x'%s');
                 ''' % wkb,
-            extent='-180,-60,180,60',
-            use_spatial_index=False,
-            key_field='osm_id'
-        )
+                           extent='-180,-60,180,60',
+                           use_spatial_index=False,
+                           key_field='osm_id'
+                           )
         fs = ds.featureset()
         feature = None
-        try :
+        try:
             feature = fs.next()
         except StopIteration:
             pass
-        eq_(feature,None)
+        eq_(feature, None)
         mapnik.logger.set_severity(default_logging_severity)
 
 if __name__ == "__main__":
diff --git a/test/python_tests/style_test.py b/test/python_tests/style_test.py
index 7bc782a..ff2c05e 100644
--- a/test/python_tests/style_test.py
+++ b/test/python_tests/style_test.py
@@ -2,17 +2,20 @@
 # -*- coding: utf-8 -*-
 
 from nose.tools import eq_
-from utilities import run_all
+
 import mapnik
 
+from .utilities import run_all
+
+
 def test_style_init():
-   s = mapnik.Style()
-   eq_(s.filter_mode,mapnik.filter_mode.ALL)
-   eq_(len(s.rules),0)
-   eq_(s.opacity,1)
-   eq_(s.comp_op,None)
-   eq_(s.image_filters,"")
-   eq_(s.image_filters_inflate,False)
+    s = mapnik.Style()
+    eq_(s.filter_mode, mapnik.filter_mode.ALL)
+    eq_(len(s.rules), 0)
+    eq_(s.opacity, 1)
+    eq_(s.comp_op, None)
+    eq_(s.image_filters, "")
+    eq_(s.image_filters_inflate, False)
 
 if __name__ == "__main__":
     exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
diff --git a/test/python_tests/topojson_plugin_test.py b/test/python_tests/topojson_plugin_test.py
index a5f3e57..7560fbb 100644
--- a/test/python_tests/topojson_plugin_test.py
+++ b/test/python_tests/topojson_plugin_test.py
@@ -1,9 +1,15 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function
+
+import os
+
+from nose.tools import assert_almost_equal, eq_
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
-from nose.tools import eq_,assert_almost_equal
-from utilities import execution_path, run_all
-import os, mapnik
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -15,7 +21,9 @@ if 'topojson' in mapnik.DatasourceCache.plugin_names():
     def test_topojson_init():
         # topojson tests/data/json/escaped.geojson -o tests/data/json/escaped.topojson --properties
         # topojson version 1.4.2
-        ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson')
+        ds = mapnik.Datasource(
+            type='topojson',
+            file='../data/json/escaped.topojson')
         e = ds.envelope()
         assert_almost_equal(e.minx, -81.705583, places=7)
         assert_almost_equal(e.miny, 41.480573, places=6)
@@ -23,11 +31,13 @@ if 'topojson' in mapnik.DatasourceCache.plugin_names():
         assert_almost_equal(e.maxy, 41.480573, places=3)
 
     def test_topojson_properties():
-        ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson')
+        ds = mapnik.Datasource(
+            type='topojson',
+            file='../data/json/escaped.topojson')
         f = ds.features_at_point(ds.envelope().center()).features[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -38,12 +48,14 @@ if 'topojson' in mapnik.DatasourceCache.plugin_names():
         eq_(f['NOM_FR'], u'Qu\xe9bec')
         eq_(f['NOM_FR'], u'Québec')
 
-        ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson')
+        ds = mapnik.Datasource(
+            type='topojson',
+            file='../data/json/escaped.topojson')
         f = ds.all_features()[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -55,12 +67,16 @@ if 'topojson' in mapnik.DatasourceCache.plugin_names():
         eq_(f['NOM_FR'], u'Québec')
 
     def test_geojson_from_in_memory_string():
-        ds = mapnik.Datasource(type='topojson',inline=open('../data/json/escaped.topojson','r').read())
+        ds = mapnik.Datasource(
+            type='topojson',
+            inline=open(
+                '../data/json/escaped.topojson',
+                'r').read())
         f = ds.all_features()[0]
-        eq_(len(ds.fields()),7)
+        eq_(len(ds.fields()), 7)
 
         desc = ds.describe()
-        eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
+        eq_(desc['geometry_type'], mapnik.DataGeometryType.Point)
 
         eq_(f['name'], u'Test')
         eq_(f['int'], 1)
@@ -73,11 +89,15 @@ if 'topojson' in mapnik.DatasourceCache.plugin_names():
 
 #    @raises(RuntimeError)
     def test_that_nonexistant_query_field_throws(**kwargs):
-        ds = mapnik.Datasource(type='topojson',file='../data/json/escaped.topojson')
-        eq_(len(ds.fields()),7)
+        ds = mapnik.Datasource(
+            type='topojson',
+            file='../data/json/escaped.topojson')
+        eq_(len(ds.fields()), 7)
         # TODO - this sorting is messed up
-        eq_(ds.fields(),['name', 'int', 'description', 'spaces', 'double', 'boolean', 'NOM_FR'])
-        eq_(ds.field_types(),['str', 'int', 'str', 'str', 'float', 'bool', 'str'])
+        eq_(ds.fields(), ['name', 'int', 'description',
+                          'spaces', 'double', 'boolean', 'NOM_FR'])
+        eq_(ds.field_types(), ['str', 'int',
+                               'str', 'str', 'float', 'bool', 'str'])
 # TODO - should topojson plugin throw like others?
 #        query = mapnik.Query(ds.envelope())
 #        for fld in ds.fields():
diff --git a/test/python_tests/utilities.py b/test/python_tests/utilities.py
index fe02c7d..9bfc9ae 100644
--- a/test/python_tests/utilities.py
+++ b/test/python_tests/utilities.py
@@ -1,25 +1,38 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import os
+import sys
+import traceback
+
 from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
 from nose.tools import assert_almost_equal
 
-import os, sys, traceback
 import mapnik
 
+PYTHON3 = sys.version_info[0] == 3
+READ_FLAGS = 'rb' if PYTHON3 else 'r'
+if PYTHON3:
+    xrange = range
+
 HERE = os.path.dirname(__file__)
 
+
 def execution_path(filename):
-    return os.path.join(os.path.dirname(sys._getframe(1).f_code.co_filename), filename)
+    return os.path.join(os.path.dirname(
+        sys._getframe(1).f_code.co_filename), filename)
+
 
 class Todo(Exception):
     pass
 
+
 class TodoPlugin(ErrorClassPlugin):
     name = "todo"
 
     todo = ErrorClass(Todo, label='TODO', isfailure=False)
 
+
 def contains_word(word, bytestring_):
     """
     Checks that a bytestring contains a given word. len(bytestring) should be
@@ -37,29 +50,33 @@ def contains_word(word, bytestring_):
     AssertionError: len(bytestring_) not multiple of len(word)
     """
     n = len(word)
-    assert len(bytestring_)%n == 0, "len(bytestring_) not multiple of len(word)"
-    chunks = [bytestring_[i:i+n] for i in xrange(0, len(bytestring_), n)]
+    assert len(bytestring_) % n == 0, "len(bytestring_) not multiple of len(word)"
+    chunks = [bytestring_[i:i + n] for i in xrange(0, len(bytestring_), n)]
     return word in chunks
 
+
 def pixel2channels(pixel):
     alpha = (pixel >> 24) & 0xff
     red = pixel & 0xff
     green = (pixel >> 8) & 0xff
     blue = (pixel >> 16) & 0xff
-    return red,green,blue,alpha
+    return red, green, blue, alpha
+
 
 def pixel2rgba(pixel):
     return 'rgba(%s,%s,%s,%s)' % pixel2channels(pixel)
 
+
 def get_unique_colors(im):
     pixels = []
     for x in range(im.width()):
         for y in range(im.height()):
-            pixel = im.get_pixel(x,y)
+            pixel = im.get_pixel(x, y)
             if pixel not in pixels:
-                 pixels.append(pixel)
+                pixels.append(pixel)
     pixels = sorted(pixels)
-    return map(pixel2rgba,pixels)
+    return list(map(pixel2rgba, pixels))
+
 
 def run_all(iterable):
     failed = 0
@@ -82,18 +99,40 @@ def run_all(iterable):
         sys.stderr.flush()
     return failed
 
+
 def side_by_side_image(left_im, right_im):
     width = left_im.width() + 1 + right_im.width()
     height = max(left_im.height(), right_im.height())
     im = mapnik.Image(width, height)
-    im.composite(left_im,mapnik.CompositeOp.src_over,1.0,0,0)
+    im.composite(left_im, mapnik.CompositeOp.src_over, 1.0, 0, 0)
     if width > 80:
-       im.composite(mapnik.Image.open(HERE+'/images/expected.png'),mapnik.CompositeOp.difference,1.0,0,0)
-    im.composite(right_im,mapnik.CompositeOp.src_over,1.0,left_im.width() + 1, 0)
+        im.composite(
+            mapnik.Image.open(
+                HERE +
+                '/images/expected.png'),
+            mapnik.CompositeOp.difference,
+            1.0,
+            0,
+            0)
+    im.composite(
+        right_im,
+        mapnik.CompositeOp.src_over,
+        1.0,
+        left_im.width() + 1,
+        0)
     if width > 80:
-       im.composite(mapnik.Image.open(HERE+'/images/actual.png'),mapnik.CompositeOp.difference,1.0,left_im.width() + 1, 0)
+        im.composite(
+            mapnik.Image.open(
+                HERE +
+                '/images/actual.png'),
+            mapnik.CompositeOp.difference,
+            1.0,
+            left_im.width() +
+            1,
+            0)
     return im
 
+
 def assert_box2d_almost_equal(a, b, msg=None):
     msg = msg or ("%r != %r" % (a, b))
     assert_almost_equal(a.minx, b.minx, msg=msg)
diff --git a/test/python_tests/webp_encoding_test.py b/test/python_tests/webp_encoding_test.py
index 91e23fc..ccd8f42 100644
--- a/test/python_tests/webp_encoding_test.py
+++ b/test/python_tests/webp_encoding_test.py
@@ -1,9 +1,15 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function
+
+import os
+
+from nose.tools import eq_, raises
+
+import mapnik
+
+from .utilities import execution_path, run_all
 
-import os, mapnik
-from nose.tools import raises,eq_
-from utilities import execution_path, run_all
 
 def setup():
     # All of the paths used are relative, if we run the tests
@@ -13,7 +19,7 @@ def setup():
 if mapnik.has_webp():
     tmp_dir = '/tmp/mapnik-webp/'
     if not os.path.exists(tmp_dir):
-       os.makedirs(tmp_dir)
+        os.makedirs(tmp_dir)
 
     opts = [
         'webp',
@@ -38,29 +44,29 @@ if mapnik.has_webp():
         'webp:target_size=100'
     ]
 
-
-    def gen_filepath(name,format):
-        return os.path.join('images/support/encoding-opts',name+'-'+format.replace(":","+")+'.webp')
+    def gen_filepath(name, format):
+        return os.path.join('images/support/encoding-opts',
+                            name + '-' + format.replace(":", "+") + '.webp')
 
     def test_quality_threshold():
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         im.tostring('webp:quality=99.99000')
         im.tostring('webp:quality=0')
         im.tostring('webp:quality=0.001')
 
     @raises(RuntimeError)
     def test_quality_threshold_invalid():
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         im.tostring('webp:quality=101')
 
     @raises(RuntimeError)
     def test_quality_threshold_invalid2():
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         im.tostring('webp:quality=-1')
-    
+
     @raises(RuntimeError)
     def test_quality_threshold_invalid3():
-        im = mapnik.Image(256,256)
+        im = mapnik.Image(256, 256)
         im.tostring('webp:quality=101.1')
 
     generate = os.environ.get('UPDATE')
@@ -69,74 +75,87 @@ if mapnik.has_webp():
         fails = []
         try:
             for opt in opts:
-                im = mapnik.Image(256,256)
-                expected = gen_filepath('blank',opt)
-                actual = os.path.join(tmp_dir,os.path.basename(expected))
+                im = mapnik.Image(256, 256)
+                expected = gen_filepath('blank', opt)
+                actual = os.path.join(tmp_dir, os.path.basename(expected))
                 if generate or not os.path.exists(expected):
-                    print 'generating expected image %s' % expected
-                    im.save(expected,opt)
-                im.save(actual,opt)
+                    print('generating expected image', expected)
+                    im.save(expected, opt)
+                im.save(actual, opt)
                 try:
                     expected_bytes = mapnik.Image.open(expected).tostring()
                 except RuntimeError:
-                    # this will happen if libweb is old, since it cannot open images created by more recent webp
-                    print 'warning, cannot open webp expected image (your libwebp is likely too old)'
+                    # this will happen if libweb is old, since it cannot open
+                    # images created by more recent webp
+                    print(
+                        'warning, cannot open webp expected image (your libwebp is likely too old)')
                     continue
                 if mapnik.Image.open(actual).tostring() != expected_bytes:
-                    fails.append('%s (actual) not == to %s (expected)' % (actual,expected))
+                    fails.append(
+                        '%s (actual) not == to %s (expected)' %
+                        (actual, expected))
 
             for opt in opts:
-                im = mapnik.Image(256,256)
+                im = mapnik.Image(256, 256)
                 im.fill(mapnik.Color('green'))
-                expected = gen_filepath('solid',opt)
-                actual = os.path.join(tmp_dir,os.path.basename(expected))
+                expected = gen_filepath('solid', opt)
+                actual = os.path.join(tmp_dir, os.path.basename(expected))
                 if generate or not os.path.exists(expected):
-                    print 'generating expected image %s' % expected
-                    im.save(expected,opt)
-                im.save(actual,opt)
+                    print('generating expected image', expected)
+                    im.save(expected, opt)
+                im.save(actual, opt)
                 try:
                     expected_bytes = mapnik.Image.open(expected).tostring()
                 except RuntimeError:
-                    # this will happen if libweb is old, since it cannot open images created by more recent webp
-                    print 'warning, cannot open webp expected image (your libwebp is likely too old)'
+                    # this will happen if libweb is old, since it cannot open
+                    # images created by more recent webp
+                    print(
+                        'warning, cannot open webp expected image (your libwebp is likely too old)')
                     continue
                 if mapnik.Image.open(actual).tostring() != expected_bytes:
-                    fails.append('%s (actual) not == to %s (expected)' % (actual,expected))
+                    fails.append(
+                        '%s (actual) not == to %s (expected)' %
+                        (actual, expected))
 
             for opt in opts:
-                im = mapnik.Image.open('images/support/transparency/aerial_rgba.png')
-                expected = gen_filepath('aerial_rgba',opt)
-                actual = os.path.join(tmp_dir,os.path.basename(expected))
+                im = mapnik.Image.open(
+                    'images/support/transparency/aerial_rgba.png')
+                expected = gen_filepath('aerial_rgba', opt)
+                actual = os.path.join(tmp_dir, os.path.basename(expected))
                 if generate or not os.path.exists(expected):
-                    print 'generating expected image %s' % expected
-                    im.save(expected,opt)
-                im.save(actual,opt)
+                    print('generating expected image', expected)
+                    im.save(expected, opt)
+                im.save(actual, opt)
                 try:
                     expected_bytes = mapnik.Image.open(expected).tostring()
                 except RuntimeError:
-                    # this will happen if libweb is old, since it cannot open images created by more recent webp
-                    print 'warning, cannot open webp expected image (your libwebp is likely too old)'
+                    # this will happen if libweb is old, since it cannot open
+                    # images created by more recent webp
+                    print(
+                        'warning, cannot open webp expected image (your libwebp is likely too old)')
                     continue
                 if mapnik.Image.open(actual).tostring() != expected_bytes:
-                    fails.append('%s (actual) not == to %s (expected)' % (actual,expected))
+                    fails.append(
+                        '%s (actual) not == to %s (expected)' %
+                        (actual, expected))
             # disabled to avoid failures on ubuntu when using old webp packages
-            #eq_(fails,[],'\n'+'\n'.join(fails))
-        except RuntimeError, e:
-            print e
+            # eq_(fails,[],'\n'+'\n'.join(fails))
+        except RuntimeError as e:
+            print(e)
 
     def test_transparency_levels():
         try:
             # create partial transparency image
-            im = mapnik.Image(256,256)
+            im = mapnik.Image(256, 256)
             im.fill(mapnik.Color('rgba(255,255,255,.5)'))
             c2 = mapnik.Color('rgba(255,255,0,.2)')
             c3 = mapnik.Color('rgb(0,255,255)')
-            for y in range(0,im.height()/2):
-                for x in range(0,im.width()/2):
-                    im.set_pixel(x,y,c2)
-            for y in range(im.height()/2,im.height()):
-                for x in range(im.width()/2,im.width()):
-                    im.set_pixel(x,y,c3)
+            for y in range(0, int(im.height() / 2)):
+                for x in range(0, int(im.width() / 2)):
+                    im.set_pixel(x, y, c2)
+            for y in range(int(im.height() / 2), im.height()):
+                for x in range(int(im.width() / 2), im.width()):
+                    im.set_pixel(x, y, c3)
 
             t0 = tmp_dir + 'white0-actual.webp'
 
@@ -145,18 +164,20 @@ if mapnik.has_webp():
             expected = 'images/support/transparency/white0.webp'
             if generate or not os.path.exists(expected):
                 im.save('images/support/transparency/white0.webp')
-            im.save(t0,format)
+            im.save(t0, format)
             im_in = mapnik.Image.open(t0)
             t0_len = len(im_in.tostring(format))
             try:
                 expected_bytes = mapnik.Image.open(expected).tostring(format)
             except RuntimeError:
-                # this will happen if libweb is old, since it cannot open images created by more recent webp
-                print 'warning, cannot open webp expected image (your libwebp is likely too old)'
+                # this will happen if libweb is old, since it cannot open
+                # images created by more recent webp
+                print(
+                    'warning, cannot open webp expected image (your libwebp is likely too old)')
                 return
-            eq_(t0_len,len(expected_bytes))
-        except RuntimeError, e:
-            print e
+            eq_(t0_len, len(expected_bytes))
+        except RuntimeError as e:
+            print(e)
 
 
 if __name__ == "__main__":
diff --git a/test/run_tests.py b/test/run_tests.py
index edf7974..4430d37 100755
--- a/test/run_tests.py
+++ b/test/run_tests.py
@@ -1,27 +1,34 @@
 #!/usr/bin/env python
 
+import getopt
+import os
 import sys
 
+
+from python_tests.utilities import TodoPlugin
+
 try:
     import nose
-except ImportError, e:
-    sys.stderr.write("Unable to run python tests: the third party 'nose' module is required\nTo install 'nose' do:\n\tsudo pip install nose (or on debian systems: apt-get install python-nose): %s\n" % e)
+except ImportError as e:
+    sys.stderr.write(
+        "Unable to run python tests: the third party 'nose' module is required"
+        "\nTo install 'nose' do:"
+        "\n\tsudo pip install nose (or on debian systems: "
+        "apt-get install python-nose): %s\n" % e)
     sys.exit(1)
+else:
+    from nose.plugins.doctests import Doctest
 
-import mapnik    
-from python_tests.utilities import TodoPlugin
-from nose.plugins.doctests import Doctest
-
-import nose, sys, os, getopt
 
 def usage():
     print("test.py -h | --help")
     print("test.py [-q | -v] [-p | --prefix <path>]")
 
+
 def main():
     try:
         opts, args = getopt.getopt(sys.argv[1:], "hvqp:", ["help", "prefix="])
-    except getopt.GetoptError,err:
+    except getopt.GetoptError as err:
         print(str(err))
         usage()
         sys.exit(2)
@@ -49,27 +56,41 @@ def main():
 
     if prefix:
         # Allow python to find libraries for testing on the buildbot
-        sys.path.insert(0, os.path.join(prefix, "lib/python%s/site-packages" % sys.version[:3]))
+        sys.path.insert(
+            0,
+            os.path.join(
+                prefix,
+                "lib/python%s/site-packages" %
+                sys.version[
+                    :3]))
 
     import mapnik
 
     if not quiet:
         print("- mapnik path: %s" % mapnik.__file__)
-        if hasattr(mapnik,'_mapnik'):
-           print("- _mapnik.so path: %s" % mapnik._mapnik.__file__)
-        if hasattr(mapnik,'inputpluginspath'):
-            print ("- Input plugins path: %s" % mapnik.inputpluginspath)
-        if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'):
-            print ("- MAPNIK_INPUT_PLUGINS_DIRECTORY env: %s" % os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY'))
-        if hasattr(mapnik,'fontscollectionpath'):
+        if hasattr(mapnik, '_mapnik'):
+            print("- _mapnik.so path: %s" % mapnik._mapnik.__file__)
+        if hasattr(mapnik, 'inputpluginspath'):
+            print("- Input plugins path: %s" % mapnik.inputpluginspath)
+        if 'MAPNIK_INPUT_PLUGINS_DIRECTORY' in os.environ:
+            print("- MAPNIK_INPUT_PLUGINS_DIRECTORY env: %s" %
+                  os.environ.get('MAPNIK_INPUT_PLUGINS_DIRECTORY'))
+        if hasattr(mapnik, 'fontscollectionpath'):
             print("- Font path: %s" % mapnik.fontscollectionpath)
-        if os.environ.has_key('MAPNIK_FONT_DIRECTORY'):
-            print ("- MAPNIK_FONT_DIRECTORY env: %s" % os.environ.get('MAPNIK_FONT_DIRECTORY'))
+        if 'MAPNIK_FONT_DIRECTORY' in os.environ:
+            print(
+                "- MAPNIK_FONT_DIRECTORY env: %s" %
+                os.environ.get('MAPNIK_FONT_DIRECTORY'))
         print('')
         print("- Running nosetests:")
         print('')
 
-    argv = [__file__, '--exe', '--with-todo', '--with-doctest', '--doctest-tests']
+    argv = [
+        __file__,
+        '--exe',
+        '--with-todo',
+        '--with-doctest',
+        '--doctest-tests']
 
     if not quiet:
         argv.append('-v')
@@ -80,7 +101,7 @@ def main():
         argv.append('-v')
 
     dirname = os.path.dirname(sys.argv[0])
-    argv.extend(['-w', os.path.join(dirname,'python_tests')])
+    argv.extend(['-w', os.path.join(dirname, 'python_tests')])
 
     if not nose.run(argv=argv, plugins=[TodoPlugin(), Doctest()]):
         sys.exit(1)
diff --git a/test/visual.py b/test/visual.py
index 32ad7f4..e4b918a 100755
--- a/test/visual.py
+++ b/test/visual.py
@@ -1,15 +1,18 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+from __future__ import print_function
+
+import glob
 import os
+import platform
+import shutil
 import sys
+
 import mapnik
-import shutil
-import platform
-import glob
 
-#mapnik.logger.set_severity(mapnik.severity_type.None)
-#mapnik.logger.set_severity(mapnik.severity_type.Debug)
+# mapnik.logger.set_severity(mapnik.severity_type.None)
+# mapnik.logger.set_severity(mapnik.severity_type.Debug)
 
 try:
     import json
@@ -21,7 +24,7 @@ visual_output_dir = "/tmp/mapnik-visual-images"
 defaults = {
     'status': True,
     'sizes': [(500, 100)],
-    'scales':[1.0,2.0],
+    'scales': [1.0, 2.0],
     'agg': True,
     'cairo': mapnik.has_cairo(),
     'grid': mapnik.has_grid_renderer()
@@ -39,43 +42,46 @@ if 'Linux' == platform.uname()[0]:
     agg_threshold = 12
     grid_threshold = 6
 
+
 def render_cairo(m, output, scale_factor):
     mapnik.render_to_file(m, output, 'ARGB32', scale_factor)
     # open and re-save as png8 to save space
     new_im = mapnik.Image.open(output)
     new_im.save(output, 'png32')
 
+
 def render_grid(m, output, scale_factor):
     grid = mapnik.Grid(m.width, m.height)
     mapnik.render_layer(m, grid, layer=0, scale_factor=scale_factor)
     utf1 = grid.encode('utf', resolution=4)
-    open(output,'wb').write(json.dumps(utf1, indent=1))
+    open(output, 'wb').write(json.dumps(utf1, indent=1).encode())
+
 
 def render_agg(m, output, scale_factor):
     mapnik.render_to_file(m, output, 'png32', scale_factor),
 
 renderers = [
-    { 'name': 'agg',
-      'render': render_agg,
-      'compare': lambda actual, reference: compare(actual, reference, alpha=True),
-      'threshold': agg_threshold,
-      'filetype': 'png',
-      'dir': 'images'
-    },
-    { 'name': 'cairo',
-      'render': render_cairo,
-      'compare': lambda actual, reference: compare(actual, reference, alpha=False),
-      'threshold': cairo_threshold,
-      'filetype': 'png',
-      'dir': 'images'
-    },
-    { 'name': 'grid',
-      'render': render_grid,
-      'compare': lambda actual, reference: compare_grids(actual, reference, alpha=False),
-      'threshold': grid_threshold,
-      'filetype': 'json',
-      'dir': 'grids'
-    }
+    {'name': 'agg',
+     'render': render_agg,
+     'compare': lambda actual, reference: compare(actual, reference, alpha=True),
+     'threshold': agg_threshold,
+     'filetype': 'png',
+     'dir': 'images'
+     },
+    {'name': 'cairo',
+     'render': render_cairo,
+     'compare': lambda actual, reference: compare(actual, reference, alpha=False),
+     'threshold': cairo_threshold,
+     'filetype': 'png',
+     'dir': 'images'
+     },
+    {'name': 'grid',
+     'render': render_grid,
+     'compare': lambda actual, reference: compare_grids(actual, reference, alpha=False),
+     'threshold': grid_threshold,
+     'filetype': 'json',
+     'dir': 'grids'
+     }
 ]
 
 COMPUTE_THRESHOLD = 16
@@ -86,10 +92,13 @@ if platform.uname()[0] == 'Darwin':
     COMPUTE_THRESHOLD = 2
 
 # compare two images and return number of different pixels
+
+
 def compare(actual, expected, alpha=True):
     im1 = mapnik.Image.open(actual)
     im2 = mapnik.Image.open(expected)
-    return im1.compare(im2,COMPUTE_THRESHOLD, alpha)
+    return im1.compare(im2, COMPUTE_THRESHOLD, alpha)
+
 
 def compare_grids(actual, expected, threshold=0, alpha=True):
     global errors
@@ -112,30 +121,32 @@ def compare_grids(actual, expected, threshold=0, alpha=True):
     height2 = len(grid2)
     if not height1 == height2:
         return 99999999
-    diff = 0;
-    for y in range(0,height1-1):
+    diff = 0
+    for y in range(0, height1 - 1):
         row1 = grid1[y]
         row2 = grid2[y]
-        width = min(len(row1),len(row2))
-        for w in range(0,width):
+        width = min(len(row1), len(row2))
+        for w in range(0, width):
             if row1[w] != row2[w]:
                 diff += 1
     return diff
 
-dirname = os.path.join(os.path.dirname(__file__),'data-visual')
+dirname = os.path.join(os.path.dirname(__file__), 'data-visual')
+
 
 class Reporting:
     DIFF = 1
     NOT_FOUND = 2
     OTHER = 3
     REPLACE = 4
-    def __init__(self, quiet, overwrite_failures = False):
+
+    def __init__(self, quiet, overwrite_failures=False):
         self.quiet = quiet
         self.passed = 0
         self.failed = 0
         self.overwrite_failures = overwrite_failures
-        self.errors = [ #(type, actual, expected, diff, message)
-         ]
+        self.errors = [  # (type, actual, expected, diff, message)
+        ]
 
     def result_fail(self, actual, expected, diff):
         self.failed += 1
@@ -145,7 +156,9 @@ class Reporting:
             else:
                 sys.stderr.write('\x1b[31m.\x1b[0m')
         else:
-            print '\x1b[31m✘\x1b[0m (\x1b[34m%u different pixels\x1b[0m)' % diff
+            print(
+                '\x1b[31m✘\x1b[0m (\x1b[34m%u different pixels\x1b[0m)' %
+                diff)
 
         if self.overwrite_failures:
             self.errors.append((self.REPLACE, actual, expected, diff, None))
@@ -163,9 +176,9 @@ class Reporting:
                 sys.stderr.write('\x1b[32m.\x1b[0m')
         else:
             if platform.uname()[0] == 'Windows':
-                print '\x1b[32m✓\x1b[0m'
+                print('\x1b[32m✓\x1b[0m')
             else:
-                print '✓'
+                print('✓')
 
     def not_found(self, actual, expected):
         self.failed += 1
@@ -173,7 +186,8 @@ class Reporting:
         if self.quiet:
             sys.stderr.write('\x1b[33m.\x1b[0m')
         else:
-            print '\x1b[33m?\x1b[0m (\x1b[34mReference file not found, creating\x1b[0m)'
+            print(
+                '\x1b[33m?\x1b[0m (\x1b[34mReference file not found, creating\x1b[0m)')
         contents = open(actual, 'r').read()
         open(expected, 'wb').write(contents)
 
@@ -183,16 +197,16 @@ class Reporting:
         if self.quiet:
             sys.stderr.write('\x1b[31m.\x1b[0m')
         else:
-            print '\x1b[31m✘\x1b[0m (\x1b[34m%s\x1b[0m)' % message
+            print('\x1b[31m✘\x1b[0m (\x1b[34m%s\x1b[0m)' % message)
 
-    def make_html_item(self,actual,expected,diff):
+    def make_html_item(self, actual, expected, diff):
         item = '''
              <div class="expected">
                <a href="%s">
                  <img src="%s" width="100%s">
                </a>
              </div>
-              ''' % (expected,expected,'%')
+              ''' % (expected, expected, '%')
         item += '<div class="text">%s</div>' % (diff)
         item += '''
              <div class="actual">
@@ -200,36 +214,57 @@ class Reporting:
                  <img src="%s" width="100%s">
                </a>
              </div>
-              ''' % (actual,actual,'%')
+              ''' % (actual, actual, '%')
         return item
 
     def summary(self):
         if len(self.errors) == 0:
-            print '\nAll %s visual tests passed: \x1b[1;32m✓ \x1b[0m' % self.passed
+            print(
+                '\nAll %s visual tests passed: \x1b[1;32m✓ \x1b[0m' %
+                self.passed)
             return 0
         sortable_errors = []
-        print "\nVisual rendering: %s failed / %s passed" % (len(self.errors), self.passed)
+        print("\nVisual rendering: %s failed / %s passed" %
+              (len(self.errors), self.passed))
         for idx, error in enumerate(self.errors):
             if error[0] == self.OTHER:
-                print str(idx+1) + ") \x1b[31mfailure to run test:\x1b[0m %s (\x1b[34m%s\x1b[0m)" % (error[2],error[4])
+                print(str(idx +
+                          1) +
+                      ") \x1b[31mfailure to run test:\x1b[0m %s (\x1b[34m%s\x1b[0m)" %
+                      (error[2], error[4]))
             elif error[0] == self.NOT_FOUND:
-                print str(idx+1) + ") Generating reference image: '%s'" % error[2]
+                print(str(idx + 1) + ") Generating reference image: '%s'" %
+                      error[2])
                 continue
             elif error[0] == self.DIFF:
-                print str(idx+1) + ") \x1b[34m%s different pixels\x1b[0m:\n\t%s (\x1b[31mactual\x1b[0m)\n\t%s (\x1b[32mexpected\x1b[0m)" % (error[3], error[1], error[2])
-                if '.png' in error[1]: # ignore grids
-                    sortable_errors.append((error[3],error))
+                print(
+                    str(
+                        idx +
+                        1) +
+                    ") \x1b[34m%s different pixels\x1b[0m:\n\t%s (\x1b[31mactual\x1b[0m)\n\t%s (\x1b[32mexpected\x1b[0m)" %
+                    (error[3],
+                     error[1],
+                        error[2]))
+                if '.png' in error[1]:  # ignore grids
+                    sortable_errors.append((error[3], error))
             elif error[0] == self.REPLACE:
-                print str(idx+1) + ") \x1b[31mreplaced reference with new version:\x1b[0m %s" % error[2]
+                print(str(idx +
+                          1) +
+                      ") \x1b[31mreplaced reference with new version:\x1b[0m %s" %
+                      error[2])
         if len(sortable_errors):
             # drop failure results in folder
-            vdir = os.path.join(visual_output_dir,'visual-test-results')
+            vdir = os.path.join(visual_output_dir, 'visual-test-results')
             if not os.path.exists(vdir):
                 os.makedirs(vdir)
-            html_template = open(os.path.join(dirname,'index.html'),'r').read()
+            html_template = open(
+                os.path.join(
+                    dirname,
+                    'index.html'),
+                'r').read()
             name = 'index.html'
-            failures_realpath = os.path.join(vdir,name)
-            html_out = open(failures_realpath,'w+')
+            failures_realpath = os.path.join(vdir, name)
+            html_out = open(failures_realpath, 'w+')
             sortable_errors.sort(reverse=True)
             html_body = ''
             for item in sortable_errors:
@@ -237,33 +272,39 @@ class Reporting:
                 actual = item[1][1]
                 expected = item[1][2]
                 diff = item[0]
-                actual_new = os.path.join(vdir,os.path.basename(actual))
-                shutil.copy(actual,actual_new)
-                expected_new = os.path.join(vdir,os.path.basename(expected))
-                shutil.copy(expected,expected_new)
-                html_body += self.make_html_item(os.path.relpath(actual_new,vdir),os.path.relpath(expected_new,vdir),diff)
-            html_out.write(html_template.replace('{{RESULTS}}',html_body))
-            print 'View failures by opening %s' % failures_realpath
+                actual_new = os.path.join(vdir, os.path.basename(actual))
+                shutil.copy(actual, actual_new)
+                expected_new = os.path.join(vdir, os.path.basename(expected))
+                shutil.copy(expected, expected_new)
+                html_body += self.make_html_item(
+                    os.path.relpath(
+                        actual_new, vdir), os.path.relpath(
+                        expected_new, vdir), diff)
+            html_out.write(html_template.replace('{{RESULTS}}', html_body))
+            print('View failures by opening %s' % failures_realpath)
         return 1
 
+
 def render(filename, config, scale_factor, reporting):
     m = mapnik.Map(*config['sizes'][0])
 
     try:
         mapnik.load_map(m, os.path.join(dirname, "styles", filename), True)
 
-        if not (m.parameters['status'] if ('status' in m.parameters) else config['status']):
+        if not (m.parameters['status'] if (
+                'status' in m.parameters) else config['status']):
             return
-    except Exception, e:
+    except Exception as e:
         if 'Could not create datasource' in str(e) \
            or 'Bad connection' in str(e):
             return m
         reporting.other_error(filename, repr(e))
         return m
 
-    sizes = config['sizes'];
+    sizes = config['sizes']
     if 'sizes' in m.parameters:
-        sizes = [[int(i) for i in size.split(',')] for size in m.parameters['sizes'].split(';')]
+        sizes = [[int(i) for i in size.split(',')]
+                 for size in m.parameters['sizes'].split(';')]
 
     for size in sizes:
         m.width, m.height = size
@@ -279,11 +320,11 @@ def render(filename, config, scale_factor, reporting):
         for renderer in renderers:
             if config.get(renderer['name'], True):
                 expected = os.path.join(dirname, renderer['dir'], '%s-%s-reference.%s' %
-                    (postfix, renderer['name'], renderer['filetype']))
+                                        (postfix, renderer['name'], renderer['filetype']))
                 actual = os.path.join(visual_output_dir, '%s-%s.%s' %
-                    (postfix, renderer['name'], renderer['filetype']))
+                                      (postfix, renderer['name'], renderer['filetype']))
                 if not quiet:
-                    print "\"%s\" with %s..." % (postfix, renderer['name']),
+                    print("\"%s\" with %s..." % (postfix, renderer['name']))
                 try:
                     renderer['render'](m, actual, scale_factor)
                     if not os.path.exists(expected):
@@ -294,28 +335,32 @@ def render(filename, config, scale_factor, reporting):
                             reporting.result_fail(actual, expected, diff)
                         else:
                             reporting.result_pass(actual, expected, diff)
-                except Exception, e:
+                except Exception as e:
                     reporting.other_error(expected, repr(e))
     return m
 
 if __name__ == "__main__":
     if '-q' in sys.argv:
-       quiet = True
-       sys.argv.remove('-q')
+        quiet = True
+        sys.argv.remove('-q')
     else:
-       quiet = False
+        quiet = False
 
     if '--overwrite' in sys.argv:
-       overwrite_failures = True
-       sys.argv.remove('--overwrite')
+        overwrite_failures = True
+        sys.argv.remove('--overwrite')
     else:
-       overwrite_failures = False
+        overwrite_failures = False
 
     files = None
     if len(sys.argv) > 1:
         files = [name + ".xml" for name in sys.argv[1:]]
     else:
-        files = [os.path.basename(file) for file in glob.glob(os.path.join(dirname, "styles/*.xml"))]
+        files = [
+            os.path.basename(file) for file in glob.glob(
+                os.path.join(
+                    dirname,
+                    "styles/*.xml"))]
 
     if not os.path.exists(visual_output_dir):
         os.makedirs(visual_output_dir)
